
Merge pull request #4 from apache/dev

regular update local dev
Branch: pull/3/MERGE
Author: Zhou.Z (4 years ago), committed by GitHub
Commit: 871be17b86
  1. .github/workflows/ci_ut.yml (27 changed lines)
  2. .gitignore (1 changed line)
  3. docker/build/Dockerfile (2 changed lines)
  4. docker/build/README.md (4 changed lines)
  5. docker/build/README_zh_CN.md (4 changed lines)
  6. docker/build/conf/dolphinscheduler/worker.properties.tpl (5 changed lines)
  7. docker/build/startup-init-conf.sh (1 changed line)
  8. docker/docker-swarm/docker-compose.yml (1 changed line)
  9. docker/docker-swarm/docker-stack.yml (1 changed line)
  10. docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml (1 changed line)
  11. docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml (6 changed lines)
  12. docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml (6 changed lines)
  13. docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml (11 changed lines)
  14. docker/kubernetes/dolphinscheduler/values.yaml (1 changed line)
  15. dolphinscheduler-api/pom.xml (6 changed lines)
  16. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java (30 changed lines)
  17. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java (25 changed lines)
  18. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (60 changed lines)
  19. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java (2 changed lines)
  20. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java (20 changed lines)
  21. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java (30 changed lines)
  22. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (24 changed lines)
  23. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java (5 changed lines)
  24. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java (14 changed lines)
  25. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java (126 changed lines)
  26. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java (12 changed lines)
  27. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java (54 changed lines)
  28. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java (17 changed lines)
  29. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java (317 changed lines)
  30. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java (19 changed lines)
  31. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java (12 changed lines)
  32. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java (127 changed lines)
  33. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java (8 changed lines)
  34. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java (1368 changed lines)
  35. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java (95 changed lines)
  36. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java (328 changed lines)
  37. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java (8 changed lines)
  38. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java (12 changed lines)
  39. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java (6 changed lines)
  40. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java (110 changed lines)
  41. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java (2 changed lines)
  42. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java (12 changed lines)
  43. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java (6 changed lines)
  44. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java (73 changed lines)
  45. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java (2 changed lines)
  46. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java (186 changed lines)
  47. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java (384 changed lines)
  48. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java (146 changed lines)
  49. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (1696 changed lines)
  50. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java (443 changed lines)
  51. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java (158 changed lines)
  52. dolphinscheduler-api/src/main/resources/i18n/messages.properties (8 changed lines)
  53. dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties (6 changed lines)
  54. dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties (6 changed lines)
  55. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java (59 changed lines)
  56. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java (83 changed lines)
  57. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java (17 changed lines)
  58. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java (109 changed lines)
  59. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java (2 changed lines)
  60. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java (50 changed lines)
  61. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java (76 changed lines)
  62. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java (63 changed lines)
  63. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java (2 changed lines)
  64. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java (44 changed lines)
  65. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (378 changed lines)
  66. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java (82 changed lines)
  67. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java (175 changed lines)
  68. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java (34 changed lines)
  69. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java (11 changed lines)
  70. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java (37 changed lines)
  71. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java (57 changed lines)
  72. dolphinscheduler-common/pom.xml (5 changed lines)
  73. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (3 changed lines)
  74. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java (4 changed lines)
  75. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (7 changed lines)
  76. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java (76 changed lines)
  77. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java (156 changed lines)
  78. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java (4 changed lines)
  79. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java (27 changed lines)
  80. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java (55 changed lines)
  81. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java (46 changed lines)
  82. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java (1 changed line)
  83. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java (5 changed lines)
  84. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java (39 changed lines)
  85. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java (101 changed lines)
  86. dolphinscheduler-dist/release-docs/LICENSE (1 changed line)
  87. dolphinscheduler-dist/release-docs/NOTICE (7 changed lines)
  88. dolphinscheduler-dist/release-docs/licenses/LICENSE-presto-jdbc.txt (201 changed lines)
  89. dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java (63 changed lines)
  90. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java (2 changed lines)
  91. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java (7 changed lines)
  92. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java (2 changed lines)
  93. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java (2 changed lines)
  94. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java (45 changed lines)
  95. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java (120 changed lines)
  96. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java (2 changed lines)
  97. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java (12 changed lines)
  98. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java (28 changed lines)
  99. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java (24 changed lines)
  100. dolphinscheduler-server/src/main/resources/worker.properties (3 changed lines)
Some files were not shown because too many files have changed in this diff.

.github/workflows/ci_ut.yml (27 changed lines)

@@ -91,3 +91,30 @@ jobs:
mkdir -p ${LOG_DIR}
docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt
continue-on-error: true
Checkstyle:
name: Check code style
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
# In the checkout@v2, it doesn't support git submodule. Execute the commands manually.
- name: checkout submodules
shell: bash
run: |
git submodule sync --recursive
git -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: check code style
env:
WORKDIR: ./
REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CHECKSTYLE_CONFIG: style/checkstyle.xml
REVIEWDOG_VERSION: v0.10.2
run: |
wget -O - -q https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.22/checkstyle-8.22-all.jar > /opt/checkstyle.jar
wget -O - -q https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh | sh -s -- -b /opt ${REVIEWDOG_VERSION}
java -jar /opt/checkstyle.jar "${WORKDIR}" -c "${CHECKSTYLE_CONFIG}" -f xml \
| /opt/reviewdog -f=checkstyle \
-reporter="${INPUT_REPORTER:-github-pr-check}" \
-filter-mode="${INPUT_FILTER_MODE:-added}" \
-fail-on-error="${INPUT_FAIL_ON_ERROR:-false}"

.gitignore (1 changed line)

@@ -19,6 +19,7 @@ third-party-dependencies.txt
*.iws
*.tgz
.*.swp
.factorypath
.vim
.tmp
**/node_modules

docker/build/Dockerfile (2 changed lines)

@@ -27,7 +27,7 @@ ENV DEBIAN_FRONTEND noninteractive
#If installation is slow, you can replace alpine's mirror with aliyun's mirror, for example:
#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
RUN apk update && \
apk add dos2unix shadow bash openrc python python3 sudo vim wget iputils net-tools openssh-server py2-pip tini && \
apk --update add --no-cache dos2unix shadow bash openrc python2 python3 sudo vim wget iputils net-tools openssh-server py-pip tini && \
apk add --update procps && \
openrc boot && \
pip install kazoo

docker/build/README.md (4 changed lines)

@@ -238,6 +238,10 @@ This environment variable sets max cpu load avg for `worker-server`. The default
This environment variable sets reserved memory for `worker-server`. The default value is `0.1`.
**`WORKER_WEIGHT`**
This environment variable sets the weight for `worker-server`. The default value is `100`.
**`WORKER_LISTEN_PORT`**
This environment variable sets port for `worker-server`. The default value is `1234`.
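As a concrete illustration, here is a minimal sketch of how the two variables documented above could be passed to a worker container in a docker-compose file. Only the variable names and their defaults (WORKER_LISTEN_PORT=1234, WORKER_WEIGHT=100) come from this change; the service name and image tag below are illustrative assumptions.

# Hypothetical compose snippet; service name and image tag are assumptions,
# while the environment keys and defaults follow the README section above.
services:
  dolphinscheduler-worker:
    image: apache/dolphinscheduler:latest
    environment:
      WORKER_LISTEN_PORT: "1234"   # port the worker-server listens on (default 1234)
      WORKER_WEIGHT: "100"         # worker weight (default 100)
      WORKER_GROUP: "default"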

docker/build/README_zh_CN.md (4 changed lines)

@@ -238,6 +238,10 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些
配置`worker-server`的保留内存,默认值 `0.1`
**`WORKER_WEIGHT`**
配置`worker-server`的权重,默认之`100`。
**`WORKER_LISTEN_PORT`**
配置`worker-server`的端口,默认值 `1234`

docker/build/conf/dolphinscheduler/worker.properties.tpl (5 changed lines)

@@ -34,4 +34,7 @@ worker.reserved.memory=${WORKER_RESERVED_MEMORY}
#worker.listen.port=${WORKER_LISTEN_PORT}
# default worker group
#worker.group=${WORKER_GROUP}
#worker.groups=${WORKER_GROUP}
# default worker weight
#worker.weight=${WORKER_WEIGHT}

docker/build/startup-init-conf.sh (1 changed line)

@@ -74,6 +74,7 @@ export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"}
export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"}
export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"}
export WORKER_GROUP=${WORKER_GROUP:-"default"}
export WORKER_WEIGHT=${WORKER_WEIGHT:-"100"}
#============================================================================
# Alert Server

docker/docker-swarm/docker-compose.yml (1 changed line)

@@ -187,6 +187,7 @@ services:
WORKER_MAX_CPULOAD_AVG: "100"
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432

docker/docker-swarm/docker-stack.yml (1 changed line)

@@ -187,6 +187,7 @@ services:
WORKER_MAX_CPULOAD_AVG: "100"
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432

docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml (1 changed line)

@@ -31,6 +31,7 @@ data:
WORKER_RESERVED_MEMORY: {{ .Values.worker.configmap.WORKER_RESERVED_MEMORY | quote }}
WORKER_LISTEN_PORT: {{ .Values.worker.configmap.WORKER_LISTEN_PORT | quote }}
WORKER_GROUP: {{ .Values.worker.configmap.WORKER_GROUP | quote }}
WORKER_WEIGHT: {{ .Values.worker.configmap.WORKER_WEIGHT | quote }}
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: {{ include "dolphinscheduler.worker.base.dir" . | quote }}
dolphinscheduler_env.sh: |-
{{- range .Values.worker.configmap.DOLPHINSCHEDULER_ENV }}

docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml (6 changed lines)

@@ -162,6 +162,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml (6 changed lines)

@@ -228,6 +228,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml (11 changed lines)

@@ -162,6 +162,11 @@ spec:
configMapKeyRef:
name: {{ include "dolphinscheduler.fullname" . }}-worker
key: WORKER_GROUP
- name: WORKER_WEIGHT
valueFrom:
configMapKeyRef:
name: {{ include "dolphinscheduler.fullname" . }}-worker
key: WORKER_WEIGHT
- name: DOLPHINSCHEDULER_DATA_BASEDIR_PATH
valueFrom:
configMapKeyRef:
@@ -225,6 +230,12 @@ spec:
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperQuorum }}
{{- end }}
- name: ZOOKEEPER_ROOT
{{- if .Values.zookeeper.enabled }}
value: "/dolphinscheduler"
{{- else }}
value: {{ .Values.externalZookeeper.zookeeperRoot }}
{{- end }}
- name: RESOURCE_STORAGE_TYPE
valueFrom:
configMapKeyRef:

docker/kubernetes/dolphinscheduler/values.yaml (1 changed line)

@@ -201,6 +201,7 @@ worker:
WORKER_RESERVED_MEMORY: "0.1"
WORKER_LISTEN_PORT: "1234"
WORKER_GROUP: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler"
DOLPHINSCHEDULER_ENV:
- "export HADOOP_HOME=/opt/soft/hadoop"

dolphinscheduler-api/pom.xml (6 changed lines)

@@ -152,6 +152,10 @@
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -244,4 +248,4 @@
</dependency>
</dependencies>
</project>
</project>

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java (30 changed lines)

@@ -17,6 +17,12 @@
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GENERATE_TOKEN_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ACCESSTOKEN_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ACCESS_TOKEN_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
@@ -24,20 +30,26 @@ import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.util.Map;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
* access token controller

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java (25 changed lines)

@@ -17,25 +17,34 @@
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
@@ -70,7 +79,7 @@ public class LoggerController extends BaseController {
@GetMapping(value = "/detail")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_TASK_INSTANCE_LOG_ERROR)
public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
public Result<String> queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "taskInstanceId") int taskInstanceId,
@RequestParam(value = "skipLineNum") int skipNum,
@RequestParam(value = "limit") int limit) {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java (60 changed lines)

@@ -95,27 +95,65 @@ public class ProcessDefinitionController extends BaseController {
}
/**
* copy process definition
* copy process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return copy result code
*/
@ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
})
@PostMapping(value = "/copy")
@ResponseStatus(HttpStatus.OK)
@ApiException(COPY_PROCESS_DEFINITION_ERROR)
@ApiException(BATCH_COPY_PROCESS_DEFINITION_ERROR)
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId) throws JsonProcessingException {
logger.info("copy process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.copyProcessDefinition(loginUser, projectName, processId);
return returnDataList(result);
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchCopyProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
}
/**
* move process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return move result code
*/
@ApiOperation(value = "moveProcessDefinition", notes= "MOVE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer")
})
@PostMapping(value = "/move")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_MOVE_PROCESS_DEFINITION_ERROR)
public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId",required = true) int targetProjectId) {
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchMoveProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId));
}
/**
@@ -365,7 +403,7 @@ public class ProcessDefinitionController extends BaseController {
public Result getNodeListByDefinitionIdList(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIdList") String processDefinitionIdList) throws Exception {
@RequestParam("processDefinitionIdList") String processDefinitionIdList) {
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}",
loginUser.getUserName(), projectName, processDefinitionIdList);
@@ -420,7 +458,7 @@ public class ProcessDefinitionController extends BaseController {
logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processDefinitionIds)) {
String[] processDefinitionIdArray = processDefinitionIds.split(",");

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java (2 changed lines)

@@ -370,7 +370,7 @@ public class ProcessInstanceController extends BaseController {
logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}",
loginUser.getUserName(), projectName, processInstanceIds);
// task queue
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processInstanceIds)) {
String[] processInstanceIdArray = processInstanceIds.split(",");

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java (20 changed lines)

@@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
@@ -226,6 +227,25 @@ public class ProjectController extends BaseController {
return returnDataList(result);
}
/**
* query user created project
*
* @param loginUser login user
* @return projects which the user create
*/
@ApiOperation(value = "queryProjectCreatedByUser", notes = "QUERY_USER_CREATED_PROJECT_NOTES")
@GetMapping(value = "/login-user-created-project")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_USER_CREATED_PROJECT_ERROR)
public Result queryProjectCreatedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
logger.info("login user {}, query authorized project by user id: {}.",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId())));
Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser);
return returnDataList(result);
}
/**
* import process definition
*

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java (30 changed lines)

@@ -432,14 +432,34 @@ public class UsersController extends BaseController {
@RequestParam(value = "userPassword") String userPassword,
@RequestParam(value = "repeatPassword") String repeatPassword,
@RequestParam(value = "email") String email) throws Exception {
userName = userName.replaceAll("[\n|\r|\t]", "");
userPassword = userPassword.replaceAll("[\n|\r|\t]", "");
repeatPassword = repeatPassword.replaceAll("[\n|\r|\t]", "");
email = email.replaceAll("[\n|\r|\t]", "");
userName = ParameterUtils.handleEscapes(userName);
userPassword = ParameterUtils.handleEscapes(userPassword);
repeatPassword = ParameterUtils.handleEscapes(repeatPassword);
email = ParameterUtils.handleEscapes(email);
logger.info("user self-register, userName: {}, userPassword {}, repeatPassword {}, eamil {}",
userName, userPassword, repeatPassword, email);
userName, Constants.PASSWORD_DEFAULT, Constants.PASSWORD_DEFAULT, email);
Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
return returnDataList(result);
}
/**
* user activate
*
* @param userName user name
*/
@ApiOperation(value="activateUser",notes = "ACTIVATE_USER_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"),
})
@PostMapping("/activate")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_USER_ERROR)
public Result<Object> activateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "userName") String userName) {
userName = ParameterUtils.handleEscapes(userName);
logger.info("login user {}, activate user, userName: {}",
loginUser.getUserName(), userName);
Map<String, Object> result = usersService.activateUser(loginUser, userName);
return returnDataList(result);
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java (24 changed lines)

@@ -168,15 +168,21 @@ public enum Status {
PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"),
QUERY_WORKFLOW_LINEAGE_ERROR(10143,"query workflow lineage error", "查询血缘失败"),
COPY_PROCESS_DEFINITION_ERROR(10148,"copy process definition error", "复制工作流错误"),
USER_DISABLED(10149,"The current user is disabled", "当前用户已停用"),
DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"),
USER_DISABLED(10148,"The current user is disabled", "当前用户已停用"),
COPY_PROCESS_DEFINITION_ERROR(10149,"copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
MOVE_PROCESS_DEFINITION_ERROR(10150,"move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
QUERY_USER_CREATED_PROJECT_ERROR(10151,"query user created project error error", "查询用户创建的项目错误"),
PROCESS_DEFINITION_IDS_IS_EMPTY(10152,"process definition ids is empty", "工作流IDS不能为空"),
BATCH_COPY_PROCESS_DEFINITION_ERROR(10153,"batch copy process definition error", "复制工作流错误"),
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10154,"batch move process definition error", "移动工作流错误"),
QUERY_WORKFLOW_LINEAGE_ERROR(10155,"query workflow lineage error", "查询血缘失败"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java (5 changed lines)

@@ -18,17 +18,18 @@ package org.apache.dolphinscheduler.api.exceptions;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.method.HandlerMethod;
/**
* Exception Handler
*/
@ControllerAdvice
@RestControllerAdvice
@ResponseBody
public class ApiExceptionHandler {

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java (14 changed lines)

@@ -16,32 +16,28 @@
*/
package org.apache.dolphinscheduler.api.interceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.security.Authenticator;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.HandlerInterceptor;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* login interceptor, must login first
*/
public class LoginHandlerInterceptor implements HandlerInterceptor {
private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class);
@Autowired
private SessionService sessionService;
@Autowired
private UserMapper userMapper;

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java (126 changed lines)

@@ -16,35 +16,14 @@
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.Map;
/**
* user service
* access token service
*/
@Service
public class AccessTokenService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenService.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
public interface AccessTokenService {
/**
* query access token list
@@ -55,123 +34,44 @@ public class AccessTokenService extends BaseService {
* @param pageSize page size
* @return token list for page number and page size
*/
public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<AccessToken> page = new Page(pageNo, pageSize);
int userId = loginUser.getId();
if (loginUser.getUserType() == UserType.ADMIN_USER){
userId = 0;
}
IPage<AccessToken> accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId);
pageInfo.setTotalCount((int)accessTokenList.getTotal());
pageInfo.setLists(accessTokenList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize);
/**
* create token
*
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return create result code
*/
public Map<String, Object> createToken(int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
if (userId <= 0) {
throw new IllegalArgumentException("User id should not less than or equals to 0.");
}
AccessToken accessToken = new AccessToken();
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setCreateTime(new Date());
accessToken.setUpdateTime(new Date());
// insert
int insert = accessTokenMapper.insert(accessToken);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR);
}
return result;
}
Map<String, Object> createToken(int userId, String expireTime, String token);
/**
* generate token
*
* @param userId token for user
* @param expireTime token expire time
* @return token string
*/
public Map<String, Object> generateToken(int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5);
String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis()));
result.put(Constants.DATA_LIST, token);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> generateToken(int userId, String expireTime);
/**
* delete access token
* delete access token
*
* @param loginUser login user
* @param id token id
* @return delete result code
*/
public Map<String, Object> delAccessTokenById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
if (loginUser.getId() != accessToken.getUserId() &&
loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
accessTokenMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> delAccessTokenById(User loginUser, int id);
/**
* update token by id
*
* @param id token id
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return update result code
*/
public Map<String, Object> updateToken(int id,int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setUpdateTime(new Date());
accessTokenMapper.updateById(accessToken);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> updateToken(int id, int userId, String expireTime, String token);
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java (12 changed lines)

@@ -55,7 +55,7 @@ public class AlertGroupService extends BaseService{
*/
public HashMap<String, Object> queryAlertgroup() {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
result.put(Constants.DATA_LIST, alertGroups);
putMsg(result, Status.SUCCESS);
@@ -74,7 +74,7 @@ public class AlertGroupService extends BaseService{
*/
public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@@ -101,7 +101,7 @@ public class AlertGroupService extends BaseService{
* @return create result code
*/
public Map<String, Object> createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (checkAdmin(loginUser, result)){
return result;
@@ -138,7 +138,7 @@ public class AlertGroupService extends BaseService{
* @return update result code
*/
public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)){
return result;
@@ -179,7 +179,7 @@ public class AlertGroupService extends BaseService{
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> delAlertgroupById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@@ -209,7 +209,7 @@ public class AlertGroupService extends BaseService{
* @return grant result code
*/
public Map<String, Object> grantUser(User loginUser, int alertgroupId, String userIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java (54 changed lines)

@@ -1,54 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import java.util.List;
/**
* base DAG service
*/
public class BaseDAGService extends BaseService{
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
public static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java (17 changed lines)

@@ -16,6 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.text.MessageFormat;
import java.util.Map;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
@@ -24,11 +30,6 @@ import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.text.MessageFormat;
import java.util.Map;
/**
* base service
*/
@@ -96,6 +97,7 @@ public class BaseService {
/**
* get cookie info by name
*
* @param request request
* @param name 'sessionId'
* @return get cookie info
@@ -115,10 +117,11 @@ public class BaseService {
/**
* create tenant dir if not exists
*
* @param tenantCode tenant code
* @throws Exception if hdfs operation exception
*/
protected void createTenantDirIfNotExists(String tenantCode)throws Exception{
protected void createTenantDirIfNotExists(String tenantCode) throws Exception {
String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
@@ -129,7 +132,7 @@ public class BaseService {
HadoopUtils.getInstance().mkdir(udfsPath);
}
protected boolean hasPerm(User operateUser, int createUserId){
protected boolean hasPerm(User operateUser, int createUserId) {
return operateUser.getId() == createUserId || isAdmin(operateUser);
}
}

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java (317 changed lines)

@@ -17,57 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.apache.dolphinscheduler.dao.entity.User;
import java.text.MessageFormat;
import java.util.*;
import java.util.Map;
/**
* data analysis service
*/
@Service
public class DataAnalysisService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class);
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefinitionMapper;
@Autowired
CommandMapper commandMapper;
@Autowired
ErrorCommandMapper errorCommandMapper;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessService processService;
public interface DataAnalysisService {
/**
* statistical task instance status data
@@ -78,46 +35,7 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return task state count data
*/
public Map<String,Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> taskInstanceStateCounts =
taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds);
if (taskInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate);
/**
* statistical process instance status data
@@ -128,37 +46,7 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return process instance state count data
*/
public Map<String,Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
processInstanceMapper.countInstanceStateByUser(start, end,
projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate);
/**
* statistics the process definition quantities of certain person
@@ -167,20 +55,7 @@ public class DataAnalysisService extends BaseService{
* @param projectId project id
* @return definition count data
*/
public Map<String,Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray,isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> countDefinitionByUser(User loginUser, int projectId);
/**
* statistical command status data
@@ -191,189 +66,15 @@ public class DataAnalysisService extends BaseService{
* @param endDate end date
* @return command state count data
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null){
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
//
Map<CommandType,Map<String,Integer>> dataMap = new HashMap<>();
Map<String,Integer> commonCommand = new HashMap<>();
commonCommand.put("commandState",0);
commonCommand.put("errorCommandState",0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD;
*/
dataMap.put(CommandType.START_PROCESS,commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA,commonCommand);
dataMap.put(CommandType.SCHEDULER,commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING,commonCommand);
dataMap.put(CommandType.PAUSE,commonCommand);
dataMap.put(CommandType.STOP,commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD,commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts){
Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put("commandState", executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts){
Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
Iterator<Map.Entry<CommandType, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
while (iterator.hasNext()){
Map.Entry<CommandType, Map<String, Integer>> next = iterator.next();
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"),
next.getValue().get("commandState"),next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
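Note that the initialisation above registers the single commonCommand map under every CommandType key and relies on the copy-on-write in the two merge loops that follow. As a sketch, an equivalent initialisation that gives each command type its own counter map from the start could look like the following (the enum values are trimmed to a few for brevity):

import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

// Standalone sketch: seed every command type with its own pair of counters,
// so the later merge loops can update the maps in place without copying.
public class CommandStateInitSketch {

    // stand-in for org.apache.dolphinscheduler.common.enums.CommandType (trimmed)
    enum CommandType { START_PROCESS, REPEAT_RUNNING, PAUSE, STOP }

    static Map<CommandType, Map<String, Integer>> initCounters() {
        Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
        for (CommandType type : CommandType.values()) {
            Map<String, Integer> counters = new HashMap<>();
            counters.put("commandState", 0);
            counters.put("errorCommandState", 0);
            dataMap.put(type, counters);   // a fresh map per key, no shared instance
        }
        return dataMap;
    }
}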
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if(projectId !=0){
projectIds.add(projectId);
}else if(loginUser.getUserType() == UserType.GENERAL_USER){
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if(projectIds.size() ==0 ){
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[projectIds.size()]);
}
Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate);
/**
* count queue state
*
* @param loginUser login user
* @param projectId project id
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String,Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER){
dataMap.put("taskQueue",tasksQueueList.size());
dataMap.put("taskKill",tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i =0;
for (String taskQueueStr : tasksQueueList){
if (StringUtils.isNotEmpty(taskQueueStr)){
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4){
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList){
if (StringUtils.isNotEmpty(taskKillStr)){
String[] splits = taskKillStr.split("-");
if (splits.length == 2){
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0){
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0){
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue",taskQueueCount);
dataMap.put("taskKill",taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> countQueueState(User loginUser, int projectId);
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result){
if(projectId != 0){
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
}
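This is the pattern applied across the whole patch: the class under api/service keeps only its javadoc and method signatures as an interface, and a new @Service class under api/service/impl (AccessTokenServiceImpl further down is the fully visible case) receives the old method bodies. A minimal self-contained sketch of that shape, with hypothetical names:

package org.apache.dolphinscheduler.api.service.impl;

import java.util.HashMap;
import java.util.Map;

import org.springframework.stereotype.Service;

// Hypothetical miniature of the interface-plus-impl split used in this patch;
// the names are illustrative, not taken from the repository.
interface DemoAnalysisService {
    Map<String, Object> countSomething(int projectId);
}

@Service
class DemoAnalysisServiceImpl implements DemoAnalysisService {

    @Override
    public Map<String, Object> countSomething(int projectId) {
        Map<String, Object> result = new HashMap<>();
        result.put("projectId", projectId);   // the old service-body logic lives here now
        return result;
    }
}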

19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java

@ -88,7 +88,7 @@ public class DataSourceService extends BaseService{
*/
public Map<String, Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
// check whether the name can be used
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
@ -249,6 +249,7 @@ public class DataSourceService extends BaseService{
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
case PRESTO:
separator = "&";
break;
default:
@ -340,7 +341,7 @@ public class DataSourceService extends BaseService{
* @return data source list page
*/
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
@ -430,6 +431,10 @@ public class DataSourceService extends BaseService{
datasource = JSONUtils.parseObject(parameter, DB2ServerDataSource.class);
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
case PRESTO:
datasource = JSONUtils.parseObject(parameter, PrestoDataSource.class);
Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);
break;
default:
break;
}
@ -513,7 +518,8 @@ public class DataSourceService extends BaseService{
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())) {
|| Constants.ORACLE.equals(type.name())
|| Constants.PRESTO.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
@ -585,9 +591,12 @@ public class DataSourceService extends BaseService{
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
}else if (Constants.DB2.equals(type.name())) {
} else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
} else if (Constants.PRESTO.equals(type.name())) {
sb.append(Constants.JDBC_PRESTO);
sb.append(host).append(":").append(port);
}
return sb.toString();
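With the PRESTO branch added above, the address is assembled the same way as for the other drivers and "&" is used between extra parameters. A rough, self-contained sketch of the resulting URL shape (the jdbc:presto:// prefix is an assumption about Constants.JDBC_PRESTO, and the host, catalog and parameters are made-up values):

// Illustrative only: mirrors the StringBuilder assembly above for a Presto source.
public class PrestoUrlSketch {
    public static void main(String[] args) {
        String jdbcPrefix = "jdbc:presto://";     // assumed value of Constants.JDBC_PRESTO
        String host = "presto-coordinator";
        int port = 8080;
        String database = "hive/default";         // catalog/schema portion
        String other = "SSL=true";                // extra params, joined with "&" per the separator above

        StringBuilder sb = new StringBuilder(jdbcPrefix);
        sb.append(host).append(":").append(port);
        sb.append("/").append(database);
        if (other != null && !other.isEmpty()) {
            sb.append("?").append(other);
        }
        System.out.println(sb);   // jdbc:presto://presto-coordinator:8080/hive/default?SSL=true
    }
}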
@ -674,7 +683,7 @@ public class DataSourceService extends BaseService{
* @return authorized result code
*/
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java

@ -98,7 +98,7 @@ public class ExecutorService extends BaseService{
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
String receivers, String receiversCc, RunMode runMode,
Priority processInstancePriority, String workerGroup, Integer timeout) throws ParseException {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR);
@ -176,7 +176,7 @@ public class ExecutorService extends BaseService{
* @return check result code
*/
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId);
@ -201,7 +201,7 @@ public class ExecutorService extends BaseService{
* @return execute result code
*/
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project);
@ -294,7 +294,7 @@ public class ExecutorService extends BaseService{
*/
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
ExecutionStatus executionStatus = processInstance.getState();
boolean checkResult = false;
switch (executeType) {
@ -339,7 +339,7 @@ public class ExecutorService extends BaseService{
* @return update result
*/
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
processInstance.setCommandType(commandType);
processInstance.addHistoryCmd(commandType);
@ -365,7 +365,7 @@ public class ExecutorService extends BaseService{
* @return insert result code
*/
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Command command = new Command();
command.setCommandType(commandType);
command.setProcessDefinitionId(processDefinitionId);
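Nearly every hunk in this file, and in most of the service files below, replaces new HashMap<>(5) with the default constructor. The explicit 5 never meant "room for five entries", as the small sketch below illustrates:

import java.util.HashMap;
import java.util.Map;

// new HashMap<>(5) rounds the capacity up to the next power of two (8) and still
// resizes once size exceeds capacity * loadFactor (8 * 0.75 = 6), i.e. on the 7th put.
// For the handful of keys these result maps carry, the default constructor is clearer.
public class ResultMapSketch {
    public static void main(String[] args) {
        Map<String, Object> result = new HashMap<>();
        result.put("status", "SUCCESS");
        result.put("msg", "operation succeeded");
        System.out.println(result);
    }
}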

127
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java

@ -16,117 +16,30 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.nio.charset.StandardCharsets;
import javax.annotation.PreDestroy;
import org.apache.commons.lang.ArrayUtils;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* log service
*/
@Service
public class LoggerService {
public interface LoggerService {
/**
* view log
*
* @param taskInstId task instance id
* @param skipLineNum skip line number
* @param limit limit
* @return log string data
*/
Result<String> queryLog(int taskInstId, int skipLineNum, int limit);
/**
* get log size
*
* @param taskInstId task instance id
* @return log byte array
*/
byte[] getLogBytes(int taskInstId);
private static final Logger logger = LoggerFactory.getLogger(LoggerService.class);
private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";
@Autowired
private ProcessService processService;
private final LogClientService logClient;
public LoggerService() {
logClient = new LogClientService();
}
@PreDestroy
public void close() {
logClient.close();
}
/**
* view log
*
* @param taskInstId task instance id
* @param skipLineNum skip line number
* @param limit limit
* @return log string data
*/
public Result queryLog(int taskInstId, int skipLineNum, int limit) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
return Result.error(Status.TASK_INSTANCE_NOT_FOUND);
}
String host = getHost(taskInstance.getHost());
Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(),
Constants.RPC_PORT);
StringBuilder log = new StringBuilder();
if (skipLineNum == 0) {
String head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR);
log.append(head);
}
log.append(logClient
.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit));
result.setData(log);
return result;
}
/**
* get log size
*
* @param taskInstId task instance id
* @return log byte array
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
throw new RuntimeException("task instance is null or host is null");
}
String host = getHost(taskInstance.getHost());
byte[] head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8);
return ArrayUtils.addAll(head,
logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
}
/**
* get host
*
* @param address address
* @return the host to contact: the address itself for old-format registrations, otherwise the IP parsed from it
*/
private String getHost(String address) {
if (Host.isOldVersion(address)) {
return address;
}
return Host.of(address).getIp();
}
}
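Both queryLog and getLogBytes above prepend the same LOG_HEAD_FORMAT header before the remote log content; a standalone sketch of what that header looks like (the path and host values are made up, and the platform line separator stands in for Constants.SYSTEM_LINE_SEPARATOR):

import java.nio.charset.StandardCharsets;

// Mirrors the header built when skipLineNum == 0 and the byte header in getLogBytes.
public class LogHeadSketch {
    private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";

    public static void main(String[] args) {
        String logPath = "/tmp/dolphinscheduler/logs/1/100/200.log";   // illustrative path
        String host = "192.168.1.10";                                  // illustrative host
        String head = String.format(LOG_HEAD_FORMAT, logPath, host, System.lineSeparator());
        byte[] headBytes = head.getBytes(StandardCharsets.UTF_8);      // the bytes getLogBytes prepends
        System.out.print(head + headBytes.length + " header bytes");
    }
}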

8
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java

@ -56,7 +56,7 @@ public class MonitorService extends BaseService {
* @return data base state
*/
public Map<String,Object> queryDatabaseState(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<MonitorRecord> monitorRecordList = monitorDBDao.queryDatabaseState();
@ -75,7 +75,7 @@ public class MonitorService extends BaseService {
*/
public Map<String,Object> queryMaster(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<Server> masterServers = getServerListFromZK(true);
result.put(Constants.DATA_LIST, masterServers);
@ -91,7 +91,7 @@ public class MonitorService extends BaseService {
* @return zookeeper information list
*/
public Map<String,Object> queryZookeeperState(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<ZookeeperRecord> zookeeperRecordList = zookeeperMonitor.zookeeperInfoList();
@ -111,7 +111,7 @@ public class MonitorService extends BaseService {
*/
public Map<String,Object> queryWorker(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<WorkerServerModel> workerServers = getServerListFromZK(false)
.stream()
.map((Server server) -> {

1368
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java

File diff suppressed because it is too large

95
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java

@ -16,8 +16,28 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
@ -31,14 +51,26 @@ import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -46,22 +78,14 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.*;
import java.util.stream.Collectors;
import static org.apache.dolphinscheduler.common.Constants.*;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service
*/
@Service
public class ProcessInstanceService extends BaseDAGService {
public class ProcessInstanceService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class);
@ -154,7 +178,7 @@ public class ProcessInstanceService extends BaseDAGService {
* @return process instance detail
*/
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -167,7 +191,7 @@ public class ProcessInstanceService extends BaseDAGService {
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setReceivers(processDefinition.getReceivers());
processInstance.setReceiversCc(processDefinition.getReceiversCc());
result.put(Constants.DATA_LIST, processInstance);
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
return result;
@ -193,7 +217,7 @@ public class ProcessInstanceService extends BaseDAGService {
String searchVal, String executorName,ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -242,7 +266,7 @@ public class ProcessInstanceService extends BaseDAGService {
pageInfo.setTotalCount((int) processInstanceList.getTotal());
pageInfo.setLists(processInstances);
result.put(Constants.DATA_LIST, pageInfo);
result.put(DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
@ -273,7 +297,7 @@ public class ProcessInstanceService extends BaseDAGService {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(Constants.DATA_LIST, resultMap);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
@ -362,7 +386,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("subProcessInstanceId", subWorkflowInstance.getId());
result.put(Constants.DATA_LIST, dataMap);
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
@ -501,7 +525,7 @@ public class ProcessInstanceService extends BaseDAGService {
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId());
result.put(Constants.DATA_LIST, dataMap);
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
@ -516,7 +540,7 @@ public class ProcessInstanceService extends BaseDAGService {
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -556,7 +580,7 @@ public class ProcessInstanceService extends BaseDAGService {
* @return variables data
*/
public Map<String, Object> viewVariables(Integer processInstanceId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
@ -618,7 +642,7 @@ public class ProcessInstanceService extends BaseDAGService {
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(Constants.DATA_LIST, resultMap);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
@ -668,9 +692,28 @@ public class ProcessInstanceService extends BaseDAGService {
}
ganttDto.setTasks(taskList);
result.put(Constants.DATA_LIST, ganttDto);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
}

328
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java

@ -16,45 +16,15 @@
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import java.util.Map;
/**
* project service
*/
@Service
public class ProjectService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(ProjectService.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
public interface ProjectService {
/**
* create project
@ -64,38 +34,7 @@ public class ProjectService extends BaseService{
* @param desc description
* @return create result code; an error is returned if a project with the same name already exists
*/
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
project = new Project();
Date now = new Date();
project.setName(name);
project.setDescription(desc);
project.setUserId(loginUser.getId());
project.setUserName(loginUser.getUserName());
project.setCreateTime(now);
project.setUpdateTime(now);
if (projectMapper.insert(project) > 0) {
Project insertedProject = projectMapper.queryByName(name);
result.put(Constants.DATA_LIST, insertedProject);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
Map<String, Object> createProject(User loginUser, String name, String desc);
/**
* query project details by id
@ -103,19 +42,7 @@ public class ProjectService extends BaseService{
* @param projectId project id
* @return project detail information
*/
public Map<String, Object> queryById(Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId);
}
return result;
}
Map<String, Object> queryById(Integer projectId);
/**
* check project and authorization
@ -125,30 +52,9 @@ public class ProjectService extends BaseService{
* @param projectName project name
* @return true if the login user has permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>(5);
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName);
}else {
putMsg(result, Status.SUCCESS);
}
return result;
}
Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName);
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result);
/**
* admin can view all projects
@ -159,29 +65,7 @@ public class ProjectService extends BaseService{
* @param pageNo page number
* @return project list which the login user has permission to see
*/
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Map<String, Object> result = new HashMap<>();
PageInfo pageInfo = new PageInfo<Project>(pageNo, pageSize);
Page<Project> page = new Page(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if(userId != 0){
for (Project project : projectList) {
project.setPerm(org.apache.dolphinscheduler.common.Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotalCount((int)projectIPage.getTotal());
pageInfo.setLists(projectList);
result.put(Constants.COUNT, (int)projectIPage.getTotal());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal);
/**
* delete project by id
@ -190,50 +74,7 @@ public class ProjectService extends BaseService{
* @param projectId project id
* @return delete result code
*/
public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId);
if(processDefinitionList.size() > 0){
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(projectId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
String projectName = project == null ? null:project.getName();
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
Map<String, Object> deleteProject(User loginUser, Integer projectId);
/**
* update project
@ -244,37 +85,7 @@ public class ProjectService extends BaseService{
* @param desc description
* @return update result code
*/
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.selectById(projectId);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getId() != projectId) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc);
/**
* query unauthorized project
@ -283,48 +94,7 @@ public class ProjectService extends BaseService{
* @param userId user id
* @return the projects which the user has no permission to see
*/
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
/**
* query all project list except specified userId
*/
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet = null;
if (projectList != null && projectList.size() > 0) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return project list without authorization
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet = null;
if (authedProjectList != null && authedProjectList.size() > 0) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId);
/**
* query authorized project
@ -333,83 +103,21 @@ public class ProjectService extends BaseService{
* @param userId user id
* @return projects which the user has permission to see, except those created by this user
*/
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId);
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user has permission to see the project, otherwise false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
return (permissionId & Constants.READ_PERMISSION) != 0;
}
/**
* query permission id
* query authorized project
*
* @param user user
* @param project project
* @return permission
* @param loginUser login user
* @return projects which the user has permission to see, except those created by this user
*/
private int queryPermission(User user, Project project) {
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
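checkReadPermission and queryPermission above resolve a permission bitmask and test the read bit; a standalone sketch of that check (the concrete constant values are assumptions for illustration, not copied from Constants):

// Assumed bit layout for illustration: read = 2, plus an "all permissions" value with
// the read bit set; only the bitwise test itself is taken from the code above.
public class ReadPermissionSketch {
    static final int READ_PERMISSION = 2;
    static final int ALL_PERMISSIONS = 7;

    static boolean canRead(int permissionId) {
        return (permissionId & READ_PERMISSION) != 0;
    }

    public static void main(String[] args) {
        System.out.println(canRead(ALL_PERMISSIONS));  // true  - project owner or admin
        System.out.println(canRead(0));                // false - no relation to the project
    }
}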
Map<String, Object> queryProjectCreatedByUser(User loginUser);
/**
* query all project list that have one or more process definitions.
*
* @return project list
*/
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.selectList(null);
List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
if(projects != null){
Set set = new HashSet<>();
for (ProcessDefinition processDefinition : processDefinitions){
set.add(processDefinition.getProjectId());
}
List<Project> tempDeletelist = new ArrayList<Project>();
for (Project project : projects) {
if(!set.contains(project.getId())){
tempDeletelist.add(project);
}
}
projects.removeAll(tempDeletelist);
}
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
Map<String, Object> queryAllProjectList();
}
}

8
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java

@ -59,7 +59,7 @@ public class QueueService extends BaseService {
* @return queue list
*/
public Map<String, Object> queryList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -81,7 +81,7 @@ public class QueueService extends BaseService {
* @return queue list
*/
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -110,7 +110,7 @@ public class QueueService extends BaseService {
* @return create result
*/
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -159,7 +159,7 @@ public class QueueService extends BaseService {
* @return update result code
*/
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java

@ -402,7 +402,7 @@ public class ResourcesService extends BaseService {
putMsg(result, Status.SUCCESS);
Map<Object, Object> dataMap = new BeanMap(resource);
Map<String, Object> resultMap = new HashMap<>(5);
Map<String, Object> resultMap = new HashMap<>();
for (Map.Entry<Object, Object> entry: dataMap.entrySet()) {
if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) {
resultMap.put(entry.getKey().toString(), entry.getValue());
@ -447,7 +447,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
Page<Resource> page = new Page(pageNo, pageSize);
int userId = loginUser.getId();
if (isAdmin(loginUser)) {
@ -548,7 +548,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
int userId = loginUser.getId();
if(isAdmin(loginUser)){
@ -571,7 +571,7 @@ public class ResourcesService extends BaseService {
*/
public Map<String, Object> queryResourceJarList(User loginUser, ResourceType type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
int userId = loginUser.getId();
if(isAdmin(loginUser)){
userId = 0;
@ -1094,7 +1094,7 @@ public class ResourcesService extends BaseService {
* @return unauthorized result code
*/
public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (checkAdmin(loginUser, result)) {
return result;
@ -1146,7 +1146,7 @@ public class ResourcesService extends BaseService {
* @return authorized result
*/
public Map<String, Object> authorizedFile(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)){
return result;
}

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java

@ -452,7 +452,7 @@ public class SchedulerService extends BaseService {
* @return schedule list
*/
public Map<String, Object> queryScheduleList(User loginUser, String projectName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
@ -534,7 +534,7 @@ public class SchedulerService extends BaseService {
*/
public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -583,7 +583,7 @@ public class SchedulerService extends BaseService {
* @return the next five fire time
*/
public Map<String,Object> previewSchedule(User loginUser, String projectName, String schedule) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
CronExpression cronExpression;
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
Date now = new Date();

110
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java

@ -16,36 +16,15 @@
*/
package org.apache.dolphinscheduler.api.service;
import javax.servlet.http.HttpServletRequest;
import org.apache.dolphinscheduler.api.controller.BaseController;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.SessionMapper;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* session service
*/
@Service
public class SessionService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);
@Autowired
private SessionMapper sessionMapper;
public interface SessionService {
/**
* get user session from request
@ -53,26 +32,7 @@ public class SessionService extends BaseService{
* @param request request
* @return session
*/
public Session getSession(HttpServletRequest request) {
String sessionId = request.getHeader(Constants.SESSION_ID);
if(StringUtils.isBlank(sessionId)) {
Cookie cookie = getCookie(request, Constants.SESSION_ID);
if (cookie != null) {
sessionId = cookie.getValue();
}
}
if(StringUtils.isBlank(sessionId)) {
return null;
}
String ip = BaseController.getClientIpAddress(request);
logger.debug("get session: {}, ip: {}", sessionId, ip);
return sessionMapper.selectById(sessionId);
}
Session getSession(HttpServletRequest request);
/**
* create session
@ -81,55 +41,7 @@ public class SessionService extends BaseService{
* @param ip ip
* @return session string
*/
@Transactional(rollbackFor = RuntimeException.class)
public String createSession(User user, String ip) {
Session session = null;
// already logged in
List<Session> sessionList = sessionMapper.queryByUserId(user.getId());
Date now = new Date();
/**
* if the user is already logged in and the session is still valid, return it directly
*/
if (CollectionUtils.isNotEmpty(sessionList)) {
// if there is more than one session, delete the extras and keep the first
if (sessionList.size() > 1){
for (int i=1 ; i < sessionList.size();i++){
sessionMapper.deleteById(sessionList.get(i).getId());
}
}
session = sessionList.get(0);
if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) {
/**
* update the latest login time
*/
session.setLastLoginTime(now);
sessionMapper.updateById(session);
return session.getId();
} else {
/**
* session expired, then delete this session first
*/
sessionMapper.deleteById(session.getId());
}
}
// assign new session
session = new Session();
session.setId(UUID.randomUUID().toString());
session.setIp(ip);
session.setUserId(user.getId());
session.setLastLoginTime(now);
sessionMapper.insert(session);
return session.getId();
}
String createSession(User user, String ip);
/**
* sign out
@ -138,17 +50,5 @@ public class SessionService extends BaseService{
* @param ip no use
* @param loginUser login user
*/
public void signOut(String ip, User loginUser) {
try {
/**
* query session by user id and ip
*/
Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(),ip);
//delete session
sessionMapper.deleteById(session.getId());
}catch (Exception e){
logger.warn("userId : {} , ip : {} , find more one session",loginUser.getId(),ip);
}
}
void signOut(String ip, User loginUser);
}
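createSession above re-uses an existing session only while it is still inside the timeout window; a standalone sketch of that expiry rule (the timeout value is an illustrative assumption, the real one comes from Constants.SESSION_TIME_OUT in seconds):

import java.util.Date;

// A session is still valid while (now - lastLoginTime) <= SESSION_TIME_OUT seconds.
public class SessionExpirySketch {
    private static final long SESSION_TIME_OUT = 7200;   // seconds; illustrative value

    static boolean stillValid(Date lastLoginTime, Date now) {
        return now.getTime() - lastLoginTime.getTime() <= SESSION_TIME_OUT * 1000;
    }

    public static void main(String[] args) {
        Date now = new Date();
        Date oneHourAgo = new Date(now.getTime() - 3600 * 1000L);
        System.out.println(stillValid(oneHourAgo, now));   // true with a 7200s timeout
    }
}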

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java

@ -83,7 +83,7 @@ public class TaskInstanceService extends BaseService {
Integer processInstanceId, String taskName, String executorName, String startDate,
String endDate, String searchVal, ExecutionStatus stateType,String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);

12
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java

@ -86,7 +86,7 @@ public class TenantService extends BaseService{
int queueId,
String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
return result;
@ -138,7 +138,7 @@ public class TenantService extends BaseService{
*/
public Map<String,Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
@ -169,7 +169,7 @@ public class TenantService extends BaseService{
*/
public Map<String, Object> updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (checkAdmin(loginUser, result)) {
@ -235,7 +235,7 @@ public class TenantService extends BaseService{
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
@ -292,7 +292,7 @@ public class TenantService extends BaseService{
*/
public Map<String, Object> queryTenantList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<Tenant> resourceList = tenantMapper.selectList(null);
result.put(Constants.DATA_LIST, resourceList);
@ -307,7 +307,7 @@ public class TenantService extends BaseService{
* @return tenant list
*/
public Map<String, Object> queryTenantList(String tenantCode) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<Tenant> resourceList = tenantMapper.queryByTenantCode(tenantCode);
if (CollectionUtils.isNotEmpty(resourceList)) {

6
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java

@ -151,7 +151,7 @@ public class UdfFuncService extends BaseService{
*/
public Map<String, Object> queryUdfFuncDetail(int id) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
@ -247,7 +247,7 @@ public class UdfFuncService extends BaseService{
* @return udf function list page
*/
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize);
@ -286,7 +286,7 @@ public class UdfFuncService extends BaseService{
* @return resource list
*/
public Map<String, Object> queryResourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type);
result.put(Constants.DATA_LIST, udfFuncList);

73
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java

@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
@ -101,7 +102,7 @@ public class UsersService extends BaseService {
String queue,
int state) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone);
@ -229,7 +230,7 @@ public class UsersService extends BaseService {
* @return user list page
*/
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -269,7 +270,7 @@ public class UsersService extends BaseService {
String phone,
String queue,
int state) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
User user = userMapper.selectById(userId);
@ -392,7 +393,7 @@ public class UsersService extends BaseService {
* @throws Exception exception when operate hdfs
*/
public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
@ -432,7 +433,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@ -482,7 +483,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -579,7 +580,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
@ -626,7 +627,7 @@ public class UsersService extends BaseService {
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
//only admin can operate
@ -706,7 +707,7 @@ public class UsersService extends BaseService {
* @return user list
*/
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -727,7 +728,7 @@ public class UsersService extends BaseService {
* @return user list
*/
public Map<String, Object> queryUserList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -771,7 +772,7 @@ public class UsersService extends BaseService {
*/
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -807,7 +808,7 @@ public class UsersService extends BaseService {
* @return authorized result code
*/
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
@ -917,10 +918,11 @@ public class UsersService extends BaseService {
* @param repeatPassword repeat password
* @param email email
* @return register result code
* @throws Exception exception
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> registerUser(String userName, String userPassword, String repeatPassword, String email) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//check user params
String msg = this.checkUserParams(userName, userPassword, email, "");
@ -934,10 +936,51 @@ public class UsersService extends BaseService {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same");
return result;
}
createUser(userName, userPassword, email, 1, "", "", 0);
User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal());
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, user);
return result;
}
/**
* activate user; only the system admin has permission; changes the user state from 0 to 1
*
* @param loginUser login user
* @return create result code
*/
public Map<String, Object> activateUser(User loginUser, String userName) {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, false);
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!CheckUtils.checkUserName(userName)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
User user = userMapper.queryByUserNameAccurately(userName);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userName);
return result;
}
if (user.getState() != Flag.NO.ordinal()) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
user.setState(Flag.YES.ordinal());
Date now = new Date();
user.setUpdateTime(now);
userMapper.updateById(user);
User responseUser = userMapper.queryByUserNameAccurately(userName);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, responseUser);
return result;
}
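registerUser above now creates the account with Flag.NO.ordinal() as its state, and activateUser flips it to Flag.YES.ordinal(); a tiny standalone sketch of that convention (the local enum mirrors the ordering of the real Flag enum):

// Registered-but-inactive accounts carry state 0, activated accounts carry state 1.
public class UserStateSketch {
    enum Flag { NO, YES }   // mirrors org.apache.dolphinscheduler.common.enums.Flag ordering

    public static void main(String[] args) {
        int registeredState = Flag.NO.ordinal();   // 0 - assigned by registerUser
        int activatedState = Flag.YES.ordinal();   // 1 - assigned by activateUser
        System.out.println(registeredState + " -> " + activatedState);
    }
}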
}

2
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java

@ -63,7 +63,7 @@ public class WorkerGroupService extends BaseService {
// list to index
Integer toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}

186
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@ -0,0 +1,186 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AccessTokenService;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* access token service impl
*/
@Service
public class AccessTokenServiceImpl extends BaseService implements AccessTokenService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
/**
* query access token list
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return token list for page number and page size
*/
public Map<String, Object> queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
PageInfo<AccessToken> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<AccessToken> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getId();
if (loginUser.getUserType() == UserType.ADMIN_USER) {
userId = 0;
}
IPage<AccessToken> accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId);
pageInfo.setTotalCount((int) accessTokenList.getTotal());
pageInfo.setLists(accessTokenList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create token
*
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return create result code
*/
public Map<String, Object> createToken(int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
if (userId <= 0) {
throw new IllegalArgumentException("User id should not be less than or equal to 0.");
}
AccessToken accessToken = new AccessToken();
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setCreateTime(new Date());
accessToken.setUpdateTime(new Date());
// insert
int insert = accessTokenMapper.insert(accessToken);
if (insert > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR);
}
return result;
}
/**
* generate token
*
* @param userId token for user
* @param expireTime token expire time
* @return token string
*/
public Map<String, Object> generateToken(int userId, String expireTime) {
Map<String, Object> result = new HashMap<>(5);
String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis()));
result.put(Constants.DATA_LIST, token);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete access token
*
* @param loginUser login user
* @param id token id
* @return delete result code
*/
public Map<String, Object> delAccessTokenById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
if (loginUser.getId() != accessToken.getUserId() &&
loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
accessTokenMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update token by id
*
* @param id token id
* @param userId token for user
* @param expireTime token expire time
* @param token token string
* @return update result code
*/
public Map<String, Object> updateToken(int id, int userId, String expireTime, String token) {
Map<String, Object> result = new HashMap<>(5);
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("access token not exist, access token id {}", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
accessToken.setUserId(userId);
accessToken.setExpireTime(DateUtils.stringToDate(expireTime));
accessToken.setToken(token);
accessToken.setUpdateTime(new Date());
accessTokenMapper.updateById(accessToken);
putMsg(result, Status.SUCCESS);
return result;
}
}
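The generateToken method above derives the token from the user id, the expire time string, and the current timestamp. A minimal stand-alone sketch of that derivation, using java.security.MessageDigest in place of the project's EncryptionUtils (assumed here to produce a hex MD5 digest):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class TokenSketch {

    // Hex MD5 of (userId + expireTime + currentMillis), mirroring how generateToken builds its input string.
    static String generateToken(int userId, String expireTime) throws NoSuchAlgorithmException {
        String seed = userId + expireTime + System.currentTimeMillis();
        byte[] digest = MessageDigest.getInstance("MD5").digest(seed.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        // Example only; the real service persists the string via createToken(userId, expireTime, token).
        System.out.println(generateToken(1, "2021-12-31 00:00:00"));
    }
}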

384
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java

@ -0,0 +1,384 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TriFunction;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* data analysis service impl
*/
@Service
public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ProcessService processService;
private static final String COMMAND_STATE = "commandState";
private static final String ERROR_COMMAND_STATE = "errorCommandState";
/**
* statistical task instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return task state count data
*/
public Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds));
}
/**
* statistical process instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return process instance state count data
*/
public Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return this.countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.processInstanceMapper.countInstanceStateByUser(start, end, projectIds));
}
private Map<String, Object> countStateByProject(User loginUser, int projectId, String startDate, String endDate
, TriFunction<Date, Date, Integer[], List<ExecuteStatusCount>> instanceStateCounter) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
Date start;
Date end;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
instanceStateCounter.apply(start, end, projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* count the process definition quantities of a certain user
*
* @param loginUser login user
* @param projectId project id
* @return definition count data
*/
public Map<String, Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray, isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* statistical command status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return command state count data
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
/**
* find all the commands issued by the user in the visible projects and
* count them by state: execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null) {
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
// enumMap
Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
Map<String, Integer> commonCommand = new HashMap<>();
commonCommand.put(COMMAND_STATE, 0);
commonCommand.put(ERROR_COMMAND_STATE, 0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD;
*/
dataMap.put(CommandType.START_PROCESS, commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA, commonCommand);
dataMap.put(CommandType.SCHEDULER, commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING, commonCommand);
dataMap.put(CommandType.PAUSE, commonCommand);
dataMap.put(CommandType.STOP, commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD, commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts) {
Map<String, Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put(COMMAND_STATE, executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts) {
Map<String, Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put(ERROR_COMMAND_STATE, errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
for (Map.Entry<CommandType, Map<String, Integer>> next : dataMap.entrySet()) {
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get(ERROR_COMMAND_STATE),
next.getValue().get(COMMAND_STATE), next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if (projectId != 0) {
projectIds.add(projectId);
} else if (loginUser.getUserType() == UserType.GENERAL_USER) {
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if (projectIds.isEmpty()) {
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[0]);
}
/**
* count queue state
*
* @param loginUser login user
* @param projectId project id
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
// TODO tasksQueueList and tasksKillList is never updated.
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String, Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER) {
dataMap.put("taskQueue", tasksQueueList.size());
dataMap.put("taskKill", tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i = 0;
for (String taskQueueStr : tasksQueueList) {
if (StringUtils.isNotEmpty(taskQueueStr)) {
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4) {
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList) {
if (StringUtils.isNotEmpty(taskKillStr)) {
String[] splits = taskKillStr.split("-");
if (splits.length == 2) {
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0) {
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0) {
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue", taskQueueCount);
dataMap.put("taskKill", taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result) {
if (projectId != 0) {
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
}
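In countCommandState above every CommandType key initially points at the same shared default map, so each update copies the entry before mutating it; otherwise a count written for one command type would show up under all of them. A minimal sketch of that copy-before-mutate pattern (plain strings stand in for CommandType, purely for illustration):

import java.util.HashMap;
import java.util.Map;

public class CopyOnUpdateSketch {
    public static void main(String[] args) {
        // Every key starts out pointing at the same default map ...
        Map<String, Integer> defaults = new HashMap<>();
        defaults.put("commandState", 0);
        defaults.put("errorCommandState", 0);

        Map<String, Map<String, Integer>> dataMap = new HashMap<>();
        dataMap.put("START_PROCESS", defaults);
        dataMap.put("PAUSE", defaults);

        // ... so an update copies the entry first; mutating `defaults` directly would change every key.
        Map<String, Integer> copy = new HashMap<>(dataMap.get("START_PROCESS"));
        copy.put("commandState", 7);
        dataMap.put("START_PROCESS", copy);

        System.out.println(dataMap.get("START_PROCESS")); // {commandState=7, errorCommandState=0}
        System.out.println(dataMap.get("PAUSE"));         // {commandState=0, errorCommandState=0}
    }
}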

146
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@ -0,0 +1,146 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.LoggerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.commons.lang.ArrayUtils;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* log service
*/
@Service
public class LoggerServiceImpl implements LoggerService {
private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class);
private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";
@Autowired
private ProcessService processService;
private LogClientService logClient;
@PostConstruct
public void init() {
if (Objects.isNull(this.logClient)) {
this.logClient = new LogClientService();
}
}
@PreDestroy
public void close() {
if (Objects.nonNull(this.logClient) && this.logClient.isRunning()) {
logClient.close();
}
}
/**
* view log
*
* @param taskInstId task instance id
* @param skipLineNum skip line number
* @param limit limit
* @return log string data
*/
@SuppressWarnings("unchecked")
public Result<String> queryLog(int taskInstId, int skipLineNum, int limit) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
return Result.error(Status.TASK_INSTANCE_NOT_FOUND);
}
String host = getHost(taskInstance.getHost());
Result<String> result = new Result<>(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg());
logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(),
Constants.RPC_PORT);
StringBuilder log = new StringBuilder();
if (skipLineNum == 0) {
String head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR);
log.append(head);
}
log.append(logClient
.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit));
result.setData(log.toString());
return result;
}
/**
* get log head and content as bytes
*
* @param taskInstId task instance id
* @return log byte array
*/
public byte[] getLogBytes(int taskInstId) {
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId);
if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) {
throw new ServiceException("task instance is null or host is null");
}
String host = getHost(taskInstance.getHost());
byte[] head = String.format(LOG_HEAD_FORMAT,
taskInstance.getLogPath(),
host,
Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8);
return ArrayUtils.addAll(head,
logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()));
}
/**
* get host
*
* @param address address
* @return the address itself if it is in the old version format, otherwise the resolved host ip
*/
private String getHost(String address) {
if (Boolean.TRUE.equals(Host.isOldVersion(address))) {
return address;
}
return Host.of(address).getIp();
}
}
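Both queryLog and getLogBytes prepend a head line built from LOG_HEAD_FORMAT before the actual log content. A stand-alone sketch of that head line, with hypothetical path and host values (the real ones come from the TaskInstance and from Host.of(address).getIp()):

public class LogHeadSketch {
    private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";

    public static void main(String[] args) {
        // Hypothetical values for illustration only.
        String logPath = "/tmp/dolphinscheduler/logs/1-1-1.log";
        String host = "192.168.0.2";
        String head = String.format(LOG_HEAD_FORMAT, logPath, host, System.lineSeparator());
        // The rolled log content from LogClientService.rollViewLog(...) is appended after this head.
        System.out.print(head);
    }
}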

1696
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

File diff suppressed because it is too large Load Diff

443
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java

@ -0,0 +1,443 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* project service implementation
**/
@Service
public class ProjectServiceImpl extends BaseService implements ProjectService {
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create project
*
* @param loginUser login user
* @param name project name
* @param desc description
* @return create result code; an error is returned if a project with the same name already exists
*/
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
Date now = new Date();
project = Project
.newBuilder()
.name(name)
.description(desc)
.userId(loginUser.getId())
.userName(loginUser.getUserName())
.createTime(now)
.updateTime(now)
.build();
if (projectMapper.insert(project) > 0) {
Project insertedProject = projectMapper.queryByName(name);
result.put(Constants.DATA_LIST, insertedProject);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
/**
* query project details by id
*
* @param projectId project id
* @return project detail information
*/
public Map<String, Object> queryById(Integer projectId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.selectById(projectId);
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId);
}
return result;
}
/**
* check project and authorization
*
* @param loginUser login user
* @param project project
* @param projectName project name
* @return check result; success if the login user has permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>();
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
/**
* admin can view all projects
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return project list which the login user has permission to see
*/
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Map<String, Object> result = new HashMap<>();
PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<Project> page = new Page<>(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if (userId != 0) {
for (Project project : projectList) {
project.setPerm(Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotalCount((int) projectIPage.getTotal());
pageInfo.setLists(projectList);
result.put(Constants.COUNT, (int) projectIPage.getTotal());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete project by id
*
* @param loginUser login user
* @param projectId project id
* @return delete result code
*/
public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.selectById(projectId);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId);
if (!processDefinitionList.isEmpty()) {
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(projectId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
String projectName = project == null ? null : project.getName();
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* update project
*
* @param loginUser login user
* @param projectId project id
* @param projectName project name
* @param desc description
* @return update result code
*/
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
Map<String, Object> result = new HashMap<>();
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.selectById(projectId);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getId() != projectId) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
/**
* query unauthorized project
*
* @param loginUser login user
* @param userId user id
* @return the projects which the user does not have permission to see
*/
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
/**
* query all project list except specified userId
*/
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet = null;
if (projectList != null && !projectList.isEmpty()) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return project list without authorization
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet = null;
if (authedProjectList != null && !authedProjectList.isEmpty()) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
/**
* query authorized project
*
* @param loginUser login user
* @param userId user id
* @return projects which the user has permission to see, except for projects created by this user
*/
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query projects created by the login user
*
* @param loginUser login user
* @return projects created by the login user
*/
public Map<String, Object> queryProjectCreatedByUser(User loginUser) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryProjectCreatedByUser(loginUser.getId());
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user has permission to see the project, otherwise false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
return (permissionId & Constants.READ_PERMISSION) != 0;
}
/**
* query permission id
*
* @param user user
* @param project project
* @return permission
*/
private int queryPermission(User user, Project project) {
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
/**
* query all project list that have one or more process definitions.
*
* @return project list
*/
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.selectList(null);
List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
if (projects != null) {
Set<Integer> set = new HashSet<>();
for (ProcessDefinition processDefinition : processDefinitions) {
set.add(processDefinition.getProjectId());
}
List<Project> tempDeletelist = new ArrayList<>();
for (Project project : projects) {
if (!set.contains(project.getId())) {
tempDeletelist.add(project);
}
}
projects.removeAll(tempDeletelist);
}
result.put(Constants.DATA_LIST, projects);
putMsg(result, Status.SUCCESS);
return result;
}
}
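checkReadPermission above treats the permission value returned by queryPermission as a bit mask and tests the read bit. A minimal sketch of that check; the flag values below are hypothetical, since the real constants live in Constants and are not shown in this diff:

public class PermissionSketch {
    // Hypothetical flag values for illustration; the real ones are defined in Constants.
    static final int READ_PERMISSION = 0b010;
    static final int WRITE_PERMISSION = 0b100;
    static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION;

    // Same test as checkReadPermission: the read bit must be set.
    static boolean canRead(int perm) {
        return (perm & READ_PERMISSION) != 0;
    }

    public static void main(String[] args) {
        System.out.println(canRead(ALL_PERMISSIONS));   // true  (project owner)
        System.out.println(canRead(WRITE_PERMISSION));  // false
        System.out.println(canRead(0));                 // false (no project relation)
    }
}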

158
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java

@ -0,0 +1,158 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.api.controller.BaseController;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.SessionService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.dao.entity.Session;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.SessionMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* session service implementation
*/
@Service
public class SessionServiceImpl extends BaseService implements SessionService {
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);
@Autowired
private SessionMapper sessionMapper;
/**
* get user session from request
*
* @param request request
* @return session
*/
public Session getSession(HttpServletRequest request) {
String sessionId = request.getHeader(Constants.SESSION_ID);
if (StringUtils.isBlank(sessionId)) {
Cookie cookie = getCookie(request, Constants.SESSION_ID);
if (cookie != null) {
sessionId = cookie.getValue();
}
}
if (StringUtils.isBlank(sessionId)) {
return null;
}
String ip = BaseController.getClientIpAddress(request);
logger.debug("get session: {}, ip: {}", sessionId, ip);
return sessionMapper.selectById(sessionId);
}
/**
* create session
*
* @param user user
* @param ip ip
* @return session string
*/
@Transactional(rollbackFor = RuntimeException.class)
public String createSession(User user, String ip) {
Session session = null;
// already logged in
List<Session> sessionList = sessionMapper.queryByUserId(user.getId());
Date now = new Date();
/**
* if the user has already logged in and the session is still valid, return it directly
*/
if (CollectionUtils.isNotEmpty(sessionList)) {
// if there is more than one session, delete the others and keep the first one
if (sessionList.size() > 1) {
for (int i = 1; i < sessionList.size(); i++) {
sessionMapper.deleteById(sessionList.get(i).getId());
}
}
session = sessionList.get(0);
if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) {
/**
* update the latest login time
*/
session.setLastLoginTime(now);
sessionMapper.updateById(session);
return session.getId();
} else {
/**
* session expired, then delete this session first
*/
sessionMapper.deleteById(session.getId());
}
}
// assign new session
session = new Session();
session.setId(UUID.randomUUID().toString());
session.setIp(ip);
session.setUserId(user.getId());
session.setLastLoginTime(now);
sessionMapper.insert(session);
return session.getId();
}
/**
* sign out
* remove ip restrictions
*
* @param ip login ip, used together with the user id to locate the session
* @param loginUser login user
*/
public void signOut(String ip, User loginUser) {
try {
/**
* query session by user id and ip
*/
Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(), ip);
//delete session
sessionMapper.deleteById(session.getId());
} catch (Exception e) {
logger.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip);
}
}
}
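createSession above reuses an existing session only while it is inside the timeout window, comparing elapsed milliseconds against Constants.SESSION_TIME_OUT * 1000. A minimal sketch of that comparison; the timeout value below is hypothetical, the real one is defined in Constants:

import java.util.Date;

public class SessionExpirySketch {
    // Hypothetical timeout in seconds; the real value comes from Constants.SESSION_TIME_OUT.
    static final long SESSION_TIME_OUT_SECONDS = 7200;

    // Same comparison as createSession: elapsed millis must be within the timeout window.
    static boolean isStillValid(Date lastLoginTime, Date now) {
        return now.getTime() - lastLoginTime.getTime() <= SESSION_TIME_OUT_SECONDS * 1000;
    }

    public static void main(String[] args) {
        Date now = new Date();
        Date oneHourAgo = new Date(now.getTime() - 3600L * 1000);
        Date threeHoursAgo = new Date(now.getTime() - 3 * 3600L * 1000);
        System.out.println(isStillValid(oneHourAgo, now));    // true  -> session is refreshed and reused
        System.out.println(isStillValid(threeHoursAgo, now)); // false -> session deleted, new one created
    }
}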

8
dolphinscheduler-api/src/main/resources/i18n/messages.properties

@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
@ -253,4 +252,9 @@ AUTHORIZED_DATA_SOURCE_NOTES=authorized data source
DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids
QUERY_USER_CREATED_PROJECT_NOTES= query user created project
COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy

6
dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties

@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id
PROCESS_DEFINITION_IDS=process definition ids
RELEASE_PROCESS_DEFINITION_NOTES=release process definition
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id
COPY_PROCESS_DEFINITION_NOTES=copy process definition
QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging
QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list
@ -254,3 +253,8 @@ DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids
QUERY_USER_CREATED_PROJECT_NOTES= query user created project
COPY_PROCESS_DEFINITION_NOTES= copy process definition notes
MOVE_PROCESS_DEFINITION_NOTES= move process definition notes
TARGET_PROJECT_ID= target project id
IS_COPY = is copy

6
dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties

@ -171,7 +171,6 @@ UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义
PROCESS_DEFINITION_ID=流程定义ID
RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义
QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID
COPY_PROCESS_DEFINITION_NOTES=复制流程定义
QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表
QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义
@ -252,4 +251,9 @@ DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据
QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表
EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义
BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义
QUERY_USER_CREATED_PROJECT_NOTES= 查询用户创建的项目
COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义
MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义
TARGET_PROJECT_ID= 目标项目ID
IS_COPY = 是否复制

59
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java

@ -18,16 +18,18 @@ package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.junit.*;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
@ -47,7 +49,7 @@ import java.util.Map;
* process definition controller test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessDefinitionControllerTest{
public class ProcessDefinitionControllerTest {
private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class);
@ -55,7 +57,7 @@ public class ProcessDefinitionControllerTest{
private ProcessDefinitionController processDefinitionController;
@Mock
private ProcessDefinitionService processDefinitionService;
private ProcessDefinitionServiceImpl processDefinitionService;
protected User user;
@ -78,7 +80,7 @@ public class ProcessDefinitionControllerTest{
String name = "dag_test";
String description = "desc test";
String connects = "[]";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
@ -102,7 +104,7 @@ public class ProcessDefinitionControllerTest{
@Test
public void testVerifyProcessDefinitionName() throws Exception {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROCESS_INSTANCE_EXIST);
String projectName = "test";
String name = "dag_test";
@ -124,7 +126,7 @@ public class ProcessDefinitionControllerTest{
String description = "desc test";
String connects = "[]";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId",1);
@ -140,7 +142,7 @@ public class ProcessDefinitionControllerTest{
public void testReleaseProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result);
@ -168,7 +170,7 @@ public class ProcessDefinitionControllerTest{
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
@ -179,16 +181,33 @@ public class ProcessDefinitionControllerTest{
}
@Test
public void testCopyProcessDefinition() throws Exception {
public void testBatchCopyProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id,targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@Test
public void testBatchMoveProcessDefinition() throws Exception {
String projectName = "test";
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.copyProcessDefinition(user, projectName,id)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName,id);
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result);
Result response = processDefinitionController.moveProcessDefinition(user, projectName,id,targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue());
}
@ -200,7 +219,7 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
List<ProcessDefinition> resourceList = getDefinitionList();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resourceList);
@ -255,7 +274,7 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result);
@ -269,7 +288,7 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
@ -283,7 +302,7 @@ public class ProcessDefinitionControllerTest{
String projectName = "test";
String idList = "1,2,3";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
@ -342,9 +361,7 @@ public class ProcessDefinitionControllerTest{
String processDefinitionIds = "1,2";
String projectName = "test";
HttpServletResponse response = new MockHttpServletResponse();
ProcessDefinitionService service = new ProcessDefinitionService();
ProcessDefinitionService spy = Mockito.spy(service);
Mockito.doNothing().when(spy).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
Mockito.doNothing().when(this.processDefinitionService).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
}

83
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java

@ -16,29 +16,27 @@
*/
package org.apache.dolphinscheduler.api.controller;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* process instance controller test
*/
public class ProcessInstanceControllerTest extends AbstractControllerTest {
private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class);
@Test
public void testQueryProcessInstanceList() throws Exception {
@ -52,31 +50,30 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest {
paramsMap.add("pageNo", "2");
paramsMap.add("pageSize", "2");
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging", "cxc_1113")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryTaskListByProcessId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1203"))
.param("processInstanceId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
assert result != null;
Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(), result.getCode().intValue());
}
@Test
@ -91,110 +88,108 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest {
paramsMap.add("syncDefine", "false");
paramsMap.add("locations", locations);
paramsMap.add("connects", "[]");
// paramsMap.add("flag", "2");
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update","cxc_1113")
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update", "cxc_1113")
.header("sessionId", sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryProcessInstanceById() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1203"))
.param("processInstanceId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testQuerySubProcessInstanceByTaskId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("taskId","1203"))
.param("taskId", "1203"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(), result.getCode().intValue());
}
@Test
public void testQueryParentInstanceBySubId() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("subId","1204"))
.param("subId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(), result.getCode().intValue());
}
@Test
public void testViewVariables() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1204"))
.param("processInstanceId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testDeleteProcessInstanceById() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceId","1204"))
.param("processInstanceId", "1204"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testBatchDeleteProcessInstanceByIds() throws Exception {
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete","cxc_1113")
MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete", "cxc_1113")
.header(SESSION_ID, sessionId)
.param("processInstanceIds","1205,1206"))
.param("processInstanceIds", "1205,1206"))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
Assert.assertNotNull(result);
Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), result.getCode().intValue());
}
}
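The ProcessInstanceControllerTest hunks above all repeat one MockMvc pattern: perform a GET with the project name as a path variable plus query parameters, assert the HTTP status and JSON content type, then deserialize the wrapped Result and assert on its business code. Below is a minimal, self-contained sketch of that pattern; the endpoint, project name and parameter values are taken from the diff, while the class name is hypothetical and the SESSION_ID constant, sessionId field and mockMvc instance are assumed to come from the shared AbstractControllerTest base class, as in the existing tests.

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

// hypothetical sketch class; assumed to sit alongside the existing controller tests
public class ProcessInstanceControllerPatternTest extends AbstractControllerTest {

    @Test
    public void testViewVariablesPattern() throws Exception {
        // GET with a path variable (project name), the session header and a query parameter
        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables", "cxc_1113")
                .header(SESSION_ID, sessionId)
                .param("processInstanceId", "1204"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        // deserialize the Result wrapper and assert on the business status code
        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertNotNull(result);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
    }
}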

17
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java

@ -285,6 +285,21 @@ public class UsersControllerTest extends AbstractControllerTest{
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testActivateUser() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userName","user_test");
MvcResult mvcResult = mockMvc.perform(post("/users/activate")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
}
}

109
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java

@ -16,10 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import java.util.Calendar;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.AccessTokenServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -27,9 +29,14 @@ import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.AccessToken;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.junit.After;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -38,131 +45,109 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class AccessTokenServiceTest {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceTest.class);
@InjectMocks
private AccessTokenService accessTokenService ;
private AccessTokenServiceImpl accessTokenService;
@Mock
private AccessTokenMapper accessTokenMapper;
@Before
public void setUp() {
}
@After
public void after(){
}
@Test
public void testQueryAccessTokenList(){
@SuppressWarnings("unchecked")
public void testQueryAccessTokenList() {
IPage<AccessToken> tokenPage = new Page<>();
tokenPage.setRecords(getList());
tokenPage.setTotal(1L);
when(accessTokenMapper.selectAccessTokenPage(any(Page.class),eq("zhangsan"),eq(0))).thenReturn(tokenPage);
when(accessTokenMapper.selectAccessTokenPage(any(Page.class), eq("zhangsan"), eq(0))).thenReturn(tokenPage);
User user =new User();
Map<String, Object> result = accessTokenService.queryAccessTokenList(user,"zhangsan",1,10);
User user = new User();
Map<String, Object> result = accessTokenService.queryAccessTokenList(user, "zhangsan", 1, 10);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
PageInfo<AccessToken> pageInfo = (PageInfo<AccessToken>) result.get(Constants.DATA_LIST);
Assert.assertTrue(pageInfo.getTotalCount()>0);
Assert.assertTrue(pageInfo.getTotalCount() > 0);
}
@Test
public void testCreateToken(){
public void testCreateToken() {
when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2);
Map<String, Object> result = accessTokenService.createToken(1,getDate(),"AccessTokenServiceTest");
when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2);
Map<String, Object> result = accessTokenService.createToken(1, getDate(), "AccessTokenServiceTest");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGenerateToken(){
public void testGenerateToken() {
Map<String, Object> result = accessTokenService.generateToken(Integer.MAX_VALUE,getDate());
Map<String, Object> result = accessTokenService.generateToken(Integer.MAX_VALUE, getDate());
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
String token = (String) result.get(Constants.DATA_LIST);
Assert.assertNotNull(token);
}
@Test
public void testDelAccessTokenById(){
public void testDelAccessTokenById() {
when(accessTokenMapper.selectById(1)).thenReturn(getEntity());
User userLogin = new User();
// not exist
Map<String, Object> result = accessTokenService.delAccessTokenById(userLogin,0);
Map<String, Object> result = accessTokenService.delAccessTokenById(userLogin, 0);
logger.info(result.toString());
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
// no operate
result = accessTokenService.delAccessTokenById(userLogin,1);
result = accessTokenService.delAccessTokenById(userLogin, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
userLogin.setId(1);
userLogin.setUserType(UserType.ADMIN_USER);
result = accessTokenService.delAccessTokenById(userLogin,1);
result = accessTokenService.delAccessTokenById(userLogin, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testUpdateToken(){
public void testUpdateToken() {
when(accessTokenMapper.selectById(1)).thenReturn(getEntity());
Map<String, Object> result = accessTokenService.updateToken(1,Integer.MAX_VALUE,getDate(),"token");
Map<String, Object> result = accessTokenService.updateToken(1, Integer.MAX_VALUE, getDate(), "token");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
// not exist
result = accessTokenService.updateToken(2,Integer.MAX_VALUE,getDate(),"token");
result = accessTokenService.updateToken(2, Integer.MAX_VALUE, getDate(), "token");
logger.info(result.toString());
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS));
Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
}
/**
* create entity
* @return
*/
private AccessToken getEntity(){
private AccessToken getEntity() {
AccessToken accessToken = new AccessToken();
accessToken.setId(1);
accessToken.setUserId(1);
accessToken.setToken("AccessTokenServiceTest");
Date date = DateUtils.add(new Date(),Calendar.DAY_OF_MONTH, 30);
Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30);
accessToken.setExpireTime(date);
return accessToken;
}
/**
* entity list
* @return
*/
private List<AccessToken> getList(){
private List<AccessToken> getList() {
List<AccessToken> list = new ArrayList<>();
list.add(getEntity());
@ -170,13 +155,11 @@ public class AccessTokenServiceTest {
}
/**
* get dateStr
* @return
*/
private String getDate(){
private String getDate() {
Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30);
return DateUtils.dateToString(date);
return DateUtils.dateToString(date);
}
}
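A pattern worth noting in the AccessTokenServiceTest changes above: @InjectMocks now targets AccessTokenServiceImpl rather than the AccessTokenService interface, presumably because Mockito must be able to instantiate the field it injects into, which only works for a concrete class. A stripped-down sketch of that service-test shape follows; the class and test names are hypothetical, everything else is taken from the diff. No stubbing is needed for the branch shown, since the mocked mapper returns null by default and that drives the ACCESS_TOKEN_NOT_EXIST path.

import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.AccessTokenServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class AccessTokenServicePatternTest {

    // the concrete impl is injected so Mockito can construct it and wire the mocked mapper into it
    @InjectMocks
    private AccessTokenServiceImpl accessTokenService;

    @Mock
    private AccessTokenMapper accessTokenMapper;

    @Test
    public void testDelAccessTokenNotExist() {
        // selectById(0) is never stubbed, so the mock returns null and the service
        // reports that the token does not exist
        Map<String, Object> result = accessTokenService.delAccessTokenById(new User(), 0);
        Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS));
    }
}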

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java

@ -76,7 +76,7 @@ public class AlertGroupServiceTest {
@Test
public void testQueryAlertgroup(){
public void testQueryAlertGroup(){
Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(getList());
HashMap<String, Object> result= alertGroupService.queryAlertgroup();

50
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java

@ -1,50 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class BaseDAGServiceTest {
@Test
public void testProcessInstance2DAG(){
ProcessInstance processInstance = new ProcessInstance();
processInstance.setProcessInstanceJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-61567\"," +
"\"name\":\"开始\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]},{\"type\":\"SHELL\",\"id\":\"tasks-6-3ug5ej\",\"name\":\"结束\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"},\"description\":\"\"," +
"\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[\"开始\"]}],\"tenantId\":-1,\"timeout\":0}");
DAG<String, TaskNode, TaskNodeRelation> relationDAG = BaseDAGService.processInstance2DAG(processInstance);
Assert.assertTrue(relationDAG.containsNode("开始"));
}
}

76
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java

@ -17,17 +17,29 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -36,25 +48,19 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
public class DataAnalysisServiceTest {
@InjectMocks
private DataAnalysisService dataAnalysisService;
private DataAnalysisServiceImpl dataAnalysisService;
@Mock
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessInstanceMapper processInstanceMapper;
@ -71,13 +77,9 @@ public class DataAnalysisServiceTest {
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
ProcessService processService;
private Project project;
private Map<String, Object> resultMap;
private User user;
@ -86,26 +88,25 @@ public class DataAnalysisServiceTest {
public void setUp() {
user = new User();
project = new Project();
Project project = new Project();
project.setId(1);
resultMap = new HashMap<>();
Mockito.when(projectMapper.selectById(1)).thenReturn(project);
Mockito.when(projectService.hasProjectAndPerm(user,project,resultMap)).thenReturn(true);
Mockito.when(projectService.hasProjectAndPerm(user, project, resultMap)).thenReturn(true);
}
@After
public void after(){
public void after() {
user = null;
projectMapper = null;
resultMap = null;
}
@Test
public void testCountTaskStateByProject(){
public void testCountTaskStateByProject() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
@ -120,42 +121,40 @@ public class DataAnalysisServiceTest {
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts());
result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountProcessInstanceStateByProject(){
public void testCountProcessInstanceStateByProject() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
//checkProject false
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(user,2,startDate,endDate);
Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(user, 2, startDate, endDate);
Assert.assertTrue(result.isEmpty());
//SUCCESS
Mockito.when(processInstanceMapper.countInstanceStateByUser(DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts());
result = dataAnalysisService.countProcessInstanceStateByProject(user,1,startDate,endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
result = dataAnalysisService.countProcessInstanceStateByProject(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountDefinitionByUser(){
public void testCountDefinitionByUser() {
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(user,1);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Map<String, Object> result = dataAnalysisService.countDefinitionByUser(user, 1);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountCommandState(){
public void testCountCommandState() {
String startDate = "2020-02-11 16:02:18";
String endDate = "2020-02-11 16:03:18";
//checkProject false
Map<String, Object> result = dataAnalysisService.countCommandState(user,2,startDate,endDate);
Map<String, Object> result = dataAnalysisService.countCommandState(user, 2, startDate, endDate);
Assert.assertTrue(result.isEmpty());
List<CommandCount> commandCounts = new ArrayList<>(1);
CommandCount commandCount = new CommandCount();
@ -164,26 +163,25 @@ public class DataAnalysisServiceTest {
Mockito.when(commandMapper.countCommandState(0, DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts);
Mockito.when(errorCommandMapper.countCommandState( DateUtils.getScheduleDate(startDate),
Mockito.when(errorCommandMapper.countCommandState(DateUtils.getScheduleDate(startDate),
DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts);
result = dataAnalysisService.countCommandState(user,1,startDate,endDate);
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
result = dataAnalysisService.countCommandState(user, 1, startDate, endDate);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
/**
* get list
* @return
* get list
*/
private List<ExecuteStatusCount> getTaskInstanceStateCounts(){
private List<ExecuteStatusCount> getTaskInstanceStateCounts() {
List<ExecuteStatusCount> taskInstanceStateCounts = new ArrayList<>(1);
ExecuteStatusCount executeStatusCount = new ExecuteStatusCount();
executeStatusCount.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
taskInstanceStateCounts.add(executeStatusCount);
return taskInstanceStateCounts;
return taskInstanceStateCounts;
}
}

63
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java

@ -16,17 +16,36 @@
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -36,13 +55,6 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import java.text.ParseException;
import java.util.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.times;
/**
* test for ExecutorService
*/
@ -62,7 +74,7 @@ public class ExecutorService2Test {
private ProjectMapper projectMapper;
@Mock
private ProjectService projectService;
private ProjectServiceImpl projectService;
@Mock
private MonitorService monitorService;
@ -84,7 +96,7 @@ public class ExecutorService2Test {
private String cronTime;
@Before
public void init(){
public void init() {
// user
loginUser.setId(userId);
@ -111,7 +123,6 @@ public class ExecutorService2Test {
/**
* not complement
* @throws ParseException
*/
@Test
public void testNoComplement() throws ParseException {
@ -125,13 +136,12 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* date error
* @throws ParseException
*/
@Test
public void testDateError() throws ParseException {
@ -145,13 +155,12 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS));
verify(processService, times(0)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* serial
* @throws ParseException
*/
@Test
public void testSerial() throws ParseException {
@ -165,17 +174,16 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* without schedule
* @throws ParseException
*/
@Test
public void testParallelWithOutSchedule() throws ParseException {
try{
try {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
@ -185,17 +193,16 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(31)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
/**
* with schedule
* @throws ParseException
*/
@Test
public void testParallelWithSchedule() throws ParseException {
try{
try {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
@ -205,13 +212,13 @@ public class ExecutorService2Test {
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(15)).createCommand(any(Command.class));
}catch (Exception e){
} catch (Exception e) {
}
}
@Test
public void testNoMsterServers() throws ParseException{
public void testNoMsterServers() throws ParseException {
Mockito.when(monitorService.getServerListFromZK(true)).thenReturn(new ArrayList<Server>());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
@ -220,11 +227,11 @@ public class ExecutorService2Test {
null, null, 0,
"", "", RunMode.RUN_MODE_PARALLEL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110);
Assert.assertEquals(result.get(Constants.STATUS),Status.MASTER_NOT_EXISTS);
Assert.assertEquals(result.get(Constants.STATUS), Status.MASTER_NOT_EXISTS);
}
private List<Server> getMasterServersList(){
private List<Server> getMasterServersList() {
List<Server> masterServerList = new ArrayList<>();
Server masterServer1 = new Server();
masterServer1.setId(1);
@ -242,11 +249,11 @@ public class ExecutorService2Test {
}
private List<Schedule> zeroSchedulerList(){
private List<Schedule> zeroSchedulerList() {
return Collections.EMPTY_LIST;
}
private List<Schedule> oneSchedulerList(){
private List<Schedule> oneSchedulerList() {
List<Schedule> schedulerList = new LinkedList<>();
Schedule schedule = new Schedule();
schedule.setCrontab("0 0 0 1/2 * ?");
@ -254,7 +261,7 @@ public class ExecutorService2Test {
return schedulerList;
}
private Map<String, Object> checkProjectAndAuth(){
private Map<String, Object> checkProjectAndAuth() {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, Status.SUCCESS);
return result;
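The ExecutorService2Test hunks above all follow the same interaction-verification idea: stub the schedule list, run execProcessInstance for a COMPLEMENT_DATA date range, then use verify(processService, times(n)).createCommand(any(Command.class)) to check how many commands the chosen run mode produced (one for a serial run, roughly one per day for a parallel run without a schedule). The sketch below isolates just the verify(..., times(n)) technique; CommandStore and CommandCreator are hypothetical stand-ins so the example stays self-contained rather than a claim about the real ExecutorService wiring.

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.junit.Test;

public class CommandVerifyPatternTest {

    // hypothetical collaborator: receives one call per generated command
    interface CommandStore {
        void createCommand(String command);
    }

    // hypothetical unit under test: a parallel complement over a date range, one command per day
    static class CommandCreator {
        private final CommandStore store;

        CommandCreator(CommandStore store) {
            this.store = store;
        }

        void createDaily(int days) {
            for (int i = 0; i < days; i++) {
                store.createCommand("day-" + i);
            }
        }
    }

    @Test
    public void testOneCommandPerDay() {
        CommandStore store = mock(CommandStore.class);
        new CommandCreator(store).createDaily(31);
        // same shape of assertion as the PR's
        // verify(processService, times(31)).createCommand(any(Command.class));
        verify(store, times(31)).createCommand(anyString());
    }
}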

2
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java

@ -54,7 +54,7 @@ public class ExecutorServiceTest {
@Test
public void putMsgWithParamsTest() {
Map<String,Object> map = new HashMap<>(5);
Map<String,Object> map = new HashMap<>();
putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS);
logger.info(map.toString());
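The one-line ExecutorServiceTest change above, replacing new HashMap<>(5) with new HashMap<>(), recurs throughout this PR (the same edit appears in ProcessDefinitionServiceTest and ProcessInstanceServiceTest below). The numeric constructor argument is only an initial-capacity hint, not a size limit, so a hard-coded 5 adds noise without changing behaviour. A tiny self-contained illustration, assuming nothing beyond java.util:

import java.util.HashMap;
import java.util.Map;

public class HashMapCapacitySketch {
    public static void main(String[] args) {
        // the constructor argument is only a capacity hint; the map still grows past it
        Map<String, Object> map = new HashMap<>(5);
        for (int i = 0; i < 10; i++) {
            map.put("key-" + i, i);
        }
        System.out.println(map.size()); // prints 10
    }
}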

44
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java

@ -17,10 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
@ -32,25 +36,30 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@RunWith(MockitoJUnitRunner.class)
@PrepareForTest({LoggerService.class})
@PrepareForTest({LoggerServiceImpl.class})
public class LoggerServiceTest {
private static final Logger logger = LoggerFactory.getLogger(LoggerServiceTest.class);
@InjectMocks
private LoggerService loggerService;
private LoggerServiceImpl loggerService;
@Mock
private ProcessService processService;
@Before
public void init() {
this.loggerService.init();
}
@Test
public void testQueryDataSourceList(){
public void testQueryDataSourceList() {
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
Result result = loggerService.queryLog(2,1,1);
Result result = loggerService.queryLog(2, 1, 1);
//TASK_INSTANCE_NOT_FOUND
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue());
try {
//HOST NOT FOUND OR ILLEGAL
@ -59,36 +68,36 @@ public class LoggerServiceTest {
Assert.assertTrue(true);
logger.error("testQueryDataSourceList error {}", e.getMessage());
}
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue());
Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue());
//SUCCESS
taskInstance.setHost("127.0.0.1:8080");
taskInstance.setLogPath("/temp/log");
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
result = loggerService.queryLog(1,1,1);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
result = loggerService.queryLog(1, 1, 1);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
@Test
public void testGetLogBytes(){
public void testGetLogBytes() {
TaskInstance taskInstance = new TaskInstance();
Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance);
//task instance is null
try{
try {
loggerService.getLogBytes(2);
}catch (RuntimeException e){
} catch (RuntimeException e) {
Assert.assertTrue(true);
logger.error("testGetLogBytes error: {}","task instance is null");
logger.error("testGetLogBytes error: {}", "task instance is null");
}
//task instance host is null
try{
try {
loggerService.getLogBytes(1);
}catch (RuntimeException e){
} catch (RuntimeException e) {
Assert.assertTrue(true);
logger.error("testGetLogBytes error: {}","task instance host is null");
logger.error("testGetLogBytes error: {}", "task instance host is null");
}
//success
@ -100,4 +109,9 @@ public class LoggerServiceTest {
}
@After
public void close() {
this.loggerService.close();
}
}

378
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java

@ -16,21 +16,48 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.http.entity.ContentType;
import org.json.JSONException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -38,23 +65,14 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.skyscreamer.jsonassert.JSONAssert;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class ProcessDefinitionServiceTest {
@InjectMocks
ProcessDefinitionService processDefinitionService;
ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private DataSourceMapper dataSourceMapper;
@ -66,13 +84,11 @@ public class ProcessDefinitionServiceTest {
private ProjectMapper projectMapper;
@Mock
private ProjectService projectService;
private ProjectServiceImpl projectService;
@Mock
private ScheduleMapper scheduleMapper;
@Mock
private ProcessService processService;
@ -115,21 +131,21 @@ public class ProcessDefinitionServiceTest {
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1");
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
List<ProcessDefinition> resourceList = new ArrayList<>();
resourceList.add(getProcessDefinition());
Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList);
Map<String, Object> checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1");
Map<String, Object> checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1");
Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS));
}
@ -144,12 +160,12 @@ public class ProcessDefinitionServiceTest {
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "",1, 5,0);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "", 1, 5, 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
}
@ -165,18 +181,18 @@ public class ProcessDefinitionServiceTest {
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDefineMapper.selectById(1)).thenReturn(null);
Map<String, Object> instanceNotexitRes = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 1);
@ -190,20 +206,22 @@ public class ProcessDefinitionServiceTest {
}
@Test
public void testCopyProcessDefinition() throws Exception{
public void testCopyProcessDefinition() throws Exception {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectMapper.queryDetailById(1)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
@ -212,7 +230,7 @@ public class ProcessDefinitionServiceTest {
//instance exit
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
Map<String, Object> createProcessResult = new HashMap<>(5);
Map<String, Object> createProcessResult = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.createProcessDefinition(
@ -224,8 +242,52 @@ public class ProcessDefinitionServiceTest {
definition.getLocations(),
definition.getConnects())).thenReturn(createProcessResult);
Map<String, Object> successRes = processDefinitionService.copyProcessDefinition(loginUser,
"project_test1", 46);
Map<String, Object> successRes = processDefinitionService.batchCopyProcessDefinition(loginUser, "project_test1",
"46", 1);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testBatchMoveProcessDefinition() throws Exception {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
String projectName2 = "project_test2";
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(getProject(projectName2));
int targetProjectId = 2;
Mockito.when(projectMapper.queryDetailById(targetProjectId)).thenReturn(getProjectById(targetProjectId));
Project project = getProject(projectName);
Project targetProject = getProjectById(targetProjectId);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectName);
Map<String, Object> result2 = new HashMap<>();
putMsg(result2, Status.SUCCESS, targetProject.getName());
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, targetProject, targetProject.getName())).thenReturn(result2);
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
//instance exit
Mockito.when(processDefineMapper.updateById(definition)).thenReturn(46);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
putMsg(result, Status.SUCCESS);
Map<String, Object> successRes = processDefinitionService.batchMoveProcessDefinition(loginUser, "project_test1",
"46", 2);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@ -241,15 +303,15 @@ public class ProcessDefinitionServiceTest {
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.deleteProcessDefinitionById(loginUser, "project_test1", 6);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success, instance not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDefineMapper.selectById(1)).thenReturn(null);
Map<String, Object> instanceNotexitRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 1);
@ -321,9 +383,9 @@ public class ProcessDefinitionServiceTest {
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
6, ReleaseState.OFFLINE.getCode());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
@ -364,22 +426,22 @@ public class ProcessDefinitionServiceTest {
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success, process not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(null);
Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS));
//process exist
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(getProcessDefinition());
Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(getProcessDefinition());
Map<String, Object> processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, processExistRes.get(Constants.STATUS));
@ -404,7 +466,7 @@ public class ProcessDefinitionServiceTest {
Assert.assertEquals(Status.DATA_IS_NULL, taskNotEmptyRes.get(Constants.STATUS));
//json abnormal
String abnormalJson = processDefinitionJson.replaceAll("SHELL","");
String abnormalJson = processDefinitionJson.replaceAll("SHELL", "");
processData = JSONUtils.parseObject(abnormalJson, ProcessData.class);
Map<String, Object> abnormalTaskRes = processDefinitionService.checkProcessNodeList(processData, abnormalJson);
Assert.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID, abnormalTaskRes.get(Constants.STATUS));
@ -502,157 +564,6 @@ public class ProcessDefinitionServiceTest {
Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS));
}
/**
* add datasource param and dependent when export process
* @throws JSONException
*/
@Test
public void testAddTaskNodeSpecialParam() throws JSONException {
Mockito.when(dataSourceMapper.selectById(1)).thenReturn(getDataSource());
Mockito.when(processDefineMapper.queryByDefineId(2)).thenReturn(getProcessDefinition());
String corSqlDependentJson = processDefinitionService.addExportTaskNodeSpecialParam(sqlDependentJson);
JSONAssert.assertEquals(sqlDependentJson,corSqlDependentJson,false);
}
@Test
public void testExportProcessMetaDataStr() {
Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(getSchedulerList());
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(sqlDependentJson);
String exportProcessMetaDataStr = processDefinitionService.exportProcessMetaDataStr(46, processDefinition);
Assert.assertNotEquals(sqlDependentJson,exportProcessMetaDataStr);
}
@Test
public void testAddExportTaskNodeSpecialParam() throws JSONException {
String shellData = shellJson;
String resultStr = processDefinitionService.addExportTaskNodeSpecialParam(shellData);
JSONAssert.assertEquals(shellJson, resultStr, false);
}
@Test
public void testImportProcessSchedule() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
String currentProjectName = "test";
String processDefinitionName = "test_process";
Integer processDefinitionId = 1;
Schedule schedule = getSchedule();
ProcessMeta processMeta = getProcessMeta();
int insertFlag = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMeta,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, insertFlag);
ProcessMeta processMetaCron = new ProcessMeta();
processMetaCron.setScheduleCrontab(schedule.getCrontab());
int insertFlagCron = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, insertFlagCron);
WorkerGroup workerGroup = new WorkerGroup();
workerGroup.setName("ds-test-workergroup");
List<WorkerGroup> workerGroups = new ArrayList<>();
workerGroups.add(workerGroup);
processMetaCron.setScheduleWorkerGroupName("ds-test");
int insertFlagWorker = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, insertFlagWorker);
int workerNullFlag = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron,
processDefinitionName, processDefinitionId);
Assert.assertEquals(0, workerNullFlag);
}
/**
* import sub process test
*/
@Test
public void testImportSubProcess() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
Project testProject = getProject("test");
//Recursive subprocess sub2 process in sub1 process and sub1process in top process
String topProcessJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-38634\",\"name\":\"shell1\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," +
"\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," +
"\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}," +
"{\"type\":\"SUB_PROCESS\",\"id\":\"tasks-44207\",\"name\":\"shell-4\"," +
"\"params\":{\"processDefinitionId\":39},\"description\":\"\",\"runFlag\":\"NORMAL\"," +
"\"dependence\":{},\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," +
"\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," +
"\"preTasks\":[\"shell1\"]}],\"tenantId\":1,\"timeout\":0}";
String sub1ProcessJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-84090\"," +
"\"name\":\"shell-4\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-4\\\"\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," +
"\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," +
"\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]},{\"type\":\"SUB_PROCESS\"," +
"\"id\":\"tasks-87364\",\"name\":\"shell-5\"," +
"\"params\":{\"processDefinitionId\":46},\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{}," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[\"shell-4\"]}],\"tenantId\":1,\"timeout\":0}";
String sub2ProcessJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-52423\",\"name\":\"shell-5\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo \\\"shell-5\\\"\"},\"description\":\"\"," +
"\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," +
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
ObjectNode jsonObject = JSONUtils.parseObject(topProcessJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.path("tasks");
String originSubJson = jsonArray.toString();
Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
ProcessDefinition shellDefinition1 = new ProcessDefinition();
shellDefinition1.setId(39);
shellDefinition1.setName("shell-4");
shellDefinition1.setProjectId(2);
shellDefinition1.setProcessDefinitionJson(sub1ProcessJson);
ProcessDefinition shellDefinition2 = new ProcessDefinition();
shellDefinition2.setId(46);
shellDefinition2.setName("shell-5");
shellDefinition2.setProjectId(2);
shellDefinition2.setProcessDefinitionJson(sub2ProcessJson);
Mockito.when(processDefineMapper.queryByDefineId(39)).thenReturn(shellDefinition1);
Mockito.when(processDefineMapper.queryByDefineId(46)).thenReturn(shellDefinition2);
Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "shell-5")).thenReturn(null);
Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "shell-4")).thenReturn(null);
Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "testProject")).thenReturn(shellDefinition2);
processDefinitionService.importSubProcess(loginUser,testProject, jsonArray, subProcessIdMap);
String correctSubJson = jsonArray.toString();
Assert.assertEquals(originSubJson, correctSubJson);
}
@Test
public void testImportProcessDefinitionById() throws IOException {
@ -680,7 +591,7 @@ public class ProcessDefinitionServiceTest {
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":\\\"default\\\\," +
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
FileUtils.writeStringToFile(new File("/tmp/task.json"),processJson);
FileUtils.writeStringToFile(new File("/tmp/task.json"), processJson);
File file = new File("/tmp/task.json");
@ -694,7 +605,7 @@ public class ProcessDefinitionServiceTest {
loginUser.setUserType(UserType.ADMIN_USER);
String currentProjectName = "testProject";
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, currentProjectName);
ProcessDefinition shellDefinition2 = new ProcessDefinition();
@ -731,41 +642,13 @@ public class ProcessDefinitionServiceTest {
}
/**
* check import process metadata
* @param file file
* @param loginUser login user
* @param currentProjectName current project name
* @param processMetaJson process meta json
* @throws IOException IO exception
*/
private void improssProcessCheckData(File file, User loginUser, String currentProjectName, String processMetaJson) throws IOException {
//check null
FileUtils.writeStringToFile(new File("/tmp/task.json"),processMetaJson);
File fileEmpty = new File("/tmp/task.json");
FileInputStream fileEmptyInputStream = new FileInputStream("/tmp/task.json");
MultipartFile multiFileEmpty = new MockMultipartFile(fileEmpty.getName(), fileEmpty.getName(),
ContentType.APPLICATION_OCTET_STREAM.toString(), fileEmptyInputStream);
Map<String, Object> resEmptyProcess = processDefinitionService.importProcessDefinition(loginUser, multiFileEmpty, currentProjectName);
Assert.assertEquals(Status.DATA_IS_NULL, resEmptyProcess.get(Constants.STATUS));
boolean deleteFlag = file.delete();
Assert.assertTrue(deleteFlag);
}
@Test
public void testUpdateProcessDefinition () {
public void testUpdateProcessDefinition() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
String projectName = "project_test1";
@ -783,20 +666,22 @@ public class ProcessDefinitionServiceTest {
/**
* get mock datasource
*
* @return DataSource
*/
private DataSource getDataSource(){
private DataSource getDataSource() {
DataSource dataSource = new DataSource();
dataSource.setId(2);
dataSource.setName("test");
return dataSource;
return dataSource;
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition(){
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(46);
@ -805,24 +690,40 @@ public class ProcessDefinitionServiceTest {
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
return processDefinition;
}
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName){
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
return project;
}
/**
* get mock Project
*
* @param projectId projectId
* @return Project
*/
private Project getProjectById(int projectId) {
Project project = new Project();
project.setId(1);
project.setName("project_test2");
project.setUserId(1);
return project;
}
/**
* get mock schedule
*
* @return schedule
*/
private Schedule getSchedule() {
@ -845,6 +746,7 @@ public class ProcessDefinitionServiceTest {
/**
* get mock processMeta
*
* @return processMeta
*/
private ProcessMeta getProcessMeta() {
@ -876,4 +778,4 @@ public class ProcessDefinitionServiceTest {
result.put(Constants.MSG, status.getMsg());
}
}
}
}
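Most assertions in ProcessDefinitionServiceTest prepare the expected outcome of a mocked checkProjectAndAuth call with the small putMsg helper whose tail is visible in the final hunk above (result.put(Constants.MSG, status.getMsg())). A sketch of its conventional shape follows, reconstructed from the visible pieces and the MessageFormat import earlier in the diff; the parameterized-message branch is an assumption, and the surrounding class and main method are only scaffolding to keep the example runnable.

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;

public class PutMsgSketch {

    // stores the expected Status, plus the formatted message when parameters are supplied
    private static void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }

    public static void main(String[] args) {
        Map<String, Object> result = new HashMap<>();
        putMsg(result, Status.PROJECT_NOT_FOUNT, "project_test1");
        System.out.println(result.get(Constants.STATUS)); // PROJECT_NOT_FOUNT
    }
}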

82
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java

@ -16,16 +16,43 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -33,22 +60,11 @@ import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import java.io.IOException;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class ProcessInstanceServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class);
@InjectMocks
ProcessInstanceService processInstanceService;
@ -57,7 +73,7 @@ public class ProcessInstanceServiceTest {
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessService processService;
@ -78,9 +94,7 @@ public class ProcessInstanceServiceTest {
TaskInstanceMapper taskInstanceMapper;
@Mock
LoggerService loggerService;
LoggerServiceImpl loggerService;
@Mock
UsersService usersService;
@ -96,7 +110,7 @@ public class ProcessInstanceServiceTest {
public void testQueryProcessInstanceList() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -153,30 +167,28 @@ public class ProcessInstanceServiceTest {
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
int size=10;
String startTime="2020-01-01 00:00:00";
String endTime="2020-08-02 00:00:00";
int size = 10;
String startTime = "2020-01-01 00:00:00";
String endTime = "2020-08-02 00:00:00";
Date start = DateUtils.getScheduleDate(startTime);
Date end = DateUtils.getScheduleDate(endTime);
//project auth fail
when(projectMapper.queryByName(projectName)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
Map<String, Object> proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime);
Map<String, Object> proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
//project auth success
putMsg(result, Status.SUCCESS, projectName);
Project project = getProject(projectName);
ProcessInstance processInstance = getProcessInstance();
List<ProcessInstance> processInstanceList = new ArrayList<>();
processInstanceList.add(processInstance);
when(projectMapper.queryByName(projectName)).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser);
Map<String, Object> successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime);
Map<String, Object> successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@ -185,7 +197,7 @@ public class ProcessInstanceServiceTest {
public void testQueryProcessInstanceById() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -223,7 +235,7 @@ public class ProcessInstanceServiceTest {
public void testQueryTaskListByProcessId() throws IOException {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -272,7 +284,7 @@ public class ProcessInstanceServiceTest {
public void testQuerySubProcessInstanceByTaskId() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -318,7 +330,7 @@ public class ProcessInstanceServiceTest {
public void testUpdateProcessInstance() throws ParseException {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -374,7 +386,7 @@ public class ProcessInstanceServiceTest {
public void testQueryParentInstanceBySubId() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
@ -415,7 +427,7 @@ public class ProcessInstanceServiceTest {
public void testDeleteProcessInstanceById() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail

175
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java

@ -16,9 +16,8 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -30,7 +29,12 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@ -43,10 +47,8 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class ProjectServiceTest {
@ -55,18 +57,18 @@ public class ProjectServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class);
@InjectMocks
private ProjectService projectService;
private ProjectServiceImpl projectService;
@Mock
private ProjectMapper projectMapper;
@Mock
private UserMapper userMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
private String projectName = "ProjectServiceTest";
private String userName = "ProjectServiceTest";
@ -78,106 +80,109 @@ public class ProjectServiceTest {
@After
public void after(){
public void after() {
}
@Test
public void testCreateProject(){
public void testCreateProject() {
User loginUser = getLoginUser();
loginUser.setId(1);
Map<String, Object> result = projectService.createProject(loginUser, projectName, getDesc());
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS));
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//project name exist
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject());
result = projectService.createProject(loginUser, projectName, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.insert(Mockito.any(Project.class))).thenReturn(1);
result = projectService.createProject(loginUser, "test", "test");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryById(){
public void testQueryById() {
//not exist
Map<String, Object> result = projectService.queryById(Integer.MAX_VALUE);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
logger.info(result.toString());
//success
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
result = projectService.queryById(1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCheckProjectAndAuth(){
public void testCheckProjectAndAuth() {
Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser,null,projectName);
Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, null, projectName);
logger.info(result.toString());
Status status = (Status)result.get(Constants.STATUS);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Status status = (Status) result.get(Constants.STATUS);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
Project project = getProject();
//USER_NO_OPERATION_PROJECT_PERM
project.setUserId(2);
result = projectService.checkProjectAndAuth(loginUser,project,projectName);
result = projectService.checkProjectAndAuth(loginUser, project, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//success
project.setUserId(1);
result = projectService.checkProjectAndAuth(loginUser,project,projectName);
result = projectService.checkProjectAndAuth(loginUser, project, projectName);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testHasProjectAndPerm(){
public void testHasProjectAndPerm() {
// Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser());
User loginUser = getLoginUser();
Project project = getProject();
Map<String, Object> result = new HashMap<>();
// not exist user
User tempUser = new User();
tempUser.setId(Integer.MAX_VALUE);
boolean checkResult = projectService.hasProjectAndPerm(tempUser,project,result);
boolean checkResult = projectService.hasProjectAndPerm(tempUser, project, result);
logger.info(result.toString());
Assert.assertFalse(checkResult);
//success
result = new HashMap<>();
project.setUserId(1);
checkResult = projectService.hasProjectAndPerm(loginUser,project,result);
checkResult = projectService.hasProjectAndPerm(loginUser, project, result);
logger.info(result.toString());
Assert.assertTrue(checkResult);
}
@Test
public void testQueryProjectListPaging(){
public void testQueryProjectListPaging() {
IPage<Project> page = new Page<>(1,10);
IPage<Project> page = new Page<>(1, 10);
page.setRecords(getList());
page.setTotal(1L);
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(1), Mockito.eq(projectName))).thenReturn(page);
User loginUser = getLoginUser();
// project owner
Map<String, Object> result = projectService.queryProjectListPaging(loginUser,10,1,projectName);
Map<String, Object> result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
PageInfo<Project> pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists()));
@ -185,124 +190,148 @@ public class ProjectServiceTest {
//admin
Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(0), Mockito.eq(projectName))).thenReturn(page);
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryProjectListPaging(loginUser,10,1,projectName);
result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName);
logger.info(result.toString());
pageInfo = (PageInfo<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists()));
}
@Test
public void testDeleteProject(){
public void testDeleteProject() {
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
User loginUser = getLoginUser();
//PROJECT_NOT_FOUNT
Map<String, Object> result= projectService.deleteProject(loginUser,12);
Map<String, Object> result = projectService.deleteProject(loginUser, 12);
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
loginUser.setId(2);
//USER_NO_OPERATION_PROJECT_PERM
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS));
//DELETE_PROJECT_ERROR_DEFINES_NOT_NULL
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(getProcessDefinitions());
loginUser.setUserType(UserType.ADMIN_USER);
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL,result.get(Constants.STATUS));
Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL, result.get(Constants.STATUS));
//success
Mockito.when(projectMapper.deleteById(1)).thenReturn(1);
Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(new ArrayList<>());
result= projectService.deleteProject(loginUser,1);
result = projectService.deleteProject(loginUser, 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testUpdate(){
public void testUpdate() {
User loginUser = getLoginUser();
Project project = getProject();
project.setId(2);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when( projectMapper.selectById(1)).thenReturn(getProject());
Mockito.when(projectMapper.selectById(1)).thenReturn(getProject());
// PROJECT_NOT_FOUNT
Map<String, Object> result = projectService.update(loginUser,12,projectName,"desc");
Map<String, Object> result = projectService.update(loginUser, 12, projectName, "desc");
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS));
//PROJECT_ALREADY_EXISTS
result = projectService.update(loginUser,1,projectName,"desc");
result = projectService.update(loginUser, 1, projectName, "desc");
logger.info(result.toString());
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS));
Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS));
//success
project.setUserId(1);
Mockito.when(projectMapper.updateById(Mockito.any(Project.class))).thenReturn(1);
result = projectService.update(loginUser,1,"test","desc");
result = projectService.update(loginUser, 1, "test", "desc");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testQueryAuthorizedProject(){
public void testQueryAuthorizedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList());
//USER_NO_OPERATION_PERM
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser,3);
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, 3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS));
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryAuthorizedProject(loginUser,1);
result = projectService.queryAuthorizedProject(loginUser, 1);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryAllProjectList(){
public void testQueryCreatedProject() {
User loginUser = getLoginUser();
Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(getList());
//USER_NO_OPERATION_PERM
Map<String, Object> result = projectService.queryProjectCreatedByUser(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = projectService.queryProjectCreatedByUser(loginUser);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryAllProjectList() {
Mockito.when(projectMapper.selectList(null)).thenReturn(getList());
Mockito.when(processDefinitionMapper.selectList(null)).thenReturn(getProcessDefinitions());
Map<String, Object> result = projectService.queryAllProjectList();
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
@Test
public void testQueryUnauthorizedProject(){
public void testQueryUnauthorizedProject() {
// Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList());
Mockito.when(projectMapper.queryProjectExceptUserId(2)).thenReturn(getList());
User loginUser = new User();
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser,2);
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, 2);
logger.info(result.toString());
List<Project> projects = (List<Project>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(projects));
}
private Project getProject(){
private Project getProject() {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
private List<Project> getList(){
private List<Project> getList() {
List<Project> list = new ArrayList<>();
list.add(getProject());
return list;
@ -311,30 +340,28 @@ public class ProjectServiceTest {
/**
* create admin user
* @return
*/
private User getLoginUser(){
private User getLoginUser() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName(userName);
loginUser.setId(1);
return loginUser;
}
/**
* get project user
*/
private ProjectUser getProjectUser(){
private ProjectUser getProjectUser() {
ProjectUser projectUser = new ProjectUser();
projectUser.setProjectId(1);
projectUser.setUserId(1);
return projectUser;
}
private List<ProcessDefinition> getProcessDefinitions(){
private List<ProcessDefinition> getProcessDefinitions() {
List<ProcessDefinition> list = new ArrayList<>();
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectId(1);
@ -343,9 +370,7 @@ public class ProjectServiceTest {
}
private String getDesc(){
private String getDesc() {
return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" +
".deleteProjectRelation(projectId,userId)projectUserMappe" +
"r.deleteProjectRelation(projectId,userId)projectUserMapper" +

34
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java

@ -17,6 +17,7 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.model.Server;
@ -24,12 +25,16 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -40,13 +45,6 @@ import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.quartz.Scheduler;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
@PrepareForTest(QuartzExecutors.class)
@ -57,10 +55,6 @@ public class SchedulerServiceTest {
@InjectMocks
private SchedulerService schedulerService;
@Autowired
private ExecutorService executorService;
@Mock
private MonitorService monitorService;
@ -72,21 +66,13 @@ public class SchedulerServiceTest {
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ProjectService projectService;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
private ProjectServiceImpl projectService;
@Mock
private QuartzExecutors quartzExecutors;
@Mock
private Scheduler scheduler;
@Before
public void setUp() {
@ -176,10 +162,10 @@ public class SchedulerServiceTest {
Mockito.when(quartzExecutors.deleteJob("1", "1")).thenReturn(true);
Mockito.when(quartzExecutors.buildJobGroupName(1)).thenReturn("1");
Mockito.when(quartzExecutors.buildJobName(1)).thenReturn("1");
boolean flag = true;
try {
schedulerService.deleteSchedule(1, 1);
}catch (Exception e){
} catch (Exception e) {
flag = false;
}
Assert.assertTrue(flag);

11
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java

@ -16,7 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import org.apache.dolphinscheduler.api.service.impl.SessionServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
@ -38,10 +43,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockCookie;
import org.springframework.mock.web.MockHttpServletRequest;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@RunWith(MockitoJUnitRunner.class)
public class SessionServiceTest {
@ -49,7 +50,7 @@ public class SessionServiceTest {
private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class);
@InjectMocks
private SessionService sessionService;
private SessionServiceImpl sessionService;
@Mock
private SessionMapper sessionMapper;

37
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java

@ -16,9 +16,12 @@
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
@ -30,6 +33,14 @@ import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -41,11 +52,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import java.text.MessageFormat;
import java.util.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
@ -59,7 +66,7 @@ public class TaskInstanceServiceTest {
ProjectMapper projectMapper;
@Mock
ProjectService projectService;
ProjectServiceImpl projectService;
@Mock
ProcessService processService;
@ -74,16 +81,16 @@ public class TaskInstanceServiceTest {
UsersService usersService;
@Test
public void queryTaskListPaging(){
public void queryTaskListPaging() {
String projectName = "project_test1";
User loginUser = getAdminUser();
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project auth fail
when(projectMapper.queryByName(projectName)).thenReturn(null);
when(projectService.checkProjectAndAuth(loginUser,null,projectName)).thenReturn(result);
when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result);
Map<String, Object> proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "",
"test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS));
@ -101,7 +108,7 @@ public class TaskInstanceServiceTest {
taskInstanceList.add(taskInstance);
pageReturn.setRecords(taskInstanceList);
when(projectMapper.queryByName(Mockito.anyString())).thenReturn(project);
when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result);
when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser);
when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId());
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""),
@ -130,6 +137,7 @@ public class TaskInstanceServiceTest {
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
@ -142,19 +150,21 @@ public class TaskInstanceServiceTest {
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName){
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
/**
* get Mock process instance
*
* @return process instance
*/
private ProcessInstance getProcessInstance() {
@ -169,6 +179,7 @@ public class TaskInstanceServiceTest {
/**
* get Mock task instance
*
* @return task instance
*/
private TaskInstance getTaskInstance() {

57
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java

@ -462,42 +462,87 @@ public class UsersServiceTest {
try {
//userName error
Map<String, Object> result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userName = "userTest0002";
userPassword = "userTest000111111111111111";
//password error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userPassword = "userTest0002";
email = "1q.com";
//email error
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//repeatPassword error
email = "7400@qq.com";
repeatPassword = "userPassword";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
repeatPassword = "userTest0002";
result = usersService.registerUser(userName, userPassword, repeatPassword, email);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error(Status.CREATE_USER_ERROR.getMsg(),e);
Assert.assertTrue(false);
}
}
@Test
public void testActivateUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
String userName = "userTest0002~";
try {
//not admin
Map<String, Object> result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//userName error
user.setUserType(UserType.ADMIN_USER);
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//user not exist
userName = "userTest10013";
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//user state error
userName = "userTest0001";
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
//success
when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser());
result = usersService.activateUser(user, userName);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
Assert.assertTrue(false);
}
}
/**
* get disabled user
* @return
*/
private User getDisabledUser() {
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
user.setState(0);
return user;
}
/**
* get user
* @return

5
dolphinscheduler-common/pom.xml

@ -580,6 +580,11 @@
</exclusions>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-annotation</artifactId>

3
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -898,6 +898,7 @@ public final class Constants {
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver";
/**
* database type
@ -910,6 +911,7 @@ public final class Constants {
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
public static final String DB2 = "DB2";
public static final String PRESTO = "PRESTO";
/**
* jdbc url
@ -922,6 +924,7 @@ public final class Constants {
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String JDBC_PRESTO = "jdbc:presto://";
public static final String ADDRESS = "address";
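The new Presto constants follow the same pattern as the existing driver and protocol constants. A minimal sketch, assuming a hypothetical host, port, catalog and schema (the real values come from the datasource form), of how they compose into a connection string:

import org.apache.dolphinscheduler.common.Constants;

public class PrestoConstantsSketch {
    public static void main(String[] args) {
        // hypothetical address and catalog/schema, for illustration only
        String address = Constants.JDBC_PRESTO + "presto-host:8080";
        String jdbcUrl = address + "/hive/default";
        System.out.println(Constants.COM_PRESTO_JDBC_DRIVER); // com.facebook.presto.jdbc.PrestoDriver
        System.out.println(jdbcUrl);                          // jdbc:presto://presto-host:8080/hive/default
    }
}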

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java

@ -33,6 +33,7 @@ public enum DbType {
* 5 oracle
* 6 sqlserver
* 7 db2
* 8 presto
*/
MYSQL(0, "mysql"),
POSTGRESQL(1, "postgresql"),
@ -41,7 +42,8 @@ public enum DbType {
CLICKHOUSE(4, "clickhouse"),
ORACLE(5, "oracle"),
SQLSERVER(6, "sqlserver"),
DB2(7, "db2");
DB2(7, "db2"),
PRESTO(8, "presto");
DbType(int code, String descp) {
this.code = code;
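A minimal sketch showing that the new constant resolves like any other DbType entry (only standard enum methods are used, no project-specific accessors assumed):

import org.apache.dolphinscheduler.common.enums.DbType;

public class DbTypeSketch {
    public static void main(String[] args) {
        DbType type = DbType.valueOf("PRESTO"); // resolves the newly added enum constant by name
        System.out.println(type);               // PRESTO
    }
}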

7
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java

@ -417,7 +417,12 @@ public class HadoopUtils implements Closeable {
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent = HttpUtils.get(applicationUrl);
String responseContent ;
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
responseContent = KerberosHttpClient.get(applicationUrl);
} else {
responseContent = HttpUtils.get(applicationUrl);
}
if (responseContent != null) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
result = jsonObject.path("app").path("finalStatus").asText();
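The change above switches between KerberosHttpClient.get and HttpUtils.get based on the kerberos startup flag. A sketch of that selection in isolation, with a hypothetical application URL:

import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.HttpUtils;
import org.apache.dolphinscheduler.common.utils.KerberosHttpClient;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;

public class YarnStatusFetchSketch {
    public static String fetch(String applicationUrl) {
        // pick the client based on whether kerberos authentication is enabled at startup
        if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
            return KerberosHttpClient.get(applicationUrl);
        }
        return HttpUtils.get(applicationUrl);
    }
}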

76
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java

@ -128,40 +128,50 @@ public class HttpUtils {
CloseableHttpClient httpclient = HttpUtils.getInstance();
HttpGet httpget = new HttpGet(url);
String responseContent = null;
CloseableHttpResponse response = null;
try {
response = httpclient.execute(httpget);
//check response status is 200
if (response.getStatusLine().getStatusCode() == 200) {
HttpEntity entity = response.getEntity();
if (entity != null) {
responseContent = EntityUtils.toString(entity, Constants.UTF_8);
}else{
logger.warn("http entity is null");
}
}else{
logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
}
}catch (Exception e){
logger.error(e.getMessage(),e);
}finally {
try {
if (response != null) {
EntityUtils.consume(response.getEntity());
response.close();
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
if (!httpget.isAborted()) {
httpget.releaseConnection();
httpget.abort();
}
return getResponseContentString(httpget,httpclient);
}
/**
* get http response content
*
* @param httpget httpget
* @param httpClient httpClient
* @return http get request response content
*/
public static String getResponseContentString(HttpGet httpget, CloseableHttpClient httpClient) {
String responseContent = null;
CloseableHttpResponse response = null;
try {
response = httpClient.execute(httpget);
// check response status is 200
if (response.getStatusLine().getStatusCode() == 200) {
HttpEntity entity = response.getEntity();
if (entity != null) {
responseContent = EntityUtils.toString(entity, Constants.UTF_8);
} else {
logger.warn("http entity is null");
}
return responseContent;
} else {
logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
}
} catch (IOException ioe) {
logger.error(ioe.getMessage(), ioe);
} finally {
try {
if (response != null) {
EntityUtils.consume(response.getEntity());
response.close();
}
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
if (!httpget.isAborted()) {
httpget.releaseConnection();
httpget.abort();
}
}
return responseContent;
}
}
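A short usage sketch of the extracted helper, reusing the shared client from HttpUtils.getInstance() the same way the refactored get(...) does:

import org.apache.dolphinscheduler.common.utils.HttpUtils;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;

public class HttpUtilsUsageSketch {
    public static void main(String[] args) {
        CloseableHttpClient client = HttpUtils.getInstance();
        HttpGet get = new HttpGet("https://github.com/manifest.json");
        // returns the body for a 200 response, null otherwise; the connection is released inside the helper
        String body = HttpUtils.getResponseContentString(get, client);
        System.out.println(body);
    }
}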

156
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java

@ -0,0 +1,156 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* kerberos http client
*/
public class KerberosHttpClient {
public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClient.class);
private String principal;
private String keyTabLocation;
public KerberosHttpClient(String principal, String keyTabLocation) {
super();
this.principal = principal;
this.keyTabLocation = keyTabLocation;
}
public KerberosHttpClient(String principal, String keyTabLocation, boolean isDebug) {
this(principal, keyTabLocation);
if (isDebug) {
System.setProperty("sun.security.spnego.debug", "true");
System.setProperty("sun.security.krb5.debug", "true");
}
}
public KerberosHttpClient(String principal, String keyTabLocation, String krb5Location, boolean isDebug) {
this(principal, keyTabLocation, isDebug);
System.setProperty("java.security.krb5.conf", krb5Location);
}
private static CloseableHttpClient buildSpengoHttpClient() {
HttpClientBuilder builder = HttpClientBuilder.create();
Lookup<AuthSchemeProvider> authSchemeRegistry = RegistryBuilder.<AuthSchemeProvider>create()
.register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true)).build();
builder.setDefaultAuthSchemeRegistry(authSchemeRegistry);
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(new AuthScope(null, -1, null), new Credentials() {
@Override
public Principal getUserPrincipal() {
return null;
}
@Override
public String getPassword() {
return null;
}
});
builder.setDefaultCredentialsProvider(credentialsProvider);
return builder.build();
}
public String get(final String url, final String userId) {
logger.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, url);
Configuration config = new Configuration() {
@SuppressWarnings("serial")
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, Object> options = new HashMap<>(9);
options.put("useTicketCache", "false");
options.put("useKeyTab", "true");
options.put("keyTab", keyTabLocation);
options.put("refreshKrb5Config", "true");
options.put("principal", principal);
options.put("storeKey", "true");
options.put("doNotPrompt", "true");
options.put("isInitiator", "true");
options.put("debug", "true");
return new AppConfigurationEntry[] {
new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
}
};
Set<Principal> princ = new HashSet<>(1);
princ.add(new KerberosPrincipal(userId));
Subject sub = new Subject(false, princ, new HashSet<>(), new HashSet<>());
LoginContext lc;
try {
lc = new LoginContext("", sub, null, config);
lc.login();
Subject serviceSubject = lc.getSubject();
return Subject.doAs(serviceSubject, (PrivilegedAction<String>) () -> {
CloseableHttpClient httpClient = buildSpengoHttpClient();
HttpGet httpget = new HttpGet(url);
return HttpUtils.getResponseContentString(httpget, httpClient);
});
} catch (LoginException le) {
logger.error("Kerberos authentication failed ", le);
}
return null;
}
/**
* get http request content by kerberosClient
*
* @param url url
* @return http get request response content
*/
public static String get(String url) {
String responseContent;
KerberosHttpClient kerberosHttpClient = new KerberosHttpClient(
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH),
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH), true);
responseContent = kerberosHttpClient.get(url, PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME));
return responseContent;
}
}
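A usage sketch of the new client with hypothetical principal, keytab and krb5.conf values; the static get(String) overload shown above reads the same values from PropertyUtils instead:

import org.apache.dolphinscheduler.common.utils.KerberosHttpClient;

public class KerberosClientSketch {
    public static void main(String[] args) {
        // hypothetical credentials and URL, for illustration only
        KerberosHttpClient client = new KerberosHttpClient(
                "hdfs-user@EXAMPLE.COM", "/etc/security/keytabs/hdfs.keytab", "/etc/krb5.conf", true);
        String body = client.get("http://resourcemanager:8088/ws/v1/cluster/apps/application_1",
                "hdfs-user@EXAMPLE.COM");
        System.out.println(body); // null if the SPNEGO login or the request fails
    }
}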

4
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java

@ -37,4 +37,8 @@ public class StringUtils {
public static boolean isNotBlank(String s){
return !isBlank(s);
}
public static String replaceNRTtoUnderline(String src){
return src.replaceAll("[\n|\r|\t]", "_");
}
}
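A small sketch of the new helper; note that the character class [\n|\r|\t] also matches a literal '|', so pipes are replaced along with newlines, carriage returns and tabs:

import org.apache.dolphinscheduler.common.utils.StringUtils;

public class ReplaceSketch {
    public static void main(String[] args) {
        String src = "line1\nline2\tcol|extra";
        // every \n, \r, \t and '|' becomes an underscore
        System.out.println(StringUtils.replaceNRTtoUnderline(src)); // line1_line2_col_extra
    }
}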

27
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java

@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
/**
* tri function function interface
*/
@FunctionalInterface
public interface TriFunction<IN1, IN2, IN3, OUT1> {
OUT1 apply(IN1 in1, IN2 in2, IN3 in3);
}
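A minimal sketch of the new functional interface in use, analogous to java.util.function.BiFunction but with three inputs:

import org.apache.dolphinscheduler.common.utils.TriFunction;

public class TriFunctionSketch {
    public static void main(String[] args) {
        // three inputs, one output
        TriFunction<Integer, Integer, Integer, Integer> sum = (a, b, c) -> a + b + c;
        System.out.println(sum.apply(1, 2, 3)); // 6
    }
}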

55
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java

@ -17,7 +17,13 @@
package org.apache.dolphinscheduler.common.utils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@ -28,26 +34,53 @@ import org.slf4j.LoggerFactory;
*/
public class HttpUtilsTest {
public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class);
private HadoopUtils hadoopUtils = HadoopUtils.getInstance();
@Test
public void testGetTest() {
// success
String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result);
ObjectNode jsonObject = JSONUtils.parseObject(result);
Assert.assertEquals("GitHub", jsonObject.path("name").asText());
result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result);
}
@Test
public void testGetByKerberos() {
try {
String applicationUrl = hadoopUtils.getApplicationUrl("application_1542010131334_0029");
String responseContent;
responseContent = HttpUtils.get(applicationUrl);
Assert.assertNull(responseContent);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
@Test
public void testGetResponseContentString() {
CloseableHttpClient httpclient = HttpClients.createDefault();
HttpGet httpget = new HttpGet("https://github.com/manifest.json");
/** set connect timeout, connection request timeout and socket timeout */
RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT)
.setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT)
.setSocketTimeout(Constants.SOCKET_TIMEOUT).setRedirectsEnabled(true).build();
httpget.setConfig(requestConfig);
String responseContent = HttpUtils.getResponseContentString(httpget, httpclient);
Assert.assertNotNull(responseContent);
}
@Test
public void testGetHttpClient() {
CloseableHttpClient httpClient1 = HttpUtils.getInstance();
CloseableHttpClient httpClient2 = HttpUtils.getInstance();
Assert.assertEquals(httpClient1, httpClient2);
}
}

46
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java

@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* KerberosHttpClient test
*/
public class KerberosHttpClientTest {
public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClientTest.class);
private HadoopUtils hadoopUtils = HadoopUtils.getInstance();
@Test
public void get() {
try {
String applicationUrl = hadoopUtils.getApplicationUrl("application_1542010131334_0029");
String responseContent;
KerberosHttpClient kerberosHttpClient = new KerberosHttpClient(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH), PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH), true);
responseContent = kerberosHttpClient.get(applicationUrl,
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME));
Assert.assertNull(responseContent);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
}

1
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java

@ -133,6 +133,7 @@ public abstract class BaseDataSource {
case MYSQL:
case ORACLE:
case POSTGRESQL:
case PRESTO:
separator = "?";
break;
case DB2:
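With PRESTO added to the first group, extra connection parameters are appended after a "?" just as for MySQL and PostgreSQL. A hedged illustration with a hypothetical URL and parameter:

public class SeparatorSketch {
    public static void main(String[] args) {
        String separator = "?"; // the value the switch above selects for PRESTO
        // hypothetical base URL and property, for illustration only
        String jdbcUrl = "jdbc:presto://presto-host:8080/hive/default" + separator + "SSL=true";
        System.out.println(jdbcUrl);
    }
}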

5
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java

@ -54,6 +54,8 @@ public class DataSourceFactory {
return JSONUtils.parseObject(parameter, SQLServerDataSource.class);
case DB2:
return JSONUtils.parseObject(parameter, DB2ServerDataSource.class);
case PRESTO:
return JSONUtils.parseObject(parameter, PrestoDataSource.class);
default:
return null;
}
@ -94,6 +96,9 @@ public class DataSourceFactory {
case DB2:
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
case PRESTO:
Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);
break;
default:
logger.error("not support sql type: {},can't load class", dbType);
throw new IllegalArgumentException("not support sql type,can't load class");
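The new PRESTO branch registers the driver with Class.forName before a connection is opened. A minimal sketch of that step (it assumes the presto-jdbc dependency added to dolphinscheduler-common is on the classpath):

import org.apache.dolphinscheduler.common.Constants;

public class PrestoDriverLoadSketch {
    public static void main(String[] args) throws ClassNotFoundException {
        // mirrors the new PRESTO case: load the driver class before use
        Class.forName(Constants.COM_PRESTO_JDBC_DRIVER);
        System.out.println("presto driver loaded");
    }
}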

39
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java

@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
public class PrestoDataSource extends BaseDataSource {
/**
* @return driver class
*/
@Override
public String driverClassSelector() {
return Constants.COM_PRESTO_JDBC_DRIVER;
}
/**
* @return db type
*/
@Override
public DbType dbTypeSelector() {
return DbType.PRESTO;
}
}
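The new datasource subtype only overrides the two selectors; a quick sketch of what they return:

import org.apache.dolphinscheduler.dao.datasource.PrestoDataSource;

public class PrestoDataSourceSketch {
    public static void main(String[] args) {
        PrestoDataSource ds = new PrestoDataSource();
        System.out.println(ds.driverClassSelector()); // com.facebook.presto.jdbc.PrestoDriver
        System.out.println(ds.dbTypeSelector());      // PRESTO
    }
}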

101
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java

@ -16,13 +16,13 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import java.util.Date;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.util.Date;
/**
* project
*/
@ -32,7 +32,7 @@ public class Project {
/**
* id
*/
@TableId(value="id", type=IdType.AUTO)
@TableId(value = "id", type = IdType.AUTO)
private int id;
/**
@ -44,7 +44,7 @@ public class Project {
/**
* user name
*/
@TableField(exist=false)
@TableField(exist = false)
private String userName;
/**
@ -70,19 +70,19 @@ public class Project {
/**
* permission
*/
@TableField(exist=false)
@TableField(exist = false)
private int perm;
/**
* process define count
*/
@TableField(exist=false)
@TableField(exist = false)
private int defCount;
/**
* process instance running count
*/
@TableField(exist=false)
@TableField(exist = false)
private int instRunningCount;
public int getDefCount() {
@ -136,6 +136,7 @@ public class Project {
public void setDescription(String description) {
this.description = description;
}
public String getDescription() {
return description;
}
@ -163,6 +164,7 @@ public class Project {
public void setPerm(int perm) {
this.perm = perm;
}
@Override
public String toString() {
return "Project{" +
@ -176,7 +178,6 @@ public class Project {
'}';
}
@Override
public boolean equals(Object o) {
if (this == o) {
@ -202,4 +203,88 @@ public class Project {
return result;
}
public static Builder newBuilder() {
return new Builder();
}
public static final class Builder {
private int id;
private int userId;
private String userName;
private String name;
private String description;
private Date createTime;
private Date updateTime;
private int perm;
private int defCount;
private int instRunningCount;
private Builder() {
}
public Builder id(int id) {
this.id = id;
return this;
}
public Builder userId(int userId) {
this.userId = userId;
return this;
}
public Builder userName(String userName) {
this.userName = userName;
return this;
}
public Builder name(String name) {
this.name = name;
return this;
}
public Builder description(String description) {
this.description = description;
return this;
}
public Builder createTime(Date createTime) {
this.createTime = createTime;
return this;
}
public Builder updateTime(Date updateTime) {
this.updateTime = updateTime;
return this;
}
public Builder perm(int perm) {
this.perm = perm;
return this;
}
public Builder defCount(int defCount) {
this.defCount = defCount;
return this;
}
public Builder instRunningCount(int instRunningCount) {
this.instRunningCount = instRunningCount;
return this;
}
public Project build() {
Project project = new Project();
project.setId(id);
project.setUserId(userId);
project.setUserName(userName);
project.setName(name);
project.setDescription(description);
project.setCreateTime(createTime);
project.setUpdateTime(updateTime);
project.setPerm(perm);
project.setDefCount(defCount);
project.setInstRunningCount(instRunningCount);
return project;
}
}
}
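A usage sketch of the new fluent builder (field values are illustrative):

import org.apache.dolphinscheduler.dao.entity.Project;

public class ProjectBuilderSketch {
    public static void main(String[] args) {
        Project project = Project.newBuilder()
                .id(1)
                .userId(1)
                .name("project_test1")
                .description("built via the new builder")
                .build();
        System.out.println(project); // uses the existing toString()
    }
}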

1
dolphinscheduler-dist/release-docs/LICENSE

@ -384,6 +384,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0
xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C
zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0
presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1
========================================================================

7
dolphinscheduler-dist/release-docs/NOTICE

@ -43,6 +43,13 @@ The following artifacts are EPL and CDDL 1.0.
* org.eclipse.jetty.orbit:javax.mail.glassfish
------
presto-jdbc
The code for the t-digest was originally authored by Ted Dunning
Adrien Grand contributed the heart of the AVLTreeDigest (https://github.com/jpountz)
------
Oracle

201
dolphinscheduler-dist/release-docs/licenses/LICENSE-presto-jdbc.txt

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

63
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java

@ -20,7 +20,7 @@ import java.io.Serializable;
import java.util.Objects;
/**
* server address
* server address
*/
public class Host implements Serializable {
@ -39,6 +39,16 @@ public class Host implements Serializable {
*/
private int port;
/**
* weight
*/
private int weight;
/**
* workGroup
*/
private String workGroup;
public Host() {
}
@ -48,6 +58,21 @@ public class Host implements Serializable {
this.address = ip + ":" + port;
}
public Host(String ip, int port, int weight) {
this.ip = ip;
this.port = port;
this.address = ip + ":" + port;
this.weight = weight;
}
public Host(String ip, int port, int weight,String workGroup) {
this.ip = ip;
this.port = port;
this.address = ip + ":" + port;
this.weight = weight;
this.workGroup=workGroup;
}
public String getAddress() {
return address;
}
@ -65,6 +90,14 @@ public class Host implements Serializable {
this.address = ip + ":" + port;
}
public int getWeight() {
return weight;
}
public void setWeight(int weight) {
this.weight = weight;
}
public int getPort() {
return port;
}
@ -74,31 +107,47 @@ public class Host implements Serializable {
this.address = ip + ":" + port;
}
public String getWorkGroup() {
return workGroup;
}
public void setWorkGroup(String workGroup) {
this.workGroup = workGroup;
}
/**
* address convert host
*
* @param address address
* @return host
*/
public static Host of(String address){
if(address == null) {
public static Host of(String address) {
if (address == null) {
throw new IllegalArgumentException("Host : address is null.");
}
String[] parts = address.split(":");
if (parts.length != 2) {
if (parts.length < 2) {
throw new IllegalArgumentException(String.format("Host : %s illegal.", address));
}
Host host = new Host(parts[0], Integer.parseInt(parts[1]));
Host host = null;
if (parts.length == 2) {
host = new Host(parts[0], Integer.parseInt(parts[1]));
}
if (parts.length == 3) {
host = new Host(parts[0], Integer.parseInt(parts[1]), Integer.parseInt(parts[2]));
}
return host;
}
/**
* whether old version
*
* @param address address
* @return old version is true , otherwise is false
*/
public static Boolean isOldVersion(String address){
public static Boolean isOldVersion(String address) {
String[] parts = address.split(":");
return parts.length != 2 ? true : false;
return parts.length != 2 && parts.length != 3;
}
@Override
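To make the extended parsing concrete, here is a minimal, hypothetical sketch (the class name HostOfSketch and the addresses are illustrative, not part of the patch) exercising the updated Host.of and isOldVersion: an "ip:port" address still resolves as before, while "ip:port:weight" now also resolves and carries the weight through to the Host instance.

import org.apache.dolphinscheduler.remote.utils.Host;

public class HostOfSketch {
    public static void main(String[] args) {
        // two-part address: weight stays at its default of 0
        Host plain = Host.of("192.168.1.1:1234");
        System.out.println(plain.getAddress() + " weight=" + plain.getWeight());

        // three-part address: the trailing segment is parsed as the weight
        Host weighted = Host.of("192.168.1.1:1234:100");
        System.out.println(weighted.getAddress() + " weight=" + weighted.getWeight());

        // anything other than ip:port or ip:port:weight is treated as an old-version address
        System.out.println(Host.isOldVersion("192.168.1.1"));
    }
}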

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java vendored

@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap;
public class TaskInstanceCacheManagerImpl implements TaskInstanceCacheManager {
/**
* taskInstance caceh
* taskInstance cache
*/
private Map<Integer,TaskInstance> taskInstanceCache = new ConcurrentHashMap<>();

7
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java

@ -71,7 +71,12 @@ public abstract class CommonHostManager implements HostManager {
return host;
}
List<Host> candidateHosts = new ArrayList<>(nodes.size());
nodes.stream().forEach(node -> candidateHosts.add(Host.of(node)));
nodes.forEach(node -> {
Host nodeHost=Host.of(node);
nodeHost.setWorkGroup(context.getWorkerGroup());
candidateHosts.add(nodeHost);
});
return select(candidateHosts);
}

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java

@ -38,7 +38,7 @@ public class RandomHostManager extends CommonHostManager {
* set round robin
*/
public RandomHostManager(){
this.selector = new RandomSelector<>();
this.selector = new RandomSelector();
}
@Override

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java

@ -38,7 +38,7 @@ public class RoundRobinHostManager extends CommonHostManager {
* set round robin
*/
public RoundRobinHostManager(){
this.selector = new RoundRobinSelector<>();
this.selector = new RoundRobinSelector();
}
@Override

45
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java

@ -17,27 +17,44 @@
package org.apache.dolphinscheduler.server.master.dispatch.host.assign;
import org.apache.dolphinscheduler.remote.utils.Host;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Random;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
/**
* random selector
* @param <T> T
*/
public class RandomSelector<T> extends AbstractSelector<T> {
private final Random random = new Random();
public class RandomSelector extends AbstractSelector<Host> {
@Override
public T doSelect(final Collection<T> source) {
int size = source.size();
/**
* random select
*/
int randomIndex = random.nextInt(size);
return (T) source.toArray()[randomIndex];
public Host doSelect(final Collection<Host> source) {
List<Host> hosts = new ArrayList<>(source);
int size = hosts.size();
int[] weights = new int[size];
int totalWeight = 0;
int index = 0;
for (Host host : hosts) {
totalWeight += host.getWeight();
weights[index] = host.getWeight();
index++;
}
if (totalWeight > 0) {
int offset = ThreadLocalRandom.current().nextInt(totalWeight);
for (int i = 0; i < size; i++) {
offset -= weights[i];
if (offset < 0) {
return hosts.get(i);
}
}
}
return hosts.get(ThreadLocalRandom.current().nextInt(size));
}
}
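The reworked doSelect above performs weighted random selection: a random offset in [0, totalWeight) is walked down the per-host weight array, so each host is picked with probability proportional to its weight, with a uniform fallback when all weights are zero. A minimal sketch of the expected behaviour (class name, addresses, and weights are illustrative assumptions):

import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.server.master.dispatch.host.assign.RandomSelector;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RandomSelectorSketch {
    public static void main(String[] args) {
        List<Host> hosts = Arrays.asList(
                new Host("192.168.1.1", 1234, 30, "default"),
                new Host("192.168.1.2", 1234, 70, "default"));
        RandomSelector selector = new RandomSelector();
        Map<String, Integer> counts = new HashMap<>();
        for (int i = 0; i < 10000; i++) {
            counts.merge(selector.doSelect(hosts).getAddress(), 1, Integer::sum);
        }
        // expect roughly a 30% / 70% split between the two addresses
        System.out.println(counts);
    }
}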

120
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java

@ -16,27 +16,123 @@
*/
package org.apache.dolphinscheduler.server.master.dispatch.host.assign;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.springframework.stereotype.Service;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
/**
* round robin selector
* @param <T> T
* Smooth Weight Round Robin
*/
@Service
public class RoundRobinSelector<T> extends AbstractSelector<T> {
public class RoundRobinSelector extends AbstractSelector<Host> {
private ConcurrentMap<String, ConcurrentMap<String, WeightedRoundRobin>> workGroupWeightMap = new ConcurrentHashMap<>();
private static final int RECYCLE_PERIOD = 100000;
private AtomicBoolean updateLock = new AtomicBoolean();
protected static class WeightedRoundRobin {
private int weight;
private AtomicLong current = new AtomicLong(0);
private long lastUpdate;
int getWeight() {
return weight;
}
void setWeight(int weight) {
this.weight = weight;
current.set(0);
}
long increaseCurrent() {
return current.addAndGet(weight);
}
void sel(int total) {
current.addAndGet(-1L * total);
}
long getLastUpdate() {
return lastUpdate;
}
void setLastUpdate(long lastUpdate) {
this.lastUpdate = lastUpdate;
}
}
private final AtomicInteger index = new AtomicInteger(0);
@Override
public T doSelect(Collection<T> source) {
public Host doSelect(Collection<Host> source) {
List<Host> hosts = new ArrayList<>(source);
String key = hosts.get(0).getWorkGroup();
ConcurrentMap<String, WeightedRoundRobin> map = workGroupWeightMap.get(key);
if (map == null) {
workGroupWeightMap.putIfAbsent(key, new ConcurrentHashMap<>());
map = workGroupWeightMap.get(key);
}
int totalWeight = 0;
long maxCurrent = Long.MIN_VALUE;
long now = System.currentTimeMillis();
Host selectedHost = null;
WeightedRoundRobin selectWeightRoundRobin = null;
for (Host host : hosts) {
String workGroupHost = host.getWorkGroup() + host.getAddress();
WeightedRoundRobin weightedRoundRobin = map.get(workGroupHost);
int weight = host.getWeight();
if (weight < 0) {
weight = 0;
}
if (weightedRoundRobin == null) {
weightedRoundRobin = new WeightedRoundRobin();
// set weight
weightedRoundRobin.setWeight(weight);
map.putIfAbsent(workGroupHost, weightedRoundRobin);
weightedRoundRobin = map.get(workGroupHost);
}
if (weight != weightedRoundRobin.getWeight()) {
weightedRoundRobin.setWeight(weight);
}
long cur = weightedRoundRobin.increaseCurrent();
weightedRoundRobin.setLastUpdate(now);
if (cur > maxCurrent) {
maxCurrent = cur;
selectedHost = host;
selectWeightRoundRobin = weightedRoundRobin;
}
totalWeight += weight;
}
if (!updateLock.get() && hosts.size() != map.size() && updateLock.compareAndSet(false, true)) {
try {
ConcurrentMap<String, WeightedRoundRobin> newMap = new ConcurrentHashMap<>(map);
newMap.entrySet().removeIf(item -> now - item.getValue().getLastUpdate() > RECYCLE_PERIOD);
workGroupWeightMap.put(key, newMap);
} finally {
updateLock.set(false);
}
}
if (selectedHost != null) {
selectWeightRoundRobin.sel(totalWeight);
return selectedHost;
}
int size = source.size();
/**
* round robin
*/
return (T) source.toArray()[index.getAndIncrement() % size];
return hosts.get(0);
}
}
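The new doSelect is a smooth weighted round robin: each host's running counter is increased by its weight, the host with the largest counter wins, and the winner's counter is then reduced by the total weight, so over one full cycle of totalWeight selections each host is chosen exactly weight times, interleaved rather than bunched. Note that the per-group state is keyed by the first host's workGroup, so callers need to set a non-null workGroup on every Host. A minimal sketch with illustrative addresses and weights (not part of the patch):

import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.server.master.dispatch.host.assign.RoundRobinSelector;

import java.util.Arrays;
import java.util.List;

public class RoundRobinSelectorSketch {
    public static void main(String[] args) {
        List<Host> hosts = Arrays.asList(
                new Host("192.168.1.1", 1234, 5, "default"),
                new Host("192.168.1.2", 1234, 1, "default"),
                new Host("192.168.1.3", 1234, 1, "default"));
        RoundRobinSelector selector = new RoundRobinSelector();
        // one cycle = totalWeight (7) picks: the weight-5 host appears five times,
        // each weight-1 host once, spread smoothly across the cycle
        for (int i = 0; i < 7; i++) {
            System.out.println(selector.doSelect(hosts).getAddress());
        }
    }
}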

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java vendored

@ -32,7 +32,7 @@ public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContex
/**
* taskInstance caceh
* taskInstance cache
*/
private Map<Integer,TaskExecutionContext> taskExecutionContextCache = new ConcurrentHashMap<>();

12
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java

@ -49,6 +49,9 @@ public class WorkerConfig {
@Value("${worker.listen.port: 1234}")
private int listenPort;
@Value("${worker.weight:100}")
private int weight;
public int getListenPort() {
return listenPort;
}
@ -107,4 +110,13 @@ public class WorkerConfig {
public void setWorkerMaxCpuloadAvg(int workerMaxCpuloadAvg) {
this.workerMaxCpuloadAvg = workerMaxCpuloadAvg;
}
public int getWeight() {
return weight;
}
public void setWeight(int weight) {
this.weight = weight;
}
}

28
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java

@ -16,9 +16,6 @@
*/
package org.apache.dolphinscheduler.server.worker.registry;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import static org.apache.dolphinscheduler.common.Constants.SLASH;
import java.util.Date;
import java.util.Set;
import java.util.concurrent.Executors;
@ -44,9 +41,11 @@ import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* worker registry
* worker registry
*/
@Service
public class WorkerRegistry {
@ -54,13 +53,13 @@ public class WorkerRegistry {
private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class);
/**
* zookeeper registry center
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* worker config
* worker config
*/
@Autowired
private WorkerConfig workerConfig;
@ -86,7 +85,7 @@ public class WorkerRegistry {
}
/**
* registry
* registry
*/
public void registry() {
String address = NetUtils.getHost();
@ -122,7 +121,7 @@ public class WorkerRegistry {
}
/**
* remove registry info
* remove registry info
*/
public void unRegistry() {
String address = getLocalAddress();
@ -135,13 +134,14 @@ public class WorkerRegistry {
}
/**
* get worker path
* get worker path
*/
private Set<String> getWorkerZkPaths() {
Set<String> workerZkPaths = Sets.newHashSet();
String address = getLocalAddress();
String workerZkPathPrefix = this.zookeeperRegistryCenter.getWorkerPath();
String weight = getWorkerWeight();
for (String workGroup : this.workerGroups) {
StringBuilder workerZkPathBuilder = new StringBuilder(100);
@ -152,15 +152,23 @@ public class WorkerRegistry {
// trim and lower case is need
workerZkPathBuilder.append(workGroup.trim().toLowerCase()).append(SLASH);
workerZkPathBuilder.append(address);
workerZkPathBuilder.append(weight);
workerZkPaths.add(workerZkPathBuilder.toString());
}
return workerZkPaths;
}
/**
* get local address
* get local address
*/
private String getLocalAddress() {
return NetUtils.getHost() + ":" + workerConfig.getListenPort();
}
/**
* get Worker Weight
*/
private String getWorkerWeight() {
return ":" + workerConfig.getWeight();
}
}
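Taken together with the Host changes above, the worker now appends its weight as a third segment of the ZooKeeper node name it registers, which is what Host.of later parses on the master side. A hypothetical sketch of the resulting path; the /dolphinscheduler/nodes/worker prefix and the concrete values are illustrative assumptions, since the real prefix comes from ZookeeperRegistryCenter.getWorkerPath():

public class WorkerZkPathSketch {
    public static void main(String[] args) {
        String workerPathPrefix = "/dolphinscheduler/nodes/worker"; // assumed prefix
        String workGroup = "default";                               // from worker.groups
        String address = "192.168.1.1:1234";                        // host + worker.listen.port
        int weight = 100;                                           // from worker.weight
        // mirrors getWorkerZkPaths(): prefix / group / address + ":" + weight
        String zkPath = workerPathPrefix + "/" + workGroup.trim().toLowerCase() + "/" + address + ":" + weight;
        System.out.println(zkPath); // /dolphinscheduler/nodes/worker/default/192.168.1.1:1234:100
    }
}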

24
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java

@ -147,8 +147,8 @@ public class SqlTask extends AbstractTask {
}
/**
* ready to execute SQL and parameter entity Map
* @return
* ready to execute SQL and parameter entity Map
* @return SqlBinds
*/
private SqlBinds getSqlAndSqlParamsMap(String sql) {
Map<Integer,Property> sqlParamsMap = new HashMap<>();
@ -250,7 +250,7 @@ public class SqlTask extends AbstractTask {
* result process
*
* @param resultSet resultSet
* @throws Exception
* @throws Exception Exception
*/
private void resultProcess(ResultSet resultSet) throws Exception{
ArrayNode resultJSONArray = JSONUtils.createArrayNode();
@ -293,7 +293,7 @@ public class SqlTask extends AbstractTask {
}
/**
* post psql
* post sql
*
* @param connection connection
* @param postStatementsBinds postStatementsBinds
@ -329,7 +329,7 @@ public class SqlTask extends AbstractTask {
* create connection
*
* @return connection
* @throws Exception
* @throws Exception Exception
*/
private Connection createConnection() throws Exception{
// if hive , load connection params if exists
@ -367,7 +367,7 @@ public class SqlTask extends AbstractTask {
try {
resultSet.close();
} catch (SQLException e) {
logger.error("close result set error : {}",e.getMessage(),e);
}
}
@ -375,7 +375,7 @@ public class SqlTask extends AbstractTask {
try {
pstmt.close();
} catch (SQLException e) {
logger.error("close prepared statement error : {}",e.getMessage(),e);
}
}
@ -383,17 +383,17 @@ public class SqlTask extends AbstractTask {
try {
connection.close();
} catch (SQLException e) {
logger.error("close connection error : {}",e.getMessage(),e);
}
}
}
/**
* preparedStatement bind
* @param connection
* @param sqlBinds
* @return
* @throws Exception
* @param connection connection
* @param sqlBinds sqlBinds
* @return PreparedStatement
* @throws Exception Exception
*/
private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception {
// is the timeout set

3
dolphinscheduler-server/src/main/resources/worker.properties

@ -32,3 +32,6 @@
# default worker group
#worker.groups=default
# default worker weight
#worker.weight=100

Some files were not shown because too many files have changed in this diff.
