diff --git a/.github/workflows/ci_ut.yml b/.github/workflows/ci_ut.yml index 70790a7650..739c9be7fa 100644 --- a/.github/workflows/ci_ut.yml +++ b/.github/workflows/ci_ut.yml @@ -91,3 +91,30 @@ jobs: mkdir -p ${LOG_DIR} docker-compose -f $(pwd)/docker/docker-swarm/docker-compose.yml logs dolphinscheduler-postgresql > ${LOG_DIR}/db.txt continue-on-error: true + + Checkstyle: + name: Check code style + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + # In the checkout@v2, it doesn't support git submodule. Execute the commands manually. + - name: checkout submodules + shell: bash + run: | + git submodule sync --recursive + git -c protocol.version=2 submodule update --init --force --recursive --depth=1 + - name: check code style + env: + WORKDIR: ./ + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CHECKSTYLE_CONFIG: style/checkstyle.xml + REVIEWDOG_VERSION: v0.10.2 + run: | + wget -O - -q https://github.com/checkstyle/checkstyle/releases/download/checkstyle-8.22/checkstyle-8.22-all.jar > /opt/checkstyle.jar + wget -O - -q https://raw.githubusercontent.com/reviewdog/reviewdog/master/install.sh | sh -s -- -b /opt ${REVIEWDOG_VERSION} + java -jar /opt/checkstyle.jar "${WORKDIR}" -c "${CHECKSTYLE_CONFIG}" -f xml \ + | /opt/reviewdog -f=checkstyle \ + -reporter="${INPUT_REPORTER:-github-pr-check}" \ + -filter-mode="${INPUT_FILTER_MODE:-added}" \ + -fail-on-error="${INPUT_FAIL_ON_ERROR:-false}" \ No newline at end of file diff --git a/.gitignore b/.gitignore index 17b0dc6610..7a99e2e4b0 100644 --- a/.gitignore +++ b/.gitignore @@ -19,6 +19,7 @@ third-party-dependencies.txt *.iws *.tgz .*.swp +.factorypath .vim .tmp **/node_modules diff --git a/docker/build/Dockerfile b/docker/build/Dockerfile index d0f16d5d0d..ceb94ea8c5 100644 --- a/docker/build/Dockerfile +++ b/docker/build/Dockerfile @@ -27,7 +27,7 @@ ENV DEBIAN_FRONTEND noninteractive #If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example: #RUN sed -i 
"s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories RUN apk update && \ - apk add dos2unix shadow bash openrc python python3 sudo vim wget iputils net-tools openssh-server py2-pip tini && \ + apk --update add --no-cache dos2unix shadow bash openrc python2 python3 sudo vim wget iputils net-tools openssh-server py-pip tini && \ apk add --update procps && \ openrc boot && \ pip install kazoo diff --git a/docker/build/README.md b/docker/build/README.md index bc516bc214..951f2d6b51 100644 --- a/docker/build/README.md +++ b/docker/build/README.md @@ -238,6 +238,10 @@ This environment variable sets max cpu load avg for `worker-server`. The default This environment variable sets reserved memory for `worker-server`. The default value is `0.1`. +**`WORKER_WEIGHT`** + +This environment variable sets port for `worker-server`. The default value is `100`. + **`WORKER_LISTEN_PORT`** This environment variable sets port for `worker-server`. The default value is `1234`. diff --git a/docker/build/README_zh_CN.md b/docker/build/README_zh_CN.md index c2affc0691..c4339a945c 100644 --- a/docker/build/README_zh_CN.md +++ b/docker/build/README_zh_CN.md @@ -238,6 +238,10 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些 配置`worker-server`的保留内存,默认值 `0.1`。 +**`WORKER_WEIGHT`** + +配置`worker-server`的权重,默认之`100`。 + **`WORKER_LISTEN_PORT`** 配置`worker-server`的端口,默认值 `1234`。 diff --git a/docker/build/conf/dolphinscheduler/worker.properties.tpl b/docker/build/conf/dolphinscheduler/worker.properties.tpl index d596be94bc..83097dd9a4 100644 --- a/docker/build/conf/dolphinscheduler/worker.properties.tpl +++ b/docker/build/conf/dolphinscheduler/worker.properties.tpl @@ -34,4 +34,7 @@ worker.reserved.memory=${WORKER_RESERVED_MEMORY} #worker.listen.port=${WORKER_LISTEN_PORT} # default worker group -#worker.group=${WORKER_GROUP} \ No newline at end of file +#worker.groups=${WORKER_GROUP} + +# default worker weight +#worker.weight=${WORKER_WEIGHT} \ No newline at end of file diff --git 
a/docker/build/startup-init-conf.sh b/docker/build/startup-init-conf.sh index 73fdad6798..d5cd86f1a4 100644 --- a/docker/build/startup-init-conf.sh +++ b/docker/build/startup-init-conf.sh @@ -74,6 +74,7 @@ export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"} export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"} export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"} export WORKER_GROUP=${WORKER_GROUP:-"default"} +export WORKER_WEIGHT=${WORKER_WEIGHT:-"100"} #============================================================================ # Alert Server diff --git a/docker/docker-swarm/docker-compose.yml b/docker/docker-swarm/docker-compose.yml index 51eb0aeaa5..349b3ad790 100644 --- a/docker/docker-swarm/docker-compose.yml +++ b/docker/docker-swarm/docker-compose.yml @@ -187,6 +187,7 @@ services: WORKER_MAX_CPULOAD_AVG: "100" WORKER_RESERVED_MEMORY: "0.1" WORKER_GROUP: "default" + WORKER_WEIGHT: "100" DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler" DATABASE_HOST: dolphinscheduler-postgresql DATABASE_PORT: 5432 diff --git a/docker/docker-swarm/docker-stack.yml b/docker/docker-swarm/docker-stack.yml index ca9f7c88c7..dff4a47b2c 100644 --- a/docker/docker-swarm/docker-stack.yml +++ b/docker/docker-swarm/docker-stack.yml @@ -187,6 +187,7 @@ services: WORKER_MAX_CPULOAD_AVG: "100" WORKER_RESERVED_MEMORY: "0.1" WORKER_GROUP: "default" + WORKER_WEIGHT: "100" DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler" DATABASE_HOST: dolphinscheduler-postgresql DATABASE_PORT: 5432 diff --git a/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml index 1e08b67b53..569341c225 100644 --- a/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml @@ -31,6 +31,7 @@ data: WORKER_RESERVED_MEMORY: {{ 
.Values.worker.configmap.WORKER_RESERVED_MEMORY | quote }} WORKER_LISTEN_PORT: {{ .Values.worker.configmap.WORKER_LISTEN_PORT | quote }} WORKER_GROUP: {{ .Values.worker.configmap.WORKER_GROUP | quote }} + WORKER_WEIGHT: {{ .Values.worker.configmap.WORKER_WEIGHT | quote }} DOLPHINSCHEDULER_DATA_BASEDIR_PATH: {{ include "dolphinscheduler.worker.base.dir" . | quote }} dolphinscheduler_env.sh: |- {{- range .Values.worker.configmap.DOLPHINSCHEDULER_ENV }} diff --git a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml index 51a83bcfa7..92c2c72398 100644 --- a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml @@ -162,6 +162,12 @@ spec: {{- else }} value: {{ .Values.externalZookeeper.zookeeperQuorum }} {{- end }} + - name: ZOOKEEPER_ROOT + {{- if .Values.zookeeper.enabled }} + value: "/dolphinscheduler" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperRoot }} + {{- end }} - name: RESOURCE_STORAGE_TYPE valueFrom: configMapKeyRef: diff --git a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml index 0949127dda..e9dc7919ca 100644 --- a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml @@ -228,6 +228,12 @@ spec: {{- else }} value: {{ .Values.externalZookeeper.zookeeperQuorum }} {{- end }} + - name: ZOOKEEPER_ROOT + {{- if .Values.zookeeper.enabled }} + value: "/dolphinscheduler" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperRoot }} + {{- end }} - name: RESOURCE_STORAGE_TYPE valueFrom: configMapKeyRef: diff --git 
a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml index 097f8d8580..ae562cc62b 100644 --- a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml @@ -162,6 +162,11 @@ spec: configMapKeyRef: name: {{ include "dolphinscheduler.fullname" . }}-worker key: WORKER_GROUP + - name: WORKER_WEIGHT + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_WEIGHT - name: DOLPHINSCHEDULER_DATA_BASEDIR_PATH valueFrom: configMapKeyRef: @@ -225,6 +230,12 @@ spec: {{- else }} value: {{ .Values.externalZookeeper.zookeeperQuorum }} {{- end }} + - name: ZOOKEEPER_ROOT + {{- if .Values.zookeeper.enabled }} + value: "/dolphinscheduler" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperRoot }} + {{- end }} - name: RESOURCE_STORAGE_TYPE valueFrom: configMapKeyRef: diff --git a/docker/kubernetes/dolphinscheduler/values.yaml b/docker/kubernetes/dolphinscheduler/values.yaml index 8acb1d326a..3261b08401 100644 --- a/docker/kubernetes/dolphinscheduler/values.yaml +++ b/docker/kubernetes/dolphinscheduler/values.yaml @@ -201,6 +201,7 @@ worker: WORKER_RESERVED_MEMORY: "0.1" WORKER_LISTEN_PORT: "1234" WORKER_GROUP: "default" + WORKER_WEIGHT: "100" DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler" DOLPHINSCHEDULER_ENV: - "export HADOOP_HOME=/opt/soft/hadoop" diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index 035551e669..76dd8980b7 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -152,6 +152,10 @@ javax.servlet servlet-api + + org.apache.curator + curator-client + @@ -244,4 +248,4 @@ - \ No newline at end of file + diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index 8731b264e9..2457177cdf 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -17,6 +17,12 @@ package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_ACCESS_TOKEN_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_ACCESS_TOKEN_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GENERATE_TOKEN_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_ACCESSTOKEN_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_ACCESS_TOKEN_ERROR; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AccessTokenService; @@ -24,20 +30,26 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; +import 
org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * access token controller diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java index a5b8176a48..7d612b8b1d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java @@ -17,25 +17,34 @@ package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR; + import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.LoggerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** @@ -70,7 +79,7 @@ public class LoggerController extends BaseController { @GetMapping(value = "/detail") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_TASK_INSTANCE_LOG_ERROR) - public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "taskInstanceId") int taskInstanceId, @RequestParam(value = "skipLineNum") int skipNum, @RequestParam(value = "limit") int limit) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index 6b539d01b1..29f415bac2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -95,27 +95,65 @@ public class ProcessDefinitionController extends BaseController { } /** - * copy process definition + * copy process definition * * @param loginUser login user * @param projectName project name - * @param processId process definition id + * @param processDefinitionIds process definition ids + * @param targetProjectId target project id * @return copy result code */ @ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), + @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") }) @PostMapping(value = "/copy") @ResponseStatus(HttpStatus.OK) - @ApiException(COPY_PROCESS_DEFINITION_ERROR) + @ApiException(BATCH_COPY_PROCESS_DEFINITION_ERROR) public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processId", required = true) int processId) throws JsonProcessingException { - logger.info("copy process definition, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processId); - Map result = processDefinitionService.copyProcessDefinition(loginUser, projectName, processId); - return returnDataList(result); + @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, + @RequestParam(value = "targetProjectId",required = true) int targetProjectId) { + logger.info("batch copy process definition, login user:{}, project 
name:{}, process definition ids:{},target project id:{}", + StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), + StringUtils.replaceNRTtoUnderline(projectName), + StringUtils.replaceNRTtoUnderline(processDefinitionIds), + StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); + + return returnDataList( + processDefinitionService.batchCopyProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId)); + } + + /** + * move process definition + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionIds process definition ids + * @param targetProjectId target project id + * @return move result code + */ + @ApiOperation(value = "moveProcessDefinition", notes= "MOVE_PROCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), + @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") + }) + @PostMapping(value = "/move") + @ResponseStatus(HttpStatus.OK) + @ApiException(BATCH_MOVE_PROCESS_DEFINITION_ERROR) + public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds, + @RequestParam(value = "targetProjectId",required = true) int targetProjectId) { + logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}", + StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), + StringUtils.replaceNRTtoUnderline(projectName), + StringUtils.replaceNRTtoUnderline(processDefinitionIds), + StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId))); + + return returnDataList( + 
processDefinitionService.batchMoveProcessDefinition(loginUser,projectName,processDefinitionIds,targetProjectId)); } /** @@ -365,7 +403,7 @@ public class ProcessDefinitionController extends BaseController { public Result getNodeListByDefinitionIdList( @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processDefinitionIdList") String processDefinitionIdList) throws Exception { + @RequestParam("processDefinitionIdList") String processDefinitionIdList) { logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", loginUser.getUserName(), projectName, processDefinitionIdList); @@ -420,7 +458,7 @@ public class ProcessDefinitionController extends BaseController { logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", loginUser.getUserName(), projectName, processDefinitionIds); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List deleteFailedIdList = new ArrayList<>(); if (StringUtils.isNotEmpty(processDefinitionIds)) { String[] processDefinitionIdArray = processDefinitionIds.split(","); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index 7e9473d81c..1f1ec1ed7b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -370,7 +370,7 @@ public class ProcessInstanceController extends BaseController { logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", loginUser.getUserName(), projectName, 
processInstanceIds); // task queue - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List deleteFailedIdList = new ArrayList<>(); if (StringUtils.isNotEmpty(processInstanceIds)) { String[] processInstanceIdArray = processInstanceIds.split(","); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index cc9e0f657f..dac97bca9d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -226,6 +227,25 @@ public class ProjectController extends BaseController { return returnDataList(result); } + /** + * query user created project + * + * @param loginUser login user + * @return projects which the user create + */ + @ApiOperation(value = "queryProjectCreatedByUser", notes = "QUERY_USER_CREATED_PROJECT_NOTES") + + @GetMapping(value = "/login-user-created-project") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_USER_CREATED_PROJECT_ERROR) + public Result queryProjectCreatedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user {}, query authorized project by user id: {}.", + StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), + StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId()))); + Map result = 
projectService.queryProjectCreatedByUser(loginUser); + return returnDataList(result); + } + /** * import process definition * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index 39b9b06337..ab4dce972d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -432,14 +432,34 @@ public class UsersController extends BaseController { @RequestParam(value = "userPassword") String userPassword, @RequestParam(value = "repeatPassword") String repeatPassword, @RequestParam(value = "email") String email) throws Exception { - userName = userName.replaceAll("[\n|\r|\t]", ""); - userPassword = userPassword.replaceAll("[\n|\r|\t]", ""); - repeatPassword = repeatPassword.replaceAll("[\n|\r|\t]", ""); - email = email.replaceAll("[\n|\r|\t]", ""); + userName = ParameterUtils.handleEscapes(userName); + userPassword = ParameterUtils.handleEscapes(userPassword); + repeatPassword = ParameterUtils.handleEscapes(repeatPassword); + email = ParameterUtils.handleEscapes(email); logger.info("user self-register, userName: {}, userPassword {}, repeatPassword {}, eamil {}", - userName, userPassword, repeatPassword, email); + userName, Constants.PASSWORD_DEFAULT, Constants.PASSWORD_DEFAULT, email); Map result = usersService.registerUser(userName, userPassword, repeatPassword, email); return returnDataList(result); } + /** + * user activate + * + * @param userName user name + */ + @ApiOperation(value="activateUser",notes = "ACTIVATE_USER_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"), + }) + @PostMapping("/activate") + @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_USER_ERROR) + public Result 
activateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "userName") String userName) { + userName = ParameterUtils.handleEscapes(userName); + logger.info("login user {}, activate user, userName: {}", + loginUser.getUserName(), userName); + Map result = usersService.activateUser(loginUser, userName); + return returnDataList(result); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 8e90b4cb08..950d0a511d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -168,15 +168,21 @@ public enum Status { PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"), PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"), SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"), - DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), - DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), - DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), - DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), - QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"), - DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"), - QUERY_WORKFLOW_LINEAGE_ERROR(10143,"query workflow lineage error", "查询血缘失败"), - COPY_PROCESS_DEFINITION_ERROR(10148,"copy process 
definition error", "复制工作流错误"), - USER_DISABLED(10149,"The current user is disabled", "当前用户已停用"), + DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), + DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), + DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), + DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), + QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"), + DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"), + USER_DISABLED(10148,"The current user is disabled", "当前用户已停用"), + COPY_PROCESS_DEFINITION_ERROR(10149,"copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"), + MOVE_PROCESS_DEFINITION_ERROR(10150,"move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"), + QUERY_USER_CREATED_PROJECT_ERROR(10151,"query user created project error error", "查询用户创建的项目错误"), + PROCESS_DEFINITION_IDS_IS_EMPTY(10152,"process definition ids is empty", "工作流IDS不能为空"), + BATCH_COPY_PROCESS_DEFINITION_ERROR(10153,"batch copy process definition error", "复制工作流错误"), + BATCH_MOVE_PROCESS_DEFINITION_ERROR(10154,"batch move process definition error", "移动工作流错误"), + QUERY_WORKFLOW_LINEAGE_ERROR(10155,"query workflow lineage error", "查询血缘失败"), + UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java index 90d1afea49..cd6ac2b622 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java @@ -18,17 +18,18 @@ package org.apache.dolphinscheduler.api.exceptions; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestControllerAdvice; import org.springframework.web.method.HandlerMethod; /** * Exception Handler */ -@ControllerAdvice +@RestControllerAdvice @ResponseBody public class ApiExceptionHandler { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java index cb7a8e653f..83eb4fefce 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java @@ -16,32 +16,28 @@ */ package org.apache.dolphinscheduler.api.interceptor; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.commons.httpclient.HttpStatus; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.security.Authenticator; -import org.apache.dolphinscheduler.api.service.SessionService; import 
org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.UserMapper; -import org.apache.commons.httpclient.HttpStatus; -import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.servlet.HandlerInterceptor; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - /** * login interceptor, must login first */ public class LoginHandlerInterceptor implements HandlerInterceptor { private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class); - @Autowired - private SessionService sessionService; - @Autowired private UserMapper userMapper; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java index 5d176961bb..98eef47090 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java @@ -16,35 +16,14 @@ */ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.entity.AccessToken; import org.apache.dolphinscheduler.dao.entity.User; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.EncryptionUtils; 
-import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import java.util.*; +import java.util.Map; /** - * user service + * access token service */ -@Service -public class AccessTokenService extends BaseService { - - private static final Logger logger = LoggerFactory.getLogger(AccessTokenService.class); - - @Autowired - private AccessTokenMapper accessTokenMapper; - +public interface AccessTokenService { /** * query access token list @@ -55,123 +34,44 @@ public class AccessTokenService extends BaseService { * @param pageSize page size * @return token list for page number and page size */ - public Map queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); - - PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); - Page page = new Page(pageNo, pageSize); - int userId = loginUser.getId(); - if (loginUser.getUserType() == UserType.ADMIN_USER){ - userId = 0; - } - IPage accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId); - pageInfo.setTotalCount((int)accessTokenList.getTotal()); - pageInfo.setLists(accessTokenList.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize); /** * create token + * * @param userId token for user * @param expireTime token expire time * @param token token string * @return create result code */ - public Map createToken(int userId, String expireTime, String token) { - Map result = new HashMap<>(5); - - if (userId <= 0) { - throw new IllegalArgumentException("User id should not less than or equals to 0."); - } - AccessToken accessToken = new AccessToken(); - accessToken.setUserId(userId); - 
accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); - accessToken.setToken(token); - accessToken.setCreateTime(new Date()); - accessToken.setUpdateTime(new Date()); - - // insert - int insert = accessTokenMapper.insert(accessToken); - - if (insert > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR); - } - - return result; - } + Map createToken(int userId, String expireTime, String token); /** * generate token + * * @param userId token for user * @param expireTime token expire time * @return token string */ - public Map generateToken(int userId, String expireTime) { - Map result = new HashMap<>(5); - String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis())); - result.put(Constants.DATA_LIST, token); - putMsg(result, Status.SUCCESS); - return result; - } + Map generateToken(int userId, String expireTime); /** - * delete access token + * delete access token + * * @param loginUser login user * @param id token id * @return delete result code */ - public Map delAccessTokenById(User loginUser, int id) { - Map result = new HashMap<>(5); - - AccessToken accessToken = accessTokenMapper.selectById(id); - - if (accessToken == null) { - logger.error("access token not exist, access token id {}", id); - putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); - return result; - } - - if (loginUser.getId() != accessToken.getUserId() && - loginUser.getUserType() != UserType.ADMIN_USER) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - accessTokenMapper.deleteById(id); - putMsg(result, Status.SUCCESS); - return result; - } + Map delAccessTokenById(User loginUser, int id); /** * update token by id + * * @param id token id * @param userId token for user * @param expireTime token expire time * @param token token string * @return update result code */ - public Map updateToken(int id,int userId, String expireTime, String token) { - Map result = new HashMap<>(5); - - 
AccessToken accessToken = accessTokenMapper.selectById(id); - if (accessToken == null) { - logger.error("access token not exist, access token id {}", id); - putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); - return result; - } - accessToken.setUserId(userId); - accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); - accessToken.setToken(token); - accessToken.setUpdateTime(new Date()); - - accessTokenMapper.updateById(accessToken); - - putMsg(result, Status.SUCCESS); - return result; - } + Map updateToken(int id, int userId, String expireTime, String token); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java index f3dcbfa237..72cbd50833 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java @@ -55,7 +55,7 @@ public class AlertGroupService extends BaseService{ */ public HashMap queryAlertgroup() { - HashMap result = new HashMap<>(5); + HashMap result = new HashMap<>(); List alertGroups = alertGroupMapper.queryAllGroupList(); result.put(Constants.DATA_LIST, alertGroups); putMsg(result, Status.SUCCESS); @@ -74,7 +74,7 @@ public class AlertGroupService extends BaseService{ */ public Map listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } @@ -101,7 +101,7 @@ public class AlertGroupService extends BaseService{ * @return create result code */ public Map createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (checkAdmin(loginUser, result)){ return result; @@ -138,7 +138,7 @@ public 
class AlertGroupService extends BaseService{ * @return update result code */ public Map updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)){ return result; @@ -179,7 +179,7 @@ public class AlertGroupService extends BaseService{ */ @Transactional(rollbackFor = RuntimeException.class) public Map delAlertgroupById(User loginUser, int id) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate @@ -209,7 +209,7 @@ public class AlertGroupService extends BaseService{ * @return grant result code */ public Map grantUser(User loginUser, int alertgroupId, String userIds) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java deleted file mode 100644 index edc115b3d4..0000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.api.service; - -import org.apache.dolphinscheduler.common.graph.DAG; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.model.TaskNodeRelation; -import org.apache.dolphinscheduler.common.process.ProcessDag; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.ProcessData; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.utils.DagHelper; - -import java.util.List; - -/** - * base DAG service - */ -public class BaseDAGService extends BaseService{ - - - /** - * process instance to DAG - * - * @param processInstance input process instance - * @return process instance dag. 
- */ - public static DAG processInstance2DAG(ProcessInstance processInstance) { - - String processDefinitionJson = processInstance.getProcessInstanceJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - List taskNodeList = processData.getTasks(); - - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - - return DagHelper.buildDagGraph(processDag); - } -} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java index 646a67ab04..4b094ea494 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java @@ -16,6 +16,12 @@ */ package org.apache.dolphinscheduler.api.service; +import java.text.MessageFormat; +import java.util.Map; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -24,11 +30,6 @@ import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import java.text.MessageFormat; -import java.util.Map; - /** * base service */ @@ -96,6 +97,7 @@ public class BaseService { /** * get cookie info by name + * * @param request request * @param name 'sessionId' * @return get cookie info @@ -115,10 +117,11 @@ public class BaseService { /** * create tenant dir if not exists + * * @param tenantCode tenant code * @throws Exception if hdfs operation exception */ - protected void createTenantDirIfNotExists(String tenantCode)throws Exception{ + 
protected void createTenantDirIfNotExists(String tenantCode) throws Exception { String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); @@ -129,7 +132,7 @@ public class BaseService { HadoopUtils.getInstance().mkdir(udfsPath); } - protected boolean hasPerm(User operateUser, int createUserId){ + protected boolean hasPerm(User operateUser, int createUserId) { return operateUser.getId() == createUserId || isAdmin(operateUser); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java index 39bec56357..70fb272bea 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java @@ -17,57 +17,14 @@ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.dto.CommandStateCount; -import org.apache.dolphinscheduler.api.dto.DefineUserDto; -import org.apache.dolphinscheduler.api.dto.TaskCountDto; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.CommandType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; +import org.apache.dolphinscheduler.dao.entity.User; -import java.text.MessageFormat; -import 
java.util.*; +import java.util.Map; /** * data analysis service */ -@Service -public class DataAnalysisService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class); - - @Autowired - ProjectMapper projectMapper; - - @Autowired - ProjectService projectService; - - @Autowired - ProcessInstanceMapper processInstanceMapper; - - @Autowired - ProcessDefinitionMapper processDefinitionMapper; - - @Autowired - CommandMapper commandMapper; - - @Autowired - ErrorCommandMapper errorCommandMapper; - - @Autowired - TaskInstanceMapper taskInstanceMapper; - - @Autowired - ProcessService processService; +public interface DataAnalysisService { /** * statistical task instance status data @@ -78,46 +35,7 @@ public class DataAnalysisService extends BaseService{ * @param endDate end date * @return task state count data */ - public Map countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - boolean checkProject = checkProject(loginUser, projectId, result); - if(!checkProject){ - return result; - } - - /** - * find all the task lists in the project under the user - * statistics based on task status execution, failure, completion, wait, total - */ - Date start = null; - Date end = null; - - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - - Integer[] projectIds = getProjectIdsArrays(loginUser, projectId); - List taskInstanceStateCounts = - taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds); - - if (taskInstanceStateCounts != null) { - TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts); - result.put(Constants.DATA_LIST, taskCountResult); - putMsg(result, Status.SUCCESS); - } - return result; - } - - private void putErrorRequestParamsMsg(Map result) { - 
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); - } + Map countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate); /** * statistical process instance status data @@ -128,37 +46,7 @@ public class DataAnalysisService extends BaseService{ * @param endDate end date * @return process instance state count data */ - public Map countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - boolean checkProject = checkProject(loginUser, projectId, result); - if(!checkProject){ - return result; - } - - Date start = null; - Date end = null; - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); - List processInstanceStateCounts = - processInstanceMapper.countInstanceStateByUser(start, end, - projectIdArray); - - if (processInstanceStateCounts != null) { - TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); - result.put(Constants.DATA_LIST, taskCountResult); - putMsg(result, Status.SUCCESS); - } - return result; - } - + Map countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate); /** * statistics the process definition quantities of certain person @@ -167,20 +55,7 @@ public class DataAnalysisService extends BaseService{ * @param projectId project id * @return definition count data */ - public Map countDefinitionByUser(User loginUser, int projectId) { - Map result = new HashMap<>(); - - - Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); - List defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser( 
- loginUser.getId(), projectIdArray,isAdmin(loginUser)); - - DefineUserDto dto = new DefineUserDto(defineGroupByUsers); - result.put(Constants.DATA_LIST, dto); - putMsg(result, Status.SUCCESS); - return result; - } - + Map countDefinitionByUser(User loginUser, int projectId); /** * statistical command status data @@ -191,189 +66,15 @@ public class DataAnalysisService extends BaseService{ * @param endDate end date * @return command state count data */ - public Map countCommandState(User loginUser, int projectId, String startDate, String endDate) { - - Map result = new HashMap<>(5); - boolean checkProject = checkProject(loginUser, projectId, result); - if(!checkProject){ - return result; - } - - /** - * find all the task lists in the project under the user - * statistics based on task status execution, failure, completion, wait, total - */ - Date start = null; - Date end = null; - - if (startDate != null && endDate != null){ - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(),e); - putErrorRequestParamsMsg(result); - return result; - } - } - - - Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); - // count command state - List commandStateCounts = - commandMapper.countCommandState( - loginUser.getId(), - start, - end, - projectIdArray); - - // count error command state - List errorCommandStateCounts = - errorCommandMapper.countCommandState( - start, end, projectIdArray); - - // - Map> dataMap = new HashMap<>(); - - Map commonCommand = new HashMap<>(); - commonCommand.put("commandState",0); - commonCommand.put("errorCommandState",0); - - - // init data map - /** - * START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS, - START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD; - */ - dataMap.put(CommandType.START_PROCESS,commonCommand); - 
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS,commonCommand); - dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS,commonCommand); - dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,commonCommand); - dataMap.put(CommandType.START_FAILURE_TASK_PROCESS,commonCommand); - dataMap.put(CommandType.COMPLEMENT_DATA,commonCommand); - dataMap.put(CommandType.SCHEDULER,commonCommand); - dataMap.put(CommandType.REPEAT_RUNNING,commonCommand); - dataMap.put(CommandType.PAUSE,commonCommand); - dataMap.put(CommandType.STOP,commonCommand); - dataMap.put(CommandType.RECOVER_WAITTING_THREAD,commonCommand); - - // put command state - for (CommandCount executeStatusCount : commandStateCounts){ - Map commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType())); - commandStateCountsMap.put("commandState", executeStatusCount.getCount()); - dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap); - } - - // put error command state - for (CommandCount errorExecutionStatus : errorCommandStateCounts){ - Map errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType())); - errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount()); - dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap); - } - - List list = new ArrayList<>(); - Iterator>> iterator = dataMap.entrySet().iterator(); - while (iterator.hasNext()){ - Map.Entry> next = iterator.next(); - CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"), - next.getValue().get("commandState"),next.getKey()); - list.add(commandStateCount); - } - - result.put(Constants.DATA_LIST, list); - putMsg(result, Status.SUCCESS); - return result; - } - - private Integer[] getProjectIdsArrays(User loginUser, int projectId) { - List projectIds = new ArrayList<>(); - if(projectId !=0){ - projectIds.add(projectId); - }else if(loginUser.getUserType() == UserType.GENERAL_USER){ - 
projectIds = processService.getProjectIdListHavePerm(loginUser.getId()); - if(projectIds.size() ==0 ){ - projectIds.add(0); - } - } - return projectIds.toArray(new Integer[projectIds.size()]); - } + Map countCommandState(User loginUser, int projectId, String startDate, String endDate); /** * count queue state + * * @param loginUser login user * @param projectId project id * @return queue state count data */ - public Map countQueueState(User loginUser, int projectId) { - Map result = new HashMap<>(5); - - boolean checkProject = checkProject(loginUser, projectId, result); - if(!checkProject){ - return result; - } - - List tasksQueueList = new ArrayList<>(); - List tasksKillList = new ArrayList<>(); - - Map dataMap = new HashMap<>(); - if (loginUser.getUserType() == UserType.ADMIN_USER){ - dataMap.put("taskQueue",tasksQueueList.size()); - dataMap.put("taskKill",tasksKillList.size()); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - int[] tasksQueueIds = new int[tasksQueueList.size()]; - int[] tasksKillIds = new int[tasksKillList.size()]; - - int i =0; - for (String taskQueueStr : tasksQueueList){ - if (StringUtils.isNotEmpty(taskQueueStr)){ - String[] splits = taskQueueStr.split("_"); - if (splits.length >= 4){ - tasksQueueIds[i++] = Integer.parseInt(splits[3]); - } - } - } - - i = 0; - for (String taskKillStr : tasksKillList){ - if (StringUtils.isNotEmpty(taskKillStr)){ - String[] splits = taskKillStr.split("-"); - if (splits.length == 2){ - tasksKillIds[i++] = Integer.parseInt(splits[1]); - } - } - } - Integer taskQueueCount = 0; - Integer taskKillCount = 0; - - Integer[] projectIds = getProjectIdsArrays(loginUser, projectId); - if (tasksQueueIds.length != 0){ - taskQueueCount = taskInstanceMapper.countTask( - projectIds, - tasksQueueIds); - } - - if (tasksKillIds.length != 0){ - taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds); - } - - dataMap.put("taskQueue",taskQueueCount); - 
dataMap.put("taskKill",taskKillCount); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } + Map countQueueState(User loginUser, int projectId); - private boolean checkProject(User loginUser, int projectId, Map result){ - if(projectId != 0){ - Project project = projectMapper.selectById(projectId); - return projectService.hasProjectAndPerm(loginUser, project, result); - } - return true; - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index 41374f4478..49ed52bb5a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -88,7 +88,7 @@ public class DataSourceService extends BaseService{ */ public Map createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); // check name can use or not if (checkName(name)) { putMsg(result, Status.DATASOURCE_EXIST); @@ -249,6 +249,7 @@ public class DataSourceService extends BaseService{ case POSTGRESQL: case CLICKHOUSE: case ORACLE: + case PRESTO: separator = "&"; break; default: @@ -340,7 +341,7 @@ public class DataSourceService extends BaseService{ * @return data source list page */ public Map queryDataSourceList(User loginUser, Integer type) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List datasourceList; @@ -430,6 +431,10 @@ public class DataSourceService extends BaseService{ datasource = JSONUtils.parseObject(parameter, DB2ServerDataSource.class); Class.forName(Constants.COM_DB2_JDBC_DRIVER); break; + case PRESTO: + datasource = JSONUtils.parseObject(parameter, PrestoDataSource.class); + Class.forName(Constants.COM_PRESTO_JDBC_DRIVER); + 
break; default: break; } @@ -513,7 +518,8 @@ public class DataSourceService extends BaseService{ if (Constants.MYSQL.equals(type.name()) || Constants.POSTGRESQL.equals(type.name()) || Constants.CLICKHOUSE.equals(type.name()) - || Constants.ORACLE.equals(type.name())) { + || Constants.ORACLE.equals(type.name()) + || Constants.PRESTO.equals(type.name())) { separator = "&"; } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name()) @@ -585,9 +591,12 @@ public class DataSourceService extends BaseService{ } else if (Constants.SQLSERVER.equals(type.name())) { sb.append(Constants.JDBC_SQLSERVER); sb.append(host).append(":").append(port); - }else if (Constants.DB2.equals(type.name())) { + } else if (Constants.DB2.equals(type.name())) { sb.append(Constants.JDBC_DB2); sb.append(host).append(":").append(port); + } else if (Constants.PRESTO.equals(type.name())) { + sb.append(Constants.JDBC_PRESTO); + sb.append(host).append(":").append(port); } return sb.toString(); @@ -674,7 +683,7 @@ public class DataSourceService extends BaseService{ * @return authorized result code */ public Map authedDatasource(User loginUser, Integer userId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 6a8dad4f2a..fb735ecf19 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -98,7 +98,7 @@ public class ExecutorService extends BaseService{ TaskDependType taskDependType, WarningType warningType, int warningGroupId, String receivers, String receiversCc, RunMode runMode, Priority processInstancePriority, String 
workerGroup, Integer timeout) throws ParseException { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); @@ -176,7 +176,7 @@ public class ExecutorService extends BaseService{ * @return check result code */ public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){ - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (processDefinition == null) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId); @@ -201,7 +201,7 @@ public class ExecutorService extends BaseService{ * @return execute result code */ public Map execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = checkResultAndAuth(loginUser, projectName, project); @@ -294,7 +294,7 @@ public class ExecutorService extends BaseService{ */ private Map checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); ExecutionStatus executionStatus = processInstance.getState(); boolean checkResult = false; switch (executeType) { @@ -339,7 +339,7 @@ public class ExecutorService extends BaseService{ * @return update result */ private Map updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); processInstance.setCommandType(commandType); processInstance.addHistoryCmd(commandType); @@ -365,7 +365,7 @@ public class ExecutorService extends BaseService{ * @return insert result code */ private Map insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { - Map 
result = new HashMap<>(5); + Map result = new HashMap<>(); Command command = new Command(); command.setCommandType(commandType); command.setProcessDefinitionId(processDefinitionId); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java index 3c7b421d5e..14440ee61e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -16,117 +16,30 @@ */ package org.apache.dolphinscheduler.api.service; -import java.nio.charset.StandardCharsets; -import javax.annotation.PreDestroy; -import org.apache.commons.lang.ArrayUtils; -import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.remote.utils.Host; -import org.apache.dolphinscheduler.service.log.LogClientService; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; /** * log service */ -@Service -public class LoggerService { +public interface LoggerService { + + /** + * view log + * + * @param taskInstId task instance id + * @param skipLineNum skip line number + * @param limit limit + * @return log string data + */ + Result queryLog(int taskInstId, int skipLineNum, int limit); + + + /** + * get log size + * + * @param taskInstId task instance id + * @return log byte array + */ + byte[] getLogBytes(int taskInstId); - private static final Logger logger = 
LoggerFactory.getLogger(LoggerService.class); - - private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s"; - - @Autowired - private ProcessService processService; - - private final LogClientService logClient; - - public LoggerService() { - logClient = new LogClientService(); - } - - @PreDestroy - public void close() { - logClient.close(); - } - - /** - * view log - * - * @param taskInstId task instance id - * @param skipLineNum skip line number - * @param limit limit - * @return log string data - */ - public Result queryLog(int taskInstId, int skipLineNum, int limit) { - - TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - - if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { - return Result.error(Status.TASK_INSTANCE_NOT_FOUND); - } - - String host = getHost(taskInstance.getHost()); - - Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); - - logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(), - Constants.RPC_PORT); - - StringBuilder log = new StringBuilder(); - if (skipLineNum == 0) { - String head = String.format(LOG_HEAD_FORMAT, - taskInstance.getLogPath(), - host, - Constants.SYSTEM_LINE_SEPARATOR); - log.append(head); - } - - log.append(logClient - .rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit)); - - result.setData(log); - return result; - } - - - /** - * get log size - * - * @param taskInstId task instance id - * @return log byte array - */ - public byte[] getLogBytes(int taskInstId) { - TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { - throw new RuntimeException("task instance is null or host is null"); - } - String host = getHost(taskInstance.getHost()); - byte[] head = String.format(LOG_HEAD_FORMAT, - taskInstance.getLogPath(), - host, - 
Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8); - return ArrayUtils.addAll(head, - logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); - } - - - /** - * get host - * - * @param address address - * @return old version return true ,otherwise return false - */ - private String getHost(String address) { - if (Host.isOldVersion(address)) { - return address; - } - return Host.of(address).getIp(); - } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java index 55c4fa113b..e46ca6fcf2 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/MonitorService.java @@ -56,7 +56,7 @@ public class MonitorService extends BaseService { * @return data base state */ public Map queryDatabaseState(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List monitorRecordList = monitorDBDao.queryDatabaseState(); @@ -75,7 +75,7 @@ public class MonitorService extends BaseService { */ public Map queryMaster(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List masterServers = getServerListFromZK(true); result.put(Constants.DATA_LIST, masterServers); @@ -91,7 +91,7 @@ public class MonitorService extends BaseService { * @return zookeeper information list */ public Map queryZookeeperState(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List zookeeperRecordList = zookeeperMonitor.zookeeperInfoList(); @@ -111,7 +111,7 @@ public class MonitorService extends BaseService { */ public Map queryWorker(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List workerServers = getServerListFromZK(false) .stream() .map((Server server) -> { diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java index b3d56e5982..cc18de439f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -16,90 +16,17 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.dolphinscheduler.api.dto.ProcessMeta; -import org.apache.dolphinscheduler.api.dto.treeview.Instance; -import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.CheckUtils; -import org.apache.dolphinscheduler.api.utils.FileUtils; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.exportprocess.ProcessAddTaskParam; -import org.apache.dolphinscheduler.api.utils.exportprocess.TaskNodeParamFactory; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; -import org.apache.dolphinscheduler.common.graph.DAG; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.model.TaskNodeRelation; -import org.apache.dolphinscheduler.common.process.ProcessDag; -import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.task.AbstractParameters; -import org.apache.dolphinscheduler.common.thread.Stopper; 
-import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; -import org.apache.dolphinscheduler.dao.utils.DagHelper; -import org.apache.dolphinscheduler.service.permission.PermissionCheck; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.MediaType; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.web.multipart.MultipartFile; - -import javax.servlet.ServletOutputStream; +import java.util.Map; import javax.servlet.http.HttpServletResponse; -import java.io.BufferedOutputStream; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.User; +import org.springframework.web.multipart.MultipartFile; +import com.fasterxml.jackson.core.JsonProcessingException; /** * process definition service */ -@Service -public class ProcessDefinitionService extends BaseDAGService { - - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionService.class); - - private static final String PROCESSDEFINITIONID = "processDefinitionId"; - - private static final String RELEASESTATE = "releaseState"; - - private static final String TASKS = "tasks"; - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectService projectService; - - @Autowired - private ProcessDefinitionMapper processDefineMapper; - - @Autowired - private ProcessInstanceMapper processInstanceMapper; - - - 
@Autowired - private TaskInstanceMapper taskInstanceMapper; - - @Autowired - private ScheduleMapper scheduleMapper; - - @Autowired - private ProcessService processService; +public interface ProcessDefinitionService { /** * create process definition @@ -114,92 +41,13 @@ public class ProcessDefinitionService extends BaseDAGService { * @return create result code * @throws JsonProcessingException JsonProcessingException */ - public Map createProcessDefinition(User loginUser, - String projectName, - String name, - String processDefinitionJson, - String desc, - String locations, - String connects) throws JsonProcessingException { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - // check project auth - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - ProcessDefinition processDefine = new ProcessDefinition(); - Date now = new Date(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); - if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) { - return checkProcessJson; - } - - processDefine.setName(name); - processDefine.setReleaseState(ReleaseState.OFFLINE); - processDefine.setProjectId(project.getId()); - processDefine.setUserId(loginUser.getId()); - processDefine.setProcessDefinitionJson(processDefinitionJson); - processDefine.setDescription(desc); - processDefine.setLocations(locations); - processDefine.setConnects(connects); - processDefine.setTimeout(processData.getTimeout()); - processDefine.setTenantId(processData.getTenantId()); - processDefine.setModifyBy(loginUser.getUserName()); - processDefine.setResourceIds(getResourceIds(processData)); - - //custom global params - List globalParamsList = 
processData.getGlobalParams(); - if (CollectionUtils.isNotEmpty(globalParamsList)) { - Set globalParamsSet = new HashSet<>(globalParamsList); - globalParamsList = new ArrayList<>(globalParamsSet); - processDefine.setGlobalParamList(globalParamsList); - } - processDefine.setCreateTime(now); - processDefine.setUpdateTime(now); - processDefine.setFlag(Flag.YES); - processDefineMapper.insert(processDefine); - - // return processDefinition object with ID - result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId())); - putMsg(result, Status.SUCCESS); - result.put("processDefinitionId", processDefine.getId()); - return result; - } - - /** - * get resource ids - * - * @param processData process data - * @return resource ids - */ - private String getResourceIds(ProcessData processData) { - List tasks = processData.getTasks(); - Set resourceIds = new HashSet<>(); - for (TaskNode taskNode : tasks) { - String taskParameter = taskNode.getParams(); - AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter); - if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) { - Set tempSet = params.getResourceFilesList().stream().map(t -> t.getId()).collect(Collectors.toSet()); - resourceIds.addAll(tempSet); - } - } - - StringBuilder sb = new StringBuilder(); - for (int i : resourceIds) { - if (sb.length() > 0) { - sb.append(","); - } - sb.append(i); - } - return sb.toString(); - } - + Map createProcessDefinition(User loginUser, + String projectName, + String name, + String processDefinitionJson, + String desc, + String locations, + String connects) throws JsonProcessingException; /** * query process definition list @@ -208,24 +56,8 @@ public class ProcessDefinitionService extends BaseDAGService { * @param projectName project name * @return definition list */ - public Map queryProcessDefinitionList(User loginUser, String projectName) { - - HashMap result = new HashMap<>(5); - Project project = 
projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - List resourceList = processDefineMapper.queryAllDefinitionList(project.getId()); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - - return result; - } - + Map queryProcessDefinitionList(User loginUser, + String projectName); /** * query process definition list paging @@ -238,29 +70,12 @@ public class ProcessDefinitionService extends BaseDAGService { * @param userId user id * @return process definition page */ - public Map queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - Page page = new Page(pageNo, pageSize); - IPage processDefinitionIPage = processDefineMapper.queryDefineListPaging( - page, searchVal, userId, project.getId(), isAdmin(loginUser)); - - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int) processDefinitionIPage.getTotal()); - pageInfo.setLists(processDefinitionIPage.getRecords()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryProcessDefinitionListPaging(User loginUser, + String projectName, + String searchVal, + Integer pageNo, + Integer pageSize, + Integer userId); /** * query datail of process definition @@ -270,62 +85,36 @@ public class ProcessDefinitionService extends BaseDAGService { * @param processId process definition id * @return process 
definition detail */ - public Map queryProcessDefinitionById(User loginUser, String projectName, Integer processId) { - - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - ProcessDefinition processDefinition = processDefineMapper.selectById(processId); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); - } else { - result.put(Constants.DATA_LIST, processDefinition); - putMsg(result, Status.SUCCESS); - } - return result; - } + Map queryProcessDefinitionById(User loginUser, + String projectName, + Integer processId); /** - * copy process definition - * - * @param loginUser login user - * @param projectName project name - * @param processId process definition id - * @return copy result code + * batch copy process definition + * @param loginUser loginUser + * @param projectName projectName + * @param processDefinitionIds processDefinitionIds + * @param targetProjectId targetProjectId + * @return */ - public Map copyProcessDefinition(User loginUser, String projectName, Integer processId) throws JsonProcessingException { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } + Map batchCopyProcessDefinition(User loginUser, + String projectName, + String processDefinitionIds, + int targetProjectId); - ProcessDefinition processDefinition = processDefineMapper.selectById(processId); - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); - return result; - } 
else { - return createProcessDefinition( - loginUser, - projectName, - processDefinition.getName() + "_copy_" + System.currentTimeMillis(), - processDefinition.getProcessDefinitionJson(), - processDefinition.getDescription(), - processDefinition.getLocations(), - processDefinition.getConnects()); - } - } + /** + * batch move process definition + * @param loginUser loginUser + * @param projectName projectName + * @param processDefinitionIds processDefinitionIds + * @param targetProjectId targetProjectId + * @return + */ + Map batchMoveProcessDefinition(User loginUser, + String projectName, + String processDefinitionIds, + int targetProjectId); /** * update process definition @@ -340,68 +129,12 @@ public class ProcessDefinitionService extends BaseDAGService { * @param connects connects for nodes * @return update result code */ - public Map updateProcessDefinition(User loginUser, String projectName, int id, String name, - String processDefinitionJson, String desc, - String locations, String connects) { - Map result = new HashMap<>(5); - - Project project = projectMapper.queryByName(projectName); - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - if (resultStatus != Status.SUCCESS) { - return checkResult; - } - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); - if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { - return checkProcessJson; - } - ProcessDefinition processDefine = processService.findProcessDefineById(id); - if (processDefine == null) { - // check process definition exists - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); - return result; - } else if (processDefine.getReleaseState() == ReleaseState.ONLINE) { - // online can not permit edit - putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, 
processDefine.getName()); - return result; - } else { - putMsg(result, Status.SUCCESS); - } - - Date now = new Date(); - - processDefine.setId(id); - processDefine.setName(name); - processDefine.setReleaseState(ReleaseState.OFFLINE); - processDefine.setProjectId(project.getId()); - processDefine.setProcessDefinitionJson(processDefinitionJson); - processDefine.setDescription(desc); - processDefine.setLocations(locations); - processDefine.setConnects(connects); - processDefine.setTimeout(processData.getTimeout()); - processDefine.setTenantId(processData.getTenantId()); - processDefine.setModifyBy(loginUser.getUserName()); - processDefine.setResourceIds(getResourceIds(processData)); - - //custom global params - List globalParamsList = new ArrayList<>(); - if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) { - Set userDefParamsSet = new HashSet<>(processData.getGlobalParams()); - globalParamsList = new ArrayList<>(userDefParamsSet); - } - processDefine.setGlobalParamList(globalParamsList); - processDefine.setUpdateTime(now); - processDefine.setFlag(Flag.YES); - if (processDefineMapper.updateById(processDefine) > 0) { - putMsg(result, Status.SUCCESS); - - } else { - putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); - } - return result; - } + Map updateProcessDefinition(User loginUser, + String projectName, + int id, + String name, + String processDefinitionJson, String desc, + String locations, String connects); /** * verify process definition name unique @@ -411,24 +144,9 @@ public class ProcessDefinitionService extends BaseDAGService { * @param name name * @return true if process definition name not exists, otherwise false */ - public Map verifyProcessDefinitionName(User loginUser, String projectName, String name) { - - Map result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) 
checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); - if (processDefinition == null) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); - } - return result; - } + Map verifyProcessDefinitionName(User loginUser, + String projectName, + String name); /** * delete process definition by id @@ -438,62 +156,9 @@ public class ProcessDefinitionService extends BaseDAGService { * @param processDefinitionId process definition id * @return delete result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); - - if (processDefinition == null) { - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId); - return result; - } - - // Determine if the login user is the owner of the process definition - if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - // check process definition is already online - if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { - putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionId); - return result; - } - - // get the timing according to the process definition - List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); - if (!schedules.isEmpty() && 
schedules.size() > 1) { - logger.warn("scheduler num is {},Greater than 1", schedules.size()); - putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); - return result; - } else if (schedules.size() == 1) { - Schedule schedule = schedules.get(0); - if (schedule.getReleaseState() == ReleaseState.OFFLINE) { - scheduleMapper.deleteById(schedule.getId()); - } else if (schedule.getReleaseState() == ReleaseState.ONLINE) { - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); - return result; - } - } - - int delete = processDefineMapper.deleteById(processDefinitionId); - - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); - } - return result; - } + Map deleteProcessDefinitionById(User loginUser, + String projectName, + Integer processDefinitionId); /** * release process definition: online / offline @@ -504,244 +169,23 @@ public class ProcessDefinitionService extends BaseDAGService { * @param releaseState release state * @return release result code */ - @Transactional(rollbackFor = RuntimeException.class) - public Map releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { - HashMap result = new HashMap<>(); - Project project = projectMapper.queryByName(projectName); - - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultEnum = (Status) checkResult.get(Constants.STATUS); - if (resultEnum != Status.SUCCESS) { - return checkResult; - } - - ReleaseState state = ReleaseState.getEnum(releaseState); - - // check state - if (null == state) { - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); - return result; - } - - ProcessDefinition processDefinition = processDefineMapper.selectById(id); - - switch (state) { - case ONLINE: - // To check resources whether they are already cancel authorized or deleted - String resourceIds = processDefinition.getResourceIds(); - if 
(StringUtils.isNotBlank(resourceIds)) { - Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new); - PermissionCheck permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger); - try { - permissionCheck.checkPermission(); - } catch (Exception e) { - logger.error(e.getMessage(), e); - putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE); - return result; - } - } - - processDefinition.setReleaseState(state); - processDefineMapper.updateById(processDefinition); - break; - case OFFLINE: - processDefinition.setReleaseState(state); - processDefineMapper.updateById(processDefinition); - List scheduleList = scheduleMapper.selectAllByProcessDefineArray( - new int[]{processDefinition.getId()} - ); - - for (Schedule schedule : scheduleList) { - logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id); - // set status - schedule.setReleaseState(ReleaseState.OFFLINE); - scheduleMapper.updateById(schedule); - SchedulerService.deleteSchedule(project.getId(), schedule.getId()); - } - break; - default: - putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); - return result; - } - - putMsg(result, Status.SUCCESS); - return result; - } + Map releaseProcessDefinition(User loginUser, + String projectName, + int id, + int releaseState); /** * batch export process definition by ids * - * @param loginUser - * @param projectName - * @param processDefinitionIds - * @param response + * @param loginUser login user + * @param projectName project name + * @param processDefinitionIds process definition ids + * @param response http servlet response */ - public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) { - - if (StringUtils.isEmpty(processDefinitionIds)) { - 
return; - } - - //export project info - Project project = projectMapper.queryByName(projectName); - - //check user access for project - Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); - Status resultStatus = (Status) checkResult.get(Constants.STATUS); - - if (resultStatus != Status.SUCCESS) { - return; - } - - List processDefinitionList = - getProcessDefinitionList(processDefinitionIds); - - if (CollectionUtils.isNotEmpty(processDefinitionList)) { - downloadProcessDefinitionFile(response, processDefinitionList); - } - } - - /** - * get process definition list by ids - * - * @param processDefinitionIds - * @return - */ - private List getProcessDefinitionList(String processDefinitionIds) { - List processDefinitionList = new ArrayList<>(); - String[] processDefinitionIdArray = processDefinitionIds.split(","); - for (String strProcessDefinitionId : processDefinitionIdArray) { - //get workflow info - int processDefinitionId = Integer.parseInt(strProcessDefinitionId); - ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId); - if (null != processDefinition) { - processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition)); - } - } - - return processDefinitionList; - } - - /** - * download the process definition file - * - * @param response - * @param processDefinitionList - */ - private void downloadProcessDefinitionFile(HttpServletResponse response, List processDefinitionList) { - response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE); - BufferedOutputStream buff = null; - ServletOutputStream out = null; - try { - out = response.getOutputStream(); - buff = new BufferedOutputStream(out); - buff.write(JSONUtils.toJsonString(processDefinitionList).getBytes(StandardCharsets.UTF_8)); - buff.flush(); - buff.close(); - } catch (IOException e) { - logger.warn("export process fail", e); - } finally { - if (null != buff) { - try { - buff.close(); - } catch (Exception 
e) { - logger.warn("export process buffer not close", e); - } - } - if (null != out) { - try { - out.close(); - } catch (Exception e) { - logger.warn("export process output stream not close", e); - } - } - } - } - - /** - * get export process metadata string - * - * @param processDefinitionId process definition id - * @param processDefinition process definition - * @return export process metadata string - */ - public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) { - //create workflow json file - return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition)); - } - - /** - * get export process metadata string - * - * @param processDefinitionId process definition id - * @param processDefinition process definition - * @return export process metadata string - */ - public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) { - //correct task param which has data source or dependent param - String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson()); - processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson); - - //export process metadata - ProcessMeta exportProcessMeta = new ProcessMeta(); - exportProcessMeta.setProjectName(processDefinition.getProjectName()); - exportProcessMeta.setProcessDefinitionName(processDefinition.getName()); - exportProcessMeta.setProcessDefinitionJson(processDefinition.getProcessDefinitionJson()); - exportProcessMeta.setProcessDefinitionLocations(processDefinition.getLocations()); - exportProcessMeta.setProcessDefinitionConnects(processDefinition.getConnects()); - - //schedule info - List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); - if (!schedules.isEmpty()) { - Schedule schedule = schedules.get(0); - exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString()); - 
exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId()); - exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime())); - exportProcessMeta.setScheduleEndTime(DateUtils.dateToString(schedule.getEndTime())); - exportProcessMeta.setScheduleCrontab(schedule.getCrontab()); - exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy())); - exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE)); - exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority())); - exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup()); - } - //create workflow json file - return exportProcessMeta; - } - - /** - * correct task param which has datasource or dependent - * - * @param processDefinitionJson processDefinitionJson - * @return correct processDefinitionJson - */ - public String addExportTaskNodeSpecialParam(String processDefinitionJson) { - ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson); - ArrayNode jsonArray = (ArrayNode) jsonObject.path(TASKS); - - for (int i = 0; i < jsonArray.size(); i++) { - JsonNode taskNode = jsonArray.path(i); - if (StringUtils.isNotEmpty(taskNode.path("type").asText())) { - String taskType = taskNode.path("type").asText(); - - ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType); - if (null != addTaskParam) { - addTaskParam.addExportSpecialParam(taskNode); - } - } - } - jsonObject.set(TASKS, jsonArray); - return jsonObject.toString(); - } - - /** - * check task if has sub process - * - * @param taskType task type - * @return if task has sub process return true else false - */ - private boolean checkTaskHasSubProcess(String taskType) { - return taskType.equals(TaskType.SUB_PROCESS.name()); - } + void batchExportProcessDefinitionByIds(User loginUser, + String projectName, + String processDefinitionIds, + HttpServletResponse response); /** * import 
process definition @@ -751,358 +195,9 @@ public class ProcessDefinitionService extends BaseDAGService { * @param currentProjectName current project name * @return import process */ - @Transactional(rollbackFor = RuntimeException.class) - public Map importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) { - Map result = new HashMap<>(5); - String processMetaJson = FileUtils.file2String(file); - List processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class); - - //check file content - if (CollectionUtils.isEmpty(processMetaList)) { - putMsg(result, Status.DATA_IS_NULL, "fileContent"); - return result; - } - - for (ProcessMeta processMeta : processMetaList) { - - if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)) { - return result; - } - } - - return result; - } - - /** - * check and import process definition - * - * @param loginUser - * @param currentProjectName - * @param result - * @param processMeta - * @return - */ - private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map result, ProcessMeta processMeta) { - - if (!checkImportanceParams(processMeta, result)) { - return false; - } - - //deal with process name - String processDefinitionName = processMeta.getProcessDefinitionName(); - //use currentProjectName to query - Project targetProject = projectMapper.queryByName(currentProjectName); - if (null != targetProject) { - processDefinitionName = recursionProcessDefinitionName(targetProject.getId(), - processDefinitionName, 1); - } - - //unique check - Map checkResult = verifyProcessDefinitionName(loginUser, currentProjectName, processDefinitionName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (Status.SUCCESS.equals(status)) { - putMsg(result, Status.SUCCESS); - } else { - result.putAll(checkResult); - return false; - } - - // get create process result - Map createProcessResult = - getCreateProcessResult(loginUser, - 
currentProjectName, - result, - processMeta, - processDefinitionName, - addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject)); - - if (createProcessResult == null) { - return false; - } - - //create process definition - Integer processDefinitionId = - Objects.isNull(createProcessResult.get(PROCESSDEFINITIONID)) ? - null : Integer.parseInt(createProcessResult.get(PROCESSDEFINITIONID).toString()); - - //scheduler param - return getImportProcessScheduleResult(loginUser, - currentProjectName, - result, - processMeta, - processDefinitionName, - processDefinitionId); - - } - - /** - * get create process result - * - * @param loginUser - * @param currentProjectName - * @param result - * @param processMeta - * @param processDefinitionName - * @param importProcessParam - * @return - */ - private Map getCreateProcessResult(User loginUser, - String currentProjectName, - Map result, - ProcessMeta processMeta, - String processDefinitionName, - String importProcessParam) { - Map createProcessResult = null; - try { - createProcessResult = createProcessDefinition(loginUser - , currentProjectName, - processDefinitionName + "_import_" + System.currentTimeMillis(), - importProcessParam, - processMeta.getProcessDefinitionDescription(), - processMeta.getProcessDefinitionLocations(), - processMeta.getProcessDefinitionConnects()); - putMsg(result, Status.SUCCESS); - } catch (JsonProcessingException e) { - logger.error("import process meta json data: {}", e.getMessage(), e); - putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); - } - - return createProcessResult; - } - - /** - * get import process schedule result - * - * @param loginUser - * @param currentProjectName - * @param result - * @param processMeta - * @param processDefinitionName - * @param processDefinitionId - * @return - */ - private boolean getImportProcessScheduleResult(User loginUser, - String currentProjectName, - Map result, - ProcessMeta processMeta, - String processDefinitionName, - 
Integer processDefinitionId) { - if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) { - int scheduleInsert = importProcessSchedule(loginUser, - currentProjectName, - processMeta, - processDefinitionName, - processDefinitionId); - - if (0 == scheduleInsert) { - putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); - return false; - } - } - return true; - } - - /** - * check importance params - * - * @param processMeta - * @param result - * @return - */ - private boolean checkImportanceParams(ProcessMeta processMeta, Map result) { - if (StringUtils.isEmpty(processMeta.getProjectName())) { - putMsg(result, Status.DATA_IS_NULL, "projectName"); - return false; - } - if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) { - putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); - return false; - } - if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) { - putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson"); - return false; - } - - return true; - } - - /** - * import process add special task param - * - * @param loginUser login user - * @param processDefinitionJson process definition json - * @param targetProject target project - * @return import process param - */ - private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) { - ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson); - ArrayNode jsonArray = (ArrayNode) jsonObject.get(TASKS); - //add sql and dependent param - for (int i = 0; i < jsonArray.size(); i++) { - JsonNode taskNode = jsonArray.path(i); - String taskType = taskNode.path("type").asText(); - ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType); - if (null != addTaskParam) { - addTaskParam.addImportSpecialParam(taskNode); - } - } - - //recursive sub-process parameter correction map key for old process id value for new process id - Map subProcessIdMap = new HashMap<>(20); - - List subProcessList = 
StreamUtils.asStream(jsonArray.elements()) - .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText())) - .collect(Collectors.toList()); - - if (CollectionUtils.isNotEmpty(subProcessList)) { - importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap); - } - - jsonObject.set(TASKS, jsonArray); - return jsonObject.toString(); - } - - /** - * import process schedule - * - * @param loginUser login user - * @param currentProjectName current project name - * @param processMeta process meta data - * @param processDefinitionName process definition name - * @param processDefinitionId process definition id - * @return insert schedule flag - */ - public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta, - String processDefinitionName, Integer processDefinitionId) { - Date now = new Date(); - Schedule scheduleObj = new Schedule(); - scheduleObj.setProjectName(currentProjectName); - scheduleObj.setProcessDefinitionId(processDefinitionId); - scheduleObj.setProcessDefinitionName(processDefinitionName); - scheduleObj.setCreateTime(now); - scheduleObj.setUpdateTime(now); - scheduleObj.setUserId(loginUser.getId()); - scheduleObj.setUserName(loginUser.getUserName()); - - scheduleObj.setCrontab(processMeta.getScheduleCrontab()); - - if (null != processMeta.getScheduleStartTime()) { - scheduleObj.setStartTime(DateUtils.stringToDate(processMeta.getScheduleStartTime())); - } - if (null != processMeta.getScheduleEndTime()) { - scheduleObj.setEndTime(DateUtils.stringToDate(processMeta.getScheduleEndTime())); - } - if (null != processMeta.getScheduleWarningType()) { - scheduleObj.setWarningType(WarningType.valueOf(processMeta.getScheduleWarningType())); - } - if (null != processMeta.getScheduleWarningGroupId()) { - scheduleObj.setWarningGroupId(processMeta.getScheduleWarningGroupId()); - } - if (null != processMeta.getScheduleFailureStrategy()) { - 
scheduleObj.setFailureStrategy(FailureStrategy.valueOf(processMeta.getScheduleFailureStrategy())); - } - if (null != processMeta.getScheduleReleaseState()) { - scheduleObj.setReleaseState(ReleaseState.valueOf(processMeta.getScheduleReleaseState())); - } - if (null != processMeta.getScheduleProcessInstancePriority()) { - scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority())); - } - - if (null != processMeta.getScheduleWorkerGroupName()) { - scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName()); - } - - return scheduleMapper.insert(scheduleObj); - } - - /** - * check import process has sub process - * recursion create sub process - * - * @param loginUser login user - * @param targetProject target project - * @param jsonArray process task array - * @param subProcessIdMap correct sub process id map - */ - public void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map subProcessIdMap) { - for (int i = 0; i < jsonArray.size(); i++) { - ObjectNode taskNode = (ObjectNode) jsonArray.path(i); - String taskType = taskNode.path("type").asText(); - - if (!checkTaskHasSubProcess(taskType)) { - continue; - } - //get sub process info - ObjectNode subParams = (ObjectNode) taskNode.path("params"); - Integer subProcessId = subParams.path(PROCESSDEFINITIONID).asInt(); - ProcessDefinition subProcess = processDefineMapper.queryByDefineId(subProcessId); - //check is sub process exist in db - if (null == subProcess) { - continue; - } - String subProcessJson = subProcess.getProcessDefinitionJson(); - //check current project has sub process - ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName()); - - if (null == currentProjectSubProcess) { - ArrayNode subJsonArray = (ArrayNode) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get(TASKS); - - List subProcessList = StreamUtils.asStream(subJsonArray.elements()) - 
.filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).path("type").asText())) - .collect(Collectors.toList()); - - if (CollectionUtils.isNotEmpty(subProcessList)) { - importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap); - //sub process processId correct - if (!subProcessIdMap.isEmpty()) { - - for (Map.Entry entry : subProcessIdMap.entrySet()) { - String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey(); - String newSubProcessId = "\"processDefinitionId\":" + entry.getValue(); - subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId); - } - - subProcessIdMap.clear(); - } - } - - //if sub-process recursion - Date now = new Date(); - //create sub process in target project - ProcessDefinition processDefine = new ProcessDefinition(); - processDefine.setName(subProcess.getName()); - processDefine.setVersion(subProcess.getVersion()); - processDefine.setReleaseState(subProcess.getReleaseState()); - processDefine.setProjectId(targetProject.getId()); - processDefine.setUserId(loginUser.getId()); - processDefine.setProcessDefinitionJson(subProcessJson); - processDefine.setDescription(subProcess.getDescription()); - processDefine.setLocations(subProcess.getLocations()); - processDefine.setConnects(subProcess.getConnects()); - processDefine.setTimeout(subProcess.getTimeout()); - processDefine.setTenantId(subProcess.getTenantId()); - processDefine.setGlobalParams(subProcess.getGlobalParams()); - processDefine.setCreateTime(now); - processDefine.setUpdateTime(now); - processDefine.setFlag(subProcess.getFlag()); - processDefine.setReceivers(subProcess.getReceivers()); - processDefine.setReceiversCc(subProcess.getReceiversCc()); - processDefineMapper.insert(processDefine); - - logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName()); - - //modify task node - ProcessDefinition newSubProcessDefine = 
processDefineMapper.queryByDefineName(processDefine.getProjectId(), processDefine.getName()); - - if (null != newSubProcessDefine) { - subProcessIdMap.put(subProcessId, newSubProcessDefine.getId()); - subParams.put(PROCESSDEFINITIONID, newSubProcessDefine.getId()); - taskNode.set("params", subParams); - } - } - } - } - + Map importProcessDefinition(User loginUser, + MultipartFile file, + String currentProjectName); /** * check the process definition node meets the specifications @@ -1111,50 +206,8 @@ public class ProcessDefinitionService extends BaseDAGService { * @param processDefinitionJson process definition json * @return check result code */ - public Map checkProcessNodeList(ProcessData processData, String processDefinitionJson) { - - Map result = new HashMap<>(5); - try { - if (processData == null) { - logger.error("process data is null"); - putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson); - return result; - } - - // Check whether the task node is normal - List taskNodes = processData.getTasks(); - - if (taskNodes == null) { - logger.error("process node info is empty"); - putMsg(result, Status.DATA_IS_NULL, processDefinitionJson); - return result; - } - - // check has cycle - if (graphHasCycle(taskNodes)) { - logger.error("process DAG has cycle"); - putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); - return result; - } - - // check whether the process definition json is normal - for (TaskNode taskNode : taskNodes) { - if (!CheckUtils.checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) { - logger.error("task node {} parameter invalid", taskNode.getName()); - putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); - return result; - } - - // check extra params - CheckUtils.checkOtherParams(taskNode.getExtras()); - } - putMsg(result, Status.SUCCESS); - } catch (Exception e) { - result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); - result.put(Constants.MSG, e.getMessage()); - } - return result; - } 
+ Map checkProcessNodeList(ProcessData processData, + String processDefinitionJson); /** * get task node details based on process definition @@ -1162,36 +215,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @param defineId define id * @return task node list */ - public Map getTaskNodeListByDefinitionId(Integer defineId) { - Map result = new HashMap<>(); - - ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); - if (processDefinition == null) { - logger.info("process define not exists"); - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineId); - return result; - } - - - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - //process data check - if (null == processData) { - logger.error("process data is null"); - putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson); - return result; - } - - List taskNodeList = (processData.getTasks() == null) ? 
new ArrayList<>() : processData.getTasks(); - - result.put(Constants.DATA_LIST, taskNodeList); - putMsg(result, Status.SUCCESS); - - return result; - - } + Map getTaskNodeListByDefinitionId(Integer defineId); /** * get task node details based on process definition @@ -1199,37 +223,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @param defineIdList define id list * @return task node list */ - public Map getTaskNodeListByDefinitionIdList(String defineIdList) { - Map result = new HashMap<>(); - - Map> taskNodeMap = new HashMap<>(); - String[] idList = defineIdList.split(","); - List idIntList = new ArrayList<>(); - for (String definitionId : idList) { - idIntList.add(Integer.parseInt(definitionId)); - } - Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]); - List processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray); - if (CollectionUtils.isEmpty(processDefinitionList)) { - logger.info("process definition not exists"); - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList); - return result; - } - - for (ProcessDefinition processDefinition : processDefinitionList) { - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - List taskNodeList = (processData.getTasks() == null) ? 
new ArrayList<>() : processData.getTasks(); - taskNodeMap.put(processDefinition.getId(), taskNodeList); - } - - result.put(Constants.DATA_LIST, taskNodeMap); - putMsg(result, Status.SUCCESS); - - return result; - - } - + Map getTaskNodeListByDefinitionIdList(String defineIdList); /** * query process definition all by project id @@ -1237,16 +231,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @param projectId project id * @return process definitions in the project */ - public Map queryProcessDefinitionAllByProjectId(Integer projectId) { - - HashMap result = new HashMap<>(5); - - List resourceList = processDefineMapper.queryAllDefinitionList(projectId); - result.put(Constants.DATA_LIST, resourceList); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryProcessDefinitionAllByProjectId(Integer projectId); /** * Encapsulates the TreeView structure @@ -1256,200 +241,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @return tree view json data * @throws Exception exception */ - public Map viewTree(Integer processId, Integer limit) throws Exception { - Map result = new HashMap<>(); - - ProcessDefinition processDefinition = processDefineMapper.selectById(processId); - if (null == processDefinition) { - logger.info("process define not exists"); - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition); - return result; - } - DAG dag = genDagGraph(processDefinition); - /** - * nodes that is running - */ - Map> runningNodeMap = new ConcurrentHashMap<>(); - - /** - * nodes that is waiting torun - */ - Map> waitingRunningNodeMap = new ConcurrentHashMap<>(); - - /** - * List of process instances - */ - List processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit); - - for (ProcessInstance processInstance : processInstanceList) { - processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime())); - } - - if (limit > 
processInstanceList.size()) { - limit = processInstanceList.size(); - } - - TreeViewDto parentTreeViewDto = new TreeViewDto(); - parentTreeViewDto.setName("DAG"); - parentTreeViewDto.setType(""); - // Specify the process definition, because it is a TreeView for a process definition - - for (int i = limit - 1; i >= 0; i--) { - ProcessInstance processInstance = processInstanceList.get(i); - - Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime(); - parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString() - , processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); - } - - List parentTreeViewDtoList = new ArrayList<>(); - parentTreeViewDtoList.add(parentTreeViewDto); - // Here is the encapsulation task instance - for (String startNode : dag.getBeginNode()) { - runningNodeMap.put(startNode, parentTreeViewDtoList); - } - - while (Stopper.isRunning()) { - Set postNodeList = null; - Iterator>> iter = runningNodeMap.entrySet().iterator(); - while (iter.hasNext()) { - Map.Entry> en = iter.next(); - String nodeName = en.getKey(); - parentTreeViewDtoList = en.getValue(); - - TreeViewDto treeViewDto = new TreeViewDto(); - treeViewDto.setName(nodeName); - TaskNode taskNode = dag.getNode(nodeName); - treeViewDto.setType(taskNode.getType()); - - - //set treeViewDto instances - for (int i = limit - 1; i >= 0; i--) { - ProcessInstance processInstance = processInstanceList.get(i); - TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName); - if (taskInstance == null) { - treeViewDto.getInstances().add(new Instance(-1, "not running", "null")); - } else { - Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); - Date endTime = taskInstance.getEndTime() == null ? 
new Date() : taskInstance.getEndTime(); - - int subProcessId = 0; - /** - * if process is sub process, the return sub id, or sub id=0 - */ - if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) { - String taskJson = taskInstance.getTaskJson(); - taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); - subProcessId = Integer.parseInt(JSONUtils.parseObject( - taskNode.getParams()).path(CMDPARAM_SUB_PROCESS_DEFINE_ID).asText()); - } - treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString() - , taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); - } - } - for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) { - pTreeViewDto.getChildren().add(treeViewDto); - } - postNodeList = dag.getSubsequentNodes(nodeName); - if (CollectionUtils.isNotEmpty(postNodeList)) { - for (String nextNodeName : postNodeList) { - List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); - if (CollectionUtils.isNotEmpty(treeViewDtoList)) { - treeViewDtoList.add(treeViewDto); - waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); - } else { - treeViewDtoList = new ArrayList<>(); - treeViewDtoList.add(treeViewDto); - waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); - } - } - } - runningNodeMap.remove(nodeName); - } - if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { - break; - } else { - runningNodeMap.putAll(waitingRunningNodeMap); - waitingRunningNodeMap.clear(); - } - } - result.put(Constants.DATA_LIST, parentTreeViewDto); - result.put(Constants.STATUS, Status.SUCCESS); - result.put(Constants.MSG, Status.SUCCESS.getMsg()); - return result; - } - - - /** - * Generate the DAG Graph based on the process definition id - * - * @param processDefinition process definition - * @return dag graph - */ - private DAG 
genDagGraph(ProcessDefinition processDefinition) { - - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - - //check process data - if (null != processData) { - List taskNodeList = processData.getTasks(); - processDefinition.setGlobalParamList(processData.getGlobalParams()); - ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - - // Generate concrete Dag to be executed - return DagHelper.buildDagGraph(processDag); - } - - return new DAG<>(); - } - - - /** - * whether the graph has a ring - * - * @param taskNodeResponseList task node response list - * @return if graph has cycle flag - */ - private boolean graphHasCycle(List taskNodeResponseList) { - DAG graph = new DAG<>(); - - // Fill the vertices - for (TaskNode taskNodeResponse : taskNodeResponseList) { - graph.addNode(taskNodeResponse.getName(), taskNodeResponse); - } - - // Fill edge relations - for (TaskNode taskNodeResponse : taskNodeResponseList) { - taskNodeResponse.getPreTasks(); - List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class); - if (CollectionUtils.isNotEmpty(preTasks)) { - for (String preTask : preTasks) { - if (!graph.addEdge(preTask, taskNodeResponse.getName())) { - return true; - } - } - } - } - - return graph.hasCycle(); - } - - private String recursionProcessDefinitionName(Integer projectId, String processDefinitionName, int num) { - ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(projectId, processDefinitionName); - if (processDefinition != null) { - if (num > 1) { - String str = processDefinitionName.substring(0, processDefinitionName.length() - 3); - processDefinitionName = str + "(" + num + ")"; - } else { - processDefinitionName = processDefinition.getName() + "(" + num + ")"; - } - } else { - return processDefinitionName; - } - return recursionProcessDefinitionName(projectId, processDefinitionName, 
num + 1); - } - + Map viewTree(Integer processId, + Integer limit) throws Exception; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index e4a00f3895..40c009aa68 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -16,8 +16,28 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; +import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; +import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; +import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; +import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; + +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; import org.apache.dolphinscheduler.api.dto.gantt.Task; import org.apache.dolphinscheduler.api.enums.Status; @@ -31,14 +51,26 @@ import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import 
org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; -import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -46,22 +78,14 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; -import java.text.ParseException; -import java.util.*; -import 
java.util.stream.Collectors; - -import static org.apache.dolphinscheduler.common.Constants.*; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * process instance service */ @Service -public class ProcessInstanceService extends BaseDAGService { +public class ProcessInstanceService extends BaseService { private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class); @@ -154,7 +178,7 @@ public class ProcessInstanceService extends BaseDAGService { * @return process instance detail */ public Map queryProcessInstanceById(User loginUser, String projectName, Integer processId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -167,7 +191,7 @@ public class ProcessInstanceService extends BaseDAGService { ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); processInstance.setReceivers(processDefinition.getReceivers()); processInstance.setReceiversCc(processDefinition.getReceiversCc()); - result.put(Constants.DATA_LIST, processInstance); + result.put(DATA_LIST, processInstance); putMsg(result, Status.SUCCESS); return result; @@ -193,7 +217,7 @@ public class ProcessInstanceService extends BaseDAGService { String searchVal, String executorName,ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -242,7 +266,7 @@ public class ProcessInstanceService extends BaseDAGService { pageInfo.setTotalCount((int) processInstanceList.getTotal()); pageInfo.setLists(processInstances); - result.put(Constants.DATA_LIST, pageInfo); + 
result.put(DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; } @@ -273,7 +297,7 @@ public class ProcessInstanceService extends BaseDAGService { Map resultMap = new HashMap<>(); resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); resultMap.put(TASK_LIST, taskInstanceList); - result.put(Constants.DATA_LIST, resultMap); + result.put(DATA_LIST, resultMap); putMsg(result, Status.SUCCESS); return result; @@ -362,7 +386,7 @@ public class ProcessInstanceService extends BaseDAGService { } Map dataMap = new HashMap<>(); dataMap.put("subProcessInstanceId", subWorkflowInstance.getId()); - result.put(Constants.DATA_LIST, dataMap); + result.put(DATA_LIST, dataMap); putMsg(result, Status.SUCCESS); return result; } @@ -501,7 +525,7 @@ public class ProcessInstanceService extends BaseDAGService { } Map dataMap = new HashMap<>(); dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId()); - result.put(Constants.DATA_LIST, dataMap); + result.put(DATA_LIST, dataMap); putMsg(result, Status.SUCCESS); return result; } @@ -516,7 +540,7 @@ public class ProcessInstanceService extends BaseDAGService { @Transactional(rollbackFor = RuntimeException.class) public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -556,7 +580,7 @@ public class ProcessInstanceService extends BaseDAGService { * @return variables data */ public Map viewVariables(Integer processInstanceId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); @@ -618,7 +642,7 @@ public class ProcessInstanceService extends BaseDAGService { resultMap.put(GLOBAL_PARAMS, globalParams); resultMap.put(LOCAL_PARAMS, 
localUserDefParams); - result.put(Constants.DATA_LIST, resultMap); + result.put(DATA_LIST, resultMap); putMsg(result, Status.SUCCESS); return result; } @@ -668,9 +692,28 @@ public class ProcessInstanceService extends BaseDAGService { } ganttDto.setTasks(taskList); - result.put(Constants.DATA_LIST, ganttDto); + result.put(DATA_LIST, ganttDto); putMsg(result, Status.SUCCESS); return result; } + /** + * process instance to DAG + * + * @param processInstance input process instance + * @return process instance dag. + */ + private static DAG processInstance2DAG(ProcessInstance processInstance) { + + String processDefinitionJson = processInstance.getProcessInstanceJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + List taskNodeList = processData.getTasks(); + + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); + + return DagHelper.buildDagGraph(processDag); + } + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java index 6d3650b77f..ca0e1fc0ec 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java @@ -16,45 +16,15 @@ */ package org.apache.dolphinscheduler.api.service; -import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; -import org.apache.dolphinscheduler.dao.entity.ProjectUser; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; -import 
org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import java.util.*; - -import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc; +import java.util.Map; /** * project service - *HttpTask./ **/ -@Service -public class ProjectService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(ProjectService.class); - - @Autowired - private ProjectMapper projectMapper; - - @Autowired - private ProjectUserMapper projectUserMapper; - - @Autowired - private ProcessDefinitionMapper processDefinitionMapper; +public interface ProjectService { /** * create project @@ -64,38 +34,7 @@ public class ProjectService extends BaseService{ * @param desc description * @return returns an error if it exists */ - public Map createProject(User loginUser, String name, String desc) { - - Map result = new HashMap<>(5); - Map descCheck = checkDesc(desc); - if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { - return descCheck; - } - - Project project = projectMapper.queryByName(name); - if (project != null) { - putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); - return result; - } - project = new Project(); - Date now = new Date(); - - project.setName(name); - project.setDescription(desc); - project.setUserId(loginUser.getId()); - project.setUserName(loginUser.getUserName()); - project.setCreateTime(now); - project.setUpdateTime(now); - - if (projectMapper.insert(project) > 0) { - Project insertedProject = projectMapper.queryByName(name); - result.put(Constants.DATA_LIST, insertedProject); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.CREATE_PROJECT_ERROR); - } - 
return result; - } + Map createProject(User loginUser, String name, String desc); /** * query project details by id @@ -103,19 +42,7 @@ public class ProjectService extends BaseService{ * @param projectId project id * @return project detail information */ - public Map queryById(Integer projectId) { - - Map result = new HashMap<>(5); - Project project = projectMapper.selectById(projectId); - - if (project != null) { - result.put(Constants.DATA_LIST, project); - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); - } - return result; - } + Map queryById(Integer projectId); /** * check project and authorization @@ -125,30 +52,9 @@ public class ProjectService extends BaseService{ * @param projectName project name * @return true if the login user have permission to see the project */ - public Map checkProjectAndAuth(User loginUser, Project project, String projectName) { - Map result = new HashMap<>(5); - if (project == null) { - putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - } else if (!checkReadPermission(loginUser, project)) { - // check read permission - putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); - }else { - putMsg(result, Status.SUCCESS); - } - return result; - } + Map checkProjectAndAuth(User loginUser, Project project, String projectName); - public boolean hasProjectAndPerm(User loginUser, Project project, Map result) { - boolean checkResult = false; - if (project == null) { - putMsg(result, Status.PROJECT_NOT_FOUNT, ""); - } else if (!checkReadPermission(loginUser, project)) { - putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName()); - } else { - checkResult = true; - } - return checkResult; - } + boolean hasProjectAndPerm(User loginUser, Project project, Map result); /** * admin can view all projects @@ -159,29 +65,7 @@ public class ProjectService extends BaseService{ * @param pageNo page number * @return project 
list which the login user have permission to see */ - public Map queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { - Map result = new HashMap<>(); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - - Page page = new Page(pageNo, pageSize); - - int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId(); - IPage projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal); - - List projectList = projectIPage.getRecords(); - if(userId != 0){ - for (Project project : projectList) { - project.setPerm(org.apache.dolphinscheduler.common.Constants.DEFAULT_ADMIN_PERMISSION); - } - } - pageInfo.setTotalCount((int)projectIPage.getTotal()); - pageInfo.setLists(projectList); - result.put(Constants.COUNT, (int)projectIPage.getTotal()); - result.put(Constants.DATA_LIST, pageInfo); - putMsg(result, Status.SUCCESS); - - return result; - } + Map queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal); /** * delete project by id @@ -190,50 +74,7 @@ public class ProjectService extends BaseService{ * @param projectId project id * @return delete result code */ - public Map deleteProject(User loginUser, Integer projectId) { - Map result = new HashMap<>(5); - Project project = projectMapper.selectById(projectId); - Map checkResult = getCheckResult(loginUser, project); - if (checkResult != null) { - return checkResult; - } - - if (!hasPerm(loginUser, project.getUserId())) { - putMsg(result, Status.USER_NO_OPERATION_PERM); - return result; - } - - List processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId); - - if(processDefinitionList.size() > 0){ - putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); - return result; - } - int delete = projectMapper.deleteById(projectId); - if (delete > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.DELETE_PROJECT_ERROR); - } - return result; - } - - /** - * get check 
result - * - * @param loginUser login user - * @param project project - * @return check result - */ - private Map getCheckResult(User loginUser, Project project) { - String projectName = project == null ? null:project.getName(); - Map checkResult = checkProjectAndAuth(loginUser, project, projectName); - Status status = (Status) checkResult.get(Constants.STATUS); - if (status != Status.SUCCESS) { - return checkResult; - } - return null; - } + Map deleteProject(User loginUser, Integer projectId); /** * updateProcessInstance project @@ -244,37 +85,7 @@ public class ProjectService extends BaseService{ * @param desc description * @return update result code */ - public Map update(User loginUser, Integer projectId, String projectName, String desc) { - Map result = new HashMap<>(5); - - Map descCheck = checkDesc(desc); - if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { - return descCheck; - } - - Project project = projectMapper.selectById(projectId); - boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result); - if (!hasProjectAndPerm) { - return result; - } - Project tempProject = projectMapper.queryByName(projectName); - if (tempProject != null && tempProject.getId() != projectId) { - putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName); - return result; - } - project.setName(projectName); - project.setDescription(desc); - project.setUpdateTime(new Date()); - - int update = projectMapper.updateById(project); - if (update > 0) { - putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.UPDATE_PROJECT_ERROR); - } - return result; - } - + Map update(User loginUser, Integer projectId, String projectName, String desc); /** * query unauthorized project @@ -283,48 +94,7 @@ public class ProjectService extends BaseService{ * @param userId user id * @return the projects which user have not permission to see */ - public Map queryUnauthorizedProject(User loginUser, Integer userId) { - Map result = new HashMap<>(5); - if (checkAdmin(loginUser, 
result)) { - return result; - } - /** - * query all project list except specified userId - */ - List projectList = projectMapper.queryProjectExceptUserId(userId); - List resultList = new ArrayList<>(); - Set projectSet = null; - if (projectList != null && projectList.size() > 0) { - projectSet = new HashSet<>(projectList); - - List authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId); - - resultList = getUnauthorizedProjects(projectSet, authedProjectList); - } - result.put(Constants.DATA_LIST, resultList); - putMsg(result,Status.SUCCESS); - return result; - } - - /** - * get unauthorized project - * - * @param projectSet project set - * @param authedProjectList authed project list - * @return project list that authorization - */ - private List getUnauthorizedProjects(Set projectSet, List authedProjectList) { - List resultList; - Set authedProjectSet = null; - if (authedProjectList != null && authedProjectList.size() > 0) { - authedProjectSet = new HashSet<>(authedProjectList); - projectSet.removeAll(authedProjectSet); - - } - resultList = new ArrayList<>(projectSet); - return resultList; - } - + Map queryUnauthorizedProject(User loginUser, Integer userId); /** * query authorized project @@ -333,83 +103,21 @@ public class ProjectService extends BaseService{ * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ - public Map queryAuthorizedProject(User loginUser, Integer userId) { - Map result = new HashMap<>(); - - if (checkAdmin(loginUser, result)) { - return result; - } - - List projects = projectMapper.queryAuthedProjectListByUserId(userId); - result.put(Constants.DATA_LIST, projects); - putMsg(result,Status.SUCCESS); - - return result; - } - + Map queryAuthorizedProject(User loginUser, Integer userId); /** - * check whether have read permission - * - * @param user user - * @param project project - * @return true if the user have permission to see the project, otherwise return 
false - */ - private boolean checkReadPermission(User user, Project project) { - int permissionId = queryPermission(user, project); - return (permissionId & Constants.READ_PERMISSION) != 0; - } - - /** - * query permission id + * query authorized project * - * @param user user - * @param project project - * @return permission + * @param loginUser login user + * @return projects which the user have permission to see, Except for items created by this user */ - private int queryPermission(User user, Project project) { - if (user.getUserType() == UserType.ADMIN_USER) { - return Constants.READ_PERMISSION; - } - - if (project.getUserId() == user.getId()) { - return Constants.ALL_PERMISSIONS; - } - - ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId()); - - if (projectUser == null) { - return 0; - } - - return projectUser.getPerm(); - - } + Map queryProjectCreatedByUser(User loginUser); /** * query all project list that have one or more process definitions. 
+ * * @return project list */ - public Map queryAllProjectList() { - Map result = new HashMap<>(); - List projects = projectMapper.selectList(null); - List processDefinitions = processDefinitionMapper.selectList(null); - if(projects != null){ - Set set = new HashSet<>(); - for (ProcessDefinition processDefinition : processDefinitions){ - set.add(processDefinition.getProjectId()); - } - List tempDeletelist = new ArrayList(); - for (Project project : projects) { - if(!set.contains(project.getId())){ - tempDeletelist.add(project); - } - } - projects.removeAll(tempDeletelist); - } - result.put(Constants.DATA_LIST, projects); - putMsg(result,Status.SUCCESS); - return result; - } + Map queryAllProjectList(); -} +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java index cba1b5f2bb..caffeabd5c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java @@ -59,7 +59,7 @@ public class QueueService extends BaseService { * @return queue list */ public Map queryList(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } @@ -81,7 +81,7 @@ public class QueueService extends BaseService { * @return queue list */ public Map queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } @@ -110,7 +110,7 @@ public class QueueService extends BaseService { * @return create result */ public Map createQueue(User loginUser, String queue, String queueName) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { 
return result; } @@ -159,7 +159,7 @@ public class QueueService extends BaseService { * @return update result code */ public Map updateQueue(User loginUser, int id, String queue, String queueName) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index 3cb715b964..d26bef9f92 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -402,7 +402,7 @@ public class ResourcesService extends BaseService { putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(5); + Map resultMap = new HashMap<>(); for (Map.Entry entry: dataMap.entrySet()) { if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); @@ -447,7 +447,7 @@ public class ResourcesService extends BaseService { */ public Map queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { - HashMap result = new HashMap<>(5); + HashMap result = new HashMap<>(); Page page = new Page(pageNo, pageSize); int userId = loginUser.getId(); if (isAdmin(loginUser)) { @@ -548,7 +548,7 @@ public class ResourcesService extends BaseService { */ public Map queryResourceList(User loginUser, ResourceType type) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); int userId = loginUser.getId(); if(isAdmin(loginUser)){ @@ -571,7 +571,7 @@ public class ResourcesService extends BaseService { */ public Map queryResourceJarList(User loginUser, ResourceType type) { - Map result = new HashMap<>(5); + Map result = new 
HashMap<>(); int userId = loginUser.getId(); if(isAdmin(loginUser)){ userId = 0; @@ -1094,7 +1094,7 @@ public class ResourcesService extends BaseService { * @return unauthorized result code */ public Map unauthorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (checkAdmin(loginUser, result)) { return result; @@ -1146,7 +1146,7 @@ public class ResourcesService extends BaseService { * @return authorized result */ public Map authorizedFile(User loginUser, Integer userId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)){ return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index 78a36c639a..93fa14872a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -452,7 +452,7 @@ public class SchedulerService extends BaseService { * @return schedule list */ public Map queryScheduleList(User loginUser, String projectName) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); // check project auth @@ -534,7 +534,7 @@ public class SchedulerService extends BaseService { */ public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -583,7 +583,7 @@ public class SchedulerService extends BaseService { * @return the next five fire time */ public Map previewSchedule(User loginUser, String projectName, 
String schedule) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); CronExpression cronExpression; ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); Date now = new Date(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java index b4aab962ef..dc911f51e3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SessionService.java @@ -16,36 +16,15 @@ */ package org.apache.dolphinscheduler.api.service; +import javax.servlet.http.HttpServletRequest; -import org.apache.dolphinscheduler.api.controller.BaseController; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.SessionMapper; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; -import org.springframework.transaction.annotation.Transactional; - -import javax.servlet.http.Cookie; -import javax.servlet.http.HttpServletRequest; -import java.util.Date; -import java.util.List; -import java.util.UUID; /** * session service */ -@Service -public class SessionService extends BaseService{ - - private static final Logger logger = LoggerFactory.getLogger(SessionService.class); - - @Autowired - private SessionMapper sessionMapper; +public interface SessionService { /** * get user session from request @@ -53,26 +32,7 @@ public class SessionService extends BaseService{ * @param request request * @return session */ - public 
Session getSession(HttpServletRequest request) { - String sessionId = request.getHeader(Constants.SESSION_ID); - - if(StringUtils.isBlank(sessionId)) { - Cookie cookie = getCookie(request, Constants.SESSION_ID); - - if (cookie != null) { - sessionId = cookie.getValue(); - } - } - - if(StringUtils.isBlank(sessionId)) { - return null; - } - - String ip = BaseController.getClientIpAddress(request); - logger.debug("get session: {}, ip: {}", sessionId, ip); - - return sessionMapper.selectById(sessionId); - } + Session getSession(HttpServletRequest request); /** * create session @@ -81,55 +41,7 @@ public class SessionService extends BaseService{ * @param ip ip * @return session string */ - @Transactional(rollbackFor = RuntimeException.class) - public String createSession(User user, String ip) { - Session session = null; - - // logined - List sessionList = sessionMapper.queryByUserId(user.getId()); - - Date now = new Date(); - - /** - * if you have logged in and are still valid, return directly - */ - if (CollectionUtils.isNotEmpty(sessionList)) { - // is session list greater 1 , delete other ,get one - if (sessionList.size() > 1){ - for (int i=1 ; i < sessionList.size();i++){ - sessionMapper.deleteById(sessionList.get(i).getId()); - } - } - session = sessionList.get(0); - if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) { - /** - * updateProcessInstance the latest login time - */ - session.setLastLoginTime(now); - sessionMapper.updateById(session); - - return session.getId(); - - } else { - /** - * session expired, then delete this session first - */ - sessionMapper.deleteById(session.getId()); - } - } - - // assign new session - session = new Session(); - - session.setId(UUID.randomUUID().toString()); - session.setIp(ip); - session.setUserId(user.getId()); - session.setLastLoginTime(now); - - sessionMapper.insert(session); - - return session.getId(); - } + String createSession(User user, String ip); /** * sign out @@ -138,17 
+50,5 @@ public class SessionService extends BaseService{ * @param ip no use * @param loginUser login user */ - public void signOut(String ip, User loginUser) { - try { - /** - * query session by user id and ip - */ - Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(),ip); - - //delete session - sessionMapper.deleteById(session.getId()); - }catch (Exception e){ - logger.warn("userId : {} , ip : {} , find more one session",loginUser.getId(),ip); - } - } + void signOut(String ip, User loginUser); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index 170278e02f..c5a09009d1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -83,7 +83,7 @@ public class TaskInstanceService extends BaseService { Integer processInstanceId, String taskName, String executorName, String startDate, String endDate, String searchVal, ExecutionStatus stateType,String host, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java index a78c951d34..586c1a1b53 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -86,7 +86,7 @@ public class TenantService extends BaseService{ int queueId, String desc) throws 
Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); if (checkAdmin(loginUser, result)) { return result; @@ -138,7 +138,7 @@ public class TenantService extends BaseService{ */ public Map queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } @@ -169,7 +169,7 @@ public class TenantService extends BaseService{ */ public Map updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); if (checkAdmin(loginUser, result)) { @@ -235,7 +235,7 @@ public class TenantService extends BaseService{ */ @Transactional(rollbackFor = Exception.class) public Map deleteTenantById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; @@ -292,7 +292,7 @@ public class TenantService extends BaseService{ */ public Map queryTenantList(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List resourceList = tenantMapper.selectList(null); result.put(Constants.DATA_LIST, resourceList); @@ -307,7 +307,7 @@ public class TenantService extends BaseService{ * @return tenant list */ public Map queryTenantList(String tenantCode) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List resourceList = tenantMapper.queryByTenantCode(tenantCode); if (CollectionUtils.isNotEmpty(resourceList)) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java index bb92c9ccf7..8931454e27 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java @@ -151,7 +151,7 @@ public class UdfFuncService extends BaseService{ */ public Map queryUdfFuncDetail(int id) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); UdfFunc udfFunc = udfFuncMapper.selectById(id); if (udfFunc == null) { putMsg(result, Status.RESOURCE_NOT_EXIST); @@ -247,7 +247,7 @@ public class UdfFuncService extends BaseService{ * @return udf function list page */ public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); PageInfo pageInfo = new PageInfo(pageNo, pageSize); @@ -286,7 +286,7 @@ public class UdfFuncService extends BaseService{ * @return resource list */ public Map queryResourceList(User loginUser, Integer type) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); List udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type); result.put(Constants.DATA_LIST, udfFuncList); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index cbd795cce4..d66ef7257c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.api.utils.CheckUtils; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ResourceType; import 
org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.*; @@ -101,7 +102,7 @@ public class UsersService extends BaseService { String queue, int state) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //check all user params String msg = this.checkUserParams(userName, userPassword, email, phone); @@ -229,7 +230,7 @@ public class UsersService extends BaseService { * @return user list page */ public Map queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; @@ -269,7 +270,7 @@ public class UsersService extends BaseService { String phone, String queue, int state) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); User user = userMapper.selectById(userId); @@ -392,7 +393,7 @@ public class UsersService extends BaseService { * @throws Exception exception when operate hdfs */ public Map deleteUserById(User loginUser, int id) throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (!isAdmin(loginUser)) { putMsg(result, Status.USER_NO_OPERATION_PERM, id); @@ -432,7 +433,7 @@ public class UsersService extends BaseService { */ @Transactional(rollbackFor = RuntimeException.class) public Map grantProject(User loginUser, int userId, String projectIds) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate @@ -482,7 +483,7 @@ public class UsersService extends BaseService { */ @Transactional(rollbackFor = RuntimeException.class) public Map grantResources(User loginUser, int userId, String resourceIds) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), 
Status.USER_NO_OPERATION_PERM)) { return result; @@ -579,7 +580,7 @@ public class UsersService extends BaseService { */ @Transactional(rollbackFor = RuntimeException.class) public Map grantUDFFunction(User loginUser, int userId, String udfIds) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { @@ -626,7 +627,7 @@ public class UsersService extends BaseService { */ @Transactional(rollbackFor = RuntimeException.class) public Map grantDataSource(User loginUser, int userId, String datasourceIds) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); result.put(Constants.STATUS, false); //only admin can operate @@ -706,7 +707,7 @@ public class UsersService extends BaseService { * @return user list */ public Map queryAllGeneralUsers(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; @@ -727,7 +728,7 @@ public class UsersService extends BaseService { * @return user list */ public Map queryUserList(User loginUser) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; @@ -771,7 +772,7 @@ public class UsersService extends BaseService { */ public Map unauthorizedUser(User loginUser, Integer alertgroupId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { return result; @@ -807,7 +808,7 @@ public class UsersService extends BaseService { * @return authorized result code */ public Map authorizedUser(User loginUser, Integer alertgroupId) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //only admin can operate if (check(result, !isAdmin(loginUser), 
Status.USER_NO_OPERATION_PERM)) { return result; @@ -917,10 +918,11 @@ public class UsersService extends BaseService { * @param repeatPassword repeat password * @param email email * @return register result code + * @throws Exception exception */ @Transactional(rollbackFor = RuntimeException.class) public Map registerUser(String userName, String userPassword, String repeatPassword, String email) { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //check user params String msg = this.checkUserParams(userName, userPassword, email, ""); @@ -934,10 +936,51 @@ public class UsersService extends BaseService { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "two passwords are not same"); return result; } - - createUser(userName, userPassword, email, 1, "", "", 0); + User user = createUser(userName, userPassword, email, 1, "", "", Flag.NO.ordinal()); putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, user); return result; } + /** + * activate user, only system admin have permission, change user state code 0 to 1 + * + * @param loginUser login user + * @return create result code + */ + public Map activateUser(User loginUser, String userName) { + Map result = new HashMap<>(); + result.put(Constants.STATUS, false); + + if (!isAdmin(loginUser)) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + if (!CheckUtils.checkUserName(userName)){ + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); + return result; + } + + User user = userMapper.queryByUserNameAccurately(userName); + + if (user == null) { + putMsg(result, Status.USER_NOT_EXIST, userName); + return result; + } + + if (user.getState() != Flag.NO.ordinal()) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); + return result; + } + + user.setState(Flag.YES.ordinal()); + Date now = new Date(); + user.setUpdateTime(now); + userMapper.updateById(user); + User responseUser = userMapper.queryByUserNameAccurately(userName); + putMsg(result, 
Status.SUCCESS); + result.put(Constants.DATA_LIST, responseUser); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java index 374fd6e718..95257e8c8a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java @@ -63,7 +63,7 @@ public class WorkerGroupService extends BaseService { // list to index Integer toIndex = (pageNo - 1) * pageSize + pageSize; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); if (checkAdmin(loginUser, result)) { return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java new file mode 100644 index 0000000000..7e0b11780c --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.AccessTokenService; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.EncryptionUtils; +import org.apache.dolphinscheduler.dao.entity.AccessToken; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; + +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * access token service impl + */ +@Service +public class AccessTokenServiceImpl extends BaseService implements AccessTokenService { + + private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class); + + @Autowired + private AccessTokenMapper accessTokenMapper; + + /** + * query access token list + * + * @param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return token list for page number and page size + */ + public Map queryAccessTokenList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { + Map result = new HashMap<>(5); + + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + Page page = new Page<>(pageNo, pageSize); + int userId = loginUser.getId(); + if 
(loginUser.getUserType() == UserType.ADMIN_USER) { + userId = 0; + } + IPage accessTokenList = accessTokenMapper.selectAccessTokenPage(page, searchVal, userId); + pageInfo.setTotalCount((int) accessTokenList.getTotal()); + pageInfo.setLists(accessTokenList.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * create token + * + * @param userId token for user + * @param expireTime token expire time + * @param token token string + * @return create result code + */ + public Map createToken(int userId, String expireTime, String token) { + Map result = new HashMap<>(5); + + if (userId <= 0) { + throw new IllegalArgumentException("User id should not less than or equals to 0."); + } + AccessToken accessToken = new AccessToken(); + accessToken.setUserId(userId); + accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); + accessToken.setToken(token); + accessToken.setCreateTime(new Date()); + accessToken.setUpdateTime(new Date()); + + // insert + int insert = accessTokenMapper.insert(accessToken); + + if (insert > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.CREATE_ACCESS_TOKEN_ERROR); + } + + return result; + } + + /** + * generate token + * + * @param userId token for user + * @param expireTime token expire time + * @return token string + */ + public Map generateToken(int userId, String expireTime) { + Map result = new HashMap<>(5); + String token = EncryptionUtils.getMd5(userId + expireTime + String.valueOf(System.currentTimeMillis())); + result.put(Constants.DATA_LIST, token); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * delete access token + * + * @param loginUser login user + * @param id token id + * @return delete result code + */ + public Map delAccessTokenById(User loginUser, int id) { + Map result = new HashMap<>(5); + + AccessToken accessToken = accessTokenMapper.selectById(id); + + if (accessToken == null) { + logger.error("access 
token not exist, access token id {}", id); + putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); + return result; + } + + if (loginUser.getId() != accessToken.getUserId() && + loginUser.getUserType() != UserType.ADMIN_USER) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + accessTokenMapper.deleteById(id); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * update token by id + * + * @param id token id + * @param userId token for user + * @param expireTime token expire time + * @param token token string + * @return update result code + */ + public Map updateToken(int id, int userId, String expireTime, String token) { + Map result = new HashMap<>(5); + + AccessToken accessToken = accessTokenMapper.selectById(id); + if (accessToken == null) { + logger.error("access token not exist, access token id {}", id); + putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); + return result; + } + accessToken.setUserId(userId); + accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); + accessToken.setToken(token); + accessToken.setUpdateTime(new Date()); + + accessTokenMapper.updateById(accessToken); + + putMsg(result, Status.SUCCESS); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java new file mode 100644 index 0000000000..21313b96d3 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -0,0 +1,384 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service.impl; + + +import org.apache.dolphinscheduler.api.dto.CommandStateCount; +import org.apache.dolphinscheduler.api.dto.DefineUserDto; +import org.apache.dolphinscheduler.api.dto.TaskCountDto; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.DataAnalysisService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.TriFunction; +import org.apache.dolphinscheduler.dao.entity.CommandCount; +import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; +import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import 
org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * data analysis service impl + */ +@Service +public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService { + + private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class); + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + @Autowired + private CommandMapper commandMapper; + + @Autowired + private ErrorCommandMapper errorCommandMapper; + + @Autowired + private TaskInstanceMapper taskInstanceMapper; + + @Autowired + private ProcessService processService; + + private static final String COMMAND_STATE = "commandState"; + + private static final String ERROR_COMMAND_STATE = "errorCommandState"; + + /** + * statistical task instance status data + * + * @param loginUser login user + * @param projectId project id + * @param startDate start date + * @param endDate end date + * @return task state count data + */ + public Map countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) { + + return countStateByProject( + loginUser, + projectId, + startDate, + endDate, + (start, end, projectIds) -> this.taskInstanceMapper.countTaskInstanceStateByUser(start, end, 
projectIds)); + } + + /** + * statistical process instance status data + * + * @param loginUser login user + * @param projectId project id + * @param startDate start date + * @param endDate end date + * @return process instance state count data + */ + public Map countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) { + return this.countStateByProject( + loginUser, + projectId, + startDate, + endDate, + (start, end, projectIds) -> this.processInstanceMapper.countInstanceStateByUser(start, end, projectIds)); + } + + private Map countStateByProject(User loginUser, int projectId, String startDate, String endDate + , TriFunction> instanceStateCounter) { + Map result = new HashMap<>(5); + boolean checkProject = checkProject(loginUser, projectId, result); + if (!checkProject) { + return result; + } + + Date start; + Date end; + try { + start = DateUtils.getScheduleDate(startDate); + end = DateUtils.getScheduleDate(endDate); + } catch (Exception e) { + logger.error(e.getMessage(), e); + putErrorRequestParamsMsg(result); + return result; + } + Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); + List processInstanceStateCounts = + instanceStateCounter.apply(start, end, projectIdArray); + + if (processInstanceStateCounts != null) { + TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); + result.put(Constants.DATA_LIST, taskCountResult); + putMsg(result, Status.SUCCESS); + } + return result; + } + + + /** + * statistics the process definition quantities of certain person + * + * @param loginUser login user + * @param projectId project id + * @return definition count data + */ + public Map countDefinitionByUser(User loginUser, int projectId) { + Map result = new HashMap<>(); + + + Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); + List defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser( + loginUser.getId(), projectIdArray, isAdmin(loginUser)); + + 
DefineUserDto dto = new DefineUserDto(defineGroupByUsers); + result.put(Constants.DATA_LIST, dto); + putMsg(result, Status.SUCCESS); + return result; + } + + + /** + * statistical command status data + * + * @param loginUser login user + * @param projectId project id + * @param startDate start date + * @param endDate end date + * @return command state count data + */ + public Map countCommandState(User loginUser, int projectId, String startDate, String endDate) { + + Map result = new HashMap<>(5); + boolean checkProject = checkProject(loginUser, projectId, result); + if (!checkProject) { + return result; + } + + /** + * find all the task lists in the project under the user + * statistics based on task status execution, failure, completion, wait, total + */ + Date start = null; + Date end = null; + + if (startDate != null && endDate != null) { + try { + start = DateUtils.getScheduleDate(startDate); + end = DateUtils.getScheduleDate(endDate); + } catch (Exception e) { + logger.error(e.getMessage(), e); + putErrorRequestParamsMsg(result); + return result; + } + } + + + Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); + // count command state + List commandStateCounts = + commandMapper.countCommandState( + loginUser.getId(), + start, + end, + projectIdArray); + + // count error command state + List errorCommandStateCounts = + errorCommandMapper.countCommandState( + start, end, projectIdArray); + + // enumMap + Map> dataMap = new EnumMap<>(CommandType.class); + + Map commonCommand = new HashMap<>(); + commonCommand.put(COMMAND_STATE, 0); + commonCommand.put(ERROR_COMMAND_STATE, 0); + + + // init data map + /** + * START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS, + START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD; + */ + dataMap.put(CommandType.START_PROCESS, commonCommand); + dataMap.put(CommandType.START_CURRENT_TASK_PROCESS, commonCommand); 
+ dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, commonCommand); + dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, commonCommand); + dataMap.put(CommandType.START_FAILURE_TASK_PROCESS, commonCommand); + dataMap.put(CommandType.COMPLEMENT_DATA, commonCommand); + dataMap.put(CommandType.SCHEDULER, commonCommand); + dataMap.put(CommandType.REPEAT_RUNNING, commonCommand); + dataMap.put(CommandType.PAUSE, commonCommand); + dataMap.put(CommandType.STOP, commonCommand); + dataMap.put(CommandType.RECOVER_WAITTING_THREAD, commonCommand); + + // put command state + for (CommandCount executeStatusCount : commandStateCounts) { + Map commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType())); + commandStateCountsMap.put(COMMAND_STATE, executeStatusCount.getCount()); + dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap); + } + + // put error command state + for (CommandCount errorExecutionStatus : errorCommandStateCounts) { + Map errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType())); + errorCommandStateCountsMap.put(ERROR_COMMAND_STATE, errorExecutionStatus.getCount()); + dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap); + } + + List list = new ArrayList<>(); + for (Map.Entry> next : dataMap.entrySet()) { + CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get(ERROR_COMMAND_STATE), + next.getValue().get(COMMAND_STATE), next.getKey()); + list.add(commandStateCount); + } + + result.put(Constants.DATA_LIST, list); + putMsg(result, Status.SUCCESS); + return result; + } + + private Integer[] getProjectIdsArrays(User loginUser, int projectId) { + List projectIds = new ArrayList<>(); + if (projectId != 0) { + projectIds.add(projectId); + } else if (loginUser.getUserType() == UserType.GENERAL_USER) { + projectIds = processService.getProjectIdListHavePerm(loginUser.getId()); + if (projectIds.isEmpty()) { + projectIds.add(0); + } + 
} + return projectIds.toArray(new Integer[0]); + } + + /** + * count queue state + * + * @param loginUser login user + * @param projectId project id + * @return queue state count data + */ + public Map countQueueState(User loginUser, int projectId) { + Map result = new HashMap<>(5); + + boolean checkProject = checkProject(loginUser, projectId, result); + if (!checkProject) { + return result; + } + + // TODO tasksQueueList and tasksKillList is never updated. + List tasksQueueList = new ArrayList<>(); + List tasksKillList = new ArrayList<>(); + + Map dataMap = new HashMap<>(); + if (loginUser.getUserType() == UserType.ADMIN_USER) { + dataMap.put("taskQueue", tasksQueueList.size()); + dataMap.put("taskKill", tasksKillList.size()); + + result.put(Constants.DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + int[] tasksQueueIds = new int[tasksQueueList.size()]; + int[] tasksKillIds = new int[tasksKillList.size()]; + + int i = 0; + for (String taskQueueStr : tasksQueueList) { + if (StringUtils.isNotEmpty(taskQueueStr)) { + String[] splits = taskQueueStr.split("_"); + if (splits.length >= 4) { + tasksQueueIds[i++] = Integer.parseInt(splits[3]); + } + } + } + + i = 0; + for (String taskKillStr : tasksKillList) { + if (StringUtils.isNotEmpty(taskKillStr)) { + String[] splits = taskKillStr.split("-"); + if (splits.length == 2) { + tasksKillIds[i++] = Integer.parseInt(splits[1]); + } + } + } + Integer taskQueueCount = 0; + Integer taskKillCount = 0; + + Integer[] projectIds = getProjectIdsArrays(loginUser, projectId); + if (tasksQueueIds.length != 0) { + taskQueueCount = taskInstanceMapper.countTask( + projectIds, + tasksQueueIds); + } + + if (tasksKillIds.length != 0) { + taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds); + } + + dataMap.put("taskQueue", taskQueueCount); + dataMap.put("taskKill", taskKillCount); + + result.put(Constants.DATA_LIST, dataMap); + putMsg(result, Status.SUCCESS); + return result; + } + + private 
boolean checkProject(User loginUser, int projectId, Map result) { + if (projectId != 0) { + Project project = projectMapper.selectById(projectId); + return projectService.hasProjectAndPerm(loginUser, project, result); + } + return true; + } + + private void putErrorRequestParamsMsg(Map result) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java new file mode 100644 index 0000000000..c71f2980f5 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; +import org.apache.dolphinscheduler.api.service.LoggerService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.service.log.LogClientService; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import org.apache.commons.lang.ArrayUtils; + +import java.nio.charset.StandardCharsets; +import java.util.Objects; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * log service + */ +@Service +public class LoggerServiceImpl implements LoggerService { + + private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class); + + private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s"; + + @Autowired + private ProcessService processService; + + private LogClientService logClient; + + @PostConstruct + public void init() { + if (Objects.isNull(this.logClient)) { + this.logClient = new LogClientService(); + } + } + + @PreDestroy + public void close() { + if (Objects.nonNull(this.logClient) && this.logClient.isRunning()) { + logClient.close(); + } + } + + /** + * view log + * + * @param taskInstId task instance id + * @param skipLineNum skip line number + * @param limit limit + * @return log string data + */ + @SuppressWarnings("unchecked") + public Result queryLog(int taskInstId, int skipLineNum, int limit) { + + TaskInstance taskInstance = 
processService.findTaskInstanceById(taskInstId); + + if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { + return Result.error(Status.TASK_INSTANCE_NOT_FOUND); + } + + String host = getHost(taskInstance.getHost()); + + Result result = new Result<>(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); + + logger.info("log host : {} , logPath : {} , logServer port : {}", host, taskInstance.getLogPath(), + Constants.RPC_PORT); + + StringBuilder log = new StringBuilder(); + if (skipLineNum == 0) { + String head = String.format(LOG_HEAD_FORMAT, + taskInstance.getLogPath(), + host, + Constants.SYSTEM_LINE_SEPARATOR); + log.append(head); + } + + log.append(logClient + .rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(), skipLineNum, limit)); + + result.setData(log.toString()); + return result; + } + + + /** + * get log size + * + * @param taskInstId task instance id + * @return log byte array + */ + public byte[] getLogBytes(int taskInstId) { + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); + if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())) { + throw new ServiceException("task instance is null or host is null"); + } + String host = getHost(taskInstance.getHost()); + byte[] head = String.format(LOG_HEAD_FORMAT, + taskInstance.getLogPath(), + host, + Constants.SYSTEM_LINE_SEPARATOR).getBytes(StandardCharsets.UTF_8); + return ArrayUtils.addAll(head, + logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); + } + + + /** + * get host + * + * @param address address + * @return old version return true ,otherwise return false + */ + private String getHost(String address) { + if (Boolean.TRUE.equals(Host.isOldVersion(address))) { + return address; + } + return Host.of(address).getIp(); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java new file mode 100644 index 0000000000..e40fdfad30 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -0,0 +1,1696 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; + +import java.io.BufferedOutputStream; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; +import org.apache.dolphinscheduler.api.dto.ProcessMeta; +import org.apache.dolphinscheduler.api.dto.treeview.Instance; +import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.service.SchedulerService; +import org.apache.dolphinscheduler.api.utils.CheckUtils; +import org.apache.dolphinscheduler.api.utils.FileUtils; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.exportprocess.ProcessAddTaskParam; +import org.apache.dolphinscheduler.api.utils.exportprocess.TaskNodeParamFactory; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.AuthorizationType; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.TaskType; +import 
org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WarningType; +import org.apache.dolphinscheduler.common.graph.DAG; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.model.TaskNodeRelation; +import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StreamUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.service.permission.PermissionCheck; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; 
// NOTE(review): this span is diff-added Java (ProcessDefinitionServiceImpl) with newlines collapsed;
// the leading '+' markers are unified-diff residue and generic type parameters appear stripped
// (e.g. "Map result" was presumably "Map<String, Object> result") — TODO confirm against upstream source.
// Below: Spring/MyBatis-Plus/Jackson imports, the @Service class declaration, Status/key constants,
// and the @Autowired mapper/service collaborators used throughout the class.
+import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.web.multipart.MultipartFile; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +/** + * process definition service + */ +@Service +public class ProcessDefinitionServiceImpl extends BaseService implements + ProcessDefinitionService { + + private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class); + + private static final String PROCESSDEFINITIONID = "processDefinitionId"; + + private static final String RELEASESTATE = "releaseState"; + + private static final String TASKS = "tasks"; + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectService projectService; + + @Autowired + private ProcessDefinitionMapper processDefineMapper; + + @Autowired + private ProcessInstanceMapper processInstanceMapper; + + @Autowired + private TaskInstanceMapper taskInstanceMapper; + + @Autowired + private ScheduleMapper scheduleMapper; + + @Autowired + private ProcessService processService; + + /** + * create process definition + * + * @param loginUser login user + * @param projectName project name + * @param name process definition name + * @param processDefinitionJson process definition json + * @param desc description + * @param locations locations for nodes + * @param connects connects for nodes + * @return create result code + * @throws JsonProcessingException JsonProcessingException + */ + public Map createProcessDefinition(User loginUser, + String projectName, + String name, + String processDefinitionJson, + String desc, + String locations, +
// createProcessDefinition body: checks project auth, parses/validates the definition JSON via
// checkProcessNodeList (which itself rejects a null ProcessData), builds an OFFLINE definition,
// inserts it, and returns the persisted row under DATA_LIST plus the new id under "processDefinitionId".
// NOTE(review): global params are de-duplicated via a HashSet, which also discards ordering — presumably
// intentional, verify against callers. "new HashMap<>(5)" is a magic initial capacity used file-wide.
String connects) throws JsonProcessingException { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + // check project auth + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefine = new ProcessDefinition(); + Date now = new Date(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); + if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) { + return checkProcessJson; + } + + processDefine.setName(name); + processDefine.setReleaseState(ReleaseState.OFFLINE); + processDefine.setProjectId(project.getId()); + processDefine.setUserId(loginUser.getId()); + processDefine.setProcessDefinitionJson(processDefinitionJson); + processDefine.setDescription(desc); + processDefine.setLocations(locations); + processDefine.setConnects(connects); + processDefine.setTimeout(processData.getTimeout()); + processDefine.setTenantId(processData.getTenantId()); + processDefine.setModifyBy(loginUser.getUserName()); + processDefine.setResourceIds(getResourceIds(processData)); + + //custom global params + List globalParamsList = processData.getGlobalParams(); + if (CollectionUtils.isNotEmpty(globalParamsList)) { + Set globalParamsSet = new HashSet<>(globalParamsList); + globalParamsList = new ArrayList<>(globalParamsSet); + processDefine.setGlobalParamList(globalParamsList); + } + processDefine.setCreateTime(now); + processDefine.setUpdateTime(now); + processDefine.setFlag(Flag.YES); + processDefineMapper.insert(processDefine); + + // return processDefinition object with ID + result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId())); + putMsg(result, Status.SUCCESS); +
// End of createProcessDefinition, then:
// - getResourceIds: walks every TaskNode's parameters and joins the distinct resource-file ids into a
//   comma-separated string. NOTE(review): the manual StringBuilder join could be a String.join/stream
//   collect, and "processDefinitionId" here duplicates the PROCESSDEFINITIONID constant defined above.
// - queryProcessDefinitionList: auth-checked fetch of all definitions for the project.
// - tail fragment: signature start of queryProcessDefinitionListPaging (continues on the next span).
result.put("processDefinitionId", processDefine.getId()); + return result; + } + + /** + * get resource ids + * + * @param processData process data + * @return resource ids + */ + private String getResourceIds(ProcessData processData) { + List tasks = processData.getTasks(); + Set resourceIds = new HashSet<>(); + for (TaskNode taskNode : tasks) { + String taskParameter = taskNode.getParams(); + AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter); + if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) { + Set tempSet = params.getResourceFilesList().stream().map(t -> t.getId()).collect(Collectors.toSet()); + resourceIds.addAll(tempSet); + } + } + + StringBuilder sb = new StringBuilder(); + for (int i : resourceIds) { + if (sb.length() > 0) { + sb.append(","); + } + sb.append(i); + } + return sb.toString(); + } + + + /** + * query process definition list + * + * @param loginUser login user + * @param projectName project name + * @return definition list + */ + public Map queryProcessDefinitionList(User loginUser, String projectName) { + + HashMap result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + List resourceList = processDefineMapper.queryAllDefinitionList(project.getId()); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + + /** + * query process definition list paging + * + * @param loginUser login user + * @param projectName project name + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param userId user id + * @return process definition page + */ + public Map queryProcessDefinitionListPaging(User loginUser, String projectName, String
// Continuation of queryProcessDefinitionListPaging: delegates to MyBatis-Plus pagination
// (Page/IPage via queryDefineListPaging, admin-aware) and repackages into the project's PageInfo DTO.
// Then queryProcessDefinitionById: auth check + selectById.
// NOTE(review): on a missing *definition* this puts Status.PROCESS_INSTANCE_NOT_EXIST — the sibling
// deleteProcessDefinitionById uses PROCESS_DEFINE_NOT_EXIST for the same situation; looks like the
// wrong enum constant, confirm against the Status enum before changing.
searchVal, Integer pageNo, Integer pageSize, Integer userId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + Page page = new Page(pageNo, pageSize); + IPage processDefinitionIPage = processDefineMapper.queryDefineListPaging( + page, searchVal, userId, project.getId(), isAdmin(loginUser)); + + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + pageInfo.setTotalCount((int) processDefinitionIPage.getTotal()); + pageInfo.setLists(processDefinitionIPage.getRecords()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query detail of process definition + * + * @param loginUser login user + * @param projectName project name + * @param processId process definition id + * @return process definition detail + */ + public Map queryProcessDefinitionById(User loginUser, String projectName, Integer processId) { + + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); + } else { + result.put(Constants.DATA_LIST, processDefinition); + putMsg(result, Status.SUCCESS); + } + return result; + } + + /** + * update process definition + * + * @param loginUser login user + * @param projectName project name + * @param name process definition name + * @param id process definition id + * @param
// updateProcessDefinition body: auth + JSON validation, then refuses to edit when the existing
// definition is ONLINE (PROCESS_DEFINE_NOT_ALLOWED_EDIT); otherwise overwrites all mutable fields
// and forces the state back to OFFLINE. NOTE(review): the putMsg(SUCCESS) in the final else branch
// is redundant — it is overwritten by the SUCCESS/UPDATE_PROCESS_DEFINITION_ERROR result of
// updateById below.
processDefinitionJson process definition json + * @param desc description + * @param locations locations for nodes + * @param connects connects for nodes + * @return update result code + */ + public Map updateProcessDefinition(User loginUser, String projectName, int id, String name, + String processDefinitionJson, String desc, + String locations, String connects) { + Map result = new HashMap<>(5); + + Project project = projectMapper.queryByName(projectName); + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + Map checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); + if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { + return checkProcessJson; + } + ProcessDefinition processDefine = processService.findProcessDefineById(id); + if (processDefine == null) { + // check process definition exists + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); + return result; + } else if (processDefine.getReleaseState() == ReleaseState.ONLINE) { + // online can not permit edit + putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefine.getName()); + return result; + } else { + putMsg(result, Status.SUCCESS); + } + + Date now = new Date(); + + processDefine.setId(id); + processDefine.setName(name); + processDefine.setReleaseState(ReleaseState.OFFLINE); + processDefine.setProjectId(project.getId()); + processDefine.setProcessDefinitionJson(processDefinitionJson); + processDefine.setDescription(desc); + processDefine.setLocations(locations); + processDefine.setConnects(connects); + processDefine.setTimeout(processData.getTimeout()); + processDefine.setTenantId(processData.getTenantId()); + processDefine.setModifyBy(loginUser.getUserName()); +
// End of updateProcessDefinition, then verifyProcessDefinitionName (name-uniqueness probe within
// the project). NOTE(review): an existing *definition* name reports Status.PROCESS_INSTANCE_EXIST —
// same instance/definition naming mismatch as queryProcessDefinitionById; confirm upstream.
// Tail fragment: javadoc + opening of deleteProcessDefinitionById (continues on the next span).
processDefine.setResourceIds(getResourceIds(processData)); + + //custom global params + List globalParamsList = new ArrayList<>(); + if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) { + Set userDefParamsSet = new HashSet<>(processData.getGlobalParams()); + globalParamsList = new ArrayList<>(userDefParamsSet); + } + processDefine.setGlobalParamList(globalParamsList); + processDefine.setUpdateTime(now); + processDefine.setFlag(Flag.YES); + if (processDefineMapper.updateById(processDefine) > 0) { + putMsg(result, Status.SUCCESS); + + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + return result; + } + + /** + * verify process definition name unique + * + * @param loginUser login user + * @param projectName project name + * @param name name + * @return true if process definition name not exists, otherwise false + */ + public Map verifyProcessDefinitionName(User loginUser, String projectName, String name) { + + Map result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); + if (processDefinition == null) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); + } + return result; + } + + /** + * delete process definition by id + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @return delete result code + */ + @Transactional(rollbackFor = RuntimeException.class) + public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + +
// deleteProcessDefinitionById body: auth -> existence -> owner-or-admin -> must be OFFLINE ->
// schedule handling -> deleteById. Deleting is refused while an attached schedule is ONLINE.
// NOTE(review): "!schedules.isEmpty() && schedules.size() > 1" is redundant — size() > 1 already
// implies non-empty. The >1 branch treats multiple schedules per definition as an error (warn + fail).
Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId); + + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId); + return result; + } + + // Determine if the login user is the owner of the process definition + if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + // check process definition is already online + if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionId); + return result; + } + + // get the timing according to the process definition + List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); + if (!schedules.isEmpty() && schedules.size() > 1) { + logger.warn("scheduler num is {},Greater than 1", schedules.size()); + putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); + return result; + } else if (schedules.size() == 1) { + Schedule schedule = schedules.get(0); + if (schedule.getReleaseState() == ReleaseState.OFFLINE) { + scheduleMapper.deleteById(schedule.getId()); + } else if (schedule.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); + return result; + } + } + + int delete = processDefineMapper.deleteById(processDefinitionId); + + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); + } + return result; + } + + /** + * release process definition: online / offline + * + * @param loginUser login user + * @param projectName project name + * @param id process definition id + * @param
// releaseProcessDefinition: validates the requested ReleaseState, then ONLINE = verify every
// referenced resource id is still authorized/exists (PermissionCheck) before flipping state;
// OFFLINE = flip state and force every attached schedule offline (including quartz removal via
// SchedulerService.deleteSchedule). NOTE(review): selectById(id) is NOT null-checked before the
// switch dereferences it — an unknown id would NPE here; compare deleteProcessDefinitionById above,
// which does check. TODO confirm and add the guard upstream.
releaseState release state + * @return release result code + */ + @Transactional(rollbackFor = RuntimeException.class) + public Map releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { + HashMap result = new HashMap<>(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + ReleaseState state = ReleaseState.getEnum(releaseState); + + // check state + if (null == state) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(id); + + switch (state) { + case ONLINE: + // To check resources whether they are already cancel authorized or deleted + String resourceIds = processDefinition.getResourceIds(); + if (StringUtils.isNotBlank(resourceIds)) { + Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new); + PermissionCheck permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger); + try { + permissionCheck.checkPermission(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE); + return result; + } + } + + processDefinition.setReleaseState(state); + processDefineMapper.updateById(processDefinition); + break; + case OFFLINE: + processDefinition.setReleaseState(state); + processDefineMapper.updateById(processDefinition); + List scheduleList = scheduleMapper.selectAllByProcessDefineArray( + new int[]{processDefinition.getId()} + ); + + for (Schedule schedule : scheduleList) { + logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", project.getId(),
// End of releaseProcessDefinition; then batchExportProcessDefinitionByIds (auth-checked export of a
// comma-separated id list to the HTTP response — silently returns on empty input or failed auth,
// consistent with it being a download endpoint) and the start of getProcessDefinitionList, which maps
// each id to an exported ProcessMeta, skipping ids that no longer exist.
schedule.getId(), id); + // set status + schedule.setReleaseState(ReleaseState.OFFLINE); + scheduleMapper.updateById(schedule); + SchedulerService.deleteSchedule(project.getId(), schedule.getId()); + } + break; + default: + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); + return result; + } + + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * batch export process definition by ids + * + * @param loginUser + * @param projectName + * @param processDefinitionIds + * @param response + */ + public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) { + + if (StringUtils.isEmpty(processDefinitionIds)) { + return; + } + + //export project info + Project project = projectMapper.queryByName(projectName); + + //check user access for project + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + + if (resultStatus != Status.SUCCESS) { + return; + } + + List processDefinitionList = + getProcessDefinitionList(processDefinitionIds); + + if (CollectionUtils.isNotEmpty(processDefinitionList)) { + downloadProcessDefinitionFile(response, processDefinitionList); + } + } + + /** + * get process definition list by ids + * + * @param processDefinitionIds + * @return + */ + private List getProcessDefinitionList(String processDefinitionIds) { + List processDefinitionList = new ArrayList<>(); + String[] processDefinitionIdArray = processDefinitionIds.split(","); + for (String strProcessDefinitionId : processDefinitionIdArray) { + //get workflow info + int processDefinitionId = Integer.parseInt(strProcessDefinitionId); + ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId); + if (null != processDefinition) { + processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition)); + } + } + + return
// downloadProcessDefinitionFile: serializes the ProcessMeta list as UTF-8 JSON to the servlet
// response. NOTE(review): buff.close() is called in the try block AND again in finally (harmless
// double-close but redundant); a try-with-resources would be cleaner. MediaType.APPLICATION_JSON_UTF8_VALUE
// is deprecated in newer Spring versions — presumably fine for the Spring version in use, verify.
// exportProcessMetaDataStr is a thin JSON wrapper around exportProcessMetaData; no caller is visible
// in this chunk.
processDefinitionList; + } + + /** + * download the process definition file + * + * @param response + * @param processDefinitionList + */ + private void downloadProcessDefinitionFile(HttpServletResponse response, List processDefinitionList) { + response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE); + BufferedOutputStream buff = null; + ServletOutputStream out = null; + try { + out = response.getOutputStream(); + buff = new BufferedOutputStream(out); + buff.write(JSONUtils.toJsonString(processDefinitionList).getBytes(StandardCharsets.UTF_8)); + buff.flush(); + buff.close(); + } catch (IOException e) { + logger.warn("export process fail", e); + } finally { + if (null != buff) { + try { + buff.close(); + } catch (Exception e) { + logger.warn("export process buffer not close", e); + } + } + if (null != out) { + try { + out.close(); + } catch (Exception e) { + logger.warn("export process output stream not close", e); + } + } + } + } + + /** + * get export process metadata string + * + * @param processDefinitionId process definition id + * @param processDefinition process definition + * @return export process metadata string + */ + private String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) { + //create workflow json file + return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition)); + } + + /** + * get export process metadata string + * + * @param processDefinitionId process definition id + * @param processDefinition process definition + * @return export process metadata string + */ + public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) { + //correct task param which has data source or dependent param + String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson()); + processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson); + + //export process metadata +
// exportProcessMetaData continued: copies definition fields into the ProcessMeta DTO and, when a
// schedule exists, exports only the FIRST schedule (schedules.get(0)) with its release state forced
// to OFFLINE — deliberate, so an imported workflow never starts scheduling on its own.
// NOTE(review): only the first schedule is exported even if several exist; confirm that is intended.
// addExportTaskNodeSpecialParam rewrites each task node via its ProcessAddTaskParam strategy
// (datasource/dependent fix-ups) before export.
ProcessMeta exportProcessMeta = new ProcessMeta(); + exportProcessMeta.setProjectName(processDefinition.getProjectName()); + exportProcessMeta.setProcessDefinitionName(processDefinition.getName()); + exportProcessMeta.setProcessDefinitionJson(processDefinition.getProcessDefinitionJson()); + exportProcessMeta.setProcessDefinitionLocations(processDefinition.getLocations()); + exportProcessMeta.setProcessDefinitionConnects(processDefinition.getConnects()); + + //schedule info + List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); + if (!schedules.isEmpty()) { + Schedule schedule = schedules.get(0); + exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString()); + exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId()); + exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime())); + exportProcessMeta.setScheduleEndTime(DateUtils.dateToString(schedule.getEndTime())); + exportProcessMeta.setScheduleCrontab(schedule.getCrontab()); + exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy())); + exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE)); + exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority())); + exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup()); + } + //create workflow json file + return exportProcessMeta; + } + + /** + * correct task param which has datasource or dependent + * + * @param processDefinitionJson processDefinitionJson + * @return correct processDefinitionJson + */ + private String addExportTaskNodeSpecialParam(String processDefinitionJson) { + ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson); + ArrayNode jsonArray = (ArrayNode) jsonObject.path(TASKS); + + for (int i = 0; i < jsonArray.size(); i++) { + JsonNode taskNode = jsonArray.path(i); + if (StringUtils.isNotEmpty(taskNode.path("type").asText())) { +
// End of addExportTaskNodeSpecialParam; checkTaskHasSubProcess (simple SUB_PROCESS type test —
// NOTE(review): TaskType.SUB_PROCESS.name().equals(taskType) would also be null-safe);
// importProcessDefinition: parses the uploaded file into a ProcessMeta list and imports each,
// aborting on the first failure (transactional, so earlier imports roll back);
// start of checkAndImportProcessDefinition (continues on the next span).
String taskType = taskNode.path("type").asText(); + + ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType); + if (null != addTaskParam) { + addTaskParam.addExportSpecialParam(taskNode); + } + } + } + jsonObject.set(TASKS, jsonArray); + return jsonObject.toString(); + } + + /** + * check task if has sub process + * + * @param taskType task type + * @return if task has sub process return true else false + */ + private boolean checkTaskHasSubProcess(String taskType) { + return taskType.equals(TaskType.SUB_PROCESS.name()); + } + + /** + * import process definition + * + * @param loginUser login user + * @param file process metadata json file + * @param currentProjectName current project name + * @return import process + */ + @Transactional(rollbackFor = RuntimeException.class) + public Map importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) { + Map result = new HashMap<>(5); + String processMetaJson = FileUtils.file2String(file); + List processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class); + + //check file content + if (CollectionUtils.isEmpty(processMetaList)) { + putMsg(result, Status.DATA_IS_NULL, "fileContent"); + return result; + } + + for (ProcessMeta processMeta : processMetaList) { + + if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)) { + return result; + } + } + + return result; + } + + /** + * check and import process definition + * + * @param loginUser + * @param currentProjectName + * @param result + * @param processMeta + * @return + */ + private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map result, ProcessMeta processMeta) { + + if (!checkImportanceParams(processMeta, result)) { + return false; + } + + //deal with process name + String processDefinitionName = processMeta.getProcessDefinitionName(); + //use currentProjectName to query + Project targetProject =
// checkAndImportProcessDefinition continued: de-duplicates the name via
// recursionProcessDefinitionName (defined outside this chunk — presumably appends a numeric suffix,
// verify), re-checks uniqueness, creates the definition, then wires up the optional schedule.
// The created id is read back from the "processDefinitionId" key populated by createProcessDefinition.
projectMapper.queryByName(currentProjectName); + if (null != targetProject) { + processDefinitionName = recursionProcessDefinitionName(targetProject.getId(), + processDefinitionName, 1); + } + + //unique check + Map checkResult = verifyProcessDefinitionName(loginUser, currentProjectName, processDefinitionName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (Status.SUCCESS.equals(status)) { + putMsg(result, Status.SUCCESS); + } else { + result.putAll(checkResult); + return false; + } + + // get create process result + Map createProcessResult = + getCreateProcessResult(loginUser, + currentProjectName, + result, + processMeta, + processDefinitionName, + addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject)); + + if (createProcessResult == null) { + return false; + } + + //create process definition + Integer processDefinitionId = + Objects.isNull(createProcessResult.get(PROCESSDEFINITIONID)) ? + null : Integer.parseInt(createProcessResult.get(PROCESSDEFINITIONID).toString()); + + //scheduler param + return getImportProcessScheduleResult(loginUser, + currentProjectName, + result, + processMeta, + processDefinitionName, + processDefinitionId); + + } + + /** + * get create process result + * + * @param loginUser + * @param currentProjectName + * @param result + * @param processMeta + * @param processDefinitionName + * @param importProcessParam + * @return + */ + private Map getCreateProcessResult(User loginUser, + String currentProjectName, + Map result, + ProcessMeta processMeta, + String processDefinitionName, + String importProcessParam) { + Map createProcessResult = null; + try { + createProcessResult = createProcessDefinition(loginUser + , currentProjectName, + processDefinitionName + "_import_" + System.currentTimeMillis(), + importProcessParam, + processMeta.getProcessDefinitionDescription(), + processMeta.getProcessDefinitionLocations(), + processMeta.getProcessDefinitionConnects()); + putMsg(result,
// getCreateProcessResult tail: returns null when createProcessDefinition threw
// JsonProcessingException (logged, IMPORT_PROCESS_DEFINE_ERROR put into result). The imported name
// gets a "_import_<millis>" suffix. Then getImportProcessScheduleResult (only imports a schedule
// when a crontab is present and the definition was created) and checkImportanceParams (required
// fields: projectName, processDefinitionName, processDefinitionJson).
Status.SUCCESS); + } catch (JsonProcessingException e) { + logger.error("import process meta json data: {}", e.getMessage(), e); + putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); + } + + return createProcessResult; + } + + /** + * get import process schedule result + * + * @param loginUser + * @param currentProjectName + * @param result + * @param processMeta + * @param processDefinitionName + * @param processDefinitionId + * @return + */ + private boolean getImportProcessScheduleResult(User loginUser, + String currentProjectName, + Map result, + ProcessMeta processMeta, + String processDefinitionName, + Integer processDefinitionId) { + if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) { + int scheduleInsert = importProcessSchedule(loginUser, + currentProjectName, + processMeta, + processDefinitionName, + processDefinitionId); + + if (0 == scheduleInsert) { + putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); + return false; + } + } + return true; + } + + /** + * check importance params + * + * @param processMeta + * @param result + * @return + */ + private boolean checkImportanceParams(ProcessMeta processMeta, Map result) { + if (StringUtils.isEmpty(processMeta.getProjectName())) { + putMsg(result, Status.DATA_IS_NULL, "projectName"); + return false; + } + if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) { + putMsg(result, Status.DATA_IS_NULL, "processDefinitionName"); + return false; + } + if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) { + putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson"); + return false; + } + + return true; + } + /** + * import process add special task param + * + * @param loginUser login user + * @param processDefinitionJson process definition json + * @param targetProject target project + * @return import process param + */ + private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) { + ObjectNode jsonObject =
// addImportTaskNodeParam body: applies each task type's addImportSpecialParam strategy, then — if
// any SUB_PROCESS task exists — recursively imports sub-processes, collecting old-id -> new-id
// mappings in subProcessIdMap. NOTE(review): subProcessList itself is only used for the
// isNotEmpty() check; importSubProcess re-scans jsonArray. Tail fragment: importProcessSchedule
// header and first setters (continues on the next span).
JSONUtils.parseObject(processDefinitionJson); + ArrayNode jsonArray = (ArrayNode) jsonObject.get(TASKS); + //add sql and dependent param + for (int i = 0; i < jsonArray.size(); i++) { + JsonNode taskNode = jsonArray.path(i); + String taskType = taskNode.path("type").asText(); + ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType); + if (null != addTaskParam) { + addTaskParam.addImportSpecialParam(taskNode); + } + } + + //recursive sub-process parameter correction map key for old process id value for new process id + Map subProcessIdMap = new HashMap<>(20); + + List subProcessList = StreamUtils.asStream(jsonArray.elements()) + .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText())) + .collect(Collectors.toList()); + + if (CollectionUtils.isNotEmpty(subProcessList)) { + importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap); + } + + jsonObject.set(TASKS, jsonArray); + return jsonObject.toString(); + } + + /** + * import process schedule + * + * @param loginUser login user + * @param currentProjectName current project name + * @param processMeta process meta data + * @param processDefinitionName process definition name + * @param processDefinitionId process definition id + * @return insert schedule flag + */ + public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta, + String processDefinitionName, Integer processDefinitionId) { + Date now = new Date(); + Schedule scheduleObj = new Schedule(); + scheduleObj.setProjectName(currentProjectName); + scheduleObj.setProcessDefinitionId(processDefinitionId); + scheduleObj.setProcessDefinitionName(processDefinitionName); + scheduleObj.setCreateTime(now); + scheduleObj.setUpdateTime(now); + scheduleObj.setUserId(loginUser.getId()); + scheduleObj.setUserName(loginUser.getUserName()); + + scheduleObj.setCrontab(processMeta.getScheduleCrontab()); + + if (null != processMeta.getScheduleStartTime()) { +
// importProcessSchedule continued: copies each optional schedule field from the exported
// ProcessMeta (all string-encoded) back into enum/date typed Schedule fields, then inserts.
// NOTE(review): the valueOf(...) conversions will throw IllegalArgumentException on an
// unrecognized exported value — no validation here; confirm whether callers rely on the
// transaction rollback for that case.
scheduleObj.setStartTime(DateUtils.stringToDate(processMeta.getScheduleStartTime())); + } + if (null != processMeta.getScheduleEndTime()) { + scheduleObj.setEndTime(DateUtils.stringToDate(processMeta.getScheduleEndTime())); + } + if (null != processMeta.getScheduleWarningType()) { + scheduleObj.setWarningType(WarningType.valueOf(processMeta.getScheduleWarningType())); + } + if (null != processMeta.getScheduleWarningGroupId()) { + scheduleObj.setWarningGroupId(processMeta.getScheduleWarningGroupId()); + } + if (null != processMeta.getScheduleFailureStrategy()) { + scheduleObj.setFailureStrategy(FailureStrategy.valueOf(processMeta.getScheduleFailureStrategy())); + } + if (null != processMeta.getScheduleReleaseState()) { + scheduleObj.setReleaseState(ReleaseState.valueOf(processMeta.getScheduleReleaseState())); + } + if (null != processMeta.getScheduleProcessInstancePriority()) { + scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority())); + } + + if (null != processMeta.getScheduleWorkerGroupName()) { + scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName()); + } + + return scheduleMapper.insert(scheduleObj); + } + + /** + * check import process has sub process + * recursion create sub process + * + * @param loginUser login user + * @param targetProject target project + * @param jsonArray process task array + * @param subProcessIdMap correct sub process id map + */ + private void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map subProcessIdMap) { + for (int i = 0; i < jsonArray.size(); i++) { + ObjectNode taskNode = (ObjectNode) jsonArray.path(i); + String taskType = taskNode.path("type").asText(); + + if (!checkTaskHasSubProcess(taskType)) { + continue; + } + //get sub process info + ObjectNode subParams = (ObjectNode) taskNode.path("params"); + Integer subProcessId = subParams.path(PROCESSDEFINITIONID).asInt(); + ProcessDefinition subProcess =
// importSubProcess continued: skips sub-processes already present in the target project (matched by
// name), recurses depth-first into nested sub-processes, then rewrites the old
// "processDefinitionId":<old> occurrences in the sub-process JSON with the newly created ids.
// NOTE(review): subProcessJson.replaceAll(...) uses REGEX semantics; the patterns here happen to
// contain no metacharacters beyond the quotes, but String.replace(...) would express the intended
// literal substitution more safely. Also note subProcessIdMap.clear() after each rewrite — the map
// only carries ids between one recursion level and its parent.
processDefineMapper.queryByDefineId(subProcessId); + //check is sub process exist in db + if (null == subProcess) { + continue; + } + String subProcessJson = subProcess.getProcessDefinitionJson(); + //check current project has sub process + ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName()); + + if (null == currentProjectSubProcess) { + ArrayNode subJsonArray = (ArrayNode) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get(TASKS); + + List subProcessList = StreamUtils.asStream(subJsonArray.elements()) + .filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).path("type").asText())) + .collect(Collectors.toList()); + + if (CollectionUtils.isNotEmpty(subProcessList)) { + importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap); + //sub process processId correct + if (!subProcessIdMap.isEmpty()) { + + for (Map.Entry entry : subProcessIdMap.entrySet()) { + String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey(); + String newSubProcessId = "\"processDefinitionId\":" + entry.getValue(); + subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId); + } + + subProcessIdMap.clear(); + } + } + + //if sub-process recursion + Date now = new Date(); + //create sub process in target project + ProcessDefinition processDefine = new ProcessDefinition(); + processDefine.setName(subProcess.getName()); + processDefine.setVersion(subProcess.getVersion()); + processDefine.setReleaseState(subProcess.getReleaseState()); + processDefine.setProjectId(targetProject.getId()); + processDefine.setUserId(loginUser.getId()); + processDefine.setProcessDefinitionJson(subProcessJson); + processDefine.setDescription(subProcess.getDescription()); + processDefine.setLocations(subProcess.getLocations()); + processDefine.setConnects(subProcess.getConnects()); + processDefine.setTimeout(subProcess.getTimeout()); +
// End of importSubProcess: after inserting the copied sub-process the task node's params are
// patched with the new definition id (re-queried by name since insert's key-fill is not relied on).
// Then checkProcessNodeList: validates parsed ProcessData (null data, null task list, DAG cycle via
// graphHasCycle, and per-node parameter checks) — its body continues on the next span.
processDefine.setTenantId(subProcess.getTenantId()); + processDefine.setGlobalParams(subProcess.getGlobalParams()); + processDefine.setCreateTime(now); + processDefine.setUpdateTime(now); + processDefine.setFlag(subProcess.getFlag()); + processDefine.setReceivers(subProcess.getReceivers()); + processDefine.setReceiversCc(subProcess.getReceiversCc()); + processDefineMapper.insert(processDefine); + + logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName()); + + //modify task node + ProcessDefinition newSubProcessDefine = processDefineMapper.queryByDefineName(processDefine.getProjectId(), processDefine.getName()); + + if (null != newSubProcessDefine) { + subProcessIdMap.put(subProcessId, newSubProcessDefine.getId()); + subParams.put(PROCESSDEFINITIONID, newSubProcessDefine.getId()); + taskNode.set("params", subParams); + } + } + } + } + + + /** + * check the process definition node meets the specifications + * + * @param processData process data + * @param processDefinitionJson process definition json + * @return check result code + */ + public Map checkProcessNodeList(ProcessData processData, String processDefinitionJson) { + + Map result = new HashMap<>(5); + try { + if (processData == null) { + logger.error("process data is null"); + putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson); + return result; + } + + // Check whether the task node is normal + List taskNodes = processData.getTasks(); + + if (taskNodes == null) { + logger.error("process node info is empty"); + putMsg(result, Status.DATA_IS_NULL, processDefinitionJson); + return result; + } + + // check has cycle + if (graphHasCycle(taskNodes)) { + logger.error("process DAG has cycle"); + putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); + return result; + } + + // check whether the process definition json is normal + for (TaskNode taskNode : taskNodes) { + if (!CheckUtils.checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) { +
// checkProcessNodeList tail: per-node parameter failure short-circuits; any thrown exception is
// converted to REQUEST_PARAMS_NOT_VALID_ERROR with the exception message (broad catch — deliberate
// "validation never throws" contract, keep as-is).
// getTaskNodeListByDefinitionId: returns the task node list for one definition; note it DOES
// null-check the parsed ProcessData before use.
logger.error("task node {} parameter invalid", taskNode.getName()); + putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); + return result; + } + + // check extra params + CheckUtils.checkOtherParams(taskNode.getExtras()); + } + putMsg(result, Status.SUCCESS); + } catch (Exception e) { + result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + result.put(Constants.MSG, e.getMessage()); + } + return result; + } + + /** + * get task node details based on process definition + * + * @param defineId define id + * @return task node list + */ + public Map getTaskNodeListByDefinitionId(Integer defineId) { + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); + if (processDefinition == null) { + logger.info("process define not exists"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineId); + return result; + } + + + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + //process data check + if (null == processData) { + logger.error("process data is null"); + putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson); + return result; + } + + List taskNodeList = (processData.getTasks() == null) ?
// getTaskNodeListByDefinitionIdList: batch variant keyed by definition id.
// NOTE(review): unlike the single-id method above, this loop does NOT null-check processData before
// calling processData.getTasks() — a definition whose JSON fails to parse would NPE here; flag for
// the same guard as above. Integer.parseInt on each id also throws on malformed input (unvalidated).
new ArrayList<>() : processData.getTasks(); + + result.put(Constants.DATA_LIST, taskNodeList); + putMsg(result, Status.SUCCESS); + + return result; + + } + + /** + * get task node details based on process definition + * + * @param defineIdList define id list + * @return task node list + */ + public Map getTaskNodeListByDefinitionIdList(String defineIdList) { + Map result = new HashMap<>(); + + Map> taskNodeMap = new HashMap<>(); + String[] idList = defineIdList.split(","); + List idIntList = new ArrayList<>(); + for (String definitionId : idList) { + idIntList.add(Integer.parseInt(definitionId)); + } + Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]); + List processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray); + if (CollectionUtils.isEmpty(processDefinitionList)) { + logger.info("process definition not exists"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList); + return result; + } + + for (ProcessDefinition processDefinition : processDefinitionList) { + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + List taskNodeList = (processData.getTasks() == null) ?
// queryProcessDefinitionAllByProjectId: unauthenticated project-wide definition list (no
// checkProjectAndAuth here, unlike the name-based variant — presumably an internal endpoint, verify).
// viewTree is TRUNCATED at the end of this chunk; visible bug worth flagging anyway: when
// processDefinition is null it calls putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,
// processDefinition) — passing the known-null object instead of processId for the message argument.
new ArrayList<>() : processData.getTasks(); + taskNodeMap.put(processDefinition.getId(), taskNodeList); + } + + result.put(Constants.DATA_LIST, taskNodeMap); + putMsg(result, Status.SUCCESS); + + return result; + + } + + + /** + * query process definition all by project id + * + * @param projectId project id + * @return process definitions in the project + */ + public Map queryProcessDefinitionAllByProjectId(Integer projectId) { + + HashMap result = new HashMap<>(5); + + List resourceList = processDefineMapper.queryAllDefinitionList(projectId); + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * Encapsulates the TreeView structure + * + * @param processId process definition id + * @param limit limit + * @return tree view json data + * @throws Exception exception + */ + public Map viewTree(Integer processId, Integer limit) throws Exception { + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (null == processDefinition) { + logger.info("process define not exists"); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition); + return result; + } + DAG dag = genDagGraph(processDefinition); + /** + * nodes that is running + */ + Map> runningNodeMap = new ConcurrentHashMap<>(); + + /** + * nodes that is waiting torun + */ + Map> waitingRunningNodeMap = new ConcurrentHashMap<>(); + + /** + * List of process instances + */ + List processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit); + + for (ProcessInstance processInstance : processInstanceList) { + processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime())); + } + + if (limit > processInstanceList.size()) { + limit = processInstanceList.size(); + } + + TreeViewDto parentTreeViewDto = new TreeViewDto(); + parentTreeViewDto.setName("DAG"); + parentTreeViewDto.setType(""); + // Specify the process
definition, because it is a TreeView for a process definition + + for (int i = limit - 1; i >= 0; i--) { + ProcessInstance processInstance = processInstanceList.get(i); + + Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime(); + parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString() + , processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); + } + + List parentTreeViewDtoList = new ArrayList<>(); + parentTreeViewDtoList.add(parentTreeViewDto); + // Here is the encapsulation task instance + for (String startNode : dag.getBeginNode()) { + runningNodeMap.put(startNode, parentTreeViewDtoList); + } + + while (Stopper.isRunning()) { + Set postNodeList = null; + Iterator>> iter = runningNodeMap.entrySet().iterator(); + while (iter.hasNext()) { + Map.Entry> en = iter.next(); + String nodeName = en.getKey(); + parentTreeViewDtoList = en.getValue(); + + TreeViewDto treeViewDto = new TreeViewDto(); + treeViewDto.setName(nodeName); + TaskNode taskNode = dag.getNode(nodeName); + treeViewDto.setType(taskNode.getType()); + + + //set treeViewDto instances + for (int i = limit - 1; i >= 0; i--) { + ProcessInstance processInstance = processInstanceList.get(i); + TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName); + if (taskInstance == null) { + treeViewDto.getInstances().add(new Instance(-1, "not running", "null")); + } else { + Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); + Date endTime = taskInstance.getEndTime() == null ? 
new Date() : taskInstance.getEndTime(); + + int subProcessId = 0; + /** + * if process is sub process, the return sub id, or sub id=0 + */ + if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) { + String taskJson = taskInstance.getTaskJson(); + taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); + subProcessId = Integer.parseInt(JSONUtils.parseObject( + taskNode.getParams()).path(CMDPARAM_SUB_PROCESS_DEFINE_ID).asText()); + } + treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString() + , taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); + } + } + for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) { + pTreeViewDto.getChildren().add(treeViewDto); + } + postNodeList = dag.getSubsequentNodes(nodeName); + if (CollectionUtils.isNotEmpty(postNodeList)) { + for (String nextNodeName : postNodeList) { + List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); + if (CollectionUtils.isNotEmpty(treeViewDtoList)) { + treeViewDtoList.add(treeViewDto); + waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); + } else { + treeViewDtoList = new ArrayList<>(); + treeViewDtoList.add(treeViewDto); + waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); + } + } + } + runningNodeMap.remove(nodeName); + } + if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { + break; + } else { + runningNodeMap.putAll(waitingRunningNodeMap); + waitingRunningNodeMap.clear(); + } + } + result.put(Constants.DATA_LIST, parentTreeViewDto); + result.put(Constants.STATUS, Status.SUCCESS); + result.put(Constants.MSG, Status.SUCCESS.getMsg()); + return result; + } + + + /** + * Generate the DAG Graph based on the process definition id + * + * @param processDefinition process definition + * @return dag graph + */ + private DAG 
genDagGraph(ProcessDefinition processDefinition) { + + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + //check process data + if (null != processData) { + List taskNodeList = processData.getTasks(); + processDefinition.setGlobalParamList(processData.getGlobalParams()); + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); + + // Generate concrete Dag to be executed + return DagHelper.buildDagGraph(processDag); + } + + return new DAG<>(); + } + + + /** + * whether the graph has a ring + * + * @param taskNodeResponseList task node response list + * @return if graph has cycle flag + */ + private boolean graphHasCycle(List taskNodeResponseList) { + DAG graph = new DAG<>(); + + // Fill the vertices + for (TaskNode taskNodeResponse : taskNodeResponseList) { + graph.addNode(taskNodeResponse.getName(), taskNodeResponse); + } + + // Fill edge relations + for (TaskNode taskNodeResponse : taskNodeResponseList) { + taskNodeResponse.getPreTasks(); + List preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class); + if (CollectionUtils.isNotEmpty(preTasks)) { + for (String preTask : preTasks) { + if (!graph.addEdge(preTask, taskNodeResponse.getName())) { + return true; + } + } + } + } + + return graph.hasCycle(); + } + + private String recursionProcessDefinitionName(Integer projectId, String processDefinitionName, int num) { + ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(projectId, processDefinitionName); + if (processDefinition != null) { + if (num > 1) { + String str = processDefinitionName.substring(0, processDefinitionName.length() - 3); + processDefinitionName = str + "(" + num + ")"; + } else { + processDefinitionName = processDefinition.getName() + "(" + num + ")"; + } + } else { + return processDefinitionName; + } + return recursionProcessDefinitionName(projectId, processDefinitionName, 
num + 1); + } + + private Map copyProcessDefinition(User loginUser, + Integer processId, + Project targetProject) throws JsonProcessingException { + + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); + return result; + } else { + return createProcessDefinition( + loginUser, + targetProject.getName(), + processDefinition.getName() + "_copy_" + System.currentTimeMillis(), + processDefinition.getProcessDefinitionJson(), + processDefinition.getDescription(), + processDefinition.getLocations(), + processDefinition.getConnects()); + + } + } + + /** + * batch copy process definition + * @param loginUser loginUser + * @param projectName projectName + * @param processDefinitionIds processDefinitionIds + * @param targetProjectId targetProjectId + * @return + */ + @Override + public Map batchCopyProcessDefinition(User loginUser, + String projectName, + String processDefinitionIds, + int targetProjectId){ + Map result = new HashMap<>(); + List failedProcessList = new ArrayList<>(); + + if (StringUtils.isEmpty(processDefinitionIds)) { + putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds); + return result; + } + + //check src project auth + Map checkResult = checkProjectAndAuth(loginUser, projectName); + if (checkResult != null) { + return checkResult; + } + + Project targetProject = projectMapper.queryDetailById(targetProjectId); + if(targetProject == null){ + putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId); + return result; + } + + if(!(targetProject.getName()).equals(projectName)){ + Map checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName()); + if (checkTargetProjectResult != null) { + return checkTargetProjectResult; + } + } + + String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA); + doBatchCopyProcessDefinition(loginUser, 
targetProject, failedProcessList, processDefinitionIdList); + + checkBatchOperateResult(projectName,targetProject.getName(),result,failedProcessList,true); + + return result; + } + + /** + * batch move process definition + * @param loginUser loginUser + * @param projectName projectName + * @param processDefinitionIds processDefinitionIds + * @param targetProjectId targetProjectId + * @return + */ + @Override + public Map batchMoveProcessDefinition(User loginUser, + String projectName, + String processDefinitionIds, + int targetProjectId) { + Map result = new HashMap<>(); + List failedProcessList = new ArrayList<>(); + + //check src project auth + Map checkResult = checkProjectAndAuth(loginUser, projectName); + if (checkResult != null) { + return checkResult; + } + + if (StringUtils.isEmpty(processDefinitionIds)) { + putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds); + return result; + } + + Project targetProject = projectMapper.queryDetailById(targetProjectId); + if(targetProject == null){ + putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId); + return result; + } + + if(!(targetProject.getName()).equals(projectName)){ + Map checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName()); + if (checkTargetProjectResult != null) { + return checkTargetProjectResult; + } + } + + String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA); + doBatchMoveProcessDefinition(targetProject, failedProcessList, processDefinitionIdList); + + checkBatchOperateResult(projectName,targetProject.getName(),result,failedProcessList,false); + + return result; + } + + /** + * do batch move process definition + * @param targetProject targetProject + * @param failedProcessList failedProcessList + * @param processDefinitionIdList processDefinitionIdList + */ + private void doBatchMoveProcessDefinition(Project targetProject, List failedProcessList, String[] processDefinitionIdList) { + for(String 
processDefinitionId:processDefinitionIdList){ + try { + Map moveProcessDefinitionResult = + moveProcessDefinition(Integer.valueOf(processDefinitionId),targetProject); + if (!Status.SUCCESS.equals(moveProcessDefinitionResult.get(Constants.STATUS))) { + setFailedProcessList(failedProcessList, processDefinitionId); + logger.error((String) moveProcessDefinitionResult.get(Constants.MSG)); + } + } catch (Exception e) { + setFailedProcessList(failedProcessList, processDefinitionId); + } + } + } + + /** + * batch copy process definition + * @param loginUser loginUser + * @param targetProject targetProject + * @param failedProcessList failedProcessList + * @param processDefinitionIdList processDefinitionIdList + */ + private void doBatchCopyProcessDefinition(User loginUser, Project targetProject, List failedProcessList, String[] processDefinitionIdList) { + for(String processDefinitionId:processDefinitionIdList){ + try { + Map copyProcessDefinitionResult = + copyProcessDefinition(loginUser,Integer.valueOf(processDefinitionId),targetProject); + if (!Status.SUCCESS.equals(copyProcessDefinitionResult.get(Constants.STATUS))) { + setFailedProcessList(failedProcessList, processDefinitionId); + logger.error((String) copyProcessDefinitionResult.get(Constants.MSG)); + } + } catch (Exception e) { + setFailedProcessList(failedProcessList, processDefinitionId); + } + } + } + + /** + * set failed processList + * @param failedProcessList failedProcessList + * @param processDefinitionId processDefinitionId + */ + private void setFailedProcessList(List failedProcessList, String processDefinitionId) { + ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(Integer.valueOf(processDefinitionId)); + if(processDefinition != null){ + failedProcessList.add(processDefinitionId+"["+processDefinition.getName()+"]"); + }else{ + failedProcessList.add(processDefinitionId+"[null]"); + } + } + + /** + * check project and auth + * @param loginUser loginUser + * @param projectName 
projectName + * @return + */ + private Map checkProjectAndAuth(User loginUser, String projectName) { + Project project = projectMapper.queryByName(projectName); + + //check user access for project + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + return null; + } + + /** + * move process definition + * @param processId processId + * @param targetProject targetProject + * @return move result code + */ + private Map moveProcessDefinition(Integer processId, + Project targetProject) { + + Map result = new HashMap<>(); + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); + return result; + } else { + processDefinition.setProjectId(targetProject.getId()); + processDefinition.setUpdateTime(new Date()); + if (processDefineMapper.updateById(processDefinition) > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); + } + return result; + } + } + + /** + * check batch operate result + * @param srcProjectName srcProjectName + * @param targetProjectName targetProjectName + * @param result result + * @param failedProcessList failedProcessList + * @param isCopy isCopy + */ + private void checkBatchOperateResult(String srcProjectName,String targetProjectName, + Map result, List failedProcessList,boolean isCopy) { + if (!failedProcessList.isEmpty()) { + if(isCopy){ + putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName,String.join(",", failedProcessList)); + }else{ + putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName,String.join(",", failedProcessList)); + } + } else { + putMsg(result, Status.SUCCESS); + } + } + +} + diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java new file mode 100644 index 0000000000..395da6027f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -0,0 +1,443 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.ProjectService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; + +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * project service implement + **/ +@Service +public class ProjectServiceImpl extends BaseService implements ProjectService { + + @Autowired + private ProjectMapper projectMapper; + + @Autowired + private ProjectUserMapper projectUserMapper; + + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + /** + * create project + * + * @param loginUser login user + * @param name project name + * @param desc description + * @return returns an error if it exists + */ + public Map createProject(User loginUser, String name, String desc) { + + Map result = new HashMap<>(); + Map descCheck = checkDesc(desc); + if 
(descCheck.get(Constants.STATUS) != Status.SUCCESS) { + return descCheck; + } + + Project project = projectMapper.queryByName(name); + if (project != null) { + putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); + return result; + } + + Date now = new Date(); + + project = Project + .newBuilder() + .name(name) + .description(desc) + .userId(loginUser.getId()) + .userName(loginUser.getUserName()) + .createTime(now) + .updateTime(now) + .build(); + + if (projectMapper.insert(project) > 0) { + Project insertedProject = projectMapper.queryByName(name); + result.put(Constants.DATA_LIST, insertedProject); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.CREATE_PROJECT_ERROR); + } + return result; + } + + /** + * query project details by id + * + * @param projectId project id + * @return project detail information + */ + public Map queryById(Integer projectId) { + + Map result = new HashMap<>(); + Project project = projectMapper.selectById(projectId); + + if (project != null) { + result.put(Constants.DATA_LIST, project); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); + } + return result; + } + + /** + * check project and authorization + * + * @param loginUser login user + * @param project project + * @param projectName project name + * @return true if the login user have permission to see the project + */ + public Map checkProjectAndAuth(User loginUser, Project project, String projectName) { + Map result = new HashMap<>(); + if (project == null) { + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + } else if (!checkReadPermission(loginUser, project)) { + // check read permission + putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); + } else { + putMsg(result, Status.SUCCESS); + } + return result; + } + + public boolean hasProjectAndPerm(User loginUser, Project project, Map result) { + boolean checkResult = false; + if (project == null) { + 
putMsg(result, Status.PROJECT_NOT_FOUNT, ""); + } else if (!checkReadPermission(loginUser, project)) { + putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName()); + } else { + checkResult = true; + } + return checkResult; + } + + /** + * admin can view all projects + * + * @param loginUser login user + * @param searchVal search value + * @param pageSize page size + * @param pageNo page number + * @return project list which the login user have permission to see + */ + public Map queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { + Map result = new HashMap<>(); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + + Page page = new Page<>(pageNo, pageSize); + + int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId(); + IPage projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal); + + List projectList = projectIPage.getRecords(); + if (userId != 0) { + for (Project project : projectList) { + project.setPerm(Constants.DEFAULT_ADMIN_PERMISSION); + } + } + pageInfo.setTotalCount((int) projectIPage.getTotal()); + pageInfo.setLists(projectList); + result.put(Constants.COUNT, (int) projectIPage.getTotal()); + result.put(Constants.DATA_LIST, pageInfo); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * delete project by id + * + * @param loginUser login user + * @param projectId project id + * @return delete result code + */ + public Map deleteProject(User loginUser, Integer projectId) { + Map result = new HashMap<>(); + Project project = projectMapper.selectById(projectId); + Map checkResult = getCheckResult(loginUser, project); + if (checkResult != null) { + return checkResult; + } + + if (!hasPerm(loginUser, project.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + + List processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId); + + if 
(!processDefinitionList.isEmpty()) { + putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); + return result; + } + int delete = projectMapper.deleteById(projectId); + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_PROJECT_ERROR); + } + return result; + } + + /** + * get check result + * + * @param loginUser login user + * @param project project + * @return check result + */ + private Map getCheckResult(User loginUser, Project project) { + String projectName = project == null ? null : project.getName(); + Map checkResult = checkProjectAndAuth(loginUser, project, projectName); + Status status = (Status) checkResult.get(Constants.STATUS); + if (status != Status.SUCCESS) { + return checkResult; + } + return null; + } + + /** + * updateProcessInstance project + * + * @param loginUser login user + * @param projectId project id + * @param projectName project name + * @param desc description + * @return update result code + */ + public Map update(User loginUser, Integer projectId, String projectName, String desc) { + Map result = new HashMap<>(); + + Map descCheck = checkDesc(desc); + if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { + return descCheck; + } + + Project project = projectMapper.selectById(projectId); + boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result); + if (!hasProjectAndPerm) { + return result; + } + Project tempProject = projectMapper.queryByName(projectName); + if (tempProject != null && tempProject.getId() != projectId) { + putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName); + return result; + } + project.setName(projectName); + project.setDescription(desc); + project.setUpdateTime(new Date()); + + int update = projectMapper.updateById(project); + if (update > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.UPDATE_PROJECT_ERROR); + } + return result; + } + + + /** + * query unauthorized project + * + * @param loginUser login user + * 
@param userId user id + * @return the projects which user have not permission to see + */ + public Map queryUnauthorizedProject(User loginUser, Integer userId) { + Map result = new HashMap<>(); + if (checkAdmin(loginUser, result)) { + return result; + } + /** + * query all project list except specified userId + */ + List projectList = projectMapper.queryProjectExceptUserId(userId); + List resultList = new ArrayList<>(); + Set projectSet = null; + if (projectList != null && !projectList.isEmpty()) { + projectSet = new HashSet<>(projectList); + + List authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId); + + resultList = getUnauthorizedProjects(projectSet, authedProjectList); + } + result.put(Constants.DATA_LIST, resultList); + putMsg(result, Status.SUCCESS); + return result; + } + + /** + * get unauthorized project + * + * @param projectSet project set + * @param authedProjectList authed project list + * @return project list that authorization + */ + private List getUnauthorizedProjects(Set projectSet, List authedProjectList) { + List resultList; + Set authedProjectSet = null; + if (authedProjectList != null && !authedProjectList.isEmpty()) { + authedProjectSet = new HashSet<>(authedProjectList); + projectSet.removeAll(authedProjectSet); + + } + resultList = new ArrayList<>(projectSet); + return resultList; + } + + + /** + * query authorized project + * + * @param loginUser login user + * @param userId user id + * @return projects which the user have permission to see, Except for items created by this user + */ + public Map queryAuthorizedProject(User loginUser, Integer userId) { + Map result = new HashMap<>(); + + if (checkAdmin(loginUser, result)) { + return result; + } + + List projects = projectMapper.queryAuthedProjectListByUserId(userId); + result.put(Constants.DATA_LIST, projects); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * query authorized project + * + * @param loginUser login user + * @return projects which the 
user have permission to see, Except for items created by this user + */ + public Map queryProjectCreatedByUser(User loginUser) { + Map result = new HashMap<>(); + + if (checkAdmin(loginUser, result)) { + return result; + } + + List projects = projectMapper.queryProjectCreatedByUser(loginUser.getId()); + result.put(Constants.DATA_LIST, projects); + putMsg(result, Status.SUCCESS); + + return result; + } + + /** + * check whether have read permission + * + * @param user user + * @param project project + * @return true if the user have permission to see the project, otherwise return false + */ + private boolean checkReadPermission(User user, Project project) { + int permissionId = queryPermission(user, project); + return (permissionId & Constants.READ_PERMISSION) != 0; + } + + /** + * query permission id + * + * @param user user + * @param project project + * @return permission + */ + private int queryPermission(User user, Project project) { + if (user.getUserType() == UserType.ADMIN_USER) { + return Constants.READ_PERMISSION; + } + + if (project.getUserId() == user.getId()) { + return Constants.ALL_PERMISSIONS; + } + + ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId()); + + if (projectUser == null) { + return 0; + } + + return projectUser.getPerm(); + + } + + /** + * query all project list that have one or more process definitions. 
+ * + * @return project list + */ + public Map queryAllProjectList() { + Map result = new HashMap<>(); + List projects = projectMapper.selectList(null); + List processDefinitions = processDefinitionMapper.selectList(null); + if (projects != null) { + Set set = new HashSet<>(); + for (ProcessDefinition processDefinition : processDefinitions) { + set.add(processDefinition.getProjectId()); + } + List tempDeletelist = new ArrayList<>(); + for (Project project : projects) { + if (!set.contains(project.getId())) { + tempDeletelist.add(project); + } + } + projects.removeAll(tempDeletelist); + } + result.put(Constants.DATA_LIST, projects); + putMsg(result, Status.SUCCESS); + return result; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java new file mode 100644 index 0000000000..8aaefdadff --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.api.controller.BaseController; +import org.apache.dolphinscheduler.api.service.BaseService; +import org.apache.dolphinscheduler.api.service.SessionService; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.dao.entity.Session; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.SessionMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +/** + * session service implement + */ +@Service +public class SessionServiceImpl extends BaseService implements SessionService { + + private static final Logger logger = LoggerFactory.getLogger(SessionService.class); + + @Autowired + private SessionMapper sessionMapper; + + /** + * get user session from request + * + * @param request request + * @return session + */ + public Session getSession(HttpServletRequest request) { + String sessionId = request.getHeader(Constants.SESSION_ID); + + if (StringUtils.isBlank(sessionId)) { + Cookie cookie = getCookie(request, Constants.SESSION_ID); + + if (cookie != null) { + sessionId = cookie.getValue(); + } + } + + if (StringUtils.isBlank(sessionId)) { + return null; + } + + String ip = BaseController.getClientIpAddress(request); + logger.debug("get session: {}, ip: {}", sessionId, ip); + + return sessionMapper.selectById(sessionId); + } + + /** + * create session + * + * @param user user + * @param ip ip + * @return session string + */ + 
@Transactional(rollbackFor = RuntimeException.class) + public String createSession(User user, String ip) { + Session session = null; + + // logined + List sessionList = sessionMapper.queryByUserId(user.getId()); + + Date now = new Date(); + + /** + * if you have logged in and are still valid, return directly + */ + if (CollectionUtils.isNotEmpty(sessionList)) { + // is session list greater 1 , delete other ,get one + if (sessionList.size() > 1) { + for (int i = 1; i < sessionList.size(); i++) { + sessionMapper.deleteById(sessionList.get(i).getId()); + } + } + session = sessionList.get(0); + if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) { + /** + * updateProcessInstance the latest login time + */ + session.setLastLoginTime(now); + sessionMapper.updateById(session); + + return session.getId(); + + } else { + /** + * session expired, then delete this session first + */ + sessionMapper.deleteById(session.getId()); + } + } + + // assign new session + session = new Session(); + + session.setId(UUID.randomUUID().toString()); + session.setIp(ip); + session.setUserId(user.getId()); + session.setLastLoginTime(now); + + sessionMapper.insert(session); + + return session.getId(); + } + + /** + * sign out + * remove ip restrictions + * + * @param ip no use + * @param loginUser login user + */ + public void signOut(String ip, User loginUser) { + try { + /** + * query session by user id and ip + */ + Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(), ip); + + //delete session + sessionMapper.deleteById(session.getId()); + } catch (Exception e) { + logger.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip); + } + } + +} diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index c8e48ad865..2005d05fde 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ 
b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id PROCESS_DEFINITION_IDS=process definition ids RELEASE_PROCESS_DEFINITION_NOTES=release process definition QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id -COPY_PROCESS_DEFINITION_NOTES=copy process definition QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list @@ -253,4 +252,9 @@ AUTHORIZED_DATA_SOURCE_NOTES=authorized data source DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id -BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids \ No newline at end of file +BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids +QUERY_USER_CREATED_PROJECT_NOTES= query user created project +COPY_PROCESS_DEFINITION_NOTES= copy process definition notes +MOVE_PROCESS_DEFINITION_NOTES= move process definition notes +TARGET_PROJECT_ID= target project id +IS_COPY = is copy diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 0669e8d8cf..2e079285ca 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -173,7 +173,6 @@ PROCESS_DEFINITION_ID=process definition id PROCESS_DEFINITION_IDS=process definition ids RELEASE_PROCESS_DEFINITION_NOTES=release process definition QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id -COPY_PROCESS_DEFINITION_NOTES=copy process definition QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list 
QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list @@ -254,3 +253,8 @@ DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids +QUERY_USER_CREATED_PROJECT_NOTES= query user created project +COPY_PROCESS_DEFINITION_NOTES= copy process definition notes +MOVE_PROCESS_DEFINITION_NOTES= move process definition notes +TARGET_PROJECT_ID= target project id +IS_COPY = is copy diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index 9053b0924c..31f70ef40c 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -171,7 +171,6 @@ UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义 PROCESS_DEFINITION_ID=流程定义ID RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义 QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID -COPY_PROCESS_DEFINITION_NOTES=复制流程定义 QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表 QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义 @@ -252,4 +251,9 @@ DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据 QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表 EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义 BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义 +QUERY_USER_CREATED_PROJECT_NOTES= 查询用户创建的项目 +COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义 +MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义 +TARGET_PROJECT_ID= 目标项目ID +IS_COPY = 是否复制 diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java index 8c0d04c6c6..ab0a3ce1f7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -18,16 +18,18 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; +import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.User; -import org.junit.*; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; @@ -47,7 +49,7 @@ import java.util.Map; * process definition controller test */ @RunWith(MockitoJUnitRunner.Silent.class) -public class ProcessDefinitionControllerTest{ +public class ProcessDefinitionControllerTest { private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class); @@ -55,7 +57,7 @@ public class ProcessDefinitionControllerTest{ private ProcessDefinitionController processDefinitionController; @Mock - private ProcessDefinitionService processDefinitionService; + private ProcessDefinitionServiceImpl processDefinitionService; protected User user; @@ -78,7 +80,7 @@ public 
class ProcessDefinitionControllerTest{ String name = "dag_test"; String description = "desc test"; String connects = "[]"; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); result.put("processDefinitionId",1); @@ -102,7 +104,7 @@ public class ProcessDefinitionControllerTest{ @Test public void testVerifyProcessDefinitionName() throws Exception { - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROCESS_INSTANCE_EXIST); String projectName = "test"; String name = "dag_test"; @@ -124,7 +126,7 @@ public class ProcessDefinitionControllerTest{ String description = "desc test"; String connects = "[]"; int id = 1; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); result.put("processDefinitionId",1); @@ -140,7 +142,7 @@ public class ProcessDefinitionControllerTest{ public void testReleaseProcessDefinition() throws Exception { String projectName = "test"; int id = 1; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result); @@ -168,7 +170,7 @@ public class ProcessDefinitionControllerTest{ processDefinition.setName(name); processDefinition.setProcessDefinitionJson(json); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); @@ -179,16 +181,33 @@ public class ProcessDefinitionControllerTest{ } @Test - public void testCopyProcessDefinition() throws Exception { + public void testBatchCopyProcessDefinition() throws Exception { String projectName = "test"; - int id = 1; + int targetProjectId = 2; + String id = "1"; + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); + + 
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result); + Result response = processDefinitionController.copyProcessDefinition(user, projectName,id,targetProjectId); + + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + } + + @Test + public void testBatchMoveProcessDefinition() throws Exception { + + String projectName = "test"; + int targetProjectId = 2; + String id = "1"; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); - Mockito.when(processDefinitionService.copyProcessDefinition(user, projectName,id)).thenReturn(result); - Result response = processDefinitionController.copyProcessDefinition(user, projectName,id); + Mockito.when(processDefinitionService.batchMoveProcessDefinition(user,projectName,id,targetProjectId)).thenReturn(result); + Result response = processDefinitionController.moveProcessDefinition(user, projectName,id,targetProjectId); Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } @@ -200,7 +219,7 @@ public class ProcessDefinitionControllerTest{ String projectName = "test"; List resourceList = getDefinitionList(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, resourceList); @@ -255,7 +274,7 @@ public class ProcessDefinitionControllerTest{ String projectName = "test"; int id = 1; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result); @@ -269,7 +288,7 @@ public class ProcessDefinitionControllerTest{ String projectName = "test"; int id = 1; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result); @@ -283,7 +302,7 @@ public 
class ProcessDefinitionControllerTest{ String projectName = "test"; String idList = "1,2,3"; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result); @@ -342,9 +361,7 @@ public class ProcessDefinitionControllerTest{ String processDefinitionIds = "1,2"; String projectName = "test"; HttpServletResponse response = new MockHttpServletResponse(); - ProcessDefinitionService service = new ProcessDefinitionService(); - ProcessDefinitionService spy = Mockito.spy(service); - Mockito.doNothing().when(spy).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response); + Mockito.doNothing().when(this.processDefinitionService).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response); processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java index 5189097e68..bdd762afa8 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceControllerTest.java @@ -16,29 +16,27 @@ */ package org.apache.dolphinscheduler.api.controller; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + import org.apache.dolphinscheduler.api.enums.Status; import 
org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; + import org.junit.Assert; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MvcResult; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; - /** * process instance controller test */ public class ProcessInstanceControllerTest extends AbstractControllerTest { - private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class); @Test public void testQueryProcessInstanceList() throws Exception { @@ -52,31 +50,30 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest { paramsMap.add("pageNo", "2"); paramsMap.add("pageSize", "2"); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/list-paging", "cxc_1113") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertNotNull(result); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void 
testQueryTaskListByProcessId() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id", "cxc_1113") .header(SESSION_ID, sessionId) - .param("processInstanceId","1203")) + .param("processInstanceId", "1203")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - assert result != null; - Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT.getCode(), result.getCode().intValue()); } @Test @@ -91,110 +88,108 @@ public class ProcessInstanceControllerTest extends AbstractControllerTest { paramsMap.add("syncDefine", "false"); paramsMap.add("locations", locations); paramsMap.add("connects", "[]"); -// paramsMap.add("flag", "2"); - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update","cxc_1113") + MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/instance/update", "cxc_1113") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertNotNull(result); Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); } @Test public void testQueryProcessInstanceById() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-by-id","cxc_1113") + MvcResult mvcResult = 
mockMvc.perform(get("/projects/{projectName}/instance/select-by-id", "cxc_1113") .header(SESSION_ID, sessionId) - .param("processInstanceId","1203")) + .param("processInstanceId", "1203")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } - @Test public void testQuerySubProcessInstanceByTaskId() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-sub-process", "cxc_1113") .header(SESSION_ID, sessionId) - .param("taskId","1203")) + .param("taskId", "1203")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS.getCode(), result.getCode().intValue()); } @Test public void testQueryParentInstanceBySubId() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/select-parent-process", "cxc_1113") .header(SESSION_ID, sessionId) - .param("subId","1204")) + .param("subId", "1204")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) 
.andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE.getCode(), result.getCode().intValue()); } @Test public void testViewVariables() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/view-variables", "cxc_1113") .header(SESSION_ID, sessionId) - .param("processInstanceId","1204")) + .param("processInstanceId", "1204")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } @Test public void testDeleteProcessInstanceById() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/delete", "cxc_1113") .header(SESSION_ID, sessionId) - .param("processInstanceId","1204")) + .param("processInstanceId", "1204")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + 
Assert.assertNotNull(result); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } @Test public void testBatchDeleteProcessInstanceByIds() throws Exception { - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete","cxc_1113") + MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/batch-delete", "cxc_1113") .header(SESSION_ID, sessionId) - .param("processInstanceIds","1205,1206")) + .param("processInstanceIds", "1205,1206")) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertNotNull(result); + Assert.assertEquals(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), result.getCode().intValue()); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java index fc86632ed7..e6796d8c47 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -285,6 +285,21 @@ public class UsersControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + } + + @Test + public void testActivateUser() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userName","user_test"); + + 
MvcResult mvcResult = mockMvc.perform(post("/users/activate") + .header(SESSION_ID, sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java index f388445f0c..f5543487ea 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java @@ -16,10 +16,12 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import java.util.Calendar; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.AccessTokenServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; @@ -27,9 +29,14 @@ import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.AccessToken; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper; -import org.junit.After; + +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.List; +import java.util.Map; + import org.junit.Assert; -import 
org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; @@ -38,131 +45,109 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; -import java.util.Map; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.class) public class AccessTokenServiceTest { - private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceTest.class); - @InjectMocks - private AccessTokenService accessTokenService ; + private AccessTokenServiceImpl accessTokenService; @Mock private AccessTokenMapper accessTokenMapper; - @Before - public void setUp() { - - } - - - @After - public void after(){ - - } - - @Test - public void testQueryAccessTokenList(){ + @SuppressWarnings("unchecked") + public void testQueryAccessTokenList() { IPage tokenPage = new Page<>(); tokenPage.setRecords(getList()); tokenPage.setTotal(1L); - when(accessTokenMapper.selectAccessTokenPage(any(Page.class),eq("zhangsan"),eq(0))).thenReturn(tokenPage); + when(accessTokenMapper.selectAccessTokenPage(any(Page.class), eq("zhangsan"), eq(0))).thenReturn(tokenPage); - User user =new User(); - Map result = accessTokenService.queryAccessTokenList(user,"zhangsan",1,10); + User user = new User(); + Map result = accessTokenService.queryAccessTokenList(user, "zhangsan", 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); - Assert.assertTrue(pageInfo.getTotalCount()>0); + Assert.assertTrue(pageInfo.getTotalCount() > 0); } @Test - 
public void testCreateToken(){ - + public void testCreateToken() { - when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2); - Map result = accessTokenService.createToken(1,getDate(),"AccessTokenServiceTest"); + when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2); + Map result = accessTokenService.createToken(1, getDate(), "AccessTokenServiceTest"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test - public void testGenerateToken(){ + public void testGenerateToken() { - Map result = accessTokenService.generateToken(Integer.MAX_VALUE,getDate()); + Map result = accessTokenService.generateToken(Integer.MAX_VALUE, getDate()); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); String token = (String) result.get(Constants.DATA_LIST); Assert.assertNotNull(token); } @Test - public void testDelAccessTokenById(){ + public void testDelAccessTokenById() { when(accessTokenMapper.selectById(1)).thenReturn(getEntity()); User userLogin = new User(); // not exist - Map result = accessTokenService.delAccessTokenById(userLogin,0); + Map result = accessTokenService.delAccessTokenById(userLogin, 0); logger.info(result.toString()); - Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS)); + Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS)); // no operate - result = accessTokenService.delAccessTokenById(userLogin,1); + result = accessTokenService.delAccessTokenById(userLogin, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //success userLogin.setId(1); userLogin.setUserType(UserType.ADMIN_USER); - result = 
accessTokenService.delAccessTokenById(userLogin,1); + result = accessTokenService.delAccessTokenById(userLogin, 1); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test - public void testUpdateToken(){ + public void testUpdateToken() { when(accessTokenMapper.selectById(1)).thenReturn(getEntity()); - Map result = accessTokenService.updateToken(1,Integer.MAX_VALUE,getDate(),"token"); + Map result = accessTokenService.updateToken(1, Integer.MAX_VALUE, getDate(), "token"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); // not exist - result = accessTokenService.updateToken(2,Integer.MAX_VALUE,getDate(),"token"); + result = accessTokenService.updateToken(2, Integer.MAX_VALUE, getDate(), "token"); logger.info(result.toString()); - Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST,result.get(Constants.STATUS)); + Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS)); } /** * create entity - * @return */ - private AccessToken getEntity(){ + private AccessToken getEntity() { AccessToken accessToken = new AccessToken(); accessToken.setId(1); accessToken.setUserId(1); accessToken.setToken("AccessTokenServiceTest"); - Date date = DateUtils.add(new Date(),Calendar.DAY_OF_MONTH, 30); + Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30); accessToken.setExpireTime(date); return accessToken; } /** * entity list - * @return */ - private List getList(){ + private List getList() { List list = new ArrayList<>(); list.add(getEntity()); @@ -170,13 +155,11 @@ public class AccessTokenServiceTest { } - /** * get dateStr - * @return */ - private String getDate(){ + private String getDate() { Date date = DateUtils.add(new Date(), Calendar.DAY_OF_MONTH, 30); - return DateUtils.dateToString(date); + return 
DateUtils.dateToString(date); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java index ab7dac4d60..1b93e86773 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java @@ -76,7 +76,7 @@ public class AlertGroupServiceTest { @Test - public void testQueryAlertgroup(){ + public void testQueryAlertGroup(){ Mockito.when(alertGroupMapper.queryAllGroupList()).thenReturn(getList()); HashMap result= alertGroupService.queryAlertgroup(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java deleted file mode 100644 index bb6e3882fe..0000000000 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/BaseDAGServiceTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.api.service; - -import org.apache.dolphinscheduler.common.graph.DAG; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.model.TaskNodeRelation; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; - -@RunWith(MockitoJUnitRunner.class) -public class BaseDAGServiceTest { - - @Test - public void testProcessInstance2DAG(){ - - ProcessInstance processInstance = new ProcessInstance(); - processInstance.setProcessInstanceJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-61567\"," + - "\"name\":\"开始\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + - "\"workerGroupId\":-1,\"preTasks\":[]},{\"type\":\"SHELL\",\"id\":\"tasks-6-3ug5ej\",\"name\":\"结束\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo '1'\"},\"description\":\"\"," + - "\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + - "\"workerGroupId\":-1,\"preTasks\":[\"开始\"]}],\"tenantId\":-1,\"timeout\":0}"); - - DAG relationDAG = BaseDAGService.processInstance2DAG(processInstance); - - Assert.assertTrue(relationDAG.containsNode("开始")); - - } -} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index 6a9e78600b..3d8ae91287 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -17,17 +17,29 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.CommandCount; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -36,25 +48,19 @@ import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; import 
org.powermock.modules.junit4.PowerMockRunner; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; @RunWith(PowerMockRunner.class) public class DataAnalysisServiceTest { - + @InjectMocks - private DataAnalysisService dataAnalysisService; + private DataAnalysisServiceImpl dataAnalysisService; @Mock ProjectMapper projectMapper; @Mock - ProjectService projectService; + ProjectServiceImpl projectService; @Mock ProcessInstanceMapper processInstanceMapper; @@ -71,13 +77,9 @@ public class DataAnalysisServiceTest { @Mock TaskInstanceMapper taskInstanceMapper; - - @Mock ProcessService processService; - private Project project; - private Map resultMap; private User user; @@ -86,26 +88,25 @@ public class DataAnalysisServiceTest { public void setUp() { user = new User(); - project = new Project(); + Project project = new Project(); project.setId(1); resultMap = new HashMap<>(); Mockito.when(projectMapper.selectById(1)).thenReturn(project); - Mockito.when(projectService.hasProjectAndPerm(user,project,resultMap)).thenReturn(true); + Mockito.when(projectService.hasProjectAndPerm(user, project, resultMap)).thenReturn(true); } @After - public void after(){ + public void after() { user = null; projectMapper = null; resultMap = null; } - @Test - public void testCountTaskStateByProject(){ + public void testCountTaskStateByProject() { String startDate = "2020-02-11 16:02:18"; String endDate = "2020-02-11 16:03:18"; @@ -120,42 +121,40 @@ public class DataAnalysisServiceTest { DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts()); result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } - @Test - public void testCountProcessInstanceStateByProject(){ + public void testCountProcessInstanceStateByProject() { String startDate = "2020-02-11 
16:02:18"; String endDate = "2020-02-11 16:03:18"; //checkProject false - Map result = dataAnalysisService.countProcessInstanceStateByProject(user,2,startDate,endDate); + Map result = dataAnalysisService.countProcessInstanceStateByProject(user, 2, startDate, endDate); Assert.assertTrue(result.isEmpty()); //SUCCESS Mockito.when(processInstanceMapper.countInstanceStateByUser(DateUtils.getScheduleDate(startDate), DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts()); - result = dataAnalysisService.countProcessInstanceStateByProject(user,1,startDate,endDate); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + result = dataAnalysisService.countProcessInstanceStateByProject(user, 1, startDate, endDate); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test - public void testCountDefinitionByUser(){ + public void testCountDefinitionByUser() { - Map result = dataAnalysisService.countDefinitionByUser(user,1); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Map result = dataAnalysisService.countDefinitionByUser(user, 1); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } - @Test - public void testCountCommandState(){ + public void testCountCommandState() { String startDate = "2020-02-11 16:02:18"; String endDate = "2020-02-11 16:03:18"; //checkProject false - Map result = dataAnalysisService.countCommandState(user,2,startDate,endDate); + Map result = dataAnalysisService.countCommandState(user, 2, startDate, endDate); Assert.assertTrue(result.isEmpty()); List commandCounts = new ArrayList<>(1); CommandCount commandCount = new CommandCount(); @@ -164,26 +163,25 @@ public class DataAnalysisServiceTest { Mockito.when(commandMapper.countCommandState(0, DateUtils.getScheduleDate(startDate), DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts); - Mockito.when(errorCommandMapper.countCommandState( 
DateUtils.getScheduleDate(startDate), + Mockito.when(errorCommandMapper.countCommandState(DateUtils.getScheduleDate(startDate), DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(commandCounts); - result = dataAnalysisService.countCommandState(user,1,startDate,endDate); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + result = dataAnalysisService.countCommandState(user, 1, startDate, endDate); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } /** - * get list - * @return + * get list */ - private List getTaskInstanceStateCounts(){ + private List getTaskInstanceStateCounts() { List taskInstanceStateCounts = new ArrayList<>(1); ExecuteStatusCount executeStatusCount = new ExecuteStatusCount(); executeStatusCount.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); taskInstanceStateCounts.add(executeStatusCount); - return taskInstanceStateCounts; + return taskInstanceStateCounts; } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java index 59523bdd11..a4c0c6bfe7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java @@ -16,17 +16,36 @@ */ package org.apache.dolphinscheduler.api.service; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.Priority; import 
org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -36,13 +55,6 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import java.text.ParseException; -import java.util.*; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.times; - /** * test for ExecutorService */ @@ -62,7 +74,7 @@ public class ExecutorService2Test { private ProjectMapper projectMapper; @Mock - private ProjectService projectService; + private ProjectServiceImpl projectService; @Mock private MonitorService monitorService; @@ -84,7 +96,7 @@ public class ExecutorService2Test { private String cronTime; @Before - public void init(){ + public void init() { // user loginUser.setId(userId); @@ -111,7 +123,6 @@ public class ExecutorService2Test { /** * not complement - * @throws ParseException */ @Test public void testNoComplement() throws ParseException { @@ -125,13 +136,12 @@ public class ExecutorService2Test { 
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); - }catch (Exception e){ + } catch (Exception e) { } } /** * date error - * @throws ParseException */ @Test public void testDateError() throws ParseException { @@ -145,13 +155,12 @@ public class ExecutorService2Test { Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS)); verify(processService, times(0)).createCommand(any(Command.class)); - }catch (Exception e){ + } catch (Exception e) { } } /** * serial - * @throws ParseException */ @Test public void testSerial() throws ParseException { @@ -165,17 +174,16 @@ public class ExecutorService2Test { Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); - }catch (Exception e){ + } catch (Exception e) { } } /** * without schedule - * @throws ParseException */ @Test public void testParallelWithOutSchedule() throws ParseException { - try{ + try { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, @@ -185,17 +193,16 @@ public class ExecutorService2Test { Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(31)).createCommand(any(Command.class)); - }catch (Exception e){ + } catch (Exception e) { } } /** * with schedule - * @throws ParseException */ @Test public void testParallelWithSchedule() throws ParseException { - try{ + try { 
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, @@ -205,13 +212,13 @@ public class ExecutorService2Test { Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(15)).createCommand(any(Command.class)); - }catch (Exception e){ + } catch (Exception e) { } } @Test - public void testNoMsterServers() throws ParseException{ + public void testNoMsterServers() throws ParseException { Mockito.when(monitorService.getServerListFromZK(true)).thenReturn(new ArrayList()); Map result = executorService.execProcessInstance(loginUser, projectName, @@ -220,11 +227,11 @@ public class ExecutorService2Test { null, null, 0, "", "", RunMode.RUN_MODE_PARALLEL, Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); - Assert.assertEquals(result.get(Constants.STATUS),Status.MASTER_NOT_EXISTS); + Assert.assertEquals(result.get(Constants.STATUS), Status.MASTER_NOT_EXISTS); } - private List getMasterServersList(){ + private List getMasterServersList() { List masterServerList = new ArrayList<>(); Server masterServer1 = new Server(); masterServer1.setId(1); @@ -242,11 +249,11 @@ public class ExecutorService2Test { } - private List zeroSchedulerList(){ + private List zeroSchedulerList() { return Collections.EMPTY_LIST; } - private List oneSchedulerList(){ + private List oneSchedulerList() { List schedulerList = new LinkedList<>(); Schedule schedule = new Schedule(); schedule.setCrontab("0 0 0 1/2 * ?"); @@ -254,7 +261,7 @@ public class ExecutorService2Test { return schedulerList; } - private Map checkProjectAndAuth(){ + private Map checkProjectAndAuth() { Map result = new HashMap<>(); result.put(Constants.STATUS, Status.SUCCESS); return result; diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java index 6551919e4a..57cd207c4d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java @@ -54,7 +54,7 @@ public class ExecutorServiceTest { @Test public void putMsgWithParamsTest() { - Map map = new HashMap<>(5); + Map map = new HashMap<>(); putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS); logger.info(map.toString()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index 4e41ed39b0..3952a25542 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -17,10 +17,14 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.service.process.ProcessService; + +import org.junit.After; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; @@ -32,25 +36,30 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; @RunWith(MockitoJUnitRunner.class) -@PrepareForTest({LoggerService.class}) +@PrepareForTest({LoggerServiceImpl.class}) public class LoggerServiceTest { private static final Logger logger = 
LoggerFactory.getLogger(LoggerServiceTest.class); @InjectMocks - private LoggerService loggerService; + private LoggerServiceImpl loggerService; @Mock private ProcessService processService; + @Before + public void init() { + this.loggerService.init(); + } + @Test - public void testQueryDataSourceList(){ + public void testQueryDataSourceList() { TaskInstance taskInstance = new TaskInstance(); Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); - Result result = loggerService.queryLog(2,1,1); + Result result = loggerService.queryLog(2, 1, 1); //TASK_INSTANCE_NOT_FOUND - Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue()); try { //HOST NOT FOUND OR ILLEGAL @@ -59,36 +68,36 @@ public class LoggerServiceTest { Assert.assertTrue(true); logger.error("testQueryDataSourceList error {}", e.getMessage()); } - Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(), result.getCode().intValue()); //SUCCESS taskInstance.setHost("127.0.0.1:8080"); taskInstance.setLogPath("/temp/log"); Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); - result = loggerService.queryLog(1,1,1); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + result = loggerService.queryLog(1, 1, 1); + Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); } @Test - public void testGetLogBytes(){ + public void testGetLogBytes() { TaskInstance taskInstance = new TaskInstance(); Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); //task instance is null - try{ + try { loggerService.getLogBytes(2); - }catch (RuntimeException e){ + } catch (RuntimeException e) { Assert.assertTrue(true); - logger.error("testGetLogBytes error: {}","task instance is null"); + 
logger.error("testGetLogBytes error: {}", "task instance is null"); } //task instance host is null - try{ + try { loggerService.getLogBytes(1); - }catch (RuntimeException e){ + } catch (RuntimeException e) { Assert.assertTrue(true); - logger.error("testGetLogBytes error: {}","task instance host is null"); + logger.error("testGetLogBytes error: {}", "task instance host is null"); } //success @@ -100,4 +109,9 @@ public class LoggerServiceTest { } + @After + public void close() { + this.loggerService.close(); + } + } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index 8db667e28b..38dbdf438d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -16,21 +16,48 @@ */ package org.apache.dolphinscheduler.api.service; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.dto.ProcessMeta; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.UserType; +import 
org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.ProcessData; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + import org.apache.http.entity.ContentType; -import org.json.JSONException; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -38,23 +65,14 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import org.skyscreamer.jsonassert.JSONAssert; -import org.springframework.boot.test.context.SpringBootTest; import 
org.springframework.mock.web.MockMultipartFile; import org.springframework.web.multipart.MultipartFile; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.text.MessageFormat; -import java.util.*; - @RunWith(MockitoJUnitRunner.Silent.class) -@SpringBootTest(classes = ApiApplicationServer.class) public class ProcessDefinitionServiceTest { @InjectMocks - ProcessDefinitionService processDefinitionService; + ProcessDefinitionServiceImpl processDefinitionService; @Mock private DataSourceMapper dataSourceMapper; @@ -66,13 +84,11 @@ public class ProcessDefinitionServiceTest { private ProjectMapper projectMapper; @Mock - private ProjectService projectService; + private ProjectServiceImpl projectService; @Mock private ScheduleMapper scheduleMapper; - - @Mock private ProcessService processService; @@ -115,21 +131,21 @@ public class ProcessDefinitionServiceTest { loginUser.setId(-1); loginUser.setUserType(UserType.GENERAL_USER); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project not found - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1"); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + Map map = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success putMsg(result, Status.SUCCESS, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); List resourceList = new ArrayList<>(); resourceList.add(getProcessDefinition()); 
Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList); - Map checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1"); + Map checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1"); Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS)); } @@ -144,12 +160,12 @@ public class ProcessDefinitionServiceTest { loginUser.setId(-1); loginUser.setUserType(UserType.GENERAL_USER); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project not found - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "",1, 5,0); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + Map map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "", 1, 5, 0); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); } @@ -165,18 +181,18 @@ public class ProcessDefinitionServiceTest { loginUser.setId(-1); loginUser.setUserType(UserType.GENERAL_USER); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project check auth fail - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Map map = processDefinitionService.queryProcessDefinitionById(loginUser, "project_test1", 1); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success, instance not exist putMsg(result, Status.SUCCESS, projectName); - 
Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Mockito.when(processDefineMapper.selectById(1)).thenReturn(null); Map instanceNotexitRes = processDefinitionService.queryProcessDefinitionById(loginUser, "project_test1", 1); @@ -190,20 +206,22 @@ public class ProcessDefinitionServiceTest { } @Test - public void testCopyProcessDefinition() throws Exception{ + public void testCopyProcessDefinition() throws Exception { String projectName = "project_test1"; Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + Mockito.when(projectMapper.queryDetailById(1)).thenReturn(getProject(projectName)); + Project project = getProject(projectName); User loginUser = new User(); loginUser.setId(-1); loginUser.setUserType(UserType.GENERAL_USER); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); //project check auth success, instance not exist putMsg(result, Status.SUCCESS, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); ProcessDefinition definition = getProcessDefinition(); definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"); @@ -212,7 +230,7 @@ public class ProcessDefinitionServiceTest { //instance exit Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition); - Map createProcessResult = new HashMap<>(5); + Map createProcessResult = new HashMap<>(); putMsg(result, Status.SUCCESS); Mockito.when(processDefinitionService.createProcessDefinition( @@ -224,8 +242,52 @@ public class ProcessDefinitionServiceTest { definition.getLocations(), definition.getConnects())).thenReturn(createProcessResult); - Map successRes = 
processDefinitionService.copyProcessDefinition(loginUser, - "project_test1", 46); + Map successRes = processDefinitionService.batchCopyProcessDefinition(loginUser, "project_test1", + "46", 1); + + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testBatchMoveProcessDefinition() throws Exception { + String projectName = "project_test1"; + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + + String projectName2 = "project_test2"; + Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(getProject(projectName2)); + + int targetProjectId = 2; + Mockito.when(projectMapper.queryDetailById(targetProjectId)).thenReturn(getProjectById(targetProjectId)); + + Project project = getProject(projectName); + Project targetProject = getProjectById(targetProjectId); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS, projectName); + + Map result2 = new HashMap<>(); + putMsg(result2, Status.SUCCESS, targetProject.getName()); + + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, targetProject, targetProject.getName())).thenReturn(result2); + + ProcessDefinition definition = getProcessDefinition(); + definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"); + definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho 
${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); + definition.setConnects("[]"); + //instance exit + Mockito.when(processDefineMapper.updateById(definition)).thenReturn(46); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition); + + putMsg(result, Status.SUCCESS); + + + Map successRes = processDefinitionService.batchMoveProcessDefinition(loginUser, "project_test1", + "46", 2); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -241,15 +303,15 @@ public class ProcessDefinitionServiceTest { loginUser.setUserType(UserType.GENERAL_USER); //project check auth fail - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Map map = processDefinitionService.deleteProcessDefinitionById(loginUser, "project_test1", 6); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success, instance not exist putMsg(result, Status.SUCCESS, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Mockito.when(processDefineMapper.selectById(1)).thenReturn(null); Map instanceNotexitRes = processDefinitionService.deleteProcessDefinitionById(loginUser, "project_test1", 1); @@ -321,9 +383,9 @@ public class ProcessDefinitionServiceTest { loginUser.setUserType(UserType.GENERAL_USER); //project check auth fail - Map result = new HashMap<>(5); + Map 
result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Map map = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", 6, ReleaseState.OFFLINE.getCode()); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); @@ -364,22 +426,22 @@ public class ProcessDefinitionServiceTest { loginUser.setUserType(UserType.GENERAL_USER); //project check auth fail - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); Map map = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success, process not exist putMsg(result, Status.SUCCESS, projectName); - Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(null); + Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(null); Map processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS)); //process exist - Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(getProcessDefinition()); + Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(getProcessDefinition()); Map processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); 
Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, processExistRes.get(Constants.STATUS)); @@ -404,7 +466,7 @@ public class ProcessDefinitionServiceTest { Assert.assertEquals(Status.DATA_IS_NULL, taskNotEmptyRes.get(Constants.STATUS)); //json abnormal - String abnormalJson = processDefinitionJson.replaceAll("SHELL",""); + String abnormalJson = processDefinitionJson.replaceAll("SHELL", ""); processData = JSONUtils.parseObject(abnormalJson, ProcessData.class); Map abnormalTaskRes = processDefinitionService.checkProcessNodeList(processData, abnormalJson); Assert.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID, abnormalTaskRes.get(Constants.STATUS)); @@ -502,157 +564,6 @@ public class ProcessDefinitionServiceTest { Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); } - /** - * add datasource param and dependent when export process - * @throws JSONException - */ - @Test - public void testAddTaskNodeSpecialParam() throws JSONException { - - Mockito.when(dataSourceMapper.selectById(1)).thenReturn(getDataSource()); - Mockito.when(processDefineMapper.queryByDefineId(2)).thenReturn(getProcessDefinition()); - - String corSqlDependentJson = processDefinitionService.addExportTaskNodeSpecialParam(sqlDependentJson); - - JSONAssert.assertEquals(sqlDependentJson,corSqlDependentJson,false); - - } - - @Test - public void testExportProcessMetaDataStr() { - Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(getSchedulerList()); - ProcessDefinition processDefinition = getProcessDefinition(); - processDefinition.setProcessDefinitionJson(sqlDependentJson); - - String exportProcessMetaDataStr = processDefinitionService.exportProcessMetaDataStr(46, processDefinition); - Assert.assertNotEquals(sqlDependentJson,exportProcessMetaDataStr); - } - - @Test - public void testAddExportTaskNodeSpecialParam() throws JSONException { - String shellData = shellJson; - - String resultStr = processDefinitionService.addExportTaskNodeSpecialParam(shellData); - 
JSONAssert.assertEquals(shellJson, resultStr, false); - } - - @Test - public void testImportProcessSchedule() { - User loginUser = new User(); - loginUser.setId(1); - loginUser.setUserType(UserType.GENERAL_USER); - - String currentProjectName = "test"; - String processDefinitionName = "test_process"; - Integer processDefinitionId = 1; - Schedule schedule = getSchedule(); - - ProcessMeta processMeta = getProcessMeta(); - - int insertFlag = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMeta, - processDefinitionName, processDefinitionId); - Assert.assertEquals(0, insertFlag); - - ProcessMeta processMetaCron = new ProcessMeta(); - processMetaCron.setScheduleCrontab(schedule.getCrontab()); - - int insertFlagCron = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron, - processDefinitionName, processDefinitionId); - Assert.assertEquals(0, insertFlagCron); - - WorkerGroup workerGroup = new WorkerGroup(); - workerGroup.setName("ds-test-workergroup"); - List workerGroups = new ArrayList<>(); - workerGroups.add(workerGroup); - - processMetaCron.setScheduleWorkerGroupName("ds-test"); - int insertFlagWorker = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron, - processDefinitionName, processDefinitionId); - Assert.assertEquals(0, insertFlagWorker); - - int workerNullFlag = processDefinitionService.importProcessSchedule(loginUser, currentProjectName, processMetaCron, - processDefinitionName, processDefinitionId); - Assert.assertEquals(0, workerNullFlag); - - - } - - /** - * import sub process test - */ - @Test - public void testImportSubProcess() { - - User loginUser = new User(); - loginUser.setId(1); - loginUser.setUserType(UserType.ADMIN_USER); - - Project testProject = getProject("test"); - - //Recursive subprocess sub2 process in sub1 process and sub1process in top process - String topProcessJson = "{\"globalParams\":[]," + - 
"\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-38634\",\"name\":\"shell1\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," + - "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + - "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}," + - "{\"type\":\"SUB_PROCESS\",\"id\":\"tasks-44207\",\"name\":\"shell-4\"," + - "\"params\":{\"processDefinitionId\":39},\"description\":\"\",\"runFlag\":\"NORMAL\"," + - "\"dependence\":{},\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + - "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," + - "\"preTasks\":[\"shell1\"]}],\"tenantId\":1,\"timeout\":0}"; - - String sub1ProcessJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-84090\"," + - "\"name\":\"shell-4\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-4\\\"\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," + - "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false}," + - "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]},{\"type\":\"SUB_PROCESS\"," + - "\"id\":\"tasks-87364\",\"name\":\"shell-5\"," + - "\"params\":{\"processDefinitionId\":46},\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{}," + - "\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + - "\"workerGroupId\":-1,\"preTasks\":[\"shell-4\"]}],\"tenantId\":1,\"timeout\":0}"; - - String sub2ProcessJson = "{\"globalParams\":[]," + - "\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-52423\",\"name\":\"shell-5\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo \\\"shell-5\\\"\"},\"description\":\"\"," 
+ - "\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," + - "\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; - - - ObjectNode jsonObject = JSONUtils.parseObject(topProcessJson); - ArrayNode jsonArray = (ArrayNode) jsonObject.path("tasks"); - - String originSubJson = jsonArray.toString(); - - Map subProcessIdMap = new HashMap<>(20); - - ProcessDefinition shellDefinition1 = new ProcessDefinition(); - shellDefinition1.setId(39); - shellDefinition1.setName("shell-4"); - shellDefinition1.setProjectId(2); - shellDefinition1.setProcessDefinitionJson(sub1ProcessJson); - - ProcessDefinition shellDefinition2 = new ProcessDefinition(); - shellDefinition2.setId(46); - shellDefinition2.setName("shell-5"); - shellDefinition2.setProjectId(2); - shellDefinition2.setProcessDefinitionJson(sub2ProcessJson); - - Mockito.when(processDefineMapper.queryByDefineId(39)).thenReturn(shellDefinition1); - Mockito.when(processDefineMapper.queryByDefineId(46)).thenReturn(shellDefinition2); - Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "shell-5")).thenReturn(null); - Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "shell-4")).thenReturn(null); - Mockito.when(processDefineMapper.queryByDefineName(testProject.getId(), "testProject")).thenReturn(shellDefinition2); - - processDefinitionService.importSubProcess(loginUser,testProject, jsonArray, subProcessIdMap); - - String correctSubJson = jsonArray.toString(); - - Assert.assertEquals(originSubJson, correctSubJson); - - } - @Test public void testImportProcessDefinitionById() throws IOException { @@ -680,7 +591,7 @@ public class ProcessDefinitionServiceTest { "\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":\\\"default\\\\," + 
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; - FileUtils.writeStringToFile(new File("/tmp/task.json"),processJson); + FileUtils.writeStringToFile(new File("/tmp/task.json"), processJson); File file = new File("/tmp/task.json"); @@ -694,7 +605,7 @@ public class ProcessDefinitionServiceTest { loginUser.setUserType(UserType.ADMIN_USER); String currentProjectName = "testProject"; - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS, currentProjectName); ProcessDefinition shellDefinition2 = new ProcessDefinition(); @@ -731,41 +642,13 @@ public class ProcessDefinitionServiceTest { } - /** - * check import process metadata - * @param file file - * @param loginUser login user - * @param currentProjectName current project name - * @param processMetaJson process meta json - * @throws IOException IO exception - */ - private void improssProcessCheckData(File file, User loginUser, String currentProjectName, String processMetaJson) throws IOException { - //check null - FileUtils.writeStringToFile(new File("/tmp/task.json"),processMetaJson); - - File fileEmpty = new File("/tmp/task.json"); - - FileInputStream fileEmptyInputStream = new FileInputStream("/tmp/task.json"); - - MultipartFile multiFileEmpty = new MockMultipartFile(fileEmpty.getName(), fileEmpty.getName(), - ContentType.APPLICATION_OCTET_STREAM.toString(), fileEmptyInputStream); - - Map resEmptyProcess = processDefinitionService.importProcessDefinition(loginUser, multiFileEmpty, currentProjectName); - - Assert.assertEquals(Status.DATA_IS_NULL, resEmptyProcess.get(Constants.STATUS)); - - boolean deleteFlag = file.delete(); - - Assert.assertTrue(deleteFlag); - } - @Test - public void testUpdateProcessDefinition () { + public void testUpdateProcessDefinition() { User loginUser = new User(); loginUser.setId(1); loginUser.setUserType(UserType.ADMIN_USER); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.SUCCESS); String projectName = 
"project_test1"; @@ -783,20 +666,22 @@ public class ProcessDefinitionServiceTest { /** * get mock datasource + * * @return DataSource */ - private DataSource getDataSource(){ + private DataSource getDataSource() { DataSource dataSource = new DataSource(); dataSource.setId(2); dataSource.setName("test"); - return dataSource; + return dataSource; } /** * get mock processDefinition + * * @return ProcessDefinition */ - private ProcessDefinition getProcessDefinition(){ + private ProcessDefinition getProcessDefinition() { ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setId(46); @@ -805,24 +690,40 @@ public class ProcessDefinitionServiceTest { processDefinition.setTenantId(1); processDefinition.setDescription(""); - return processDefinition; + return processDefinition; } /** * get mock Project + * * @param projectName projectName * @return Project */ - private Project getProject(String projectName){ + private Project getProject(String projectName) { Project project = new Project(); project.setId(1); project.setName(projectName); project.setUserId(1); - return project; + return project; + } + + /** + * get mock Project + * + * @param projectId projectId + * @return Project + */ + private Project getProjectById(int projectId) { + Project project = new Project(); + project.setId(1); + project.setName("project_test2"); + project.setUserId(1); + return project; } /** * get mock schedule + * * @return schedule */ private Schedule getSchedule() { @@ -845,6 +746,7 @@ public class ProcessDefinitionServiceTest { /** * get mock processMeta + * * @return processMeta */ private ProcessMeta getProcessMeta() { @@ -876,4 +778,4 @@ public class ProcessDefinitionServiceTest { result.put(Constants.MSG, status.getMsg()); } } -} \ No newline at end of file +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index 651964bb16..82031ca9eb 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -16,16 +16,43 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.api.ApiApplicationServer; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.LoggerServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.entity.WorkerGroup; +import 
org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.io.IOException; +import java.text.MessageFormat; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -33,22 +60,11 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.boot.test.context.SpringBootTest; -import java.io.IOException; -import java.text.MessageFormat; -import java.text.ParseException; -import java.util.*; - -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.Silent.class) -@SpringBootTest(classes = ApiApplicationServer.class) public class ProcessInstanceServiceTest { - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class); @InjectMocks ProcessInstanceService processInstanceService; @@ -57,7 +73,7 @@ public class ProcessInstanceServiceTest { ProjectMapper projectMapper; @Mock - ProjectService projectService; + ProjectServiceImpl projectService; @Mock ProcessService processService; @@ -78,9 +94,7 @@ public class ProcessInstanceServiceTest { TaskInstanceMapper taskInstanceMapper; @Mock - LoggerService loggerService; - - + LoggerServiceImpl loggerService; @Mock UsersService usersService; @@ -96,7 +110,7 @@ public class ProcessInstanceServiceTest { public void testQueryProcessInstanceList() { 
String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -153,30 +167,28 @@ public class ProcessInstanceServiceTest { User loginUser = getAdminUser(); Map result = new HashMap<>(5); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); - int size=10; - String startTime="2020-01-01 00:00:00"; - String endTime="2020-08-02 00:00:00"; + int size = 10; + String startTime = "2020-01-01 00:00:00"; + String endTime = "2020-08-02 00:00:00"; Date start = DateUtils.getScheduleDate(startTime); Date end = DateUtils.getScheduleDate(endTime); //project auth fail when(projectMapper.queryByName(projectName)).thenReturn(null); when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); - Map proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime); + Map proejctAuthFailRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); //project auth success putMsg(result, Status.SUCCESS, projectName); Project project = getProject(projectName); ProcessInstance processInstance = getProcessInstance(); - List processInstanceList = new ArrayList<>(); - processInstanceList.add(processInstance); when(projectMapper.queryByName(projectName)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser); - Map successRes = 
processInstanceService.queryTopNLongestRunningProcessInstance(loginUser,projectName,size,startTime,endTime); + Map successRes = processInstanceService.queryTopNLongestRunningProcessInstance(loginUser, projectName, size, startTime, endTime); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -185,7 +197,7 @@ public class ProcessInstanceServiceTest { public void testQueryProcessInstanceById() { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -223,7 +235,7 @@ public class ProcessInstanceServiceTest { public void testQueryTaskListByProcessId() throws IOException { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -272,7 +284,7 @@ public class ProcessInstanceServiceTest { public void testQuerySubProcessInstanceByTaskId() { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -318,7 +330,7 @@ public class ProcessInstanceServiceTest { public void testUpdateProcessInstance() throws ParseException { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -374,7 +386,7 @@ public class ProcessInstanceServiceTest { public void testQueryParentInstanceBySubId() { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail @@ -415,7 +427,7 @@ public class 
ProcessInstanceServiceTest { public void testDeleteProcessInstanceById() { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java index 51f9e148d1..99ec76a745 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java @@ -16,9 +16,8 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; @@ -30,7 +29,12 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; -import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -43,10 +47,8 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import 
com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.class) public class ProjectServiceTest { @@ -55,18 +57,18 @@ public class ProjectServiceTest { private static final Logger logger = LoggerFactory.getLogger(ProjectServiceTest.class); @InjectMocks - private ProjectService projectService; + private ProjectServiceImpl projectService; + @Mock private ProjectMapper projectMapper; - @Mock - private UserMapper userMapper; + @Mock private ProjectUserMapper projectUserMapper; + @Mock private ProcessDefinitionMapper processDefinitionMapper; - private String projectName = "ProjectServiceTest"; private String userName = "ProjectServiceTest"; @@ -78,106 +80,109 @@ public class ProjectServiceTest { @After - public void after(){ + public void after() { } @Test - public void testCreateProject(){ + public void testCreateProject() { - User loginUser = getLoginUser(); + User loginUser = getLoginUser(); loginUser.setId(1); Map result = projectService.createProject(loginUser, projectName, getDesc()); logger.info(result.toString()); - Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR,result.get(Constants.STATUS)); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); //project name exist Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject()); result = projectService.createProject(loginUser, projectName, projectName); logger.info(result.toString()); - Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS)); //success Mockito.when(projectMapper.insert(Mockito.any(Project.class))).thenReturn(1); result = projectService.createProject(loginUser, "test", "test"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); 
} + @Test - public void testQueryById(){ + public void testQueryById() { //not exist Map result = projectService.queryById(Integer.MAX_VALUE); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS)); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); logger.info(result.toString()); //success Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); result = projectService.queryById(1); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } + @Test - public void testCheckProjectAndAuth(){ + public void testCheckProjectAndAuth() { Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser()); User loginUser = getLoginUser(); - Map result = projectService.checkProjectAndAuth(loginUser,null,projectName); + Map result = projectService.checkProjectAndAuth(loginUser, null, projectName); logger.info(result.toString()); - Status status = (Status)result.get(Constants.STATUS); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS)); + Status status = (Status) result.get(Constants.STATUS); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); Project project = getProject(); //USER_NO_OPERATION_PROJECT_PERM project.setUserId(2); - result = projectService.checkProjectAndAuth(loginUser,project,projectName); + result = projectService.checkProjectAndAuth(loginUser, project, projectName); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS)); //success project.setUserId(1); - result = projectService.checkProjectAndAuth(loginUser,project,projectName); + result = projectService.checkProjectAndAuth(loginUser, project, projectName); logger.info(result.toString()); - 
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test - public void testHasProjectAndPerm(){ + public void testHasProjectAndPerm() { - // Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser()); + // Mockito.when(projectUserMapper.queryProjectRelation(1, 1)).thenReturn(getProjectUser()); User loginUser = getLoginUser(); Project project = getProject(); Map result = new HashMap<>(); // not exist user User tempUser = new User(); tempUser.setId(Integer.MAX_VALUE); - boolean checkResult = projectService.hasProjectAndPerm(tempUser,project,result); + boolean checkResult = projectService.hasProjectAndPerm(tempUser, project, result); logger.info(result.toString()); Assert.assertFalse(checkResult); //success result = new HashMap<>(); project.setUserId(1); - checkResult = projectService.hasProjectAndPerm(loginUser,project,result); + checkResult = projectService.hasProjectAndPerm(loginUser, project, result); logger.info(result.toString()); Assert.assertTrue(checkResult); } + @Test - public void testQueryProjectListPaging(){ + public void testQueryProjectListPaging() { - IPage page = new Page<>(1,10); + IPage page = new Page<>(1, 10); page.setRecords(getList()); page.setTotal(1L); Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(1), Mockito.eq(projectName))).thenReturn(page); User loginUser = getLoginUser(); // project owner - Map result = projectService.queryProjectListPaging(loginUser,10,1,projectName); + Map result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName); logger.info(result.toString()); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists())); @@ -185,124 +190,148 @@ public class ProjectServiceTest { //admin Mockito.when(projectMapper.queryProjectListPaging(Mockito.any(Page.class), Mockito.eq(0), 
Mockito.eq(projectName))).thenReturn(page); loginUser.setUserType(UserType.ADMIN_USER); - result = projectService.queryProjectListPaging(loginUser,10,1,projectName); + result = projectService.queryProjectListPaging(loginUser, 10, 1, projectName); logger.info(result.toString()); pageInfo = (PageInfo) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(pageInfo.getLists())); } + @Test - public void testDeleteProject(){ + public void testDeleteProject() { Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); User loginUser = getLoginUser(); //PROJECT_NOT_FOUNT - Map result= projectService.deleteProject(loginUser,12); + Map result = projectService.deleteProject(loginUser, 12); logger.info(result.toString()); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS)); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); loginUser.setId(2); //USER_NO_OPERATION_PROJECT_PERM - result= projectService.deleteProject(loginUser,1); + result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PROJECT_PERM, result.get(Constants.STATUS)); //DELETE_PROJECT_ERROR_DEFINES_NOT_NULL Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(getProcessDefinitions()); loginUser.setUserType(UserType.ADMIN_USER); - result= projectService.deleteProject(loginUser,1); + result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); - Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL,result.get(Constants.STATUS)); + Assert.assertEquals(Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL, result.get(Constants.STATUS)); //success Mockito.when(projectMapper.deleteById(1)).thenReturn(1); Mockito.when(processDefinitionMapper.queryAllDefinitionList(1)).thenReturn(new ArrayList<>()); - result= 
projectService.deleteProject(loginUser,1); + result = projectService.deleteProject(loginUser, 1); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @Test - public void testUpdate(){ + public void testUpdate() { User loginUser = getLoginUser(); Project project = getProject(); project.setId(2); Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); - Mockito.when( projectMapper.selectById(1)).thenReturn(getProject()); + Mockito.when(projectMapper.selectById(1)).thenReturn(getProject()); // PROJECT_NOT_FOUNT - Map result = projectService.update(loginUser,12,projectName,"desc"); + Map result = projectService.update(loginUser, 12, projectName, "desc"); logger.info(result.toString()); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT,result.get(Constants.STATUS)); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, result.get(Constants.STATUS)); //PROJECT_ALREADY_EXISTS - result = projectService.update(loginUser,1,projectName,"desc"); + result = projectService.update(loginUser, 1, projectName, "desc"); logger.info(result.toString()); - Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.PROJECT_ALREADY_EXISTS, result.get(Constants.STATUS)); //success project.setUserId(1); Mockito.when(projectMapper.updateById(Mockito.any(Project.class))).thenReturn(1); - result = projectService.update(loginUser,1,"test","desc"); + result = projectService.update(loginUser, 1, "test", "desc"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } + @Test - public void testQueryAuthorizedProject(){ + public void testQueryAuthorizedProject() { User loginUser = getLoginUser(); Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList()); //USER_NO_OPERATION_PERM - Map 
result = projectService.queryAuthorizedProject(loginUser,3); + Map result = projectService.queryAuthorizedProject(loginUser, 3); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //success loginUser.setUserType(UserType.ADMIN_USER); - result = projectService.queryAuthorizedProject(loginUser,1); + result = projectService.queryAuthorizedProject(loginUser, 1); logger.info(result.toString()); List projects = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } + @Test - public void testQueryAllProjectList(){ + public void testQueryCreatedProject() { + + User loginUser = getLoginUser(); + + Mockito.when(projectMapper.queryProjectCreatedByUser(1)).thenReturn(getList()); + //USER_NO_OPERATION_PERM + Map result = projectService.queryProjectCreatedByUser(loginUser); + logger.info(result.toString()); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); + + //success + loginUser.setUserType(UserType.ADMIN_USER); + result = projectService.queryProjectCreatedByUser(loginUser); + logger.info(result.toString()); + List projects = (List) result.get(Constants.DATA_LIST); + Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); + + } + + @Test + public void testQueryAllProjectList() { Mockito.when(projectMapper.selectList(null)).thenReturn(getList()); Mockito.when(processDefinitionMapper.selectList(null)).thenReturn(getProcessDefinitions()); Map result = projectService.queryAllProjectList(); logger.info(result.toString()); - List projects = (List) result.get(Constants.DATA_LIST); + List projects = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } + @Test - public void testQueryUnauthorizedProject(){ - // Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList()); + public void 
testQueryUnauthorizedProject() { + // Mockito.when(projectMapper.queryAuthedProjectListByUserId(1)).thenReturn(getList()); Mockito.when(projectMapper.queryProjectExceptUserId(2)).thenReturn(getList()); User loginUser = new User(); loginUser.setUserType(UserType.ADMIN_USER); - Map result = projectService.queryUnauthorizedProject(loginUser,2); + Map result = projectService.queryUnauthorizedProject(loginUser, 2); logger.info(result.toString()); List projects = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); } - private Project getProject(){ + private Project getProject() { Project project = new Project(); project.setId(1); project.setName(projectName); project.setUserId(1); - return project; + return project; } - private List getList(){ + private List getList() { List list = new ArrayList<>(); list.add(getProject()); return list; @@ -311,30 +340,28 @@ public class ProjectServiceTest { /** * create admin user - * @return */ - private User getLoginUser(){ + private User getLoginUser() { User loginUser = new User(); loginUser.setUserType(UserType.GENERAL_USER); loginUser.setUserName(userName); loginUser.setId(1); - return loginUser; + return loginUser; } /** * get project user - */ - private ProjectUser getProjectUser(){ + private ProjectUser getProjectUser() { ProjectUser projectUser = new ProjectUser(); projectUser.setProjectId(1); projectUser.setUserId(1); - return projectUser; + return projectUser; } - private List getProcessDefinitions(){ + private List getProcessDefinitions() { List list = new ArrayList<>(); ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setProjectId(1); @@ -343,9 +370,7 @@ public class ProjectServiceTest { } - - - private String getDesc(){ + private String getDesc() { return "projectUserMapper.deleteProjectRelation(projectId,userId)projectUserMappe" + ".deleteProjectRelation(projectId,userId)projectUserMappe" + 
"r.deleteProjectRelation(projectId,userId)projectUserMapper" + diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java index f75d808e56..deadc2129c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SchedulerServiceTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.model.Server; @@ -24,12 +25,16 @@ import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -40,13 +45,6 @@ import org.mockito.Mockito; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import org.quartz.Scheduler; -import 
org.springframework.beans.factory.annotation.Autowired; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; @RunWith(PowerMockRunner.class) @PrepareForTest(QuartzExecutors.class) @@ -57,10 +55,6 @@ public class SchedulerServiceTest { @InjectMocks private SchedulerService schedulerService; - - @Autowired - private ExecutorService executorService; - @Mock private MonitorService monitorService; @@ -72,21 +66,13 @@ public class SchedulerServiceTest { @Mock private ProjectMapper projectMapper; - @Mock - private ProjectUserMapper projectUserMapper; - @Mock - private ProjectService projectService; @Mock - private ProcessDefinitionMapper processDefinitionMapper; + private ProjectServiceImpl projectService; @Mock private QuartzExecutors quartzExecutors; - @Mock - private Scheduler scheduler; - - @Before public void setUp() { @@ -176,10 +162,10 @@ public class SchedulerServiceTest { Mockito.when(quartzExecutors.deleteJob("1", "1")).thenReturn(true); Mockito.when(quartzExecutors.buildJobGroupName(1)).thenReturn("1"); Mockito.when(quartzExecutors.buildJobName(1)).thenReturn("1"); - boolean flag = true; + boolean flag = true; try { schedulerService.deleteSchedule(1, 1); - }catch (Exception e){ + } catch (Exception e) { flag = false; } Assert.assertTrue(flag); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java index 7e98721207..b51f85f456 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/SessionServiceTest.java @@ -16,7 +16,12 @@ */ package org.apache.dolphinscheduler.api.service; +import java.util.ArrayList; import java.util.Calendar; +import java.util.Date; +import java.util.List; + +import 
org.apache.dolphinscheduler.api.service.impl.SessionServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; @@ -38,10 +43,6 @@ import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockCookie; import org.springframework.mock.web.MockHttpServletRequest; -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - @RunWith(MockitoJUnitRunner.class) public class SessionServiceTest { @@ -49,7 +50,7 @@ public class SessionServiceTest { private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class); @InjectMocks - private SessionService sessionService; + private SessionServiceImpl sessionService; @Mock private SessionMapper sessionMapper; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index ebb6139577..f93ed05535 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -16,9 +16,12 @@ */ package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.UserType; @@ -30,6 +33,14 @@ import org.apache.dolphinscheduler.dao.entity.User; import 
org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -41,11 +52,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.context.SpringBootTest; -import java.text.MessageFormat; -import java.util.*; - -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.Silent.class) @SpringBootTest(classes = ApiApplicationServer.class) @@ -59,7 +66,7 @@ public class TaskInstanceServiceTest { ProjectMapper projectMapper; @Mock - ProjectService projectService; + ProjectServiceImpl projectService; @Mock ProcessService processService; @@ -74,16 +81,16 @@ public class TaskInstanceServiceTest { UsersService usersService; @Test - public void queryTaskListPaging(){ + public void queryTaskListPaging() { String projectName = "project_test1"; User loginUser = getAdminUser(); - Map result = new HashMap<>(5); + Map result = new HashMap<>(); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); //project auth fail when(projectMapper.queryByName(projectName)).thenReturn(null); - when(projectService.checkProjectAndAuth(loginUser,null,projectName)).thenReturn(result); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); Map proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", "test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); @@ -101,7 +108,7 @@ public 
class TaskInstanceServiceTest { taskInstanceList.add(taskInstance); pageReturn.setRecords(taskInstanceList); when(projectMapper.queryByName(Mockito.anyString())).thenReturn(project); - when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), @@ -130,6 +137,7 @@ public class TaskInstanceServiceTest { /** * get Mock Admin User + * * @return admin user */ private User getAdminUser() { @@ -142,19 +150,21 @@ public class TaskInstanceServiceTest { /** * get mock Project + * * @param projectName projectName * @return Project */ - private Project getProject(String projectName){ + private Project getProject(String projectName) { Project project = new Project(); project.setId(1); project.setName(projectName); project.setUserId(1); - return project; + return project; } /** * get Mock process instance + * * @return process instance */ private ProcessInstance getProcessInstance() { @@ -169,6 +179,7 @@ public class TaskInstanceServiceTest { /** * get Mock task instance + * * @return task instance */ private TaskInstance getTaskInstance() { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index 6939e6a280..19562229c6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -462,42 +462,87 @@ public class UsersServiceTest { try { //userName error Map result = 
usersService.registerUser(userName, userPassword, repeatPassword, email); - logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userName = "userTest0002"; userPassword = "userTest000111111111111111"; //password error result = usersService.registerUser(userName, userPassword, repeatPassword, email); - logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); userPassword = "userTest0002"; email = "1q.com"; //email error result = usersService.registerUser(userName, userPassword, repeatPassword, email); - logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); //repeatPassword error email = "7400@qq.com"; repeatPassword = "userPassword"; result = usersService.registerUser(userName, userPassword, repeatPassword, email); - logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); //success repeatPassword = "userTest0002"; result = usersService.registerUser(userName, userPassword, repeatPassword, email); - logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error(Status.CREATE_USER_ERROR.getMsg(),e); Assert.assertTrue(false); } } + + @Test + public void testActivateUser() { + User user = new User(); + user.setUserType(UserType.GENERAL_USER); + String userName = "userTest0002~"; + try { + //not admin + Map result = usersService.activateUser(user, userName); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); + + //userName error + user.setUserType(UserType.ADMIN_USER); + result = usersService.activateUser(user, userName); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); + + //user not exist + userName = "userTest10013"; + result = 
usersService.activateUser(user, userName); + Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); + + //user state error + userName = "userTest0001"; + when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getUser()); + result = usersService.activateUser(user, userName); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); + + //success + when(userMapper.queryByUserNameAccurately(userName)).thenReturn(getDisabledUser()); + result = usersService.activateUser(user, userName); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } catch (Exception e) { + Assert.assertTrue(false); + } + } + + /** + * get disabled user + * @return + */ + private User getDisabledUser() { + + User user = new User(); + user.setUserType(UserType.GENERAL_USER); + user.setUserName("userTest0001"); + user.setUserPassword("userTest0001"); + user.setState(0); + return user; + } + + /** * get user * @return diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index 130f9dfa99..2ade59550f 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -580,6 +580,11 @@ + + com.facebook.presto + presto-jdbc + + com.baomidou mybatis-plus-annotation diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 4cb09a1a56..072a67f44f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -898,6 +898,7 @@ public final class Constants { public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver"; public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver"; public static final String COM_DB2_JDBC_DRIVER = 
"com.ibm.db2.jcc.DB2Driver"; + public static final String COM_PRESTO_JDBC_DRIVER = "com.facebook.presto.jdbc.PrestoDriver"; /** * database type @@ -910,6 +911,7 @@ public final class Constants { public static final String ORACLE = "ORACLE"; public static final String SQLSERVER = "SQLSERVER"; public static final String DB2 = "DB2"; + public static final String PRESTO = "PRESTO"; /** * jdbc url @@ -922,6 +924,7 @@ public final class Constants { public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//"; public static final String JDBC_SQLSERVER = "jdbc:sqlserver://"; public static final String JDBC_DB2 = "jdbc:db2://"; + public static final String JDBC_PRESTO = "jdbc:presto://"; public static final String ADDRESS = "address"; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java index 1d28a759c0..8ff2c70bba 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java @@ -33,6 +33,7 @@ public enum DbType { * 5 oracle * 6 sqlserver * 7 db2 + * 8 presto */ MYSQL(0, "mysql"), POSTGRESQL(1, "postgresql"), @@ -41,7 +42,8 @@ public enum DbType { CLICKHOUSE(4, "clickhouse"), ORACLE(5, "oracle"), SQLSERVER(6, "sqlserver"), - DB2(7, "db2"); + DB2(7, "db2"), + PRESTO(8, "presto"); DbType(int code, String descp) { this.code = code; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java index cf432a17d5..cc75624b60 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java @@ -417,7 
+417,12 @@ public class HadoopUtils implements Closeable { String applicationUrl = getApplicationUrl(applicationId); logger.info("applicationUrl={}", applicationUrl); - String responseContent = HttpUtils.get(applicationUrl); + String responseContent ; + if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) { + responseContent = KerberosHttpClient.get(applicationUrl); + } else { + responseContent = HttpUtils.get(applicationUrl); + } if (responseContent != null) { ObjectNode jsonObject = JSONUtils.parseObject(responseContent); result = jsonObject.path("app").path("finalStatus").asText(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java index 8ea15314a8..7d1e0a523a 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java @@ -128,40 +128,50 @@ public class HttpUtils { CloseableHttpClient httpclient = HttpUtils.getInstance(); HttpGet httpget = new HttpGet(url); - String responseContent = null; - CloseableHttpResponse response = null; - - try { - response = httpclient.execute(httpget); - //check response status is 200 - if (response.getStatusLine().getStatusCode() == 200) { - HttpEntity entity = response.getEntity(); - if (entity != null) { - responseContent = EntityUtils.toString(entity, Constants.UTF_8); - }else{ - logger.warn("http entity is null"); - } - }else{ - logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode()); - } - }catch (Exception e){ - logger.error(e.getMessage(),e); - }finally { - try { - if (response != null) { - EntityUtils.consume(response.getEntity()); - response.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - - if 
(!httpget.isAborted()) { - httpget.releaseConnection(); - httpget.abort(); - } + return getResponseContentString(httpget,httpclient); + } + + /** + * get http response content + * + * @param httpget httpget + * @param httpClient httpClient + * @return http get request response content + */ + public static String getResponseContentString(HttpGet httpget, CloseableHttpClient httpClient) { + String responseContent = null; + CloseableHttpResponse response = null; + try { + response = httpClient.execute(httpget); + // check response status is 200 + if (response.getStatusLine().getStatusCode() == 200) { + HttpEntity entity = response.getEntity(); + if (entity != null) { + responseContent = EntityUtils.toString(entity, Constants.UTF_8); + } else { + logger.warn("http entity is null"); } - return responseContent; + } else { + logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode()); + } + } catch (IOException ioe) { + logger.error(ioe.getMessage(), ioe); + } finally { + try { + if (response != null) { + EntityUtils.consume(response.getEntity()); + response.close(); + } + } catch (IOException e) { + logger.error(e.getMessage(), e); + } + if (!httpget.isAborted()) { + httpget.releaseConnection(); + httpget.abort(); + } + } + return responseContent; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java new file mode 100644 index 0000000000..5c1fd41900 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.http.auth.AuthSchemeProvider; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.Credentials; +import org.apache.http.client.config.AuthSchemes; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.config.Lookup; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.impl.auth.SPNegoSchemeFactory; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.security.auth.Subject; +import javax.security.auth.kerberos.KerberosPrincipal; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.Configuration; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; +import java.security.Principal; +import java.security.PrivilegedAction; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * kerberos http client + */ +public class KerberosHttpClient { + + public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClient.class); + + private String principal; + private String 
keyTabLocation; + + public KerberosHttpClient(String principal, String keyTabLocation) { + super(); + this.principal = principal; + this.keyTabLocation = keyTabLocation; + } + + public KerberosHttpClient(String principal, String keyTabLocation, boolean isDebug) { + this(principal, keyTabLocation); + if (isDebug) { + System.setProperty("sun.security.spnego.debug", "true"); + System.setProperty("sun.security.krb5.debug", "true"); + } + } + + public KerberosHttpClient(String principal, String keyTabLocation, String krb5Location, boolean isDebug) { + this(principal, keyTabLocation, isDebug); + System.setProperty("java.security.krb5.conf", krb5Location); + } + + private static CloseableHttpClient buildSpengoHttpClient() { + HttpClientBuilder builder = HttpClientBuilder.create(); + Lookup authSchemeRegistry = RegistryBuilder.create() + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true)).build(); + builder.setDefaultAuthSchemeRegistry(authSchemeRegistry); + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(new AuthScope(null, -1, null), new Credentials() { + @Override + public Principal getUserPrincipal() { + return null; + } + + @Override + public String getPassword() { + return null; + } + }); + builder.setDefaultCredentialsProvider(credentialsProvider); + return builder.build(); + } + + public String get(final String url, final String userId) { + logger.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, url); + Configuration config = new Configuration() { + @SuppressWarnings("serial") + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + Map options = new HashMap<>(9); + options.put("useTicketCache", "false"); + options.put("useKeyTab", "true"); + options.put("keyTab", keyTabLocation); + options.put("refreshKrb5Config", "true"); + options.put("principal", principal); + options.put("storeKey", "true"); + options.put("doNotPrompt", 
"true"); + options.put("isInitiator", "true"); + options.put("debug", "true"); + return new AppConfigurationEntry[] { + new AppConfigurationEntry("com.sun.security.auth.module.Krb5LoginModule", + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) }; + } + }; + Set princ = new HashSet<>(1); + princ.add(new KerberosPrincipal(userId)); + Subject sub = new Subject(false, princ, new HashSet<>(), new HashSet<>()); + + LoginContext lc; + try { + lc = new LoginContext("", sub, null, config); + lc.login(); + Subject serviceSubject = lc.getSubject(); + return Subject.doAs(serviceSubject, (PrivilegedAction) () -> { + CloseableHttpClient httpClient = buildSpengoHttpClient(); + HttpGet httpget = new HttpGet(url); + return HttpUtils.getResponseContentString(httpget, httpClient); + }); + } catch (LoginException le) { + logger.error("Kerberos authentication failed ", le); + } + return null; + } + + /** + * get http request content by kerberosClient + * + * @param url url + * @return http get request response content + */ + public static String get(String url) { + + String responseContent; + KerberosHttpClient kerberosHttpClient = new KerberosHttpClient( + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH), + PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH), true); + responseContent = kerberosHttpClient.get(url, PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME)); + return responseContent; + + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java index af2817a8d7..45321a59e3 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java @@ -37,4 +37,8 @@ public class 
StringUtils { public static boolean isNotBlank(String s){ return !isBlank(s); } + + public static String replaceNRTtoUnderline(String src){ + return src.replaceAll("[\n|\r|\t]", "_"); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java new file mode 100644 index 0000000000..fe873b3475 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.utils; + +/** + * tri function function interface + */ +@FunctionalInterface +public interface TriFunction { + + OUT1 apply(IN1 in1, IN2 in2, IN3 in3); + +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java index aee7ac8880..f9ce989f70 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java @@ -17,7 +17,13 @@ package org.apache.dolphinscheduler.common.utils; import com.fasterxml.jackson.databind.node.ObjectNode; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -28,26 +34,53 @@ import org.slf4j.LoggerFactory; */ public class HttpUtilsTest { + public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class); + private HadoopUtils hadoopUtils = HadoopUtils.getInstance(); - public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class); + @Test + public void testGetTest() { + // success + String result = HttpUtils.get("https://github.com/manifest.json"); + Assert.assertNotNull(result); + ObjectNode jsonObject = JSONUtils.parseObject(result); + Assert.assertEquals("GitHub", jsonObject.path("name").asText()); + result = HttpUtils.get("https://123.333.111.33/ccc"); + Assert.assertNull(result); + } + @Test + public void testGetByKerberos() { + try { + String applicationUrl = hadoopUtils.getApplicationUrl("application_1542010131334_0029"); + String responseContent; + responseContent = 
HttpUtils.get(applicationUrl); + Assert.assertNull(responseContent); - @Test - public void testGetTest(){ - //success - String result = HttpUtils.get("https://github.com/manifest.json"); - Assert.assertNotNull(result); - ObjectNode jsonObject = JSONUtils.parseObject(result); - Assert.assertEquals("GitHub", jsonObject.path("name").asText()); - - result = HttpUtils.get("https://123.333.111.33/ccc"); - Assert.assertNull(result); + } catch (Exception e) { + logger.error(e.getMessage(), e); } + } + + @Test + public void testGetResponseContentString() { + CloseableHttpClient httpclient = HttpClients.createDefault(); + HttpGet httpget = new HttpGet("https://github.com/manifest.json"); + /** set timeout、request time、socket timeout */ + RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT) + .setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT) + .setSocketTimeout(Constants.SOCKET_TIMEOUT).setRedirectsEnabled(true).build(); + httpget.setConfig(requestConfig); + String responseContent = HttpUtils.getResponseContentString(httpget, httpclient); + Assert.assertNotNull(responseContent); + } + + @Test public void testGetHttpClient() { CloseableHttpClient httpClient1 = HttpUtils.getInstance(); CloseableHttpClient httpClient2 = HttpUtils.getInstance(); Assert.assertEquals(httpClient1, httpClient2); } + } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java new file mode 100644 index 0000000000..9911961ac0 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.utils; + +import org.apache.dolphinscheduler.common.Constants; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * KerberosHttpClient test + */ +public class KerberosHttpClientTest { + public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClientTest.class); + private HadoopUtils hadoopUtils = HadoopUtils.getInstance(); + + @Test + public void get() { + try { + String applicationUrl = hadoopUtils.getApplicationUrl("application_1542010131334_0029"); + String responseContent; + KerberosHttpClient kerberosHttpClient = new KerberosHttpClient(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH), PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH), true); + responseContent = kerberosHttpClient.get(applicationUrl, + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME)); + Assert.assertNull(responseContent); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java index ccae36aae0..729a17f27b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java @@ -133,6 +133,7 @@ public abstract class BaseDataSource { case MYSQL: case ORACLE: case POSTGRESQL: + case PRESTO: separator = "?"; break; case DB2: diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java index 53f2468ea6..f5d07ea693 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java @@ -54,6 +54,8 @@ public class DataSourceFactory { return JSONUtils.parseObject(parameter, SQLServerDataSource.class); case DB2: return JSONUtils.parseObject(parameter, DB2ServerDataSource.class); + case PRESTO: + return JSONUtils.parseObject(parameter, PrestoDataSource.class); default: return null; } @@ -94,6 +96,9 @@ public class DataSourceFactory { case DB2: Class.forName(Constants.COM_DB2_JDBC_DRIVER); break; + case PRESTO: + Class.forName(Constants.COM_PRESTO_JDBC_DRIVER); + break; default: logger.error("not support sql type: {},can't load class", dbType); throw new IllegalArgumentException("not support sql type,can't load class"); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java new file mode 100644 index 0000000000..93ed3d61a5 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PrestoDataSource.java @@ -0,0 
+1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.datasource; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DbType; + +public class PrestoDataSource extends BaseDataSource { + + /** + * @return driver class + */ + @Override + public String driverClassSelector() { + return Constants.COM_PRESTO_JDBC_DRIVER; + } + + /** + * @return db type + */ + @Override + public DbType dbTypeSelector() { + return DbType.PRESTO; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java index feddb598f0..6726aa7dad 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Project.java @@ -16,13 +16,13 @@ */ package org.apache.dolphinscheduler.dao.entity; +import java.util.Date; + import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; import 
com.baomidou.mybatisplus.annotation.TableName; -import java.util.Date; - /** * project */ @@ -32,7 +32,7 @@ public class Project { /** * id */ - @TableId(value="id", type=IdType.AUTO) + @TableId(value = "id", type = IdType.AUTO) private int id; /** @@ -44,7 +44,7 @@ public class Project { /** * user name */ - @TableField(exist=false) + @TableField(exist = false) private String userName; /** @@ -70,19 +70,19 @@ public class Project { /** * permission */ - @TableField(exist=false) + @TableField(exist = false) private int perm; /** * process define count */ - @TableField(exist=false) + @TableField(exist = false) private int defCount; /** * process instance running count */ - @TableField(exist=false) + @TableField(exist = false) private int instRunningCount; public int getDefCount() { @@ -136,6 +136,7 @@ public class Project { public void setDescription(String description) { this.description = description; } + public String getDescription() { return description; } @@ -163,6 +164,7 @@ public class Project { public void setPerm(int perm) { this.perm = perm; } + @Override public String toString() { return "Project{" + @@ -176,7 +178,6 @@ public class Project { '}'; } - @Override public boolean equals(Object o) { if (this == o) { @@ -202,4 +203,88 @@ public class Project { return result; } + public static Builder newBuilder() { + return new Builder(); + } + + public static final class Builder { + private int id; + private int userId; + private String userName; + private String name; + private String description; + private Date createTime; + private Date updateTime; + private int perm; + private int defCount; + private int instRunningCount; + + private Builder() { + } + + public Builder id(int id) { + this.id = id; + return this; + } + + public Builder userId(int userId) { + this.userId = userId; + return this; + } + + public Builder userName(String userName) { + this.userName = userName; + return this; + } + + public Builder name(String name) { + this.name = name; + return 
this; + } + + public Builder description(String description) { + this.description = description; + return this; + } + + public Builder createTime(Date createTime) { + this.createTime = createTime; + return this; + } + + public Builder updateTime(Date updateTime) { + this.updateTime = updateTime; + return this; + } + + public Builder perm(int perm) { + this.perm = perm; + return this; + } + + public Builder defCount(int defCount) { + this.defCount = defCount; + return this; + } + + public Builder instRunningCount(int instRunningCount) { + this.instRunningCount = instRunningCount; + return this; + } + + public Project build() { + Project project = new Project(); + project.setId(id); + project.setUserId(userId); + project.setUserName(userName); + project.setName(name); + project.setDescription(description); + project.setCreateTime(createTime); + project.setUpdateTime(updateTime); + project.setPerm(perm); + project.setDefCount(defCount); + project.setInstRunningCount(instRunningCount); + return project; + } + } } diff --git a/dolphinscheduler-dist/release-docs/LICENSE b/dolphinscheduler-dist/release-docs/LICENSE index 10279872c7..59da2746bf 100644 --- a/dolphinscheduler-dist/release-docs/LICENSE +++ b/dolphinscheduler-dist/release-docs/LICENSE @@ -384,6 +384,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0 xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0 + presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1 ======================================================================== diff --git a/dolphinscheduler-dist/release-docs/NOTICE b/dolphinscheduler-dist/release-docs/NOTICE index 6ce789c7fb..901659e689 100644 --- a/dolphinscheduler-dist/release-docs/NOTICE +++ b/dolphinscheduler-dist/release-docs/NOTICE @@ -43,6 +43,13 @@ The following artifacts are EPL and CDDL 1.0. * org.eclipse.jetty.orbit:javax.mail.glassfish +------ +presto-jdbc + +The code for the t-digest was originally authored by Ted Dunning + +Adrien Grand contributed the heart of the AVLTreeDigest (https://github.com/jpountz) + ------ Oracle diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-presto-jdbc.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-presto-jdbc.txt new file mode 100644 index 0000000000..f49a4e16e6 --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-presto-jdbc.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java index e9eaabcad6..b905a9fea8 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java @@ -20,7 +20,7 @@ import java.io.Serializable; import java.util.Objects; /** - * server address + * server address */ public class Host implements Serializable { @@ -39,6 +39,16 @@ public class Host implements Serializable { */ private int port; + /** + * weight + */ + private int weight; + + /** + * workGroup + */ + private String workGroup; + public Host() { } @@ -48,6 +58,21 @@ public class Host implements Serializable { this.address = ip + ":" + port; } + public Host(String ip, int port, int weight) { + this.ip = ip; + this.port = port; + this.address = ip + ":" + port; + this.weight = weight; + } + + public Host(String ip, int port, int weight,String workGroup) { + this.ip = ip; + this.port = port; + this.address = ip + ":" + port; + this.weight = weight; + this.workGroup=workGroup; + } + public String getAddress() { return address; } @@ -65,6 +90,14 @@ public class Host implements Serializable { this.address = ip + ":" + port; } + public int 
getWeight() { + return weight; + } + + public void setWeight(int weight) { + this.weight = weight; + } + public int getPort() { return port; } @@ -74,31 +107,47 @@ public class Host implements Serializable { this.address = ip + ":" + port; } + public String getWorkGroup() { + return workGroup; + } + + public void setWorkGroup(String workGroup) { + this.workGroup = workGroup; + } + /** * address convert host + * * @param address address * @return host */ - public static Host of(String address){ - if(address == null) { + public static Host of(String address) { + if (address == null) { throw new IllegalArgumentException("Host : address is null."); } String[] parts = address.split(":"); - if (parts.length != 2) { + if (parts.length < 2) { throw new IllegalArgumentException(String.format("Host : %s illegal.", address)); } - Host host = new Host(parts[0], Integer.parseInt(parts[1])); + Host host = null; + if (parts.length == 2) { + host = new Host(parts[0], Integer.parseInt(parts[1])); + } + if (parts.length == 3) { + host = new Host(parts[0], Integer.parseInt(parts[1]), Integer.parseInt(parts[2])); + } return host; } /** * whether old version + * * @param address address * @return old version is true , otherwise is false */ - public static Boolean isOldVersion(String address){ + public static Boolean isOldVersion(String address) { String[] parts = address.split(":"); - return parts.length != 2 ? 
true : false; + return parts.length != 2 && parts.length != 3; } @Override diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java index c149ac3335..4d55490a8d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java @@ -36,7 +36,7 @@ import java.util.concurrent.ConcurrentHashMap; public class TaskInstanceCacheManagerImpl implements TaskInstanceCacheManager { /** - * taskInstance caceh + * taskInstance cache */ private Map taskInstanceCache = new ConcurrentHashMap<>(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java index 58006bf7f7..4a3d4bd9f1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java @@ -71,7 +71,12 @@ public abstract class CommonHostManager implements HostManager { return host; } List candidateHosts = new ArrayList<>(nodes.size()); - nodes.stream().forEach(node -> candidateHosts.add(Host.of(node))); + nodes.forEach(node -> { + Host nodeHost=Host.of(node); + nodeHost.setWorkGroup(context.getWorkerGroup()); + candidateHosts.add(nodeHost); + }); + return select(candidateHosts); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java index ef2b6fd22f..241906a7b4 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java @@ -38,7 +38,7 @@ public class RandomHostManager extends CommonHostManager { * set round robin */ public RandomHostManager(){ - this.selector = new RandomSelector<>(); + this.selector = new RandomSelector(); } @Override diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java index e9fef49ecf..ec1945e563 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java @@ -38,7 +38,7 @@ public class RoundRobinHostManager extends CommonHostManager { * set round robin */ public RoundRobinHostManager(){ - this.selector = new RoundRobinSelector<>(); + this.selector = new RoundRobinSelector(); } @Override diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java index e00d6f7a65..6975127b9a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java @@ -17,27 +17,44 @@ package 
org.apache.dolphinscheduler.server.master.dispatch.host.assign; +import org.apache.dolphinscheduler.remote.utils.Host; + +import java.util.ArrayList; import java.util.Collection; -import java.util.Random; +import java.util.List; +import java.util.concurrent.ThreadLocalRandom; /** * random selector - * @param T */ -public class RandomSelector extends AbstractSelector { - - private final Random random = new Random(); +public class RandomSelector extends AbstractSelector { @Override - public T doSelect(final Collection source) { - - int size = source.size(); - /** - * random select - */ - int randomIndex = random.nextInt(size); - - return (T) source.toArray()[randomIndex]; + public Host doSelect(final Collection source) { + + List hosts = new ArrayList<>(source); + int size = hosts.size(); + int[] weights = new int[size]; + int totalWeight = 0; + int index = 0; + + for (Host host : hosts) { + totalWeight += host.getWeight(); + weights[index] = host.getWeight(); + index++; + } + + if (totalWeight > 0) { + int offset = ThreadLocalRandom.current().nextInt(totalWeight); + + for (int i = 0; i < size; i++) { + offset -= weights[i]; + if (offset < 0) { + return hosts.get(i); + } + } + } + return hosts.get(ThreadLocalRandom.current().nextInt(size)); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java index 06e469fe6b..34a79ac6e8 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java @@ -16,27 +16,123 @@ */ package org.apache.dolphinscheduler.server.master.dispatch.host.assign; +import org.apache.dolphinscheduler.remote.utils.Host; import 
org.springframework.stereotype.Service; -import java.util.Collection; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; /** - * round robin selector - * @param T + * Smooth Weight Round Robin */ @Service -public class RoundRobinSelector extends AbstractSelector { +public class RoundRobinSelector extends AbstractSelector { + + private ConcurrentMap> workGroupWeightMap = new ConcurrentHashMap<>(); + + private static final int RECYCLE_PERIOD = 100000; + + private AtomicBoolean updateLock = new AtomicBoolean(); + + protected static class WeightedRoundRobin { + private int weight; + private AtomicLong current = new AtomicLong(0); + private long lastUpdate; + + int getWeight() { + return weight; + } + + void setWeight(int weight) { + this.weight = weight; + current.set(0); + } + + long increaseCurrent() { + return current.addAndGet(weight); + } + + void sel(int total) { + current.addAndGet(-1L * total); + } + + long getLastUpdate() { + return lastUpdate; + } + + void setLastUpdate(long lastUpdate) { + this.lastUpdate = lastUpdate; + } + + } - private final AtomicInteger index = new AtomicInteger(0); @Override - public T doSelect(Collection source) { + public Host doSelect(Collection source) { + + List hosts = new ArrayList<>(source); + String key = hosts.get(0).getWorkGroup(); + ConcurrentMap map = workGroupWeightMap.get(key); + if (map == null) { + workGroupWeightMap.putIfAbsent(key, new ConcurrentHashMap<>()); + map = workGroupWeightMap.get(key); + } + + int totalWeight = 0; + long maxCurrent = Long.MIN_VALUE; + long now = System.currentTimeMillis(); + Host selectedHost = null; + WeightedRoundRobin selectWeightRoundRobin = null; + + for (Host host : hosts) { + String workGroupHost = host.getWorkGroup() + host.getAddress(); + WeightedRoundRobin 
weightedRoundRobin = map.get(workGroupHost); + int weight = host.getWeight(); + if (weight < 0) { + weight = 0; + } + + if (weightedRoundRobin == null) { + weightedRoundRobin = new WeightedRoundRobin(); + // set weight + weightedRoundRobin.setWeight(weight); + map.putIfAbsent(workGroupHost, weightedRoundRobin); + weightedRoundRobin = map.get(workGroupHost); + } + if (weight != weightedRoundRobin.getWeight()) { + weightedRoundRobin.setWeight(weight); + } + + long cur = weightedRoundRobin.increaseCurrent(); + weightedRoundRobin.setLastUpdate(now); + if (cur > maxCurrent) { + maxCurrent = cur; + selectedHost = host; + selectWeightRoundRobin = weightedRoundRobin; + } + + totalWeight += weight; + } + + + if (!updateLock.get() && hosts.size() != map.size() && updateLock.compareAndSet(false, true)) { + try { + ConcurrentMap newMap = new ConcurrentHashMap<>(map); + newMap.entrySet().removeIf(item -> now - item.getValue().getLastUpdate() > RECYCLE_PERIOD); + workGroupWeightMap.put(key, newMap); + } finally { + updateLock.set(false); + } + } + + if (selectedHost != null) { + selectWeightRoundRobin.sel(totalWeight); + return selectedHost; + } - int size = source.size(); - /** - * round robin - */ - return (T) source.toArray()[index.getAndIncrement() % size]; + return hosts.get(0); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java index 009332f05c..9c92fb2d64 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java @@ -32,7 +32,7 @@ public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContex /** - * 
taskInstance caceh + * taskInstance cache */ private Map taskExecutionContextCache = new ConcurrentHashMap<>(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java index 2dedaf8e1b..fa97403527 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java @@ -49,6 +49,9 @@ public class WorkerConfig { @Value("${worker.listen.port: 1234}") private int listenPort; + @Value("${worker.weight:100}") + private int weight; + public int getListenPort() { return listenPort; } @@ -107,4 +110,13 @@ public class WorkerConfig { public void setWorkerMaxCpuloadAvg(int workerMaxCpuloadAvg) { this.workerMaxCpuloadAvg = workerMaxCpuloadAvg; } + + + public int getWeight() { + return weight; + } + + public void setWeight(int weight) { + this.weight = weight; + } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java index 5e400e1e1f..36998fad63 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java @@ -16,9 +16,6 @@ */ package org.apache.dolphinscheduler.server.worker.registry; -import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; -import static org.apache.dolphinscheduler.common.Constants.SLASH; - import java.util.Date; import java.util.Set; import java.util.concurrent.Executors; @@ -44,9 +41,11 @@ import 
org.springframework.stereotype.Service; import com.google.common.collect.Sets; +import static org.apache.dolphinscheduler.common.Constants.*; + /** - * worker registry + * worker registry */ @Service public class WorkerRegistry { @@ -54,13 +53,13 @@ public class WorkerRegistry { private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class); /** - * zookeeper registry center + * zookeeper registry center */ @Autowired private ZookeeperRegistryCenter zookeeperRegistryCenter; /** - * worker config + * worker config */ @Autowired private WorkerConfig workerConfig; @@ -86,7 +85,7 @@ public class WorkerRegistry { } /** - * registry + * registry */ public void registry() { String address = NetUtils.getHost(); @@ -122,7 +121,7 @@ public class WorkerRegistry { } /** - * remove registry info + * remove registry info */ public void unRegistry() { String address = getLocalAddress(); @@ -135,13 +134,14 @@ public class WorkerRegistry { } /** - * get worker path + * get worker path */ private Set getWorkerZkPaths() { Set workerZkPaths = Sets.newHashSet(); String address = getLocalAddress(); String workerZkPathPrefix = this.zookeeperRegistryCenter.getWorkerPath(); + String weight = getWorkerWeight(); for (String workGroup : this.workerGroups) { StringBuilder workerZkPathBuilder = new StringBuilder(100); @@ -152,15 +152,23 @@ public class WorkerRegistry { // trim and lower case is need workerZkPathBuilder.append(workGroup.trim().toLowerCase()).append(SLASH); workerZkPathBuilder.append(address); + workerZkPathBuilder.append(weight); workerZkPaths.add(workerZkPathBuilder.toString()); } return workerZkPaths; } /** - * get local address + * get local address */ private String getLocalAddress() { return NetUtils.getHost() + ":" + workerConfig.getListenPort(); } + + /** + * get Worker Weight + */ + private String getWorkerWeight() { + return ":" + workerConfig.getWeight(); + } } diff --git 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java index acc75d70d2..939426b3bf 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java @@ -147,8 +147,8 @@ public class SqlTask extends AbstractTask { } /** - * ready to execute SQL and parameter entity Map - * @return + * ready to execute SQL and parameter entity Map + * @return SqlBinds */ private SqlBinds getSqlAndSqlParamsMap(String sql) { Map sqlParamsMap = new HashMap<>(); @@ -250,7 +250,7 @@ public class SqlTask extends AbstractTask { * result process * * @param resultSet resultSet - * @throws Exception + * @throws Exception Exception */ private void resultProcess(ResultSet resultSet) throws Exception{ ArrayNode resultJSONArray = JSONUtils.createArrayNode(); @@ -293,7 +293,7 @@ public class SqlTask extends AbstractTask { } /** - * post psql + * post sql * * @param connection connection * @param postStatementsBinds postStatementsBinds @@ -329,7 +329,7 @@ public class SqlTask extends AbstractTask { * create connection * * @return connection - * @throws Exception + * @throws Exception Exception */ private Connection createConnection() throws Exception{ // if hive , load connection params if exists @@ -367,7 +367,7 @@ public class SqlTask extends AbstractTask { try { resultSet.close(); } catch (SQLException e) { - + logger.error("close result set error : {}",e.getMessage(),e); } } @@ -375,7 +375,7 @@ public class SqlTask extends AbstractTask { try { pstmt.close(); } catch (SQLException e) { - + logger.error("close prepared statement error : {}",e.getMessage(),e); } } @@ -383,17 +383,17 @@ public class SqlTask extends AbstractTask { try { connection.close(); } catch (SQLException e) { - + 
logger.error("close connection error : {}",e.getMessage(),e); } } } /** * preparedStatement bind - * @param connection - * @param sqlBinds - * @return - * @throws Exception + * @param connection connection + * @param sqlBinds sqlBinds + * @return PreparedStatement + * @throws Exception Exception */ private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception { // is the timeout set diff --git a/dolphinscheduler-server/src/main/resources/worker.properties b/dolphinscheduler-server/src/main/resources/worker.properties index 0365c8a9c9..9fba30c147 100644 --- a/dolphinscheduler-server/src/main/resources/worker.properties +++ b/dolphinscheduler-server/src/main/resources/worker.properties @@ -32,3 +32,6 @@ # default worker group #worker.groups=default + +# default worker weight +#work.weight=100 diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java index 0da88746f5..d3b1dcf84a 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/LoggerServerTest.java @@ -17,46 +17,62 @@ package org.apache.dolphinscheduler.server.log; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; + import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.service.log.LogClientService; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; import org.junit.Test; public class LoggerServerTest { + private LoggerServer loggerServer; - @Test - public void testRollViewLog(){ - LoggerServer loggerServer = new LoggerServer(); - loggerServer.start(); + private 
LogClientService logClientService; - LogClientService logClientService = new LogClientService(); - logClientService.rollViewLog("localhost", Constants.RPC_PORT,"/opt/demo.txt",0,1000); + @Before + public void startServerAndClient() { + this.loggerServer = new LoggerServer(); + this.loggerServer.start(); + this.logClientService = new LogClientService(); + } - try { - Thread.sleep(5000); - } catch (InterruptedException e) { + @Test + public void testRollViewLog() throws IOException { + String expectedTmpDemoString = "testRolloViewLog"; + FileUtils.writeStringToFile(new File("/tmp/demo.txt"), expectedTmpDemoString, Charset.defaultCharset()); - } + String resultTmpDemoString = this.logClientService.rollViewLog( + "localhost", Constants.RPC_PORT,"/tmp/demo.txt", 0, 1000); - loggerServer.stop(); - logClientService.close(); + Assert.assertEquals(expectedTmpDemoString, resultTmpDemoString.replaceAll("[\r|\n|\t]", StringUtils.EMPTY)); + + FileUtils.deleteFile("/tmp/demo.txt"); } @Test - public void testRemoveTaskLog(){ - LoggerServer loggerServer = new LoggerServer(); - loggerServer.start(); + public void testRemoveTaskLog() throws IOException { + String expectedTmpRemoveString = "testRemoveTaskLog"; + FileUtils.writeStringToFile(new File("/tmp/remove.txt"), expectedTmpRemoveString, Charset.defaultCharset()); + + Boolean b = this.logClientService.removeTaskLog("localhost", Constants.RPC_PORT,"/tmp/remove.txt"); - LogClientService logClientService = new LogClientService(); - logClientService.removeTaskLog("localhost", Constants.RPC_PORT,"/opt/zhangsan"); + Assert.assertTrue(b); - try { - Thread.sleep(5000); - } catch (InterruptedException e) { + String result = this.logClientService.viewLog("localhost", Constants.RPC_PORT,"/tmp/demo.txt"); - } + Assert.assertEquals(StringUtils.EMPTY, result); + } - loggerServer.stop(); - logClientService.close(); + @After + public void stopServerAndClient() { + this.loggerServer.stop(); + this.logClientService.close(); } } diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java index a14ea32e4e..f25a227947 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java @@ -16,7 +16,9 @@ */ package org.apache.dolphinscheduler.server.master.dispatch.host.assign; +import org.apache.commons.lang.ObjectUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; import org.junit.Assert; import org.junit.Test; @@ -36,16 +38,16 @@ public class RandomSelectorTest { @Test public void testSelect1(){ - RandomSelector selector = new RandomSelector(); - String result = selector.select(Arrays.asList("1")); - Assert.assertTrue(StringUtils.isNotEmpty(result)); - Assert.assertTrue(result.equalsIgnoreCase("1")); + RandomSelector selector = new RandomSelector(); + Host result = selector.select(Arrays.asList(new Host("192.168.1.1",80,100),new Host("192.168.1.2",80,20))); + Assert.assertNotNull(result); } @Test public void testSelect(){ - RandomSelector selector = new RandomSelector(); - int result = selector.select(Arrays.asList(1,2,3,4,5,6,7)); - Assert.assertTrue(result >= 1 && result <= 7); + RandomSelector selector = new RandomSelector(); + Host result = selector.select(Arrays.asList(new Host("192.168.1.1",80,100),new Host("192.168.1.1",80,20))); + Assert.assertNotNull(result); + } } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java index adc55a4774..ed62caaa2c 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.master.dispatch.host.assign; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; import org.junit.Assert; import org.junit.Test; @@ -30,26 +31,46 @@ import java.util.List; public class RoundRobinSelectorTest { @Test(expected = IllegalArgumentException.class) - public void testSelectWithIllegalArgumentException(){ + public void testSelectWithIllegalArgumentException() { RoundRobinSelector selector = new RoundRobinSelector(); selector.select(Collections.EMPTY_LIST); } @Test - public void testSelect1(){ - RoundRobinSelector selector = new RoundRobinSelector(); - String result = selector.select(Arrays.asList("1")); - Assert.assertTrue(StringUtils.isNotEmpty(result)); - Assert.assertTrue(result.equalsIgnoreCase("1")); - } + public void testSelect1() { + RoundRobinSelector selector = new RoundRobinSelector(); + Host result = null; + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1", result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.2", result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1", result.getIp()); + // add new host + result = selector.select(Arrays.asList(new 
Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1", result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.2", result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.3",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.2",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"), new Host("192.168.1.3", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.3",result.getIp()); + // remove host3 + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + 
Assert.assertEquals("192.168.1.2",result.getIp()); + result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, "kris"), new Host("192.168.1.2", 80, 10, "kris"))); + Assert.assertEquals("192.168.1.1",result.getIp()); - @Test - public void testSelect(){ - RoundRobinSelector selector = new RoundRobinSelector(); - List sources = Arrays.asList(1, 2, 3, 4, 5, 6, 7); - int result = selector.select(sources); - Assert.assertTrue(result == 1); - int result2 = selector.select(Arrays.asList(1,2,3,4,5,6,7)); - Assert.assertTrue(result2 == 2); } + } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java index 92d38d470a..474bf12c77 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java @@ -16,13 +16,20 @@ */ package org.apache.dolphinscheduler.service.log; -import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; -import org.apache.dolphinscheduler.remote.command.log.*; +import org.apache.dolphinscheduler.remote.command.log.GetLogBytesRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.GetLogBytesResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.RollViewLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.RollViewLogResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand; import 
org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.JsonSerializer; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,8 +45,10 @@ public class LogClientService { private final NettyRemotingClient client; + private volatile boolean isRunning; + /** - * request time out + * request time out */ private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L; @@ -50,18 +59,21 @@ public class LogClientService { this.clientConfig = new NettyClientConfig(); this.clientConfig.setWorkerThreads(4); this.client = new NettyRemotingClient(clientConfig); + this.isRunning = true; } /** * close */ - public void close() { + public void close() { this.client.close(); + this.isRunning = false; logger.info("logger client closed"); } /** * roll view log + * * @param host host * @param port port * @param path path @@ -69,7 +81,7 @@ public class LogClientService { * @param limit limit * @return log content */ - public String rollViewLog(String host, int port, String path,int skipLineNum,int limit) { + public String rollViewLog(String host, int port, String path, int skipLineNum, int limit) { logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit); RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit); String result = ""; @@ -77,7 +89,7 @@ public class LogClientService { try { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); - if(response != null){ + if (response != null) { RollViewLogResponseCommand rollReviewLog = JsonSerializer.deserialize( response.getBody(), RollViewLogResponseCommand.class); return rollReviewLog.getMsg(); @@ -92,6 +104,7 @@ public class LogClientService { /** * view log + * * @param host host * @param port port * @param path path @@ -105,7 +118,7 @@ public class 
LogClientService { try { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); - if(response != null){ + if (response != null) { ViewLogResponseCommand viewLog = JsonSerializer.deserialize( response.getBody(), ViewLogResponseCommand.class); return viewLog.getMsg(); @@ -120,6 +133,7 @@ public class LogClientService { /** * get log size + * * @param host host * @param port port * @param path log path @@ -133,7 +147,7 @@ public class LogClientService { try { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); - if(response != null){ + if (response != null) { GetLogBytesResponseCommand getLog = JsonSerializer.deserialize( response.getBody(), GetLogBytesResponseCommand.class); return getLog.getData(); @@ -149,6 +163,7 @@ public class LogClientService { /** * remove task log + * * @param host host * @param port port * @param path path @@ -162,7 +177,7 @@ public class LogClientService { try { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); - if(response != null){ + if (response != null) { RemoveTaskLogResponseCommand taskLogResponse = JsonSerializer.deserialize( response.getBody(), RemoveTaskLogResponseCommand.class); return taskLogResponse.getStatus(); @@ -174,4 +189,8 @@ public class LogClientService { } return result; } + + public boolean isRunning() { + return isRunning; + } } \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue index 1e15688c5d..adf4753f4f 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue @@ -227,6 
+227,10 @@ { value: 'DB2', label: 'DB2' + }, + { + value: 'PRESTO', + label: 'PRESTO' } ] } @@ -433,6 +437,9 @@ case 'DB2': defaultPort = '50000' break + case 'PRESTO': + defaultPort = '8080' + break default: break diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js index f933eaede4..a549aafaa2 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js @@ -19,6 +19,25 @@ import _ from 'lodash' import io from '@/module/io' import { tasksState } from '@/conf/home/pages/dag/_source/config' +// delete 'definitionList' from tasks +const deleteDefinitionList = (tasks) => { + const newTasks = []; + tasks.forEach(item => { + const newItem = Object.assign({}, item); + if(newItem.dependence && newItem.dependence.dependTaskList) { + newItem.dependence.dependTaskList.forEach(dependTaskItem => { + if (dependTaskItem.dependItemList) { + dependTaskItem.dependItemList.forEach(dependItem => { + Reflect.deleteProperty(dependItem, 'definitionList'); + }) + } + }) + } + newTasks.push(newItem); + }); + return newTasks; +} + export default { /** * Task status acquisition @@ -193,7 +212,7 @@ export default { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, - tasks: state.tasks, + tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } @@ -217,7 +236,7 @@ export default { return new Promise((resolve, reject) => { const data = { globalParams: state.globalParams, - tasks: state.tasks, + tasks: deleteDefinitionList(state.tasks), tenantId: state.tenantId, timeout: state.timeout } diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dag/state.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/state.js index 05dfa77161..e3c75b838f 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/dag/state.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/dag/state.js 
@@ -96,6 +96,11 @@ export default { id: 7, code: 'DB2', disabled: false + }, + { + id: 8, + code: 'PRESTO', + disabled: false } ], // Alarm interface diff --git a/dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js index f8166d610c..0743621e21 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/datasource/actions.js @@ -20,7 +20,7 @@ import io from '@/module/io' export default { /** * Data source creation - * @param "type": string,//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER + * @param "type": string,//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER, PRESTO * @param "name": string, * @param "desc": string, * @param "parameter":string //{"address":"jdbc:hive2://192.168.220.189:10000","autoReconnect":"true","characterEncoding":"utf8","database":"default","initialTimeout":3000,"jdbcUrl":"jdbc:hive2://192.168.220.189:10000/default","maxReconnect":10,"password":"","useUnicode":true,"user":"hive"} @@ -49,7 +49,7 @@ export default { }, /** * Query data source list - no paging - * @param "type": string//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER + * @param "type": string//MYSQL, POSTGRESQL, HIVE, SPARK, CLICKHOUSE, ORACLE, SQLSERVER, PRESTO */ getDatasourcesList ({ state }, payload) { return new Promise((resolve, reject) => { diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js index f5e579c088..0ef5340488 100755 --- a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js @@ -176,7 +176,7 @@ export default { 'Edit Tenant': 'Edit Tenant', 'Tenant Code': 'Tenant Code', 'Tenant Name': 'Tenant Name', - Queue: 'Queue', + Queue: 'Yarn Queue', 'Please select a queue': 'default is tenant association queue', 'Please enter the tenant code in 
English': 'Please enter the tenant code in English', 'Please enter tenant code in English': 'Please enter tenant code in English', @@ -455,7 +455,7 @@ export default { LastMonthBegin: 'LastMonthBegin', LastMonthEnd: 'LastMonthEnd', 'Refresh status succeeded': 'Refresh status succeeded', - 'Queue manage': 'Queue manage', + 'Queue manage': 'Yarn Queue manage', 'Create queue': 'Create queue', 'Edit queue': 'Edit queue', 'Datasource manage': 'Datasource', diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js index 8d524144e1..a352183fca 100755 --- a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js @@ -180,7 +180,7 @@ export default { 'Edit Tenant': '编辑租户', 'Tenant Code': '租户编码', 'Tenant Name': '租户名称', - Queue: '队列', + Queue: 'Yarn 队列', 'Please enter the tenant code in English': '请输入租户编码只允许英文', 'Please enter tenant code in English': '请输入英文租户编码', 'Edit User': '编辑用户', @@ -450,7 +450,7 @@ export default { LastMonthBegin: '上月初', LastMonthEnd: '上月末', 'Refresh status succeeded': '刷新状态成功', - 'Queue manage': '队列管理', + 'Queue manage': 'Yarn 队列管理', 'Create queue': '创建队列', 'Edit queue': '编辑队列', 'Datasource manage': '数据源中心', diff --git a/pom.xml b/pom.xml index b469f38c1d..5a3590affd 100644 --- a/pom.xml +++ b/pom.xml @@ -94,6 +94,7 @@ 3.5.0 0.1.52 6.1.0.jre8 + 0.238.1 6.1.14 3.1.12 3.0.0 @@ -497,6 +498,12 @@ ${mssql.jdbc.version} + + com.facebook.presto + presto-jdbc + ${presto.jdbc.version} + + net.jcip jcip-annotations @@ -702,6 +709,7 @@ **/alert/utils/JSONUtilsTest.java **/alert/plugin/EmailAlertPluginTest.java + **/api/controller/ProcessDefinitionControllerTest.java **/api/dto/resources/filter/ResourceFilterTest.java **/api/dto/resources/visitor/ResourceTreeVisitorTest.java **/api/enums/testGetEnum.java @@ -778,6 +786,7 @@ **/common/utils/TaskParametersUtilsTest.java **/common/utils/HadoopUtilsTest.java **/common/utils/HttpUtilsTest.java + 
**/common/utils/KerberosHttpClientTest.java **/common/ConstantsTest.java **/common/utils/HadoopUtils.java **/common/utils/RetryerUtilsTest.java @@ -794,7 +803,7 @@ **/remote/RemoveTaskLogRequestCommandTest.java **/remote/ResponseFutureTest.java - + **/server/log/LoggerServerTest.java **/server/entity/SQLTaskExecutionContextTest.java **/server/log/MasterLogFilterTest.java **/server/log/SensitiveDataConverterTest.java diff --git a/script/scp-hosts.sh b/script/scp-hosts.sh index 4a94cffa29..9da94ab79c 100644 --- a/script/scp-hosts.sh +++ b/script/scp-hosts.sh @@ -33,8 +33,8 @@ for workerGroup in ${workersGroup[@]} do echo $workerGroup; worker=`echo $workerGroup|awk -F':' '{print $1}'` - groupName=`echo $workerGroup|awk -F':' '{print $2}'` - workersGroupMap+=([$worker]=$groupName) + groupsName=`echo $workerGroup|awk -F':' '{print $2}'` + workersGroupMap+=([$worker]=$groupsName) done @@ -53,7 +53,7 @@ do do # if worker in workersGroupMap if [[ "${workersGroupMap[${host}]}" ]] && [[ "${dsDir}" == "conf" ]]; then - sed -i ${txt} "s#worker.group.*#worker.group=${workersGroupMap[${host}]}#g" ${dsDir}/worker.properties + sed -i ${txt} "s:.*worker.groups.*:worker.groups=${workersGroupMap[${host}]}:g" ${dsDir}/worker.properties fi echo "start to scp $dsDir to $host/$installPath" @@ -61,4 +61,4 @@ do done echo "scp dirs to $host/$installPath complete" -done \ No newline at end of file +done diff --git a/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql b/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql index 6aeafcd700..e2a08756a6 100644 --- a/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql +++ b/sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql @@ -22,4 +22,5 @@ UPDATE QRTZ_FIRED_TRIGGERS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='E UPDATE QRTZ_JOB_DETAILS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler'; UPDATE QRTZ_JOB_DETAILS SET JOB_CLASS_NAME='org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob' WHERE 
JOB_CLASS_NAME='cn.escheduler.server.quartz.ProcessScheduleJob'; UPDATE QRTZ_LOCKS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler'; -UPDATE QRTZ_SCHEDULER_STATE SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler'; \ No newline at end of file +UPDATE QRTZ_SCHEDULER_STATE SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler'; +UPDATE t_ds_user SET phone = '' WHERE phone = 'xx'; \ No newline at end of file diff --git a/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql b/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql index 6f0e145176..661ed9e827 100644 --- a/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql +++ b/sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql @@ -23,4 +23,5 @@ UPDATE t_ds_process_instance instance SET `worker_group`=IFNULL((SELECT name fro UPDATE t_ds_task_instance instance SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default'); UPDATE t_ds_schedules schedule SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE schedule.worker_group=CONCAT(id,'')),'default'); UPDATE t_ds_command command SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); -UPDATE t_ds_error_command command SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); \ No newline at end of file +UPDATE t_ds_error_command command SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); +UPDATE t_ds_user SET phone = '' WHERE phone = 'xx'; \ No newline at end of file diff --git a/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql b/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql index fba03152ee..a748eae6cf 100644 --- a/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql +++ 
b/sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql @@ -21,4 +21,5 @@ UPDATE t_ds_process_instance instance SET worker_group=COALESCE((SELECT name fro UPDATE t_ds_task_instance instance SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default'); UPDATE t_ds_schedules schedule SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE schedule.worker_group=CONCAT(id,'')),'default'); UPDATE t_ds_command command SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); -UPDATE t_ds_error_command command SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); \ No newline at end of file +UPDATE t_ds_error_command command SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); +UPDATE t_ds_user SET phone = '' WHERE phone = 'xx'; \ No newline at end of file diff --git a/style/checkstyle.xml b/style/checkstyle.xml index 54188c6834..d7283abafa 100644 --- a/style/checkstyle.xml +++ b/style/checkstyle.xml @@ -186,19 +186,54 @@ - + - - + + - + - + + + + + - + + + + + + + + + + + + @@ -233,32 +268,19 @@ - - - - - - + + + - - - - - - - - - diff --git a/style/intellij-java-code-style.xml b/style/intellij-java-code-style.xml index ae333b1c3d..8e0cd2f720 100644 --- a/style/intellij-java-code-style.xml +++ b/style/intellij-java-code-style.xml @@ -37,8 +37,24 @@