diff --git a/.asf.yaml b/.asf.yaml deleted file mode 100644 index fa71a46552..0000000000 --- a/.asf.yaml +++ /dev/null @@ -1,10 +0,0 @@ -staging: - profile: ~ - whoami: dev - foo: trigger - -publish: - whoami: dev - -github: - description: “Dolphin Scheduler is a distributed and easy-to-extend visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.(分布式易扩展的可视化工作流任务调度)” \ No newline at end of file diff --git a/.github/workflows/ci_e2e.yml b/.github/workflows/ci_e2e.yml index fe818d0668..924ef114ef 100644 --- a/.github/workflows/ci_e2e.yml +++ b/.github/workflows/ci_e2e.yml @@ -49,12 +49,13 @@ jobs: - name: Docker Run run: | VERSION=`cat $(pwd)/pom.xml| grep "SNAPSHOT" | awk -F "-SNAPSHOT" '{print $1}' | awk -F ">" '{print $2}'` - docker run -dit -e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -p 8888:8888 dolphinscheduler:$VERSION all + mkdir -p /tmp/logs + docker run -dit -e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -v /tmp/logs:/opt/dolphinscheduler/logs -p 8888:8888 dolphinscheduler:$VERSION all - name: Check Server Status run: sh ./dockerfile/hooks/check - name: Prepare e2e env run: | - sudo apt-get install -y libxss1 libappindicator1 libindicator7 xvfb unzip + sudo apt-get install -y libxss1 libappindicator1 libindicator7 xvfb unzip libgbm1 wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb sudo dpkg -i google-chrome*.deb sudo apt-get install -f -y @@ -65,7 +66,10 @@ jobs: - name: Run e2e Test run: cd ./e2e && mvn -B clean test - name: Collect logs - run: | - mkdir -p ${LOG_DIR} - docker logs dolphinscheduler > ${LOG_DIR}/dolphinscheduler.txt - continue-on-error: true + if: failure() + uses: actions/upload-artifact@v1 + with: + name: dslogs + path: /tmp/logs + + diff --git a/.github/workflows/ci_ut.yml b/.github/workflows/ci_ut.yml index 12f7c04ed6..1c2952b440 100644 --- a/.github/workflows/ci_ut.yml +++ b/.github/workflows/ci_ut.yml @@ -15,7 +15,11 @@ # limitations under the License. # -on: ["pull_request", "push"] +on: + pull_request: + push: + branches: + - dev env: DOCKER_DIR: ./docker LOG_DIR: /tmp/dolphinscheduler @@ -48,19 +52,18 @@ jobs: uses: actions/setup-java@v1 with: java-version: 1.8 + - name: Git fetch unshallow + run: | + git fetch --unshallow + git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" + git fetch origin - name: Compile run: | export MAVEN_OPTS='-Dmaven.repo.local=.m2/repository -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Xmx3g' mvn test -B -Dmaven.test.skip=false - name: Upload coverage report to codecov - if: github.event_name == 'pull_request' run: | CODECOV_TOKEN="09c2663f-b091-4258-8a47-c981827eb29a" bash <(curl -s https://codecov.io/bash) - - name: Git fetch unshallow - run: | - git fetch --unshallow - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - git fetch origin - name: Run SonarCloud Analysis run: > mvn verify --batch-mode @@ -80,4 +83,4 @@ jobs: mkdir -p ${LOG_DIR} cd ${DOCKER_DIR} docker-compose logs db > ${LOG_DIR}/db.txt - continue-on-error: true + continue-on-error: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 7cf1d4d7db..6dd99201a9 100644 --- a/.gitignore +++ b/.gitignore @@ -145,6 +145,6 @@ dolphinscheduler-ui/dist/js/home/index.78a5d12.js.map dolphinscheduler-ui/dist/js/login/index.291b8e3.js dolphinscheduler-ui/dist/js/login/index.291b8e3.js.map dolphinscheduler-ui/dist/lib/external/ -dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue /dolphinscheduler-dao/src/main/resources/dao/data_source.properties +!/zookeeper_data/ diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..20be3f8273 --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1 @@ +-Xmx1024m -XX:MaxMetaspaceSize=256m diff --git a/.mvn/wrapper/MavenWrapperDownloader.java b/.mvn/wrapper/MavenWrapperDownloader.java new file mode 100644 index 0000000000..b901097f2d --- /dev/null +++ b/.mvn/wrapper/MavenWrapperDownloader.java @@ -0,0 +1,117 @@ +/* + * Copyright 2007-present the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import java.net.*; +import java.io.*; +import java.nio.channels.*; +import java.util.Properties; + +public class MavenWrapperDownloader { + + private static final String WRAPPER_VERSION = "0.5.6"; + /** + * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. + */ + private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" + + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; + + /** + * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to + * use instead of the default one. + */ + private static final String MAVEN_WRAPPER_PROPERTIES_PATH = + ".mvn/wrapper/maven-wrapper.properties"; + + /** + * Path where the maven-wrapper.jar will be saved to. + */ + private static final String MAVEN_WRAPPER_JAR_PATH = + ".mvn/wrapper/maven-wrapper.jar"; + + /** + * Name of the property which should be used to override the default download url for the wrapper. + */ + private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; + + public static void main(String args[]) { + System.out.println("- Downloader started"); + File baseDirectory = new File(args[0]); + System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); + + // If the maven-wrapper.properties exists, read it and check if it contains a custom + // wrapperUrl parameter. + File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); + String url = DEFAULT_DOWNLOAD_URL; + if(mavenWrapperPropertyFile.exists()) { + FileInputStream mavenWrapperPropertyFileInputStream = null; + try { + mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); + Properties mavenWrapperProperties = new Properties(); + mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); + url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); + } catch (IOException e) { + System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); + } finally { + try { + if(mavenWrapperPropertyFileInputStream != null) { + mavenWrapperPropertyFileInputStream.close(); + } + } catch (IOException e) { + // Ignore ... + } + } + } + System.out.println("- Downloading from: " + url); + + File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); + if(!outputFile.getParentFile().exists()) { + if(!outputFile.getParentFile().mkdirs()) { + System.out.println( + "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); + } + } + System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); + try { + downloadFileFromURL(url, outputFile); + System.out.println("Done"); + System.exit(0); + } catch (Throwable e) { + System.out.println("- Error downloading"); + e.printStackTrace(); + System.exit(1); + } + } + + private static void downloadFileFromURL(String urlString, File destination) throws Exception { + if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { + String username = System.getenv("MVNW_USERNAME"); + char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); + Authenticator.setDefault(new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + return new PasswordAuthentication(username, password); + } + }); + } + URL website = new URL(urlString); + ReadableByteChannel rbc; + rbc = Channels.newChannel(website.openStream()); + FileOutputStream fos = new FileOutputStream(destination); + fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); + fos.close(); + rbc.close(); + } + +} diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000000..642d572ce9 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,2 @@ +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip +wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8ed9aac897..e02ed113c4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,35 +1,53 @@ -* First from the remote repository *https://github.com/apache/incubator-dolphinscheduler.git* fork code to your own repository -* there are three branches in the remote repository currently: - * master normal delivery branch - After the stable version is released, the code for the stable version branch is merged into the master branch. +# Development - * dev daily development branch - The daily development branch, the newly submitted code can pull requests to this branch. +Start by forking the dolphinscheduler GitHub repository, make changes in a branch and then send a pull request. +## Set up your dolphinscheduler GitHub Repository -* Clone your own warehouse to your local +There are three branches in the remote repository currently: + - `master` : normal delivery branch. After the stable version is released, the code for the stable version branch is merged into the master branch. + + - `dev` : daily development branch. The daily development branch, the newly submitted code can pull requests to this branch. + + - `x.x.x-release` : the stable release version. - `git clone https://github.com/apache/incubator-dolphinscheduler.git` +So, you should fork the `dev` branch. -* Add remote repository address, named upstream +After forking the [dolphinscheduler upstream source repository](https://github.com/apache/incubator-dolphinscheduler/fork) to your personal repository, you can set your personal development environment. - `git remote add upstream https://github.com/apache/incubator-dolphinscheduler.git` +```sh +$ cd +$ git clone < your personal forked dolphinscheduler repo> +$ cd incubator-dolphinscheduler +``` -* View repository: +## Set git remote as ``upstream`` - `git remote -v` +Add remote repository address, named upstream -> There will be two repositories at this time: origin (your own warehouse) and upstream (remote repository) +```sh +git remote add upstream https://github.com/apache/incubator-dolphinscheduler.git +``` -* Get/update remote repository code (already the latest code, skip it) +View repository: - `git fetch upstream` +```sh +git remote -v +``` +There will be two repositories at this time: origin (your own warehouse) and upstream (remote repository) -* Synchronize remote repository code to local repository +Get/update remote repository code (already the latest code, skip it). + +```sh +git fetch upstream ``` + +Synchronize remote repository code to local repository + +```sh git checkout origin/dev git merge --no-ff upstream/dev ``` @@ -41,24 +59,46 @@ git checkout -b dev-1.0 upstream/dev-1.0 git push --set-upstream origin dev1.0 ``` -* After modifying the code locally, submit it to your own repository: +## Create your feature branch +Before making code changes, make sure you create a separate branch for them. + +```sh +$ git checkout -b +``` + +## Commit changes +After modifying the code locally, submit it to your own repository: + +```sh + +git commit -m 'information about your feature' +``` + +## Push to the branch + + +Push your locally committed changes to the remote origin (your fork). -`git commit -m 'test commit'` -`git push` +``` +$ git push origin +``` + +## Create a pull request -* Submit changes to the remote repository +After submitting changes to your remote repository, you should click on the new pull request On the following github page. -* On the github page, click on the new pull request.

- - + +

+ + +Select the modified local branch and the branch to merge past to create a pull request. -* Select the modified local branch and the branch to merge past to create a pull request.

- - + +

-* Next, the administrator is responsible for **merging** to complete the pull request +Next, the administrator is responsible for **merging** to complete the pull request. diff --git a/README.md b/README.md index ebd620efee..84f9ccfa66 100644 --- a/README.md +++ b/README.md @@ -80,7 +80,7 @@ Welcome to participate in contributing, please refer to the process of submittin ### How to Build ```bash -mvn clean install -Prelease +./mvnw clean install -Prelease ``` Artifact: diff --git a/README_zh_CN.md b/README_zh_CN.md index 6a4adc8daa..2c8aa11bf8 100644 --- a/README_zh_CN.md +++ b/README_zh_CN.md @@ -77,7 +77,7 @@ DolphinScheduler的工作计划: + + + alert.type + EMAIL + alert type is EMAIL/SMS + + + + alert.template + html + alter msg template, default is html template + + + + mail.protocol + SMTP + + + + + mail.server.host + xxx.xxx.com + + + + + mail.server.port + 25 + + int + + + + + + mail.sender + admin + + + + + mail.user + admin + + + + + mail.passwd + 000000 + + PASSWORD + + password + + + + + + mail.smtp.starttls.enable + true + + boolean + + + + + + mail.smtp.ssl.enable + true + + boolean + + + + + + mail.smtp.ssl.trust + xxx.xxx.com + + + + + + enterprise.wechat.enable + false + + + value-list + + + true + + + + false + + + + 1 + + + + + enterprise.wechat.corp.id + wechatId + + + + + enterprise.wechat.secret + secret + + + + + enterprise.wechat.agent.id + agentId + + + + + enterprise.wechat.users + wechatUsers + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml new file mode 100644 index 0000000000..766c0f477d --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-application-api.xml @@ -0,0 +1,87 @@ + + + + server.port + 12345 + + server port + + + int + + + + server.servlet.session.timeout + 7200 + + int + + + + + + server.servlet.context-path + /dolphinscheduler/ + + + + + spring.servlet.multipart.max-file-size + 1024 + + MB + int + + + + + + spring.servlet.multipart.max-request-size + 1024 + + MB + int + + + + + + server.jetty.max-http-post-size + 5000000 + + int + + + + + + spring.messages.encoding + UTF-8 + + + + spring.messages.basename + i18n/messages + + + + security.authentication.type + PASSWORD + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml new file mode 100644 index 0000000000..439e21188a --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-common.xml @@ -0,0 +1,158 @@ + + + + resource.storage.type + Choose Resource Upload Startup Type + + Resource upload startup type : HDFS,S3,NONE + + NONE + + value-list + + + HDFS + + + + S3 + + + + NONE + + + + 1 + + + + + resource.upload.path + /dolphinscheduler + + resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended + + + + + data.basedir.path + /tmp/dolphinscheduler + + user data local directory path, please make sure the directory exists and have read write permissions + + + + + + hadoop.security.authentication.startup.state + false + + value-list + + + true + + + + false + + + + 1 + + whether kerberos starts + + + java.security.krb5.conf.path + /opt/krb5.conf + + java.security.krb5.conf path + + + + + login.user.keytab.username + hdfs-mycluster@ESZ.COM + + LoginUserFromKeytab user + + + + + login.user.keytab.path + /opt/hdfs.headless.keytab + + LoginUserFromKeytab path + + + + + resource.view.suffixs + txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties + + + + hdfs.root.user + hdfs + + Users who have permission to create directories under the HDFS root path + + + + + fs.defaultFS + hdfs://mycluster:8020 + + HA or single namenode, + If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory, + support s3,for example : s3a://dolphinscheduler + + + + + fs.s3a.endpoint + http://host:9010 + + s3 need,s3 endpoint + + + + + fs.s3a.access.key + A3DXS30FO22544RE + + s3 need,s3 access key + + + + + fs.s3a.secret.key + OloCLq3n+8+sdPHUhJ21XrSxTC+JK + + s3 need,s3 secret key + + + + + kerberos.expire.time + 7 + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml new file mode 100644 index 0000000000..6e50a1b649 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-datasource.xml @@ -0,0 +1,467 @@ + + + + spring.datasource.initialSize + 5 + + Init connection number + + + int + + + + + spring.datasource.minIdle + 5 + + Min connection number + + + int + + + + + spring.datasource.maxActive + 50 + + Max connection number + + + int + + + + + spring.datasource.maxWait + 60000 + + Max wait time for get a connection in milliseconds. + If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. + If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. + + + int + + + + + spring.datasource.timeBetweenEvictionRunsMillis + 60000 + + Milliseconds for check to close free connections + + + int + + + + + spring.datasource.timeBetweenConnectErrorMillis + 60000 + + The Destroy thread detects the connection interval and closes the physical connection in milliseconds + if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. + + + int + + + + + spring.datasource.minEvictableIdleTimeMillis + 300000 + + The longest time a connection remains idle without being evicted, in milliseconds + + + int + + + + + spring.datasource.validationQuery + SELECT 1 + + The SQL used to check whether the connection is valid requires a query statement. + If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. + + + + + spring.datasource.validationQueryTimeout + 3 + + int + + + Check whether the connection is valid for timeout, in seconds + + + + + spring.datasource.testWhileIdle + true + + boolean + + + When applying for a connection, + if it is detected that the connection is idle longer than time Between Eviction Runs Millis, + validation Query is performed to check whether the connection is valid + + + + + spring.datasource.testOnBorrow + true + + boolean + + + Execute validation to check if the connection is valid when applying for a connection + + + + + spring.datasource.testOnReturn + false + + boolean + + + Execute validation to check if the connection is valid when the connection is returned + + + + + spring.datasource.defaultAutoCommit + true + + boolean + + + + + + + spring.datasource.keepAlive + false + + boolean + + + + + + + + spring.datasource.poolPreparedStatements + true + + boolean + + + Open PSCache, specify count PSCache for every connection + + + + + spring.datasource.maxPoolPreparedStatementPerConnectionSize + 20 + + int + + + + + + spring.datasource.spring.datasource.filters + stat,wall,log4j + + + + + spring.datasource.connectionProperties + druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 + + + + + + mybatis-plus.mapper-locations + classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml + + + + + mybatis-plus.typeEnumsPackage + org.apache.dolphinscheduler.*.enums + + + + + mybatis-plus.typeAliasesPackage + org.apache.dolphinscheduler.dao.entity + + Entity scan, where multiple packages are separated by a comma or semicolon + + + + + mybatis-plus.global-config.db-config.id-type + AUTO + + value-list + + + AUTO + + + + INPUT + + + + ID_WORKER + + + + UUID + + + + 1 + + + Primary key type AUTO:" database ID AUTO ", + INPUT:" user INPUT ID", + ID_WORKER:" global unique ID (numeric type unique ID)", + UUID:" global unique ID UUID"; + + + + + mybatis-plus.global-config.db-config.field-strategy + NOT_NULL + + value-list + + + IGNORED + + + + NOT_NULL + + + + NOT_EMPTY + + + + 1 + + + Field policy IGNORED:" ignore judgment ", + NOT_NULL:" not NULL judgment "), + NOT_EMPTY:" not NULL judgment" + + + + + mybatis-plus.global-config.db-config.column-underline + true + + boolean + + + + + + mybatis-plus.global-config.db-config.logic-delete-value + 1 + + int + + + + + + mybatis-plus.global-config.db-config.logic-not-delete-value + 0 + + int + + + + + + mybatis-plus.global-config.db-config.banner + true + + boolean + + + + + + + mybatis-plus.configuration.map-underscore-to-camel-case + true + + boolean + + + + + + mybatis-plus.configuration.cache-enabled + false + + boolean + + + + + + mybatis-plus.configuration.call-setters-on-nulls + true + + boolean + + + + + + mybatis-plus.configuration.jdbc-type-for-null + null + + + + + master.exec.threads + 100 + + int + + + + + + master.exec.task.num + 20 + + int + + + + + + master.heartbeat.interval + 10 + + int + + + + + + master.task.commit.retryTimes + 5 + + int + + + + + + master.task.commit.interval + 1000 + + int + + + + + + master.max.cpuload.avg + 100 + + int + + + + + + master.reserved.memory + 0.1 + + float + + + + + + worker.exec.threads + 100 + + int + + + + + + worker.heartbeat.interval + 10 + + int + + + + + + worker.fetch.task.num + 3 + + int + + + + + + worker.max.cpuload.avg + 100 + + int + + + + + + worker.reserved.memory + 0.1 + + float + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml new file mode 100644 index 0000000000..8e14716d05 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-env.xml @@ -0,0 +1,123 @@ + + + + dolphin.database.type + mysql + Dolphin Scheduler DataBase Type Which Is Select + Dolphin Database Type + + value-list + + + mysql + + + + postgresql + + + + 1 + + + + + + dolphin.database.host + + Dolphin Database Host + + + + + dolphin.database.port + + Dolphin Database Port + + + + + dolphin.database.username + + Dolphin Database Username + + + + + dolphin.database.password + + Dolphin Database Password + PASSWORD + + password + + + + + + dolphin.user + + Which user to install and admin dolphin scheduler + Deploy User + + + + dolphin.group + + Which user to install and admin dolphin scheduler + Deploy Group + + + + + dolphinscheduler-env-content + Dolphinscheduler Env template + This is the jinja template for dolphinscheduler.env.sh file + # +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export HADOOP_HOME=/opt/soft/hadoop +export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop +export SPARK_HOME1=/opt/soft/spark1 +export SPARK_HOME2=/opt/soft/spark2 +export PYTHON_HOME=/opt/soft/python +export JAVA_HOME=/opt/soft/java +export HIVE_HOME=/opt/soft/hive +export FLINK_HOME=/opt/soft/flink + + content + false + false + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml new file mode 100644 index 0000000000..c8eec047fc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-master.xml @@ -0,0 +1,88 @@ + + + + master.exec.threads + 100 + + int + + master execute thread num + + + + master.exec.task.num + 20 + + int + + master execute task number in parallel + + + + master.heartbeat.interval + 10 + + int + + master heartbeat interval + + + + master.task.commit.retryTimes + 5 + + int + + master commit task retry times + + + + master.task.commit.interval + 1000 + + int + + master commit task interval + + + + master.max.cpuload.avg + 100 + + int + + only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 + + + + master.reserved.memory + 0.3 + only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. + + + + + master.listen.port + 5678 + + int + + master listen port + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml new file mode 100644 index 0000000000..7a0c68b051 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-quartz.xml @@ -0,0 +1,126 @@ + + + + org.quartz.scheduler.instanceName + DolphinScheduler + + + + + org.quartz.scheduler.instanceId + AUTO + + + + org.quartz.scheduler.makeSchedulerThreadDaemon + true + + boolean + + + + + org.quartz.jobStore.useProperties + false + + boolean + + + + + org.quartz.threadPool.class + org.quartz.simpl.SimpleThreadPool + + + + org.quartz.threadPool.makeThreadsDaemons + true + + boolean + + + + + org.quartz.threadPool.threadCount + 25 + + int + + + + + org.quartz.threadPool.threadPriority + 5 + + int + + + + + org.quartz.jobStore.class + org.quartz.impl.jdbcjobstore.JobStoreTX + + + + org.quartz.jobStore.tablePrefix + QRTZ_ + + + + org.quartz.jobStore.isClustered + true + + boolean + + + + + org.quartz.jobStore.misfireThreshold + 60000 + + int + + + + + org.quartz.jobStore.clusterCheckinInterval + 5000 + + int + + + + + org.quartz.jobStore.acquireTriggersWithinLock + true + + boolean + + + + + org.quartz.jobStore.dataSource + myDs + + + + org.quartz.dataSource.myDs.connectionProvider.class + org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml new file mode 100644 index 0000000000..97beade1bc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-worker.xml @@ -0,0 +1,76 @@ + + + + worker.exec.threads + 100 + + int + + worker execute thread num + + + + worker.heartbeat.interval + 10 + + int + + worker heartbeat interval + + + + worker.fetch.task.num + 3 + + int + + submit the number of tasks at a time + + + + worker.max.cpuload.avg + 100 + + int + + only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 + + + + worker.reserved.memory + 0.3 + only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G. + + + + + worker.listen.port + 1234 + + int + + worker listen port + + + + worker.group + default + default worker group + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml new file mode 100644 index 0000000000..5882162254 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/configuration/dolphin-zookeeper.xml @@ -0,0 +1,84 @@ + + + + dolphinscheduler.queue.impl + zookeeper + + Task queue implementation, default "zookeeper" + + + + + zookeeper.dolphinscheduler.root + /dolphinscheduler + + dolphinscheduler root directory + + + + + zookeeper.session.timeout + 300 + + int + + + + + + + zookeeper.connection.timeout + 300 + + int + + + + + + + zookeeper.retry.base.sleep + 100 + + int + + + + + + + zookeeper.retry.max.sleep + 30000 + + int + + + + + + + zookeeper.retry.maxtime + 5 + + int + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml new file mode 100644 index 0000000000..a559085f03 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/metainfo.xml @@ -0,0 +1,137 @@ + + + + 2.0 + + + DOLPHIN + Dolphin Scheduler + 分布式易扩展的可视化DAG工作流任务调度系统 + 1.3.0 + + + DOLPHIN_MASTER + DS Master + MASTER + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_LOGGER + DS Logger + SLAVE + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_WORKER + DS Worker + SLAVE + 1+ + + + DOLPHIN/DOLPHIN_LOGGER + host + + true + + + + + + PYTHON + 600 + + + + + DOLPHIN_ALERT + DS Alert + SLAVE + 1 + + + PYTHON + 600 + + + + + DOLPHIN_API + DS_Api + SLAVE + 1 + + + PYTHON + 600 + + + + + + ZOOKEEPER + + + + + any + + + apache-dolphinscheduler-incubating-1.3.0* + + + + + + + dolphin-alert + dolphin-app-api + dolphin-app-dao + dolphin-common + dolphin-env + dolphin-quartz + + + + + theme.json + true + + + + quicklinks + + + quicklinks.json + true + + + + + diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py new file mode 100644 index 0000000000..87cc7b453b --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/alerts/alert_dolphin_scheduler_status.py @@ -0,0 +1,124 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import socket +import urllib2 +import os +import logging +import ambari_simplejson as json +from resource_management.libraries.script.script import Script +import sys +reload(sys) +sys.setdefaultencoding('utf-8') + +logger = logging.getLogger('ambari_alerts') + +config = Script.get_config() + + +def get_tokens(): + """ + Returns a tuple of tokens in the format {{site/property}} that will be used + to build the dictionary passed into execute + + :rtype tuple + """ + +def get_info(url, connection_timeout): + response = None + + try: + response = urllib2.urlopen(url, timeout=connection_timeout) + json_data = response.read() + return json_data + finally: + if response is not None: + try: + response.close() + except: + pass + + +def execute(configurations={}, parameters={}, host_name=None): + """ + Returns a tuple containing the result code and a pre-formatted result label + + Keyword arguments: + configurations : a mapping of configuration key to value + parameters : a mapping of script parameter key to value + host_name : the name of this host where the alert is running + + :type configurations dict + :type parameters dict + :type host_name str + """ + + alert_name = parameters['alertName'] + + dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" + + pid = "0" + + + from resource_management.core import sudo + + is_running = True + pid_file_path = "" + if alert_name == 'DOLPHIN_MASTER': + pid_file_path = dolphin_pidfile_dir + "/master-server.pid" + elif alert_name == 'DOLPHIN_WORKER': + pid_file_path = dolphin_pidfile_dir + "/worker-server.pid" + elif alert_name == 'DOLPHIN_ALERT': + pid_file_path = dolphin_pidfile_dir + "/alert-server.pid" + elif alert_name == 'DOLPHIN_LOGGER': + pid_file_path = dolphin_pidfile_dir + "/logger-server.pid" + elif alert_name == 'DOLPHIN_API': + pid_file_path = dolphin_pidfile_dir + "/api-server.pid" + + if not pid_file_path or not os.path.isfile(pid_file_path): + is_running = False + + try: + pid = int(sudo.read_file(pid_file_path)) + except: + is_running = False + + try: + # Kill will not actually kill the process + # From the doc: + # If sig is 0, then no signal is sent, but error checking is still + # performed; this can be used to check for the existence of a + # process ID or process group ID. + sudo.kill(pid, 0) + except OSError: + is_running = False + + if host_name is None: + host_name = socket.getfqdn() + + if not is_running: + result_code = "CRITICAL" + else: + result_code = "OK" + + label = "The comment {0} of DOLPHIN_SCHEDULER on {1} is {2}".format(alert_name, host_name, result_code) + + return ((result_code, [label])) + +if __name__ == "__main__": + pass diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py new file mode 100644 index 0000000000..62255a3432 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_alert_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinAlertService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/alert-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/alert-server.pid` | grep `cat {dolphin_pidfile_dir}/alert-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start alert-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop alert-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "alert-server.pid") + + +if __name__ == "__main__": + DolphinAlertService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py new file mode 100644 index 0000000000..bdc18fb602 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_api_service.py @@ -0,0 +1,70 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinApiService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + + #init + init_cmd=format("sh " + params.dolphin_home + "/script/create-dolphinscheduler.sh") + Execute(init_cmd, user=params.dolphin_user) + + #upgrade + upgrade_cmd=format("sh " + params.dolphin_home + "/script/upgrade-dolphinscheduler.sh") + Execute(upgrade_cmd, user=params.dolphin_user) + + no_op_test = format("ls {dolphin_pidfile_dir}/api-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/api-server.pid` | grep `cat {dolphin_pidfile_dir}/api-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start api-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop api-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "api-server.pid") + + +if __name__ == "__main__": + DolphinApiService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py new file mode 100644 index 0000000000..1661d76c75 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_env.py @@ -0,0 +1,123 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * + + +def dolphin_env(): + import params + + Directory(params.dolphin_pidfile_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_log_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_conf_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + Directory(params.dolphin_common_map['data.basedir.path'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + + File(format(params.dolphin_env_path), + mode=0777, + content=InlineTemplate(params.dolphin_env_content), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), + mode=0755, + content=Template("dolphin-daemon.sh.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/master.properties"), + mode=0755, + content=Template("master.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/worker.properties"), + mode=0755, + content=Template("worker.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_conf_dir + "/alert.properties"), + mode=0755, + content=Template("alert.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/datasource.properties"), + mode=0755, + content=Template("datasource.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/application-api.properties"), + mode=0755, + content=Template("application-api.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/common.properties"), + mode=0755, + content=Template("common.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/quartz.properties"), + mode=0755, + content=Template("quartz.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/zookeeper.properties"), + mode=0755, + content=Template("zookeeper.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py new file mode 100644 index 0000000000..f1c19bd66f --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_logger_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinLoggerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/logger-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/logger-server.pid` | grep `cat {dolphin_pidfile_dir}/logger-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start logger-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop logger-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "logger-server.pid") + + +if __name__ == "__main__": + DolphinLoggerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py new file mode 100644 index 0000000000..6ee7ecfcf3 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_master_service.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinMasterService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/master-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/master-server.pid` | grep `cat {dolphin_pidfile_dir}/master-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start master-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop master-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "master-server.pid") + + +if __name__ == "__main__": + DolphinMasterService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py new file mode 100644 index 0000000000..2d145ee730 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/dolphin_worker_service.py @@ -0,0 +1,60 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinWorkerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/worker-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/worker-server.pid` | grep `cat {dolphin_pidfile_dir}/worker-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start worker-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop worker-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "worker-server.pid") + + +if __name__ == "__main__": + DolphinWorkerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py new file mode 100644 index 0000000000..b09b2589f4 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/params.py @@ -0,0 +1,154 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + + +import sys +from resource_management import * +from resource_management.core.logger import Logger +from resource_management.libraries.functions import default + +Logger.initialize_logger() +reload(sys) +sys.setdefaultencoding('utf-8') + +# server configurations +config = Script.get_config() + +# conf_dir = "/etc/" +dolphin_home = "/opt/soft/dolphinscheduler" +dolphin_conf_dir = dolphin_home + "/conf" +dolphin_log_dir = dolphin_home + "/logs" +dolphin_bin_dir = dolphin_home + "/bin" +dolphin_lib_jars = dolphin_home + "/lib/*" +dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" + +rmHosts = default("/clusterHostInfo/rm_host", []) + +# dolphin-env +dolphin_env_map = {} +dolphin_env_map.update(config['configurations']['dolphin-env']) + +# which user to install and admin dolphin scheduler +dolphin_user = dolphin_env_map['dolphin.user'] +dolphin_group = dolphin_env_map['dolphin.group'] + +# .dolphinscheduler_env.sh +dolphin_env_path = dolphin_conf_dir + '/env/dolphinscheduler_env.sh' +dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] + +# database config +dolphin_database_config = {} +dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] +dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] +dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] +if 'mysql' == dolphin_database_config['dolphin_database_type']: + dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:mysql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8' +else: + dolphin_database_config['dolphin_database_driver'] = 'org.postgresql.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.PostgreSQLDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:postgresql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler' + + + + + +# application-alert.properties +dolphin_alert_map = {} +wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' +wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret' +wechat_team_send_msg = '{\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}' +wechat_user_send_msg = '{\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}' + +dolphin_alert_config_map = config['configurations']['dolphin-alert'] + +if dolphin_alert_config_map['enterprise.wechat.enable']: + dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url + dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url + dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg + dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg + +dolphin_alert_map.update(dolphin_alert_config_map) + + + +# application-api.properties +dolphin_app_api_map = {} +dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) + + +# common.properties +dolphin_common_map = {} + +if 'yarn-site' in config['configurations'] and \ + 'yarn.resourcemanager.webapp.address' in config['configurations']['yarn-site']: + yarn_resourcemanager_webapp_address = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'] + yarn_application_status_address = 'http://' + yarn_resourcemanager_webapp_address + '/ws/v1/cluster/apps/%s' + dolphin_common_map['yarn.application.status.address'] = yarn_application_status_address + +rmHosts = default("/clusterHostInfo/rm_host", []) +if len(rmHosts) > 1: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = ','.join(rmHosts) +else: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = '' + +dolphin_common_map_tmp = config['configurations']['dolphin-common'] +data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] +process_exec_basepath = data_basedir_path + '/exec' +data_download_basedir_path = data_basedir_path + '/download' +dolphin_common_map['process.exec.basepath'] = process_exec_basepath +dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path +dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path +dolphin_common_map.update(config['configurations']['dolphin-common']) + +# datasource.properties +dolphin_datasource_map = {} +dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' +dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] +dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] +dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] +dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] +dolphin_datasource_map.update(config['configurations']['dolphin-datasource']) + +# master.properties +dolphin_master_map = config['configurations']['dolphin-master'] + +# quartz.properties +dolphin_quartz_map = {} +dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass'] +dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) + +# worker.properties +dolphin_worker_map = config['configurations']['dolphin-worker'] + +# zookeeper.properties +dolphin_zookeeper_map={} +zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) +if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: + clientPort = config['configurations']['zoo.cfg']['clientPort'] + zookeeperPort = ":" + clientPort + "," + dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort +dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper']) + + + diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py new file mode 100644 index 0000000000..0e12f69932 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/service_check.py @@ -0,0 +1,31 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * +from resource_management.libraries.functions import get_unique_id_and_date + +class ServiceCheck(Script): + def service_check(self, env): + import params + #env.set_params(params) + + # Execute(format("which pika_server")) + +if __name__ == "__main__": + ServiceCheck().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py new file mode 100644 index 0000000000..24b2c8b1bc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/scripts/status_params.py @@ -0,0 +1,23 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from resource_management import * + +config = Script.get_config() + +dolphin_run_dir = "/opt/soft/run/dolphinscheduler/" diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 new file mode 100644 index 0000000000..73840b8c18 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/alert.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_alert_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 new file mode 100644 index 0000000000..70118003b9 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/application-api.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_app_api_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 new file mode 100644 index 0000000000..2220c4effa --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/common.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_common_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 new file mode 100644 index 0000000000..40aed83543 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/datasource.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_datasource_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 new file mode 100644 index 0000000000..0802b74750 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/dolphin-daemon.sh.j2 @@ -0,0 +1,116 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +usage="Usage: dolphinscheduler-daemon.sh (start|stop) " + +# if no args specified, show usage +if [ $# -le 1 ]; then + echo $usage + exit 1 +fi + +startStop=$1 +shift +command=$1 +shift + +echo "Begin $startStop $command......" + +BIN_DIR=`dirname $0` +BIN_DIR=`cd "$BIN_DIR"; pwd` +DOLPHINSCHEDULER_HOME=$BIN_DIR/.. + +export HOSTNAME=`hostname` + +DOLPHINSCHEDULER_LIB_JARS={{dolphin_lib_jars}} + +DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms4g -Xss512k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" +STOP_TIMEOUT=5 + +log={{dolphin_log_dir}}/dolphinscheduler-$command-$HOSTNAME.out +pid={{dolphin_pidfile_dir}}/$command.pid + +cd $DOLPHINSCHEDULER_HOME + +if [ "$command" = "api-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api" + CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer +elif [ "$command" = "master-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.master.MasterServer +elif [ "$command" = "worker-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer +elif [ "$command" = "alert-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml" + CLASS=org.apache.dolphinscheduler.alert.AlertServer +elif [ "$command" = "logger-server" ]; then + CLASS=org.apache.dolphinscheduler.server.log.LoggerServer +else + echo "Error: No command named \`$command' was found." + exit 1 +fi + +case $startStop in + (start) + + if [ -f $pid ]; then + if kill -0 `cat $pid` > /dev/null 2>&1; then + echo $command running as process `cat $pid`. Stop it first. + exit 1 + fi + fi + + echo starting $command, logging to $log + + exec_command="$LOG_FILE $DOLPHINSCHEDULER_OPTS -classpath {{dolphin_conf_dir}}:{{dolphin_lib_jars}} $CLASS" + + echo "nohup java $exec_command > $log 2>&1 < /dev/null &" + nohup java $exec_command > $log 2>&1 < /dev/null & + echo $! > $pid + ;; + + (stop) + + if [ -f $pid ]; then + TARGET_PID=`cat $pid` + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo stopping $command + kill $TARGET_PID + sleep $STOP_TIMEOUT + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9" + kill -9 $TARGET_PID + fi + else + echo no $command to stop + fi + rm -f $pid + else + echo no $command to stop + fi + ;; + + (*) + echo $usage + exit 1 + ;; + +esac + +echo "End $startStop $command." \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 new file mode 100644 index 0000000000..d9b85e14cf --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/master.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_master_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 new file mode 100644 index 0000000000..e027a263b5 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/quartz.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_quartz_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 new file mode 100644 index 0000000000..a008b74084 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/worker.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_worker_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 new file mode 100644 index 0000000000..9eb14eaef3 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/package/templates/zookeeper.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_zookeeper_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json new file mode 100755 index 0000000000..8753004fef --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/quicklinks/quicklinks.json @@ -0,0 +1,26 @@ +{ + "name": "default", + "description": "default quick links configuration", + "configuration": { + "protocol": + { + "type":"http" + }, + + "links": [ + { + "name": "dolphin-application-ui", + "label": "DolphinApplication UI", + "requires_user_name": "false", + "component_name": "DOLPHIN_API", + "url": "%@://%@:%@/dolphinscheduler/ui/view/login/index.html", + "port":{ + "http_property": "server.port", + "http_default_port": "12345", + "regex": "^(\\d+)$", + "site": "dolphin-application-api" + } + } + ] + } +} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json new file mode 100644 index 0000000000..953e2323f8 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.0/themes/theme.json @@ -0,0 +1,661 @@ +{ + "name": "default", + "description": "Default theme for Dolphin Scheduler service", + "configuration": { + "layouts": [ + { + "name": "default", + "tabs": [ + { + "name": "settings", + "display-name": "Settings", + "layout": { + "tab-rows": "3", + "tab-columns": "3", + "sections": [ + { + "name": "dolphin-env-config", + "display-name": "Dolphin Env Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "2", + "subsections": [ + { + "name": "env-row1-col1", + "display-name": "Deploy User Info", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "env-row1-col2", + "display-name": "System Env Optimization", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dolphin-database-config", + "display-name": "Database Config", + "row-index": "1", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "database-row1-col1", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col2", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col3", + "row-index": "0", + "column-index": "2", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dynamic-config", + "row-index": "2", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "dynamic-row1-col1", + "display-name": "Resource FS Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col2", + "display-name": "Kerberos Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col3", + "display-name": "Wechat Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + } + ] + } + } + ] + } + ], + "placement": { + "configuration-layout": "default", + "configs": [ + { + "config": "dolphin-env/dolphin.database.type", + "subsection-name": "database-row1-col1" + }, + { + "config": "dolphin-env/dolphin.database.host", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.port", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.username", + "subsection-name": "database-row1-col3" + }, + { + "config": "dolphin-env/dolphin.database.password", + "subsection-name": "database-row1-col3" + }, + { + "config": "dolphin-env/dolphin.user", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphin.group", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "subsection-name": "env-row1-col2" + }, + { + "config": "dolphin-common/resource.storage.type", + "subsection-name": "dynamic-row1-col1" + }, + { + "config": "dolphin-common/resource.upload.path", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/hdfs.root.user", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.defaultFS", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.access.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "subsection-name": "dynamic-row1-col2" + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.username", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/kerberos.expire.time", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "subsection-name": "dynamic-row1-col3" + }, + { + "config": "dolphin-alert/enterprise.wechat.corp.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + } + ] + }, + "widgets": [ + { + "config": "dolphin-env/dolphin.database.type", + "widget": { + "type": "combo" + } + }, + { + "config": "dolphin-env/dolphin.database.host", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.port", + "widget": { + "type": "text-field", + "units": [ + { + "unit-name": "int" + } + ] + } + }, + { + "config": "dolphin-env/dolphin.database.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.password", + "widget": { + "type": "password" + } + }, + { + "config": "dolphin-env/dolphin.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.group", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "widget": { + "type": "text-area" + } + }, + { + "config": "dolphin-common/resource.storage.type", + "widget": { + "type": "combo" + } + }, + { + "config": "dolphin-common/resource.upload.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/hdfs.root.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.defaultFS", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.access.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "widget": { + "type": "toggle" + } + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/kerberos.expire.time", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "widget": { + "type": "toggle" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.corp.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "widget": { + "type": "text-field" + } + } + ] + } +} diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/alerts.json b/ambari_plugin/common-services/DOLPHIN/2.0.0/alerts.json new file mode 100644 index 0000000000..385c5d5599 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/alerts.json @@ -0,0 +1,164 @@ +{ + "DOLPHIN": { + "service": [], + "DOLPHIN_API": [ + { + "name": "dolphin_api_port_check", + "label": "dolphin_api_port_check", + "description": "dolphin_api_port_check.", + "interval": 10, + "scope": "ANY", + "source": { + "type": "PORT", + "uri": "{{dolphin-application-api/server.port}}", + "default_port": 12345, + "reporting": { + "ok": { + "text": "TCP OK - {0:.3f}s response on port {1}" + }, + "warning": { + "text": "TCP OK - {0:.3f}s response on port {1}", + "value": 1.5 + }, + "critical": { + "text": "Connection failed: {0} to {1}:{2}", + "value": 5.0 + } + } + } + } + ], + "DOLPHIN_MASTER": [ + { + "name": "DOLPHIN_MASTER_CHECK", + "label": "check dolphin scheduler master status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_MASTER", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ], + "DOLPHIN_WORKER": [ + { + "name": "DOLPHIN_WORKER_CHECK", + "label": "check dolphin scheduler worker status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_WORKER", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ], + "DOLPHIN_ALERT": [ + { + "name": "DOLPHIN_DOLPHIN_ALERT_CHECK", + "label": "check dolphin scheduler alert status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_ALERT", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ], + "DOLPHIN_ALERT": [ + { + "name": "DOLPHIN_DOLPHIN_LOGGER_CHECK", + "label": "check dolphin scheduler alert status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_LOGGER", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ] + } +} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-alert.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-alert.xml new file mode 100644 index 0000000000..5f44a1a4c8 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-alert.xml @@ -0,0 +1,143 @@ + + + + alert.type + EMAIL + alert type is EMAIL/SMS + + + + alert.template + html + alter msg template, default is html template + + + + mail.protocol + SMTP + + + + + mail.server.host + xxx.xxx.com + + + + + mail.server.port + 25 + + int + + + + + + mail.sender + admin + + + + + mail.user + admin + + + + + mail.passwd + 000000 + + PASSWORD + + password + + + + + + mail.smtp.starttls.enable + true + + boolean + + + + + + mail.smtp.ssl.enable + true + + boolean + + + + + + mail.smtp.ssl.trust + xxx.xxx.com + + + + + + enterprise.wechat.enable + false + + + value-list + + + true + + + + false + + + + 1 + + + + + enterprise.wechat.corp.id + wechatId + + + + + enterprise.wechat.secret + secret + + + + + enterprise.wechat.agent.id + agentId + + + + + enterprise.wechat.users + wechatUsers + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-application-api.xml new file mode 100644 index 0000000000..766c0f477d --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-application-api.xml @@ -0,0 +1,87 @@ + + + + server.port + 12345 + + server port + + + int + + + + server.servlet.session.timeout + 7200 + + int + + + + + + server.servlet.context-path + /dolphinscheduler/ + + + + + spring.servlet.multipart.max-file-size + 1024 + + MB + int + + + + + + spring.servlet.multipart.max-request-size + 1024 + + MB + int + + + + + + server.jetty.max-http-post-size + 5000000 + + int + + + + + + spring.messages.encoding + UTF-8 + + + + spring.messages.basename + i18n/messages + + + + security.authentication.type + PASSWORD + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-common.xml new file mode 100644 index 0000000000..439e21188a --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-common.xml @@ -0,0 +1,158 @@ + + + + resource.storage.type + Choose Resource Upload Startup Type + + Resource upload startup type : HDFS,S3,NONE + + NONE + + value-list + + + HDFS + + + + S3 + + + + NONE + + + + 1 + + + + + resource.upload.path + /dolphinscheduler + + resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended + + + + + data.basedir.path + /tmp/dolphinscheduler + + user data local directory path, please make sure the directory exists and have read write permissions + + + + + + hadoop.security.authentication.startup.state + false + + value-list + + + true + + + + false + + + + 1 + + whether kerberos starts + + + java.security.krb5.conf.path + /opt/krb5.conf + + java.security.krb5.conf path + + + + + login.user.keytab.username + hdfs-mycluster@ESZ.COM + + LoginUserFromKeytab user + + + + + login.user.keytab.path + /opt/hdfs.headless.keytab + + LoginUserFromKeytab path + + + + + resource.view.suffixs + txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties + + + + hdfs.root.user + hdfs + + Users who have permission to create directories under the HDFS root path + + + + + fs.defaultFS + hdfs://mycluster:8020 + + HA or single namenode, + If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory, + support s3,for example : s3a://dolphinscheduler + + + + + fs.s3a.endpoint + http://host:9010 + + s3 need,s3 endpoint + + + + + fs.s3a.access.key + A3DXS30FO22544RE + + s3 need,s3 access key + + + + + fs.s3a.secret.key + OloCLq3n+8+sdPHUhJ21XrSxTC+JK + + s3 need,s3 secret key + + + + + kerberos.expire.time + 7 + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-datasource.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-datasource.xml new file mode 100644 index 0000000000..6e50a1b649 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-datasource.xml @@ -0,0 +1,467 @@ + + + + spring.datasource.initialSize + 5 + + Init connection number + + + int + + + + + spring.datasource.minIdle + 5 + + Min connection number + + + int + + + + + spring.datasource.maxActive + 50 + + Max connection number + + + int + + + + + spring.datasource.maxWait + 60000 + + Max wait time for get a connection in milliseconds. + If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. + If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. + + + int + + + + + spring.datasource.timeBetweenEvictionRunsMillis + 60000 + + Milliseconds for check to close free connections + + + int + + + + + spring.datasource.timeBetweenConnectErrorMillis + 60000 + + The Destroy thread detects the connection interval and closes the physical connection in milliseconds + if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. + + + int + + + + + spring.datasource.minEvictableIdleTimeMillis + 300000 + + The longest time a connection remains idle without being evicted, in milliseconds + + + int + + + + + spring.datasource.validationQuery + SELECT 1 + + The SQL used to check whether the connection is valid requires a query statement. + If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. + + + + + spring.datasource.validationQueryTimeout + 3 + + int + + + Check whether the connection is valid for timeout, in seconds + + + + + spring.datasource.testWhileIdle + true + + boolean + + + When applying for a connection, + if it is detected that the connection is idle longer than time Between Eviction Runs Millis, + validation Query is performed to check whether the connection is valid + + + + + spring.datasource.testOnBorrow + true + + boolean + + + Execute validation to check if the connection is valid when applying for a connection + + + + + spring.datasource.testOnReturn + false + + boolean + + + Execute validation to check if the connection is valid when the connection is returned + + + + + spring.datasource.defaultAutoCommit + true + + boolean + + + + + + + spring.datasource.keepAlive + false + + boolean + + + + + + + + spring.datasource.poolPreparedStatements + true + + boolean + + + Open PSCache, specify count PSCache for every connection + + + + + spring.datasource.maxPoolPreparedStatementPerConnectionSize + 20 + + int + + + + + + spring.datasource.spring.datasource.filters + stat,wall,log4j + + + + + spring.datasource.connectionProperties + druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 + + + + + + mybatis-plus.mapper-locations + classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml + + + + + mybatis-plus.typeEnumsPackage + org.apache.dolphinscheduler.*.enums + + + + + mybatis-plus.typeAliasesPackage + org.apache.dolphinscheduler.dao.entity + + Entity scan, where multiple packages are separated by a comma or semicolon + + + + + mybatis-plus.global-config.db-config.id-type + AUTO + + value-list + + + AUTO + + + + INPUT + + + + ID_WORKER + + + + UUID + + + + 1 + + + Primary key type AUTO:" database ID AUTO ", + INPUT:" user INPUT ID", + ID_WORKER:" global unique ID (numeric type unique ID)", + UUID:" global unique ID UUID"; + + + + + mybatis-plus.global-config.db-config.field-strategy + NOT_NULL + + value-list + + + IGNORED + + + + NOT_NULL + + + + NOT_EMPTY + + + + 1 + + + Field policy IGNORED:" ignore judgment ", + NOT_NULL:" not NULL judgment "), + NOT_EMPTY:" not NULL judgment" + + + + + mybatis-plus.global-config.db-config.column-underline + true + + boolean + + + + + + mybatis-plus.global-config.db-config.logic-delete-value + 1 + + int + + + + + + mybatis-plus.global-config.db-config.logic-not-delete-value + 0 + + int + + + + + + mybatis-plus.global-config.db-config.banner + true + + boolean + + + + + + + mybatis-plus.configuration.map-underscore-to-camel-case + true + + boolean + + + + + + mybatis-plus.configuration.cache-enabled + false + + boolean + + + + + + mybatis-plus.configuration.call-setters-on-nulls + true + + boolean + + + + + + mybatis-plus.configuration.jdbc-type-for-null + null + + + + + master.exec.threads + 100 + + int + + + + + + master.exec.task.num + 20 + + int + + + + + + master.heartbeat.interval + 10 + + int + + + + + + master.task.commit.retryTimes + 5 + + int + + + + + + master.task.commit.interval + 1000 + + int + + + + + + master.max.cpuload.avg + 100 + + int + + + + + + master.reserved.memory + 0.1 + + float + + + + + + worker.exec.threads + 100 + + int + + + + + + worker.heartbeat.interval + 10 + + int + + + + + + worker.fetch.task.num + 3 + + int + + + + + + worker.max.cpuload.avg + 100 + + int + + + + + + worker.reserved.memory + 0.1 + + float + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-env.xml new file mode 100644 index 0000000000..8e14716d05 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-env.xml @@ -0,0 +1,123 @@ + + + + dolphin.database.type + mysql + Dolphin Scheduler DataBase Type Which Is Select + Dolphin Database Type + + value-list + + + mysql + + + + postgresql + + + + 1 + + + + + + dolphin.database.host + + Dolphin Database Host + + + + + dolphin.database.port + + Dolphin Database Port + + + + + dolphin.database.username + + Dolphin Database Username + + + + + dolphin.database.password + + Dolphin Database Password + PASSWORD + + password + + + + + + dolphin.user + + Which user to install and admin dolphin scheduler + Deploy User + + + + dolphin.group + + Which user to install and admin dolphin scheduler + Deploy Group + + + + + dolphinscheduler-env-content + Dolphinscheduler Env template + This is the jinja template for dolphinscheduler.env.sh file + # +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export HADOOP_HOME=/opt/soft/hadoop +export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop +export SPARK_HOME1=/opt/soft/spark1 +export SPARK_HOME2=/opt/soft/spark2 +export PYTHON_HOME=/opt/soft/python +export JAVA_HOME=/opt/soft/java +export HIVE_HOME=/opt/soft/hive +export FLINK_HOME=/opt/soft/flink + + content + false + false + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-master.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-master.xml new file mode 100644 index 0000000000..c8eec047fc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-master.xml @@ -0,0 +1,88 @@ + + + + master.exec.threads + 100 + + int + + master execute thread num + + + + master.exec.task.num + 20 + + int + + master execute task number in parallel + + + + master.heartbeat.interval + 10 + + int + + master heartbeat interval + + + + master.task.commit.retryTimes + 5 + + int + + master commit task retry times + + + + master.task.commit.interval + 1000 + + int + + master commit task interval + + + + master.max.cpuload.avg + 100 + + int + + only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 + + + + master.reserved.memory + 0.3 + only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. + + + + + master.listen.port + 5678 + + int + + master listen port + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-quartz.xml new file mode 100644 index 0000000000..7a0c68b051 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-quartz.xml @@ -0,0 +1,126 @@ + + + + org.quartz.scheduler.instanceName + DolphinScheduler + + + + + org.quartz.scheduler.instanceId + AUTO + + + + org.quartz.scheduler.makeSchedulerThreadDaemon + true + + boolean + + + + + org.quartz.jobStore.useProperties + false + + boolean + + + + + org.quartz.threadPool.class + org.quartz.simpl.SimpleThreadPool + + + + org.quartz.threadPool.makeThreadsDaemons + true + + boolean + + + + + org.quartz.threadPool.threadCount + 25 + + int + + + + + org.quartz.threadPool.threadPriority + 5 + + int + + + + + org.quartz.jobStore.class + org.quartz.impl.jdbcjobstore.JobStoreTX + + + + org.quartz.jobStore.tablePrefix + QRTZ_ + + + + org.quartz.jobStore.isClustered + true + + boolean + + + + + org.quartz.jobStore.misfireThreshold + 60000 + + int + + + + + org.quartz.jobStore.clusterCheckinInterval + 5000 + + int + + + + + org.quartz.jobStore.acquireTriggersWithinLock + true + + boolean + + + + + org.quartz.jobStore.dataSource + myDs + + + + org.quartz.dataSource.myDs.connectionProvider.class + org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-worker.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-worker.xml new file mode 100644 index 0000000000..97beade1bc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-worker.xml @@ -0,0 +1,76 @@ + + + + worker.exec.threads + 100 + + int + + worker execute thread num + + + + worker.heartbeat.interval + 10 + + int + + worker heartbeat interval + + + + worker.fetch.task.num + 3 + + int + + submit the number of tasks at a time + + + + worker.max.cpuload.avg + 100 + + int + + only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 + + + + worker.reserved.memory + 0.3 + only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G. + + + + + worker.listen.port + 1234 + + int + + worker listen port + + + + worker.group + default + default worker group + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-zookeeper.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-zookeeper.xml new file mode 100644 index 0000000000..5882162254 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/configuration/dolphin-zookeeper.xml @@ -0,0 +1,84 @@ + + + + dolphinscheduler.queue.impl + zookeeper + + Task queue implementation, default "zookeeper" + + + + + zookeeper.dolphinscheduler.root + /dolphinscheduler + + dolphinscheduler root directory + + + + + zookeeper.session.timeout + 300 + + int + + + + + + + zookeeper.connection.timeout + 300 + + int + + + + + + + zookeeper.retry.base.sleep + 100 + + int + + + + + + + zookeeper.retry.max.sleep + 30000 + + int + + + + + + + zookeeper.retry.maxtime + 5 + + int + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/2.0.0/metainfo.xml new file mode 100644 index 0000000000..b3c14e33cb --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/metainfo.xml @@ -0,0 +1,137 @@ + + + + 2.0 + + + DOLPHIN + Dolphin Scheduler + 分布式易扩展的可视化DAG工作流任务调度系统 + 2.0.0 + + + DOLPHIN_MASTER + DS Master + MASTER + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_LOGGER + DS Logger + SLAVE + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_WORKER + DS Worker + SLAVE + 1+ + + + DOLPHIN/DOLPHIN_LOGGER + host + + true + + + + + + PYTHON + 600 + + + + + DOLPHIN_ALERT + DS Alert + SLAVE + 1 + + + PYTHON + 600 + + + + + DOLPHIN_API + DS_Api + SLAVE + 1 + + + PYTHON + 600 + + + + + + ZOOKEEPER + + + + + any + + + apache-dolphinscheduler-incubating-1.2.1* + + + + + + + dolphin-alert + dolphin-app-api + dolphin-app-dao + dolphin-common + dolphin-env + dolphin-quartz + + + + + theme.json + true + + + + quicklinks + + + quicklinks.json + true + + + + + diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py new file mode 100644 index 0000000000..87cc7b453b --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/alerts/alert_dolphin_scheduler_status.py @@ -0,0 +1,124 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import socket +import urllib2 +import os +import logging +import ambari_simplejson as json +from resource_management.libraries.script.script import Script +import sys +reload(sys) +sys.setdefaultencoding('utf-8') + +logger = logging.getLogger('ambari_alerts') + +config = Script.get_config() + + +def get_tokens(): + """ + Returns a tuple of tokens in the format {{site/property}} that will be used + to build the dictionary passed into execute + + :rtype tuple + """ + +def get_info(url, connection_timeout): + response = None + + try: + response = urllib2.urlopen(url, timeout=connection_timeout) + json_data = response.read() + return json_data + finally: + if response is not None: + try: + response.close() + except: + pass + + +def execute(configurations={}, parameters={}, host_name=None): + """ + Returns a tuple containing the result code and a pre-formatted result label + + Keyword arguments: + configurations : a mapping of configuration key to value + parameters : a mapping of script parameter key to value + host_name : the name of this host where the alert is running + + :type configurations dict + :type parameters dict + :type host_name str + """ + + alert_name = parameters['alertName'] + + dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" + + pid = "0" + + + from resource_management.core import sudo + + is_running = True + pid_file_path = "" + if alert_name == 'DOLPHIN_MASTER': + pid_file_path = dolphin_pidfile_dir + "/master-server.pid" + elif alert_name == 'DOLPHIN_WORKER': + pid_file_path = dolphin_pidfile_dir + "/worker-server.pid" + elif alert_name == 'DOLPHIN_ALERT': + pid_file_path = dolphin_pidfile_dir + "/alert-server.pid" + elif alert_name == 'DOLPHIN_LOGGER': + pid_file_path = dolphin_pidfile_dir + "/logger-server.pid" + elif alert_name == 'DOLPHIN_API': + pid_file_path = dolphin_pidfile_dir + "/api-server.pid" + + if not pid_file_path or not os.path.isfile(pid_file_path): + is_running = False + + try: + pid = int(sudo.read_file(pid_file_path)) + except: + is_running = False + + try: + # Kill will not actually kill the process + # From the doc: + # If sig is 0, then no signal is sent, but error checking is still + # performed; this can be used to check for the existence of a + # process ID or process group ID. + sudo.kill(pid, 0) + except OSError: + is_running = False + + if host_name is None: + host_name = socket.getfqdn() + + if not is_running: + result_code = "CRITICAL" + else: + result_code = "OK" + + label = "The comment {0} of DOLPHIN_SCHEDULER on {1} is {2}".format(alert_name, host_name, result_code) + + return ((result_code, [label])) + +if __name__ == "__main__": + pass diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_alert_service.py new file mode 100644 index 0000000000..62255a3432 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_alert_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinAlertService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/alert-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/alert-server.pid` | grep `cat {dolphin_pidfile_dir}/alert-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start alert-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop alert-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "alert-server.pid") + + +if __name__ == "__main__": + DolphinAlertService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_api_service.py new file mode 100644 index 0000000000..bdc18fb602 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_api_service.py @@ -0,0 +1,70 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinApiService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + + #init + init_cmd=format("sh " + params.dolphin_home + "/script/create-dolphinscheduler.sh") + Execute(init_cmd, user=params.dolphin_user) + + #upgrade + upgrade_cmd=format("sh " + params.dolphin_home + "/script/upgrade-dolphinscheduler.sh") + Execute(upgrade_cmd, user=params.dolphin_user) + + no_op_test = format("ls {dolphin_pidfile_dir}/api-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/api-server.pid` | grep `cat {dolphin_pidfile_dir}/api-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start api-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop api-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "api-server.pid") + + +if __name__ == "__main__": + DolphinApiService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_env.py new file mode 100644 index 0000000000..1661d76c75 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_env.py @@ -0,0 +1,123 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * + + +def dolphin_env(): + import params + + Directory(params.dolphin_pidfile_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_log_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_conf_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + Directory(params.dolphin_common_map['data.basedir.path'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + + File(format(params.dolphin_env_path), + mode=0777, + content=InlineTemplate(params.dolphin_env_content), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), + mode=0755, + content=Template("dolphin-daemon.sh.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/master.properties"), + mode=0755, + content=Template("master.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/worker.properties"), + mode=0755, + content=Template("worker.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_conf_dir + "/alert.properties"), + mode=0755, + content=Template("alert.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/datasource.properties"), + mode=0755, + content=Template("datasource.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/application-api.properties"), + mode=0755, + content=Template("application-api.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/common.properties"), + mode=0755, + content=Template("common.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/quartz.properties"), + mode=0755, + content=Template("quartz.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/zookeeper.properties"), + mode=0755, + content=Template("zookeeper.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_logger_service.py new file mode 100644 index 0000000000..f1c19bd66f --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_logger_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinLoggerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/logger-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/logger-server.pid` | grep `cat {dolphin_pidfile_dir}/logger-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start logger-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop logger-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "logger-server.pid") + + +if __name__ == "__main__": + DolphinLoggerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_master_service.py new file mode 100644 index 0000000000..6ee7ecfcf3 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_master_service.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinMasterService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/master-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/master-server.pid` | grep `cat {dolphin_pidfile_dir}/master-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start master-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop master-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "master-server.pid") + + +if __name__ == "__main__": + DolphinMasterService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_worker_service.py new file mode 100644 index 0000000000..2d145ee730 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/dolphin_worker_service.py @@ -0,0 +1,60 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinWorkerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/worker-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/worker-server.pid` | grep `cat {dolphin_pidfile_dir}/worker-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start worker-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop worker-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "worker-server.pid") + + +if __name__ == "__main__": + DolphinWorkerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/params.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/params.py new file mode 100644 index 0000000000..b09b2589f4 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/params.py @@ -0,0 +1,154 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + + +import sys +from resource_management import * +from resource_management.core.logger import Logger +from resource_management.libraries.functions import default + +Logger.initialize_logger() +reload(sys) +sys.setdefaultencoding('utf-8') + +# server configurations +config = Script.get_config() + +# conf_dir = "/etc/" +dolphin_home = "/opt/soft/dolphinscheduler" +dolphin_conf_dir = dolphin_home + "/conf" +dolphin_log_dir = dolphin_home + "/logs" +dolphin_bin_dir = dolphin_home + "/bin" +dolphin_lib_jars = dolphin_home + "/lib/*" +dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" + +rmHosts = default("/clusterHostInfo/rm_host", []) + +# dolphin-env +dolphin_env_map = {} +dolphin_env_map.update(config['configurations']['dolphin-env']) + +# which user to install and admin dolphin scheduler +dolphin_user = dolphin_env_map['dolphin.user'] +dolphin_group = dolphin_env_map['dolphin.group'] + +# .dolphinscheduler_env.sh +dolphin_env_path = dolphin_conf_dir + '/env/dolphinscheduler_env.sh' +dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] + +# database config +dolphin_database_config = {} +dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] +dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] +dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] +if 'mysql' == dolphin_database_config['dolphin_database_type']: + dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:mysql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8' +else: + dolphin_database_config['dolphin_database_driver'] = 'org.postgresql.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.PostgreSQLDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:postgresql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler' + + + + + +# application-alert.properties +dolphin_alert_map = {} +wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' +wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret' +wechat_team_send_msg = '{\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}' +wechat_user_send_msg = '{\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}' + +dolphin_alert_config_map = config['configurations']['dolphin-alert'] + +if dolphin_alert_config_map['enterprise.wechat.enable']: + dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url + dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url + dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg + dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg + +dolphin_alert_map.update(dolphin_alert_config_map) + + + +# application-api.properties +dolphin_app_api_map = {} +dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) + + +# common.properties +dolphin_common_map = {} + +if 'yarn-site' in config['configurations'] and \ + 'yarn.resourcemanager.webapp.address' in config['configurations']['yarn-site']: + yarn_resourcemanager_webapp_address = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'] + yarn_application_status_address = 'http://' + yarn_resourcemanager_webapp_address + '/ws/v1/cluster/apps/%s' + dolphin_common_map['yarn.application.status.address'] = yarn_application_status_address + +rmHosts = default("/clusterHostInfo/rm_host", []) +if len(rmHosts) > 1: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = ','.join(rmHosts) +else: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = '' + +dolphin_common_map_tmp = config['configurations']['dolphin-common'] +data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] +process_exec_basepath = data_basedir_path + '/exec' +data_download_basedir_path = data_basedir_path + '/download' +dolphin_common_map['process.exec.basepath'] = process_exec_basepath +dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path +dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path +dolphin_common_map.update(config['configurations']['dolphin-common']) + +# datasource.properties +dolphin_datasource_map = {} +dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' +dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] +dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] +dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] +dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] +dolphin_datasource_map.update(config['configurations']['dolphin-datasource']) + +# master.properties +dolphin_master_map = config['configurations']['dolphin-master'] + +# quartz.properties +dolphin_quartz_map = {} +dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass'] +dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) + +# worker.properties +dolphin_worker_map = config['configurations']['dolphin-worker'] + +# zookeeper.properties +dolphin_zookeeper_map={} +zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) +if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: + clientPort = config['configurations']['zoo.cfg']['clientPort'] + zookeeperPort = ":" + clientPort + "," + dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort +dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper']) + + + diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/service_check.py new file mode 100644 index 0000000000..0e12f69932 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/service_check.py @@ -0,0 +1,31 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * +from resource_management.libraries.functions import get_unique_id_and_date + +class ServiceCheck(Script): + def service_check(self, env): + import params + #env.set_params(params) + + # Execute(format("which pika_server")) + +if __name__ == "__main__": + ServiceCheck().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/status_params.py new file mode 100644 index 0000000000..24b2c8b1bc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/scripts/status_params.py @@ -0,0 +1,23 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from resource_management import * + +config = Script.get_config() + +dolphin_run_dir = "/opt/soft/run/dolphinscheduler/" diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/alert.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/alert.properties.j2 new file mode 100644 index 0000000000..73840b8c18 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/alert.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_alert_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/application-api.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/application-api.properties.j2 new file mode 100644 index 0000000000..70118003b9 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/application-api.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_app_api_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/common.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/common.properties.j2 new file mode 100644 index 0000000000..2220c4effa --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/common.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_common_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/datasource.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/datasource.properties.j2 new file mode 100644 index 0000000000..40aed83543 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/datasource.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_datasource_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/dolphin-daemon.sh.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/dolphin-daemon.sh.j2 new file mode 100644 index 0000000000..0802b74750 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/dolphin-daemon.sh.j2 @@ -0,0 +1,116 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +usage="Usage: dolphinscheduler-daemon.sh (start|stop) " + +# if no args specified, show usage +if [ $# -le 1 ]; then + echo $usage + exit 1 +fi + +startStop=$1 +shift +command=$1 +shift + +echo "Begin $startStop $command......" + +BIN_DIR=`dirname $0` +BIN_DIR=`cd "$BIN_DIR"; pwd` +DOLPHINSCHEDULER_HOME=$BIN_DIR/.. + +export HOSTNAME=`hostname` + +DOLPHINSCHEDULER_LIB_JARS={{dolphin_lib_jars}} + +DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms4g -Xss512k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" +STOP_TIMEOUT=5 + +log={{dolphin_log_dir}}/dolphinscheduler-$command-$HOSTNAME.out +pid={{dolphin_pidfile_dir}}/$command.pid + +cd $DOLPHINSCHEDULER_HOME + +if [ "$command" = "api-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api" + CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer +elif [ "$command" = "master-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.master.MasterServer +elif [ "$command" = "worker-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer +elif [ "$command" = "alert-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml" + CLASS=org.apache.dolphinscheduler.alert.AlertServer +elif [ "$command" = "logger-server" ]; then + CLASS=org.apache.dolphinscheduler.server.log.LoggerServer +else + echo "Error: No command named \`$command' was found." + exit 1 +fi + +case $startStop in + (start) + + if [ -f $pid ]; then + if kill -0 `cat $pid` > /dev/null 2>&1; then + echo $command running as process `cat $pid`. Stop it first. + exit 1 + fi + fi + + echo starting $command, logging to $log + + exec_command="$LOG_FILE $DOLPHINSCHEDULER_OPTS -classpath {{dolphin_conf_dir}}:{{dolphin_lib_jars}} $CLASS" + + echo "nohup java $exec_command > $log 2>&1 < /dev/null &" + nohup java $exec_command > $log 2>&1 < /dev/null & + echo $! > $pid + ;; + + (stop) + + if [ -f $pid ]; then + TARGET_PID=`cat $pid` + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo stopping $command + kill $TARGET_PID + sleep $STOP_TIMEOUT + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9" + kill -9 $TARGET_PID + fi + else + echo no $command to stop + fi + rm -f $pid + else + echo no $command to stop + fi + ;; + + (*) + echo $usage + exit 1 + ;; + +esac + +echo "End $startStop $command." \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/master.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/master.properties.j2 new file mode 100644 index 0000000000..d9b85e14cf --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/master.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_master_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/quartz.properties.j2 new file mode 100644 index 0000000000..e027a263b5 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/quartz.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_quartz_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/worker.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/worker.properties.j2 new file mode 100644 index 0000000000..a008b74084 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/worker.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_worker_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/zookeeper.properties.j2 b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/zookeeper.properties.j2 new file mode 100644 index 0000000000..9eb14eaef3 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/package/templates/zookeeper.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_zookeeper_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/2.0.0/quicklinks/quicklinks.json new file mode 100755 index 0000000000..8753004fef --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/quicklinks/quicklinks.json @@ -0,0 +1,26 @@ +{ + "name": "default", + "description": "default quick links configuration", + "configuration": { + "protocol": + { + "type":"http" + }, + + "links": [ + { + "name": "dolphin-application-ui", + "label": "DolphinApplication UI", + "requires_user_name": "false", + "component_name": "DOLPHIN_API", + "url": "%@://%@:%@/dolphinscheduler/ui/view/login/index.html", + "port":{ + "http_property": "server.port", + "http_default_port": "12345", + "regex": "^(\\d+)$", + "site": "dolphin-application-api" + } + } + ] + } +} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/2.0.0/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/2.0.0/themes/theme.json new file mode 100644 index 0000000000..953e2323f8 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/2.0.0/themes/theme.json @@ -0,0 +1,661 @@ +{ + "name": "default", + "description": "Default theme for Dolphin Scheduler service", + "configuration": { + "layouts": [ + { + "name": "default", + "tabs": [ + { + "name": "settings", + "display-name": "Settings", + "layout": { + "tab-rows": "3", + "tab-columns": "3", + "sections": [ + { + "name": "dolphin-env-config", + "display-name": "Dolphin Env Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "2", + "subsections": [ + { + "name": "env-row1-col1", + "display-name": "Deploy User Info", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "env-row1-col2", + "display-name": "System Env Optimization", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dolphin-database-config", + "display-name": "Database Config", + "row-index": "1", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "database-row1-col1", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col2", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col3", + "row-index": "0", + "column-index": "2", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dynamic-config", + "row-index": "2", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "dynamic-row1-col1", + "display-name": "Resource FS Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col2", + "display-name": "Kerberos Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col3", + "display-name": "Wechat Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + } + ] + } + } + ] + } + ], + "placement": { + "configuration-layout": "default", + "configs": [ + { + "config": "dolphin-env/dolphin.database.type", + "subsection-name": "database-row1-col1" + }, + { + "config": "dolphin-env/dolphin.database.host", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.port", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.username", + "subsection-name": "database-row1-col3" + }, + { + "config": "dolphin-env/dolphin.database.password", + "subsection-name": "database-row1-col3" + }, + { + "config": "dolphin-env/dolphin.user", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphin.group", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "subsection-name": "env-row1-col2" + }, + { + "config": "dolphin-common/resource.storage.type", + "subsection-name": "dynamic-row1-col1" + }, + { + "config": "dolphin-common/resource.upload.path", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/hdfs.root.user", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.defaultFS", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.access.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "subsection-name": "dynamic-row1-col2" + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.username", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/kerberos.expire.time", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "subsection-name": "dynamic-row1-col3" + }, + { + "config": "dolphin-alert/enterprise.wechat.corp.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + } + ] + }, + "widgets": [ + { + "config": "dolphin-env/dolphin.database.type", + "widget": { + "type": "combo" + } + }, + { + "config": "dolphin-env/dolphin.database.host", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.port", + "widget": { + "type": "text-field", + "units": [ + { + "unit-name": "int" + } + ] + } + }, + { + "config": "dolphin-env/dolphin.database.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.password", + "widget": { + "type": "password" + } + }, + { + "config": "dolphin-env/dolphin.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.group", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "widget": { + "type": "text-area" + } + }, + { + "config": "dolphin-common/resource.storage.type", + "widget": { + "type": "combo" + } + }, + { + "config": "dolphin-common/resource.upload.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/hdfs.root.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.defaultFS", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.access.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "widget": { + "type": "toggle" + } + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/kerberos.expire.time", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "widget": { + "type": "toggle" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.corp.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "widget": { + "type": "text-field" + } + } + ] + } +} diff --git a/ambari_plugin/readme.pdf b/ambari_plugin/readme.pdf deleted file mode 100644 index 1209375701..0000000000 Binary files a/ambari_plugin/readme.pdf and /dev/null differ diff --git a/ambari_plugin/statcks/DOLPHIN/metainfo.xml b/ambari_plugin/statcks/DOLPHIN/metainfo.xml index c41db5f513..ea40cd304d 100755 --- a/ambari_plugin/statcks/DOLPHIN/metainfo.xml +++ b/ambari_plugin/statcks/DOLPHIN/metainfo.xml @@ -20,7 +20,7 @@ DOLPHIN - common-services/DOLPHIN/1.2.1 + common-services/DOLPHIN/1.3.0 \ No newline at end of file diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index de5908583c..7e9c4e57cb 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -1,3 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. version: '2' services: zookeeper: diff --git a/docker/docker-swarm/docker-compose.yml b/docker/docker-swarm/docker-compose.yml new file mode 100644 index 0000000000..20fb0cced3 --- /dev/null +++ b/docker/docker-swarm/docker-compose.yml @@ -0,0 +1,261 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +version: "3.4" + +networks: + dolphinscheduler-postgresql: + driver: bridge + dolphinscheduler-zookeeper: + driver: bridge + dolphinscheduler-api: + driver: bridge + dolphinscheduler-frontend: + driver: bridge + dolphinscheduler-alert: + driver: bridge + dolphinscheduler-master: + driver: bridge + dolphinscheduler-worker: + driver: bridge + +volumes: + dolphinscheduler-postgresql: + dolphinscheduler-zookeeper: + dolphinscheduler-api: + dolphinscheduler-frontend: + dolphinscheduler-alert: + dolphinscheduler-master: + dolphinscheduler-worker-data: + dolphinscheduler-worker-logs: + +configs: + dolphinscheduler-worker-task-env: + file: ./dolphinscheduler_env.sh + +services: + + dolphinscheduler-postgresql: + image: bitnami/postgresql:latest + container_name: dolphinscheduler-postgresql + ports: + - 5432:5432 + environment: + TZ: Asia/Shanghai + POSTGRESQL_USERNAME: root + POSTGRESQL_PASSWORD: root + POSTGRESQL_DATABASE: dolphinscheduler + healthcheck: + test: ["CMD", "pg_isready", "-U", "${POSTGRESQL_USERNAME}", "-d", "{POSTGRESQL_PASSWORD}", "-h", "localhost", "5432"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + volumes: + - dolphinscheduler-postgresql:/bitnami/postgresql + networks: + - dolphinscheduler-postgresql + + dolphinscheduler-zookeeper: + image: bitnami/zookeeper:latest + container_name: dolphinscheduler-zookeeper + ports: + - 2181:2181 + environment: + TZ: Asia/Shanghai + ALLOW_ANONYMOUS_LOGIN: "yes" + healthcheck: + test: ["CMD-SHELL", "nc -z localhost 2181"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + volumes: + - dolphinscheduler-zookeeper:/bitnami/zookeeper + networks: + - dolphinscheduler-zookeeper + + dolphinscheduler-api: + image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1 + container_name: dolphinscheduler-api + command: ["api-server"] + ports: + - 12345:12345 + environment: + TZ: Asia/Shanghai + POSTGRESQL_HOST: dolphinscheduler-postgresql + POSTGRESQL_PORT: 5432 + POSTGRESQL_USERNAME: root + POSTGRESQL_PASSWORD: root + POSTGRESQL_DATABASE: dolphinscheduler + ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181 + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:12345"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + depends_on: + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper + volumes: + - dolphinscheduler-api:/opt/dolphinscheduler/logs + networks: + - dolphinscheduler-api + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper + + dolphinscheduler-frontend: + image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1 + container_name: dolphinscheduler-frontend + command: ["frontend"] + ports: + - 8888:8888 + environment: + TZ: Asia/Shanghai + FRONTEND_API_SERVER_HOST: dolphinscheduler-api + FRONTEND_API_SERVER_PORT: 12345 + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:8888"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + depends_on: + - dolphinscheduler-api + volumes: + - dolphinscheduler-frontend:/var/log/nginx + networks: + - dolphinscheduler-frontend + - dolphinscheduler-api + + dolphinscheduler-alert: + image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1 + container_name: dolphinscheduler-alert + command: ["alert-server"] + environment: + TZ: Asia/Shanghai + XLS_FILE_PATH: "/tmp/xls" + MAIL_SERVER_HOST: "" + MAIL_SERVER_PORT: "" + MAIL_SENDER: "" + MAIL_USER: "" + MAIL_PASSWD: "" + MAIL_SMTP_STARTTLS_ENABLE: "false" + MAIL_SMTP_SSL_ENABLE: "false" + MAIL_SMTP_SSL_TRUST: "" + ENTERPRISE_WECHAT_ENABLE: "false" + ENTERPRISE_WECHAT_CORP_ID: "" + ENTERPRISE_WECHAT_SECRET: "" + ENTERPRISE_WECHAT_AGENT_ID: "" + ENTERPRISE_WECHAT_USERS: "" + POSTGRESQL_HOST: dolphinscheduler-postgresql + POSTGRESQL_PORT: 5432 + POSTGRESQL_USERNAME: root + POSTGRESQL_PASSWORD: root + POSTGRESQL_DATABASE: dolphinscheduler + healthcheck: + test: ["CMD", "/root/checkpoint.sh", "AlertServer"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + depends_on: + - dolphinscheduler-postgresql + volumes: + - dolphinscheduler-alert:/opt/dolphinscheduler/logs + networks: + - dolphinscheduler-alert + - dolphinscheduler-postgresql + + dolphinscheduler-master: + image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1 + container_name: dolphinscheduler-master + command: ["master-server"] + ports: + - 5678:5678 + environment: + TZ: Asia/Shanghai + MASTER_EXEC_THREADS: "100" + MASTER_EXEC_TASK_NUM: "20" + MASTER_HEARTBEAT_INTERVAL: "10" + MASTER_TASK_COMMIT_RETRYTIMES: "5" + MASTER_TASK_COMMIT_INTERVAL: "1000" + MASTER_MAX_CPULOAD_AVG: "100" + MASTER_RESERVED_MEMORY: "0.1" + POSTGRESQL_HOST: dolphinscheduler-postgresql + POSTGRESQL_PORT: 5432 + POSTGRESQL_USERNAME: root + POSTGRESQL_PASSWORD: root + POSTGRESQL_DATABASE: dolphinscheduler + ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181 + healthcheck: + test: ["CMD", "/root/checkpoint.sh", "MasterServer"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + depends_on: + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper + volumes: + - dolphinscheduler-master:/opt/dolphinscheduler/logs + networks: + - dolphinscheduler-master + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper + + dolphinscheduler-worker: + image: registry.cn-qingdao.aliyuncs.com/sxyj/dolphinscheduler:1.2.1 + container_name: dolphinscheduler-worker + command: ["worker-server"] + ports: + - 1234:1234 + - 50051:50051 + environment: + TZ: Asia/Shanghai + WORKER_EXEC_THREADS: "100" + WORKER_HEARTBEAT_INTERVAL: "10" + WORKER_FETCH_TASK_NUM: "3" + WORKER_MAX_CPULOAD_AVG: "100" + WORKER_RESERVED_MEMORY: "0.1" + WORKER_GROUP: "default" + DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler" + POSTGRESQL_HOST: dolphinscheduler-postgresql + POSTGRESQL_PORT: 5432 + POSTGRESQL_USERNAME: root + POSTGRESQL_PASSWORD: root + POSTGRESQL_DATABASE: dolphinscheduler + ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181 + healthcheck: + test: ["CMD", "/root/checkpoint.sh", "WorkerServer"] + interval: 30s + timeout: 5s + retries: 3 + # start_period: 30s + depends_on: + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper + volumes: + - dolphinscheduler-worker-data:/tmp/dolphinscheduler + - dolphinscheduler-worker-logs:/opt/dolphinscheduler/logs + configs: + - source: dolphinscheduler-worker-task-env + target: /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh + networks: + - dolphinscheduler-worker + - dolphinscheduler-postgresql + - dolphinscheduler-zookeeper \ No newline at end of file diff --git a/docker/docker-swarm/dolphinscheduler_env.sh b/docker/docker-swarm/dolphinscheduler_env.sh new file mode 100644 index 0000000000..790e30636e --- /dev/null +++ b/docker/docker-swarm/dolphinscheduler_env.sh @@ -0,0 +1,26 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export HADOOP_HOME=/opt/soft/hadoop +export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop +export SPARK_HOME1=/opt/soft/spark1 +export SPARK_HOME2=/opt/soft/spark2 +export PYTHON_HOME=/opt/soft/python +export JAVA_HOME=/opt/soft/java +export HIVE_HOME=/opt/soft/hive +export FLINK_HOME=/opt/soft/flink +export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/Chart.yaml b/docker/kubernetes/dolphinscheduler/Chart.yaml new file mode 100644 index 0000000000..ac989d571f --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/Chart.yaml @@ -0,0 +1,52 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +apiVersion: v2 +name: dolphinscheduler +description: Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. +home: https://dolphinscheduler.apache.org +icon: https://dolphinscheduler.apache.org/img/hlogo_colorful.svg +keywords: +- dolphinscheduler +- Scheduler +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +version: 1.0.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. +appVersion: 1.3.0 + +dependencies: +- name: postgresql + version: 8.x.x + repository: https://charts.bitnami.com/bitnami + condition: postgresql.enabled +- name: zookeeper + version: 5.x.x + repository: https://charts.bitnami.com/bitnami + condition: redis.enabled diff --git a/docker/kubernetes/dolphinscheduler/README.md b/docker/kubernetes/dolphinscheduler/README.md new file mode 100644 index 0000000000..9e6d1c6448 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/README.md @@ -0,0 +1,228 @@ +# Dolphin Scheduler + +[Dolphin Scheduler](https://dolphinscheduler.apache.org) is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. + +## Introduction +This chart bootstraps a [Dolphin Scheduler](https://dolphinscheduler.apache.org) distributed deployment on a [Kubernetes](http://kubernetes.io) cluster using the [Helm](https://helm.sh) package manager. + +## Prerequisites + +- Kubernetes 1.10+ +- PV provisioner support in the underlying infrastructure + +## Installing the Chart + +To install the chart with the release name `my-release`: + +```bash +$ git clone https://github.com/apache/incubator-dolphinscheduler.git +$ cd incubator-dolphinscheduler/kubernetes/dolphinscheduler +$ helm repo add bitnami https://charts.bitnami.com/bitnami +$ helm dependency update . +$ helm install --name dolphinscheduler . +``` +These commands deploy Dolphin Scheduler on the Kubernetes cluster in the default configuration. The [configuration](#configuration) section lists the parameters that can be configured during installation. + +> **Tip**: List all releases using `helm list` + +## Uninstalling the Chart + +To uninstall/delete the `dolphinscheduler` deployment: + +```bash +$ helm delete --purge dolphinscheduler +``` + +The command removes all the Kubernetes components associated with the chart and deletes the release. + +## Configuration + +The following tables lists the configurable parameters of the Dolphins Scheduler chart and their default values. + +| Parameter | Description | Default | +| --------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------- | +| `timezone` | World time and date for cities in all time zones | `Asia/Shanghai` | +| `image.registry` | Docker image registry for the Dolphins Scheduler | `docker.io` | +| `image.repository` | Docker image repository for the Dolphins Scheduler | `dolphinscheduler` | +| `image.tag` | Docker image version for the Dolphins Scheduler | `1.2.1` | +| `image.imagePullPolicy` | Image pull policy. One of Always, Never, IfNotPresent | `IfNotPresent` | +| `imagePullSecrets` | ImagePullSecrets is an optional list of references to secrets in the same namespace to use for pulling any of the images | `[]` | +| | | | +| `postgresql.enabled` | If not exists external PostgreSQL, by default, the Dolphins Scheduler will use a internal PostgreSQL | `true` | +| `postgresql.postgresqlUsername` | The username for internal PostgreSQL | `root` | +| `postgresql.postgresqlPassword` | The password for internal PostgreSQL | `root` | +| `postgresql.postgresqlDatabase` | The database for internal PostgreSQL | `dolphinscheduler` | +| `postgresql.persistence.enabled` | Set `postgresql.persistence.enabled` to `true` to mount a new volume for internal PostgreSQL | `false` | +| `postgresql.persistence.size` | `PersistentVolumeClaim` Size | `20Gi` | +| `postgresql.persistence.storageClass` | PostgreSQL data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `externalDatabase.host` | If exists external PostgreSQL, and set `postgresql.enable` value to false. Dolphins Scheduler's database host will use it. | `localhost` | +| `externalDatabase.port` | If exists external PostgreSQL, and set `postgresql.enable` value to false. Dolphins Scheduler's database port will use it. | `5432` | +| `externalDatabase.username` | If exists external PostgreSQL, and set `postgresql.enable` value to false. Dolphins Scheduler's database username will use it. | `root` | +| `externalDatabase.password` | If exists external PostgreSQL, and set `postgresql.enable` value to false. Dolphins Scheduler's database password will use it. | `root` | +| `externalDatabase.database` | If exists external PostgreSQL, and set `postgresql.enable` value to false. Dolphins Scheduler's database database will use it. | `dolphinscheduler` | +| | | | +| `zookeeper.enabled` | If not exists external Zookeeper, by default, the Dolphin Scheduler will use a internal Zookeeper | `true` | +| `zookeeper.taskQueue` | Specify task queue for `master` and `worker` | `zookeeper` | +| `zookeeper.persistence.enabled` | Set `zookeeper.persistence.enabled` to `true` to mount a new volume for internal Zookeeper | `false` | +| `zookeeper.persistence.size` | `PersistentVolumeClaim` Size | `20Gi` | +| `zookeeper.persistence.storageClass` | Zookeeper data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `externalZookeeper.taskQueue` | If exists external Zookeeper, and set `zookeeper.enable` value to false. Specify task queue for `master` and `worker` | `zookeeper` | +| `externalZookeeper.zookeeperQuorum` | If exists external Zookeeper, and set `zookeeper.enable` value to false. Specify Zookeeper quorum | `127.0.0.1:2181` | +| | | | +| `master.podManagementPolicy` | PodManagementPolicy controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down | `Parallel` | +| `master.replicas` | Replicas is the desired number of replicas of the given Template | `3` | +| `master.nodeSelector` | NodeSelector is a selector which must be true for the pod to fit on a node | `{}` | +| `master.tolerations` | If specified, the pod's tolerations | `{}` | +| `master.affinity` | If specified, the pod's scheduling constraints | `{}` | +| `master.configmap.MASTER_EXEC_THREADS` | Master execute thread num | `100` | +| `master.configmap.MASTER_EXEC_TASK_NUM` | Master execute task number in parallel | `20` | +| `master.configmap.MASTER_HEARTBEAT_INTERVAL` | Master heartbeat interval | `10` | +| `master.configmap.MASTER_TASK_COMMIT_RETRYTIMES` | Master commit task retry times | `5` | +| `master.configmap.MASTER_TASK_COMMIT_INTERVAL` | Master commit task interval | `1000` | +| `master.configmap.MASTER_MAX_CPULOAD_AVG` | Only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 | `100` | +| `master.configmap.MASTER_RESERVED_MEMORY` | Only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G | `0.1` | +| `master.livenessProbe.enabled` | Turn on and off liveness probe | `true` | +| `master.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `30` | +| `master.livenessProbe.periodSeconds` | How often to perform the probe | `30` | +| `master.livenessProbe.timeoutSeconds` | When the probe times out | `5` | +| `master.livenessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `master.livenessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `master.readinessProbe.enabled` | Turn on and off readiness probe | `true` | +| `master.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` | +| `master.readinessProbe.periodSeconds` | How often to perform the probe | `30` | +| `master.readinessProbe.timeoutSeconds` | When the probe times out | `5` | +| `master.readinessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `master.readinessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `master.persistentVolumeClaim.enabled` | Set `master.persistentVolumeClaim.enabled` to `true` to mount a new volume for `master` | `false` | +| `master.persistentVolumeClaim.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `master.persistentVolumeClaim.storageClassName` | `Master` logs data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `master.persistentVolumeClaim.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| | | | +| `worker.podManagementPolicy` | PodManagementPolicy controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down | `Parallel` | +| `worker.replicas` | Replicas is the desired number of replicas of the given Template | `3` | +| `worker.nodeSelector` | NodeSelector is a selector which must be true for the pod to fit on a node | `{}` | +| `worker.tolerations` | If specified, the pod's tolerations | `{}` | +| `worker.affinity` | If specified, the pod's scheduling constraints | `{}` | +| `worker.configmap.WORKER_EXEC_THREADS` | Worker execute thread num | `100` | +| `worker.configmap.WORKER_HEARTBEAT_INTERVAL` | Worker heartbeat interval | `10` | +| `worker.configmap.WORKER_FETCH_TASK_NUM` | Submit the number of tasks at a time | `3` | +| `worker.configmap.WORKER_MAX_CPULOAD_AVG` | Only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 | `100` | +| `worker.configmap.WORKER_RESERVED_MEMORY` | Only larger than reserved memory, worker server can work. default value : physical memory * 1/10, unit is G | `0.1` | +| `worker.configmap.DOLPHINSCHEDULER_DATA_BASEDIR_PATH` | User data directory path, self configuration, please make sure the directory exists and have read write permissions | `/tmp/dolphinscheduler` | +| `worker.configmap.DOLPHINSCHEDULER_ENV` | System env path, self configuration, please read `values.yaml` | `[]` | +| `worker.livenessProbe.enabled` | Turn on and off liveness probe | `true` | +| `worker.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `30` | +| `worker.livenessProbe.periodSeconds` | How often to perform the probe | `30` | +| `worker.livenessProbe.timeoutSeconds` | When the probe times out | `5` | +| `worker.livenessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `worker.livenessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `worker.readinessProbe.enabled` | Turn on and off readiness probe | `true` | +| `worker.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` | +| `worker.readinessProbe.periodSeconds` | How often to perform the probe | `30` | +| `worker.readinessProbe.timeoutSeconds` | When the probe times out | `5` | +| `worker.readinessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `worker.readinessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `worker.persistentVolumeClaim.enabled` | Set `worker.persistentVolumeClaim.enabled` to `true` to enable `persistentVolumeClaim` for `worker` | `false` | +| `worker.persistentVolumeClaim.dataPersistentVolume.enabled` | Set `worker.persistentVolumeClaim.dataPersistentVolume.enabled` to `true` to mount a data volume for `worker` | `false` | +| `worker.persistentVolumeClaim.dataPersistentVolume.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `worker.persistentVolumeClaim.dataPersistentVolume.storageClassName` | `Worker` data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `worker.persistentVolumeClaim.dataPersistentVolume.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| `worker.persistentVolumeClaim.logsPersistentVolume.enabled` | Set `worker.persistentVolumeClaim.logsPersistentVolume.enabled` to `true` to mount a logs volume for `worker` | `false` | +| `worker.persistentVolumeClaim.logsPersistentVolume.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `worker.persistentVolumeClaim.logsPersistentVolume.storageClassName` | `Worker` logs data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `worker.persistentVolumeClaim.logsPersistentVolume.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| | | | +| `alert.strategy.type` | Type of deployment. Can be "Recreate" or "RollingUpdate" | `RollingUpdate` | +| `alert.strategy.rollingUpdate.maxSurge` | The maximum number of pods that can be scheduled above the desired number of pods | `25%` | +| `alert.strategy.rollingUpdate.maxUnavailable` | The maximum number of pods that can be unavailable during the update | `25%` | +| `alert.replicas` | Replicas is the desired number of replicas of the given Template | `1` | +| `alert.nodeSelector` | NodeSelector is a selector which must be true for the pod to fit on a node | `{}` | +| `alert.tolerations` | If specified, the pod's tolerations | `{}` | +| `alert.affinity` | If specified, the pod's scheduling constraints | `{}` | +| `alert.configmap.XLS_FILE_PATH` | XLS file path | `/tmp/xls` | +| `alert.configmap.MAIL_SERVER_HOST` | Mail `SERVER HOST ` | `nil` | +| `alert.configmap.MAIL_SERVER_PORT` | Mail `SERVER PORT` | `nil` | +| `alert.configmap.MAIL_SENDER` | Mail `SENDER` | `nil` | +| `alert.configmap.MAIL_USER` | Mail `USER` | `nil` | +| `alert.configmap.MAIL_PASSWD` | Mail `PASSWORD` | `nil` | +| `alert.configmap.MAIL_SMTP_STARTTLS_ENABLE` | Mail `SMTP STARTTLS` enable | `false` | +| `alert.configmap.MAIL_SMTP_SSL_ENABLE` | Mail `SMTP SSL` enable | `false` | +| `alert.configmap.MAIL_SMTP_SSL_TRUST` | Mail `SMTP SSL TRUST` | `nil` | +| `alert.configmap.ENTERPRISE_WECHAT_ENABLE` | `Enterprise Wechat` enable | `false` | +| `alert.configmap.ENTERPRISE_WECHAT_CORP_ID` | `Enterprise Wechat` corp id | `nil` | +| `alert.configmap.ENTERPRISE_WECHAT_SECRET` | `Enterprise Wechat` secret | `nil` | +| `alert.configmap.ENTERPRISE_WECHAT_AGENT_ID` | `Enterprise Wechat` agent id | `nil` | +| `alert.configmap.ENTERPRISE_WECHAT_USERS` | `Enterprise Wechat` users | `nil` | +| `alert.livenessProbe.enabled` | Turn on and off liveness probe | `true` | +| `alert.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `30` | +| `alert.livenessProbe.periodSeconds` | How often to perform the probe | `30` | +| `alert.livenessProbe.timeoutSeconds` | When the probe times out | `5` | +| `alert.livenessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `alert.livenessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `alert.readinessProbe.enabled` | Turn on and off readiness probe | `true` | +| `alert.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` | +| `alert.readinessProbe.periodSeconds` | How often to perform the probe | `30` | +| `alert.readinessProbe.timeoutSeconds` | When the probe times out | `5` | +| `alert.readinessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `alert.readinessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `alert.persistentVolumeClaim.enabled` | Set `alert.persistentVolumeClaim.enabled` to `true` to mount a new volume for `alert` | `false` | +| `alert.persistentVolumeClaim.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `alert.persistentVolumeClaim.storageClassName` | `Alert` logs data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `alert.persistentVolumeClaim.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| | | | +| `api.strategy.type` | Type of deployment. Can be "Recreate" or "RollingUpdate" | `RollingUpdate` | +| `api.strategy.rollingUpdate.maxSurge` | The maximum number of pods that can be scheduled above the desired number of pods | `25%` | +| `api.strategy.rollingUpdate.maxUnavailable` | The maximum number of pods that can be unavailable during the update | `25%` | +| `api.replicas` | Replicas is the desired number of replicas of the given Template | `1` | +| `api.nodeSelector` | NodeSelector is a selector which must be true for the pod to fit on a node | `{}` | +| `api.tolerations` | If specified, the pod's tolerations | `{}` | +| `api.affinity` | If specified, the pod's scheduling constraints | `{}` | +| `api.livenessProbe.enabled` | Turn on and off liveness probe | `true` | +| `api.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `30` | +| `api.livenessProbe.periodSeconds` | How often to perform the probe | `30` | +| `api.livenessProbe.timeoutSeconds` | When the probe times out | `5` | +| `api.livenessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `api.livenessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `api.readinessProbe.enabled` | Turn on and off readiness probe | `true` | +| `api.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` | +| `api.readinessProbe.periodSeconds` | How often to perform the probe | `30` | +| `api.readinessProbe.timeoutSeconds` | When the probe times out | `5` | +| `api.readinessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `api.readinessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `api.persistentVolumeClaim.enabled` | Set `api.persistentVolumeClaim.enabled` to `true` to mount a new volume for `api` | `false` | +| `api.persistentVolumeClaim.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `api.persistentVolumeClaim.storageClassName` | `api` logs data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `api.persistentVolumeClaim.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| | | | +| `frontend.strategy.type` | Type of deployment. Can be "Recreate" or "RollingUpdate" | `RollingUpdate` | +| `frontend.strategy.rollingUpdate.maxSurge` | The maximum number of pods that can be scheduled above the desired number of pods | `25%` | +| `frontend.strategy.rollingUpdate.maxUnavailable` | The maximum number of pods that can be unavailable during the update | `25%` | +| `frontend.replicas` | Replicas is the desired number of replicas of the given Template | `1` | +| `frontend.nodeSelector` | NodeSelector is a selector which must be true for the pod to fit on a node | `{}` | +| `frontend.tolerations` | If specified, the pod's tolerations | `{}` | +| `frontend.affinity` | If specified, the pod's scheduling constraints | `{}` | +| `frontend.livenessProbe.enabled` | Turn on and off liveness probe | `true` | +| `frontend.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `30` | +| `frontend.livenessProbe.periodSeconds` | How often to perform the probe | `30` | +| `frontend.livenessProbe.timeoutSeconds` | When the probe times out | `5` | +| `frontend.livenessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `frontend.livenessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `frontend.readinessProbe.enabled` | Turn on and off readiness probe | `true` | +| `frontend.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` | +| `frontend.readinessProbe.periodSeconds` | How often to perform the probe | `30` | +| `frontend.readinessProbe.timeoutSeconds` | When the probe times out | `5` | +| `frontend.readinessProbe.failureThreshold` | Minimum consecutive successes for the probe | `3` | +| `frontend.readinessProbe.successThreshold` | Minimum consecutive failures for the probe | `1` | +| `frontend.persistentVolumeClaim.enabled` | Set `frontend.persistentVolumeClaim.enabled` to `true` to mount a new volume for `frontend` | `false` | +| `frontend.persistentVolumeClaim.accessModes` | `PersistentVolumeClaim` Access Modes | `[ReadWriteOnce]` | +| `frontend.persistentVolumeClaim.storageClassName` | `frontend` logs data Persistent Volume Storage Class. If set to "-", storageClassName: "", which disables dynamic provisioning | `-` | +| `frontend.persistentVolumeClaim.storage` | `PersistentVolumeClaim` Size | `20Gi` | +| | | | +| `ingress.enabled` | Enable ingress | `false` | +| `ingress.host` | Ingress host | `dolphinscheduler.org` | +| `ingress.path` | Ingress path | `/` | +| `ingress.tls.enabled` | Enable ingress tls | `false` | +| `ingress.tls.hosts` | Ingress tls hosts | `dolphinscheduler.org` | +| `ingress.tls.secretName` | Ingress tls secret name | `dolphinscheduler-tls` | + +For more information please refer to the [chart](https://github.com/apache/incubator-dolphinscheduler.git) documentation. diff --git a/docker/kubernetes/dolphinscheduler/requirements.yaml b/docker/kubernetes/dolphinscheduler/requirements.yaml new file mode 100644 index 0000000000..e219975995 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/requirements.yaml @@ -0,0 +1,25 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +dependencies: +- name: postgresql + version: 8.x.x + repository: https://charts.bitnami.com/bitnami + condition: postgresql.enabled +- name: zookeeper + version: 5.x.x + repository: https://charts.bitnami.com/bitnami + condition: redis.enabled \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/NOTES.txt b/docker/kubernetes/dolphinscheduler/templates/NOTES.txt new file mode 100644 index 0000000000..eb3a9cfc52 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/NOTES.txt @@ -0,0 +1,44 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +** Please be patient while the chart is being deployed ** + +1. Get the Dolphinscheduler URL by running: + +{{- if .Values.ingress.enabled }} + + export HOSTNAME=$(kubectl get ingress --namespace {{ .Release.Namespace }} {{ template "dolphinscheduler.fullname" . }} -o jsonpath='{.spec.rules[0].host}') + echo "Dolphinscheduler URL: http://$HOSTNAME/" + +{{- else }} + + kubectl port-forward --namespace {{ .Release.Namespace }} svc/{{ template "dolphinscheduler.fullname" . }}-frontend 8888:8888 + +{{- end }} + +2. Get the Dolphinscheduler URL by running: + +{{- if .Values.ingress.enabled }} + + export HOSTNAME=$(kubectl get ingress --namespace {{ .Release.Namespace }} {{ template "dolphinscheduler.fullname" . }} -o jsonpath='{.spec.rules[0].host}') + echo "Dolphinscheduler URL: http://$HOSTNAME/" + +{{- else }} + + kubectl port-forward --namespace {{ .Release.Namespace }} svc/{{ template "dolphinscheduler.fullname" . }}-frontend 8888:8888 + +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl b/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl new file mode 100644 index 0000000000..9ba290b771 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/_helpers.tpl @@ -0,0 +1,133 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{{/* vim: set filetype=mustache: */}} +{{/* +Expand the name of the chart. +*/}} +{{- define "dolphinscheduler.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "dolphinscheduler.fullname" -}} +{{- if .Values.fullnameOverride -}} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- $name := default .Chart.Name .Values.nameOverride -}} +{{- if contains $name .Release.Name -}} +{{- .Release.Name | trunc 63 | trimSuffix "-" -}} +{{- else -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} +{{- end -}} +{{- end -}} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "dolphinscheduler.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Common labels +*/}} +{{- define "dolphinscheduler.labels" -}} +helm.sh/chart: {{ include "dolphinscheduler.chart" . }} +{{ include "dolphinscheduler.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end -}} + +{{/* +Selector labels +*/}} +{{- define "dolphinscheduler.selectorLabels" -}} +app.kubernetes.io/name: {{ include "dolphinscheduler.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end -}} + +{{/* +Create the name of the service account to use +*/}} +{{- define "dolphinscheduler.serviceAccountName" -}} +{{- if .Values.serviceAccount.create -}} + {{ default (include "dolphinscheduler.fullname" .) .Values.serviceAccount.name }} +{{- else -}} + {{ default "default" .Values.serviceAccount.name }} +{{- end -}} +{{- end -}} + +{{/* +Create a default docker image registry. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.image.registry" -}} +{{- $registry := default "docker.io" .Values.image.registry -}} +{{- printf "%s" $registry | trunc 63 | trimSuffix "/" -}} +{{- end -}} + +{{/* +Create a default docker image repository. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.image.repository" -}} +{{- printf "%s/%s:%s" (include "dolphinscheduler.image.registry" .) .Values.image.repository .Values.image.tag -}} +{{- end -}} + +{{/* +Create a default fully qualified postgresql name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.postgresql.fullname" -}} +{{- $name := default "postgresql" .Values.postgresql.nameOverride -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified zookkeeper name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.zookeeper.fullname" -}} +{{- $name := default "zookeeper" .Values.zookeeper.nameOverride -}} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default fully qualified zookkeeper quorum. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.zookeeper.quorum" -}} +{{- $port := default "2181" (.Values.zookeeper.service.port | toString) -}} +{{- printf "%s:%s" (include "dolphinscheduler.zookeeper.fullname" .) $port | trunc 63 | trimSuffix "-" -}} +{{- end -}} + +{{/* +Create a default dolphinscheduler worker base dir. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +*/}} +{{- define "dolphinscheduler.worker.base.dir" -}} +{{- $name := default "/tmp/dolphinscheduler" .Values.worker.configmap.DOLPHINSCHEDULER_DATA_BASEDIR_PATH -}} +{{- printf "%s" $name | trunc 63 | trimSuffix "/" -}} +{{- end -}} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml new file mode 100644 index 0000000000..76daad8568 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-alert.yaml @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.alert.configmap }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-alert + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +data: + XLS_FILE_PATH: {{ .Values.alert.configmap.XLS_FILE_PATH | quote }} + MAIL_SERVER_HOST: {{ .Values.alert.configmap.MAIL_SERVER_HOST | quote }} + MAIL_SERVER_PORT: {{ .Values.alert.configmap.MAIL_SERVER_PORT | quote }} + MAIL_SENDER: {{ .Values.alert.configmap.MAIL_SENDER | quote }} + MAIL_USER: {{ .Values.alert.configmap.MAIL_USER | quote }} + MAIL_PASSWD: {{ .Values.alert.configmap.MAIL_PASSWD | quote }} + MAIL_SMTP_STARTTLS_ENABLE: {{ .Values.alert.configmap.MAIL_SMTP_STARTTLS_ENABLE | quote }} + MAIL_SMTP_SSL_ENABLE: {{ .Values.alert.configmap.MAIL_SMTP_SSL_ENABLE | quote }} + MAIL_SMTP_SSL_TRUST: {{ .Values.alert.configmap.MAIL_SMTP_SSL_TRUST | quote }} + ENTERPRISE_WECHAT_ENABLE: {{ .Values.alert.configmap.ENTERPRISE_WECHAT_ENABLE | quote }} + ENTERPRISE_WECHAT_CORP_ID: {{ .Values.alert.configmap.ENTERPRISE_WECHAT_CORP_ID | quote }} + ENTERPRISE_WECHAT_SECRET: {{ .Values.alert.configmap.ENTERPRISE_WECHAT_SECRET | quote }} + ENTERPRISE_WECHAT_AGENT_ID: {{ .Values.alert.configmap.ENTERPRISE_WECHAT_AGENT_ID | quote }} + ENTERPRISE_WECHAT_USERS: {{ .Values.alert.configmap.ENTERPRISE_WECHAT_USERS | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml new file mode 100644 index 0000000000..da82d639cb --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-master.yaml @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.master.configmap }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-master + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +data: + MASTER_EXEC_THREADS: {{ .Values.master.configmap.MASTER_EXEC_THREADS | quote }} + MASTER_EXEC_TASK_NUM: {{ .Values.master.configmap.MASTER_EXEC_TASK_NUM | quote }} + MASTER_HEARTBEAT_INTERVAL: {{ .Values.master.configmap.MASTER_HEARTBEAT_INTERVAL | quote }} + MASTER_TASK_COMMIT_RETRYTIMES: {{ .Values.master.configmap.MASTER_TASK_COMMIT_RETRYTIMES | quote }} + MASTER_TASK_COMMIT_INTERVAL: {{ .Values.master.configmap.MASTER_TASK_COMMIT_INTERVAL | quote }} + MASTER_MAX_CPULOAD_AVG: {{ .Values.master.configmap.MASTER_MAX_CPULOAD_AVG | quote }} + MASTER_RESERVED_MEMORY: {{ .Values.master.configmap.MASTER_RESERVED_MEMORY | quote }} + MASTER_LISTEN_PORT: {{ .Values.master.configmap.MASTER_LISTEN_PORT | quote }} + DOLPHINSCHEDULER_DATA_BASEDIR_PATH: {{ include "dolphinscheduler.worker.base.dir" . | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml new file mode 100644 index 0000000000..1e08b67b53 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/configmap-dolphinscheduler-worker.yaml @@ -0,0 +1,39 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.worker.configmap }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +data: + WORKER_EXEC_THREADS: {{ .Values.worker.configmap.WORKER_EXEC_THREADS | quote }} + WORKER_HEARTBEAT_INTERVAL: {{ .Values.worker.configmap.WORKER_HEARTBEAT_INTERVAL | quote }} + WORKER_FETCH_TASK_NUM: {{ .Values.worker.configmap.WORKER_FETCH_TASK_NUM | quote }} + WORKER_MAX_CPULOAD_AVG: {{ .Values.worker.configmap.WORKER_MAX_CPULOAD_AVG | quote }} + WORKER_RESERVED_MEMORY: {{ .Values.worker.configmap.WORKER_RESERVED_MEMORY | quote }} + WORKER_LISTEN_PORT: {{ .Values.worker.configmap.WORKER_LISTEN_PORT | quote }} + WORKER_GROUP: {{ .Values.worker.configmap.WORKER_GROUP | quote }} + DOLPHINSCHEDULER_DATA_BASEDIR_PATH: {{ include "dolphinscheduler.worker.base.dir" . | quote }} + dolphinscheduler_env.sh: |- + {{- range .Values.worker.configmap.DOLPHINSCHEDULER_ENV }} + {{ . }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml new file mode 100644 index 0000000000..69662e95d9 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-alert.yaml @@ -0,0 +1,234 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-alert + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: alert +spec: + replicas: {{ .Values.alert.replicas }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: alert + strategy: + type: {{ .Values.alert.strategy.type | quote }} + rollingUpdate: + maxSurge: {{ .Values.alert.strategy.rollingUpdate.maxSurge | quote }} + maxUnavailable: {{ .Values.alert.strategy.rollingUpdate.maxUnavailable | quote }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: alert + spec: + {{- if .Values.alert.affinity }} + affinity: {{- toYaml .Values.alert.affinity | nindent 8 }} + {{- end }} + {{- if .Values.alert.nodeSelector }} + nodeSelector: {{- toYaml .Values.alert.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.alert.tolerations }} + tolerations: {{- toYaml . | nindent 8 }} + {{- end }} + initContainers: + - name: init-postgresql + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + while ! nc -z ${POSTGRESQL_HOST} ${POSTGRESQL_PORT}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to postgresql." + exit 1 + fi + echo "Trying to connect to postgresql at ${POSTGRESQL_HOST}:${POSTGRESQL_PORT}. Attempt $counter." + sleep 60 + done + env: + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + containers: + - name: {{ include "dolphinscheduler.fullname" . }}-alert + image: {{ include "dolphinscheduler.image.repository" . | quote }} + args: + - "alert-server" + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: TZ + value: {{ .Values.timezone }} + - name: XLS_FILE_PATH + valueFrom: + configMapKeyRef: + key: XLS_FILE_PATH + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SERVER_HOST + valueFrom: + configMapKeyRef: + key: MAIL_SERVER_HOST + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SERVER_PORT + valueFrom: + configMapKeyRef: + key: MAIL_SERVER_PORT + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SENDER + valueFrom: + configMapKeyRef: + key: MAIL_SENDER + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_USER + valueFrom: + configMapKeyRef: + key: MAIL_USER + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_PASSWD + valueFrom: + configMapKeyRef: + key: MAIL_PASSWD + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SMTP_STARTTLS_ENABLE + valueFrom: + configMapKeyRef: + key: MAIL_SMTP_STARTTLS_ENABLE + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SMTP_SSL_ENABLE + valueFrom: + configMapKeyRef: + key: MAIL_SMTP_SSL_ENABLE + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: MAIL_SMTP_SSL_TRUST + valueFrom: + configMapKeyRef: + key: MAIL_SMTP_SSL_TRUST + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: ENTERPRISE_WECHAT_ENABLE + valueFrom: + configMapKeyRef: + key: ENTERPRISE_WECHAT_ENABLE + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: ENTERPRISE_WECHAT_CORP_ID + valueFrom: + configMapKeyRef: + key: ENTERPRISE_WECHAT_CORP_ID + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: ENTERPRISE_WECHAT_SECRET + valueFrom: + configMapKeyRef: + key: ENTERPRISE_WECHAT_SECRET + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: ENTERPRISE_WECHAT_AGENT_ID + valueFrom: + configMapKeyRef: + key: ENTERPRISE_WECHAT_AGENT_ID + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: ENTERPRISE_WECHAT_USERS + valueFrom: + configMapKeyRef: + key: ENTERPRISE_WECHAT_USERS + name: {{ include "dolphinscheduler.fullname" . }}-alert + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + - name: POSTGRESQL_USERNAME + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlUsername }} + {{- else }} + value: {{ .Values.externalDatabase.username | quote }} + {{- end }} + - name: POSTGRESQL_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ template "dolphinscheduler.postgresql.fullname" . }} + key: postgresql-password + {{- else }} + name: {{ printf "%s-%s" .Release.Name "externaldb" }} + key: db-password + {{- end }} + - name: POSTGRESQL_DATABASE + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlDatabase }} + {{- else }} + value: {{ .Values.externalDatabase.database | quote }} + {{- end }} + {{- if .Values.alert.livenessProbe.enabled }} + livenessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - AlertServer + initialDelaySeconds: {{ .Values.alert.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.alert.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.alert.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.alert.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.alert.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.alert.readinessProbe.enabled }} + readinessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - AlertServer + initialDelaySeconds: {{ .Values.alert.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.alert.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.alert.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.alert.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.alert.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: "/opt/dolphinscheduler/logs" + name: {{ include "dolphinscheduler.fullname" . }}-alert + volumes: + - name: {{ include "dolphinscheduler.fullname" . }}-alert + {{- if .Values.alert.persistentVolumeClaim.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-alert + {{- else }} + emptyDir: {} + {{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml new file mode 100644 index 0000000000..487ede0b8f --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-api.yaml @@ -0,0 +1,167 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-api + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: api +spec: + replicas: {{ .Values.api.replicas }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: api + strategy: + type: {{ .Values.api.strategy.type | quote }} + rollingUpdate: + maxSurge: {{ .Values.api.strategy.rollingUpdate.maxSurge | quote }} + maxUnavailable: {{ .Values.api.strategy.rollingUpdate.maxUnavailable | quote }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: api + spec: + {{- if .Values.api.affinity }} + affinity: {{- toYaml .Values.api.affinity | nindent 8 }} + {{- end }} + {{- if .Values.api.nodeSelector }} + nodeSelector: {{- toYaml .Values.api.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.api.tolerations }} + tolerations: {{- toYaml . | nindent 8 }} + {{- end }} + initContainers: + - name: init-postgresql + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + while ! nc -z ${POSTGRESQL_HOST} ${POSTGRESQL_PORT}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to postgresql." + exit 1 + fi + echo "Trying to connect to postgresql at ${POSTGRESQL_HOST}:${POSTGRESQL_PORT}. Attempt $counter." + sleep 60 + done + env: + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + containers: + - name: {{ include "dolphinscheduler.fullname" . }}-api + image: {{ include "dolphinscheduler.image.repository" . | quote }} + args: + - "api-server" + ports: + - containerPort: 12345 + name: tcp-port + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: TZ + value: {{ .Values.timezone }} + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + - name: POSTGRESQL_USERNAME + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlUsername }} + {{- else }} + value: {{ .Values.externalDatabase.username | quote }} + {{- end }} + - name: POSTGRESQL_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ template "dolphinscheduler.postgresql.fullname" . }} + key: postgresql-password + {{- else }} + name: {{ printf "%s-%s" .Release.Name "externaldb" }} + key: db-password + {{- end }} + - name: POSTGRESQL_DATABASE + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlDatabase }} + {{- else }} + value: {{ .Values.externalDatabase.database | quote }} + {{- end }} + - name: ZOOKEEPER_QUORUM + {{- if .Values.zookeeper.enabled }} + value: "{{ template "dolphinscheduler.zookeeper.quorum" . }}" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperQuorum }} + {{- end }} + {{- if .Values.api.livenessProbe.enabled }} + livenessProbe: + tcpSocket: + port: 12345 + initialDelaySeconds: {{ .Values.api.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.api.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.api.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.api.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.api.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.api.readinessProbe.enabled }} + readinessProbe: + tcpSocket: + port: 12345 + initialDelaySeconds: {{ .Values.api.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.api.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.api.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.api.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.api.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: "/opt/dolphinscheduler/logs" + name: {{ include "dolphinscheduler.fullname" . }}-api + volumes: + - name: {{ include "dolphinscheduler.fullname" . }}-api + {{- if .Values.api.persistentVolumeClaim.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-api + {{- else }} + emptyDir: {} + {{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-frontend.yaml b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-frontend.yaml new file mode 100644 index 0000000000..aea09f107f --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/deployment-dolphinscheduler-frontend.yaml @@ -0,0 +1,102 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-frontend + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: frontend +spec: + replicas: {{ .Values.frontend.replicas }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: frontend + strategy: + type: {{ .Values.frontend.strategy.type | quote }} + rollingUpdate: + maxSurge: {{ .Values.frontend.strategy.rollingUpdate.maxSurge | quote }} + maxUnavailable: {{ .Values.frontend.strategy.rollingUpdate.maxUnavailable | quote }} + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: frontend + spec: + {{- if .Values.frontend.affinity }} + affinity: {{- toYaml .Values.frontend.affinity | nindent 8 }} + {{- end }} + {{- if .Values.frontend.nodeSelector }} + nodeSelector: {{- toYaml .Values.frontend.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.frontend.tolerations }} + tolerations: {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: {{ include "dolphinscheduler.fullname" . }}-frontend + image: {{ include "dolphinscheduler.image.repository" . | quote }} + args: + - "frontend" + ports: + - containerPort: 8888 + name: tcp-port + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: TZ + value: {{ .Values.timezone }} + - name: FRONTEND_API_SERVER_HOST + value: '{{ include "dolphinscheduler.fullname" . }}-api' + - name: FRONTEND_API_SERVER_PORT + value: "12345" + {{- if .Values.frontend.livenessProbe.enabled }} + livenessProbe: + tcpSocket: + port: 8888 + initialDelaySeconds: {{ .Values.frontend.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.frontend.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.frontend.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.frontend.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.frontend.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.frontend.readinessProbe.enabled }} + readinessProbe: + tcpSocket: + port: 8888 + initialDelaySeconds: {{ .Values.frontend.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.frontend.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.frontend.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.frontend.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.frontend.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: "/var/log/nginx" + name: {{ include "dolphinscheduler.fullname" . }}-frontend + volumes: + - name: {{ include "dolphinscheduler.fullname" . }}-frontend + {{- if .Values.frontend.persistentVolumeClaim.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-frontend + {{- else }} + emptyDir: {} + {{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/ingress.yaml b/docker/kubernetes/dolphinscheduler/templates/ingress.yaml new file mode 100644 index 0000000000..d0f923dcf1 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/ingress.yaml @@ -0,0 +1,43 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.ingress.enabled }} +apiVersion: networking.k8s.io/v1beta1 +kind: Ingress +metadata: + name: {{ include "dolphinscheduler.fullname" . }} + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.name" . }} + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + rules: + - host: {{ .Values.ingress.host }} + http: + paths: + - path: {{ .Values.ingress.path }} + backend: + serviceName: {{ include "dolphinscheduler.fullname" . }}-frontend + servicePort: tcp-port + {{- if .Values.ingress.tls.enabled }} + tls: + hosts: + {{- range .Values.ingress.tls.hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .Values.ingress.tls.secretName }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-alert.yaml b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-alert.yaml new file mode 100644 index 0000000000..7f74cd94ae --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-alert.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.alert.persistentVolumeClaim.enabled }} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-alert + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-alert + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + accessModes: + {{- range .Values.alert.persistentVolumeClaim.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.alert.persistentVolumeClaim.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.alert.persistentVolumeClaim.storage | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-api.yaml b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-api.yaml new file mode 100644 index 0000000000..c1074cc2b1 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-api.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.api.persistentVolumeClaim.enabled }} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-api + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + accessModes: + {{- range .Values.api.persistentVolumeClaim.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.api.persistentVolumeClaim.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.api.persistentVolumeClaim.storage | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-frontend.yaml b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-frontend.yaml new file mode 100644 index 0000000000..ac9fe02a9e --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/pvc-dolphinscheduler-frontend.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.frontend.persistentVolumeClaim.enabled }} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-frontend + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + accessModes: + {{- range .Values.frontend.persistentVolumeClaim.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.frontend.persistentVolumeClaim.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.frontend.persistentVolumeClaim.storage | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/secret-external-postgresql.yaml b/docker/kubernetes/dolphinscheduler/templates/secret-external-postgresql.yaml new file mode 100644 index 0000000000..16d026afc6 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/secret-external-postgresql.yaml @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if not .Values.postgresql.enabled }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ printf "%s-%s" .Release.Name "externaldb" }} + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-postgresql + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +type: Opaque +data: + db-password: {{ .Values.externalDatabase.password | b64enc | quote }} +{{- end }} \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml new file mode 100644 index 0000000000..b142fac4fe --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-master.yaml @@ -0,0 +1,257 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-master + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: master +spec: + podManagementPolicy: {{ .Values.master.podManagementPolicy }} + replicas: {{ .Values.master.replicas }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: master + serviceName: {{ template "dolphinscheduler.fullname" . }}-master-headless + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: master + spec: + {{- if .Values.master.affinity }} + affinity: {{- toYaml .Values.master.affinity | nindent 8 }} + {{- end }} + {{- if .Values.master.nodeSelector }} + nodeSelector: {{- toYaml .Values.master.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.master.tolerations }} + tolerations: {{- toYaml . | nindent 8 }} + {{- end }} + initContainers: + - name: init-zookeeper + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + echo "${ZOOKEEPER_QUORUM}" | awk -F ',' 'BEGIN{ i=1 }{ while( i <= NF ){ print $i; i++ } }' | while read line; do + while ! nc -z ${line%:*} ${line#*:}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to zookeeper." + exit 1 + fi + echo "Trying to connect to zookeeper at ${line}. Attempt $counter." + sleep 60 + done + done + env: + - name: ZOOKEEPER_QUORUM + {{- if .Values.zookeeper.enabled }} + value: "{{ template "dolphinscheduler.zookeeper.quorum" . }}" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperQuorum }} + {{- end }} + - name: init-postgresql + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + while ! nc -z ${POSTGRESQL_HOST} ${POSTGRESQL_PORT}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to postgresql." + exit 1 + fi + echo "Trying to connect to postgresql at ${POSTGRESQL_HOST}:${POSTGRESQL_PORT}. Attempt $counter." + sleep 60 + done + env: + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + containers: + - name: {{ include "dolphinscheduler.fullname" . }}-master + image: {{ include "dolphinscheduler.image.repository" . | quote }} + args: + - "master-server" + ports: + - containerPort: {{ .Values.master.configmap.MASTER_LISTEN_PORT }} + name: "master-port" + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: TZ + value: {{ .Values.timezone }} + - name: MASTER_EXEC_THREADS + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_EXEC_THREADS + - name: MASTER_EXEC_TASK_NUM + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_EXEC_TASK_NUM + - name: MASTER_HEARTBEAT_INTERVAL + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_HEARTBEAT_INTERVAL + - name: MASTER_TASK_COMMIT_RETRYTIMES + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_TASK_COMMIT_RETRYTIMES + - name: MASTER_TASK_COMMIT_INTERVAL + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_TASK_COMMIT_INTERVAL + - name: MASTER_MAX_CPULOAD_AVG + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_MAX_CPULOAD_AVG + - name: MASTER_RESERVED_MEMORY + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_RESERVED_MEMORY + - name: MASTER_LISTEN_PORT + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: MASTER_LISTEN_PORT + - name: DOLPHINSCHEDULER_DATA_BASEDIR_PATH + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: DOLPHINSCHEDULER_DATA_BASEDIR_PATH + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + - name: POSTGRESQL_USERNAME + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlUsername }} + {{- else }} + value: {{ .Values.externalDatabase.username | quote }} + {{- end }} + - name: POSTGRESQL_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ template "dolphinscheduler.postgresql.fullname" . }} + key: postgresql-password + {{- else }} + name: {{ printf "%s-%s" .Release.Name "externaldb" }} + key: db-password + {{- end }} + - name: POSTGRESQL_DATABASE + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlDatabase }} + {{- else }} + value: {{ .Values.externalDatabase.database | quote }} + {{- end }} + - name: ZOOKEEPER_QUORUM + {{- if .Values.zookeeper.enabled }} + value: {{ template "dolphinscheduler.zookeeper.quorum" . }} + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperQuorum }} + {{- end }} + {{- if .Values.master.livenessProbe.enabled }} + livenessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - MasterServer + initialDelaySeconds: {{ .Values.master.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.master.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.master.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.master.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.master.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.master.readinessProbe.enabled }} + readinessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - MasterServer + initialDelaySeconds: {{ .Values.master.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.master.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.master.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.master.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.master.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: "/opt/dolphinscheduler/logs" + name: {{ include "dolphinscheduler.fullname" . }}-master + volumes: + - name: {{ include "dolphinscheduler.fullname" . }}-master + {{- if .Values.master.persistentVolumeClaim.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-master + {{- else }} + emptyDir: {} + {{- end }} + {{- if .Values.master.persistentVolumeClaim.enabled }} + volumeClaimTemplates: + - metadata: + name: {{ include "dolphinscheduler.fullname" . }}-master + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + spec: + accessModes: + {{- range .Values.master.persistentVolumeClaim.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.master.persistentVolumeClaim.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.master.persistentVolumeClaim.storage | quote }} + {{- end }} diff --git a/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml new file mode 100644 index 0000000000..198cef43b6 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/statefulset-dolphinscheduler-worker.yaml @@ -0,0 +1,292 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: worker +spec: + podManagementPolicy: {{ .Values.worker.podManagementPolicy }} + replicas: {{ .Values.worker.replicas }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: worker + serviceName: {{ template "dolphinscheduler.fullname" . }}-worker-headless + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: worker + spec: + {{- if .Values.worker.affinity }} + affinity: {{- toYaml .Values.worker.affinity | nindent 8 }} + {{- end }} + {{- if .Values.worker.nodeSelector }} + nodeSelector: {{- toYaml .Values.worker.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.worker.tolerations }} + tolerations: {{- toYaml . | nindent 8 }} + {{- end }} + initContainers: + - name: init-zookeeper + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + echo "${ZOOKEEPER_QUORUM}" | awk -F ',' 'BEGIN{ i=1 }{ while( i <= NF ){ print $i; i++ } }' | while read line; do + while ! nc -z ${line%:*} ${line#*:}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to zookeeper." + exit 1 + fi + echo "Trying to connect to zookeeper at ${line}. Attempt $counter." + sleep 60 + done + done + env: + - name: ZOOKEEPER_QUORUM + {{- if .Values.zookeeper.enabled }} + value: "{{ template "dolphinscheduler.zookeeper.quorum" . }}" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperQuorum }} + {{- end }} + - name: init-postgresql + image: busybox:1.31.0 + command: + - /bin/sh + - -ec + - | + while ! nc -z ${POSTGRESQL_HOST} ${POSTGRESQL_PORT}; do + counter=$((counter+1)) + if [ $counter == 5 ]; then + echo "Error: Couldn't connect to postgresql." + exit 1 + fi + echo "Trying to connect to postgresql at ${POSTGRESQL_HOST}:${POSTGRESQL_PORT}. Attempt $counter." + sleep 60 + done + env: + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + containers: + - name: {{ include "dolphinscheduler.fullname" . }}-worker + image: {{ include "dolphinscheduler.image.repository" . | quote }} + args: + - "worker-server" + ports: + - containerPort: {{ .Values.worker.configmap.WORKER_LISTEN_PORT }} + name: "worker-port" + - containerPort: 50051 + name: "logs-port" + imagePullPolicy: {{ .Values.image.pullPolicy }} + env: + - name: TZ + value: {{ .Values.timezone }} + - name: WORKER_EXEC_THREADS + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_EXEC_THREADS + - name: WORKER_FETCH_TASK_NUM + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_FETCH_TASK_NUM + - name: WORKER_HEARTBEAT_INTERVAL + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_HEARTBEAT_INTERVAL + - name: WORKER_MAX_CPULOAD_AVG + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_MAX_CPULOAD_AVG + - name: WORKER_RESERVED_MEMORY + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_RESERVED_MEMORY + - name: WORKER_LISTEN_PORT + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_LISTEN_PORT + - name: WORKER_GROUP + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-worker + key: WORKER_GROUP + - name: DOLPHINSCHEDULER_DATA_BASEDIR_PATH + valueFrom: + configMapKeyRef: + name: {{ include "dolphinscheduler.fullname" . }}-master + key: DOLPHINSCHEDULER_DATA_BASEDIR_PATH + - name: POSTGRESQL_HOST + {{- if .Values.postgresql.enabled }} + value: {{ template "dolphinscheduler.postgresql.fullname" . }} + {{- else }} + value: {{ .Values.externalDatabase.host | quote }} + {{- end }} + - name: POSTGRESQL_PORT + {{- if .Values.postgresql.enabled }} + value: "5432" + {{- else }} + value: {{ .Values.externalDatabase.port }} + {{- end }} + - name: POSTGRESQL_USERNAME + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlUsername }} + {{- else }} + value: {{ .Values.externalDatabase.username | quote }} + {{- end }} + - name: POSTGRESQL_PASSWORD + valueFrom: + secretKeyRef: + {{- if .Values.postgresql.enabled }} + name: {{ template "dolphinscheduler.postgresql.fullname" . }} + key: postgresql-password + {{- else }} + name: {{ printf "%s-%s" .Release.Name "externaldb" }} + key: db-password + {{- end }} + - name: POSTGRESQL_DATABASE + {{- if .Values.postgresql.enabled }} + value: {{ .Values.postgresql.postgresqlDatabase }} + {{- else }} + value: {{ .Values.externalDatabase.database | quote }} + {{- end }} + - name: ZOOKEEPER_QUORUM + {{- if .Values.zookeeper.enabled }} + value: "{{ template "dolphinscheduler.zookeeper.quorum" . }}" + {{- else }} + value: {{ .Values.externalZookeeper.zookeeperQuorum }} + {{- end }} + {{- if .Values.worker.livenessProbe.enabled }} + livenessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - WorkerServer + initialDelaySeconds: {{ .Values.worker.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.worker.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.worker.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.worker.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.worker.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.worker.readinessProbe.enabled }} + readinessProbe: + exec: + command: + - sh + - /root/checkpoint.sh + - WorkerServer + initialDelaySeconds: {{ .Values.worker.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.worker.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.worker.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.worker.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.worker.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: {{ include "dolphinscheduler.worker.base.dir" . | quote }} + name: {{ include "dolphinscheduler.fullname" . }}-worker-data + - mountPath: "/opt/dolphinscheduler/logs" + name: {{ include "dolphinscheduler.fullname" . }}-worker-logs + - mountPath: "/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh" + subPath: "dolphinscheduler_env.sh" + name: {{ include "dolphinscheduler.fullname" . }}-worker-configmap + volumes: + - name: {{ include "dolphinscheduler.fullname" . }}-worker-data + {{- if .Values.worker.persistentVolumeClaim.dataPersistentVolume.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-worker-data + {{- else }} + emptyDir: {} + {{- end }} + - name: {{ include "dolphinscheduler.fullname" . }}-worker-logs + {{- if .Values.worker.persistentVolumeClaim.logsPersistentVolume.enabled }} + persistentVolumeClaim: + claimName: {{ include "dolphinscheduler.fullname" . }}-worker-logs + {{- else }} + emptyDir: {} + {{- end }} + - name: {{ include "dolphinscheduler.fullname" . }}-worker-configmap + configMap: + defaultMode: 0777 + name: {{ include "dolphinscheduler.fullname" . }}-worker + items: + - key: dolphinscheduler_env.sh + path: dolphinscheduler_env.sh + {{- if .Values.worker.persistentVolumeClaim.enabled }} + volumeClaimTemplates: + {{- if .Values.worker.persistentVolumeClaim.dataPersistentVolume.enabled }} + - metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker-data + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker-data + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + spec: + accessModes: + {{- range .Values.worker.persistentVolumeClaim.dataPersistentVolume.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.worker.persistentVolumeClaim.dataPersistentVolume.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.worker.persistentVolumeClaim.dataPersistentVolume.storage | quote }} + {{- end }} + {{- if .Values.worker.persistentVolumeClaim.logsPersistentVolume.enabled }} + - metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker-logs + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker-logs + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + spec: + accessModes: + {{- range .Values.worker.persistentVolumeClaim.logsPersistentVolume.accessModes }} + - {{ . | quote }} + {{- end }} + storageClassName: {{ .Values.worker.persistentVolumeClaim.logsPersistentVolume.storageClassName | quote }} + resources: + requests: + storage: {{ .Values.worker.persistentVolumeClaim.logsPersistentVolume.storage | quote }} + {{- end }} + {{- end }} diff --git a/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-api.yaml b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-api.yaml new file mode 100644 index 0000000000..4d07ade242 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-api.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: v1 +kind: Service +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-api + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + ports: + - port: 12345 + targetPort: tcp-port + protocol: TCP + name: tcp-port + selector: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-api + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: api \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-frontend.yaml b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-frontend.yaml new file mode 100644 index 0000000000..60d0d6e7b5 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-frontend.yaml @@ -0,0 +1,35 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: v1 +kind: Service +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-frontend + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + ports: + - port: 8888 + targetPort: tcp-port + protocol: TCP + name: tcp-port + selector: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-frontend + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: frontend \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-master-headless.yaml b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-master-headless.yaml new file mode 100644 index 0000000000..7f82cff31e --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-master-headless.yaml @@ -0,0 +1,36 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: v1 +kind: Service +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-master-headless + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master-headless + app.kubernetes.io/instance: {{ .Release.Name }}-master-headless + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + clusterIP: "None" + ports: + - port: {{ .Values.master.configmap.MASTER_LISTEN_PORT }} + targetPort: master-port + protocol: TCP + name: master-port + selector: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-master + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: master \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-worker-headless.yaml b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-worker-headless.yaml new file mode 100644 index 0000000000..fb3b85b5c3 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/templates/svc-dolphinscheduler-worker-headless.yaml @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: v1 +kind: Service +metadata: + name: {{ include "dolphinscheduler.fullname" . }}-worker-headless + labels: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker-headless + app.kubernetes.io/instance: {{ .Release.Name }}-worker-headless + app.kubernetes.io/managed-by: {{ .Release.Service }} +spec: + clusterIP: "None" + ports: + - port: {{ .Values.worker.configmap.WORKER_LISTEN_PORT }} + targetPort: worker-port + protocol: TCP + name: worker-port + - port: 50051 + targetPort: logs-port + protocol: TCP + name: logs-port + selector: + app.kubernetes.io/name: {{ include "dolphinscheduler.fullname" . }}-worker + app.kubernetes.io/instance: {{ .Release.Name }} + app.kubernetes.io/managed-by: {{ .Release.Service }} + app.kubernetes.io/component: worker \ No newline at end of file diff --git a/docker/kubernetes/dolphinscheduler/values.yaml b/docker/kubernetes/dolphinscheduler/values.yaml new file mode 100644 index 0000000000..4f70afade5 --- /dev/null +++ b/docker/kubernetes/dolphinscheduler/values.yaml @@ -0,0 +1,360 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Default values for dolphinscheduler-chart. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +nameOverride: "" +fullnameOverride: "" + +timezone: "Asia/Shanghai" + +image: + registry: "docker.io" + repository: "dolphinscheduler" + tag: "1.3.0" + pullPolicy: "IfNotPresent" + +imagePullSecrets: [] + +# If not exists external postgresql, by default, Dolphinscheduler's database will use it. +postgresql: + enabled: true + postgresqlUsername: "root" + postgresqlPassword: "root" + postgresqlDatabase: "dolphinscheduler" + persistence: + enabled: false + size: "20Gi" + storageClass: "-" + +# If exists external postgresql, and set postgresql.enable value to false. +# If postgresql.enable is false, Dolphinscheduler's database will use it. +externalDatabase: + host: "localhost" + port: "5432" + username: "root" + password: "root" + database: "dolphinscheduler" + +# If not exists external zookeeper, by default, Dolphinscheduler's zookeeper will use it. +zookeeper: + enabled: true + taskQueue: "zookeeper" + service: + port: "2181" + persistence: + enabled: false + size: "20Gi" + storageClass: "-" + +# If exists external zookeeper, and set zookeeper.enable value to false. +# If zookeeper.enable is false, Dolphinscheduler's zookeeper will use it. +externalZookeeper: + taskQueue: "zookeeper" + zookeeperQuorum: "127.0.0.1:2181" + +master: + podManagementPolicy: "Parallel" + replicas: "3" + # NodeSelector is a selector which must be true for the pod to fit on a node. + # Selector which must match a node's labels for the pod to be scheduled on that node. + # More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ + nodeSelector: {} + # Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, + # effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. + tolerations: [] + # Affinity is a group of affinity scheduling rules. + # If specified, the pod's scheduling constraints. + # More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core + affinity: {} + ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + configmap: + MASTER_EXEC_THREADS: "100" + MASTER_EXEC_TASK_NUM: "20" + MASTER_HEARTBEAT_INTERVAL: "10" + MASTER_TASK_COMMIT_RETRYTIMES: "5" + MASTER_TASK_COMMIT_INTERVAL: "1000" + MASTER_MAX_CPULOAD_AVG: "100" + MASTER_RESERVED_MEMORY: "0.1" + MASTER_LISTEN_PORT: "5678" + livenessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + readinessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## volumeClaimTemplates is a list of claims that pods are allowed to reference. + ## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. + ## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. + ## A claim in this list takes precedence over any volumes in the template, with the same name. + persistentVolumeClaim: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + +worker: + podManagementPolicy: "Parallel" + replicas: "3" + # NodeSelector is a selector which must be true for the pod to fit on a node. + # Selector which must match a node's labels for the pod to be scheduled on that node. + # More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ + nodeSelector: {} + # Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, + # effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. + tolerations: [] + # Affinity is a group of affinity scheduling rules. + # If specified, the pod's scheduling constraints. + # More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core + affinity: {} + ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + livenessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + readinessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + configmap: + WORKER_EXEC_THREADS: "100" + WORKER_HEARTBEAT_INTERVAL: "10" + WORKER_FETCH_TASK_NUM: "3" + WORKER_MAX_CPULOAD_AVG: "100" + WORKER_RESERVED_MEMORY: "0.1" + WORKER_LISTEN_PORT: "1234" + WORKER_GROUP: "default" + DOLPHINSCHEDULER_DATA_BASEDIR_PATH: "/tmp/dolphinscheduler" + DOLPHINSCHEDULER_ENV: + - "export HADOOP_HOME=/opt/soft/hadoop" + - "export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop" + - "export SPARK_HOME1=/opt/soft/spark1" + - "export SPARK_HOME2=/opt/soft/spark2" + - "export PYTHON_HOME=/opt/soft/python" + - "export JAVA_HOME=/opt/soft/java" + - "export HIVE_HOME=/opt/soft/hive" + - "export FLINK_HOME=/opt/soft/flink" + - "export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH" + ## volumeClaimTemplates is a list of claims that pods are allowed to reference. + ## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. + ## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. + ## A claim in this list takes precedence over any volumes in the template, with the same name. + persistentVolumeClaim: + enabled: false + ## dolphinscheduler data volume + dataPersistentVolume: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + ## dolphinscheduler logs volume + logsPersistentVolume: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + +alert: + strategy: + type: "RollingUpdate" + rollingUpdate: + maxSurge: "25%" + maxUnavailable: "25%" + replicas: "1" + # NodeSelector is a selector which must be true for the pod to fit on a node. + # Selector which must match a node's labels for the pod to be scheduled on that node. + # More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ + nodeSelector: {} + # Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, + # effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. + tolerations: [] + # Affinity is a group of affinity scheduling rules. + # If specified, the pod's scheduling constraints. + # More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core + affinity: {} + ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + configmap: + XLS_FILE_PATH: "/tmp/xls" + MAIL_SERVER_HOST: "" + MAIL_SERVER_PORT: "" + MAIL_SENDER: "" + MAIL_USER: "" + MAIL_PASSWD: "" + MAIL_SMTP_STARTTLS_ENABLE: false + MAIL_SMTP_SSL_ENABLE: false + MAIL_SMTP_SSL_TRUST: "" + ENTERPRISE_WECHAT_ENABLE: false + ENTERPRISE_WECHAT_CORP_ID: "" + ENTERPRISE_WECHAT_SECRET: "" + ENTERPRISE_WECHAT_AGENT_ID: "" + ENTERPRISE_WECHAT_USERS: "" + livenessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + readinessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## volumeClaimTemplates is a list of claims that pods are allowed to reference. + ## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. + ## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. + ## A claim in this list takes precedence over any volumes in the template, with the same name. + persistentVolumeClaim: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + +api: + strategy: + type: "RollingUpdate" + rollingUpdate: + maxSurge: "25%" + maxUnavailable: "25%" + replicas: "1" + # NodeSelector is a selector which must be true for the pod to fit on a node. + # Selector which must match a node's labels for the pod to be scheduled on that node. + # More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ + nodeSelector: {} + # Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, + # effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. + tolerations: [] + # Affinity is a group of affinity scheduling rules. + # If specified, the pod's scheduling constraints. + # More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core + affinity: {} + ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + livenessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + readinessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## volumeClaimTemplates is a list of claims that pods are allowed to reference. + ## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. + ## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. + ## A claim in this list takes precedence over any volumes in the template, with the same name. + persistentVolumeClaim: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + +frontend: + strategy: + type: "RollingUpdate" + rollingUpdate: + maxSurge: "25%" + maxUnavailable: "25%" + replicas: "1" + # NodeSelector is a selector which must be true for the pod to fit on a node. + # Selector which must match a node's labels for the pod to be scheduled on that node. + # More info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/ + nodeSelector: {} + # Tolerations are appended (excluding duplicates) to pods running with this RuntimeClass during admission, + # effectively unioning the set of nodes tolerated by the pod and the RuntimeClass. + tolerations: [] + # Affinity is a group of affinity scheduling rules. + # If specified, the pod's scheduling constraints. + # More info: https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.17/#affinity-v1-core + affinity: {} + ## Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + livenessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## Periodic probe of container service readiness. Container will be removed from service endpoints if the probe fails. Cannot be updated. + ## More info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes + readinessProbe: + enabled: true + initialDelaySeconds: "30" + periodSeconds: "30" + timeoutSeconds: "5" + failureThreshold: "3" + successThreshold: "1" + ## volumeClaimTemplates is a list of claims that pods are allowed to reference. + ## The StatefulSet controller is responsible for mapping network identities to claims in a way that maintains the identity of a pod. + ## Every claim in this list must have at least one matching (by name) volumeMount in one container in the template. + ## A claim in this list takes precedence over any volumes in the template, with the same name. + persistentVolumeClaim: + enabled: false + accessModes: + - "ReadWriteOnce" + storageClassName: "-" + storage: "20Gi" + +ingress: + enabled: false + host: "dolphinscheduler.org" + path: "/" + tls: + enabled: false + hosts: + - "dolphinscheduler.org" + secretName: "dolphinscheduler-tls" \ No newline at end of file diff --git a/docker/postgres/docker-entrypoint-initdb/init.sql b/docker/postgres/docker-entrypoint-initdb/init.sql index b3c61ebce4..b26520e29c 100755 --- a/docker/postgres/docker-entrypoint-initdb/init.sql +++ b/docker/postgres/docker-entrypoint-initdb/init.sql @@ -191,7 +191,7 @@ CREATE TABLE t_ds_alert ( content text , alert_type int DEFAULT NULL , alert_status int DEFAULT '0' , - log text , + ·log· text , alertgroup_id int DEFAULT NULL , receivers text , receivers_cc text , @@ -234,7 +234,7 @@ CREATE TABLE t_ds_command ( dependence varchar(255) DEFAULT NULL , update_time timestamp DEFAULT NULL , process_instance_priority int DEFAULT NULL , - worker_group_id int DEFAULT '-1' , + worker_group varchar(64), PRIMARY KEY (id) ) ; @@ -275,7 +275,7 @@ CREATE TABLE t_ds_error_command ( update_time timestamp DEFAULT NULL , dependence text , process_instance_priority int DEFAULT NULL , - worker_group_id int DEFAULT '-1' , + worker_group varchar(64), message text , PRIMARY KEY (id) ); @@ -283,18 +283,6 @@ CREATE TABLE t_ds_error_command ( -- Table structure for table t_ds_master_server -- -DROP TABLE IF EXISTS t_ds_master_server; -CREATE TABLE t_ds_master_server ( - id int NOT NULL , - host varchar(45) DEFAULT NULL , - port int DEFAULT NULL , - zk_directory varchar(64) DEFAULT NULL , - res_info varchar(256) DEFAULT NULL , - create_time timestamp DEFAULT NULL , - last_heartbeat_time timestamp DEFAULT NULL , - PRIMARY KEY (id) -) ; - -- -- Table structure for table t_ds_process_definition -- @@ -319,6 +307,8 @@ CREATE TABLE t_ds_process_definition ( timeout int DEFAULT '0' , tenant_id int NOT NULL DEFAULT '-1' , update_time timestamp DEFAULT NULL , + modify_by varchar(36) DEFAULT '' , + resource_ids varchar(64), PRIMARY KEY (id) ) ; @@ -359,7 +349,7 @@ CREATE TABLE t_ds_process_instance ( history_cmd text , dependence_schedule_times text , process_instance_priority int DEFAULT NULL , - worker_group_id int DEFAULT '-1' , + worker_group varchar(64) , timeout int DEFAULT '0' , tenant_id int NOT NULL DEFAULT '-1' , PRIMARY KEY (id) @@ -505,9 +495,12 @@ CREATE TABLE t_ds_resources ( size bigint DEFAULT NULL , create_time timestamp DEFAULT NULL , update_time timestamp DEFAULT NULL , + pid int, + full_name varchar(64), + is_directory int, PRIMARY KEY (id) ) ; -; + -- -- Table structure for table t_ds_schedules @@ -526,7 +519,7 @@ CREATE TABLE t_ds_schedules ( warning_type int NOT NULL , warning_group_id int DEFAULT NULL , process_instance_priority int DEFAULT NULL , - worker_group_id int DEFAULT '-1' , + worker_group varchar(64), create_time timestamp NOT NULL , update_time timestamp NOT NULL , PRIMARY KEY (id) @@ -572,7 +565,8 @@ CREATE TABLE t_ds_task_instance ( retry_interval int DEFAULT NULL , max_retry_times int DEFAULT NULL , task_instance_priority int DEFAULT NULL , - worker_group_id int DEFAULT '-1' , + worker_group varchar(64), + executor_id int DEFAULT NULL , PRIMARY KEY (id) ) ; @@ -691,9 +685,6 @@ ALTER TABLE t_ds_command ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_command_id_se DROP SEQUENCE IF EXISTS t_ds_datasource_id_sequence; CREATE SEQUENCE t_ds_datasource_id_sequence; ALTER TABLE t_ds_datasource ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_datasource_id_sequence'); -DROP SEQUENCE IF EXISTS t_ds_master_server_id_sequence; -CREATE SEQUENCE t_ds_master_server_id_sequence; -ALTER TABLE t_ds_master_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_master_server_id_sequence'); DROP SEQUENCE IF EXISTS t_ds_process_definition_id_sequence; CREATE SEQUENCE t_ds_process_definition_id_sequence; ALTER TABLE t_ds_process_definition ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_process_definition_id_sequence'); @@ -757,7 +748,7 @@ CREATE SEQUENCE t_ds_worker_server_id_sequence; ALTER TABLE t_ds_worker_server ALTER COLUMN id SET DEFAULT NEXTVAL('t_ds_worker_server_id_sequence'); --- Records of t_ds_user,user : admin , password : dolphinscheduler123 +-- Records of t_ds_user?user : admin , password : dolphinscheduler123 INSERT INTO t_ds_user(user_name,user_password,user_type,email,phone,tenant_id,create_time,update_time) VALUES ('admin', '7ad2410b2f4c074479a8937a28a22b8f', '0', 'xxx@qq.com', 'xx', '0', '2018-03-27 15:48:50', '2018-10-24 17:40:22'); -- Records of t_ds_alertgroup,dolphinscheduler warning group @@ -768,4 +759,4 @@ INSERT INTO t_ds_relation_user_alertgroup(alertgroup_id,user_id,create_time,upda INSERT INTO t_ds_queue(queue_name,queue,create_time,update_time) VALUES ('default', 'default','2018-11-29 10:22:33', '2018-11-29 10:22:33'); -- Records of t_ds_queue,default queue name : default -INSERT INTO t_ds_version(version) VALUES ('1.2.0'); \ No newline at end of file +INSERT INTO t_ds_version(version) VALUES ('2.0.0'); \ No newline at end of file diff --git a/dockerfile/Dockerfile b/dockerfile/Dockerfile index acd2cb99e5..c48b51e377 100644 --- a/dockerfile/Dockerfile +++ b/dockerfile/Dockerfile @@ -90,6 +90,6 @@ RUN chmod +x /root/checkpoint.sh && \ RUN rm -rf /var/cache/apk/* #9. expose port -EXPOSE 2181 2888 3888 5432 12345 50051 8888 +EXPOSE 2181 2888 3888 5432 5678 1234 12345 50051 8888 ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"] \ No newline at end of file diff --git a/dockerfile/README.md b/dockerfile/README.md index 60af7fad8f..b407f57d3b 100644 --- a/dockerfile/README.md +++ b/dockerfile/README.md @@ -162,18 +162,6 @@ This environment variable sets the runtime environment for task. The default val User data directory path, self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler` -**`DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH`** - -Directory path for user data download. self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler/download` - -**`DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH`** - -Process execute directory. self configuration, please make sure the directory exists and have read write permissions. The default value is `/tmp/dolphinscheduler/exec` - -**`TASK_QUEUE`** - -This environment variable sets the task queue for `master-server` and `worker-serverr`. The default value is `zookeeper`. - **`ZOOKEEPER_QUORUM`** This environment variable sets zookeeper quorum for `master-server` and `worker-serverr`. The default value is `127.0.0.1:2181`. @@ -208,6 +196,10 @@ This environment variable sets max cpu load avg for `master-server`. The default This environment variable sets reserved memory for `master-server`. The default value is `0.1`. +**`MASTER_LISTEN_PORT`** + +This environment variable sets port for `master-server`. The default value is `5678`. + **`WORKER_EXEC_THREADS`** This environment variable sets exec thread num for `worker-server`. The default value is `100`. @@ -228,6 +220,14 @@ This environment variable sets max cpu load avg for `worker-server`. The default This environment variable sets reserved memory for `worker-server`. The default value is `0.1`. +**`WORKER_LISTEN_PORT`** + +This environment variable sets port for `worker-server`. The default value is `1234`. + +**`WORKER_GROUP`** + +This environment variable sets group for `worker-server`. The default value is `default`. + **`XLS_FILE_PATH`** This environment variable sets xls file path for `alert-server`. The default value is `/tmp/xls`. diff --git a/dockerfile/README_zh_CN.md b/dockerfile/README_zh_CN.md index 900c8b50d9..187261581d 100644 --- a/dockerfile/README_zh_CN.md +++ b/dockerfile/README_zh_CN.md @@ -162,18 +162,6 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些 用户数据目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler`。 -**`DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH`** - -用户数据下载目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler/download`。 - -**`DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH`** - -任务执行目录, 用户自己配置, 请确保这个目录存在并且用户读写权限, 默认值 `/tmp/dolphinscheduler/exec`。 - -**`TASK_QUEUE`** - -配置`master-server`和`worker-serverr`的`Zookeeper`任务队列名, 默认值 `zookeeper`。 - **`ZOOKEEPER_QUORUM`** 配置`master-server`和`worker-serverr`的`Zookeeper`地址, 默认值 `127.0.0.1:2181`。 @@ -208,6 +196,10 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些 配置`master-server`的保留内存,默认值 `0.1`。 +**`MASTER_LISTEN_PORT`** + +配置`master-server`的端口,默认值 `5678`。 + **`WORKER_EXEC_THREADS`** 配置`worker-server`中的执行线程数量,默认值 `100`。 @@ -228,6 +220,14 @@ Dolphin Scheduler映像使用了几个容易遗漏的环境变量。虽然这些 配置`worker-server`的保留内存,默认值 `0.1`。 +**`WORKER_LISTEN_PORT`** + +配置`worker-server`的端口,默认值 `1234`。 + +**`WORKER_GROUP`** + +配置`worker-server`的分组,默认值 `default`。 + **`XLS_FILE_PATH`** 配置`alert-server`的`XLS`文件的存储路径,默认值 `/tmp/xls`。 diff --git a/dockerfile/conf/dolphinscheduler/application-api.properties.tpl b/dockerfile/conf/dolphinscheduler/application-api.properties.tpl index 424ea55f7d..88915923fa 100644 --- a/dockerfile/conf/dolphinscheduler/application-api.properties.tpl +++ b/dockerfile/conf/dolphinscheduler/application-api.properties.tpl @@ -14,21 +14,29 @@ # See the License for the specific language governing permissions and # limitations under the License. # + # server port server.port=12345 + # session config server.servlet.session.timeout=7200 + # servlet config server.servlet.context-path=/dolphinscheduler/ + # file size limit for upload spring.servlet.multipart.max-file-size=1024MB spring.servlet.multipart.max-request-size=1024MB + # post content server.jetty.max-http-post-size=5000000 + # i18n spring.messages.encoding=UTF-8 + #i18n classpath folder , file prefix messages, if have many files, use "," seperator spring.messages.basename=i18n/messages + # Authentication types (supported types: PASSWORD) security.authentication.type=PASSWORD diff --git a/dockerfile/conf/dolphinscheduler/application.properties.tpl b/dockerfile/conf/dolphinscheduler/application.properties.tpl deleted file mode 100644 index 6dd8a18e11..0000000000 --- a/dockerfile/conf/dolphinscheduler/application.properties.tpl +++ /dev/null @@ -1,115 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# base spring data source configuration -spring.datasource.type=com.alibaba.druid.pool.DruidDataSource -# postgre -spring.datasource.driver-class-name=org.postgresql.Driver -spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8 -# mysql -#spring.datasource.driver-class-name=com.mysql.jdbc.Driver -#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 -spring.datasource.username=${POSTGRESQL_USERNAME} -spring.datasource.password=${POSTGRESQL_PASSWORD} -# connection configuration -spring.datasource.initialSize=5 -# min connection number -spring.datasource.minIdle=5 -# max connection number -spring.datasource.maxActive=50 -# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. -# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. -spring.datasource.maxWait=60000 -# milliseconds for check to close free connections -spring.datasource.timeBetweenEvictionRunsMillis=60000 -# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. -spring.datasource.timeBetweenConnectErrorMillis=60000 -# the longest time a connection remains idle without being evicted, in milliseconds -spring.datasource.minEvictableIdleTimeMillis=300000 -#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. -spring.datasource.validationQuery=SELECT 1 -#check whether the connection is valid for timeout, in seconds -spring.datasource.validationQueryTimeout=3 -# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, -# validation Query is performed to check whether the connection is valid -spring.datasource.testWhileIdle=true -#execute validation to check if the connection is valid when applying for a connection -spring.datasource.testOnBorrow=true -#execute validation to check if the connection is valid when the connection is returned -spring.datasource.testOnReturn=false -spring.datasource.defaultAutoCommit=true -spring.datasource.keepAlive=true -# open PSCache, specify count PSCache for every connection -spring.datasource.poolPreparedStatements=true -spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 -spring.datasource.spring.datasource.filters=stat,wall,log4j -spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 - -#mybatis -mybatis-plus.mapper-locations=classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml -mybatis-plus.typeEnumsPackage=org.apache.dolphinscheduler.*.enums -#Entity scan, where multiple packages are separated by a comma or semicolon -mybatis-plus.typeAliasesPackage=org.apache.dolphinscheduler.dao.entity -#Primary key type AUTO:" database ID AUTO ", INPUT:" user INPUT ID", ID_WORKER:" global unique ID (numeric type unique ID)", UUID:" global unique ID UUID"; -mybatis-plus.global-config.db-config.id-type=AUTO -#Field policy IGNORED:" ignore judgment ",NOT_NULL:" not NULL judgment "),NOT_EMPTY:" not NULL judgment" -mybatis-plus.global-config.db-config.field-strategy=NOT_NULL -#The hump underline is converted -mybatis-plus.global-config.db-config.column-underline=true -mybatis-plus.global-config.db-config.logic-delete-value=-1 -mybatis-plus.global-config.db-config.logic-not-delete-value=0 -mybatis-plus.global-config.db-config.banner=false -#The original configuration -mybatis-plus.configuration.map-underscore-to-camel-case=true -mybatis-plus.configuration.cache-enabled=false -mybatis-plus.configuration.call-setters-on-nulls=true -mybatis-plus.configuration.jdbc-type-for-null=null - -# master settings -# master execute thread num -master.exec.threads=${MASTER_EXEC_THREADS} -# master execute task number in parallel -master.exec.task.num=${MASTER_EXEC_TASK_NUM} -# master heartbeat interval -master.heartbeat.interval=${MASTER_HEARTBEAT_INTERVAL} -# master commit task retry times -master.task.commit.retryTimes=${MASTER_TASK_COMMIT_RETRYTIMES} -# master commit task interval -master.task.commit.interval=${MASTER_TASK_COMMIT_INTERVAL} -# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 -master.max.cpuload.avg=${MASTER_MAX_CPULOAD_AVG} -# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. -master.reserved.memory=${MASTER_RESERVED_MEMORY} - -# worker settings -# worker execute thread num -worker.exec.threads=${WORKER_EXEC_THREADS} -# worker heartbeat interval -worker.heartbeat.interval=${WORKER_HEARTBEAT_INTERVAL} -# submit the number of tasks at a time -worker.fetch.task.num=${WORKER_FETCH_TASK_NUM} -# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 -worker.max.cpuload.avg=${WORKER_MAX_CPULOAD_AVG} -# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G. -worker.reserved.memory=${WORKER_RESERVED_MEMORY} - -# data quality analysis is not currently in use. please ignore the following configuration -# task record -task.record.flag=false -task.record.datasource.url=jdbc:mysql://192.168.xx.xx:3306/etl?characterEncoding=UTF-8 -task.record.datasource.username=xx -task.record.datasource.password=xx diff --git a/dockerfile/conf/dolphinscheduler/common.properties.tpl b/dockerfile/conf/dolphinscheduler/common.properties.tpl index 8134fc7a9b..f318ff8414 100644 --- a/dockerfile/conf/dolphinscheduler/common.properties.tpl +++ b/dockerfile/conf/dolphinscheduler/common.properties.tpl @@ -15,70 +15,64 @@ # limitations under the License. # -#task queue implementation, default "zookeeper" -dolphinscheduler.queue.impl=${TASK_QUEUE} - -#zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181 -zookeeper.quorum=${ZOOKEEPER_QUORUM} -#dolphinscheduler root directory -zookeeper.dolphinscheduler.root=/dolphinscheduler -#dolphinscheduler failover directory -zookeeper.session.timeout=300 -zookeeper.connection.timeout=300 -zookeeper.retry.base.sleep=100 -zookeeper.retry.max.sleep=30000 -zookeeper.retry.maxtime=5 - #============================================================================ # System #============================================================================ # system env path. self configuration, please make sure the directory and file exists and have read write execute permissions dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH} -#resource.view.suffixs -resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties -# is development state? default "false" -development.state=true + # user data directory path, self configuration, please make sure the directory exists and have read write permissions data.basedir.path=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH} -# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions -data.download.basedir.path=${DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH} -# process execute directory. self configuration, please make sure the directory exists and have read write permissions -process.exec.basepath=${DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH} # resource upload startup type : HDFS,S3,NONE -res.upload.startup.type=NONE +resource.storage.type=NONE #============================================================================ # HDFS #============================================================================ -# Users who have permission to create directories under the HDFS root path -hdfs.root.user=hdfs -# data base dir, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended -data.store2hdfs.basepath=/dolphinscheduler +# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended +#resource.upload.path=/dolphinscheduler + # whether kerberos starts -hadoop.security.authentication.startup.state=false +#hadoop.security.authentication.startup.state=false + # java.security.krb5.conf path -java.security.krb5.conf.path=/opt/krb5.conf +#java.security.krb5.conf.path=/opt/krb5.conf + # loginUserFromKeytab user -login.user.keytab.username=hdfs-mycluster@ESZ.COM +#login.user.keytab.username=hdfs-mycluster@ESZ.COM + # loginUserFromKeytab path -login.user.keytab.path=/opt/hdfs.headless.keytab +#login.user.keytab.path=/opt/hdfs.headless.keytab + +#resource.view.suffixs +#resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties + +# if resource.storage.type=HDFS, the user need to have permission to create directories under the HDFS root path +hdfs.root.user=hdfs + +# kerberos expire time +kerberos.expire.time=7 #============================================================================ # S3 #============================================================================ -# ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml -# to the conf directory,support s3,for example : s3a://dolphinscheduler +# if resource.storage.type=S3,the value like: s3a://dolphinscheduler ; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir fs.defaultFS=hdfs://mycluster:8020 -# s3 need,s3 endpoint -fs.s3a.endpoint=http://192.168.199.91:9010 -# s3 need,s3 access key -fs.s3a.access.key=A3DXS30FO22544RE -# s3 need,s3 secret key -fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK -#resourcemanager ha note this need ips , this empty if single + +# if resource.storage.type=S3,s3 endpoint +#fs.s3a.endpoint=http://192.168.199.91:9010 + +# if resource.storage.type=S3,s3 access key +#fs.s3a.access.key=A3DXS30FO22544RE + +# if resource.storage.type=S3,s3 secret key +#fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK + +# if not use hadoop resourcemanager, please keep default value; if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty TODO yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx -# If it is a single resourcemanager, you only need to configure one host name. If it is resourcemanager HA, the default configuration is fine + +# If resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname. yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s diff --git a/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml b/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml new file mode 100644 index 0000000000..1b09260334 --- /dev/null +++ b/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml @@ -0,0 +1,79 @@ + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + INFO + + + + taskAppId + ${log.base} + + + + ${log.base}/${taskAppId}.log + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + true + + + + + + ${log.base}/dolphinscheduler-worker.log + + INFO + + + + ${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log + 168 + 200MB + +       + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + +    + + + + + + + + \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/datasource.properties.tpl b/dockerfile/conf/dolphinscheduler/datasource.properties.tpl new file mode 100644 index 0000000000..aefb9e3b0b --- /dev/null +++ b/dockerfile/conf/dolphinscheduler/datasource.properties.tpl @@ -0,0 +1,71 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +# mysql +#spring.datasource.driver-class-name=com.mysql.jdbc.Driver +#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 + +# postgre +spring.datasource.driver-class-name=org.postgresql.Driver +spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8 +spring.datasource.username=${POSTGRESQL_USERNAME} +spring.datasource.password=${POSTGRESQL_PASSWORD} + +## base spring data source configuration todo need to remove +#spring.datasource.type=com.alibaba.druid.pool.DruidDataSource + +# connection configuration +#spring.datasource.initialSize=5 +# min connection number +#spring.datasource.minIdle=5 +# max connection number +#spring.datasource.maxActive=50 + +# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. +# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. +#spring.datasource.maxWait=60000 + +# milliseconds for check to close free connections +#spring.datasource.timeBetweenEvictionRunsMillis=60000 + +# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. +#spring.datasource.timeBetweenConnectErrorMillis=60000 + +# the longest time a connection remains idle without being evicted, in milliseconds +#spring.datasource.minEvictableIdleTimeMillis=300000 + +#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. +#spring.datasource.validationQuery=SELECT 1 + +#check whether the connection is valid for timeout, in seconds +#spring.datasource.validationQueryTimeout=3 + +# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, +# validation Query is performed to check whether the connection is valid +#spring.datasource.testWhileIdle=true + +#execute validation to check if the connection is valid when applying for a connection +#spring.datasource.testOnBorrow=true +#execute validation to check if the connection is valid when the connection is returned +#spring.datasource.testOnReturn=false +#spring.datasource.defaultAutoCommit=true +#spring.datasource.keepAlive=true + +# open PSCache, specify count PSCache for every connection +#spring.datasource.poolPreparedStatements=true +#spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/master.properties.tpl b/dockerfile/conf/dolphinscheduler/master.properties.tpl new file mode 100644 index 0000000000..17dd6f9d69 --- /dev/null +++ b/dockerfile/conf/dolphinscheduler/master.properties.tpl @@ -0,0 +1,40 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# master execute thread num +master.exec.threads=${MASTER_EXEC_THREADS} + +# master execute task number in parallel +master.exec.task.num=${MASTER_EXEC_TASK_NUM} + +# master heartbeat interval +master.heartbeat.interval=${MASTER_HEARTBEAT_INTERVAL} + +# master commit task retry times +master.task.commit.retryTimes=${MASTER_TASK_COMMIT_RETRYTIMES} + +# master commit task interval +master.task.commit.interval=${MASTER_TASK_COMMIT_INTERVAL} + +# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 +master.max.cpuload.avg=${MASTER_MAX_CPULOAD_AVG} + +# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. +master.reserved.memory=${MASTER_RESERVED_MEMORY} + +# master listen port +#master.listen.port=${MASTER_LISTEN_PORT} \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/quartz.properties.tpl b/dockerfile/conf/dolphinscheduler/quartz.properties.tpl index 7c7c92e425..25645795bb 100644 --- a/dockerfile/conf/dolphinscheduler/quartz.properties.tpl +++ b/dockerfile/conf/dolphinscheduler/quartz.properties.tpl @@ -19,39 +19,36 @@ # Configure Main Scheduler Properties #============================================================================ #org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate -org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate -# postgre -org.quartz.dataSource.myDs.driver = org.postgresql.Driver -org.quartz.dataSource.myDs.URL = jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/${POSTGRESQL_DATABASE}?characterEncoding=utf8 -org.quartz.dataSource.myDs.user = ${POSTGRESQL_USERNAME} -org.quartz.dataSource.myDs.password = ${POSTGRESQL_PASSWORD} -org.quartz.scheduler.instanceName = DolphinScheduler -org.quartz.scheduler.instanceId = AUTO -org.quartz.scheduler.makeSchedulerThreadDaemon = true -org.quartz.jobStore.useProperties = false +#org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate + +#org.quartz.scheduler.instanceName = DolphinScheduler +#org.quartz.scheduler.instanceId = AUTO +#org.quartz.scheduler.makeSchedulerThreadDaemon = true +#org.quartz.jobStore.useProperties = false #============================================================================ # Configure ThreadPool #============================================================================ -org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool -org.quartz.threadPool.makeThreadsDaemons = true -org.quartz.threadPool.threadCount = 25 -org.quartz.threadPool.threadPriority = 5 + +#org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool +#org.quartz.threadPool.makeThreadsDaemons = true +#org.quartz.threadPool.threadCount = 25 +#org.quartz.threadPool.threadPriority = 5 #============================================================================ # Configure JobStore #============================================================================ -org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX -org.quartz.jobStore.tablePrefix = QRTZ_ -org.quartz.jobStore.isClustered = true -org.quartz.jobStore.misfireThreshold = 60000 -org.quartz.jobStore.clusterCheckinInterval = 5000 -org.quartz.jobStore.acquireTriggersWithinLock=true -org.quartz.jobStore.dataSource = myDs + +#org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX + +#org.quartz.jobStore.tablePrefix = QRTZ_ +#org.quartz.jobStore.isClustered = true +#org.quartz.jobStore.misfireThreshold = 60000 +#org.quartz.jobStore.clusterCheckinInterval = 5000 +#org.quartz.jobStore.acquireTriggersWithinLock=true +#org.quartz.jobStore.dataSource = myDs #============================================================================ -# Configure Datasources +# Configure Datasources #============================================================================ -org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider -org.quartz.dataSource.myDs.maxConnections = 10 -org.quartz.dataSource.myDs.validationQuery = select 1 \ No newline at end of file +#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/worker.properties.tpl b/dockerfile/conf/dolphinscheduler/worker.properties.tpl new file mode 100644 index 0000000000..d596be94bc --- /dev/null +++ b/dockerfile/conf/dolphinscheduler/worker.properties.tpl @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# worker execute thread num +worker.exec.threads=${WORKER_EXEC_THREADS} + +# worker heartbeat interval +worker.heartbeat.interval=${WORKER_HEARTBEAT_INTERVAL} + +# submit the number of tasks at a time +worker.fetch.task.num=${WORKER_FETCH_TASK_NUM} + +# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 +worker.max.cpuload.avg=${WORKER_MAX_CPULOAD_AVG} + +# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G. +worker.reserved.memory=${WORKER_RESERVED_MEMORY} + +# worker listener port +#worker.listen.port=${WORKER_LISTEN_PORT} + +# default worker group +#worker.group=${WORKER_GROUP} \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/zookeeper.properties.tpl b/dockerfile/conf/dolphinscheduler/zookeeper.properties.tpl new file mode 100644 index 0000000000..362a4e85ad --- /dev/null +++ b/dockerfile/conf/dolphinscheduler/zookeeper.properties.tpl @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181 +zookeeper.quorum=${ZOOKEEPER_QUORUM} + +# dolphinscheduler root directory +#zookeeper.dolphinscheduler.root=/dolphinscheduler + +# dolphinscheduler failover directory +#zookeeper.session.timeout=60000 +#zookeeper.connection.timeout=30000 +#zookeeper.retry.base.sleep=100 +#zookeeper.retry.max.sleep=30000 +#zookeeper.retry.maxtime=10 \ No newline at end of file diff --git a/dockerfile/hooks/check b/dockerfile/hooks/check index 21f3f4db12..fdb1902311 100644 --- a/dockerfile/hooks/check +++ b/dockerfile/hooks/check @@ -16,7 +16,7 @@ # limitations under the License. # echo "------ dolphinscheduler check - server - status -------" -sleep 20 +sleep 60 server_num=$(docker top `docker container list | grep '/sbin/tini' | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l) if [ $server_num -eq 5 ] then @@ -25,3 +25,11 @@ else echo "Server start failed "$server_num exit 1 fi +ready=`curl http://127.0.0.1:8888/dolphinscheduler/login -d 'userName=admin&userPassword=dolphinscheduler123' -v | grep "login success" | wc -l` +if [ $ready -eq 1 ] +then + echo "Servers is ready" +else + echo "Servers is not ready" + exit 1 +fi diff --git a/dockerfile/startup-init-conf.sh b/dockerfile/startup-init-conf.sh index d2b50fa3e7..da6eb21b7d 100644 --- a/dockerfile/startup-init-conf.sh +++ b/dockerfile/startup-init-conf.sh @@ -35,13 +35,10 @@ export POSTGRESQL_DATABASE=${POSTGRESQL_DATABASE:-"dolphinscheduler"} #============================================================================ export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"} export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"} -export DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_DOWNLOAD_BASEDIR_PATH:-"/tmp/dolphinscheduler/download"} -export DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH=${DOLPHINSCHEDULER_PROCESS_EXEC_BASEPATH:-"/tmp/dolphinscheduler/exec"} #============================================================================ # Zookeeper #============================================================================ -export TASK_QUEUE=${TASK_QUEUE:-"zookeeper"} export ZOOKEEPER_QUORUM=${ZOOKEEPER_QUORUM:-"127.0.0.1:2181"} #============================================================================ @@ -54,6 +51,7 @@ export MASTER_TASK_COMMIT_RETRYTIMES=${MASTER_TASK_COMMIT_RETRYTIMES:-"5"} export MASTER_TASK_COMMIT_INTERVAL=${MASTER_TASK_COMMIT_INTERVAL:-"1000"} export MASTER_MAX_CPULOAD_AVG=${MASTER_MAX_CPULOAD_AVG:-"100"} export MASTER_RESERVED_MEMORY=${MASTER_RESERVED_MEMORY:-"0.1"} +export MASTER_LISTEN_PORT=${MASTER_LISTEN_PORT:-"5678"} #============================================================================ # Worker Server @@ -63,6 +61,8 @@ export WORKER_HEARTBEAT_INTERVAL=${WORKER_HEARTBEAT_INTERVAL:-"10"} export WORKER_FETCH_TASK_NUM=${WORKER_FETCH_TASK_NUM:-"3"} export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"} export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"} +export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"} +export WORKER_GROUP=${WORKER_GROUP:-"default"} #============================================================================ # Alert Server diff --git a/dockerfile/startup.sh b/dockerfile/startup.sh index 30439c2321..af3c456116 100644 --- a/dockerfile/startup.sh +++ b/dockerfile/startup.sh @@ -70,10 +70,10 @@ initZK() { while ! nc -z ${line%:*} ${line#*:}; do counter=$((counter+1)) if [ $counter == 30 ]; then - log "Error: Couldn't connect to zookeeper." + echo "Error: Couldn't connect to zookeeper." exit 1 fi - log "Trying to connect to zookeeper at ${line}. Attempt $counter." + echo "Trying to connect to zookeeper at ${line}. Attempt $counter." sleep 5 done done diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java index 58e3800339..965677e7e1 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactory.java @@ -17,9 +17,6 @@ package org.apache.dolphinscheduler.alert.template; import org.apache.dolphinscheduler.alert.template.impl.DefaultHTMLTemplate; -import org.apache.dolphinscheduler.alert.utils.Constants; -import org.apache.dolphinscheduler.alert.utils.PropertyUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,8 +27,6 @@ public class AlertTemplateFactory { private static final Logger logger = LoggerFactory.getLogger(AlertTemplateFactory.class); - private static final String alertTemplate = PropertyUtils.getString(Constants.ALERT_TEMPLATE); - private AlertTemplateFactory(){} /** @@ -39,16 +34,6 @@ public class AlertTemplateFactory { * @return a template, default is DefaultHTMLTemplate */ public static AlertTemplate getMessageTemplate() { - - if(StringUtils.isEmpty(alertTemplate)){ - return new DefaultHTMLTemplate(); - } - - switch (alertTemplate){ - case "html": - return new DefaultHTMLTemplate(); - default: - throw new IllegalArgumentException(String.format("not support alert template: %s",alertTemplate)); - } + return new DefaultHTMLTemplate(); } } diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java index 94d95b3c26..28be8aa195 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java @@ -77,8 +77,6 @@ public class Constants { public static final int NUMBER_1000 = 1000; - public static final String ALERT_TEMPLATE = "alert.template"; - public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name"; public static final String SPRING_DATASOURCE_URL = "spring.datasource.url"; diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java index 900c120cd4..170c0dd37e 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtils.java @@ -48,7 +48,7 @@ public class EnterpriseWeChatUtils { private static final String ENTERPRISE_WE_CHAT_SECRET = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_SECRET); private static final String ENTERPRISE_WE_CHAT_TOKEN_URL = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TOKEN_URL); - private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL + private static final String ENTERPRISE_WE_CHAT_TOKEN_URL_REPLACE = ENTERPRISE_WE_CHAT_TOKEN_URL == null ? null : ENTERPRISE_WE_CHAT_TOKEN_URL .replaceAll("\\$corpId", ENTERPRISE_WE_CHAT_CORP_ID) .replaceAll("\\$secret", ENTERPRISE_WE_CHAT_SECRET); @@ -266,5 +266,4 @@ public class EnterpriseWeChatUtils { return result; } - } diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/ExcelUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/ExcelUtils.java index b2e71a8980..366e2828c5 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/ExcelUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/ExcelUtils.java @@ -26,6 +26,7 @@ import org.apache.poi.ss.usermodel.HorizontalAlignment; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.*; @@ -102,7 +103,11 @@ public class ExcelUtils { for (int i = 0; i < headerList.size(); i++) { sheet.setColumnWidth(i, headerList.get(i).length() * 800); + } + File file = new File(xlsFilePath); + if (!file.exists()) { + file.mkdirs(); } //setting file output diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java index 1cd9f490b2..5f8371de2d 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/JSONUtils.java @@ -16,12 +16,13 @@ */ package org.apache.dolphinscheduler.alert.utils; +import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONArray; -import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Collections; import java.util.List; /** @@ -38,7 +39,7 @@ public class JSONUtils { */ public static String toJsonString(Object object) { try{ - return JSONObject.toJSONString(object,false); + return JSON.toJSONString(object,false); } catch (Exception e) { throw new RuntimeException("Json deserialization exception.", e); } @@ -50,19 +51,19 @@ public class JSONUtils { * @param json the json * @param clazz c * @param the generic clazz - * @return the result list + * @return the result list or empty list */ public static List toList(String json, Class clazz) { if (StringUtils.isEmpty(json)) { - return null; + return Collections.emptyList(); } try { - return JSONArray.parseArray(json, clazz); + return JSON.parseArray(json, clazz); } catch (Exception e) { logger.error("JSONArray.parseArray exception!",e); } - return null; + return Collections.emptyList(); } } diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java index 130ad8ade6..ef364cb1c2 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java @@ -39,29 +39,29 @@ public class MailUtils { public static final Logger logger = LoggerFactory.getLogger(MailUtils.class); - public static final String mailProtocol = PropertyUtils.getString(Constants.MAIL_PROTOCOL); + public static final String MAIL_PROTOCOL = PropertyUtils.getString(Constants.MAIL_PROTOCOL); - public static final String mailServerHost = PropertyUtils.getString(Constants.MAIL_SERVER_HOST); + public static final String MAIL_SERVER_HOST = PropertyUtils.getString(Constants.MAIL_SERVER_HOST); - public static final Integer mailServerPort = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT); + public static final Integer MAIL_SERVER_PORT = PropertyUtils.getInt(Constants.MAIL_SERVER_PORT); - public static final String mailSender = PropertyUtils.getString(Constants.MAIL_SENDER); + public static final String MAIL_SENDER = PropertyUtils.getString(Constants.MAIL_SENDER); - public static final String mailUser = PropertyUtils.getString(Constants.MAIL_USER); + public static final String MAIL_USER = PropertyUtils.getString(Constants.MAIL_USER); - public static final String mailPasswd = PropertyUtils.getString(Constants.MAIL_PASSWD); + public static final String MAIL_PASSWD = PropertyUtils.getString(Constants.MAIL_PASSWD); - public static final Boolean mailUseStartTLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE); + public static final Boolean MAIL_USE_START_TLS = PropertyUtils.getBoolean(Constants.MAIL_SMTP_STARTTLS_ENABLE); - public static final Boolean mailUseSSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE); + public static final Boolean MAIL_USE_SSL = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE); - public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH); + public static final String xlsFilePath = PropertyUtils.getString(Constants.XLS_FILE_PATH,"/tmp/xls"); - public static final String starttlsEnable = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE); + public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE); - public static final String sslEnable = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE); + public static final String SSL_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE); - public static final String sslTrust = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST); + public static final String SSL_TRUST = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST); public static final AlertTemplate alertTemplate = AlertTemplateFactory.getMessageTemplate(); @@ -105,7 +105,7 @@ public class MailUtils { try { Session session = getSession(); email.setMailSession(session); - email.setFrom(mailSender); + email.setFrom(MAIL_SENDER); email.setCharset(Constants.UTF_8); if (CollectionUtils.isNotEmpty(receivers)){ // receivers mail @@ -199,10 +199,10 @@ public class MailUtils { // 2. creating mail: Creating a MimeMessage MimeMessage msg = new MimeMessage(session); // 3. set sender - msg.setFrom(new InternetAddress(mailSender)); + msg.setFrom(new InternetAddress(MAIL_SENDER)); // 4. set receivers for (String receiver : receivers) { - msg.addRecipients(MimeMessage.RecipientType.TO, InternetAddress.parse(receiver)); + msg.addRecipients(Message.RecipientType.TO, InternetAddress.parse(receiver)); } return msg; } @@ -213,19 +213,19 @@ public class MailUtils { */ private static Session getSession() { Properties props = new Properties(); - props.setProperty(Constants.MAIL_HOST, mailServerHost); - props.setProperty(Constants.MAIL_PORT, String.valueOf(mailServerPort)); + props.setProperty(Constants.MAIL_HOST, MAIL_SERVER_HOST); + props.setProperty(Constants.MAIL_PORT, String.valueOf(MAIL_SERVER_PORT)); props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE); - props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, mailProtocol); - props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, starttlsEnable); - props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, sslEnable); - props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, sslTrust); + props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, MAIL_PROTOCOL); + props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, STARTTLS_ENABLE); + props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, SSL_ENABLE); + props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST); Authenticator auth = new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { // mail username and password - return new PasswordAuthentication(mailUser, mailPasswd); + return new PasswordAuthentication(MAIL_USER, MAIL_PASSWD); } }; @@ -248,12 +248,10 @@ public class MailUtils { */ if(CollectionUtils.isNotEmpty(receiversCc)){ for (String receiverCc : receiversCc){ - msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(receiverCc)); + msg.addRecipients(Message.RecipientType.CC, InternetAddress.parse(receiverCc)); } } - // set receivers type to cc - // msg.addRecipients(MimeMessage.RecipientType.CC, InternetAddress.parse(propMap.get("${CC}"))); // set subject msg.setSubject(title); MimeMultipart partList = new MimeMultipart(); @@ -262,9 +260,14 @@ public class MailUtils { part1.setContent(partContent, Constants.TEXT_HTML_CHARSET_UTF_8); // set attach file MimeBodyPart part2 = new MimeBodyPart(); + File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS); + if (!file.getParentFile().exists()) { + file.getParentFile().mkdirs(); + } // make excel file + ExcelUtils.genExcelFile(content,title,xlsFilePath); - File file = new File(xlsFilePath + Constants.SINGLE_SLASH + title + Constants.EXCEL_SUFFIX_XLS); + part2.attachFile(file); part2.setFileName(MimeUtility.encodeText(title + Constants.EXCEL_SUFFIX_XLS,Constants.UTF_8,"B")); // add components to collection diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java index c2f479d101..91f7261db2 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/PropertyUtils.java @@ -79,6 +79,18 @@ public class PropertyUtils { return properties.getProperty(key.trim()); } + /** + * get property value + * + * @param key property name + * @param defaultVal default value + * @return property value + */ + public static String getString(String key, String defaultVal) { + String val = properties.getProperty(key.trim()); + return val == null ? defaultVal : val; + } + /** * get property value * diff --git a/dolphinscheduler-alert/src/main/resources/alert.properties b/dolphinscheduler-alert/src/main/resources/alert.properties index 9f5acea188..3e83c01235 100644 --- a/dolphinscheduler-alert/src/main/resources/alert.properties +++ b/dolphinscheduler-alert/src/main/resources/alert.properties @@ -18,9 +18,6 @@ #alert type is EMAIL/SMS alert.type=EMAIL -# alter msg template, default is html template -#alert.template=html - # mail server configuration mail.protocol=SMTP mail.server.host=xxx.xxx.com @@ -35,18 +32,18 @@ mail.smtp.ssl.enable=false mail.smtp.ssl.trust=xxx.xxx.com #xls file path,need create if not exist -xls.file.path=/tmp/xls +#xls.file.path=/tmp/xls # Enterprise WeChat configuration enterprise.wechat.enable=false -enterprise.wechat.corp.id=xxxxxxx -enterprise.wechat.secret=xxxxxxx -enterprise.wechat.agent.id=xxxxxxx -enterprise.wechat.users=xxxxxxx -enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret -enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token -enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"} -enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}} +#enterprise.wechat.corp.id=xxxxxxx +#enterprise.wechat.secret=xxxxxxx +#enterprise.wechat.agent.id=xxxxxxx +#enterprise.wechat.users=xxxxxxx +#enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret +#enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token +#enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"} +#enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}} diff --git a/dolphinscheduler-alert/src/main/resources/logback-alert.xml b/dolphinscheduler-alert/src/main/resources/logback-alert.xml new file mode 100644 index 0000000000..5d1c07858d --- /dev/null +++ b/dolphinscheduler-alert/src/main/resources/logback-alert.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + ${log.base}/dolphinscheduler-alert.log + + ${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log + 20 + 64MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java index 6865b895e2..32201e6011 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/template/AlertTemplateFactoryTest.java @@ -47,7 +47,6 @@ public class AlertTemplateFactoryTest { public void testGetMessageTemplate(){ PowerMockito.mockStatic(PropertyUtils.class); - when(PropertyUtils.getString(Constants.ALERT_TEMPLATE)).thenReturn("html"); AlertTemplate defaultTemplate = AlertTemplateFactory.getMessageTemplate(); diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java index 15b92a622e..d0f3538c1b 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/EnterpriseWeChatUtilsTest.java @@ -17,13 +17,22 @@ package org.apache.dolphinscheduler.alert.utils; import com.alibaba.fastjson.JSON; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.dao.entity.Alert; import org.junit.Assert; +import org.junit.Before; import org.junit.Ignore; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; +import java.util.*; /** * Please manually modify the configuration file before testing. @@ -36,84 +45,220 @@ import java.util.Collection; * enterprise.wechat.agent.id * enterprise.wechat.users */ -@Ignore +@PrepareForTest(PropertyUtils.class) +@RunWith(PowerMockRunner.class) public class EnterpriseWeChatUtilsTest { - private String agentId = PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_AGENT_ID); // app id - private Collection listUserId = Arrays.asList(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USERS).split(",")); + private static final String toParty = "wwc99134b6fc1edb6"; + private static final String enterpriseWechatSecret = "Uuv2KFrkdf7SeKOsTDCpsTkpawXBMNRhFy6VKX5FV"; + private static final String enterpriseWechatAgentId = "1000004"; + private static final String enterpriseWechatUsers="LiGang,journey"; + private static final String msg = "hello world"; + + private static final String enterpriseWechatTeamSendMsg = "{\\\"toparty\\\":\\\"$toParty\\\",\\\"agentid\\\":\\\"$agentId\\\",\\\"msgtype\\\":\\\"text\\\",\\\"text\\\":{\\\"content\\\":\\\"$msg\\\"},\\\"safe\\\":\\\"0\\\"}"; + private static final String enterpriseWechatUserSendMsg = "{\\\"touser\\\":\\\"$toUser\\\",\\\"agentid\\\":\\\"$agentId\\\",\\\"msgtype\\\":\\\"markdown\\\",\\\"markdown\\\":{\\\"content\\\":\\\"$msg\\\"}}"; + + @Before + public void init(){ + PowerMockito.mockStatic(PropertyUtils.class); + Mockito.when(PropertyUtils.getBoolean(Constants.ENTERPRISE_WECHAT_ENABLE)).thenReturn(true); + Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_USER_SEND_MSG)).thenReturn(enterpriseWechatUserSendMsg); + Mockito.when(PropertyUtils.getString(Constants.ENTERPRISE_WECHAT_TEAM_SEND_MSG)).thenReturn(enterpriseWechatTeamSendMsg); + } - // Please change - private String partyId = "2"; - private Collection listPartyId = Arrays.asList("2","4"); @Test - public void testSendSingleTeamWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals("ok",errmsg); - } catch (IOException e) { - e.printStackTrace(); - } + public void testIsEnable(){ + Boolean weChartEnable = EnterpriseWeChatUtils.isEnable(); + Assert.assertTrue(weChartEnable); } + @Test - public void testSendMultiTeamWeChat() { - - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals("ok",errmsg); - } catch (IOException e) { - e.printStackTrace(); - } + public void testMakeTeamSendMsg1(){ + String sendMsg = EnterpriseWeChatUtils.makeTeamSendMsg(toParty, enterpriseWechatSecret, msg); + Assert.assertTrue(sendMsg.contains(toParty)); + Assert.assertTrue(sendMsg.contains(enterpriseWechatSecret)); + Assert.assertTrue(sendMsg.contains(msg)); + } + @Test - public void testSendSingleUserWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" + - ">**matter details** \n" + - ">matter:meeting
" + - ">organizer:@miglioguan \n" + - ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + - "> \n" + - ">meeting room:Guangzhou TIT 1st Floor 301 \n" + - ">date:May 18, 2018 \n" + - ">time:9:00-11:00 am \n" + - "> \n" + - ">please attend the meeting on time\n" + - "> \n" + - ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\""); - - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals("ok",errmsg); - } catch (IOException e) { - e.printStackTrace(); - } + public void testMakeTeamSendMsg2(){ + List parties = new ArrayList<>(); + parties.add(toParty); + parties.add("test1"); + + String sendMsg = EnterpriseWeChatUtils.makeTeamSendMsg(parties, enterpriseWechatSecret, msg); + Assert.assertTrue(sendMsg.contains(toParty)); + Assert.assertTrue(sendMsg.contains(enterpriseWechatSecret)); + Assert.assertTrue(sendMsg.contains(msg)); } @Test - public void testSendMultiUserWeChat() { - try { - String token = EnterpriseWeChatUtils.getToken(); - - String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); - String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); - - String errmsg = JSON.parseObject(resp).getString("errmsg"); - Assert.assertEquals("ok",errmsg); - } catch (IOException e) { - e.printStackTrace(); - } + public void tesMakeUserSendMsg1(){ + + String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(enterpriseWechatUsers, enterpriseWechatAgentId, msg); + Assert.assertTrue(sendMsg.contains(enterpriseWechatUsers)); + Assert.assertTrue(sendMsg.contains(enterpriseWechatAgentId)); + Assert.assertTrue(sendMsg.contains(msg)); } + @Test + public void tesMakeUserSendMsg2(){ + List users = new ArrayList<>(); + users.add("user1"); + users.add("user2"); + + String sendMsg = EnterpriseWeChatUtils.makeUserSendMsg(users, enterpriseWechatAgentId, msg); + Assert.assertTrue(sendMsg.contains(users.get(0))); + Assert.assertTrue(sendMsg.contains(users.get(1))); + Assert.assertTrue(sendMsg.contains(enterpriseWechatAgentId)); + Assert.assertTrue(sendMsg.contains(msg)); + } + + @Test + public void testMarkdownByAlertForText(){ + Alert alertForText = createAlertForText(); + String result = EnterpriseWeChatUtils.markdownByAlert(alertForText); + Assert.assertNotNull(result); + } + + @Test + public void testMarkdownByAlertForTable(){ + Alert alertForText = createAlertForTable(); + String result = EnterpriseWeChatUtils.markdownByAlert(alertForText); + Assert.assertNotNull(result); + } + + private Alert createAlertForText(){ + String content ="[\"id:69\"," + + "\"name:UserBehavior-0--1193959466\"," + + "\"Job name: Start workflow\"," + + "\"State: SUCCESS\"," + + "\"Recovery:NO\"," + + "\"Run time: 1\"," + + "\"Start time: 2018-08-06 10:31:34.0\"," + + "\"End time: 2018-08-06 10:31:49.0\"," + + "\"Host: 192.168.xx.xx\"," + + "\"Notify group :4\"]"; + + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TEXT); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(4); + + return alert; + } + + private String list2String(){ + + LinkedHashMap map1 = new LinkedHashMap<>(); + map1.put("mysql service name","mysql200"); + map1.put("mysql address","192.168.xx.xx"); + map1.put("port","3306"); + map1.put("no index of number","80"); + map1.put("database client connections","190"); + + LinkedHashMap map2 = new LinkedHashMap<>(); + map2.put("mysql service name","mysql210"); + map2.put("mysql address","192.168.xx.xx"); + map2.put("port", "3306"); + map2.put("no index of number", "10"); + map2.put("database client connections", "90"); + + List> maps = new ArrayList<>(); + maps.add(0, map1); + maps.add(1, map2); + String mapjson = JSONUtils.toJsonString(maps); + return mapjson; + } + + private Alert createAlertForTable(){ + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TABLE); + String content= list2String(); + alert.setContent(content); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + return alert; + } + + + + +// @Test +// public void testSendSingleTeamWeChat() { +// try { +// String token = EnterpriseWeChatUtils.getToken(); +// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(partyId, agentId, "hello world"); +// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); +// +// String errmsg = JSON.parseObject(resp).getString("errmsg"); +// Assert.assertEquals("ok",errmsg); +// } catch (IOException e) { +// e.printStackTrace(); +// } +// } +// +// @Test +// public void testSendMultiTeamWeChat() { +// +// try { +// String token = EnterpriseWeChatUtils.getToken(); +// String msg = EnterpriseWeChatUtils.makeTeamSendMsg(listPartyId, agentId, "hello world"); +// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); +// +// String errmsg = JSON.parseObject(resp).getString("errmsg"); +// Assert.assertEquals("ok",errmsg); +// } catch (IOException e) { +// e.printStackTrace(); +// } +// } +// +// @Test +// public void testSendSingleUserWeChat() { +// try { +// String token = EnterpriseWeChatUtils.getToken(); +// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId.stream().findFirst().get(), agentId, "your meeting room has been booked and will be synced to the 'mailbox' later \n" + +// ">**matter details** \n" + +// ">matter:meeting
" + +// ">organizer:@miglioguan \n" + +// ">participant:@miglioguan、@kunliu、@jamdeezhou、@kanexiong、@kisonwang \n" + +// "> \n" + +// ">meeting room:Guangzhou TIT 1st Floor 301 \n" + +// ">date:May 18, 2018 \n" + +// ">time:9:00-11:00 am \n" + +// "> \n" + +// ">please attend the meeting on time\n" + +// "> \n" + +// ">to modify the meeting information, please click: [Modify Meeting Information](https://work.weixin.qq.com)\""); +// +// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); +// +// String errmsg = JSON.parseObject(resp).getString("errmsg"); +// Assert.assertEquals("ok",errmsg); +// } catch (IOException e) { +// e.printStackTrace(); +// } +// } +// +// @Test +// public void testSendMultiUserWeChat() { +// try { +// String token = EnterpriseWeChatUtils.getToken(); +// +// String msg = EnterpriseWeChatUtils.makeUserSendMsg(listUserId, agentId, "hello world"); +// String resp = EnterpriseWeChatUtils.sendEnterpriseWeChat("utf-8", msg, token); +// +// String errmsg = JSON.parseObject(resp).getString("errmsg"); +// Assert.assertEquals("ok",errmsg); +// } catch (IOException e) { +// e.printStackTrace(); +// } +// } + } diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/ExcelUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/ExcelUtilsTest.java index 3ef43aeef4..8ee62358dd 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/ExcelUtilsTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/ExcelUtilsTest.java @@ -89,4 +89,14 @@ public class ExcelUtilsTest { ExcelUtils.genExcelFile(incorrectContent1, title, xlsFilePath); } + + /** + * Test GenExcelFile (check directory) + */ + @Test + public void testGenExcelFileByCheckDir() { + ExcelUtils.genExcelFile("[{\"a\": \"a\"},{\"a\": \"a\"}]", "t", "/tmp/xls"); + File file = new File("/tmp/xls" + Constants.SINGLE_SLASH + "t" + Constants.EXCEL_SUFFIX_XLS); + file.delete(); + } } \ No newline at end of file diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java index 277c42b5bd..a151abc714 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/JSONUtilsTest.java @@ -26,8 +26,7 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.Assert.*; public class JSONUtilsTest { @@ -73,7 +72,7 @@ public class JSONUtilsTest { result = JSONUtils.toJsonString(null); logger.info(result); - assertEquals(result,"null"); + assertEquals("null", result); } @@ -86,25 +85,27 @@ public class JSONUtilsTest { //Invoke toList List result = JSONUtils.toList(expected ,LinkedHashMap.class); //Equal list size=1 - assertEquals(result.size(),1); + assertEquals(1,result.size()); //Transform entity to LinkedHashMap LinkedHashMap entity = result.get(0); //Equal expected values - assertEquals(entity.get("mysql service name"),"mysql200"); - assertEquals(entity.get("mysql address"),"192.168.xx.xx"); - assertEquals(entity.get("port"),"3306"); - assertEquals(entity.get("no index of number"),"80"); - assertEquals(entity.get("database client connections"),"190"); + assertEquals("mysql200",entity.get("mysql service name")); + assertEquals("192.168.xx.xx", entity.get("mysql address")); + assertEquals("3306", entity.get("port")); + assertEquals("80", entity.get("no index of number")); + assertEquals("190", entity.get("database client connections")); - //If param is null, then return null + //If param is null, then return empty list result = JSONUtils.toList(null ,LinkedHashMap.class); - assertNull(result); + assertNotNull(result); + assertTrue(result.isEmpty()); - //If param is incorrect, then return null and log error message + //If param is incorrect, then return empty list and log error message result = JSONUtils.toList("}{" ,LinkedHashMap.class); - assertNull(result); + assertNotNull(result); + assertTrue(result.isEmpty()); } diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java index 612de3e31d..1820a1ef89 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/utils/MailUtilsTest.java @@ -23,7 +23,7 @@ import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.DaoFactory; import org.apache.dolphinscheduler.dao.entity.Alert; import org.apache.dolphinscheduler.dao.entity.User; -import org.junit.Ignore; +import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -33,7 +33,6 @@ import java.util.*; /** */ -@Ignore public class MailUtilsTest { private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class); @Test @@ -138,8 +137,10 @@ public class MailUtilsTest { * Table */ @Test - public void addAlertTable(){ + public void testAddAlertTable(){ + logger.info("testAddAlertTable"); AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + Assert.assertNotNull(alertDao); Alert alert = new Alert(); alert.setTitle("Mysql Exception"); alert.setShowType(ShowType.TABLE); @@ -149,6 +150,7 @@ public class MailUtilsTest { alert.setAlertType(AlertType.EMAIL); alert.setAlertGroupId(1); alertDao.addAlert(alert); + logger.info("" +alert); } @Test diff --git a/dolphinscheduler-alert/src/test/resources/alert.properties b/dolphinscheduler-alert/src/test/resources/alert.properties deleted file mode 100644 index ce233cea37..0000000000 --- a/dolphinscheduler-alert/src/test/resources/alert.properties +++ /dev/null @@ -1,67 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# For unit test - -#alert type is EMAIL/SMS -alert.type=EMAIL - -# mail server configuration -mail.protocol=SMTP -mail.server.host=xxx.xxx.test -mail.server.port=25 -mail.sender=xxx@xxx.com -mail.user=xxx@xxx.com -mail.passwd=111111 - -# Test double -test.server.factor=3.0 - - -# Test NumberFormat -test.server.testnumber=abc - -# Test array -test.server.list=xxx.xxx.test1,xxx.xxx.test2,xxx.xxx.test3 - -# Test enum -test.server.enum1=MASTER -test.server.enum2=DEAD_SERVER -test.server.enum3=abc - -# TLS -mail.smtp.starttls.enable=true -# SSL -mail.smtp.ssl.enable=false -mail.smtp.ssl.trust=xxx.xxx.com - -#xls file path,need create if not exist -xls.file.path=/tmp/xls - -# Enterprise WeChat configuration -enterprise.wechat.enable=false -enterprise.wechat.corp.id=xxxxxxx -enterprise.wechat.secret=xxxxxxx -enterprise.wechat.agent.id=xxxxxxx -enterprise.wechat.users=xxxxxxx -enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret -enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token -enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"} -enterprise.wechat.user.send.msg={\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}} - - - diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java index 8376c2876d..e4817ddc18 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java @@ -21,11 +21,15 @@ import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.web.servlet.ServletComponentScan; import org.springframework.boot.web.servlet.support.SpringBootServletInitializer; import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.FilterType; import springfox.documentation.swagger2.annotations.EnableSwagger2; @SpringBootApplication @ServletComponentScan -@ComponentScan("org.apache.dolphinscheduler") +@ComponentScan(basePackages = {"org.apache.dolphinscheduler"}, + excludeFilters = @ComponentScan.Filter(type = FilterType.REGEX, + pattern = "org.apache.dolphinscheduler.server.*")) + public class ApiApplicationServer extends SpringBootServletInitializer { public static void main(String[] args) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index c03281df7e..8731b264e9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AccessTokenService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -37,13 +38,14 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * access token controller */ @Api(tags = "ACCESS_TOKEN_TAG", position = 1) @RestController @RequestMapping("/access-token") -public class AccessTokenController extends BaseController{ +public class AccessTokenController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(AccessTokenController.class); @@ -54,140 +56,125 @@ public class AccessTokenController extends BaseController{ /** * create token - * @param loginUser login user - * @param userId token for user id + * + * @param loginUser login user + * @param userId token for user id * @param expireTime expire time for the token - * @param token token + * @param token token * @return create result state code */ @ApiIgnore @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_ACCESS_TOKEN_ERROR) public Result createToken(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token){ + @RequestParam(value = "userId") int userId, + @RequestParam(value = "expireTime") String expireTime, + @RequestParam(value = "token") String token) { logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), - userId,expireTime,token); - - try { - Map result = accessTokenService.createToken(userId, expireTime, token); - return returnDataList(result); - }catch (Exception e){ - logger.error(CREATE_ACCESS_TOKEN_ERROR.getMsg(),e); - return error(CREATE_ACCESS_TOKEN_ERROR.getCode(), CREATE_ACCESS_TOKEN_ERROR.getMsg()); - } + userId, expireTime, token); + + Map result = accessTokenService.createToken(userId, expireTime, token); + return returnDataList(result); } /** * generate token string - * @param loginUser login user - * @param userId token for user + * + * @param loginUser login user + * @param userId token for user * @param expireTime expire time * @return token string */ @ApiIgnore @PostMapping(value = "/generate") @ResponseStatus(HttpStatus.CREATED) + @ApiException(GENERATE_TOKEN_ERROR) public Result generateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "expireTime") String expireTime){ - logger.info("login user {}, generate token , userId : {} , token expire time : {}",loginUser,userId,expireTime); - try { - Map result = accessTokenService.generateToken(userId, expireTime); - return returnDataList(result); - }catch (Exception e){ - logger.error(GENERATE_TOKEN_ERROR.getMsg(),e); - return error(GENERATE_TOKEN_ERROR.getCode(), GENERATE_TOKEN_ERROR.getMsg()); - } + @RequestParam(value = "userId") int userId, + @RequestParam(value = "expireTime") String expireTime) { + logger.info("login user {}, generate token , userId : {} , token expire time : {}", loginUser, userId, expireTime); + Map result = accessTokenService.generateToken(userId, expireTime); + return returnDataList(result); } /** * query access token list paging * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search value - * @param pageSize page size + * @param pageSize page size * @return token list of page number and page size */ - @ApiOperation(value = "queryAccessTokenList", notes= "QUERY_ACCESS_TOKEN_LIST_NOTES") + @ApiOperation(value = "queryAccessTokenList", notes = "QUERY_ACCESS_TOKEN_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR) public Result queryAccessTokenList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list access token paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize); + loginUser.getUserName(), pageNo, searchVal, pageSize); + + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg(),e); - return error(QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getCode(),QUERY_ACCESSTOKEN_LIST_PAGING_ERROR.getMsg()); } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = accessTokenService.queryAccessTokenList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * delete access token by id + * * @param loginUser login user - * @param id token id + * @param id token id * @return delete result code */ @ApiIgnore @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_ACCESS_TOKEN_ERROR) public Result delAccessTokenById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { + @RequestParam(value = "id") int id) { logger.info("login user {}, delete access token, id: {},", loginUser.getUserName(), id); - try { - Map result = accessTokenService.delAccessTokenById(loginUser, id); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_ACCESS_TOKEN_ERROR.getMsg(),e); - return error(Status.DELETE_ACCESS_TOKEN_ERROR.getCode(), Status.DELETE_ACCESS_TOKEN_ERROR.getMsg()); - } + Map result = accessTokenService.delAccessTokenById(loginUser, id); + return returnDataList(result); } /** * update token - * @param loginUser login user - * @param id token id - * @param userId token for user + * + * @param loginUser login user + * @param id token id + * @param userId token for user * @param expireTime token expire time - * @param token token string + * @param token token string * @return update result code */ @ApiIgnore @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_ACCESS_TOKEN_ERROR) public Result updateToken(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id, @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime, - @RequestParam(value = "token") String token){ + @RequestParam(value = "token") String token) { logger.info("login user {}, update token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), - userId,expireTime,token); - - try { - Map result = accessTokenService.updateToken(id,userId, expireTime, token); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_ACCESS_TOKEN_ERROR.getMsg(),e); - return error(UPDATE_ACCESS_TOKEN_ERROR.getCode(), UPDATE_ACCESS_TOKEN_ERROR.getMsg()); - } + userId, expireTime, token); + + Map result = accessTokenService.updateToken(id, userId, expireTime, token); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java index e9bffa510b..35bbc2af2f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.AlertGroupService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -37,13 +38,15 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.HashMap; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * alert group controller */ @Api(tags = "ALERT_GROUP_TAG", position = 1) @RestController @RequestMapping("alert-group") -public class AlertGroupController extends BaseController{ +public class AlertGroupController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class); @@ -53,201 +56,187 @@ public class AlertGroupController extends BaseController{ /** * create alert group - * @param loginUser login user - * @param groupName group name - * @param groupType group type + * + * @param loginUser login user + * @param groupName group name + * @param groupType group type * @param description description * @return create result code */ - @ApiOperation(value = "createAlertgroup", notes= "CREATE_ALERT_GROUP_NOTES") + @ApiOperation(value = "createAlertgroup", notes = "CREATE_ALERT_GROUP_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"), - @ApiImplicitParam(name = "description", value = "DESC", dataType ="String") + @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"), + @ApiImplicitParam(name = "description", value = "DESC", dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_ALERT_GROUP_ERROR) public Result createAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "groupName") String groupName, - @RequestParam(value = "groupType") AlertType groupType, - @RequestParam(value = "description",required = false) String description) { + @RequestParam(value = "groupName") String groupName, + @RequestParam(value = "groupType") AlertType groupType, + @RequestParam(value = "description", required = false) String description) { logger.info("loginUser user {}, create alertgroup, groupName: {}, groupType: {}, desc: {}", - loginUser.getUserName(), groupName, groupType,description); - try { - Map result = alertGroupService.createAlertgroup(loginUser, groupName, groupType,description); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.CREATE_ALERT_GROUP_ERROR.getMsg(),e); - return error(Status.CREATE_ALERT_GROUP_ERROR.getCode(), Status.CREATE_ALERT_GROUP_ERROR.getMsg()); - } + loginUser.getUserName(), groupName, groupType, description); + Map result = alertGroupService.createAlertgroup(loginUser, groupName, groupType, description); + return returnDataList(result); } /** * alert group list + * * @param loginUser login user * @return alert group list */ - @ApiOperation(value = "list", notes= "QUERY_ALERT_GROUP_LIST_NOTES") + @ApiOperation(value = "list", notes = "QUERY_ALERT_GROUP_LIST_NOTES") @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_ALL_ALERTGROUP_ERROR) public Result list(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query all alertGroup", loginUser.getUserName()); - try{ - HashMap result = alertGroupService.queryAlertgroup(); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg(),e); - return error(Status.QUERY_ALL_ALERTGROUP_ERROR.getCode(), Status.QUERY_ALL_ALERTGROUP_ERROR.getMsg()); - } + HashMap result = alertGroupService.queryAlertgroup(); + return returnDataList(result); } /** * paging query alarm group list * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search value - * @param pageSize page size + * @param pageSize page size * @return alert group list page */ - @ApiOperation(value = "queryAlertGroupListPaging", notes= "QUERY_ALERT_GROUP_LIST_PAGING_NOTES") + @ApiOperation(value = "queryAlertGroupListPaging", notes = "QUERY_ALERT_GROUP_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(LIST_PAGING_ALERT_GROUP_ERROR) public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ + @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - searchVal = ParameterUtils.handleEscapes(searchVal); - result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize); + loginUser.getUserName(), pageNo, searchVal, pageSize); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg(),e); - return error(Status.LIST_PAGING_ALERT_GROUP_ERROR.getCode(), Status.LIST_PAGING_ALERT_GROUP_ERROR.getMsg()); } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * updateProcessInstance alert group - * @param loginUser login user - * @param id alert group id - * @param groupName group name - * @param groupType group type + * + * @param loginUser login user + * @param id alert group id + * @param groupName group name + * @param groupType group type * @param description description * @return update result code */ - @ApiOperation(value = "updateAlertgroup", notes= "UPDATE_ALERT_GROUP_NOTES") + @ApiOperation(value = "updateAlertgroup", notes = "UPDATE_ALERT_GROUP_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"), + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType ="AlertType"), - @ApiImplicitParam(name = "description", value = "DESC", dataType ="String") + @ApiImplicitParam(name = "groupType", value = "GROUP_TYPE", required = true, dataType = "AlertType"), + @ApiImplicitParam(name = "description", value = "DESC", dataType = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_ALERT_GROUP_ERROR) public Result updateAlertgroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id, @RequestParam(value = "groupName") String groupName, @RequestParam(value = "groupType") AlertType groupType, - @RequestParam(value = "description",required = false) String description) { + @RequestParam(value = "description", required = false) String description) { logger.info("login user {}, updateProcessInstance alertgroup, groupName: {}, groupType: {}, desc: {}", - loginUser.getUserName(), groupName, groupType,description); - try { - Map result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, description); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.UPDATE_ALERT_GROUP_ERROR.getMsg(),e); - return error(Status.UPDATE_ALERT_GROUP_ERROR.getCode(), Status.UPDATE_ALERT_GROUP_ERROR.getMsg()); - } + loginUser.getUserName(), groupName, groupType, description); + Map result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, description); + return returnDataList(result); } /** * delete alert group by id + * * @param loginUser login user - * @param id alert group id + * @param id alert group id * @return delete result code */ - @ApiOperation(value = "delAlertgroupById", notes= "DELETE_ALERT_GROUP_BY_ID_NOTES") + @ApiOperation(value = "delAlertgroupById", notes = "DELETE_ALERT_GROUP_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100") + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_ALERT_GROUP_ERROR) public Result delAlertgroupById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { + @RequestParam(value = "id") int id) { logger.info("login user {}, delete AlertGroup, id: {},", loginUser.getUserName(), id); - try { - Map result = alertGroupService.delAlertgroupById(loginUser, id); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.DELETE_ALERT_GROUP_ERROR.getMsg(),e); - return error(Status.DELETE_ALERT_GROUP_ERROR.getCode(), Status.DELETE_ALERT_GROUP_ERROR.getMsg()); - } + Map result = alertGroupService.delAlertgroupById(loginUser, id); + return returnDataList(result); } /** * check alert group exist + * * @param loginUser login user * @param groupName group name * @return check result code */ - @ApiOperation(value = "verifyGroupName", notes= "VERIFY_ALERT_GROUP_NAME_NOTES") + @ApiOperation(value = "verifyGroupName", notes = "VERIFY_ALERT_GROUP_NAME_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "groupName", value = "GROUP_NAME", required = true, dataType = "String"), }) @GetMapping(value = "/verify-group-name") @ResponseStatus(HttpStatus.OK) public Result verifyGroupName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="groupName") String groupName - ) { - logger.info("login user {}, verfiy group name: {}", - loginUser.getUserName(),groupName); - - return alertGroupService.verifyGroupName(loginUser, groupName); + @RequestParam(value = "groupName") String groupName) { + logger.info("login user {}, verify group name: {}", loginUser.getUserName(), groupName); + + boolean exist = alertGroupService.existGroupName(groupName); + Result result = new Result(); + if (exist) { + logger.error("group {} has exist, can't create again.", groupName); + result.setCode(Status.ALERT_GROUP_EXIST.getCode()); + result.setMsg(Status.ALERT_GROUP_EXIST.getMsg()); + } else { + result.setCode(Status.SUCCESS.getCode()); + result.setMsg(Status.SUCCESS.getMsg()); + } + return result; } /** * grant user * - * @param loginUser login user - * @param userIds user ids in the group + * @param loginUser login user + * @param userIds user ids in the group * @param alertgroupId alert group id * @return grant result code */ - @ApiOperation(value = "grantUser", notes= "GRANT_ALERT_GROUP_NOTES") + @ApiOperation(value = "grantUser", notes = "GRANT_ALERT_GROUP_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int",example = "100"), + @ApiImplicitParam(name = "id", value = "ALERT_GROUP_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "userIds", value = "USER_IDS", required = true, dataType = "String") }) @PostMapping(value = "/grant-user") @ResponseStatus(HttpStatus.OK) + @ApiException(ALERT_GROUP_GRANT_USER_ERROR) public Result grantUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "alertgroupId") int alertgroupId, - @RequestParam(value = "userIds") String userIds) { - logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId,userIds); - try { - Map result = alertGroupService.grantUser(loginUser, alertgroupId, userIds); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg(),e); - return error(Status.ALERT_GROUP_GRANT_USER_ERROR.getCode(), Status.ALERT_GROUP_GRANT_USER_ERROR.getMsg()); - } + @RequestParam(value = "alertgroupId") int alertgroupId, + @RequestParam(value = "userIds") String userIds) { + logger.info("login user {}, grant user, alertGroupId: {},userIds : {}", loginUser.getUserName(), alertgroupId, userIds); + Map result = alertGroupService.grantUser(loginUser, alertgroupId, userIds); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java index f93e7d6944..f53391f203 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataAnalysisController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataAnalysisService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -25,7 +26,6 @@ import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; -import org.apache.dolphinscheduler.api.enums.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -35,13 +35,15 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * data analysis controller */ @Api(tags = "DATA_ANALYSIS_TAG", position = 1) @RestController @RequestMapping("projects/analysis") -public class DataAnalysisController extends BaseController{ +public class DataAnalysisController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(DataAnalysisController.class); @@ -54,31 +56,27 @@ public class DataAnalysisController extends BaseController{ * * @param loginUser login user * @param startDate count start date - * @param endDate count end date + * @param endDate count end date * @param projectId project id * @return task instance count data */ - @ApiOperation(value = "countTaskState", notes= "COUNT_TASK_STATE_NOTES") + @ApiOperation(value = "countTaskState", notes = "COUNT_TASK_STATE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/task-state-count") + @GetMapping(value = "/task-state-count") @ResponseStatus(HttpStatus.OK) + @ApiException(TASK_INSTANCE_STATE_COUNT_ERROR) public Result countTaskState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value="startDate", required=false) String startDate, - @RequestParam(value="endDate", required=false) String endDate, - @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ - try{ - logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}", - loginUser.getUserName(), startDate, endDate, projectId); - Map result = dataAnalysisService.countTaskStateByProject(loginUser,projectId, startDate, endDate); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg(),e); - return error(Status.TASK_INSTANCE_STATE_COUNT_ERROR.getCode(), Status.TASK_INSTANCE_STATE_COUNT_ERROR.getMsg()); - } + @RequestParam(value = "startDate", required = false) String startDate, + @RequestParam(value = "endDate", required = false) String endDate, + @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) { + logger.info("count task state, user:{}, start date: {}, end date:{}, project id {}", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countTaskStateByProject(loginUser, projectId, startDate, endDate); + return returnDataList(result); } /** @@ -86,31 +84,27 @@ public class DataAnalysisController extends BaseController{ * * @param loginUser login user * @param startDate start date - * @param endDate end date + * @param endDate end date * @param projectId project id * @return process instance data */ - @ApiOperation(value = "countProcessInstanceState", notes= "COUNT_PROCESS_INSTANCE_NOTES") + @ApiOperation(value = "countProcessInstanceState", notes = "COUNT_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/process-state-count") + @GetMapping(value = "/process-state-count") @ResponseStatus(HttpStatus.OK) + @ApiException(COUNT_PROCESS_INSTANCE_STATE_ERROR) public Result countProcessInstanceState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value="startDate", required=false) String startDate, - @RequestParam(value="endDate", required=false) String endDate, - @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ - try{ - logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id:{}", - loginUser.getUserName(), startDate, endDate, projectId); - Map result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(),e); - return error(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getCode(), Status.COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg()); - } + @RequestParam(value = "startDate", required = false) String startDate, + @RequestParam(value = "endDate", required = false) String endDate, + @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) { + logger.info("count process instance state, user:{}, start date: {}, end date:{}, project id:{}", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate); + return returnDataList(result); } /** @@ -120,23 +114,19 @@ public class DataAnalysisController extends BaseController{ * @param projectId project id * @return definition count in project id */ - @ApiOperation(value = "countDefinitionByUser", notes= "COUNT_PROCESS_DEFINITION_BY_USER_NOTES") + @ApiOperation(value = "countDefinitionByUser", notes = "COUNT_PROCESS_DEFINITION_BY_USER_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/define-user-count") + @GetMapping(value = "/define-user-count") @ResponseStatus(HttpStatus.OK) + @ApiException(COUNT_PROCESS_DEFINITION_USER_ERROR) public Result countDefinitionByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ - try{ - logger.info("count process definition , user:{}, project id:{}", - loginUser.getUserName(), projectId); - Map result = dataAnalysisService.countDefinitionByUser(loginUser, projectId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg(),e); - return error(Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getCode(), Status.COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg()); - } + @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) { + logger.info("count process definition , user:{}, project id:{}", + loginUser.getUserName(), projectId); + Map result = dataAnalysisService.countDefinitionByUser(loginUser, projectId); + return returnDataList(result); } @@ -145,31 +135,27 @@ public class DataAnalysisController extends BaseController{ * * @param loginUser login user * @param startDate start date - * @param endDate end date + * @param endDate end date * @param projectId project id * @return command state in project id */ - @ApiOperation(value = "countCommandState", notes= "COUNT_COMMAND_STATE_NOTES") + @ApiOperation(value = "countCommandState", notes = "COUNT_COMMAND_STATE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "startDate", value = "START_DATE", dataType = "String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType ="String"), - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "endDate", value = "END_DATE", dataType = "String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/command-state-count") + @GetMapping(value = "/command-state-count") @ResponseStatus(HttpStatus.OK) + @ApiException(COMMAND_STATE_COUNT_ERROR) public Result countCommandState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value="startDate", required=false) String startDate, - @RequestParam(value="endDate", required=false) String endDate, - @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ - try{ - logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}", - loginUser.getUserName(), startDate, endDate, projectId); - Map result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.COMMAND_STATE_COUNT_ERROR.getMsg(),e); - return error(Status.COMMAND_STATE_COUNT_ERROR.getCode(), Status.COMMAND_STATE_COUNT_ERROR.getMsg()); - } + @RequestParam(value = "startDate", required = false) String startDate, + @RequestParam(value = "endDate", required = false) String endDate, + @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) { + logger.info("count command state, user:{}, start date: {}, end date:{}, project id {}", + loginUser.getUserName(), startDate, endDate, projectId); + Map result = dataAnalysisService.countCommandState(loginUser, projectId, startDate, endDate); + return returnDataList(result); } /** @@ -179,23 +165,19 @@ public class DataAnalysisController extends BaseController{ * @param projectId project id * @return queue state count */ - @ApiOperation(value = "countQueueState", notes= "COUNT_QUEUE_STATE_NOTES") + @ApiOperation(value = "countQueueState", notes = "COUNT_QUEUE_STATE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/queue-count") + @GetMapping(value = "/queue-count") @ResponseStatus(HttpStatus.OK) + @ApiException(QUEUE_COUNT_ERROR) public Result countQueueState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value="projectId", required=false, defaultValue = "0") int projectId){ - try{ - logger.info("count command state, user:{}, project id {}", - loginUser.getUserName(), projectId); - Map result = dataAnalysisService.countQueueState(loginUser, projectId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUEUE_COUNT_ERROR.getMsg(),e); - return error(Status.QUEUE_COUNT_ERROR.getCode(), Status.QUEUE_COUNT_ERROR.getMsg()); - } + @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) { + logger.info("count command state, user:{}, project id {}", + loginUser.getUserName(), projectId); + Map result = dataAnalysisService.countQueueState(loginUser, projectId); + return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 881c93f2f7..7f35ac0d70 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -16,18 +16,20 @@ */ package org.apache.dolphinscheduler.api.controller; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.DataSourceService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DbConnectType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -38,6 +40,7 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * data source controller */ @@ -53,33 +56,36 @@ public class DataSourceController extends BaseController { /** * create data source + * * @param loginUser login user - * @param name data source name - * @param note data source description - * @param type data source type - * @param host host - * @param port port - * @param database data base + * @param name data source name + * @param note data source description + * @param type data source type + * @param host host + * @param port port + * @param database data base * @param principal principal - * @param userName user name - * @param password password - * @param other other arguments + * @param userName user name + * @param password password + * @param other other arguments * @return create result code */ - @ApiOperation(value = "createDataSource", notes= "CREATE_DATA_SOURCE_NOTES") + @ApiOperation(value = "createDataSource", notes = "CREATE_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"), + @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_DATASOURCE_ERROR) public Result createDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam(value = "note", required = false) String note, @@ -90,18 +96,13 @@ public class DataSourceController extends BaseController { @RequestParam(value = "principal") String principal, @RequestParam(value = "userName") String userName, @RequestParam(value = "password") String password, + @RequestParam(value = "connectType") DbConnectType connectType, @RequestParam(value = "other") String other) { - logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {},port: {},database : {},principal: {},userName : {} other: {}", - loginUser.getUserName(), name, note, type, host,port,database,principal,userName,other); - try { - String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); - Map result = dataSourceService.createDataSource(loginUser, name, note, type, parameter); - return returnDataList(result); - - } catch (Exception e) { - logger.error(CREATE_DATASOURCE_ERROR.getMsg(),e); - return error(Status.CREATE_DATASOURCE_ERROR.getCode(), Status.CREATE_DATASOURCE_ERROR.getMsg()); - } + logger.info("login user {} create datasource name: {}, note: {}, type: {}, host: {}, port: {}, database : {}, principal: {}, userName : {}, connectType: {}, other: {}", + loginUser.getUserName(), name, note, type, host, port, database, principal, userName, connectType, other); + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other); + Map result = dataSourceService.createDataSource(loginUser, name, note, type, parameter); + return returnDataList(result); } @@ -109,34 +110,36 @@ public class DataSourceController extends BaseController { * updateProcessInstance data source * * @param loginUser login user - * @param name data source name - * @param note description - * @param type data source type - * @param other other arguments - * @param id data source di - * @param host host - * @param port port - * @param database database + * @param name data source name + * @param note description + * @param type data source type + * @param other other arguments + * @param id data source di + * @param host host + * @param port port + * @param database database * @param principal principal - * @param userName user name - * @param password password + * @param userName user name + * @param password password * @return update result code */ - @ApiOperation(value = "updateDataSource", notes= "UPDATE_DATA_SOURCE_NOTES") + @ApiOperation(value = "updateDataSource", notes = "UPDATE_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"), + @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_DATASOURCE_ERROR) public Result updateDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") int id, @RequestParam("name") String name, @@ -148,72 +151,56 @@ public class DataSourceController extends BaseController { @RequestParam(value = "principal") String principal, @RequestParam(value = "userName") String userName, @RequestParam(value = "password") String password, + @RequestParam(value = "connectType") DbConnectType connectType, @RequestParam(value = "other") String other) { - logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, other: {}", - loginUser.getUserName(), name, note, type, other); - try { - String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal, userName, password, other); - Map dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); - return returnDataList(dataSource); - } catch (Exception e) { - logger.error(UPDATE_DATASOURCE_ERROR.getMsg(),e); - return error(UPDATE_DATASOURCE_ERROR.getCode(), UPDATE_DATASOURCE_ERROR.getMsg()); - } - - + logger.info("login user {} updateProcessInstance datasource name: {}, note: {}, type: {}, connectType: {}, other: {}", + loginUser.getUserName(), name, note, type, connectType, other); + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other); + Map dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter); + return returnDataList(dataSource); } /** * query data source detail * * @param loginUser login user - * @param id datasource id + * @param id datasource id * @return data source detail */ - @ApiOperation(value = "queryDataSource", notes= "QUERY_DATA_SOURCE_NOTES") + @ApiOperation(value = "queryDataSource", notes = "QUERY_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/update-ui") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_DATASOURCE_ERROR) public Result queryDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") int id) { logger.info("login user {}, query datasource: {}", loginUser.getUserName(), id); - try { - Map result = dataSourceService.queryDataSource(id); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); - } - - + Map result = dataSourceService.queryDataSource(id); + return returnDataList(result); } /** * query datasouce by type * * @param loginUser login user - * @param type data source type + * @param type data source type * @return data source list page */ - @ApiOperation(value = "queryDataSourceList", notes= "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES") + @ApiOperation(value = "queryDataSourceList", notes = "QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType") + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType") }) @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_DATASOURCE_ERROR) public Result queryDataSourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("type") DbType type) { - try { - Map result = dataSourceService.queryDataSourceList(loginUser, type.ordinal()); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg()); - } + Map result = dataSourceService.queryDataSourceList(loginUser, type.ordinal()); + return returnDataList(result); } /** @@ -221,66 +208,64 @@ public class DataSourceController extends BaseController { * * @param loginUser login user * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return data source list page */ - @ApiOperation(value = "queryDataSourceListPaging", notes= "QUERY_DATA_SOURCE_LIST_PAGING_NOTES") + @ApiOperation(value = "queryDataSourceListPaging", notes = "QUERY_DATA_SOURCE_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_DATASOURCE_ERROR) public Result queryDataSourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { - try { - Map result = checkPageParams(pageNo, pageSize); - if (result.get(Constants.STATUS) != Status.SUCCESS) { - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - } catch (Exception e) { - logger.error(QUERY_DATASOURCE_ERROR.getMsg(),e); - return error(QUERY_DATASOURCE_ERROR.getCode(), QUERY_DATASOURCE_ERROR.getMsg()); } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * connect datasource * * @param loginUser login user - * @param name data source name - * @param note data soruce description - * @param type data source type - * @param other other parameters - * @param host host - * @param port port - * @param database data base + * @param name data source name + * @param note data soruce description + * @param type data source type + * @param other other parameters + * @param host host + * @param port port + * @param database data base * @param principal principal - * @param userName user name - * @param password password + * @param userName user name + * @param password password * @return connect result code */ - @ApiOperation(value = "connectDataSource", notes= "CONNECT_DATA_SOURCE_NOTES") + @ApiOperation(value = "connectDataSource", notes = "CONNECT_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String"), + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "note", value = "DATA_SOURCE_NOTE", dataType = "String"), - @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true,dataType ="DbType"), - @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST",required = true, dataType ="String"), - @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT",required = true, dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "password", value = "PASSWORD", dataType ="String"), - @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType ="String") + @ApiImplicitParam(name = "type", value = "DB_TYPE", required = true, dataType = "DbType"), + @ApiImplicitParam(name = "host", value = "DATA_SOURCE_HOST", required = true, dataType = "String"), + @ApiImplicitParam(name = "port", value = "DATA_SOURCE_PORT", required = true, dataType = "String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "password", value = "PASSWORD", dataType = "String"), + @ApiImplicitParam(name = "connectType", value = "CONNECT_TYPE", dataType = "DbConnectType"), + @ApiImplicitParam(name = "other", value = "DATA_SOURCE_OTHER", dataType = "String") }) @PostMapping(value = "/connect") @ResponseStatus(HttpStatus.OK) + @ApiException(CONNECT_DATASOURCE_FAILURE) public Result connectDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam(value = "note", required = false) String note, @@ -291,137 +276,115 @@ public class DataSourceController extends BaseController { @RequestParam(value = "principal") String principal, @RequestParam(value = "userName") String userName, @RequestParam(value = "password") String password, + @RequestParam(value = "connectType") DbConnectType connectType, @RequestParam(value = "other") String other) { - logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, other: {}", - loginUser.getUserName(), name, note, type, other); - try { - String parameter = dataSourceService.buildParameter(name, note, type, host, port, database,principal,userName, password, other); - Boolean isConnection = dataSourceService.checkConnection(type, parameter); - Result result = new Result(); - - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECT_DATASOURCE_FAILURE); - } - return result; - } catch (Exception e) { - logger.error(CONNECT_DATASOURCE_FAILURE.getMsg(),e); - return error(CONNECT_DATASOURCE_FAILURE.getCode(), CONNECT_DATASOURCE_FAILURE.getMsg()); + logger.info("login user {}, connect datasource: {} failure, note: {}, type: {}, connectType: {}, other: {}", + loginUser.getUserName(), name, note, type, connectType, other); + String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, principal, userName, password, connectType, other); + Boolean isConnection = dataSourceService.checkConnection(type, parameter); + Result result = new Result(); + + if (isConnection) { + putMsg(result, SUCCESS); + } else { + putMsg(result, CONNECT_DATASOURCE_FAILURE); } + return result; } /** * connection test * * @param loginUser login user - * @param id data source id + * @param id data source id * @return connect result code */ - @ApiOperation(value = "connectionTest", notes= "CONNECT_DATA_SOURCE_TEST_NOTES") + @ApiOperation(value = "connectionTest", notes = "CONNECT_DATA_SOURCE_TEST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/connect-by-id") @ResponseStatus(HttpStatus.OK) + @ApiException(CONNECTION_TEST_FAILURE) public Result connectionTest(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") int id) { logger.info("connection test, login user:{}, id:{}", loginUser.getUserName(), id); - try { - Boolean isConnection = dataSourceService.connectionTest(loginUser, id); - Result result = new Result(); + Boolean isConnection = dataSourceService.connectionTest(loginUser, id); + Result result = new Result(); - if (isConnection) { - putMsg(result, SUCCESS); - } else { - putMsg(result, CONNECTION_TEST_FAILURE); - } - return result; - } catch (Exception e) { - logger.error(CONNECTION_TEST_FAILURE.getMsg(),e); - return error(CONNECTION_TEST_FAILURE.getCode(), CONNECTION_TEST_FAILURE.getMsg()); + if (isConnection) { + putMsg(result, SUCCESS); + } else { + putMsg(result, CONNECTION_TEST_FAILURE); } - + return result; } /** * delete datasource by id * * @param loginUser login user - * @param id datasource id + * @param id datasource id * @return delete result */ - @ApiOperation(value = "delete", notes= "DELETE_DATA_SOURCE_NOTES") + @ApiOperation(value = "delete", notes = "DELETE_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_DATA_SOURCE_FAILURE) public Result delete(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("id") int id) { - try { - logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id); - return dataSourceService.delete(loginUser, id); - } catch (Exception e) { - logger.error(DELETE_DATA_SOURCE_FAILURE.getMsg(),e); - return error(DELETE_DATA_SOURCE_FAILURE.getCode(), DELETE_DATA_SOURCE_FAILURE.getMsg()); - } + logger.info("delete datasource,login user:{}, id:{}", loginUser.getUserName(), id); + return dataSourceService.delete(loginUser, id); } /** * verify datasource name * * @param loginUser login user - * @param name data source name + * @param name data source name * @return true if data source name not exists.otherwise return false */ - @ApiOperation(value = "verifyDataSourceName", notes= "VERIFY_DATA_SOURCE_NOTES") + @ApiOperation(value = "verifyDataSourceName", notes = "VERIFY_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType ="String") + @ApiImplicitParam(name = "name", value = "DATA_SOURCE_NAME", required = true, dataType = "String") }) @GetMapping(value = "/verify-name") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_DATASOURCE_NAME_FAILURE) public Result verifyDataSourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "name") String name ) { logger.info("login user {}, verfiy datasource name: {}", loginUser.getUserName(), name); - try { - return dataSourceService.verifyDataSourceName(loginUser, name); - } catch (Exception e) { - logger.error(VERIFY_DATASOURCE_NAME_FAILURE.getMsg(), e); - return error(VERIFY_DATASOURCE_NAME_FAILURE.getCode(), VERIFY_DATASOURCE_NAME_FAILURE.getMsg()); - } + return dataSourceService.verifyDataSourceName(loginUser, name); } - /** * unauthorized datasource * * @param loginUser login user - * @param userId user id + * @param userId user id * @return unauthed data source result code */ - @ApiOperation(value = "unauthDatasource", notes= "UNAUTHORIZED_DATA_SOURCE_NOTES") + @ApiOperation(value = "unauthDatasource", notes = "UNAUTHORIZED_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/unauth-datasource") @ResponseStatus(HttpStatus.OK) + @ApiException(UNAUTHORIZED_DATASOURCE) public Result unauthDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try { - logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}", - loginUser.getUserName(), userId); - Map result = dataSourceService.unauthDatasource(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(UNAUTHORIZED_DATASOURCE.getMsg(),e); - return error(UNAUTHORIZED_DATASOURCE.getCode(), UNAUTHORIZED_DATASOURCE.getMsg()); - } + logger.info("unauthorized datasource, login user:{}, unauthorized userId:{}", + loginUser.getUserName(), userId); + Map result = dataSourceService.unauthDatasource(loginUser, userId); + return returnDataList(result); } @@ -429,26 +392,22 @@ public class DataSourceController extends BaseController { * authorized datasource * * @param loginUser login user - * @param userId user id + * @param userId user id * @return authorized result code */ - @ApiOperation(value = "authedDatasource", notes= "AUTHORIZED_DATA_SOURCE_NOTES") + @ApiOperation(value = "authedDatasource", notes = "AUTHORIZED_DATA_SOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/authed-datasource") @ResponseStatus(HttpStatus.OK) + @ApiException(AUTHORIZED_DATA_SOURCE) public Result authedDatasource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try { - logger.info("authorized data source, login user:{}, authorized useId:{}", - loginUser.getUserName(), userId); - Map result = dataSourceService.authedDatasource(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(AUTHORIZED_DATA_SOURCE.getMsg(),e); - return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg()); - } + logger.info("authorized data source, login user:{}, authorized useId:{}", + loginUser.getUserName(), userId); + Map result = dataSourceService.authedDatasource(loginUser, userId); + return returnDataList(result); } /** @@ -457,17 +416,13 @@ public class DataSourceController extends BaseController { * @param loginUser login user * @return user info data */ - @ApiOperation(value = "getKerberosStartupState", notes= "GET_USER_INFO_NOTES") - @GetMapping(value="/kerberos-startup-state") + @ApiOperation(value = "getKerberosStartupState", notes = "GET_USER_INFO_NOTES") + @GetMapping(value = "/kerberos-startup-state") @ResponseStatus(HttpStatus.OK) - public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + @ApiException(KERBEROS_STARTUP_STATE) + public Result getKerberosStartupState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}", loginUser.getUserName()); - try{ - // if upload resource is HDFS and kerberos startup is true , else false - return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState()); - }catch (Exception e){ - logger.error(KERBEROS_STARTUP_STATE.getMsg(),e); - return error(Status.KERBEROS_STARTUP_STATE.getCode(), Status.KERBEROS_STARTUP_STATE.getMsg()); - } + // if upload resource is HDFS and kerberos startup is true , else false + return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index ffedd5703c..20f4285ffa 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.ExecuteType; -import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ExecutorService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -32,8 +32,11 @@ import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.*; import springfox.documentation.annotations.ApiIgnore; +import java.text.ParseException; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * execute process controller @@ -50,43 +53,45 @@ public class ExecutorController extends BaseController { /** * execute process instance - * @param loginUser login user - * @param projectName project name - * @param processDefinitionId process definition id - * @param scheduleTime schedule time - * @param failureStrategy failure strategy - * @param startNodeList start nodes list - * @param taskDependType task depend type - * @param execType execute type - * @param warningType warning type - * @param warningGroupId warning group id - * @param receivers receivers - * @param receiversCc receivers cc - * @param runMode run mode + * + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @param scheduleTime schedule time + * @param failureStrategy failure strategy + * @param startNodeList start nodes list + * @param taskDependType task depend type + * @param execType execute type + * @param warningType warning type + * @param warningGroupId warning group id + * @param receivers receivers + * @param receiversCc receivers cc + * @param runMode run mode * @param processInstancePriority process instance priority - * @param workerGroupId worker group id - * @param timeout timeout + * @param workerGroup worker group + * @param timeout timeout * @return start process result code */ - @ApiOperation(value = "startProcessInstance", notes= "RUN_PROCESS_INSTANCE_NOTES") + @ApiOperation(value = "startProcessInstance", notes = "RUN_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType ="FailureStrategy"), - @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType ="String"), - @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType ="TaskDependType"), - @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType ="CommandType"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE",required = true, dataType ="WarningType"), - @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID",required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS",dataType ="String" ), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC",dataType ="String" ), - @ApiImplicitParam(name = "runMode", value = "RUN_MODE",dataType ="RunMode" ), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority" ), - @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int",example = "100"), - @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int",example = "100"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"), + @ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"), + @ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"), + @ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"), + @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS", dataType = "String"), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", dataType = "String"), + @ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"), + @ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"), + @ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"), }) @PostMapping(value = "start-process-instance") @ResponseStatus(HttpStatus.OK) + @ApiException(START_PROCESS_INSTANCE_ERROR) public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionId") int processDefinitionId, @@ -101,99 +106,85 @@ public class ExecutorController extends BaseController { @RequestParam(value = "receiversCc", required = false) String receiversCc, @RequestParam(value = "runMode", required = false) RunMode runMode, @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "timeout", required = false) Integer timeout) { - try { - logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, " - + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, " - + "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroupId: {}, timeout: {}", - loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, - failureStrategy, startNodeList, taskDependType, warningType, warningGroupId,receivers,receiversCc,runMode,processInstancePriority, - workerGroupId, timeout); - - if (timeout == null) { - timeout = Constants.MAX_TASK_TIMEOUT; - } - - Map result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, - startNodeList, taskDependType, warningType, - warningGroupId,receivers,receiversCc, runMode,processInstancePriority, workerGroupId, timeout); - return returnDataList(result); - } catch (Exception e) { - logger.error(Status.START_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(Status.START_PROCESS_INSTANCE_ERROR.getCode(), Status.START_PROCESS_INSTANCE_ERROR.getMsg()); + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "timeout", required = false) Integer timeout) throws ParseException { + logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, " + + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, " + + "notify group id: {},receivers:{},receiversCc:{}, run mode: {},process instance priority:{}, workerGroup: {}, timeout: {}", + loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, + failureStrategy, startNodeList, taskDependType, warningType, workerGroup, receivers, receiversCc, runMode, processInstancePriority, + workerGroup, timeout); + + if (timeout == null) { + timeout = Constants.MAX_TASK_TIMEOUT; } + + Map result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy, + startNodeList, taskDependType, warningType, + warningGroupId, receivers, receiversCc, runMode, processInstancePriority, workerGroup, timeout); + return returnDataList(result); } /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id - * @param executeType execute type + * @param executeType execute type * @return execute result code */ - @ApiOperation(value = "execute", notes= "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") + @ApiOperation(value = "execute", notes = "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType") }) @PostMapping(value = "/execute") @ResponseStatus(HttpStatus.OK) + @ApiException(EXECUTE_PROCESS_INSTANCE_ERROR) public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("processInstanceId") Integer processInstanceId, @RequestParam("executeType") ExecuteType executeType ) { - try { - logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}", - loginUser.getUserName(), projectName, processInstanceId, executeType); - Map result = execService.execute(loginUser, projectName, processInstanceId, executeType); - return returnDataList(result); - } catch (Exception e) { - logger.error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(Status.EXECUTE_PROCESS_INSTANCE_ERROR.getCode(), Status.EXECUTE_PROCESS_INSTANCE_ERROR.getMsg()); - } + logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}", + loginUser.getUserName(), projectName, processInstanceId, executeType); + Map result = execService.execute(loginUser, projectName, processInstanceId, executeType); + return returnDataList(result); } /** * check process definition and all of the son process definitions is on line. * - * @param loginUser login user + * @param loginUser login user * @param processDefinitionId process definition id * @return check result code */ - @ApiOperation(value = "startCheckProcessDefinition", notes= "START_CHECK_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "startCheckProcessDefinition", notes = "START_CHECK_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/start-check") @ResponseStatus(HttpStatus.OK) + @ApiException(CHECK_PROCESS_DEFINITION_ERROR) public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionId") int processDefinitionId) { + @RequestParam(value = "processDefinitionId") int processDefinitionId) { logger.info("login user {}, check process definition {}", loginUser.getUserName(), processDefinitionId); - try { - Map result = execService.startCheckByProcessDefinedId(processDefinitionId); - return returnDataList(result); - - } catch (Exception e) { - logger.error(Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(Status.CHECK_PROCESS_DEFINITION_ERROR.getCode(), Status.CHECK_PROCESS_DEFINITION_ERROR.getMsg()); - } + Map result = execService.startCheckByProcessDefinedId(processDefinitionId); + return returnDataList(result); } /** * query recipients and copyers by process definition ID * - * @param loginUser login user + * @param loginUser login user * @param processDefinitionId process definition id - * @param processInstanceId process instance id + * @param processInstanceId process instance id * @return receivers cc list */ @ApiIgnore - @ApiOperation(value = "getReceiverCc", notes= "GET_RECEIVER_CC_NOTES") + @ApiOperation(value = "getReceiverCc", notes = "GET_RECEIVER_CC_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100") @@ -201,17 +192,13 @@ public class ExecutorController extends BaseController { }) @GetMapping(value = "/get-receiver-cc") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR) public Result getReceiverCc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "processDefinitionId",required = false) Integer processDefinitionId, - @RequestParam(value = "processInstanceId",required = false) Integer processInstanceId) { + @RequestParam(value = "processDefinitionId", required = false) Integer processDefinitionId, + @RequestParam(value = "processInstanceId", required = false) Integer processInstanceId) { logger.info("login user {}, get process definition receiver and cc", loginUser.getUserName()); - try { - Map result = execService.getReceiverCc(processDefinitionId,processInstanceId); - return returnDataList(result); - } catch (Exception e) { - logger.error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg()); - } + Map result = execService.getReceiverCc(processDefinitionId, processInstanceId); + return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java index 802f09ff20..a5b8176a48 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoggerController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.LoggerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -25,7 +26,6 @@ import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; -import org.apache.dolphinscheduler.api.enums.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -35,6 +35,8 @@ import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import springfox.documentation.annotations.ApiIgnore; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * log controller @@ -52,61 +54,53 @@ public class LoggerController extends BaseController { /** * query task log - * @param loginUser login user + * + * @param loginUser login user * @param taskInstanceId task instance id - * @param skipNum skip number - * @param limit limit + * @param skipNum skip number + * @param limit limit * @return task log content */ - @ApiOperation(value = "queryLog", notes= "QUERY_TASK_INSTANCE_LOG_NOTES") + @ApiOperation(value = "queryLog", notes = "QUERY_TASK_INSTANCE_LOG_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "taskInstId", value = "TASK_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType ="Int", example = "100"), - @ApiImplicitParam(name = "limit", value = "LIMIT", dataType ="Int", example = "100") + @ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", dataType = "Int", example = "100") }) @GetMapping(value = "/detail") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_INSTANCE_LOG_ERROR) public Result queryLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskInstId") int taskInstanceId, + @RequestParam(value = "taskInstanceId") int taskInstanceId, @RequestParam(value = "skipLineNum") int skipNum, @RequestParam(value = "limit") int limit) { - try { - - logger.info( - "login user {}, view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit); - return loggerService.queryLog(taskInstanceId, skipNum, limit); - } catch (Exception e) { - logger.error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg(), e); - return error(Status.QUERY_TASK_INSTANCE_LOG_ERROR.getCode(), Status.QUERY_TASK_INSTANCE_LOG_ERROR.getMsg()); - } + logger.info( + "login user {}, view {} task instance log ,skipLineNum {} , limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit); + return loggerService.queryLog(taskInstanceId, skipNum, limit); } /** * download log file * - * @param loginUser login user + * @param loginUser login user * @param taskInstanceId task instance id * @return log file content */ - @ApiOperation(value = "downloadTaskLog", notes= "DOWNLOAD_TASK_INSTANCE_LOG_NOTES") + @ApiOperation(value = "downloadTaskLog", notes = "DOWNLOAD_TASK_INSTANCE_LOG_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "taskInstId", value = "TASK_ID",dataType = "Int", example = "100") + @ApiImplicitParam(name = "taskInstanceId", value = "TASK_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/download-log") @ResponseBody + @ApiException(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR) public ResponseEntity downloadTaskLog(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskInstId") int taskInstanceId) { - try { - byte[] logBytes = loggerService.getLogBytes(taskInstanceId); - return ResponseEntity - .ok() - .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".log" + "\"") - .body(logBytes); - } catch (Exception e) { - logger.error(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg(), e); - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg()); - } + @RequestParam(value = "taskInstanceId") int taskInstanceId) { + byte[] logBytes = loggerService.getLogBytes(taskInstanceId); + return ResponseEntity + .ok() + .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".log" + "\"") + .body(logBytes); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java index e3a862d376..ce21425605 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/LoginController.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.security.Authenticator; import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.api.utils.Result; @@ -42,7 +43,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.*; /** * user login controller - * + *

* swagger bootstrap ui docs refer : https://doc.xiaominfo.com/guide/enh-func.html */ @Api(tags = "LOGIN_TAG", position = 1) @@ -63,81 +64,71 @@ public class LoginController extends BaseController { /** * login * - * @param userName user name + * @param userName user name * @param userPassword user password - * @param request request - * @param response response + * @param request request + * @param response response * @return login result */ - @ApiOperation(value = "login", notes= "LOGIN_NOTES") + @ApiOperation(value = "login", notes = "LOGIN_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "userName", value = "USER_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType ="String") + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", required = true, dataType = "String") }) @PostMapping(value = "/login") + @ApiException(USER_LOGIN_FAILURE) public Result login(@RequestParam(value = "userName") String userName, @RequestParam(value = "userPassword") String userPassword, HttpServletRequest request, HttpServletResponse response) { + logger.info("login user name: {} ", userName); - try { - logger.info("login user name: {} ", userName); - - //user name check - if (StringUtils.isEmpty(userName)) { - return error(Status.USER_NAME_NULL.getCode(), - Status.USER_NAME_NULL.getMsg()); - } - - // user ip check - String ip = getClientIpAddress(request); - if (StringUtils.isEmpty(ip)) { - return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg()); - } - - // verify username and password - Result> result = authenticator.authenticate(userName, userPassword, ip); - if (result.getCode() != Status.SUCCESS.getCode()) { - return result; - } - - response.setStatus(HttpStatus.SC_OK); - Map cookieMap = result.getData(); - for (Map.Entry cookieEntry : cookieMap.entrySet()) { - Cookie cookie = new Cookie(cookieEntry.getKey(), cookieEntry.getValue()); - cookie.setHttpOnly(true); - response.addCookie(cookie); - } + //user name check + if (StringUtils.isEmpty(userName)) { + return error(Status.USER_NAME_NULL.getCode(), + Status.USER_NAME_NULL.getMsg()); + } + // user ip check + String ip = getClientIpAddress(request); + if (StringUtils.isEmpty(ip)) { + return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg()); + } + + // verify username and password + Result> result = authenticator.authenticate(userName, userPassword, ip); + if (result.getCode() != Status.SUCCESS.getCode()) { return result; - } catch (Exception e) { - logger.error(USER_LOGIN_FAILURE.getMsg(),e); - return error(USER_LOGIN_FAILURE.getCode(), USER_LOGIN_FAILURE.getMsg()); } + + response.setStatus(HttpStatus.SC_OK); + Map cookieMap = result.getData(); + for (Map.Entry cookieEntry : cookieMap.entrySet()) { + Cookie cookie = new Cookie(cookieEntry.getKey(), cookieEntry.getValue()); + cookie.setHttpOnly(true); + response.addCookie(cookie); + } + + return result; } /** * sign out * * @param loginUser login user - * @param request request + * @param request request * @return sign out result */ @ApiOperation(value = "signOut", notes = "SIGNOUT_NOTES") @PostMapping(value = "/signOut") + @ApiException(SIGN_OUT_ERROR) public Result signOut(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, HttpServletRequest request) { - - try { - logger.info("login user:{} sign out", loginUser.getUserName()); - String ip = getClientIpAddress(request); - sessionService.signOut(ip, loginUser); - //clear session - request.removeAttribute(Constants.SESSION_USER); - return success(); - } catch (Exception e) { - logger.error(SIGN_OUT_ERROR.getMsg(),e); - return error(SIGN_OUT_ERROR.getCode(), SIGN_OUT_ERROR.getMsg()); - } + logger.info("login user:{} sign out", loginUser.getUserName()); + String ip = getClientIpAddress(request); + sessionService.signOut(ip, loginUser); + //clear session + request.removeAttribute(Constants.SESSION_USER); + return success(); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java index 74a5d91a6c..308a6d33d5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/MonitorController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.MonitorService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -33,13 +34,14 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * monitor controller */ @Api(tags = "MONITOR_TAG", position = 1) @RestController @RequestMapping("/monitor") -public class MonitorController extends BaseController{ +public class MonitorController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(MonitorController.class); @@ -48,84 +50,67 @@ public class MonitorController extends BaseController{ /** * master list + * * @param loginUser login user * @return master list */ - @ApiOperation(value = "listMaster", notes= "MASTER_LIST_NOTES") + @ApiOperation(value = "listMaster", notes = "MASTER_LIST_NOTES") @GetMapping(value = "/master/list") @ResponseStatus(HttpStatus.OK) + @ApiException(LIST_MASTERS_ERROR) public Result listMaster(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user: {}, query all master", loginUser.getUserName()); - try{ - logger.info("list master, user:{}", loginUser.getUserName()); - Map result = monitorService.queryMaster(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(LIST_MASTERS_ERROR.getMsg(),e); - return error(LIST_MASTERS_ERROR.getCode(), - LIST_MASTERS_ERROR.getMsg()); - } + logger.info("list master, user:{}", loginUser.getUserName()); + Map result = monitorService.queryMaster(loginUser); + return returnDataList(result); } /** * worker list + * * @param loginUser login user * @return worker information list */ - @ApiOperation(value = "listWorker", notes= "WORKER_LIST_NOTES") + @ApiOperation(value = "listWorker", notes = "WORKER_LIST_NOTES") @GetMapping(value = "/worker/list") @ResponseStatus(HttpStatus.OK) + @ApiException(LIST_WORKERS_ERROR) public Result listWorker(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user: {}, query all workers", loginUser.getUserName()); - try{ - Map result = monitorService.queryWorker(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(LIST_WORKERS_ERROR.getMsg(),e); - return error(LIST_WORKERS_ERROR.getCode(), - LIST_WORKERS_ERROR.getMsg()); - } + Map result = monitorService.queryWorker(loginUser); + return returnDataList(result); } /** * query database state + * * @param loginUser login user * @return data base state */ - @ApiOperation(value = "queryDatabaseState", notes= "QUERY_DATABASE_STATE_NOTES") + @ApiOperation(value = "queryDatabaseState", notes = "QUERY_DATABASE_STATE_NOTES") @GetMapping(value = "/database") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_DATABASE_STATE_ERROR) public Result queryDatabaseState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user: {}, query database state", loginUser.getUserName()); - try{ - - Map result = monitorService.queryDatabaseState(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_DATABASE_STATE_ERROR.getMsg(),e); - return error(QUERY_DATABASE_STATE_ERROR.getCode(), - QUERY_DATABASE_STATE_ERROR.getMsg()); - } + Map result = monitorService.queryDatabaseState(loginUser); + return returnDataList(result); } /** * query zookeeper state + * * @param loginUser login user * @return zookeeper information list */ - @ApiOperation(value = "queryZookeeperState", notes= "QUERY_ZOOKEEPER_STATE_NOTES") + @ApiOperation(value = "queryZookeeperState", notes = "QUERY_ZOOKEEPER_STATE_NOTES") @GetMapping(value = "/zookeeper/list") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_ZOOKEEPER_STATE_ERROR) public Result queryZookeeperState(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user: {}, query zookeeper state", loginUser.getUserName()); - try{ - Map result = monitorService.queryZookeeperState(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_ZOOKEEPER_STATE_ERROR.getMsg(),e); - return error(QUERY_ZOOKEEPER_STATE_ERROR.getCode(), - QUERY_ZOOKEEPER_STATE_ERROR.getMsg()); - } + Map result = monitorService.queryZookeeperState(loginUser); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index c07ecf9ca7..4f3dafdf27 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -16,7 +16,9 @@ */ package org.apache.dolphinscheduler.api.controller; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -37,6 +39,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * process definition controller @@ -44,7 +48,7 @@ import java.util.Map; @Api(tags = "PROCESS_DEFINITION_TAG", position = 2) @RestController @RequestMapping("projects/{projectName}/process") -public class ProcessDefinitionController extends BaseController{ +public class ProcessDefinitionController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class); @@ -54,130 +58,144 @@ public class ProcessDefinitionController extends BaseController{ /** * create process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param name process definition name - * @param json process definition json + * @param name process definition name + * @param json process definition json * @param description description - * @param locations locations for nodes - * @param connects connects for nodes + * @param locations locations for nodes + * @param connects connects for nodes * @return create result code */ - @ApiOperation(value = "save", notes= "CREATE_PROCESS_DEFINITION_NOTES") + @ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), - @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), - @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), - @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), - @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"), + @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"), + @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"), + @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"), + @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"), }) @PostMapping(value = "/save") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_PROCESS_DEFINITION) public Result createProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "name", required = true) String name, @RequestParam(value = "processDefinitionJson", required = true) String json, @RequestParam(value = "locations", required = true) String locations, @RequestParam(value = "connects", required = true) String connects, - @RequestParam(value = "description", required = false) String description) { + @RequestParam(value = "description", required = false) String description) throws JsonProcessingException { + + logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + + "process_definition_json: {}, desc: {} locations:{}, connects:{}", + loginUser.getUserName(), projectName, name, json, description, locations, connects); + Map result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json, + description, locations, connects); + return returnDataList(result); + } - try { - logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + - "process_definition_json: {}, desc: {} locations:{}, connects:{}", - loginUser.getUserName(), projectName, name, json, description, locations, connects); - Map result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json, - description, locations, connects); - return returnDataList(result); - } catch (Exception e) { - logger.error(Status.CREATE_PROCESS_DEFINITION.getMsg(), e); - return error(Status.CREATE_PROCESS_DEFINITION.getCode(), Status.CREATE_PROCESS_DEFINITION.getMsg()); - } + /** + * copy process definition + * + * @param loginUser login user + * @param projectName project name + * @param processId process definition id + * @return copy result code + */ + @ApiOperation(value = "copyProcessDefinition", notes= "COPY_PROCESS_DEFINITION_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") + }) + @PostMapping(value = "/copy") + @ResponseStatus(HttpStatus.OK) + @ApiException(COPY_PROCESS_DEFINITION_ERROR) + public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processId", required = true) int processId) throws JsonProcessingException { + logger.info("copy process definition, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processId); + Map result = processDefinitionService.copyProcessDefinition(loginUser, projectName, processId); + return returnDataList(result); } /** * verify process definition name unique * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param name name + * @param name name * @return true if process definition name not exists, otherwise false */ - @ApiOperation(value = "verify-name", notes = "VERIFY_PROCCESS_DEFINITION_NAME_NOTES") + @ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String") }) @GetMapping(value = "/verify-name") @ResponseStatus(HttpStatus.OK) - public Result verifyProccessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name){ - try { - logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", - loginUser.getUserName(), projectName, name); - Map result = processDefinitionService.verifyProccessDefinitionName(loginUser, projectName, name); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg(),e); - return error(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg()); - } + @ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR) + public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name) { + logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", + loginUser.getUserName(), projectName, name); + Map result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name); + return returnDataList(result); } /** * update process definition * - * @param loginUser login user - * @param projectName project name - * @param name process definition name - * @param id process definition id + * @param loginUser login user + * @param projectName project name + * @param name process definition name + * @param id process definition id * @param processDefinitionJson process definition json - * @param description description - * @param locations locations for nodes - * @param connects connects for nodes + * @param description description + * @param locations locations for nodes + * @param connects connects for nodes * @return update result code */ - @ApiOperation(value = "updateProccessDefinition", notes= "UPDATE_PROCCESS_DEFINITION_NOTES") + + @ApiOperation(value = "updateProcessDefinition", notes= "UPDATE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), @ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type ="String"), - @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type ="String"), - @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type ="String"), - @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type ="String"), + @ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"), + @ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"), + @ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"), + @ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"), }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) - public Result updateProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "name", required = true) String name, - @RequestParam(value = "id", required = true) int id, - @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, - @RequestParam(value = "locations", required = false) String locations, - @RequestParam(value = "connects", required = false) String connects, - @RequestParam(value = "description", required = false) String description) { - - try { - logger.info("login user {}, update process define, project name: {}, process define name: {}, " + - "process_definition_json: {}, desc: {}, locations:{}, connects:{}", - loginUser.getUserName(), projectName, name, processDefinitionJson,description, locations, connects); - Map result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name, - processDefinitionJson, description, locations, connects); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(Status.UPDATE_PROCESS_DEFINITION_ERROR.getCode(), Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg()); - } + @ApiException(UPDATE_PROCESS_DEFINITION_ERROR) + public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "name", required = true) String name, + @RequestParam(value = "id", required = true) int id, + @RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, + @RequestParam(value = "locations", required = false) String locations, + @RequestParam(value = "connects", required = false) String connects, + @RequestParam(value = "description", required = false) String description) { + + logger.info("login user {}, update process define, project name: {}, process define name: {}, " + + "process_definition_json: {}, desc: {}, locations:{}, connects:{}", + loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects); + Map result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name, + processDefinitionJson, description, locations, connects); + return returnDataList(result); } /** * release process definition * - * @param loginUser login user - * @param projectName project name - * @param processId process definition id + * @param loginUser login user + * @param projectName project name + * @param processId process definition id * @param releaseState release state * @return release result code */ - @ApiOperation(value = "releaseProccessDefinition", notes= "RELEASE_PROCCESS_DEFINITION_NOTES") + + @ApiOperation(value = "releaseProcessDefinition", notes= "RELEASE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"), @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @@ -185,344 +203,296 @@ public class ProcessDefinitionController extends BaseController{ }) @PostMapping(value = "/release") @ResponseStatus(HttpStatus.OK) - public Result releaseProccessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam(value = "processId", required = true) int processId, - @RequestParam(value = "releaseState", required = true) int releaseState) { - - try { - logger.info("login user {}, release process definition, project name: {}, release state: {}", - loginUser.getUserName(), projectName, releaseState); - Map result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(Status.RELEASE_PROCESS_DEFINITION_ERROR.getCode(), Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg()); - } + @ApiException(RELEASE_PROCESS_DEFINITION_ERROR) + public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "processId", required = true) int processId, + @RequestParam(value = "releaseState", required = true) int releaseState) { + + logger.info("login user {}, release process definition, project name: {}, release state: {}", + loginUser.getUserName(), projectName, releaseState); + Map result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState); + return returnDataList(result); } - /** * query datail of process definition * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param processId process definition id + * @param processId process definition id * @return process definition detail */ - @ApiOperation(value = "queryProccessDefinitionById", notes= "QUERY_PROCCESS_DEFINITION_BY_ID_NOTES") + @ApiOperation(value = "queryProcessDefinitionById", notes= "QUERY_PROCESS_DEFINITION_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="/select-by-id") + @GetMapping(value = "/select-by-id") @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR) + public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("processId") Integer processId - ){ - try{ - logger.info("query datail of process definition, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processId); - Map result = processDefinitionService.queryProccessDefinitionById(loginUser, projectName, processId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg(),e); - return error(Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg()); - } + ) { + logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processId); + Map result = processDefinitionService.queryProcessDefinitionById(loginUser, projectName, processId); + return returnDataList(result); } - /** - * query proccess definition list + * query Process definition list * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @return process definition list */ - @ApiOperation(value = "queryProccessDefinitionList", notes= "QUERY_PROCCESS_DEFINITION_LIST_NOTES") - @GetMapping(value="/list") + @ApiOperation(value = "queryProcessDefinitionList", notes = "QUERY_PROCESS_DEFINITION_LIST_NOTES") + @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName - ){ - try{ - logger.info("query proccess definition list, login user:{}, project name:{}", - loginUser.getUserName(), projectName); - Map result = processDefinitionService.queryProccessDefinitionList(loginUser, projectName); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); - return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg()); - } + @ApiException(QUERY_PROCESS_DEFINITION_LIST) + public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName + ) { + logger.info("query process definition list, login user:{}, project name:{}", + loginUser.getUserName(), projectName); + Map result = processDefinitionService.queryProcessDefinitionList(loginUser, projectName); + return returnDataList(result); } /** - * query proccess definition list paging - * @param loginUser login user + * query process definition list paging + * + * @param loginUser login user * @param projectName project name - * @param searchVal search value - * @param pageNo page number - * @param pageSize page size - * @param userId user id + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param userId user id * @return process definition page */ - @ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES") + @ApiOperation(value = "queryProcessDefinitionListPaging", notes= "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"), @ApiImplicitParam(name = "userId", value = "USER_ID", required = false, dataType = "Int", example = "100"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR) public Result queryProcessDefinitionListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("query proccess definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId); + @RequestParam("pageSize") Integer pageSize) { + logger.info("query process definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg()); } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId); + return returnDataListPaging(result); } - /** * encapsulation treeview structure * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param id process definition id - * @param limit limit + * @param id process definition id + * @param limit limit * @return tree view json data */ - @ApiOperation(value = "viewTree", notes= "VIEW_TREE_NOTES") + @ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="/view-tree") + @GetMapping(value = "/view-tree") @ResponseStatus(HttpStatus.OK) + @ApiException(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR) public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processId") Integer id, - @RequestParam("limit") Integer limit){ - try{ - Map result = processDefinitionService.viewTree(id, limit); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg(),e); - return error(Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getCode(), Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg()); - } + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processId") Integer id, + @RequestParam("limit") Integer limit) throws Exception { + Map result = processDefinitionService.viewTree(id, limit); + return returnDataList(result); } - /** - * * get tasks list by process definition id * - * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id * @return task list */ - @ApiOperation(value = "getNodeListByDefinitionId", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") + @ApiOperation(value = "getNodeListByDefinitionId", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="gen-task-list") + @GetMapping(value = "gen-task-list") @ResponseStatus(HttpStatus.OK) + @ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR) public Result getNodeListByDefinitionId( @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processDefinitionId") Integer processDefinitionId){ - try { - logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}", - loginUser.getUserName(), projectName, processDefinitionId); - Map result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); - return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); - } + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processDefinitionId") Integer processDefinitionId) throws Exception { + logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}", + loginUser.getUserName(), projectName, processDefinitionId); + Map result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId); + return returnDataList(result); } /** - * * get tasks list by process definition id * - * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIdList process definition id list * @return node list data */ - @ApiOperation(value = "getNodeListByDefinitionIdList", notes= "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") + @ApiOperation(value = "getNodeListByDefinitionIdList", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIdList", value = "PROCESS_DEFINITION_ID_LIST", required = true, type = "String") }) - @GetMapping(value="get-task-list") + @GetMapping(value = "get-task-list") @ResponseStatus(HttpStatus.OK) + @ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR) public Result getNodeListByDefinitionIdList( @ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME",required = true) @PathVariable String projectName, - @RequestParam("processDefinitionIdList") String processDefinitionIdList){ + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processDefinitionIdList") String processDefinitionIdList) throws Exception { - try { - logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", - loginUser.getUserName(), projectName, processDefinitionIdList); - Map result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); - return error(Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); - } + logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", + loginUser.getUserName(), projectName, processDefinitionIdList); + Map result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList); + return returnDataList(result); } /** * delete process definition by id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id * @return delete result code */ - @ApiOperation(value = "deleteProcessDefinitionById", notes= "DELETE_PROCESS_DEFINITION_BY_ID_NOTES") + @ApiOperation(value = "deleteProcessDefinitionById", notes = "DELETE_PROCESS_DEFINITION_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/delete") + @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_PROCESS_DEFINE_BY_ID_ERROR) public Result deleteProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processDefinitionId") Integer processDefinitionId - ){ - try{ - logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}", - loginUser.getUserName(), projectName, processDefinitionId); - Map result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR.getMsg()); - } + @RequestParam("processDefinitionId") Integer processDefinitionId + ) { + logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}", + loginUser.getUserName(), projectName, processDefinitionId); + Map result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); + return returnDataList(result); } /** * batch delete process definition by ids * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionIds process definition id list * @return delete result code */ - @ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes= "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES") + @ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes = "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", type = "String") }) - @GetMapping(value="/batch-delete") + @GetMapping(value = "/batch-delete") @ResponseStatus(HttpStatus.OK) + @ApiException(BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR) public Result batchDeleteProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processDefinitionIds") String processDefinitionIds - ){ - try{ - logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", - loginUser.getUserName(), projectName, processDefinitionIds); - - Map result = new HashMap<>(5); - List deleteFailedIdList = new ArrayList<>(); - if(StringUtils.isNotEmpty(processDefinitionIds)){ - String[] processDefinitionIdArray = processDefinitionIds.split(","); - - for (String strProcessDefinitionId:processDefinitionIdArray) { - int processDefinitionId = Integer.parseInt(strProcessDefinitionId); - try { - Map deleteResult = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); - if(!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))){ - deleteFailedIdList.add(strProcessDefinitionId); - logger.error((String)deleteResult.get(Constants.MSG)); - } - } catch (Exception e) { + @RequestParam("processDefinitionIds") String processDefinitionIds + ) { + logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", + loginUser.getUserName(), projectName, processDefinitionIds); + + Map result = new HashMap<>(5); + List deleteFailedIdList = new ArrayList<>(); + if (StringUtils.isNotEmpty(processDefinitionIds)) { + String[] processDefinitionIdArray = processDefinitionIds.split(","); + + for (String strProcessDefinitionId : processDefinitionIdArray) { + int processDefinitionId = Integer.parseInt(strProcessDefinitionId); + try { + Map deleteResult = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); + if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) { deleteFailedIdList.add(strProcessDefinitionId); + logger.error((String) deleteResult.get(Constants.MSG)); } + } catch (Exception e) { + deleteFailedIdList.add(strProcessDefinitionId); } } + } - if(!deleteFailedIdList.isEmpty()){ - putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR, String.join(",", deleteFailedIdList)); - }else{ - putMsg(result, Status.SUCCESS); - } - - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(),e); - return error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg()); + if (!deleteFailedIdList.isEmpty()) { + putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR, String.join(",", deleteFailedIdList)); + } else { + putMsg(result, Status.SUCCESS); } + + return returnDataList(result); } /** * export process definition by id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id - * @param response response + * @param response response */ - @ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCCESS_DEFINITION_BY_ID_NOTES") + + @ApiOperation(value = "exportProcessDefinitionById", notes= "EXPORT_PROCESS_DEFINITION_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="/export") + @GetMapping(value = "/export") @ResponseBody public void exportProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @PathVariable String projectName, @RequestParam("processDefinitionId") Integer processDefinitionId, - HttpServletResponse response){ - try{ + HttpServletResponse response) { + try { logger.info("export process definition by id, login user:{}, project name:{}, process definition id:{}", loginUser.getUserName(), projectName, processDefinitionId); - processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId,response); - }catch (Exception e){ - logger.error(Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(),e); + processDefinitionService.exportProcessDefinitionById(loginUser, projectName, processDefinitionId, response); + } catch (Exception e) { + logger.error(Status.EXPORT_PROCESS_DEFINE_BY_ID_ERROR.getMsg(), e); } } - - /** - * query proccess definition all by project id + * query process definition all by project id * * @param loginUser login user - * @param projectId project id + * @param projectId project id * @return process definition list */ - @ApiOperation(value = "queryProccessDefinitionAllByProjectId", notes= "QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES") - @GetMapping(value="/queryProccessDefinitionAllByProjectId") + @ApiOperation(value = "queryProcessDefinitionAllByProjectId", notes = "QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES") + @GetMapping(value = "/queryProcessDefinitionAllByProjectId") @ResponseStatus(HttpStatus.OK) - public Result queryProccessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("projectId") Integer projectId){ - try{ - logger.info("query proccess definition list, login user:{}, project id:{}", - loginUser.getUserName(),projectId); - Map result = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); - return error(Status.QUERY_PROCCESS_DEFINITION_LIST.getCode(), Status.QUERY_PROCCESS_DEFINITION_LIST.getMsg()); - } + @ApiException(QUERY_PROCESS_DEFINITION_LIST) + public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("projectId") Integer projectId) { + logger.info("query process definition list, login user:{}, project id:{}", + loginUser.getUserName(), projectId); + Map result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index 102f116575..10dc8e4ce5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessInstanceService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -26,8 +27,6 @@ import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import io.swagger.annotations.*; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -35,6 +34,8 @@ import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.*; import springfox.documentation.annotations.ApiIgnore; +import java.io.IOException; +import java.text.ParseException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -48,7 +49,7 @@ import static org.apache.dolphinscheduler.api.enums.Status.*; @Api(tags = "PROCESS_INSTANCE_TAG", position = 10) @RestController @RequestMapping("projects/{projectName}/instance") -public class ProcessInstanceController extends BaseController{ +public class ProcessInstanceController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); @@ -59,102 +60,94 @@ public class ProcessInstanceController extends BaseController{ /** * query process instance list paging * - * @param loginUser login user - * @param projectName project name - * @param pageNo page number - * @param pageSize page size + * @param loginUser login user + * @param projectName project name + * @param pageNo page number + * @param pageSize page size * @param processDefinitionId process definition id - * @param searchVal search value - * @param stateType state type - * @param host host - * @param startTime start time - * @param endTime end time + * @param searchVal search value + * @param stateType state type + * @param host host + * @param startTime start time + * @param endTime end time * @return process instance list */ - @ApiOperation(value = "queryProcessInstanceList", notes= "QUERY_PROCESS_INSTANCE_LIST_NOTES") + @ApiOperation(value = "queryProcessInstanceList", notes = "QUERY_PROCESS_INSTANCE_LIST_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), - @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"), - @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), - @ApiImplicitParam(name = "host", value = "HOST", type ="String"), - @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"), + @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type = "ExecutionStatus"), + @ApiImplicitParam(name = "host", value = "HOST", type = "String"), + @ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") }) - @GetMapping(value="list-paging") + @GetMapping(value = "list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR) public Result queryProcessInstanceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam(value = "executorName", required = false) String executorName, - @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, - @RequestParam(value = "host", required = false) String host, - @RequestParam(value = "startDate", required = false) String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + - "search value:{},executor name:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", - loginUser.getUserName(), projectName, processDefinitionId, searchVal, executorName,stateType,host, - startTime, endTime, pageNo, pageSize); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = processInstanceService.queryProcessInstanceList( - loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, executorName, stateType, host, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg()); - } + @RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "executorName", required = false) String executorName, + @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, + @RequestParam(value = "host", required = false) String host, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + + "search value:{},executor name:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", + loginUser.getUserName(), projectName, processDefinitionId, searchVal, executorName, stateType, host, + startTime, endTime, pageNo, pageSize); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = processInstanceService.queryProcessInstanceList( + loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, executorName, stateType, host, pageNo, pageSize); + return returnDataListPaging(result); } /** * query task list by process instance id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id * @return task list for the process instance */ - @ApiOperation(value = "queryTaskListByProcessId", notes= "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES") + @ApiOperation(value = "queryTaskListByProcessId", notes = "QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/task-list-by-process-id") + @GetMapping(value = "/task-list-by-process-id") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR) public Result queryTaskListByProcessId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("processInstanceId") Integer processInstanceId - ) { - try{ - logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - Map result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); - return error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getCode(), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg()); - } + ) throws IOException { + logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + Map result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, processInstanceId); + return returnDataList(result); } /** * update process instance * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceJson process instance json - * @param processInstanceId process instance id - * @param scheduleTime schedule time - * @param syncDefine sync define - * @param flag flag - * @param locations locations - * @param connects connects + * @param processInstanceId process instance id + * @param scheduleTime schedule time + * @param syncDefine sync define + * @param flag flag + * @param locations locations + * @param connects connects * @return update result code */ - @ApiOperation(value = "updateProcessInstance", notes= "UPDATE_PROCESS_INSTANCE_NOTES") + @ApiOperation(value = "updateProcessInstance", notes = "UPDATE_PROCESS_INSTANCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceJson", value = "PROCESS_INSTANCE_JSON", type = "String"), @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100"), @@ -164,243 +157,209 @@ public class ProcessInstanceController extends BaseController{ @ApiImplicitParam(name = "connects", value = "PROCESS_INSTANCE_CONNECTS", type = "String"), @ApiImplicitParam(name = "flag", value = "RECOVERY_PROCESS_INSTANCE_FLAG", type = "Flag"), }) - @PostMapping(value="/update") + @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_PROCESS_INSTANCE_ERROR) public Result updateProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam( value = "processInstanceJson", required = false) String processInstanceJson, - @RequestParam( value = "processInstanceId") Integer processInstanceId, - @RequestParam( value = "scheduleTime", required = false) String scheduleTime, - @RequestParam( value = "syncDefine", required = true) Boolean syncDefine, + @RequestParam(value = "processInstanceJson", required = false) String processInstanceJson, + @RequestParam(value = "processInstanceId") Integer processInstanceId, + @RequestParam(value = "scheduleTime", required = false) String scheduleTime, + @RequestParam(value = "syncDefine", required = true) Boolean syncDefine, @RequestParam(value = "locations", required = false) String locations, @RequestParam(value = "connects", required = false) String connects, - @RequestParam( value = "flag", required = false) Flag flag - ){ - try{ - logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," + - "process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}", - loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime, - syncDefine, flag, locations, connects); - Map result = processInstanceService.updateProcessInstance(loginUser, projectName, - processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects); - return returnDataList(result); - }catch (Exception e){ - logger.error(UPDATE_PROCESS_INSTANCE_ERROR.getMsg(),e); - return error(Status.UPDATE_PROCESS_INSTANCE_ERROR.getCode(), Status.UPDATE_PROCESS_INSTANCE_ERROR.getMsg()); - } + @RequestParam(value = "flag", required = false) Flag flag + ) throws ParseException { + logger.info("updateProcessInstance process instance, login user:{}, project name:{}, process instance json:{}," + + "process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}", + loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime, + syncDefine, flag, locations, connects); + Map result = processInstanceService.updateProcessInstance(loginUser, projectName, + processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects); + return returnDataList(result); } /** * query process instance by id * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id * @return process instance detail */ - @ApiOperation(value = "queryProcessInstanceById", notes= "QUERY_PROCESS_INSTANCE_BY_ID_NOTES") + @ApiOperation(value = "queryProcessInstanceById", notes = "QUERY_PROCESS_INSTANCE_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/select-by-id") + @GetMapping(value = "/select-by-id") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROCESS_INSTANCE_BY_ID_ERROR) public Result queryProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId - ){ - try{ - logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - Map result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); - } + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId + ) { + logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + Map result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId); + return returnDataList(result); } /** * delete process instance by id, at the same time, * delete task instance and their mapping relation data * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id * @return delete result code */ - @ApiOperation(value = "deleteProcessInstanceById", notes= "DELETE_PROCESS_INSTANCE_BY_ID_NOTES") + @ApiOperation(value = "deleteProcessInstanceById", notes = "DELETE_PROCESS_INSTANCE_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/delete") + @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_PROCESS_INSTANCE_BY_ID_ERROR) public Result deleteProcessInstanceById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId - ){ - try{ - logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", - loginUser.getUserName(), projectName, processInstanceId); - // task queue - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); - } + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId + ) { + logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", + loginUser.getUserName(), projectName, processInstanceId); + // task queue + Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId); + return returnDataList(result); } /** * query sub process instance detail info by task id * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param taskId task id + * @param taskId task id * @return sub process instance detail */ - @ApiOperation(value = "querySubProcessInstanceByTaskId", notes= "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES") + @ApiOperation(value = "querySubProcessInstanceByTaskId", notes = "QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "taskId", value = "TASK_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/select-sub-process") + @GetMapping(value = "/select-sub-process") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR) public Result querySubProcessInstanceByTaskId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("taskId") Integer taskId){ - try{ - Map result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg(),e); - return error(Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getCode(), Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg()); - } + @RequestParam("taskId") Integer taskId) { + Map result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId); + return returnDataList(result); } /** * query parent process instance detail info by sub process instance id * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param subId sub process id + * @param subId sub process id * @return parent instance detail */ - @ApiOperation(value = "queryParentInstanceBySubId", notes= "QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES") + @ApiOperation(value = "queryParentInstanceBySubId", notes = "QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "subId", value = "SUB_PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/select-parent-process") + @GetMapping(value = "/select-parent-process") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR) public Result queryParentInstanceBySubId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("subId") Integer subId){ - try{ - Map result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); - return error(Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getCode(), Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg()); - } + @RequestParam("subId") Integer subId) { + Map result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId); + return returnDataList(result); } /** * query process instance global variables and local variables * - * @param loginUser login user + * @param loginUser login user * @param processInstanceId process instance id * @return variables data */ - @ApiOperation(value = "viewVariables", notes= "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES") + @ApiOperation(value = "viewVariables", notes = "QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/view-variables") + @GetMapping(value = "/view-variables") @ResponseStatus(HttpStatus.OK) - public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser - , @RequestParam("processInstanceId") Integer processInstanceId){ - try{ - Map result = processInstanceService.viewVariables(processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg(),e); - return error(Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg()); - } + @ApiException(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR) + public Result viewVariables(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("processInstanceId") Integer processInstanceId) throws Exception { + Map result = processInstanceService.viewVariables(processInstanceId); + return returnDataList(result); } /** * encapsulation gantt structure * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id * @return gantt tree data */ - @ApiOperation(value = "vieGanttTree", notes= "VIEW_GANTT_NOTES") + @ApiOperation(value = "vieGanttTree", notes = "VIEW_GANTT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", dataType = "Int", example = "100") }) - @GetMapping(value="/view-gantt") + @GetMapping(value = "/view-gantt") @ResponseStatus(HttpStatus.OK) + @ApiException(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR) public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam("processInstanceId") Integer processInstanceId){ - try{ - Map result = processInstanceService.viewGantt(processInstanceId); - return returnDataList(result); - }catch (Exception e){ - logger.error(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg(),e); - return error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg()); - } + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam("processInstanceId") Integer processInstanceId) throws Exception { + Map result = processInstanceService.viewGantt(processInstanceId); + return returnDataList(result); } /** * batch delete process instance by ids, at the same time, * delete task instance and their mapping relation data * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceIds process instance id * @return delete result code */ - @GetMapping(value="/batch-delete") + @GetMapping(value = "/batch-delete") @ResponseStatus(HttpStatus.OK) + @ApiException(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR) public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @PathVariable String projectName, - @RequestParam("processInstanceIds") String processInstanceIds - ){ - try{ - logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", - loginUser.getUserName(), projectName, processInstanceIds); - // task queue - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - Map result = new HashMap<>(5); - List deleteFailedIdList = new ArrayList<>(); - if(StringUtils.isNotEmpty(processInstanceIds)){ - String[] processInstanceIdArray = processInstanceIds.split(","); + @PathVariable String projectName, + @RequestParam("processInstanceIds") String processInstanceIds + ) { + logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", + loginUser.getUserName(), projectName, processInstanceIds); + // task queue + Map result = new HashMap<>(5); + List deleteFailedIdList = new ArrayList<>(); + if (StringUtils.isNotEmpty(processInstanceIds)) { + String[] processInstanceIdArray = processInstanceIds.split(","); - for (String strProcessInstanceId:processInstanceIdArray) { - int processInstanceId = Integer.parseInt(strProcessInstanceId); - try { - Map deleteResult = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); - if(!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))){ - deleteFailedIdList.add(strProcessInstanceId); - logger.error((String)deleteResult.get(Constants.MSG)); - } - } catch (Exception e) { + for (String strProcessInstanceId : processInstanceIdArray) { + int processInstanceId = Integer.parseInt(strProcessInstanceId); + try { + Map deleteResult = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId); + if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) { deleteFailedIdList.add(strProcessInstanceId); + logger.error((String) deleteResult.get(Constants.MSG)); } + } catch (Exception e) { + deleteFailedIdList.add(strProcessInstanceId); } } - if(deleteFailedIdList.size() > 0){ - putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR, String.join(",", deleteFailedIdList)); - }else{ - putMsg(result, Status.SUCCESS); - } - - return returnDataList(result); - }catch (Exception e){ - logger.error(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg(),e); - return error(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg()); } + if (!deleteFailedIdList.isEmpty()) { + putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR, String.join(",", deleteFailedIdList)); + } else { + putMsg(result, Status.SUCCESS); + } + + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index 571b2ea469..cc9e0f657f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.api.controller; -import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.service.ProjectService; import org.apache.dolphinscheduler.api.utils.Result; @@ -59,61 +59,53 @@ public class ProjectController extends BaseController { /** * create project * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @param description description * @return returns an error if it exists */ - @ApiOperation(value = "createProject", notes= "CREATE_PROJECT_NOTES") + @ApiOperation(value = "createProject", notes = "CREATE_PROJECT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType ="String"), + @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"), @ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_PROJECT_ERROR) public Result createProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectName") String projectName, @RequestParam(value = "description", required = false) String description) { - try { - logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, description); - Map result = projectService.createProject(loginUser, projectName, description); - return returnDataList(result); - } catch (Exception e) { - logger.error(CREATE_PROJECT_ERROR.getMsg(), e); - return error(CREATE_PROJECT_ERROR.getCode(), CREATE_PROJECT_ERROR.getMsg()); - } + logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, description); + Map result = projectService.createProject(loginUser, projectName, description); + return returnDataList(result); } /** * updateProcessInstance project * - * @param loginUser login user - * @param projectId project id + * @param loginUser login user + * @param projectId project id * @param projectName project name * @param description description * @return update result code */ - @ApiOperation(value = "updateProject", notes= "UPDATE_PROJECT_NOTES") + @ApiOperation(value = "updateProject", notes = "UPDATE_PROJECT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100"), - @ApiImplicitParam(name = "projectName",value = "PROJECT_NAME",dataType = "String"), + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "projectName", value = "PROJECT_NAME", dataType = "String"), @ApiImplicitParam(name = "description", value = "PROJECT_DESC", dataType = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_PROJECT_ERROR) public Result updateProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId, @RequestParam("projectName") String projectName, @RequestParam(value = "description", required = false) String description) { - try { - logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, description); - Map result = projectService.update(loginUser, projectId, projectName, description); - return returnDataList(result); - } catch (Exception e) { - logger.error(UPDATE_PROJECT_ERROR.getMsg(), e); - return error(UPDATE_PROJECT_ERROR.getCode(), UPDATE_PROJECT_ERROR.getMsg()); - } + logger.info("login user {} , updateProcessInstance project name: {}, desc: {}", loginUser.getUserName(), projectName, description); + Map result = projectService.update(loginUser, projectId, projectName, description); + return returnDataList(result); } /** @@ -123,23 +115,19 @@ public class ProjectController extends BaseController { * @param projectId project id * @return project detail information */ - @ApiOperation(value = "queryProjectById", notes= "QUERY_PROJECT_BY_ID_NOTES") + @ApiOperation(value = "queryProjectById", notes = "QUERY_PROJECT_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/query-by-id") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_PROJECT_DETAILS_BY_ID_ERROR) public Result queryProjectById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId) { logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId); - try { - Map result = projectService.queryById(projectId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg(), e); - return error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getCode(), QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg()); - } + Map result = projectService.queryById(projectId); + return returnDataList(result); } /** @@ -147,33 +135,29 @@ public class ProjectController extends BaseController { * * @param loginUser login user * @param searchVal search value - * @param pageSize page size - * @param pageNo page number + * @param pageSize page size + * @param pageNo page number * @return project list which the login user have permission to see */ - @ApiOperation(value = "queryProjectListPaging", notes= "QUERY_PROJECT_LIST_PAGING_NOTES") + @ApiOperation(value = "queryProjectListPaging", notes = "QUERY_PROJECT_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), - @ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType ="Int", example = "20"), - @ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType ="Int", example = "1") + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), + @ApiImplicitParam(name = "projectId", value = "PAGE_SIZE", dataType = "Int", example = "20"), + @ApiImplicitParam(name = "projectId", value = "PAGE_NO", dataType = "Int", example = "1") }) @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR) public Result queryProjectListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageSize") Integer pageSize, @RequestParam("pageNo") Integer pageNo ) { - try { - logger.info("login user {}, query project list paging", loginUser.getUserName()); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); - return returnDataListPaging(result); - } catch (Exception e) { - logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); - return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); - } + logger.info("login user {}, query project list paging", loginUser.getUserName()); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); + return returnDataListPaging(result); } /** @@ -183,49 +167,41 @@ public class ProjectController extends BaseController { * @param projectId project id * @return delete result code */ - @ApiOperation(value = "deleteProjectById", notes= "DELETE_PROJECT_BY_ID_NOTES") + @ApiOperation(value = "deleteProjectById", notes = "DELETE_PROJECT_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "projectId", value = "PROJECT_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_PROJECT_ERROR) public Result deleteProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("projectId") Integer projectId ) { - try { - logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); - Map result = projectService.deleteProject(loginUser, projectId); - return returnDataList(result); - } catch (Exception e) { - logger.error(DELETE_PROJECT_ERROR.getMsg(), e); - return error(DELETE_PROJECT_ERROR.getCode(), DELETE_PROJECT_ERROR.getMsg()); - } + logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); + Map result = projectService.deleteProject(loginUser, projectId); + return returnDataList(result); } /** * query unauthorized project * * @param loginUser login user - * @param userId user id + * @param userId user id * @return the projects which user have not permission to see */ - @ApiOperation(value = "queryUnauthorizedProject", notes= "QUERY_UNAUTHORIZED_PROJECT_NOTES") + @ApiOperation(value = "queryUnauthorizedProject", notes = "QUERY_UNAUTHORIZED_PROJECT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/unauth-project") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_UNAUTHORIZED_PROJECT_ERROR) public Result queryUnauthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try { - logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); - Map result = projectService.queryUnauthorizedProject(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg(), e); - return error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getCode(), QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg()); - } + logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); + Map result = projectService.queryUnauthorizedProject(loginUser, userId); + return returnDataList(result); } @@ -233,73 +209,62 @@ public class ProjectController extends BaseController { * query authorized project * * @param loginUser login user - * @param userId user id + * @param userId user id * @return projects which the user have permission to see, Except for items created by this user */ - @ApiOperation(value = "queryAuthorizedProject", notes= "QUERY_AUTHORIZED_PROJECT_NOTES") + @ApiOperation(value = "queryAuthorizedProject", notes = "QUERY_AUTHORIZED_PROJECT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100") }) @GetMapping(value = "/authed-project") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_AUTHORIZED_PROJECT) public Result queryAuthorizedProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try { - logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); - Map result = projectService.queryAuthorizedProject(loginUser, userId); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_AUTHORIZED_PROJECT.getMsg(), e); - return error(QUERY_AUTHORIZED_PROJECT.getCode(), QUERY_AUTHORIZED_PROJECT.getMsg()); - } + logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); + Map result = projectService.queryAuthorizedProject(loginUser, userId); + return returnDataList(result); } /** * import process definition - * @param loginUser login user - * @param file resource file + * + * @param loginUser login user + * @param file resource file * @param projectName project name * @return import result code */ - @ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCCESS_DEFINITION_NOTES") + + @ApiOperation(value = "importProcessDefinition", notes= "EXPORT_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") }) - @PostMapping(value="/import-definition") + @PostMapping(value = "/import-definition") + @ApiException(IMPORT_PROCESS_DEFINE_ERROR) public Result importProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("file") MultipartFile file, - @RequestParam("projectName") String projectName){ - try{ - logger.info("import process definition by id, login user:{}, project: {}", - loginUser.getUserName(), projectName); - Map result = processDefinitionService.importProcessDefinition(loginUser, file, projectName); - return returnDataList(result); - }catch (Exception e){ - logger.error(IMPORT_PROCESS_DEFINE_ERROR.getMsg(),e); - return error(IMPORT_PROCESS_DEFINE_ERROR.getCode(), IMPORT_PROCESS_DEFINE_ERROR.getMsg()); - } + @RequestParam("projectName") String projectName) { + logger.info("import process definition by id, login user:{}, project: {}", + loginUser.getUserName(), projectName); + Map result = processDefinitionService.importProcessDefinition(loginUser, file, projectName); + return returnDataList(result); } /** * query all project list + * * @param loginUser login user * @return all project list */ - @ApiOperation(value = "queryAllProjectList", notes= "QUERY_ALL_PROJECT_LIST_NOTES") + @ApiOperation(value = "queryAllProjectList", notes = "QUERY_ALL_PROJECT_LIST_NOTES") @GetMapping(value = "/query-project-list") @ResponseStatus(HttpStatus.OK) + @ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR) public Result queryAllProjectList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - - try { - logger.info("login user {}, query all project list", loginUser.getUserName()); - Map result = projectService.queryAllProjectList(); - return returnDataList(result); - } catch (Exception e) { - logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); - return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); - } + logger.info("login user {}, query all project list", loginUser.getUserName()); + Map result = projectService.queryAllProjectList(); + return returnDataList(result); } - } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java index 056ca618f5..cf62d1340b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/QueueController.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.QueueService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -36,6 +37,8 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * queue controller @@ -43,7 +46,7 @@ import java.util.Map; @Api(tags = "QUEUE_TAG", position = 1) @RestController @RequestMapping("/queue") -public class QueueController extends BaseController{ +public class QueueController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(QueueController.class); @@ -53,151 +56,131 @@ public class QueueController extends BaseController{ /** * query queue list + * * @param loginUser login user * @return queue list */ - @ApiOperation(value = "queryList", notes= "QUERY_QUEUE_LIST_NOTES") - @GetMapping(value="/list") + @ApiOperation(value = "queryList", notes = "QUERY_QUEUE_LIST_NOTES") + @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) - public Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ - try{ - logger.info("login user {}, query queue list", loginUser.getUserName()); - Map result = queueService.queryList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg()); - } + @ApiException(QUERY_QUEUE_LIST_ERROR) + public Result queryList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user {}, query queue list", loginUser.getUserName()); + Map result = queueService.queryList(loginUser); + return returnDataList(result); } /** * query queue list paging + * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search value - * @param pageSize page size + * @param pageSize page size * @return queue list */ - @ApiOperation(value = "queryQueueListPaging", notes= "QUERY_QUEUE_LIST_PAGING_NOTES") + @ApiOperation(value = "queryQueueListPaging", notes = "QUERY_QUEUE_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_QUEUE_LIST_ERROR) public Result queryQueueListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ - try{ - logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(),searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - searchVal = ParameterUtils.handleEscapes(searchVal); - result = queueService.queryList(loginUser,searchVal,pageNo,pageSize); + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize) { + logger.info("login user {}, query queue list,search value:{}", loginUser.getUserName(), searchVal); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_QUEUE_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_QUEUE_LIST_ERROR.getCode(), Status.QUERY_QUEUE_LIST_ERROR.getMsg()); } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = queueService.queryList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * create queue * * @param loginUser login user - * @param queue queue + * @param queue queue * @param queueName queue name * @return create result */ - @ApiOperation(value = "createQueue", notes= "CREATE_QUEUE_NOTES") + @ApiOperation(value = "createQueue", notes = "CREATE_QUEUE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true,dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_QUEUE_ERROR) public Result createQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "queue") String queue, - @RequestParam(value = "queueName") String queueName) { + @RequestParam(value = "queue") String queue, + @RequestParam(value = "queueName") String queueName) { logger.info("login user {}, create queue, queue: {}, queueName: {}", loginUser.getUserName(), queue, queueName); - try { - Map result = queueService.createQueue(loginUser,queue,queueName); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.CREATE_QUEUE_ERROR.getMsg(),e); - return error(Status.CREATE_QUEUE_ERROR.getCode(), Status.CREATE_QUEUE_ERROR.getMsg()); - } + Map result = queueService.createQueue(loginUser, queue, queueName); + return returnDataList(result); } /** * update queue * * @param loginUser login user - * @param queue queue - * @param id queue id + * @param queue queue + * @param id queue id * @param queueName queue name * @return update result code */ - @ApiOperation(value = "updateQueue", notes= "UPDATE_QUEUE_NOTES") + @ApiOperation(value = "updateQueue", notes = "UPDATE_QUEUE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.CREATED) + @ApiException(UPDATE_QUEUE_ERROR) public Result updateQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id, @RequestParam(value = "queue") String queue, @RequestParam(value = "queueName") String queueName) { logger.info("login user {}, update queue, id: {}, queue: {}, queueName: {}", - loginUser.getUserName(), id,queue, queueName); - try { - Map result = queueService.updateQueue(loginUser,id,queue,queueName); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.UPDATE_QUEUE_ERROR.getMsg(),e); - return error(Status.UPDATE_QUEUE_ERROR.getCode(), Status.UPDATE_QUEUE_ERROR.getMsg()); - } + loginUser.getUserName(), id, queue, queueName); + Map result = queueService.updateQueue(loginUser, id, queue, queueName); + return returnDataList(result); } /** * verify queue and queue name * * @param loginUser login user - * @param queue queue + * @param queue queue * @param queueName queue name * @return true if the queue name not exists, otherwise return false */ - @ApiOperation(value = "verifyQueue", notes= "VERIFY_QUEUE_NOTES") + @ApiOperation(value = "verifyQueue", notes = "VERIFY_QUEUE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME",required = true, dataType ="String") + @ApiImplicitParam(name = "id", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "YARN_QUEUE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "queueName", value = "QUEUE_NAME", required = true, dataType = "String") }) @PostMapping(value = "/verify-queue") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_QUEUE_ERROR) public Result verifyQueue(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="queue") String queue, - @RequestParam(value ="queueName") String queueName + @RequestParam(value = "queue") String queue, + @RequestParam(value = "queueName") String queueName ) { - try{ - logger.info("login user {}, verfiy queue: {} queue name: {}", - loginUser.getUserName(),queue,queueName); - return queueService.verifyQueue(queue,queueName); - }catch (Exception e){ - logger.error(Status.VERIFY_QUEUE_ERROR.getMsg(),e); - return error(Status.VERIFY_QUEUE_ERROR.getCode(), Status.VERIFY_QUEUE_ERROR.getMsg()); - } + logger.info("login user {}, verfiy queue: {} queue name: {}", + loginUser.getUserName(), queue, queueName); + return queueService.verifyQueue(queue, queueName); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index 7bac6614ee..cc09b2d650 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ResourcesService; import org.apache.dolphinscheduler.api.service.UdfFuncService; import org.apache.dolphinscheduler.api.utils.Result; @@ -44,13 +45,14 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * resources controller */ @Api(tags = "RESOURCES_TAG", position = 1) @RestController @RequestMapping("resources") -public class ResourcesController extends BaseController{ +public class ResourcesController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); @@ -67,159 +69,181 @@ public class ResourcesController extends BaseController{ * @param alias alias * @param description description * @param type type - * @param file file * @return create result code */ - @ApiOperation(value = "createResource", notes= "CREATE_RESOURCE_NOTES") + + /** + * @param loginUser login user + * @param type type + * @param alias alias + * @param description description + * @param pid parent id + * @param currentDir current directory + * @return + */ + @ApiOperation(value = "createDirctory", notes = "CREATE_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType ="String"), + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") + }) + @PostMapping(value = "/directory/create") + @ApiException(CREATE_RESOURCE_ERROR) + public Result createDirectory(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "name") String alias, + @RequestParam(value = "description", required = false) String description, + @RequestParam(value = "pid") int pid, + @RequestParam(value = "currentDir") String currentDir) { + logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", + loginUser.getUserName(), type, alias, description, pid, currentDir); + return resourceService.createDirectory(loginUser, alias, description, type, pid, currentDir); + } + + /** + * create resource + * + * @param loginUser login user + * @param alias alias + * @param description description + * @param type type + * @param file file + * @return create result code + */ + @ApiOperation(value = "createResource", notes = "CREATE_RESOURCE_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") }) @PostMapping(value = "/create") + @ApiException(CREATE_RESOURCE_ERROR) public Result createResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="name")String alias, + @RequestParam(value = "name") String alias, @RequestParam(value = "description", required = false) String description, - @RequestParam("file") MultipartFile file) { - try { - logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", - loginUser.getUserName(),type, alias, description, file.getName(), file.getOriginalFilename()); - return resourceService.createResource(loginUser,alias, description,type ,file); - } catch (Exception e) { - logger.error(CREATE_RESOURCE_ERROR.getMsg(),e); - return error(CREATE_RESOURCE_ERROR.getCode(), CREATE_RESOURCE_ERROR.getMsg()); - } + @RequestParam("file") MultipartFile file, + @RequestParam(value = "pid") int pid, + @RequestParam(value = "currentDir") String currentDir) { + logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", + loginUser.getUserName(), type, alias, description, file.getName(), file.getOriginalFilename()); + return resourceService.createResource(loginUser, alias, description, type, file, pid, currentDir); } /** * update resource * - * @param loginUser login user - * @param alias alias - * @param resourceId resource id - * @param type resource type + * @param loginUser login user + * @param alias alias + * @param resourceId resource id + * @param type resource type * @param description description * @return update result code */ - @ApiOperation(value = "updateResource", notes= "UPDATE_RESOURCE_NOTES") + @ApiOperation(value = "updateResource", notes = "UPDATE_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType ="String"), - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true,dataType = "MultipartFile") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String") }) @PostMapping(value = "/update") + @ApiException(UPDATE_RESOURCE_ERROR) public Result updateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int resourceId, + @RequestParam(value = "id") int resourceId, @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="name")String alias, + @RequestParam(value = "name") String alias, @RequestParam(value = "description", required = false) String description) { - try { - logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", - loginUser.getUserName(),type, alias, description); - return resourceService.updateResource(loginUser,resourceId,alias, description,type); - } catch (Exception e) { - logger.error(UPDATE_RESOURCE_ERROR.getMsg(),e); - return error(Status.UPDATE_RESOURCE_ERROR.getCode(), Status.UPDATE_RESOURCE_ERROR.getMsg()); - } + logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", + loginUser.getUserName(), type, alias, description); + return resourceService.updateResource(loginUser, resourceId, alias, description, type); } /** * query resources list * * @param loginUser login user - * @param type resource type + * @param type resource type * @return resource list */ - @ApiOperation(value = "queryResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") + @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType") + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType") }) - @GetMapping(value="/list") + @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RESOURCES_LIST_ERROR) public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="type") ResourceType type - ){ - try{ - logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); - Map result = resourceService.queryResourceList(loginUser, type); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_RESOURCES_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_RESOURCES_LIST_ERROR.getCode(), Status.QUERY_RESOURCES_LIST_ERROR.getMsg()); - } + @RequestParam(value = "type") ResourceType type + ) { + logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type); + Map result = resourceService.queryResourceList(loginUser, type); + return returnDataList(result); } /** * query resources list paging * * @param loginUser login user - * @param type resource type + * @param type resource type * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return resource list page */ - @ApiOperation(value = "queryResourceListPaging", notes= "QUERY_RESOURCE_LIST_PAGING_NOTES") + @ApiOperation(value = "queryResourceListPaging", notes = "QUERY_RESOURCE_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "int"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RESOURCES_LIST_PAGING) public Result queryResourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="type") ResourceType type, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize - ){ - try{ - logger.info("query resource list, login user:{}, resource type:{}, search value:{}", - loginUser.getUserName(), type.toString(), searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - searchVal = ParameterUtils.handleEscapes(searchVal); - result = resourceService.queryResourceListPaging(loginUser,type,searchVal,pageNo, pageSize); + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "id") int id, + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize + ) { + logger.info("query resource list, login user:{}, resource type:{}, search value:{}", + loginUser.getUserName(), type, searchVal); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_RESOURCES_LIST_PAGING.getMsg(),e); - return error(Status.QUERY_RESOURCES_LIST_PAGING.getCode(), Status.QUERY_RESOURCES_LIST_PAGING.getMsg()); } + + searchVal = ParameterUtils.handleEscapes(searchVal); + result = resourceService.queryResourceListPaging(loginUser, id, type, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * delete resource * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id * @return delete result code */ - @ApiOperation(value = "deleteResource", notes= "DELETE_RESOURCE_BY_ID_NOTES") + @ApiOperation(value = "deleteResource", notes = "DELETE_RESOURCE_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_RESOURCE_ERROR) public Result deleteResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int resourceId - ) { - try{ - logger.info("login user {}, delete resource id: {}", - loginUser.getUserName(),resourceId); - return resourceService.delete(loginUser,resourceId); - }catch (Exception e){ - logger.error(DELETE_RESOURCE_ERROR.getMsg(),e); - return error(Status.DELETE_RESOURCE_ERROR.getCode(), Status.DELETE_RESOURCE_ERROR.getMsg()); - } + @RequestParam(value = "id") int resourceId + ) throws Exception { + logger.info("login user {}, delete resource id: {}", + loginUser.getUserName(), resourceId); + return resourceService.delete(loginUser, resourceId); } @@ -227,286 +251,304 @@ public class ResourcesController extends BaseController{ * verify resource by alias and type * * @param loginUser login user - * @param alias resource name - * @param type resource type + * @param fullName resource full name + * @param type resource type * @return true if the resource name not exists, otherwise return false */ - @ApiOperation(value = "verifyResourceName", notes= "VERIFY_RESOURCE_NAME_NOTES") + @ApiOperation(value = "verifyResourceName", notes = "VERIFY_RESOURCE_NAME_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType ="String") + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String") }) @GetMapping(value = "/verify-name") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR) public Result verifyResourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="name") String alias, - @RequestParam(value ="type") ResourceType type + @RequestParam(value = "fullName") String fullName, + @RequestParam(value = "type") ResourceType type ) { - try { - logger.info("login user {}, verfiy resource alias: {},resource type: {}", - loginUser.getUserName(), alias,type); - - return resourceService.verifyResourceName(alias,type,loginUser); - } catch (Exception e) { - logger.error(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg(), e); - return error(Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getCode(), Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg()); - } + logger.info("login user {}, verfiy resource alias: {},resource type: {}", + loginUser.getUserName(), fullName, type); + + return resourceService.verifyResourceName(fullName, type, loginUser); } /** - * view resource file online + * query resources jar list * * @param loginUser login user - * @param resourceId resource id + * @param type resource type + * @return resource list + */ + @ApiOperation(value = "queryResourceJarList", notes = "QUERY_RESOURCE_LIST_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType") + }) + @GetMapping(value = "/list/jar") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RESOURCES_LIST_ERROR) + public Result queryResourceJarList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "type") ResourceType type + ) { + logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); + Map result = resourceService.queryResourceJarList(loginUser, type); + return returnDataList(result); + } + + /** + * query resource by full name and type + * + * @param loginUser login user + * @param fullName resource full name + * @param type resource type + * @return true if the resource name not exists, otherwise return false + */ + @ApiOperation(value = "queryResource", notes = "QUERY_BY_RESOURCE_NAME") + @ApiImplicitParams({ + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String") + }) + @GetMapping(value = "/queryResource") + @ResponseStatus(HttpStatus.OK) + @ApiException(RESOURCE_NOT_EXIST) + public Result queryResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "fullName", required = false) String fullName, + @RequestParam(value = "id", required = false) Integer id, + @RequestParam(value = "type") ResourceType type + ) { + logger.info("login user {}, query resource by full name: {} or id: {},resource type: {}", + loginUser.getUserName(), fullName, id, type); + + return resourceService.queryResource(fullName, id, type); + } + + /** + * view resource file online + * + * @param loginUser login user + * @param resourceId resource id * @param skipLineNum skip line number - * @param limit limit + * @param limit limit * @return resource content */ - @ApiOperation(value = "viewResource", notes= "VIEW_RESOURCE_BY_ID_NOTES") + @ApiOperation(value = "viewResource", notes = "VIEW_RESOURCE_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/view") + @ApiException(VIEW_RESOURCE_FILE_ON_LINE_ERROR) public Result viewResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int resourceId, @RequestParam(value = "skipLineNum") int skipLineNum, @RequestParam(value = "limit") int limit ) { - try{ - logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}", - loginUser.getUserName(),resourceId,skipLineNum,limit); - - return resourceService.readResource(resourceId,skipLineNum,limit); - }catch (Exception e){ - logger.error(VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); - } + logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}", + loginUser.getUserName(), resourceId, skipLineNum, limit); + + return resourceService.readResource(resourceId, skipLineNum, limit); } /** * create resource file online * - * @param loginUser login user - * @param type resource type - * @param fileName file name - * @param fileSuffix file suffix + * @param loginUser login user + * @param type resource type + * @param fileName file name + * @param fileSuffix file suffix * @param description description - * @param content content + * @param content content * @return create result code */ - @ApiOperation(value = "onlineCreateResource", notes= "ONLINE_CREATE_RESOURCE_NOTES") + @ApiOperation(value = "onlineCreateResource", notes = "ONLINE_CREATE_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType ="ResourceType"), - @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType ="String"), - @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType ="String"), - @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") + @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), + @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType = "String"), + @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), + @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String") }) @PostMapping(value = "/online-create") + @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR) public Result onlineCreateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "type") ResourceType type, - @RequestParam(value ="fileName")String fileName, - @RequestParam(value ="suffix")String fileSuffix, + @RequestParam(value = "fileName") String fileName, + @RequestParam(value = "suffix") String fileSuffix, @RequestParam(value = "description", required = false) String description, - @RequestParam(value = "content") String content + @RequestParam(value = "content") String content, + @RequestParam(value = "pid") int pid, + @RequestParam(value = "currentDir") String currentDir ) { - try{ - logger.info("login user {}, online create resource! fileName : {}, type : {}, suffix : {},desc : {},content : {}", - loginUser.getUserName(),fileName,type,fileSuffix,description,content); - if(StringUtils.isEmpty(content)){ - logger.error("resource file contents are not allowed to be empty"); - return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.onlineCreateResource(loginUser,type,fileName,fileSuffix,description,content); - }catch (Exception e){ - logger.error(CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); + logger.info("login user {}, online create resource! fileName : {}, type : {}, suffix : {},desc : {},content : {}", + loginUser.getUserName(), fileName, type, fileSuffix, description, content, pid, currentDir); + if (StringUtils.isEmpty(content)) { + logger.error("resource file contents are not allowed to be empty"); + return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); } + return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, description, content, pid, currentDir); } /** * edit resource file online * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id - * @param content content + * @param content content * @return update result code */ - @ApiOperation(value = "updateResourceContent", notes= "UPDATE_RESOURCE_NOTES") + @ApiOperation(value = "updateResourceContent", notes = "UPDATE_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "content", value = "CONTENT",required = true, dataType ="String") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String") }) @PostMapping(value = "/update-content") + @ApiException(EDIT_RESOURCE_FILE_ON_LINE_ERROR) public Result updateResourceContent(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int resourceId, @RequestParam(value = "content") String content ) { - try{ - logger.info("login user {}, updateProcessInstance resource : {}", - loginUser.getUserName(),resourceId); - if(StringUtils.isEmpty(content)){ - logger.error("The resource file contents are not allowed to be empty"); - return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); - } - return resourceService.updateResourceContent(resourceId,content); - }catch (Exception e){ - logger.error(EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); - return error(Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); + logger.info("login user {}, updateProcessInstance resource : {}", + loginUser.getUserName(), resourceId); + if (StringUtils.isEmpty(content)) { + logger.error("The resource file contents are not allowed to be empty"); + return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); } + return resourceService.updateResourceContent(resourceId, content); } /** * download resource file * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id * @return resource content */ - @ApiOperation(value = "downloadResource", notes= "DOWNLOAD_RESOURCE_NOTES") + @ApiOperation(value = "downloadResource", notes = "DOWNLOAD_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/download") @ResponseBody + @ApiException(DOWNLOAD_RESOURCE_FILE_ERROR) public ResponseEntity downloadResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int resourceId) { - try{ - logger.info("login user {}, download resource : {}", - loginUser.getUserName(), resourceId); - Resource file = resourceService.downloadResource(resourceId); - if (file == null) { - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg()); - } - return ResponseEntity - .ok() - .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") - .body(file); - }catch (Exception e){ - logger.error(DOWNLOAD_RESOURCE_FILE_ERROR.getMsg(),e); - return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_RESOURCE_FILE_ERROR.getMsg()); + @RequestParam(value = "id") int resourceId) throws Exception { + logger.info("login user {}, download resource : {}", + loginUser.getUserName(), resourceId); + Resource file = resourceService.downloadResource(resourceId); + if (file == null) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg()); } + return ResponseEntity + .ok() + .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") + .body(file); } /** * create udf function - * @param loginUser login user - * @param type udf type - * @param funcName function name - * @param argTypes argument types - * @param database database + * + * @param loginUser login user + * @param type udf type + * @param funcName function name + * @param argTypes argument types + * @param database database * @param description description - * @param className class name - * @param resourceId resource id + * @param className class name + * @param resourceId resource id * @return create result code */ - @ApiOperation(value = "createUdfFunc", notes= "CREATE_UDF_FUNCTION_NOTES") + @ApiOperation(value = "createUdfFunc", notes = "CREATE_UDF_FUNCTION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), - @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), - @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType ="String"), - @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"), + @ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"), + @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"), + @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/udf-func/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_UDF_FUNCTION_ERROR) public Result createUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "type") UdfType type, - @RequestParam(value ="funcName")String funcName, - @RequestParam(value ="className")String className, - @RequestParam(value ="argTypes", required = false)String argTypes, - @RequestParam(value ="database", required = false)String database, + @RequestParam(value = "funcName") String funcName, + @RequestParam(value = "className") String className, + @RequestParam(value = "argTypes", required = false) String argTypes, + @RequestParam(value = "database", required = false) String database, @RequestParam(value = "description", required = false) String description, @RequestParam(value = "resourceId") int resourceId) { logger.info("login user {}, create udf function, type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", - loginUser.getUserName(),type, funcName, argTypes,database,description, resourceId); - Result result = new Result(); - - try { - return udfFuncService.createUdfFunction(loginUser,funcName,className,argTypes,database,description,type,resourceId); - } catch (Exception e) { - logger.error(CREATE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.CREATE_UDF_FUNCTION_ERROR.getCode(), Status.CREATE_UDF_FUNCTION_ERROR.getMsg()); - } + loginUser.getUserName(), type, funcName, argTypes, database, description, resourceId); + return udfFuncService.createUdfFunction(loginUser, funcName, className, argTypes, database, description, type, resourceId); } /** * view udf function * * @param loginUser login user - * @param id resource id + * @param id resource id * @return udf function detail */ - @ApiOperation(value = "viewUIUdfFunction", notes= "VIEW_UDF_FUNCTION_NOTES") + @ApiOperation(value = "viewUIUdfFunction", notes = "VIEW_UDF_FUNCTION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/udf-func/update-ui") @ResponseStatus(HttpStatus.OK) + @ApiException(VIEW_UDF_FUNCTION_ERROR) public Result viewUIUdfFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") int id) { + @RequestParam("id") int id) { logger.info("login user {}, query udf{}", loginUser.getUserName(), id); - try { - Map map = udfFuncService.queryUdfFuncDetail(id); - return returnDataList(map); - } catch (Exception e) { - logger.error(VIEW_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.VIEW_UDF_FUNCTION_ERROR.getCode(), Status.VIEW_UDF_FUNCTION_ERROR.getMsg()); - } + Map map = udfFuncService.queryUdfFuncDetail(id); + return returnDataList(map); } /** * update udf function * - * @param loginUser login user - * @param type resource type - * @param funcName function name - * @param argTypes argument types - * @param database data base + * @param loginUser login user + * @param type resource type + * @param funcName function name + * @param argTypes argument types + * @param database data base * @param description description - * @param resourceId resource id - * @param className class name - * @param udfFuncId udf function id + * @param resourceId resource id + * @param className class name + * @param udfFuncId udf function id * @return update result code */ - @ApiOperation(value = "updateUdfFunc", notes= "UPDATE_UDF_FUNCTION_NOTES") + @ApiOperation(value = "updateUdfFunc", notes = "UPDATE_UDF_FUNCTION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType"), - @ApiImplicitParam(name = "funcName", value = "FUNC_NAME",required = true, dataType ="String"), - @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType ="String"), - @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType ="String"), - @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType ="String"), - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"), + @ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"), + @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"), + @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"), + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/udf-func/update") + @ApiException(UPDATE_UDF_FUNCTION_ERROR) public Result updateUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int udfFuncId, @RequestParam(value = "type") UdfType type, - @RequestParam(value ="funcName")String funcName, - @RequestParam(value ="className")String className, - @RequestParam(value ="argTypes", required = false)String argTypes, - @RequestParam(value ="database", required = false)String database, + @RequestParam(value = "funcName") String funcName, + @RequestParam(value = "className") String className, + @RequestParam(value = "argTypes", required = false) String argTypes, + @RequestParam(value = "database", required = false) String database, @RequestParam(value = "description", required = false) String description, @RequestParam(value = "resourceId") int resourceId) { - try { - logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", - loginUser.getUserName(),udfFuncId,type, funcName, argTypes,database,description, resourceId); - Map result = udfFuncService.updateUdfFunc(udfFuncId,funcName,className,argTypes,database,description,type,resourceId); - return returnDataList(result); - } catch (Exception e) { - logger.error(UPDATE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.UPDATE_UDF_FUNCTION_ERROR.getCode(), Status.UPDATE_UDF_FUNCTION_ERROR.getMsg()); - } + logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", + loginUser.getUserName(), udfFuncId, type, funcName, argTypes, database, description, resourceId); + Map result = udfFuncService.updateUdfFunc(udfFuncId, funcName, className, argTypes, database, description, type, resourceId); + return returnDataList(result); } /** @@ -514,91 +556,78 @@ public class ResourcesController extends BaseController{ * * @param loginUser login user * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return udf function list page */ - @ApiOperation(value = "queryUdfFuncListPaging", notes= "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") + @ApiOperation(value = "queryUdfFuncListPaging", notes = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/udf-func/list-paging") + @GetMapping(value = "/udf-func/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR) public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageSize") Integer pageSize - ){ - try{ - logger.info("query udf functions list, login user:{},search value:{}", - loginUser.getUserName(), searchVal); - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - - result = udfFuncService.queryUdfFuncListPaging(loginUser,searchVal,pageNo, pageSize); + ) { + logger.info("query udf functions list, login user:{},search value:{}", + loginUser.getUserName(), searchVal); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getCode(), Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg()); } + + result = udfFuncService.queryUdfFuncListPaging(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * query resource list by type * * @param loginUser login user - * @param type resource type + * @param type resource type * @return resource list */ - @ApiOperation(value = "queryResourceList", notes= "QUERY_RESOURCE_LIST_NOTES") + @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType ="UdfType") + @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType") }) - @GetMapping(value="/udf-func/list") + @GetMapping(value = "/udf-func/list") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR) public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("type") UdfType type){ - try{ - logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type.toString()); - Map result = udfFuncService.queryResourceList(loginUser,type.ordinal()); - return returnDataList(result); - }catch (Exception e){ - logger.error(QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg(),e); - return error(Status.QUERY_DATASOURCE_BY_TYPE_ERROR.getCode(),QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg()); - } + @RequestParam("type") UdfType type) { + logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type); + Map result = udfFuncService.queryResourceList(loginUser, type.ordinal()); + return returnDataList(result); } /** * verify udf function name can use or not * * @param loginUser login user - * @param name name + * @param name name * @return true if the name can user, otherwise return false */ - @ApiOperation(value = "verifyUdfFuncName", notes= "VERIFY_UDF_FUNCTION_NAME_NOTES") + @ApiOperation(value = "verifyUdfFuncName", notes = "VERIFY_UDF_FUNCTION_NAME_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "name", value = "FUNC_NAME",required = true, dataType ="String") + @ApiImplicitParam(name = "name", value = "FUNC_NAME", required = true, dataType = "String") }) @GetMapping(value = "/udf-func/verify-name") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_UDF_FUNCTION_NAME_ERROR) public Result verifyUdfFuncName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="name") String name + @RequestParam(value = "name") String name ) { logger.info("login user {}, verfiy udf function name: {}", - loginUser.getUserName(),name); + loginUser.getUserName(), name); - try{ - - return udfFuncService.verifyUdfFuncByName(name); - }catch (Exception e){ - logger.error(VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg(),e); - return error(Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getCode(), Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg()); - } + return udfFuncService.verifyUdfFuncByName(name); } /** @@ -608,48 +637,39 @@ public class ResourcesController extends BaseController{ * @param udfFuncId udf function id * @return delete result code */ - @ApiOperation(value = "deleteUdfFunc", notes= "DELETE_UDF_FUNCTION_NOTES") + @ApiOperation(value = "deleteUdfFunc", notes = "DELETE_UDF_FUNCTION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/udf-func/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_UDF_FUNCTION_ERROR) public Result deleteUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="id") int udfFuncId + @RequestParam(value = "id") int udfFuncId ) { - try{ - - logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(),udfFuncId); - return udfFuncService.delete(udfFuncId); - }catch (Exception e){ - logger.error(DELETE_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.DELETE_UDF_FUNCTION_ERROR.getCode(), Status.DELETE_UDF_FUNCTION_ERROR.getMsg()); - } + logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(), udfFuncId); + return udfFuncService.delete(udfFuncId); } /** * authorized file resource list * * @param loginUser login user - * @param userId user id + * @param userId user id * @return authorized result */ - @ApiOperation(value = "authorizedFile", notes= "AUTHORIZED_FILE_NOTES") + @ApiOperation(value = "authorizedFile", notes = "AUTHORIZED_FILE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/authed-file") @ResponseStatus(HttpStatus.CREATED) + @ApiException(AUTHORIZED_FILE_RESOURCE_ERROR) public Result authorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try{ - logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.authorizedFile(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(AUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); - return error(Status.AUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.AUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); - } + logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.authorizedFile(loginUser, userId); + return returnDataList(result); } @@ -657,25 +677,21 @@ public class ResourcesController extends BaseController{ * unauthorized file resource list * * @param loginUser login user - * @param userId user id + * @param userId user id * @return unauthorized result code */ - @ApiOperation(value = "unauthorizedFile", notes= "UNAUTHORIZED_FILE_NOTES") + @ApiOperation(value = "authorizeResourceTree", notes = "AUTHORIZE_RESOURCE_TREE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) - @GetMapping(value = "/unauth-file") + @GetMapping(value = "/authorize-resource-tree") @ResponseStatus(HttpStatus.CREATED) - public Result unauthorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("userId") Integer userId) { - try{ - logger.info("resource unauthorized file, user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.unauthorizedFile(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); - } + @ApiException(AUTHORIZE_RESOURCE_TREE) + public Result authorizeResourceTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam("userId") Integer userId) { + logger.info("all resource file, user:{}, user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.authorizeResourceTree(loginUser, userId); + return returnDataList(result); } @@ -683,26 +699,22 @@ public class ResourcesController extends BaseController{ * unauthorized udf function * * @param loginUser login user - * @param userId user id + * @param userId user id * @return unauthorized result code */ - @ApiOperation(value = "unauthUDFFunc", notes= "UNAUTHORIZED_UDF_FUNC_NOTES") + @ApiOperation(value = "unauthUDFFunc", notes = "UNAUTHORIZED_UDF_FUNC_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/unauth-udf-func") @ResponseStatus(HttpStatus.CREATED) + @ApiException(UNAUTHORIZED_UDF_FUNCTION_ERROR) public Result unauthUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try{ - logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); - - Map result = resourceService.unauthorizedUDFFunction(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); - } + logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); + + Map result = resourceService.unauthorizedUDFFunction(loginUser, userId); + return returnDataList(result); } @@ -710,24 +722,20 @@ public class ResourcesController extends BaseController{ * authorized udf function * * @param loginUser login user - * @param userId user id + * @param userId user id * @return authorized result code */ - @ApiOperation(value = "authUDFFunc", notes= "AUTHORIZED_UDF_FUNC_NOTES") + @ApiOperation(value = "authUDFFunc", notes = "AUTHORIZED_UDF_FUNC_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/authed-udf-func") @ResponseStatus(HttpStatus.CREATED) + @ApiException(AUTHORIZED_UDF_FUNCTION_ERROR) public Result authorizedUDFFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { - try{ - logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId); - Map result = resourceService.authorizedUDFFunction(loginUser, userId); - return returnDataList(result); - }catch (Exception e){ - logger.error(AUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.AUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.AUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); - } + logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId); + Map result = resourceService.authorizedUDFFunction(loginUser, userId); + return returnDataList(result); } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index 96038dcf8c..a9a5514027 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.api.controller; -import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.FailureStrategy; @@ -34,6 +34,7 @@ import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.*; import springfox.documentation.annotations.ApiIgnore; +import java.io.IOException; import java.util.Map; import static org.apache.dolphinscheduler.api.enums.Status.*; @@ -60,33 +61,34 @@ public class SchedulerController extends BaseController { /** * create schedule * - * @param loginUser login user - * @param projectName project name - * @param processDefinitionId process definition id - * @param schedule scheduler - * @param warningType warning type - * @param warningGroupId warning group id - * @param failureStrategy failure strategy + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy * @param processInstancePriority process instance priority - * @param receivers receivers - * @param receiversCc receivers cc - * @param workerGroupId worker group id + * @param receivers receivers + * @param receiversCc receivers cc + * @param workerGroup worker group * @return create result code */ - @ApiOperation(value = "createSchedule", notes= "CREATE_SCHEDULE_NOTES") + @ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_SCHEDULE_ERROR) public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionId") Integer processDefinitionId, @@ -96,52 +98,48 @@ public class SchedulerController extends BaseController { @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, @RequestParam(value = "receivers", required = false) String receivers, @RequestParam(value = "receiversCc", required = false) String receiversCc, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException { logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}", loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, - failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); - try { - Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroupId); + failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup); + Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, + warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup); - return returnDataList(result); - } catch (Exception e) { - logger.error(CREATE_SCHEDULE_ERROR.getMsg(), e); - return error(CREATE_SCHEDULE_ERROR.getCode(), CREATE_SCHEDULE_ERROR.getMsg()); - } + return returnDataList(result); } /** * updateProcessInstance schedule * - * @param loginUser login user - * @param projectName project name - * @param id scheduler id - * @param schedule scheduler - * @param warningType warning type - * @param warningGroupId warning group id - * @param failureStrategy failure strategy - * @param receivers receivers - * @param workerGroupId worker group id + * @param loginUser login user + * @param projectName project name + * @param id scheduler id + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param receivers receivers + * @param workerGroup worker group * @param processInstancePriority process instance priority - * @param receiversCc receivers cc + * @param receiversCc receivers cc * @return update result code */ - @ApiOperation(value = "updateSchedule", notes= "UPDATE_SCHEDULE_NOTES") + @ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type ="WarningType"), + @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), + @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type ="FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type ="String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type ="String"), + @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), + @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"), + @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type ="Priority"), + @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @PostMapping("/update") + @ApiException(UPDATE_SCHEDULE_ERROR) public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "id") Integer id, @@ -151,196 +149,164 @@ public class SchedulerController extends BaseController { @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, @RequestParam(value = "receivers", required = false) String receivers, @RequestParam(value = "receiversCc", required = false) String receiversCc, - @RequestParam(value = "workerGroupId", required = false, defaultValue = "-1") int workerGroupId, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException { logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}", loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy, - receivers, receiversCc, processInstancePriority, workerGroupId); - - try { - Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroupId); - return returnDataList(result); + receivers, receiversCc, processInstancePriority, workerGroup); - } catch (Exception e) { - logger.error(UPDATE_SCHEDULE_ERROR.getMsg(), e); - return error(Status.UPDATE_SCHEDULE_ERROR.getCode(), Status.UPDATE_SCHEDULE_ERROR.getMsg()); - } + Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, + warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroup); + return returnDataList(result); } /** * publish schedule setScheduleState * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param id scheduler id + * @param id scheduler id * @return publish result code */ - @ApiOperation(value = "online", notes= "ONLINE_SCHEDULE_NOTES") + @ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping("/online") + @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, @RequestParam("id") Integer id) { logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}", loginUser.getUserName(), projectName, id); - try { - Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); - return returnDataList(result); - - } catch (Exception e) { - logger.error(PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg(), e); - return error(Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getCode(), Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg()); - } + Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); + return returnDataList(result); } /** * offline schedule * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param id schedule id + * @param id schedule id * @return operation result code */ - @ApiOperation(value = "offline", notes= "OFFLINE_SCHEDULE_NOTES") + @ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping("/offline") + @ApiException(OFFLINE_SCHEDULE_ERROR) public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, @RequestParam("id") Integer id) { logger.info("login user {}, schedule offline, project name: {}, process definition id: {}", loginUser.getUserName(), projectName, id); - try { - Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); - return returnDataList(result); - - } catch (Exception e) { - logger.error(OFFLINE_SCHEDULE_ERROR.getMsg(), e); - return error(Status.OFFLINE_SCHEDULE_ERROR.getCode(), Status.OFFLINE_SCHEDULE_ERROR.getMsg()); - } + Map result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); + return returnDataList(result); } /** * query schedule list paging * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id - * @param pageNo page number - * @param pageSize page size - * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param searchVal search value * @return schedule list page */ - @ApiOperation(value = "queryScheduleListPaging", notes= "QUERY_SCHEDULE_LIST_PAGING_NOTES") + @ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true,dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") + @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100") }) - @GetMapping("/list-paging") + @GetMapping("/list-paging") + @ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR) public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam Integer processDefinitionId, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { - logger.info("login user {}, query schedule, project name: {}, process definition id: {}", - loginUser.getUserName(), projectName, processDefinitionId); - try { - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg()); - } - + logger.info("login user {}, query schedule, project name: {}, process definition id: {}", + loginUser.getUserName(), projectName, processDefinitionId); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * delete schedule by id * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param scheduleId scheule id + * @param scheduleId scheule id * @return delete result code */ - @ApiOperation(value = "deleteScheduleById", notes= "OFFLINE_SCHEDULE_NOTES") + @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100") }) - @GetMapping(value="/delete") + @GetMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR) public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, - @PathVariable String projectName, - @RequestParam("scheduleId") Integer scheduleId - ){ - try{ - logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}", - loginUser.getUserName(), projectName, scheduleId); - Map result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); - return returnDataList(result); - }catch (Exception e){ - logger.error(DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getCode(), Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg()); - } + @PathVariable String projectName, + @RequestParam("scheduleId") Integer scheduleId + ) { + logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}", + loginUser.getUserName(), projectName, scheduleId); + Map result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); + return returnDataList(result); } + /** * query schedule list * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @return schedule list */ - @ApiOperation(value = "queryScheduleList", notes= "QUERY_SCHEDULE_LIST_NOTES") + @ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES") @PostMapping("/list") + @ApiException(QUERY_SCHEDULE_LIST_ERROR) public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) { - try { - logger.info("login user {}, query schedule list, project name: {}", - loginUser.getUserName(), projectName); - Map result = schedulerService.queryScheduleList(loginUser, projectName); - return returnDataList(result); - } catch (Exception e) { - logger.error(QUERY_SCHEDULE_LIST_ERROR.getMsg(), e); - return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg()); - } + logger.info("login user {}, query schedule list, project name: {}", + loginUser.getUserName(), projectName); + Map result = schedulerService.queryScheduleList(loginUser, projectName); + return returnDataList(result); } /** * preview schedule * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param schedule schedule expression + * @param schedule schedule expression * @return the next five fire time */ - @ApiOperation(value = "previewSchedule", notes= "PREVIEW_SCHEDULE_NOTES") + @ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"), }) @PostMapping("/preview") @ResponseStatus(HttpStatus.CREATED) + @ApiException(PREVIEW_SCHEDULE_ERROR) public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, - @RequestParam(value = "schedule") String schedule - ){ + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, + @RequestParam(value = "schedule") String schedule + ) { logger.info("login user {}, project name: {}, preview schedule: {}", loginUser.getUserName(), projectName, schedule); - try { - Map result = schedulerService.previewSchedule(loginUser, projectName, schedule); - return returnDataList(result); - } catch (Exception e) { - logger.error(PREVIEW_SCHEDULE_ERROR.getMsg(), e); - return error(PREVIEW_SCHEDULE_ERROR.getCode(), PREVIEW_SCHEDULE_ERROR.getMsg()); - } + Map result = schedulerService.previewSchedule(loginUser, projectName, schedule); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index 276d2ff7da..c0ad88f481 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskInstanceService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -24,7 +25,6 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; import io.swagger.annotations.*; -import org.apache.dolphinscheduler.api.enums.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -34,13 +34,15 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR; + /** * task instance controller */ @Api(tags = "TASK_INSTANCE_TAG", position = 11) @RestController @RequestMapping("/projects/{projectName}/task-instance") -public class TaskInstanceController extends BaseController{ +public class TaskInstanceController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class); @@ -51,34 +53,35 @@ public class TaskInstanceController extends BaseController{ /** * query task list paging * - * @param loginUser login user - * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processInstanceId process instance id - * @param searchVal search value - * @param taskName task name - * @param stateType state type - * @param host host - * @param startTime start time - * @param endTime end time - * @param pageNo page number - * @param pageSize page size + * @param searchVal search value + * @param taskName task name + * @param stateType state type + * @param host host + * @param startTime start time + * @param endTime end time + * @param pageNo page number + * @param pageSize page size * @return task list page */ - @ApiOperation(value = "queryTaskListPaging", notes= "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") + @ApiOperation(value = "queryTaskListPaging", notes = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID",required = false, dataType = "Int", example = "100"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), - @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type ="String"), - @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"), - @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), - @ApiImplicitParam(name = "host", value = "HOST", type ="String"), - @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), - @ApiImplicitParam(name = "endDate", value = "END_DATE", type ="String"), + @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = false, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type = "String"), + @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"), + @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type = "ExecutionStatus"), + @ApiImplicitParam(name = "host", value = "HOST", type = "String"), + @ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) @GetMapping("/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_LIST_PAGING_ERROR) public Result queryTaskListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, @@ -90,20 +93,14 @@ public class TaskInstanceController extends BaseController{ @RequestParam(value = "startDate", required = false) String startTime, @RequestParam(value = "endDate", required = false) String endTime, @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize){ - - try{ - logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}", - projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime); - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = taskInstanceService.queryTaskListPaging( - loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_TASK_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg()); - } + @RequestParam("pageSize") Integer pageSize) { + logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}", + projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime); + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = taskInstanceService.queryTaskListPaging( + loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + return returnDataListPaging(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java index 64121c26dd..e20c845d42 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskRecordController.java @@ -17,11 +17,11 @@ package org.apache.dolphinscheduler.api.controller; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskRecordService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.api.enums.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -31,13 +31,15 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * data quality controller */ @ApiIgnore @RestController @RequestMapping("/projects/task-record") -public class TaskRecordController extends BaseController{ +public class TaskRecordController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(TaskRecordController.class); @@ -49,20 +51,21 @@ public class TaskRecordController extends BaseController{ /** * query task record list page * - * @param loginUser login user - * @param taskName task name - * @param state state + * @param loginUser login user + * @param taskName task name + * @param state state * @param sourceTable source table - * @param destTable destination table - * @param taskDate task date - * @param startTime start time - * @param endTime end time - * @param pageNo page numbere - * @param pageSize page size + * @param destTable destination table + * @param taskDate task date + * @param startTime start time + * @param endTime end time + * @param pageNo page numbere + * @param pageSize page size * @return task record list */ @GetMapping("/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR) public Result queryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "state", required = false) String state, @@ -73,59 +76,48 @@ public class TaskRecordController extends BaseController{ @RequestParam(value = "endDate", required = false) String endTime, @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize - ){ + ) { - try{ logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", - taskName, state, taskDate, startTime, endTime); - Map result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); + taskName, state, taskDate, startTime, endTime); + Map result = taskRecordService.queryTaskRecordListPaging(false, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize); return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); - } - } /** * query history task record list paging * - * @param loginUser login user - * @param taskName task name - * @param state state + * @param loginUser login user + * @param taskName task name + * @param state state * @param sourceTable source table - * @param destTable destination table - * @param taskDate task date - * @param startTime start time - * @param endTime end time - * @param pageNo page number - * @param pageSize page size + * @param destTable destination table + * @param taskDate task date + * @param startTime start time + * @param endTime end time + * @param pageNo page number + * @param pageSize page size * @return history task record list */ @GetMapping("/history-list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TASK_RECORD_LIST_PAGING_ERROR) public Result queryHistoryTaskRecordListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "taskName", required = false) String taskName, - @RequestParam(value = "state", required = false) String state, - @RequestParam(value = "sourceTable", required = false) String sourceTable, - @RequestParam(value = "destTable", required = false) String destTable, - @RequestParam(value = "taskDate", required = false) String taskDate, - @RequestParam(value = "startDate", required = false) String startTime, - @RequestParam(value = "endDate", required = false) String endTime, - @RequestParam("pageNo") Integer pageNo, - @RequestParam("pageSize") Integer pageSize - ){ - - try{ - logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", - taskName, state, taskDate, startTime, endTime); - Map result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); - } + @RequestParam(value = "taskName", required = false) String taskName, + @RequestParam(value = "state", required = false) String state, + @RequestParam(value = "sourceTable", required = false) String sourceTable, + @RequestParam(value = "destTable", required = false) String destTable, + @RequestParam(value = "taskDate", required = false) String taskDate, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize + ) { + logger.info("query hisotry task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", + taskName, state, taskDate, startTime, endTime); + Map result = taskRecordService.queryTaskRecordListPaging(true, taskName, startTime, taskDate, sourceTable, destTable, endTime, state, pageNo, pageSize); + return returnDataListPaging(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java index afdb80bd2c..a603ac050c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -36,6 +37,8 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * tenant controller @@ -43,7 +46,7 @@ import java.util.Map; @Api(tags = "TENANT_TAG", position = 1) @RestController @RequestMapping("/tenant") -public class TenantController extends BaseController{ +public class TenantController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(TenantController.class); @@ -54,38 +57,33 @@ public class TenantController extends BaseController{ /** * create tenant * - * @param loginUser login user - * @param tenantCode tenant code - * @param tenantName tenant name - * @param queueId queue id + * @param loginUser login user + * @param tenantCode tenant code + * @param tenantName tenant name + * @param queueId queue id * @param description description * @return create result code */ - @ApiOperation(value = "createTenant", notes= "CREATE_TENANT_NOTES") + @ApiOperation(value = "createTenant", notes = "CREATE_TENANT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int",example = "100"), - @ApiImplicitParam(name = "description", value = "TENANT_DESC", dataType ="String") + @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "description", value = "TENANT_DESC", dataType = "String") }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_TENANT_ERROR) public Result createTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, - @RequestParam(value = "queueId") int queueId, - @RequestParam(value = "description",required = false) String description) { + @RequestParam(value = "tenantCode") String tenantCode, + @RequestParam(value = "tenantName") String tenantName, + @RequestParam(value = "queueId") int queueId, + @RequestParam(value = "description", required = false) String description) throws Exception { logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId,description); - try { - Map result = tenantService.createTenant(loginUser,tenantCode,tenantName,queueId,description); - return returnDataList(result); - - }catch (Exception e){ - logger.error(Status.CREATE_TENANT_ERROR.getMsg(),e); - return error(Status.CREATE_TENANT_ERROR.getCode(), Status.CREATE_TENANT_ERROR.getMsg()); - } + loginUser.getUserName(), tenantCode, tenantName, queueId, description); + Map result = tenantService.createTenant(loginUser, tenantCode, tenantName, queueId, description); + return returnDataList(result); } @@ -94,36 +92,32 @@ public class TenantController extends BaseController{ * * @param loginUser login user * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return tenant list page */ - @ApiOperation(value = "queryTenantlistPaging", notes= "QUERY_TENANT_LIST_PAGING_NOTES") + @ApiOperation(value = "queryTenantlistPaging", notes = "QUERY_TENANT_LIST_PAGING_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType ="String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType ="Int",example = "20") + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_TENANT_LIST_PAGING_ERROR) public Result queryTenantlistPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ + @RequestParam("pageNo") Integer pageNo, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize); + loginUser.getUserName(), pageNo, searchVal, pageSize); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getCode(), Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg()); } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } @@ -133,113 +127,95 @@ public class TenantController extends BaseController{ * @param loginUser login user * @return tenant list */ - @ApiOperation(value = "queryTenantlist", notes= "QUERY_TENANT_LIST_NOTES") - @GetMapping(value="/list") + @ApiOperation(value = "queryTenantlist", notes = "QUERY_TENANT_LIST_NOTES") + @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) - public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + @ApiException(QUERY_TENANT_LIST_ERROR) + public Result queryTenantlist(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, query tenant list", loginUser.getUserName()); - try{ - Map result = tenantService.queryTenantList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_TENANT_LIST_ERROR.getMsg(),e); - return error(Status.QUERY_TENANT_LIST_ERROR.getCode(), Status.QUERY_TENANT_LIST_ERROR.getMsg()); - } + Map result = tenantService.queryTenantList(loginUser); + return returnDataList(result); } - /** * udpate tenant * - * @param loginUser login user - * @param id tennat id - * @param tenantCode tennat code - * @param tenantName tennat name - * @param queueId queue id + * @param loginUser login user + * @param id tennat id + * @param tenantCode tennat code + * @param tenantName tennat name + * @param queueId queue id * @param description description * @return update result code */ - @ApiOperation(value = "updateTenant", notes= "UPDATE_TENANT_NOTES") + @ApiOperation(value = "updateTenant", notes = "UPDATE_TENANT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100"), + @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType ="Int", example = "100"), - @ApiImplicitParam(name = "description", value = "TENANT_DESC", type ="String") + @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "description", value = "TENANT_DESC", type = "String") }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_TENANT_ERROR) public Result updateTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, - @RequestParam(value = "queueId") int queueId, - @RequestParam(value = "description",required = false) String description) { + @RequestParam(value = "id") int id, + @RequestParam(value = "tenantCode") String tenantCode, + @RequestParam(value = "tenantName") String tenantName, + @RequestParam(value = "queueId") int queueId, + @RequestParam(value = "description", required = false) String description) throws Exception { logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, description: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId,description); - try { - Map result = tenantService.updateTenant(loginUser,id,tenantCode, tenantName, queueId, description); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.UPDATE_TENANT_ERROR.getMsg(),e); - return error(Status.UPDATE_TENANT_ERROR.getCode(), Status.UPDATE_TENANT_ERROR.getMsg()); - } + loginUser.getUserName(), tenantCode, tenantName, queueId, description); + Map result = tenantService.updateTenant(loginUser, id, tenantCode, tenantName, queueId, description); + return returnDataList(result); } /** * delete tenant by id * * @param loginUser login user - * @param id tenant id + * @param id tenant id * @return delete result code */ - @ApiOperation(value = "deleteTenantById", notes= "DELETE_TENANT_NOTES") + @ApiOperation(value = "deleteTenantById", notes = "DELETE_TENANT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType ="Int", example = "100") + @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_TENANT_BY_ID_ERROR) public Result deleteTenantById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { + @RequestParam(value = "id") int id) throws Exception { logger.info("login user {}, delete tenant, tenantId: {},", loginUser.getUserName(), id); - try { - Map result = tenantService.deleteTenantById(loginUser,id); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.DELETE_TENANT_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_TENANT_BY_ID_ERROR.getCode(), Status.DELETE_TENANT_BY_ID_ERROR.getMsg()); - } + Map result = tenantService.deleteTenantById(loginUser, id); + return returnDataList(result); } /** * verify tenant code * - * @param loginUser login user + * @param loginUser login user * @param tenantCode tenant code * @return true if tenant code can user, otherwise return false */ - @ApiOperation(value = "verifyTenantCode", notes= "VERIFY_TENANT_CODE_NOTES") + @ApiOperation(value = "verifyTenantCode", notes = "VERIFY_TENANT_CODE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String") }) @GetMapping(value = "/verify-tenant-code") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_TENANT_CODE_ERROR) public Result verifyTenantCode(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="tenantCode") String tenantCode + @RequestParam(value = "tenantCode") String tenantCode ) { - - try{ - logger.info("login user {}, verfiy tenant code: {}", - loginUser.getUserName(),tenantCode); - return tenantService.verifyTenantCode(tenantCode); - }catch (Exception e){ - logger.error(Status.VERIFY_TENANT_CODE_ERROR.getMsg(),e); - return error(Status.VERIFY_TENANT_CODE_ERROR.getCode(), Status.VERIFY_TENANT_CODE_ERROR.getMsg()); - } + logger.info("login user {}, verfiy tenant code: {}", + loginUser.getUserName(), tenantCode); + return tenantService.verifyTenantCode(tenantCode); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index 42f89237ab..08d862e032 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.UsersService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -36,14 +37,16 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * user controller */ -@Api(tags = "USERS_TAG" , position = 14) +@Api(tags = "USERS_TAG", position = 14) @RestController @RequestMapping("/users") -public class UsersController extends BaseController{ +public class UsersController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(UsersController.class); @@ -52,20 +55,20 @@ public class UsersController extends BaseController{ /** * create user - * - * @param loginUser login user - * @param userName user name + * + * @param loginUser login user + * @param userName user name * @param userPassword user password - * @param email email - * @param tenantId tenant id - * @param phone phone - * @param queue queue + * @param email email + * @param tenantId tenant id + * @param phone phone + * @param queue queue * @return create result code */ - @ApiOperation(value = "createUser", notes= "CREATE_USER_NOTES") + @ApiOperation(value = "createUser", notes = "CREATE_USER_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), + @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"), + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"), @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), @@ -73,81 +76,73 @@ public class UsersController extends BaseController{ }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) + @ApiException(CREATE_USER_ERROR) public Result createUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "userPassword") String userPassword, - @RequestParam(value = "tenantId") int tenantId, - @RequestParam(value = "queue",required = false,defaultValue = "") String queue, - @RequestParam(value = "email") String email, - @RequestParam(value = "phone", required = false) String phone) { + @RequestParam(value = "userName") String userName, + @RequestParam(value = "userPassword") String userPassword, + @RequestParam(value = "tenantId") int tenantId, + @RequestParam(value = "queue", required = false, defaultValue = "") String queue, + @RequestParam(value = "email") String email, + @RequestParam(value = "phone", required = false) String phone) throws Exception { logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", - loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); + loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue); - try { - Map result = usersService.createUser(loginUser, userName, userPassword,email,tenantId, phone,queue); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.CREATE_USER_ERROR.getMsg(),e); - return error(Status.CREATE_USER_ERROR.getCode(), Status.CREATE_USER_ERROR.getMsg()); - } + Map result = usersService.createUser(loginUser, userName, userPassword, email, tenantId, phone, queue); + return returnDataList(result); } /** * query user list paging * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search avlue - * @param pageSize page size + * @param pageSize page size * @return user list page */ - @ApiOperation(value = "queryUserList", notes= "QUERY_USER_LIST_NOTES") + @ApiOperation(value = "queryUserList", notes = "QUERY_USER_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type ="String"), - @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String") + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type = "String"), + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String") }) - @GetMapping(value="/list-paging") + @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_USER_LIST_PAGING_ERROR) public Result queryUserList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize){ + @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}", - loginUser.getUserName(),pageNo,searchVal,pageSize); - try{ - Map result = checkPageParams(pageNo, pageSize); - if(result.get(Constants.STATUS) != Status.SUCCESS){ - return returnDataListPaging(result); - } - searchVal = ParameterUtils.handleEscapes(searchVal); - result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize); + loginUser.getUserName(), pageNo, searchVal, pageSize); + Map result = checkPageParams(pageNo, pageSize); + if (result.get(Constants.STATUS) != Status.SUCCESS) { return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_USER_LIST_PAGING_ERROR.getMsg(),e); - return error(Status.QUERY_USER_LIST_PAGING_ERROR.getCode(), Status.QUERY_USER_LIST_PAGING_ERROR.getMsg()); } + searchVal = ParameterUtils.handleEscapes(searchVal); + result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize); + return returnDataListPaging(result); } /** * update user * - * @param loginUser login user - * @param id user id - * @param userName user name + * @param loginUser login user + * @param id user id + * @param userName user name * @param userPassword user password - * @param email email - * @param tenantId tennat id - * @param phone phone - * @param queue queue + * @param email email + * @param tenantId tennat id + * @param phone phone + * @param queue queue * @return update result code */ - @ApiOperation(value = "updateUser", notes= "UPDATE_USER_NOTES") + @ApiOperation(value = "updateUser", notes = "UPDATE_USER_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String"), - @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type ="String"), + @ApiImplicitParam(name = "id", value = "USER_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"), + @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"), @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), @@ -155,103 +150,88 @@ public class UsersController extends BaseController{ }) @PostMapping(value = "/update") @ResponseStatus(HttpStatus.OK) + @ApiException(UPDATE_USER_ERROR) public Result updateUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id, - @RequestParam(value = "userName") String userName, - @RequestParam(value = "userPassword") String userPassword, - @RequestParam(value = "queue",required = false,defaultValue = "") String queue, - @RequestParam(value = "email") String email, - @RequestParam(value = "tenantId") int tenantId, - @RequestParam(value = "phone", required = false) String phone) { + @RequestParam(value = "id") int id, + @RequestParam(value = "userName") String userName, + @RequestParam(value = "userPassword") String userPassword, + @RequestParam(value = "queue", required = false, defaultValue = "") String queue, + @RequestParam(value = "email") String email, + @RequestParam(value = "tenantId") int tenantId, + @RequestParam(value = "phone", required = false) String phone) throws Exception { logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}", - loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone,queue); - try { - Map result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.UPDATE_USER_ERROR.getMsg(),e); - return error(Status.UPDATE_USER_ERROR.getCode(), Status.UPDATE_USER_ERROR.getMsg()); - } + loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue); + Map result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue); + return returnDataList(result); } /** * delete user by id + * * @param loginUser login user - * @param id user id + * @param id user id * @return delete result code */ - @ApiOperation(value = "delUserById", notes= "DELETE_USER_BY_ID_NOTES") + @ApiOperation(value = "delUserById", notes = "DELETE_USER_BY_ID_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "id", value = "USER_ID",dataType = "Int", example = "100") + @ApiImplicitParam(name = "id", value = "USER_ID", dataType = "Int", example = "100") }) @PostMapping(value = "/delete") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_USER_BY_ID_ERROR) public Result delUserById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id") int id) { + @RequestParam(value = "id") int id) throws Exception { logger.info("login user {}, delete user, userId: {},", loginUser.getUserName(), id); - try { - Map result = usersService.deleteUserById(loginUser, id); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.DELETE_USER_BY_ID_ERROR.getMsg(),e); - return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); - } + Map result = usersService.deleteUserById(loginUser, id); + return returnDataList(result); } /** * grant project * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param projectIds project id array * @return grant result code */ - @ApiOperation(value = "grantProject", notes= "GRANT_PROJECT_NOTES") + @ApiOperation(value = "grantProject", notes = "GRANT_PROJECT_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS",type = "String") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "projectIds", value = "PROJECT_IDS", type = "String") }) @PostMapping(value = "/grant-project") @ResponseStatus(HttpStatus.OK) + @ApiException(GRANT_PROJECT_ERROR) public Result grantProject(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "projectIds") String projectIds) { - logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId,projectIds); - try { - Map result = usersService.grantProject(loginUser, userId, projectIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GRANT_PROJECT_ERROR.getMsg(),e); - return error(Status.GRANT_PROJECT_ERROR.getCode(), Status.GRANT_PROJECT_ERROR.getMsg()); - } + @RequestParam(value = "userId") int userId, + @RequestParam(value = "projectIds") String projectIds) { + logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId, projectIds); + Map result = usersService.grantProject(loginUser, userId, projectIds); + return returnDataList(result); } /** * grant resource * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param resourceIds resource id array * @return grant result code */ - @ApiOperation(value = "grantResource", notes= "GRANT_RESOURCE_NOTES") + @ApiOperation(value = "grantResource", notes = "GRANT_RESOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS",type = "String") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "resourceIds", value = "RESOURCE_IDS", type = "String") }) @PostMapping(value = "/grant-file") @ResponseStatus(HttpStatus.OK) + @ApiException(GRANT_RESOURCE_ERROR) public Result grantResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "resourceIds") String resourceIds) { - logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,resourceIds); - try { - Map result = usersService.grantResources(loginUser, userId, resourceIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GRANT_RESOURCE_ERROR.getMsg(),e); - return error(Status.GRANT_RESOURCE_ERROR.getCode(), Status.GRANT_RESOURCE_ERROR.getMsg()); - } + @RequestParam(value = "userId") int userId, + @RequestParam(value = "resourceIds") String resourceIds) { + logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId, resourceIds); + Map result = usersService.grantResources(loginUser, userId, resourceIds); + return returnDataList(result); } @@ -259,58 +239,49 @@ public class UsersController extends BaseController{ * grant udf function * * @param loginUser login user - * @param userId user id - * @param udfIds udf id array + * @param userId user id + * @param udfIds udf id array * @return grant result code */ - @ApiOperation(value = "grantUDFFunc", notes= "GRANT_UDF_FUNC_NOTES") + @ApiOperation(value = "grantUDFFunc", notes = "GRANT_UDF_FUNC_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "udfIds", value = "UDF_IDS",type = "String") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "udfIds", value = "UDF_IDS", type = "String") }) @PostMapping(value = "/grant-udf-func") @ResponseStatus(HttpStatus.OK) + @ApiException(GRANT_UDF_FUNCTION_ERROR) public Result grantUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "udfIds") String udfIds) { - logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,udfIds); - try { - Map result = usersService.grantUDFFunction(loginUser, userId, udfIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GRANT_UDF_FUNCTION_ERROR.getMsg(),e); - return error(Status.GRANT_UDF_FUNCTION_ERROR.getCode(), Status.GRANT_UDF_FUNCTION_ERROR.getMsg()); - } + @RequestParam(value = "userId") int userId, + @RequestParam(value = "udfIds") String udfIds) { + logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId, udfIds); + Map result = usersService.grantUDFFunction(loginUser, userId, udfIds); + return returnDataList(result); } - /** * grant datasource * - * @param loginUser login user - * @param userId user id - * @param datasourceIds data source id array + * @param loginUser login user + * @param userId user id + * @param datasourceIds data source id array * @return grant result code */ - @ApiOperation(value = "grantDataSource", notes= "GRANT_DATASOURCE_NOTES") + @ApiOperation(value = "grantDataSource", notes = "GRANT_DATASOURCE_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userId", value = "USER_ID",dataType = "Int", example = "100"), - @ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS",type = "String") + @ApiImplicitParam(name = "userId", value = "USER_ID", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "datasourceIds", value = "DATASOURCE_IDS", type = "String") }) @PostMapping(value = "/grant-datasource") @ResponseStatus(HttpStatus.OK) + @ApiException(GRANT_DATASOURCE_ERROR) public Result grantDataSource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "userId") int userId, - @RequestParam(value = "datasourceIds") String datasourceIds) { - logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(),userId,datasourceIds); - try { - Map result = usersService.grantDataSource(loginUser, userId, datasourceIds); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GRANT_DATASOURCE_ERROR.getMsg(),e); - return error(Status.GRANT_DATASOURCE_ERROR.getCode(), Status.GRANT_DATASOURCE_ERROR.getMsg()); - } + @RequestParam(value = "userId") int userId, + @RequestParam(value = "datasourceIds") String datasourceIds) { + logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId, datasourceIds); + Map result = usersService.grantDataSource(loginUser, userId, datasourceIds); + return returnDataList(result); } @@ -320,18 +291,14 @@ public class UsersController extends BaseController{ * @param loginUser login user * @return user info */ - @ApiOperation(value = "getUserInfo", notes= "GET_USER_INFO_NOTES") - @GetMapping(value="/get-user-info") + @ApiOperation(value = "getUserInfo", notes = "GET_USER_INFO_NOTES") + @GetMapping(value = "/get-user-info") @ResponseStatus(HttpStatus.OK) - public Result getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + @ApiException(GET_USER_INFO_ERROR) + public Result getUserInfo(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {},get user info", loginUser.getUserName()); - try{ - Map result = usersService.getUserInfo(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.GET_USER_INFO_ERROR.getMsg(),e); - return error(Status.GET_USER_INFO_ERROR.getCode(), Status.GET_USER_INFO_ERROR.getMsg()); - } + Map result = usersService.getUserInfo(loginUser); + return returnDataList(result); } /** @@ -340,18 +307,14 @@ public class UsersController extends BaseController{ * @param loginUser login user * @return user list */ - @ApiOperation(value = "listUser", notes= "LIST_USER_NOTES") - @GetMapping(value="/list") + @ApiOperation(value = "listUser", notes = "LIST_USER_NOTES") + @GetMapping(value = "/list") @ResponseStatus(HttpStatus.OK) - public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + @ApiException(USER_LIST_ERROR) + public Result listUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, user list", loginUser.getUserName()); - try{ - Map result = usersService.queryAllGeneralUsers(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.USER_LIST_ERROR.getMsg(),e); - return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); - } + Map result = usersService.queryAllGeneralUsers(loginUser); + return returnDataList(result); } @@ -361,17 +324,13 @@ public class UsersController extends BaseController{ * @param loginUser login user * @return user list */ - @GetMapping(value="/list-all") + @GetMapping(value = "/list-all") @ResponseStatus(HttpStatus.OK) - public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ + @ApiException(USER_LIST_ERROR) + public Result listAll(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { logger.info("login user {}, user list", loginUser.getUserName()); - try{ - Map result = usersService.queryUserList(loginUser); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.USER_LIST_ERROR.getMsg(),e); - return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); - } + Map result = usersService.queryUserList(loginUser); + return returnDataList(result); } @@ -379,79 +338,71 @@ public class UsersController extends BaseController{ * verify username * * @param loginUser login user - * @param userName user name + * @param userName user name * @return true if user name not exists, otherwise return false */ - @ApiOperation(value = "verifyUserName", notes= "VERIFY_USER_NAME_NOTES") + @ApiOperation(value = "verifyUserName", notes = "VERIFY_USER_NAME_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "userName", value = "USER_NAME",type = "String") + @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String") }) @GetMapping(value = "/verify-user-name") @ResponseStatus(HttpStatus.OK) + @ApiException(VERIFY_USERNAME_ERROR) public Result verifyUserName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value ="userName") String userName + @RequestParam(value = "userName") String userName ) { - try{ - - logger.info("login user {}, verfiy user name: {}", - loginUser.getUserName(),userName); - return usersService.verifyUserName(userName); - }catch (Exception e){ - logger.error(Status.VERIFY_USERNAME_ERROR.getMsg(),e); - return error(Status.VERIFY_USERNAME_ERROR.getCode(), Status.VERIFY_USERNAME_ERROR.getMsg()); - } + logger.info("login user {}, verfiy user name: {}", + loginUser.getUserName(), userName); + return usersService.verifyUserName(userName); } /** * unauthorized user * - * @param loginUser login user + * @param loginUser login user * @param alertgroupId alert group id * @return unauthorize result code */ - @ApiOperation(value = "unauthorizedUser", notes= "UNAUTHORIZED_USER_NOTES") + @ApiOperation(value = "unauthorizedUser", notes = "UNAUTHORIZED_USER_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") + @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID", type = "String") }) @GetMapping(value = "/unauth-user") @ResponseStatus(HttpStatus.OK) + @ApiException(UNAUTHORIZED_USER_ERROR) public Result unauthorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("alertgroupId") Integer alertgroupId) { - try{ - logger.info("unauthorized user, login user:{}, alert group id:{}", - loginUser.getUserName(), alertgroupId); - Map result = usersService.unauthorizedUser(loginUser, alertgroupId); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.UNAUTHORIZED_USER_ERROR.getMsg(),e); - return error(Status.UNAUTHORIZED_USER_ERROR.getCode(), Status.UNAUTHORIZED_USER_ERROR.getMsg()); - } + logger.info("unauthorized user, login user:{}, alert group id:{}", + loginUser.getUserName(), alertgroupId); + Map result = usersService.unauthorizedUser(loginUser, alertgroupId); + return returnDataList(result); } /** * authorized user * - * @param loginUser login user + * @param loginUser login user * @param alertgroupId alert group id * @return authorized result code */ - @ApiOperation(value = "authorizedUser", notes= "AUTHORIZED_USER_NOTES") + @ApiOperation(value = "authorizedUser", notes = "AUTHORIZED_USER_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID",type = "String") + @ApiImplicitParam(name = "alertgroupId", value = "ALERT_GROUP_ID", type = "String") }) @GetMapping(value = "/authed-user") @ResponseStatus(HttpStatus.OK) + @ApiException(AUTHORIZED_USER_ERROR) public Result authorizedUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("alertgroupId") Integer alertgroupId) { - try{ + try { logger.info("authorized user , login user:{}, alert group id:{}", loginUser.getUserName(), alertgroupId); Map result = usersService.authorizedUser(loginUser, alertgroupId); return returnDataList(result); - }catch (Exception e){ - logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(),e); + } catch (Exception e) { + logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e); return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java index 8ec1335442..429553f4f1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkerGroupController.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.api.controller; -import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.WorkerGroupService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -36,17 +36,18 @@ import springfox.documentation.annotations.ApiIgnore; import java.util.Map; +import static org.apache.dolphinscheduler.api.enums.Status.*; + /** * worker group controller */ @Api(tags = "WORKER_GROUP_TAG", position = 1) @RestController @RequestMapping("/worker-group") -public class WorkerGroupController extends BaseController{ +public class WorkerGroupController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(WorkerGroupController.class); - @Autowired WorkerGroupService workerGroupService; @@ -55,69 +56,61 @@ public class WorkerGroupController extends BaseController{ * create or update a worker group * * @param loginUser login user - * @param id worker group id - * @param name worker group name - * @param ipList ip list + * @param id worker group id + * @param name worker group name + * @param ipList ip list * @return create or update result code */ - @ApiOperation(value = "saveWorkerGroup", notes= "CREATE_WORKER_GROUP_NOTES") + @ApiOperation(value = "saveWorkerGroup", notes = "CREATE_WORKER_GROUP_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"), - @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String") + @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType = "String") }) @PostMapping(value = "/save") @ResponseStatus(HttpStatus.OK) + @ApiException(SAVE_ERROR) public Result saveWorkerGroup(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "id", required = false, defaultValue = "0") int id, - @RequestParam(value = "name") String name, - @RequestParam(value = "ipList") String ipList - ) { + @RequestParam(value = "id", required = false, defaultValue = "0") int id, + @RequestParam(value = "name") String name, + @RequestParam(value = "ipList") String ipList + ) { logger.info("save worker group: login user {}, id:{}, name: {}, ipList: {} ", loginUser.getUserName(), id, name, ipList); - try { - Map result = workerGroupService.saveWorkerGroup(loginUser,id, name, ipList); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.SAVE_ERROR.getMsg(),e); - return error(Status.SAVE_ERROR.getCode(), Status.SAVE_ERROR.getMsg()); - } + Map result = workerGroupService.saveWorkerGroup(loginUser, id, name, ipList); + return returnDataList(result); } /** * query worker groups paging * * @param loginUser login user - * @param pageNo page number + * @param pageNo page number * @param searchVal search value - * @param pageSize page size + * @param pageSize page size * @return worker group list page */ - @ApiOperation(value = "queryAllWorkerGroupsPaging", notes= "QUERY_WORKER_GROUP_PAGING_NOTES") + @ApiOperation(value = "queryAllWorkerGroupsPaging", notes = "QUERY_WORKER_GROUP_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", dataType = "Int", example = "10", defaultValue = "0"), - @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType ="String"), - @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType ="String") + @ApiImplicitParam(name = "name", value = "WORKER_GROUP_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "ipList", value = "WORKER_IP_LIST", required = true, dataType = "String") }) @GetMapping(value = "/list-paging") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_WORKER_GROUP_FAIL) public Result queryAllWorkerGroupsPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageSize") Integer pageSize ) { logger.info("query all worker group paging: login user {}, pageNo:{}, pageSize:{}, searchVal:{}", - loginUser.getUserName() , pageNo, pageSize, searchVal); - - try { - searchVal = ParameterUtils.handleEscapes(searchVal); - Map result = workerGroupService.queryAllGroupPaging(loginUser,pageNo, pageSize, searchVal); - return returnDataListPaging(result); - }catch (Exception e){ - logger.error(Status.QUERY_WORKER_GROUP_FAIL.getMsg(),e); - return error(Status.QUERY_WORKER_GROUP_FAIL.getCode(), Status.QUERY_WORKER_GROUP_FAIL.getMsg()); - } + loginUser.getUserName(), pageNo, pageSize, searchVal); + + searchVal = ParameterUtils.handleEscapes(searchVal); + Map result = workerGroupService.queryAllGroupPaging(loginUser, pageNo, pageSize, searchVal); + return returnDataListPaging(result); } /** @@ -126,48 +119,41 @@ public class WorkerGroupController extends BaseController{ * @param loginUser login user * @return all worker group list */ - @ApiOperation(value = "queryAllWorkerGroups", notes= "QUERY_WORKER_GROUP_LIST_NOTES") + @ApiOperation(value = "queryAllWorkerGroups", notes = "QUERY_WORKER_GROUP_LIST_NOTES") @GetMapping(value = "/all-groups") @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_WORKER_GROUP_FAIL) public Result queryAllWorkerGroups(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser ) { logger.info("query all worker group: login user {}", - loginUser.getUserName() ); - - try { - Map result = workerGroupService.queryAllGroup(); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.QUERY_WORKER_GROUP_FAIL.getMsg(),e); - return error(Status.QUERY_WORKER_GROUP_FAIL.getCode(), Status.QUERY_WORKER_GROUP_FAIL.getMsg()); - } + loginUser.getUserName()); + + Map result = workerGroupService.queryAllGroup(); + return returnDataList(result); } /** * delete worker group by id + * * @param loginUser login user - * @param id group id + * @param id group id * @return delete result code */ - @ApiOperation(value = "deleteById", notes= "DELETE_WORKER_GROUP_BY_ID_NOTES") + @ApiOperation(value = "deleteById", notes = "DELETE_WORKER_GROUP_BY_ID_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "WORKER_GROUP_ID", required = true, dataType = "Int", example = "10"), }) @GetMapping(value = "/delete-by-id") @ResponseStatus(HttpStatus.OK) + @ApiException(DELETE_WORKER_GROUP_FAIL) public Result deleteById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("id") Integer id + @RequestParam("id") Integer id ) { logger.info("delete worker group: login user {}, id:{} ", - loginUser.getUserName() , id); - - try { - Map result = workerGroupService.deleteWorkerGroupById(id); - return returnDataList(result); - }catch (Exception e){ - logger.error(Status.DELETE_WORKER_GROUP_FAIL.getMsg(),e); - return error(Status.DELETE_WORKER_GROUP_FAIL.getCode(), Status.DELETE_WORKER_GROUP_FAIL.getMsg()); - } + loginUser.getUserName(), id); + + Map result = workerGroupService.deleteWorkerGroupById(id); + return returnDataList(result); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java index f14d8df097..61e3752c69 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/ProcessMeta.java @@ -96,19 +96,11 @@ public class ProcessMeta { */ private String scheduleProcessInstancePriority; - /** - * worker group id - */ - private Integer scheduleWorkerGroupId; - /** * worker group name */ private String scheduleWorkerGroupName; - public ProcessMeta() { - } - public String getProjectName() { return projectName; } @@ -229,14 +221,6 @@ public class ProcessMeta { this.scheduleProcessInstancePriority = scheduleProcessInstancePriority; } - public Integer getScheduleWorkerGroupId() { - return scheduleWorkerGroupId; - } - - public void setScheduleWorkerGroupId(int scheduleWorkerGroupId) { - this.scheduleWorkerGroupId = scheduleWorkerGroupId; - } - public String getScheduleWorkerGroupName() { return scheduleWorkerGroupName; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java index e7b182076d..6b0391f111 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java @@ -43,36 +43,36 @@ public class TaskCountDto { } private void countTaskDtos(List taskInstanceStateCounts){ - int submitted_success = 0; - int running_exeution = 0; - int ready_pause = 0; + int submittedSuccess = 0; + int runningExeution = 0; + int readyPause = 0; int pause = 0; - int ready_stop = 0; + int readyStop = 0; int stop = 0; int failure = 0; int success = 0; - int need_fault_tolerance = 0; + int needFaultTolerance = 0; int kill = 0; - int waitting_thread = 0; + int waittingThread = 0; for(ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts){ ExecutionStatus status = taskInstanceStateCount.getExecutionStatus(); totalCount += taskInstanceStateCount.getCount(); switch (status){ case SUBMITTED_SUCCESS: - submitted_success += taskInstanceStateCount.getCount(); + submittedSuccess += taskInstanceStateCount.getCount(); break; case RUNNING_EXEUTION: - running_exeution += taskInstanceStateCount.getCount(); + runningExeution += taskInstanceStateCount.getCount(); break; case READY_PAUSE: - ready_pause += taskInstanceStateCount.getCount(); + readyPause += taskInstanceStateCount.getCount(); break; case PAUSE: pause += taskInstanceStateCount.getCount(); break; case READY_STOP: - ready_stop += taskInstanceStateCount.getCount(); + readyStop += taskInstanceStateCount.getCount(); break; case STOP: stop += taskInstanceStateCount.getCount(); @@ -84,13 +84,13 @@ public class TaskCountDto { success += taskInstanceStateCount.getCount(); break; case NEED_FAULT_TOLERANCE: - need_fault_tolerance += taskInstanceStateCount.getCount(); + needFaultTolerance += taskInstanceStateCount.getCount(); break; case KILL: kill += taskInstanceStateCount.getCount(); break; case WAITTING_THREAD: - waitting_thread += taskInstanceStateCount.getCount(); + waittingThread += taskInstanceStateCount.getCount(); break; default: @@ -98,17 +98,17 @@ public class TaskCountDto { } } this.taskCountDtos = new ArrayList<>(); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submitted_success)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXEUTION, running_exeution)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, ready_pause)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submittedSuccess)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXEUTION, runningExeution)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, readyPause)); this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, ready_stop)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, readyStop)); this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.STOP, stop)); this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.FAILURE, failure)); this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUCCESS, success)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, need_fault_tolerance)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, needFaultTolerance)); this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.KILL, kill)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waitting_thread)); + this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waittingThread)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/Directory.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/Directory.java new file mode 100644 index 0000000000..289d5060bf --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/Directory.java @@ -0,0 +1,29 @@ +package org.apache.dolphinscheduler.api.dto.resources; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * directory + */ +public class Directory extends ResourceComponent{ + + @Override + public boolean isDirctory() { + return true; + } + +} diff --git a/dolphinscheduler-ui/src/sass/common/_mixin.scss b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FileLeaf.java similarity index 85% rename from dolphinscheduler-ui/src/sass/common/_mixin.scss rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FileLeaf.java index c6a5afeef5..b9b91821f4 100644 --- a/dolphinscheduler-ui/src/sass/common/_mixin.scss +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/FileLeaf.java @@ -1,3 +1,5 @@ +package org.apache.dolphinscheduler.api.dto.resources; + /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -13,4 +15,10 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - */ \ No newline at end of file + */ +/** + * file leaf + */ +public class FileLeaf extends ResourceComponent{ + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java new file mode 100644 index 0000000000..fb0da702b3 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/ResourceComponent.java @@ -0,0 +1,193 @@ +package org.apache.dolphinscheduler.api.dto.resources; + +import com.alibaba.fastjson.annotation.JSONField; +import com.alibaba.fastjson.annotation.JSONType; +import org.apache.dolphinscheduler.common.enums.ResourceType; + +import java.util.ArrayList; +import java.util.List; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * resource component + */ +@JSONType(orders={"id","pid","name","fullName","description","isDirctory","children","type"}) +public abstract class ResourceComponent { + public ResourceComponent() { + } + + public ResourceComponent(int id, int pid, String name, String fullName, String description, boolean isDirctory) { + this.id = id; + this.pid = pid; + this.name = name; + this.fullName = fullName; + this.description = description; + this.isDirctory = isDirctory; + int directoryFlag = isDirctory ? 1:0; + this.idValue = String.format("%s_%s",id,directoryFlag); + } + + + /** + * id + */ + @JSONField(ordinal = 1) + protected int id; + /** + * parent id + */ + @JSONField(ordinal = 2) + protected int pid; + /** + * name + */ + @JSONField(ordinal = 3) + protected String name; + /** + * current directory + */ + protected String currentDir; + /** + * full name + */ + @JSONField(ordinal = 4) + protected String fullName; + /** + * description + */ + @JSONField(ordinal = 5) + protected String description; + /** + * is directory + */ + @JSONField(ordinal = 6) + protected boolean isDirctory; + /** + * id value + */ + @JSONField(ordinal = 7) + protected String idValue; + /** + * resoruce type + */ + @JSONField(ordinal = 8) + protected ResourceType type; + /** + * children + */ + @JSONField(ordinal = 8) + protected List children = new ArrayList<>(); + + /** + * add resource component + * @param resourceComponent resource component + */ + public void add(ResourceComponent resourceComponent){ + children.add(resourceComponent); + } + + public String getName(){ + return this.name; + } + + public String getDescription(){ + return this.description; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getPid() { + return pid; + } + + public void setPid(int pid) { + this.pid = pid; + } + + public void setName(String name) { + this.name = name; + } + + public String getFullName() { + return fullName; + } + + public void setFullName(String fullName) { + this.fullName = fullName; + } + + public void setDescription(String description) { + this.description = description; + } + + public boolean isDirctory() { + return isDirctory; + } + + public void setDirctory(boolean dirctory) { + isDirctory = dirctory; + } + + public String getIdValue() { + return idValue; + } + + public void setIdValue(int id,boolean isDirctory) { + int directoryFlag = isDirctory ? 1:0; + this.idValue = String.format("%s_%s",id,directoryFlag); + } + + public ResourceType getType() { + return type; + } + + public void setType(ResourceType type) { + this.type = type; + } + + public List getChildren() { + return children; + } + + public void setChildren(List children) { + this.children = children; + } + + @Override + public String toString() { + return "ResourceComponent{" + + "id=" + id + + ", pid=" + pid + + ", name='" + name + '\'' + + ", currentDir='" + currentDir + '\'' + + ", fullName='" + fullName + '\'' + + ", description='" + description + '\'' + + ", isDirctory=" + isDirctory + + ", idValue='" + idValue + '\'' + + ", type=" + type + + ", children=" + children + + '}'; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/IFilter.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/IFilter.java new file mode 100644 index 0000000000..ce6ce3a011 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/IFilter.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto.resources.filter; + +import org.apache.dolphinscheduler.dao.entity.Resource; + +import java.util.List; + +/** + * interface filter + */ +public interface IFilter { + List filter(); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilter.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilter.java new file mode 100644 index 0000000000..c918a160af --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilter.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto.resources.filter; + +import org.apache.dolphinscheduler.dao.entity.Resource; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * resource filter + */ +public class ResourceFilter implements IFilter { + /** + * resource suffix + */ + private String suffix; + /** + * resource list + */ + private List resourceList; + + /** + * parent list + */ + //Set parentList = new HashSet<>(); + + /** + * constructor + * @param suffix resource suffix + * @param resourceList resource list + */ + public ResourceFilter(String suffix, List resourceList) { + this.suffix = suffix; + this.resourceList = resourceList; + } + + /** + * file filter + * @return file filtered by suffix + */ + public Set fileFilter(){ + Set resources = resourceList.stream().filter(t -> { + String alias = t.getAlias(); + return alias.endsWith(suffix); + }).collect(Collectors.toSet()); + return resources; + } + + /** + * list all parent dir + * @return parent resource dir set + */ + Set listAllParent(){ + Set parentList = new HashSet<>(); + Set filterFileList = fileFilter(); + for(Resource file:filterFileList){ + parentList.add(file); + setAllParent(file,parentList); + } + return parentList; + + } + + /** + * list all parent dir + * @param resource resource + * @return parent resource dir set + */ + private void setAllParent(Resource resource,Set parentList){ + for (Resource resourceTemp : resourceList) { + if (resourceTemp.getId() == resource.getPid()) { + parentList.add(resourceTemp); + setAllParent(resourceTemp,parentList); + } + } + } + + @Override + public List filter() { + return new ArrayList<>(listAllParent()); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java new file mode 100644 index 0000000000..5cf118800a --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitor.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto.resources.visitor; + + +import org.apache.dolphinscheduler.api.dto.resources.Directory; +import org.apache.dolphinscheduler.api.dto.resources.FileLeaf; +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.dao.entity.Resource; + +import java.util.ArrayList; +import java.util.List; + +/** + * resource tree visitor + */ +public class ResourceTreeVisitor implements Visitor{ + + /** + * resource list + */ + private List resourceList; + + public ResourceTreeVisitor() { + } + + /** + * constructor + * @param resourceList resource list + */ + public ResourceTreeVisitor(List resourceList) { + this.resourceList = resourceList; + } + + /** + * visit + * @return resoruce component + */ + public ResourceComponent visit() { + ResourceComponent rootDirectory = new Directory(); + for (Resource resource : resourceList) { + // judge whether is root node + if (rootNode(resource)){ + ResourceComponent tempResourceComponent = getResourceComponent(resource); + rootDirectory.add(tempResourceComponent); + tempResourceComponent.setChildren(setChildren(tempResourceComponent.getId(),resourceList)); + } + } + return rootDirectory; + } + + /** + * set children + * @param id id + * @param list resource list + * @return resource component list + */ + public static List setChildren(int id, List list ){ + List childList = new ArrayList<>(); + for (Resource resource : list) { + if (id == resource.getPid()){ + ResourceComponent tempResourceComponent = getResourceComponent(resource); + childList.add(tempResourceComponent); + } + } + for (ResourceComponent resourceComponent : childList) { + resourceComponent.setChildren(setChildren(resourceComponent.getId(),list)); + } + if (childList.size()==0){ + return new ArrayList<>(); + } + return childList; + } + + /** + * Determine whether it is the root node + * @param resource resource + * @return true if it is the root node + */ + public boolean rootNode(Resource resource) { + + boolean isRootNode = true; + if(resource.getPid() != -1 ){ + for (Resource parent : resourceList) { + if (resource.getPid() == parent.getId()) { + isRootNode = false; + break; + } + } + } + return isRootNode; + } + + /** + * get resource component by resource + * @param resource resource + * @return resource component + */ + private static ResourceComponent getResourceComponent(Resource resource) { + ResourceComponent tempResourceComponent; + if(resource.isDirectory()){ + tempResourceComponent = new Directory(); + }else{ + tempResourceComponent = new FileLeaf(); + } + tempResourceComponent.setName(resource.getAlias()); + tempResourceComponent.setFullName(resource.getFullName().replaceFirst("/","")); + tempResourceComponent.setId(resource.getId()); + tempResourceComponent.setPid(resource.getPid()); + tempResourceComponent.setIdValue(resource.getId(),resource.isDirectory()); + tempResourceComponent.setDescription(resource.getDescription()); + tempResourceComponent.setType(resource.getType()); + return tempResourceComponent; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/Visitor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/Visitor.java new file mode 100644 index 0000000000..3dfce7c7c1 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/resources/visitor/Visitor.java @@ -0,0 +1,31 @@ +package org.apache.dolphinscheduler.api.dto.resources.visitor; + + +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Visitor + */ +public interface Visitor { + /** + * visit + * @return resource component + */ + ResourceComponent visit(); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 9955463f8e..8c52dd4d50 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -27,6 +27,8 @@ public enum Status { SUCCESS(0, "success", "成功"), + INTERNAL_SERVER_ERROR_ARGS(10000, "Internal Server Error: {0}", "服务端异常: {0}"), + REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"), TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"), USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"), @@ -97,7 +99,7 @@ public enum Status { VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error", "UDF函数名称验证错误"), DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error", "删除UDF函数错误"), AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error", "授权资源文件错误"), - UNAUTHORIZED_FILE_RESOURCE_ERROR( 10073,"unauthorized file resource error", "查询未授权资源错误"), + AUTHORIZE_RESOURCE_TREE( 10073,"authorize resource tree display error","授权资源目录树错误"), UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error", "查询未授权UDF函数错误"), AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error", "授权UDF函数错误"), CREATE_SCHEDULE_ERROR(10076,"create schedule error", "创建调度配置错误"), @@ -134,7 +136,7 @@ public enum Status { UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error", "更新工作流定义错误"), RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error", "上线工作流错误"), QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query datail of process definition error", "查询工作流详细信息错误"), - QUERY_PROCCESS_DEFINITION_LIST(10110,"query proccess definition list", "查询工作流列表错误"), + QUERY_PROCESS_DEFINITION_LIST(10110,"query process definition list", "查询工作流列表错误"), ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error", "查询工作流树形图数据错误"), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error", "查询工作流定义节点信息错误"), QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error", "分页查询工作流实例列表错误"), @@ -146,7 +148,7 @@ public enum Status { QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"), QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error", "查询工作流自定义变量信息错误"), ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"), - QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query proccess definition list paging error", "分页查询工作流定义列表错误"), + QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error", "分页查询工作流定义列表错误"), SIGN_OUT_ERROR(10123,"sign out error", "退出错误"), TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code has already exists", "租户编码已存在"), IP_IS_EMPTY(10125,"ip is empty", "IP地址不能为空"), @@ -166,15 +168,13 @@ public enum Status { PREVIEW_SCHEDULE_ERROR(10139,"preview schedule error", "预览调度配置错误"), PARSE_TO_CRON_EXPRESSION_ERROR(10140,"parse cron to cron expression error", "解析调度表达式错误"), SCHEDULE_START_TIME_END_TIME_SAME(10141,"The start time must not be the same as the end", "开始时间不能和结束时间一样"), - DELETE_TENANT_BY_ID_FAIL(100142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), - DELETE_TENANT_BY_ID_FAIL_DEFINES(100143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), - DELETE_TENANT_BY_ID_FAIL_USERS(100144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), - - DELETE_WORKER_GROUP_BY_ID_FAIL(100145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), - - QUERY_WORKER_GROUP_FAIL(100146,"query worker group fail ", "查询worker分组失败"), - DELETE_WORKER_GROUP_FAIL(100147,"delete worker group fail ", "删除worker分组失败"), - + DELETE_TENANT_BY_ID_FAIL(10142,"delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"), + DELETE_TENANT_BY_ID_FAIL_DEFINES(10143,"delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"), + DELETE_TENANT_BY_ID_FAIL_USERS(10144,"delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"), + DELETE_WORKER_GROUP_BY_ID_FAIL(10145,"delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"), + QUERY_WORKER_GROUP_FAIL(10146,"query worker group fail ", "查询worker分组失败"), + DELETE_WORKER_GROUP_FAIL(10147,"delete worker group fail ", "删除worker分组失败"), + COPY_PROCESS_DEFINITION_ERROR(10148,"copy process definition error", "复制工作流错误"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"), @@ -184,10 +184,13 @@ public enum Status { RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"), RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"), UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"), - HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), - RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), - RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"), - + HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"), + RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"), + RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"), + UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}","udf函数绑定了资源文件[{0}]"), + RESOURCE_IS_USED(20014, "resource file is used by process definition","资源文件被上线的流程定义使用了"), + PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist","父资源文件不存在"), + RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource","请检查任务节点并移除无权限或者已删除的资源"), USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiException.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiException.java new file mode 100644 index 0000000000..3c094f5294 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiException.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.exceptions; + +import org.apache.dolphinscheduler.api.enums.Status; + +import java.lang.annotation.Retention; +import java.lang.annotation.Target; + +import static java.lang.annotation.ElementType.METHOD; +import static java.lang.annotation.RetentionPolicy.RUNTIME; + +/** + * controller exception annotation + */ +@Retention(RUNTIME) +@Target(METHOD) +public @interface ApiException { + Status value(); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java new file mode 100644 index 0000000000..90d1afea49 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.exceptions; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.ControllerAdvice; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.method.HandlerMethod; + +/** + * Exception Handler + */ +@ControllerAdvice +@ResponseBody +public class ApiExceptionHandler { + + private static final Logger logger = LoggerFactory.getLogger(ApiExceptionHandler.class); + + @ExceptionHandler(Exception.class) + public Result exceptionHandler(Exception e, HandlerMethod hm) { + ApiException ce = hm.getMethodAnnotation(ApiException.class); + if (ce == null) { + logger.error(e.getMessage(), e); + return Result.errorWithArgs(Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage()); + } + Status st = ce.value(); + logger.error(st.getMsg(), e); + return Result.error(st); + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java new file mode 100644 index 0000000000..5669e6c3df --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ServiceException.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.exceptions; + +import org.apache.dolphinscheduler.api.enums.Status; + + +/** + * service exception + */ +public class ServiceException extends RuntimeException { + + /** + * code + */ + private Integer code; + + public ServiceException() { + } + + public ServiceException(Status status) { + super(status.getMsg()); + this.code = status.getCode(); + } + + public ServiceException(Integer code,String message) { + super(message); + this.code = code; + } + + public ServiceException(String message) { + super(message); + } + + public Integer getCode() { + return this.code; + } + + public void setCode(Integer code) { + this.code = code; + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java index 380eea5774..98bac42f72 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java @@ -27,7 +27,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.servlet.HandlerInterceptor; -import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -90,14 +89,4 @@ public class LoginHandlerInterceptor implements HandlerInterceptor { return true; } - @Override - public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception { - - } - - @Override - public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception { - - } - } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java index 897646ba70..5d176961bb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java @@ -83,6 +83,9 @@ public class AccessTokenService extends BaseService { public Map createToken(int userId, String expireTime, String token) { Map result = new HashMap<>(5); + if (userId <= 0) { + throw new IllegalArgumentException("User id should not less than or equals to 0."); + } AccessToken accessToken = new AccessToken(); accessToken.setUserId(userId); accessToken.setExpireTime(DateUtils.stringToDate(expireTime)); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java index 70310b6331..001a10d08a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AlertGroupService.java @@ -16,17 +16,17 @@ */ package org.apache.dolphinscheduler.api.service; +import java.util.*; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.AlertGroup; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.UserAlertGroup; import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; -import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.slf4j.Logger; @@ -35,11 +35,6 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * alert group service */ @@ -52,8 +47,7 @@ public class AlertGroupService extends BaseService{ private AlertGroupMapper alertGroupMapper; @Autowired - private UserAlertGroupMapper userAlertGroupMapper; - + private UserAlertGroupService userAlertGroupService; /** * query alert group list * @@ -122,7 +116,7 @@ public class AlertGroupService extends BaseService{ alertGroup.setCreateTime(now); alertGroup.setUpdateTime(now); - // insert + // insert int insert = alertGroupMapper.insert(alertGroup); if (insert > 0) { @@ -199,7 +193,7 @@ public class AlertGroupService extends BaseService{ return result; } - userAlertGroupMapper.deleteByAlertgroupId(id); + userAlertGroupService.deleteByAlertGroupId(id); alertGroupMapper.deleteById(id); putMsg(result, Status.SUCCESS); return result; @@ -223,22 +217,26 @@ public class AlertGroupService extends BaseService{ return result; } - userAlertGroupMapper.deleteByAlertgroupId(alertgroupId); + userAlertGroupService.deleteByAlertGroupId(alertgroupId); if (StringUtils.isEmpty(userIds)) { putMsg(result, Status.SUCCESS); return result; } String[] userIdsArr = userIds.split(","); - + Date now = new Date(); + List alertGroups = new ArrayList<>(userIds.length()); for (String userId : userIdsArr) { - Date now = new Date(); UserAlertGroup userAlertGroup = new UserAlertGroup(); userAlertGroup.setAlertgroupId(alertgroupId); userAlertGroup.setUserId(Integer.parseInt(userId)); userAlertGroup.setCreateTime(now); userAlertGroup.setUpdateTime(now); - userAlertGroupMapper.insert(userAlertGroup); + alertGroups.add(userAlertGroup); + } + + if (CollectionUtils.isNotEmpty(alertGroups)) { + userAlertGroupService.saveBatch(alertGroups); } putMsg(result, Status.SUCCESS); @@ -248,22 +246,11 @@ public class AlertGroupService extends BaseService{ /** * verify group name exists * - * @param loginUser login user * @param groupName group name * @return check result code */ - public Result verifyGroupName(User loginUser, String groupName) { - Result result = new Result(); + public boolean existGroupName(String groupName) { List alertGroup = alertGroupMapper.queryByGroupName(groupName); - if (alertGroup != null && alertGroup.size() > 0) { - logger.error("group {} has exist, can't create again.", groupName); - result.setCode(Status.ALERT_GROUP_EXIST.getCode()); - result.setMsg(Status.ALERT_GROUP_EXIST.getMsg()); - } else { - result.setCode(Status.SUCCESS.getCode()); - result.setMsg(Status.SUCCESS.getMsg()); - } - - return result; + return CollectionUtils.isNotEmpty(alertGroup); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java index bafe833fab..39bec56357 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java @@ -29,8 +29,6 @@ import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -108,14 +106,12 @@ public class DataAnalysisService extends BaseService{ List taskInstanceStateCounts = taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds); - if (taskInstanceStateCounts != null && !taskInstanceStateCounts.isEmpty()) { + if (taskInstanceStateCounts != null) { TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts); result.put(Constants.DATA_LIST, taskCountResult); putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.TASK_INSTANCE_STATE_COUNT_ERROR); } - return result; + return result; } private void putErrorRequestParamsMsg(Map result) { @@ -155,14 +151,12 @@ public class DataAnalysisService extends BaseService{ processInstanceMapper.countInstanceStateByUser(start, end, projectIdArray); - if (processInstanceStateCounts != null && !processInstanceStateCounts.isEmpty()) { + if (processInstanceStateCounts != null) { TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); result.put(Constants.DATA_LIST, taskCountResult); putMsg(result, Status.SUCCESS); - } else { - putMsg(result, Status.COUNT_PROCESS_INSTANCE_STATE_ERROR); } - return result; + return result; } @@ -236,7 +230,7 @@ public class DataAnalysisService extends BaseService{ // count error command state List errorCommandStateCounts = errorCommandMapper.countCommandState( - start, end, projectIdArray); + start, end, projectIdArray); // Map> dataMap = new HashMap<>(); @@ -318,9 +312,8 @@ public class DataAnalysisService extends BaseService{ return result; } - ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - List tasksQueueList = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - List tasksKillList = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_KILL); + List tasksQueueList = new ArrayList<>(); + List tasksKillList = new ArrayList<>(); Map dataMap = new HashMap<>(); if (loginUser.getUserType() == UserType.ADMIN_USER){ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index f6d8903dd8..afa13b7414 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -16,10 +16,16 @@ */ package org.apache.dolphinscheduler.api.service; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DbConnectType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; @@ -29,10 +35,6 @@ import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; -import com.alibaba.fastjson.JSONObject; -import com.alibaba.fastjson.TypeReference; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; import org.slf4j.Logger; @@ -303,7 +305,7 @@ public class DataSourceService extends BaseService{ for (DataSource dataSource : dataSourceList) { String connectionParams = dataSource.getConnectionParams(); - JSONObject object = JSONObject.parseObject(connectionParams); + JSONObject object = JSON.parseObject(connectionParams); object.put(Constants.PASSWORD, Constants.XXXXXX); dataSource.setConnectionParams(JSONUtils.toJson(object)); @@ -367,11 +369,11 @@ public class DataSourceService extends BaseService{ try { switch (dbType) { case POSTGRESQL: - datasource = JSONObject.parseObject(parameter, PostgreDataSource.class); + datasource = JSON.parseObject(parameter, PostgreDataSource.class); Class.forName(Constants.ORG_POSTGRESQL_DRIVER); break; case MYSQL: - datasource = JSONObject.parseObject(parameter, MySQLDataSource.class); + datasource = JSON.parseObject(parameter, MySQLDataSource.class); Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); break; case HIVE: @@ -386,26 +388,26 @@ public class DataSourceService extends BaseService{ getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH)); } if (dbType == DbType.HIVE){ - datasource = JSONObject.parseObject(parameter, HiveDataSource.class); + datasource = JSON.parseObject(parameter, HiveDataSource.class); }else if (dbType == DbType.SPARK){ - datasource = JSONObject.parseObject(parameter, SparkDataSource.class); + datasource = JSON.parseObject(parameter, SparkDataSource.class); } Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); break; case CLICKHOUSE: - datasource = JSONObject.parseObject(parameter, ClickHouseDataSource.class); + datasource = JSON.parseObject(parameter, ClickHouseDataSource.class); Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER); break; case ORACLE: - datasource = JSONObject.parseObject(parameter, OracleDataSource.class); + datasource = JSON.parseObject(parameter, OracleDataSource.class); Class.forName(Constants.COM_ORACLE_JDBC_DRIVER); break; case SQLSERVER: - datasource = JSONObject.parseObject(parameter, SQLServerDataSource.class); + datasource = JSON.parseObject(parameter, SQLServerDataSource.class); Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); break; case DB2: - datasource = JSONObject.parseObject(parameter, DB2ServerDataSource.class); + datasource = JSON.parseObject(parameter, DB2ServerDataSource.class); Class.forName(Constants.COM_DB2_JDBC_DRIVER); break; default: @@ -472,12 +474,19 @@ public class DataSourceService extends BaseService{ * @return datasource parameter */ public String buildParameter(String name, String desc, DbType type, String host, - String port, String database,String principal,String userName, - String password, String other) { + String port, String database, String principal, String userName, + String password, DbConnectType connectType, String other) { + + String address = buildAddress(type, host, port, connectType); - String address = buildAddress(type, host, port); + String jdbcUrl; + if (Constants.ORACLE.equals(type.name()) + && connectType == DbConnectType.ORACLE_SID) { + jdbcUrl = address + ":" + database; + } else { + jdbcUrl = address + "/" + database; + } - String jdbcUrl = address + "/" + database; if (CommonUtils.getKerberosStartupState() && (type == DbType.HIVE || type == DbType.SPARK)){ jdbcUrl += ";principal=" + principal; @@ -507,7 +516,7 @@ public class DataSourceService extends BaseService{ parameterMap.put(Constants.PRINCIPAL,principal); } if (other != null && !"".equals(other)) { - LinkedHashMap map = JSONObject.parseObject(other, new TypeReference>() { + LinkedHashMap map = JSON.parseObject(other, new TypeReference>() { }); if (map.size() > 0) { StringBuilder otherSb = new StringBuilder(); @@ -523,14 +532,14 @@ public class DataSourceService extends BaseService{ } if(logger.isDebugEnabled()){ - logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap)); + logger.info("parameters map-----" + JSON.toJSONString(parameterMap)); } - return JSONObject.toJSONString(parameterMap); + return JSON.toJSONString(parameterMap); } - private String buildAddress(DbType type, String host, String port) { + private String buildAddress(DbType type, String host, String port, DbConnectType connectType) { StringBuilder sb = new StringBuilder(); if (Constants.MYSQL.equals(type.name())) { sb.append(Constants.JDBC_MYSQL); @@ -551,7 +560,11 @@ public class DataSourceService extends BaseService{ sb.append(Constants.JDBC_CLICKHOUSE); sb.append(host).append(":").append(port); } else if (Constants.ORACLE.equals(type.name())) { - sb.append(Constants.JDBC_ORACLE); + if (connectType == DbConnectType.ORACLE_SID) { + sb.append(Constants.JDBC_ORACLE_SID); + } else { + sb.append(Constants.JDBC_ORACLE_SERVICE_NAME); + } sb.append(host).append(":").append(port); } else if (Constants.SQLSERVER.equals(type.name())) { sb.append(Constants.JDBC_SQLSERVER); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 86b507f0a0..51f5420ac5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -85,7 +85,7 @@ public class ExecutorService extends BaseService{ * @param receivers receivers * @param receiversCc receivers cc * @param processInstancePriority process instance priority - * @param workerGroupId worker group id + * @param workerGroup worker group name * @param runMode run mode * @param timeout timeout * @return execute process instance code @@ -96,9 +96,9 @@ public class ExecutorService extends BaseService{ FailureStrategy failureStrategy, String startNodeList, TaskDependType taskDependType, WarningType warningType, int warningGroupId, String receivers, String receiversCc, RunMode runMode, - Priority processInstancePriority, int workerGroupId, Integer timeout) throws ParseException { + Priority processInstancePriority, String workerGroup, Integer timeout) throws ParseException { Map result = new HashMap<>(5); - // timeout is valid + // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); return result; @@ -128,7 +128,7 @@ public class ExecutorService extends BaseService{ */ int create = this.createCommand(commandType, processDefinitionId, taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), - warningGroupId, runMode,processInstancePriority, workerGroupId); + warningGroupId, runMode,processInstancePriority, workerGroup); if(create > 0 ){ /** * according to the process definition ID updateProcessInstance and CC recipient @@ -225,20 +225,14 @@ public class ExecutorService extends BaseService{ if (processInstance.getState() == ExecutionStatus.READY_STOP) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { - processInstance.setCommandType(CommandType.STOP); - processInstance.addHistoryCmd(CommandType.STOP); - processService.updateProcessInstance(processInstance); - result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP); + result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); } break; case PAUSE: if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); } else { - processInstance.setCommandType(CommandType.PAUSE); - processInstance.addHistoryCmd(CommandType.PAUSE); - processService.updateProcessInstance(processInstance); - result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE); + result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); } break; default: @@ -308,22 +302,27 @@ public class ExecutorService extends BaseService{ } /** - * update process instance state + * prepare to update process instance command type and status * - * @param processInstanceId process instance id + * @param processInstance process instance + * @param commandType command type * @param executionStatus execute status * @return update result */ - private Map updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { + private Map updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { Map result = new HashMap<>(5); - int update = processService.updateProcessInstanceState(processInstanceId, executionStatus); + processInstance.setCommandType(commandType); + processInstance.addHistoryCmd(commandType); + processInstance.setState(executionStatus); + int update = processService.updateProcessInstance(processInstance); + + // determine whether the process is normal if (update > 0) { putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } - return result; } @@ -435,25 +434,26 @@ public class ExecutorService extends BaseService{ /** * create command - * - * @param commandType - * @param processDefineId - * @param nodeDep - * @param failureStrategy - * @param startNodeList - * @param schedule - * @param warningType - * @param excutorId - * @param warningGroupId - * @param runMode - * @return + * @param commandType commandType + * @param processDefineId processDefineId + * @param nodeDep nodeDep + * @param failureStrategy failureStrategy + * @param startNodeList startNodeList + * @param schedule schedule + * @param warningType warningType + * @param executorId executorId + * @param warningGroupId warningGroupId + * @param runMode runMode + * @param processInstancePriority processInstancePriority + * @param workerGroup workerGroup + * @return command id * @throws ParseException */ private int createCommand(CommandType commandType, int processDefineId, TaskDependType nodeDep, FailureStrategy failureStrategy, String startNodeList, String schedule, WarningType warningType, - int excutorId, int warningGroupId, - RunMode runMode,Priority processInstancePriority, int workerGroupId) throws ParseException { + int executorId, int warningGroupId, + RunMode runMode,Priority processInstancePriority, String workerGroup) throws ParseException { /** * instantiate command schedule instance @@ -481,10 +481,10 @@ public class ExecutorService extends BaseService{ command.setWarningType(warningType); } command.setCommandParam(JSONUtils.toJson(cmdParam)); - command.setExecutorId(excutorId); + command.setExecutorId(executorId); command.setWarningGroupId(warningGroupId); command.setProcessInstancePriority(processInstancePriority); - command.setWorkerGroupId(workerGroupId); + command.setWorkerGroup(workerGroup); Date start = null; Date end = null; @@ -496,9 +496,10 @@ public class ExecutorService extends BaseService{ } } + // determine whether to complement if(commandType == CommandType.COMPLEMENT_DATA){ runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; - if(null != start && null != end && start.before(end)){ + if(null != start && null != end && !start.after(end)){ if(runMode == RunMode.RUN_MODE_SERIAL){ cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java index 1f65208240..2f44dee304 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -21,6 +21,7 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; @@ -64,25 +65,24 @@ public class LoggerService { TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - if (taskInstance == null){ - return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); - } - - String host = taskInstance.getHost(); - if(StringUtils.isEmpty(host)){ + if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())){ return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); } + String host = getHost(taskInstance.getHost()); Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT); + String log = logClient.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(),skipLineNum,limit); result.setData(log); - logger.info(log); return result; } + + + /** * get log size * @@ -91,10 +91,24 @@ public class LoggerService { */ public byte[] getLogBytes(int taskInstId) { TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - if (taskInstance == null){ - throw new RuntimeException("task instance is null"); + if (taskInstance == null || StringUtils.isBlank(taskInstance.getHost())){ + throw new RuntimeException("task instance is null or host is null"); } - String host = taskInstance.getHost(); + String host = getHost(taskInstance.getHost()); + return logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()); } + + + /** + * get host + * @param address address + * @return old version return true ,otherwise return false + */ + private String getHost(String address){ + if (Host.isOldVersion(address)){ + return address; + } + return Host.of(address).getIp(); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java index 22e3593a52..14cadbf189 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -38,14 +38,13 @@ import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.service.permission.PermissionCheck; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -113,8 +112,13 @@ public class ProcessDefinitionService extends BaseDAGService { * @return create result code * @throws JsonProcessingException JsonProcessingException */ - public Map createProcessDefinition(User loginUser, String projectName, String name, - String processDefinitionJson, String desc, String locations, String connects) throws JsonProcessingException { + public Map createProcessDefinition(User loginUser, + String projectName, + String name, + String processDefinitionJson, + String desc, + String locations, + String connects) throws JsonProcessingException { Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -145,10 +149,11 @@ public class ProcessDefinitionService extends BaseDAGService { processDefine.setTimeout(processData.getTimeout()); processDefine.setTenantId(processData.getTenantId()); processDefine.setModifyBy(loginUser.getUserName()); + processDefine.setResourceIds(getResourceIds(processData)); //custom global params List globalParamsList = processData.getGlobalParams(); - if (globalParamsList != null && globalParamsList.size() > 0) { + if (CollectionUtils.isNotEmpty(globalParamsList)) { Set globalParamsSet = new HashSet<>(globalParamsList); globalParamsList = new ArrayList<>(globalParamsSet); processDefine.setGlobalParamList(globalParamsList); @@ -162,15 +167,42 @@ public class ProcessDefinitionService extends BaseDAGService { return result; } + /** + * get resource ids + * @param processData process data + * @return resource ids + */ + private String getResourceIds(ProcessData processData) { + List tasks = processData.getTasks(); + Set resourceIds = new HashSet<>(); + for(TaskNode taskNode : tasks){ + String taskParameter = taskNode.getParams(); + AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(),taskParameter); + if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) { + Set tempSet = params.getResourceFilesList().stream().map(t->t.getId()).collect(Collectors.toSet()); + resourceIds.addAll(tempSet); + } + } + + StringBuilder sb = new StringBuilder(); + for(int i : resourceIds) { + if (sb.length() > 0) { + sb.append(","); + } + sb.append(i); + } + return sb.toString(); + } + /** - * query proccess definition list + * query process definition list * * @param loginUser login user * @param projectName project name * @return definition list */ - public Map queryProccessDefinitionList(User loginUser, String projectName) { + public Map queryProcessDefinitionList(User loginUser, String projectName) { HashMap result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -190,7 +222,7 @@ public class ProcessDefinitionService extends BaseDAGService { /** - * query proccess definition list paging + * query process definition list paging * * @param loginUser login user * @param projectName project name @@ -232,7 +264,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @param processId process definition id * @return process definition detail */ - public Map queryProccessDefinitionById(User loginUser, String projectName, Integer processId) { + public Map queryProcessDefinitionById(User loginUser, String projectName, Integer processId) { Map result = new HashMap<>(5); @@ -254,6 +286,41 @@ public class ProcessDefinitionService extends BaseDAGService { return result; } + /** + * copy process definition + * + * @param loginUser login user + * @param projectName project name + * @param processId process definition id + * @return copy result code + */ + public Map copyProcessDefinition(User loginUser, String projectName, Integer processId) throws JsonProcessingException{ + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); + return result; + } else { + return createProcessDefinition( + loginUser, + projectName, + processDefinition.getName()+"_copy_"+System.currentTimeMillis(), + processDefinition.getProcessDefinitionJson(), + processDefinition.getDescription(), + processDefinition.getLocations(), + processDefinition.getConnects()); + } + } + /** * update process definition * @@ -284,20 +351,19 @@ public class ProcessDefinitionService extends BaseDAGService { if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { return checkProcessJson; } - ProcessDefinition processDefinition = processService.findProcessDefineById(id); - if (processDefinition == null) { + ProcessDefinition processDefine = processService.findProcessDefineById(id); + if (processDefine == null) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); return result; - } else if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { + } else if (processDefine.getReleaseState() == ReleaseState.ONLINE) { // online can not permit edit - putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); + putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefine.getName()); return result; } else { putMsg(result, Status.SUCCESS); } - ProcessDefinition processDefine = processService.findProcessDefineById(id); Date now = new Date(); processDefine.setId(id); @@ -311,10 +377,11 @@ public class ProcessDefinitionService extends BaseDAGService { processDefine.setTimeout(processData.getTimeout()); processDefine.setTenantId(processData.getTenantId()); processDefine.setModifyBy(loginUser.getUserName()); + processDefine.setResourceIds(getResourceIds(processData)); //custom global params List globalParamsList = new ArrayList<>(); - if (processData.getGlobalParams() != null && processData.getGlobalParams().size() > 0) { + if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) { Set userDefParamsSet = new HashSet<>(processData.getGlobalParams()); globalParamsList = new ArrayList<>(userDefParamsSet); } @@ -338,7 +405,7 @@ public class ProcessDefinitionService extends BaseDAGService { * @param name name * @return true if process definition name not exists, otherwise false */ - public Map verifyProccessDefinitionName(User loginUser, String projectName, String name) { + public Map verifyProcessDefinitionName(User loginUser, String projectName, String name) { Map result = new HashMap<>(); Project project = projectMapper.queryByName(projectName); @@ -453,12 +520,25 @@ public class ProcessDefinitionService extends BaseDAGService { ProcessDefinition processDefinition = processDefineMapper.selectById(id); switch (state) { - case ONLINE: { + case ONLINE: + // To check resources whether they are already cancel authorized or deleted + String resourceIds = processDefinition.getResourceIds(); + if (StringUtils.isNotBlank(resourceIds)) { + Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new); + PermissionCheck permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,processService,resourceIdArray,loginUser.getId(),logger); + try { + permissionCheck.checkPermission(); + } catch (Exception e) { + logger.error(e.getMessage(),e); + putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, "releaseState"); + return result; + } + } + processDefinition.setReleaseState(state); processDefineMapper.updateById(processDefinition); break; - } - case OFFLINE: { + case OFFLINE: processDefinition.setReleaseState(state); processDefineMapper.updateById(processDefinition); List scheduleList = scheduleMapper.selectAllByProcessDefineArray( @@ -473,11 +553,9 @@ public class ProcessDefinitionService extends BaseDAGService { SchedulerService.deleteSchedule(project.getId(), schedule.getId()); } break; - } - default: { + default: putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState"); return result; - } } putMsg(result, Status.SUCCESS); @@ -561,13 +639,13 @@ public class ProcessDefinitionService extends BaseDAGService { List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); if (!schedules.isEmpty()) { Schedule schedule = schedules.get(0); - WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId()); + /*WorkerGroup workerGroup = workerGroupMapper.selectById(schedule.getWorkerGroupId()); if (null == workerGroup && schedule.getWorkerGroupId() == -1) { workerGroup = new WorkerGroup(); workerGroup.setId(-1); workerGroup.setName(""); - } + }*/ exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString()); exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId()); @@ -577,11 +655,7 @@ public class ProcessDefinitionService extends BaseDAGService { exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy())); exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE)); exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority())); - - if (null != workerGroup) { - exportProcessMeta.setScheduleWorkerGroupId(workerGroup.getId()); - exportProcessMeta.setScheduleWorkerGroupName(workerGroup.getName()); - } + exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup()); } //create workflow json file return JSONUtils.toJsonString(exportProcessMeta); @@ -780,15 +854,9 @@ public class ProcessDefinitionService extends BaseDAGService { if (null != processMeta.getScheduleProcessInstancePriority()) { scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority())); } - if (null != processMeta.getScheduleWorkerGroupId()) { - scheduleObj.setWorkerGroupId(processMeta.getScheduleWorkerGroupId()); - } else { - if (null != processMeta.getScheduleWorkerGroupName()) { - List workerGroups = workerGroupMapper.queryWorkerGroupByName(processMeta.getScheduleWorkerGroupName()); - if(CollectionUtils.isNotEmpty(workerGroups)){ - scheduleObj.setWorkerGroupId(workerGroups.get(0).getId()); - } - } + + if (null != processMeta.getScheduleWorkerGroupName()) { + scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName()); } return scheduleMapper.insert(scheduleObj); @@ -949,7 +1017,9 @@ public class ProcessDefinitionService extends BaseDAGService { return result; } + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); //process data check @@ -1008,12 +1078,12 @@ public class ProcessDefinitionService extends BaseDAGService { /** - * query proccess definition all by project id + * query process definition all by project id * * @param projectId project id * @return process definitions in the project */ - public Map queryProccessDefinitionAllByProjectId(Integer projectId) { + public Map queryProcessDefinitionAllByProjectId(Integer projectId) { HashMap result = new HashMap<>(5); @@ -1166,6 +1236,7 @@ public class ProcessDefinitionService extends BaseDAGService { private DAG genDagGraph(ProcessDefinition processDefinition) throws Exception { String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); //check process data diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 4f81d89505..b01a706ff7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.api.service; +import java.nio.charset.StandardCharsets; import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; import org.apache.dolphinscheduler.api.dto.gantt.Task; import org.apache.dolphinscheduler.api.enums.Status; @@ -38,7 +39,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -49,7 +49,6 @@ import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStreamReader; -import java.nio.charset.Charset; import java.text.ParseException; import java.util.*; import java.util.stream.Collectors; @@ -116,18 +115,7 @@ public class ProcessInstanceService extends BaseDAGService { return checkResult; } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); - String workerGroupName = ""; - if(processInstance.getWorkerGroupId() == -1){ - workerGroupName = DEFAULT; - }else{ - WorkerGroup workerGroup = workerGroupMapper.selectById(processInstance.getWorkerGroupId()); - if(workerGroup != null){ - workerGroupName = workerGroup.getName(); - }else{ - workerGroupName = DEFAULT; - } - } - processInstance.setWorkerGroupName(workerGroupName); + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); processInstance.setReceivers(processDefinition.getReceivers()); processInstance.setReceiversCc(processDefinition.getReceiversCc()); @@ -204,14 +192,8 @@ public class ProcessInstanceService extends BaseDAGService { } } - Set exclusionSet = new HashSet<>(); - exclusionSet.add(Constants.CLASS); - exclusionSet.add("locations"); - exclusionSet.add("connects"); - exclusionSet.add("processInstanceJson"); - pageInfo.setTotalCount((int) processInstanceList.getTotal()); - pageInfo.setLists(CollectionUtils.getListByExclusion(processInstances, exclusionSet)); + pageInfo.setLists(processInstances); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); return result; @@ -239,7 +221,7 @@ public class ProcessInstanceService extends BaseDAGService { } ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); List taskInstanceList = processService.findValidTaskListByProcessId(processId); - AddDependResultForTaskList(taskInstanceList); + addDependResultForTaskList(taskInstanceList); Map resultMap = new HashMap<>(); resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); resultMap.put(TASK_LIST, taskInstanceList); @@ -253,9 +235,9 @@ public class ProcessInstanceService extends BaseDAGService { * add dependent result for dependent task * @param taskInstanceList */ - private void AddDependResultForTaskList(List taskInstanceList) throws IOException { + private void addDependResultForTaskList(List taskInstanceList) throws IOException { for(TaskInstance taskInstance: taskInstanceList){ - if(taskInstance.getTaskType().toUpperCase().equals(TaskType.DEPENDENT.toString())){ + if(taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())){ Result logResult = loggerService.queryLog( taskInstance.getId(), 0, 4098); if(logResult.getCode() == Status.SUCCESS.ordinal()){ @@ -273,7 +255,8 @@ public class ProcessInstanceService extends BaseDAGService { return resultMap; } - BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8"))); + BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( + StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); String line; while ((line = br.readLine()) != null) { if(line.contains(DEPENDENT_SPLIT)){ @@ -413,11 +396,10 @@ public class ProcessInstanceService extends BaseDAGService { processInstance.setProcessInstanceJson(processInstanceJson); processInstance.setGlobalParams(globalParams); } -// int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson, -// globalParams, schedule, flag, locations, connects); + int update = processService.updateProcessInstance(processInstance); int updateDefine = 1; - if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) { + if (Boolean.TRUE.equals(syncDefine) && StringUtils.isNotEmpty(processInstanceJson)) { processDefinition.setProcessDefinitionJson(processInstanceJson); processDefinition.setGlobalParams(originDefParams); processDefinition.setLocations(locations); @@ -481,11 +463,10 @@ public class ProcessInstanceService extends BaseDAGService { * @param loginUser login user * @param projectName project name * @param processInstanceId process instance id - * @param tasksQueue task queue * @return delete result code */ @Transactional(rollbackFor = Exception.class) - public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId, ITaskQueue tasksQueue) { + public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) { Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -503,51 +484,7 @@ public class ProcessInstanceService extends BaseDAGService { return result; } - //process instance priority - int processInstancePriority = processInstance.getProcessInstancePriority().ordinal(); - // delete zk queue - if (CollectionUtils.isNotEmpty(taskInstanceList)){ - for (TaskInstance taskInstance : taskInstanceList){ - // task instance priority - int taskInstancePriority = taskInstance.getTaskInstancePriority().ordinal(); - - StringBuilder nodeValueSb = new StringBuilder(100); - nodeValueSb.append(processInstancePriority) - .append(UNDERLINE) - .append(processInstanceId) - .append(UNDERLINE) - .append(taskInstancePriority) - .append(UNDERLINE) - .append(taskInstance.getId()) - .append(UNDERLINE); - - int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance); - WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId); - - if(workerGroup == null){ - nodeValueSb.append(DEFAULT_WORKER_ID); - }else { - - String ips = workerGroup.getIpList(); - StringBuilder ipSb = new StringBuilder(100); - String[] ipArray = ips.split(COMMA); - - for (String ip : ipArray) { - long ipLong = IpUtils.ipToLong(ip); - ipSb.append(ipLong).append(COMMA); - } - - if(ipSb.length() > 0) { - ipSb.deleteCharAt(ipSb.length() - 1); - } - nodeValueSb.append(ipSb); - } - - logger.info("delete task queue node : {}",nodeValueSb.toString()); - tasksQueue.removeNode(org.apache.dolphinscheduler.common.Constants.DOLPHINSCHEDULER_TASKS_QUEUE, nodeValueSb.toString()); - } - } // delete database cascade int delete = processService.deleteWorkProcessInstanceById(processInstanceId); @@ -620,7 +557,7 @@ public class ProcessInstanceService extends BaseDAGService { Map localParamsMap = new HashMap<>(); localParamsMap.put("taskType",taskNode.getType()); localParamsMap.put("localParamsList",localParamsList); - if (localParamsList.size() > 0) { + if (CollectionUtils.isNotEmpty(localParamsList)) { localUserDefParams.put(taskNode.getName(), localParamsMap); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java index 862c895c92..cba1b5f2bb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/QueueService.java @@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.Queue; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.QueueMapper; @@ -43,7 +44,7 @@ import java.util.Map; @Service public class QueueService extends BaseService { - private static final Logger logger = LoggerFactory.getLogger(TenantService.class); + private static final Logger logger = LoggerFactory.getLogger(QueueService.class); @Autowired private QueueMapper queueMapper; @@ -186,19 +187,16 @@ public class QueueService extends BaseService { } // check queue name is exist - if (!queueName.equals(queueObj.getQueueName())) { - if (checkQueueNameExist(queueName)) { - putMsg(result, Status.QUEUE_NAME_EXIST, queueName); - return result; - } + if (!queueName.equals(queueObj.getQueueName()) + && checkQueueNameExist(queueName)) { + putMsg(result, Status.QUEUE_NAME_EXIST, queueName); + return result; } // check queue value is exist - if (!queue.equals(queueObj.getQueue())) { - if (checkQueueExist(queue)) { - putMsg(result, Status.QUEUE_VALUE_EXIST, queue); - return result; - } + if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) { + putMsg(result, Status.QUEUE_VALUE_EXIST, queue); + return result; } // check old queue using by any user @@ -267,7 +265,7 @@ public class QueueService extends BaseService { * @return true if the queue not exists, otherwise return false */ private boolean checkQueueExist(String queue) { - return queueMapper.queryAllQueueList(queue, null).size() > 0; + return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null)); } /** @@ -278,7 +276,7 @@ public class QueueService extends BaseService { * @return true if the queue name not exists, otherwise return false */ private boolean checkQueueNameExist(String queueName) { - return queueMapper.queryAllQueueList(null, queueName).size() > 0; + return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, queueName)); } /** @@ -290,7 +288,7 @@ public class QueueService extends BaseService { * @return true if need to update user */ private boolean checkIfQueueIsInUsing (String oldQueue, String newQueue) { - return !oldQueue.equals(newQueue) && userMapper.queryUserListByQueue(oldQueue).size() > 0; + return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue)); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index c1adb8874d..a2af47b2d5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -16,23 +16,28 @@ */ package org.apache.dolphinscheduler.api.service; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.serializer.SerializerFeature; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.commons.collections.BeanMap; +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.api.dto.resources.filter.ResourceFilter; +import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor; +import org.apache.dolphinscheduler.api.dto.resources.visitor.Visitor; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; -import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -42,6 +47,7 @@ import org.springframework.web.multipart.MultipartFile; import java.text.MessageFormat; import java.util.*; +import java.util.stream.Collectors; import static org.apache.dolphinscheduler.common.Constants.*; @@ -68,6 +74,82 @@ public class ResourcesService extends BaseService { @Autowired private ResourceUserMapper resourceUserMapper; + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + + /** + * create directory + * + * @param loginUser login user + * @param name alias + * @param description description + * @param type type + * @param pid parent id + * @param currentDir current directory + * @return create directory result + */ + @Transactional(rollbackFor = Exception.class) + public Result createDirectory(User loginUser, + String name, + String description, + ResourceType type, + int pid, + String currentDir) { + Result result = new Result(); + // if hdfs not startup + if (!PropertyUtils.getResUploadStartupState()){ + logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState()); + putMsg(result, Status.HDFS_NOT_STARTUP); + return result; + } + String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); + + if (pid != -1) { + Resource parentResource = resourcesMapper.selectById(pid); + + if (parentResource == null) { + putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); + return result; + } + + if (!hasPerm(loginUser, parentResource.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + } + + + if (checkResourceExists(fullName, 0, type.ordinal())) { + logger.error("resource directory {} has exist, can't recreate", fullName); + putMsg(result, Status.RESOURCE_EXIST); + return result; + } + + Date now = new Date(); + + Resource resource = new Resource(pid,name,fullName,true,description,name,loginUser.getId(),type,0,now,now); + + try { + resourcesMapper.insert(resource); + + putMsg(result, Status.SUCCESS); + Map dataMap = new BeanMap(resource); + Map resultMap = new HashMap(); + for (Map.Entry entry: dataMap.entrySet()) { + if (!"class".equalsIgnoreCase(entry.getKey().toString())) { + resultMap.put(entry.getKey().toString(), entry.getValue()); + } + } + result.setData(resultMap); + } catch (Exception e) { + logger.error("resource already exists, can't recreate ", e); + throw new RuntimeException("resource already exists, can't recreate"); + } + //create directory in hdfs + createDirecotry(loginUser,fullName,type,result); + return result; + } + /** * create resource * @@ -76,6 +158,8 @@ public class ResourcesService extends BaseService { * @param desc description * @param file file * @param type type + * @param pid parent id + * @param currentDir current directory * @return create result code */ @Transactional(rollbackFor = Exception.class) @@ -83,7 +167,9 @@ public class ResourcesService extends BaseService { String name, String desc, ResourceType type, - MultipartFile file) { + MultipartFile file, + int pid, + String currentDir) { Result result = new Result(); // if hdfs not startup @@ -92,6 +178,21 @@ public class ResourcesService extends BaseService { putMsg(result, Status.HDFS_NOT_STARTUP); return result; } + + if (pid != -1) { + Resource parentResource = resourcesMapper.selectById(pid); + + if (parentResource == null) { + putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); + return result; + } + + if (!hasPerm(loginUser, parentResource.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + } + // file is empty if (file.isEmpty()) { logger.error("file is empty: {}", file.getOriginalFilename()); @@ -126,22 +227,22 @@ public class ResourcesService extends BaseService { } // check resoure name exists - if (checkResourceExists(name, 0, type.ordinal())) { + String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); + if (checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource {} has exist, can't recreate", name); putMsg(result, Status.RESOURCE_EXIST); return result; } Date now = new Date(); - - Resource resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now); + Resource resource = new Resource(pid,name,fullName,false,desc,file.getOriginalFilename(),loginUser.getId(),type,file.getSize(),now,now); try { resourcesMapper.insert(resource); putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap(); + Map resultMap = new HashMap<>(); for (Map.Entry entry: dataMap.entrySet()) { if (!"class".equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); @@ -154,7 +255,7 @@ public class ResourcesService extends BaseService { } // fail upload - if (!upload(loginUser, name, file, type)) { + if (!upload(loginUser, fullName, file, type)) { logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); putMsg(result, Status.HDFS_OPERATION_ERROR); throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); @@ -165,14 +266,14 @@ public class ResourcesService extends BaseService { /** * check resource is exists * - * @param alias alias + * @param fullName fullName * @param userId user id * @param type type * @return true if resource exists */ - private boolean checkResourceExists(String alias, int userId, int type ){ + private boolean checkResourceExists(String fullName, int userId, int type ){ - List resources = resourcesMapper.queryResourceList(alias, userId, type); + List resources = resourcesMapper.queryResourceList(fullName, userId, type); if (resources != null && resources.size() > 0) { return true; } @@ -180,16 +281,14 @@ public class ResourcesService extends BaseService { } - /** * update resource - * - * @param loginUser login user - * @param name alias - * @param resourceId resource id - * @param type resource type - * @param desc description - * @return update result code + * @param loginUser login user + * @param resourceId resource id + * @param name name + * @param desc description + * @param type resource type + * @return update result code */ @Transactional(rollbackFor = Exception.class) public Result updateResource(User loginUser, @@ -223,7 +322,10 @@ public class ResourcesService extends BaseService { } //check resource aleady exists - if (!resource.getAlias().equals(name) && checkResourceExists(name, 0, type.ordinal())) { + String originFullName = resource.getFullName(); + + String fullName = String.format("%s%s",originFullName.substring(0,originFullName.lastIndexOf("/")+1),name); + if (!resource.getAlias().equals(name) && checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource {} already exists, can't recreate", name); putMsg(result, Status.RESOURCE_EXIST); return result; @@ -234,25 +336,40 @@ public class ResourcesService extends BaseService { if (StringUtils.isEmpty(tenantCode)){ return result; } - - //get the file suffix + String nameWithSuffix = name; String originResourceName = resource.getAlias(); - String suffix = originResourceName.substring(originResourceName.lastIndexOf(".")); + if (!resource.isDirectory()) { + //get the file suffix + String suffix = originResourceName.substring(originResourceName.lastIndexOf(".")); - //if the name without suffix then add it ,else use the origin name - String nameWithSuffix = name; - if(!name.endsWith(suffix)){ - nameWithSuffix = nameWithSuffix + suffix; + //if the name without suffix then add it ,else use the origin name + if(!name.endsWith(suffix)){ + nameWithSuffix = nameWithSuffix + suffix; + } } // updateResource data + List childrenResource = listAllChildren(resource,false); + String oldFullName = resource.getFullName(); Date now = new Date(); + resource.setAlias(nameWithSuffix); + resource.setFullName(fullName); resource.setDescription(desc); resource.setUpdateTime(now); try { resourcesMapper.updateById(resource); + if (resource.isDirectory() && CollectionUtils.isNotEmpty(childrenResource)) { + List childResourceList = new ArrayList<>(); + List resourceList = resourcesMapper.listResourceByIds(childrenResource.toArray(new Integer[childrenResource.size()])); + childResourceList = resourceList.stream().map(t -> { + t.setFullName(t.getFullName().replaceFirst(oldFullName, fullName)); + t.setUpdateTime(now); + return t; + }).collect(Collectors.toList()); + resourcesMapper.batchUpdateResource(childResourceList); + } putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); @@ -265,24 +382,18 @@ public class ResourcesService extends BaseService { result.setData(resultMap); } catch (Exception e) { logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e); - throw new RuntimeException(Status.UPDATE_RESOURCE_ERROR.getMsg()); + throw new ServiceException(Status.UPDATE_RESOURCE_ERROR); } // if name unchanged, return directly without moving on HDFS if (originResourceName.equals(name)) { return result; } - // get file hdfs path - // delete hdfs file by type - String originHdfsFileName = ""; - String destHdfsFileName = ""; - if (resource.getType().equals(ResourceType.FILE)) { - originHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, originResourceName); - destHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, name); - } else if (resource.getType().equals(ResourceType.UDF)) { - originHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, originResourceName); - destHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, name); - } + // get the path of origin file in hdfs + String originHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,originFullName); + // get the path of dest file in hdfs + String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName); + try { if (HadoopUtils.getInstance().exists(originHdfsFileName)) { logger.info("hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName); @@ -294,6 +405,7 @@ public class ResourcesService extends BaseService { } catch (Exception e) { logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); putMsg(result,Status.HDFS_COPY_FAIL); + throw new ServiceException(Status.HDFS_COPY_FAIL); } return result; @@ -310,7 +422,7 @@ public class ResourcesService extends BaseService { * @param pageSize page size * @return resource list page */ - public Map queryResourceListPaging(User loginUser, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { + public Map queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { HashMap result = new HashMap<>(5); Page page = new Page(pageNo, pageSize); @@ -318,8 +430,16 @@ public class ResourcesService extends BaseService { if (isAdmin(loginUser)) { userId= 0; } + if (direcotryId != -1) { + Resource directory = resourcesMapper.selectById(direcotryId); + if (directory == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + } + IPage resourceIPage = resourcesMapper.queryResourcePaging(page, - userId, type.ordinal(), searchVal); + userId,direcotryId, type.ordinal(), searchVal); PageInfo pageInfo = new PageInfo(pageNo, pageSize); pageInfo.setTotalCount((int)resourceIPage.getTotal()); pageInfo.setLists(resourceIPage.getRecords()); @@ -328,17 +448,46 @@ public class ResourcesService extends BaseService { return result; } + /** + * create direcoty + * @param loginUser login user + * @param fullName full name + * @param type resource type + * @param result Result + */ + private void createDirecotry(User loginUser,String fullName,ResourceType type,Result result){ + // query tenant + String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); + String directoryName = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); + String resourceRootPath = HadoopUtils.getHdfsDir(type,tenantCode); + try { + if (!HadoopUtils.getInstance().exists(resourceRootPath)) { + createTenantDirIfNotExists(tenantCode); + } + + if (!HadoopUtils.getInstance().mkdir(directoryName)) { + logger.error("create resource directory {} of hdfs failed",directoryName); + putMsg(result,Status.HDFS_OPERATION_ERROR); + throw new RuntimeException(String.format("create resource directory: %s failed.", directoryName)); + } + } catch (Exception e) { + logger.error("create resource directory {} of hdfs failed",directoryName); + putMsg(result,Status.HDFS_OPERATION_ERROR); + throw new RuntimeException(String.format("create resource directory: %s failed.", directoryName)); + } + } + /** * upload file to hdfs * - * @param loginUser - * @param name - * @param file + * @param loginUser login user + * @param fullName full name + * @param file file */ - private boolean upload(User loginUser, String name, MultipartFile file, ResourceType type) { + private boolean upload(User loginUser, String fullName, MultipartFile file, ResourceType type) { // save to local String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); - String nameSuffix = FileUtils.suffix(name); + String nameSuffix = FileUtils.suffix(fullName); // determine file suffix if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { @@ -351,15 +500,8 @@ public class ResourcesService extends BaseService { // save file to hdfs, and delete original file - String hdfsFilename = ""; - String resourcePath = ""; - if (type.equals(ResourceType.FILE)) { - hdfsFilename = HadoopUtils.getHdfsFilename(tenantCode, name); - resourcePath = HadoopUtils.getHdfsResDir(tenantCode); - } else if (type.equals(ResourceType.UDF)) { - hdfsFilename = HadoopUtils.getHdfsUdfFilename(tenantCode, name); - resourcePath = HadoopUtils.getHdfsUdfDir(tenantCode); - } + String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); + String resourcePath = HadoopUtils.getHdfsDir(type,tenantCode); try { // if tenant dir not exists if (!HadoopUtils.getInstance().exists(resourcePath)) { @@ -384,13 +526,38 @@ public class ResourcesService extends BaseService { public Map queryResourceList(User loginUser, ResourceType type) { Map result = new HashMap<>(5); - List resourceList; + + int userId = loginUser.getId(); + if(isAdmin(loginUser)){ + userId = 0; + } + List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0); + Visitor resourceTreeVisitor = new ResourceTreeVisitor(allResourceList); + //JSONArray jsonArray = JSON.parseArray(JSON.toJSONString(resourceTreeVisitor.visit().getChildren(), SerializerFeature.SortField)); + result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); + putMsg(result,Status.SUCCESS); + + return result; + } + + /** + * query resource list + * + * @param loginUser login user + * @param type resource type + * @return resource list + */ + public Map queryResourceJarList(User loginUser, ResourceType type) { + + Map result = new HashMap<>(5); int userId = loginUser.getId(); if(isAdmin(loginUser)){ userId = 0; } - resourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal()); - result.put(Constants.DATA_LIST, resourceList); + List allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0); + List resources = new ResourceFilter(".jar",new ArrayList<>(allResourceList)).filter(); + Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources); + result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); putMsg(result,Status.SUCCESS); return result; @@ -427,22 +594,53 @@ public class ResourcesService extends BaseService { return result; } - Tenant tenant = tenantMapper.queryById(loginUser.getTenantId()); - if (tenant == null){ - putMsg(result, Status.TENANT_NOT_EXIST); + String tenantCode = getTenantCode(resource.getUserId(),result); + if (StringUtils.isEmpty(tenantCode)){ + return result; + } + + // get all resource id of process definitions those is released + List> list = processDefinitionMapper.listResources(); + Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); + Set resourceIdSet = resourceProcessMap.keySet(); + // get all children of the resource + List allChildren = listAllChildren(resource,true); + Integer[] needDeleteResourceIdArray = allChildren.toArray(new Integer[allChildren.size()]); + + //if resource type is UDF,need check whether it is bound by UDF functon + if (resource.getType() == (ResourceType.UDF)) { + List udfFuncs = udfFunctionMapper.listUdfByResourceId(needDeleteResourceIdArray); + if (CollectionUtils.isNotEmpty(udfFuncs)) { + logger.error("can't be deleted,because it is bound by UDF functions:{}",udfFuncs.toString()); + putMsg(result,Status.UDF_RESOURCE_IS_BOUND,udfFuncs.get(0).getFuncName()); + return result; + } + } + + if (resourceIdSet.contains(resource.getPid())) { + logger.error("can't be deleted,because it is used of process definition"); + putMsg(result, Status.RESOURCE_IS_USED); + return result; + } + resourceIdSet.retainAll(allChildren); + if (CollectionUtils.isNotEmpty(resourceIdSet)) { + logger.error("can't be deleted,because it is used of process definition"); + for (Integer resId : resourceIdSet) { + logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId)); + } + putMsg(result, Status.RESOURCE_IS_USED); return result; } - String hdfsFilename = ""; - // delete hdfs file by type - String tenantCode = tenant.getTenantCode(); - hdfsFilename = getHdfsFileName(resource, tenantCode, hdfsFilename); + // get hdfs file by type + String hdfsFilename = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getFullName()); //delete data in database - resourcesMapper.deleteById(resourceId); - resourceUserMapper.deleteResourceUser(0, resourceId); + resourcesMapper.deleteIds(needDeleteResourceIdArray); + resourceUserMapper.deleteResourceUserArray(0, needDeleteResourceIdArray); + //delete file on hdfs - HadoopUtils.getInstance().delete(hdfsFilename, false); + HadoopUtils.getInstance().delete(hdfsFilename, true); putMsg(result, Status.SUCCESS); return result; @@ -451,15 +649,15 @@ public class ResourcesService extends BaseService { /** * verify resource by name and type * @param loginUser login user - * @param name resource alias - * @param type resource type + * @param fullName resource full name + * @param type resource type * @return true if the resource name not exists, otherwise return false */ - public Result verifyResourceName(String name, ResourceType type,User loginUser) { + public Result verifyResourceName(String fullName, ResourceType type,User loginUser) { Result result = new Result(); putMsg(result, Status.SUCCESS); - if (checkResourceExists(name, 0, type.ordinal())) { - logger.error("resource type:{} name:{} has exist, can't create again.", type, name); + if (checkResourceExists(fullName, 0, type.ordinal())) { + logger.error("resource type:{} name:{} has exist, can't create again.", type, fullName); putMsg(result, Status.RESOURCE_EXIST); } else { // query tenant @@ -468,9 +666,9 @@ public class ResourcesService extends BaseService { String tenantCode = tenant.getTenantCode(); try { - String hdfsFilename = getHdfsFileName(type,tenantCode,name); + String hdfsFilename = HadoopUtils.getHdfsFileName(type,tenantCode,fullName); if(HadoopUtils.getInstance().exists(hdfsFilename)){ - logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, name,hdfsFilename); + logger.error("resource type:{} name:{} has exist in hdfs {}, can't create again.", type, fullName,hdfsFilename); putMsg(result, Status.RESOURCE_FILE_EXIST,hdfsFilename); } @@ -487,6 +685,48 @@ public class ResourcesService extends BaseService { return result; } + /** + * verify resource by full name or pid and type + * @param fullName resource full name + * @param id resource id + * @param type resource type + * @return true if the resource full name or pid not exists, otherwise return false + */ + public Result queryResource(String fullName,Integer id,ResourceType type) { + Result result = new Result(); + if (StringUtils.isBlank(fullName) && id == null) { + logger.error("You must input one of fullName and pid"); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR); + return result; + } + if (StringUtils.isNotBlank(fullName)) { + List resourceList = resourcesMapper.queryResource(fullName,type.ordinal()); + if (CollectionUtils.isEmpty(resourceList)) { + logger.error("resource file not exist, resource full name {} ", fullName); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + putMsg(result, Status.SUCCESS); + result.setData(resourceList.get(0)); + } else { + Resource resource = resourcesMapper.selectById(id); + if (resource == null) { + logger.error("resource file not exist, resource id {}", id); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + Resource parentResource = resourcesMapper.selectById(resource.getPid()); + if (parentResource == null) { + logger.error("parent resource file not exist, resource id {}", id); + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } + putMsg(result, Status.SUCCESS); + result.setData(parentResource); + } + return result; + } + /** * view resource file online * @@ -508,7 +748,7 @@ public class ResourcesService extends BaseService { // get resource by id Resource resource = resourcesMapper.selectById(resourceId); if (resource == null) { - logger.error("resouce file not exist, resource id {}", resourceId); + logger.error("resource file not exist, resource id {}", resourceId); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -518,7 +758,7 @@ public class ResourcesService extends BaseService { if (StringUtils.isNotEmpty(resourceViewSuffixs)) { List strList = Arrays.asList(resourceViewSuffixs.split(",")); if (!strList.contains(nameSuffix)) { - logger.error("resouce suffix {} not support view, resource id {}", nameSuffix, resourceId); + logger.error("resource suffix {} not support view, resource id {}", nameSuffix, resourceId); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; } @@ -530,7 +770,7 @@ public class ResourcesService extends BaseService { } // hdfs path - String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); + String hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resource.getFullName()); logger.info("resource hdfs path is {} ", hdfsFileName); try { if(HadoopUtils.getInstance().exists(hdfsFileName)){ @@ -566,7 +806,7 @@ public class ResourcesService extends BaseService { * @return create result code */ @Transactional(rollbackFor = Exception.class) - public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content) { + public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory) { Result result = new Result(); // if resource upload startup if (!PropertyUtils.getResUploadStartupState()){ @@ -588,15 +828,16 @@ public class ResourcesService extends BaseService { } String name = fileName.trim() + "." + nameSuffix; + String fullName = currentDirectory.equals("/") ? String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name); - result = verifyResourceName(name,type,loginUser); + result = verifyResourceName(fullName,type,loginUser); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } // save data Date now = new Date(); - Resource resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now); + Resource resource = new Resource(pid,name,fullName,false,desc,name,loginUser.getId(),type,content.getBytes().length,now,now); resourcesMapper.insert(resource); @@ -612,7 +853,7 @@ public class ResourcesService extends BaseService { String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); - result = uploadContentToHdfs(name, tenantCode, content); + result = uploadContentToHdfs(fullName, tenantCode, content); if (!result.getCode().equals(Status.SUCCESS.getCode())) { throw new RuntimeException(result.getMsg()); } @@ -664,7 +905,7 @@ public class ResourcesService extends BaseService { resourcesMapper.updateById(resource); - result = uploadContentToHdfs(resource.getAlias(), tenantCode, content); + result = uploadContentToHdfs(resource.getFullName(), tenantCode, content); if (!result.getCode().equals(Status.SUCCESS.getCode())) { throw new RuntimeException(result.getMsg()); } @@ -672,10 +913,10 @@ public class ResourcesService extends BaseService { } /** - * @param resourceName - * @param tenantCode - * @param content - * @return + * @param resourceName resource name + * @param tenantCode tenant code + * @param content content + * @return result */ private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) { Result result = new Result(); @@ -691,8 +932,8 @@ public class ResourcesService extends BaseService { return result; } - // get file hdfs path - hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resourceName); + // get resource file hdfs path + hdfsFileName = HadoopUtils.getHdfsResourceFileName(tenantCode, resourceName); String resourcePath = HadoopUtils.getHdfsResDir(tenantCode); logger.info("resource hdfs path is {} ", hdfsFileName); @@ -736,21 +977,50 @@ public class ResourcesService extends BaseService { logger.error("download file not exist, resource id {}", resourceId); return null; } + if (resource.isDirectory()) { + logger.error("resource id {} is directory,can't download it", resourceId); + throw new RuntimeException("cant't download directory"); + } User user = userMapper.queryDetailsById(resource.getUserId()); String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); - String hdfsFileName = ""; - hdfsFileName = getHdfsFileName(resource, tenantCode, hdfsFileName); + String hdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(), tenantCode, resource.getAlias()); String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); logger.info("resource hdfs path is {} ", hdfsFileName); HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); - org.springframework.core.io.Resource file = org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); - return file; + return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); } + /** + * list all file + * + * @param loginUser login user + * @param userId user id + * @return unauthorized result code + */ + public Map authorizeResourceTree(User loginUser, Integer userId) { + + Map result = new HashMap<>(); + if (checkAdmin(loginUser, result)) { + return result; + } + List resourceList = resourcesMapper.queryResourceExceptUserId(userId); + List list ; + if (CollectionUtils.isNotEmpty(resourceList)) { + Visitor visitor = new ResourceTreeVisitor(resourceList); + list = visitor.visit().getChildren(); + }else { + list = new ArrayList<>(0); + } + + result.put(Constants.DATA_LIST, list); + putMsg(result,Status.SUCCESS); + return result; + } + /** * unauthorized file * @@ -765,7 +1035,7 @@ public class ResourcesService extends BaseService { return result; } List resourceList = resourcesMapper.queryResourceExceptUserId(userId); - List list ; + List list ; if (resourceList != null && resourceList.size() > 0) { Set resourceSet = new HashSet<>(resourceList); List authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); @@ -775,15 +1045,12 @@ public class ResourcesService extends BaseService { }else { list = new ArrayList<>(0); } - - result.put(Constants.DATA_LIST, list); + Visitor visitor = new ResourceTreeVisitor(list); + result.put(Constants.DATA_LIST, visitor.visit().getChildren()); putMsg(result,Status.SUCCESS); return result; } - - - /** * unauthorized udf function * @@ -801,7 +1068,7 @@ public class ResourcesService extends BaseService { List udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); List resultList = new ArrayList<>(); Set udfFuncSet = null; - if (udfFuncList != null && udfFuncList.size() > 0) { + if (CollectionUtils.isNotEmpty(udfFuncList)) { udfFuncSet = new HashSet<>(udfFuncList); List authedUDFFuncList = udfFunctionMapper.queryAuthedUdfFunc(userId); @@ -849,46 +1116,15 @@ public class ResourcesService extends BaseService { return result; } List authedResources = resourcesMapper.queryAuthorizedResourceList(userId); - - result.put(Constants.DATA_LIST, authedResources); + Visitor visitor = new ResourceTreeVisitor(authedResources); + logger.info(JSON.toJSONString(visitor.visit(), SerializerFeature.SortField)); + String jsonTreeStr = JSON.toJSONString(visitor.visit().getChildren(), SerializerFeature.SortField); + logger.info(jsonTreeStr); + result.put(Constants.DATA_LIST, visitor.visit().getChildren()); putMsg(result,Status.SUCCESS); return result; } - /** - * get hdfs file name - * - * @param resource resource - * @param tenantCode tenant code - * @param hdfsFileName hdfs file name - * @return hdfs file name - */ - private String getHdfsFileName(Resource resource, String tenantCode, String hdfsFileName) { - if (resource.getType().equals(ResourceType.FILE)) { - hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); - } else if (resource.getType().equals(ResourceType.UDF)) { - hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, resource.getAlias()); - } - return hdfsFileName; - } - - /** - * get hdfs file name - * - * @param resourceType resource type - * @param tenantCode tenant code - * @param hdfsFileName hdfs file name - * @return hdfs file name - */ - private String getHdfsFileName(ResourceType resourceType, String tenantCode, String hdfsFileName) { - if (resourceType.equals(ResourceType.FILE)) { - hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, hdfsFileName); - } else if (resourceType.equals(ResourceType.UDF)) { - hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, hdfsFileName); - } - return hdfsFileName; - } - /** * get authorized resource list * @@ -897,10 +1133,9 @@ public class ResourcesService extends BaseService { */ private void getAuthorizedResourceList(Set resourceSet, List authedResourceList) { Set authedResourceSet = null; - if (authedResourceList != null && authedResourceList.size() > 0) { + if (CollectionUtils.isNotEmpty(authedResourceList)) { authedResourceSet = new HashSet<>(authedResourceList); resourceSet.removeAll(authedResourceSet); - } } @@ -929,4 +1164,36 @@ public class ResourcesService extends BaseService { return tenant.getTenantCode(); } + /** + * list all children id + * @param resource resource + * @param containSelf whether add self to children list + * @return all children id + */ + List listAllChildren(Resource resource,boolean containSelf){ + List childList = new ArrayList<>(); + if (resource.getId() != -1 && containSelf) { + childList.add(resource.getId()); + } + + if(resource.isDirectory()){ + listAllChildren(resource.getId(),childList); + } + return childList; + } + + /** + * list all children id + * @param resourceId resource id + * @param childList child list + */ + void listAllChildren(int resourceId,List childList){ + + List children = resourcesMapper.listChildren(resourceId); + for(int chlidId:children){ + childList.add(chlidId); + listAllChildren(chlidId,childList); + } + } + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index 72122100a1..cb07ffbbe3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -92,7 +92,7 @@ public class SchedulerService extends BaseService { * @param processInstancePriority process instance priority * @param receivers receivers * @param receiversCc receivers cc - * @param workerGroupId worker group id + * @param workerGroup worker group * @return create result code * @throws IOException ioexception */ @@ -106,7 +106,7 @@ public class SchedulerService extends BaseService { String receivers, String receiversCc, Priority processInstancePriority, - int workerGroupId) throws IOException { + String workerGroup) throws IOException { Map result = new HashMap(5); @@ -156,7 +156,7 @@ public class SchedulerService extends BaseService { scheduleObj.setUserName(loginUser.getUserName()); scheduleObj.setReleaseState(ReleaseState.OFFLINE); scheduleObj.setProcessInstancePriority(processInstancePriority); - scheduleObj.setWorkerGroupId(workerGroupId); + scheduleObj.setWorkerGroup(workerGroup); scheduleMapper.insert(scheduleObj); /** @@ -182,7 +182,7 @@ public class SchedulerService extends BaseService { * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy - * @param workerGroupId worker group id + * @param workerGroup worker group * @param processInstancePriority process instance priority * @param receiversCc receiver cc * @param receivers receivers @@ -202,7 +202,7 @@ public class SchedulerService extends BaseService { String receiversCc, ReleaseState scheduleStatus, Priority processInstancePriority, - int workerGroupId) throws IOException { + String workerGroup) throws IOException { Map result = new HashMap(5); Project project = projectMapper.queryByName(projectName); @@ -266,7 +266,7 @@ public class SchedulerService extends BaseService { if (scheduleStatus != null) { schedule.setReleaseState(scheduleStatus); } - schedule.setWorkerGroupId(workerGroupId); + schedule.setWorkerGroup(workerGroup); schedule.setUpdateTime(now); schedule.setProcessInstancePriority(processInstancePriority); scheduleMapper.updateById(schedule); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index e4fec54395..170278e02f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -32,8 +32,6 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -46,8 +44,6 @@ import java.util.*; @Service public class TaskInstanceService extends BaseService { - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceService.class); - @Autowired ProjectMapper projectMapper; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java index 54b6a1889c..54eba5c2d6 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskRecordService.java @@ -21,8 +21,6 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.TaskRecordDao; import org.apache.dolphinscheduler.dao.entity.TaskRecord; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; import java.util.HashMap; @@ -37,8 +35,6 @@ import static org.apache.dolphinscheduler.common.Constants.*; @Service public class TaskRecordService extends BaseService{ - private static final Logger logger = LoggerFactory.getLogger(TaskRecordService.class); - /** * query task record list paging * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java index 12b4656a40..2fded4d32f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -310,7 +310,7 @@ public class TenantService extends BaseService{ Map result = new HashMap<>(5); List resourceList = tenantMapper.queryByTenantCode(tenantCode); - if (resourceList != null && resourceList.size() > 0) { + if (CollectionUtils.isNotEmpty(resourceList)) { result.put(Constants.DATA_LIST, resourceList); putMsg(result, Status.SUCCESS); } else { @@ -346,6 +346,6 @@ public class TenantService extends BaseService{ */ private boolean checkTenantExists(String tenantCode) { List tenants = tenantMapper.queryByTenantCode(tenantCode); - return (tenants != null && tenants.size() > 0); + return CollectionUtils.isNotEmpty(tenants); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java index 249c7ec8df..8a0bf748bb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java @@ -118,7 +118,7 @@ public class UdfFuncService extends BaseService{ } udf.setDescription(desc); udf.setResourceId(resourceId); - udf.setResourceName(resource.getAlias()); + udf.setResourceName(resource.getFullName()); udf.setType(type); udf.setCreateTime(now); @@ -226,7 +226,7 @@ public class UdfFuncService extends BaseService{ } udf.setDescription(desc); udf.setResourceId(resourceId); - udf.setResourceName(resource.getAlias()); + udf.setResourceName(resource.getFullName()); udf.setType(type); udf.setUpdateTime(now); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UserAlertGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UserAlertGroupService.java new file mode 100644 index 0000000000..502185709f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UserAlertGroupService.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import org.apache.dolphinscheduler.dao.entity.UserAlertGroup; +import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +/** + * + */ +@Service +public class UserAlertGroupService extends ServiceImpl { + + @Autowired + private UserAlertGroupMapper userAlertGroupMapper; + + boolean deleteByAlertGroupId(Integer groupId) { + return userAlertGroupMapper.deleteByAlertgroupId(groupId) >= 1; + } + +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index 1e5ec9e369..220b4fc4d0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -16,6 +16,8 @@ */ package org.apache.dolphinscheduler.api.service; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.CheckUtils; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -23,15 +25,10 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.EncryptionUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -39,6 +36,7 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.*; +import java.util.stream.Collectors; /** * user service @@ -72,6 +70,9 @@ public class UsersService extends BaseService { @Autowired private AlertGroupMapper alertGroupMapper; + @Autowired + private ProcessDefinitionMapper processDefinitionMapper; + /** * create user, only system admin have permission @@ -422,6 +423,7 @@ public class UsersService extends BaseService { * @param projectIds project id array * @return grant result code */ + @Transactional(rollbackFor = Exception.class) public Map grantProject(User loginUser, int userId, String projectIds) { Map result = new HashMap<>(5); result.put(Constants.STATUS, false); @@ -471,6 +473,7 @@ public class UsersService extends BaseService { * @param resourceIds resource id array * @return grant result code */ + @Transactional(rollbackFor = Exception.class) public Map grantResources(User loginUser, int userId, String resourceIds) { Map result = new HashMap<>(5); //only admin can operate @@ -483,23 +486,74 @@ public class UsersService extends BaseService { return result; } + Set needAuthorizeResIds = new HashSet(); + if (StringUtils.isNotBlank(resourceIds)) { + String[] resourceFullIdArr = resourceIds.split(","); + // need authorize resource id set + for (String resourceFullId : resourceFullIdArr) { + String[] resourceIdArr = resourceFullId.split("-"); + for (int i=0;i<=resourceIdArr.length-1;i++) { + int resourceIdValue = Integer.parseInt(resourceIdArr[i]); + needAuthorizeResIds.add(resourceIdValue); + } + } + } + + + //get the authorized resource id list by user id + List oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId); + //if resource type is UDF,need check whether it is bound by UDF functon + Set oldAuthorizedResIds = oldAuthorizedRes.stream().map(t -> t.getId()).collect(Collectors.toSet()); + + //get the unauthorized resource id list + oldAuthorizedResIds.removeAll(needAuthorizeResIds); + + if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) { + + // get all resource id of process definitions those is released + List> list = processDefinitionMapper.listResources(); + Map> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list); + Set resourceIdSet = resourceProcessMap.keySet(); + + resourceIdSet.retainAll(oldAuthorizedResIds); + if (CollectionUtils.isNotEmpty(resourceIdSet)) { + logger.error("can't be deleted,because it is used of process definition"); + for (Integer resId : resourceIdSet) { + logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId)); + } + putMsg(result, Status.RESOURCE_IS_USED); + return result; + } + + } + resourcesUserMapper.deleteResourceUser(userId, 0); if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) { return result; } - String[] resourcesIdArr = resourceIds.split(","); + for (int resourceIdValue : needAuthorizeResIds) { + Resource resource = resourceMapper.selectById(resourceIdValue); + if (resource == null) { + putMsg(result, Status.RESOURCE_NOT_EXIST); + return result; + } - for (String resourceId : resourcesIdArr) { Date now = new Date(); ResourcesUser resourcesUser = new ResourcesUser(); resourcesUser.setUserId(userId); - resourcesUser.setResourcesId(Integer.parseInt(resourceId)); - resourcesUser.setPerm(7); + resourcesUser.setResourcesId(resourceIdValue); + if (resource.isDirectory()) { + resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM); + }else{ + resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); + } + resourcesUser.setCreateTime(now); resourcesUser.setUpdateTime(now); resourcesUserMapper.insert(resourcesUser); + } putMsg(result, Status.SUCCESS); @@ -516,6 +570,7 @@ public class UsersService extends BaseService { * @param udfIds udf id array * @return grant result code */ + @Transactional(rollbackFor = Exception.class) public Map grantUDFFunction(User loginUser, int userId, String udfIds) { Map result = new HashMap<>(5); @@ -562,6 +617,7 @@ public class UsersService extends BaseService { * @param datasourceIds data source id array * @return grant result code */ + @Transactional(rollbackFor = Exception.class) public Map grantDataSource(User loginUser, int userId, String datasourceIds) { Map result = new HashMap<>(5); result.put(Constants.STATUS, false); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java index c44c446d5c..2416fb7828 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java @@ -28,14 +28,12 @@ import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** * work group service @@ -50,6 +48,9 @@ public class WorkerGroupService extends BaseService { @Autowired ProcessInstanceMapper processInstanceMapper; + @Autowired + protected ZookeeperCachedOperator zookeeperCachedOperator; + /** * create or update a worker group * @@ -111,7 +112,7 @@ public class WorkerGroupService extends BaseService { List workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName()); - if(workerGroupList.size() > 0 ){ + if(CollectionUtils.isNotEmpty(workerGroupList)){ // new group has same name.. if(workerGroup.getId() == 0){ return true; @@ -180,9 +181,22 @@ public class WorkerGroupService extends BaseService { * @return all worker group list */ public Map queryAllGroup() { - Map result = new HashMap<>(5); - List workerGroupList = workerGroupMapper.queryAllWorkerGroup(); - result.put(Constants.DATA_LIST, workerGroupList); + Map result = new HashMap<>(); + String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot()+"/nodes" +"/worker"; + List workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath); + + // available workerGroup list + List availableWorkerGroupList = new ArrayList<>(); + + for (String workerGroup : workerGroupList){ + String workerGroupPath= workerPath + "/" + workerGroup; + List childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath); + if (CollectionUtils.isNotEmpty(childrenNodes)){ + availableWorkerGroupList.add(workerGroup); + } + } + + result.put(Constants.DATA_LIST, availableWorkerGroupList); putMsg(result, Status.SUCCESS); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java index 6ab9512286..eacdecf166 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/Result.java @@ -16,6 +16,10 @@ */ package org.apache.dolphinscheduler.api.utils; +import org.apache.dolphinscheduler.api.enums.Status; + +import java.text.MessageFormat; + /** * result * @@ -37,13 +41,58 @@ public class Result { */ private T data; - public Result(){} + public Result() { + } - public Result(Integer code , String msg){ + public Result(Integer code, String msg) { this.code = code; this.msg = msg; } + private Result(T data) { + this.code = 0; + this.data = data; + } + + private Result(Status status) { + if (status != null) { + this.code = status.getCode(); + this.msg = status.getMsg(); + } + } + + /** + * Call this function if there is success + * + * @param data data + * @param type + * @return resule + */ + public static Result success(T data) { + return new Result<>(data); + } + + /** + * Call this function if there is any error + * + * @param status status + * @return result + */ + public static Result error(Status status) { + return new Result(status); + } + + /** + * Call this function if there is any error + * + * @param status status + * @param args args + * @return result + */ + public static Result errorWithArgs(Status status, Object... args) { + return new Result(status.getCode(), MessageFormat.format(status.getMsg(), args)); + } + public Integer getCode() { return code; } diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index a9b7c84d28..369e5e3c72 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -166,15 +166,16 @@ SIGNOUT_NOTES=logout USER_PASSWORD=user password UPDATE_PROCESS_INSTANCE_NOTES=update process instance QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name +VERIFY_PROCESS_DEFINITION_NAME_NOTES=verify process definition name LOGIN_NOTES=user login -UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition +UPDATE_PROCESS_DEFINITION_NOTES=update process definition PROCESS_DEFINITION_ID=process definition id PROCESS_DEFINITION_IDS=process definition ids -RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id -QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging +RELEASE_PROCESS_DEFINITION_NOTES=release process definition +QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id +COPY_PROCESS_DEFINITION_NOTES=copy process definition +QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list +QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list PAGE_NO=page no PROCESS_INSTANCE_ID=process instance id @@ -190,7 +191,7 @@ LIMIT=limit VIEW_TREE_NOTES=view tree GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id PROCESS_DEFINITION_ID_LIST=process definition id list -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id +QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query process definition all by project id DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 6035bb6e18..92df742613 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -166,15 +166,16 @@ SIGNOUT_NOTES=logout USER_PASSWORD=user password UPDATE_PROCESS_INSTANCE_NOTES=update process instance QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name +VERIFY_PROCESS_DEFINITION_NAME_NOTES=verify process definition name LOGIN_NOTES=user login -UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition +UPDATE_PROCESS_DEFINITION_NOTES=update process definition PROCESS_DEFINITION_ID=process definition id PROCESS_DEFINITION_IDS=process definition ids -RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id -QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging +RELEASE_PROCESS_DEFINITION_NOTES=release process definition +QUERY_PROCESS_DEFINITION_BY_ID_NOTES=query process definition by id +COPY_PROCESS_DEFINITION_NOTES=copy process definition +QUERY_PROCESS_DEFINITION_LIST_NOTES=query process definition list +QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=query process definition list paging QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list PAGE_NO=page no PROCESS_INSTANCE_ID=process instance id @@ -190,7 +191,7 @@ LIMIT=limit VIEW_TREE_NOTES=view tree GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id PROCESS_DEFINITION_ID_LIST=process definition id list -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id +QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query process definition all by project id DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index 597d1a884e..3b427912b5 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -165,14 +165,15 @@ SIGNOUT_NOTES=退出登录 USER_PASSWORD=用户密码 UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=验证流程定义名字 +VERIFY_PROCESS_DEFINITION_NAME_NOTES=验证流程定义名字 LOGIN_NOTES=用户登录 -UPDATE_PROCCESS_DEFINITION_NOTES=更新流程定义 +UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义 PROCESS_DEFINITION_ID=流程定义ID -RELEASE_PROCCESS_DEFINITION_NOTES=发布流程定义 -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID -QUERY_PROCCESS_DEFINITION_LIST_NOTES=查询流程定义列表 -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 +RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义 +QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID +COPY_PROCESS_DEFINITION_NOTES=复制流程定义 +QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表 +QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义 PAGE_NO=页码号 PROCESS_INSTANCE_ID=流程实例ID @@ -188,7 +189,7 @@ LIMIT=显示多少条 VIEW_TREE_NOTES=树状图 GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID PROCESS_DEFINITION_ID_LIST=流程定义id列表 -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID +QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=批量删除流程定义通过流程定义ID集合 DELETE_PROCESS_DEFINITION_BY_ID_NOTES=删除流程定义通过流程定义ID QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID diff --git a/dolphinscheduler-api/src/main/resources/logback-api.xml b/dolphinscheduler-api/src/main/resources/logback-api.xml new file mode 100644 index 0000000000..2df90d8392 --- /dev/null +++ b/dolphinscheduler-api/src/main/resources/logback-api.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + ${log.base}/dolphinscheduler-api-server.log + + INFO + + + ${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log + 168 + 64MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java index 47946d4af5..a219343371 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/AccessTokenControllerTest.java @@ -56,6 +56,23 @@ public class AccessTokenControllerTest extends AbstractControllerTest{ logger.info(mvcResult.getResponse().getContentAsString()); } + @Test + public void testExceptionHandler() throws Exception { + MultiValueMap paramsMap = new LinkedMultiValueMap<>(); + paramsMap.add("userId","-1"); + paramsMap.add("expireTime","2019-12-18 00:00:00"); + paramsMap.add("token","507f5aeaaa2093dbdff5d5522ce00510"); + MvcResult mvcResult = mockMvc.perform(post("/access-token/create") + .header("sessionId", sessionId) + .params(paramsMap)) + .andExpect(status().isOk()) + .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) + .andReturn(); + Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); + Assert.assertEquals(Status.CREATE_ACCESS_TOKEN_ERROR.getCode(), result.getCode().intValue()); + logger.info(mvcResult.getResponse().getContentAsString()); + } + @Test public void testGenerateToken() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java index f80ce8556e..5ed7310c47 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataSourceControllerTest.java @@ -39,6 +39,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. * data source controller test */ public class DataSourceControllerTest extends AbstractControllerTest{ + private static Logger logger = LoggerFactory.getLogger(DataSourceControllerTest.class); @Ignore @@ -95,6 +96,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{ + @Ignore @Test public void testQueryDataSource() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -169,6 +171,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{ } + @Ignore @Test public void testConnectionTest() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); @@ -248,6 +251,7 @@ public class DataSourceControllerTest extends AbstractControllerTest{ + @Ignore @Test public void testDelete() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java index f5a28d01ae..5be7b0711c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/LoginControllerTest.java @@ -36,7 +36,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. * login controller test */ public class LoginControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(LoginControllerTest.class); @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java index 7b4e2595f7..a69df9744e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -17,314 +17,274 @@ package org.apache.dolphinscheduler.api.controller; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ReleaseState; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.*; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** * process definition controller test */ -public class ProcessDefinitionControllerTest extends AbstractControllerTest{ +@RunWith(MockitoJUnitRunner.Silent.class) +public class ProcessDefinitionControllerTest{ private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class); + @InjectMocks + private ProcessDefinitionController processDefinitionController; + + @Mock + private ProcessDefinitionService processDefinitionService; + + protected User user; + + @Before + public void before(){ + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + loginUser.setUserName("admin"); + + user = loginUser; + } + @Test public void testCreateProcessDefinition() throws Exception { String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","dag_test"); - paramsMap.add("processDefinitionJson",json); - paramsMap.add("locations", locations); - paramsMap.add("connects", "[]"); - paramsMap.add("description", "desc test"); - - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/save","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } + String projectName = "test"; + String name = "dag_test"; + String description = "desc test"; + String connects = "[]"; + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + result.put("processDefinitionId",1); + Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json, + description, locations, connects)).thenReturn(result); - @Test - public void testVerifyProccessDefinitionName() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","dag_test"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/verify-name","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json, + locations, connects, description); + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } - @Test - public void testVerifyProccessDefinitionNameNotExit() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","dag_test_1"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/verify-name","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + private void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } } + @Test + public void testVerifyProcessDefinitionName() throws Exception { + + Map result = new HashMap<>(5); + putMsg(result, Status.PROCESS_INSTANCE_EXIST); + String projectName = "test"; + String name = "dag_test"; + + Mockito.when(processDefinitionService.verifyProcessDefinitionName(user,projectName,name)).thenReturn(result); + Result response = processDefinitionController.verifyProcessDefinitionName(user,projectName,name); + Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(),response.getCode().intValue()); + + } @Test - public void UpdateProccessDefinition() throws Exception { + public void UpdateProcessDefinition() throws Exception { + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; - - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("name","dag_test_update"); - paramsMap.add("id","91"); - paramsMap.add("processDefinitionJson",json); - paramsMap.add("locations", locations); - paramsMap.add("connects", "[]"); - paramsMap.add("description", "desc test update"); - - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/update","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + String projectName = "test"; + String name = "dag_test"; + String description = "desc test"; + String connects = "[]"; + int id = 1; + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + result.put("processDefinitionId",1); + + Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id,name, json, + description, locations, connects)).thenReturn(result); + + Result response = processDefinitionController.updateProcessDefinition(user, projectName, name,id, json, + locations, connects, description); + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } - @Test - public void testReleaseProccessDefinition() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processId","91"); - paramsMap.add("releaseState",String.valueOf(ReleaseState.OFFLINE)); - - MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/release","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + public void testReleaseProcessDefinition() throws Exception { + String projectName = "test"; + int id = 1; + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + + Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal())).thenReturn(result); + Result response = processDefinitionController.releaseProcessDefinition(user, projectName,id,ReleaseState.OFFLINE.ordinal()); + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } - @Test - public void testQueryProccessDefinitionById() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processId","91"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/select-by-id","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + public void testQueryProcessDefinitionById() throws Exception { + + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; + String projectName = "test"; + String name = "dag_test"; + String description = "desc test"; + String connects = "[]"; + int id = 1; + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectName(projectName); + processDefinition.setConnects(connects); + processDefinition.setDescription(description); + processDefinition.setId(id); + processDefinition.setLocations(locations); + processDefinition.setName(name); + processDefinition.setProcessDefinitionJson(json); + + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, processDefinition); + + Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName,id)).thenReturn(result); + Result response = processDefinitionController.queryProcessDefinitionById(user, projectName,id); + + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } @Test - public void testQueryProccessDefinitionList() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/list","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } + public void testCopyProcessDefinition() throws Exception { + String projectName = "test"; + int id = 1; + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); - @Test - public void testQueryProcessDefinitionListPaging() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("pageNo","1"); - paramsMap.add("searchVal","test"); - paramsMap.add("userId",""); - paramsMap.add("pageSize", "1"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/list-paging","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } + Mockito.when(processDefinitionService.copyProcessDefinition(user, projectName,id)).thenReturn(result); + Result response = processDefinitionController.copyProcessDefinition(user, projectName,id); - @Test - public void testViewTree() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processId","91"); - paramsMap.add("limit","30"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/view-tree","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } - @Test - public void testGetNodeListByDefinitionId() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionId","40"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/gen-task-list","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } @Test - public void testGetNodeListByDefinitionIdList() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionIdList","40,90,91"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/get-task-list","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } + public void testQueryProcessDefinitionList() throws Exception { + String projectName = "test"; + List resourceList = getDefinitionList(); + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, resourceList); - @Ignore - @Test - public void testExportProcessDefinitionById() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionId","91"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/export","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) -// .andExpect(status().isOk()) -// .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); - } + Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result); + Result response = processDefinitionController.queryProcessDefinitionList(user, projectName); - @Test - public void testQueryProccessDefinitionAllByProjectId() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("projectId","9"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/queryProccessDefinitionAllByProjectId","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } + public List getDefinitionList(){ + + List resourceList = new ArrayList<>(); + String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"; + String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"; + String projectName = "test"; + String name = "dag_test"; + String description = "desc test"; + String connects = "[]"; + int id = 1; + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProjectName(projectName); + processDefinition.setConnects(connects); + processDefinition.setDescription(description); + processDefinition.setId(id); + processDefinition.setLocations(locations); + processDefinition.setName(name); + processDefinition.setProcessDefinitionJson(json); + + String name2 = "dag_test"; + int id2 = 2; + + ProcessDefinition processDefinition2 = new ProcessDefinition(); + processDefinition2.setProjectName(projectName); + processDefinition2.setConnects(connects); + processDefinition2.setDescription(description); + processDefinition2.setId(id2); + processDefinition2.setLocations(locations); + processDefinition2.setName(name2); + processDefinition2.setProcessDefinitionJson(json); + + resourceList.add(processDefinition); + resourceList.add(processDefinition2); + + return resourceList; + } @Test public void testDeleteProcessDefinitionById() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionId","73"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/delete","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + String projectName = "test"; + int id = 1; + + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + + Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName,id)).thenReturn(result); + Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName,id); + + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); + } + + @Test + public void testGetNodeListByDefinitionId() throws Exception { + String projectName = "test"; + int id = 1; + + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + + Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result); + Result response = processDefinitionController.getNodeListByDefinitionId(user,projectName,id); + + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } @Test - public void testBatchDeleteProcessDefinitionByIds() throws Exception { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - paramsMap.add("processDefinitionIds","54,62"); - - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/process/batch-delete","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); - - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); + public void testGetNodeListByDefinitionIdList() throws Exception { + String projectName = "test"; + String idList = "1,2,3"; + + Map result = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + + Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result); + Result response = processDefinitionController.getNodeListByDefinitionIdList(user,projectName,idList); + + Assert.assertEquals(Status.SUCCESS.getCode(),response.getCode().intValue()); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java index 40bdd5490d..a56e3f83ef 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ResourcesControllerTest.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.api.controller; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.enums.ResourceType; @@ -54,7 +55,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -78,7 +79,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -281,7 +282,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -303,7 +304,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -324,7 +325,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -344,7 +345,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -365,7 +366,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -386,7 +387,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -406,7 +407,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -427,7 +428,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); @@ -446,7 +447,7 @@ public class ResourcesControllerTest extends AbstractControllerTest{ Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); result.getCode().equals(Status.SUCCESS.getCode()); - JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString()); + JSONObject object = (JSONObject) JSON.parse(mvcResult.getResponse().getContentAsString()); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java index 943e14607b..ad4a165ca1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskRecordControllerTest.java @@ -33,7 +33,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; public class TaskRecordControllerTest extends AbstractControllerTest { - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class); + private static final Logger logger = LoggerFactory.getLogger(TaskRecordControllerTest.class); @Test public void testQueryTaskRecordListPaging() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java index d1be6cb382..0798151299 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/UsersControllerTest.java @@ -37,7 +37,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. * users controller test */ public class UsersControllerTest extends AbstractControllerTest{ - private static Logger logger = LoggerFactory.getLogger(QueueControllerTest.class); + private static Logger logger = LoggerFactory.getLogger(UsersControllerTest.class); @Test public void testCreateUser() throws Exception { diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilterTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilterTest.java new file mode 100644 index 0000000000..8a4a16c4f0 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/filter/ResourceFilterTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto.resources.filter; + +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +/** + * resource filter test + */ +public class ResourceFilterTest { + private static Logger logger = LoggerFactory.getLogger(ResourceFilterTest.class); + @Test + public void filterTest(){ + List allList = new ArrayList<>(); + + Resource resource1 = new Resource(3,-1,"b","/b",true); + Resource resource2 = new Resource(4,2,"a1.txt","/a/a1.txt",false); + Resource resource3 = new Resource(5,3,"b1.txt","/b/b1.txt",false); + Resource resource4 = new Resource(6,3,"b2.jar","/b/b2.jar",false); + Resource resource5 = new Resource(7,-1,"b2","/b2",true); + Resource resource6 = new Resource(8,-1,"b2","/b/b2",true); + Resource resource7 = new Resource(9,8,"c2.jar","/b/b2/c2.jar",false); + allList.add(resource1); + allList.add(resource2); + allList.add(resource3); + allList.add(resource4); + allList.add(resource5); + allList.add(resource6); + allList.add(resource7); + + + ResourceFilter resourceFilter = new ResourceFilter(".jar",allList); + List resourceList = resourceFilter.filter(); + Assert.assertNotNull(resourceList); + resourceList.stream().forEach(t-> logger.info(t.toString())); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitorTest.java new file mode 100644 index 0000000000..d1f8a12012 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/dto/resources/visitor/ResourceTreeVisitorTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.dto.resources.visitor; + +import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.junit.Assert; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +/** + * resource tree visitor test + */ +public class ResourceTreeVisitorTest { + + @Test + public void visit() throws Exception { + List resourceList = new ArrayList<>(); + + Resource resource1 = new Resource(3,-1,"b","/b",true); + Resource resource2 = new Resource(4,2,"a1.txt","/a/a1.txt",false); + Resource resource3 = new Resource(5,3,"b1.txt","/b/b1.txt",false); + Resource resource4 = new Resource(6,3,"b2.jar","/b/b2.jar",false); + Resource resource5 = new Resource(7,-1,"b2","/b2",true); + Resource resource6 = new Resource(8,-1,"b2","/b/b2",true); + Resource resource7 = new Resource(9,8,"c2.jar","/b/b2/c2.jar",false); + resourceList.add(resource1); + resourceList.add(resource2); + resourceList.add(resource3); + resourceList.add(resource4); + resourceList.add(resource5); + resourceList.add(resource6); + resourceList.add(resource7); + + ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(resourceList); + ResourceComponent resourceComponent = resourceTreeVisitor.visit(); + Assert.assertNotNull(resourceComponent.getChildren()); + } + + @Test + public void rootNode() throws Exception { + List resourceList = new ArrayList<>(); + + Resource resource1 = new Resource(3,-1,"b","/b",true); + Resource resource2 = new Resource(4,2,"a1.txt","/a/a1.txt",false); + Resource resource3 = new Resource(5,3,"b1.txt","/b/b1.txt",false); + Resource resource4 = new Resource(6,3,"b2.jar","/b/b2.jar",false); + Resource resource5 = new Resource(7,-1,"b2","/b2",true); + Resource resource6 = new Resource(8,-1,"b2","/b/b2",true); + Resource resource7 = new Resource(9,8,"c2.jar","/b/b2/c2.jar",false); + resourceList.add(resource1); + resourceList.add(resource2); + resourceList.add(resource3); + resourceList.add(resource4); + resourceList.add(resource5); + resourceList.add(resource6); + resourceList.add(resource7); + + ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(resourceList); + Assert.assertTrue(resourceTreeVisitor.rootNode(resource1)); + Assert.assertTrue(resourceTreeVisitor.rootNode(resource2)); + Assert.assertFalse(resourceTreeVisitor.rootNode(resource3)); + + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java index 0c9ddff791..4e31a71e9d 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/enums/StatusTest.java @@ -28,7 +28,7 @@ public class StatusTest { @Test public void testGetCode() { - assertEquals(Status.SUCCESS.getCode(), 0); + assertEquals(0, Status.SUCCESS.getCode()); assertNotEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), 0); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java new file mode 100644 index 0000000000..95cd96d08e --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.exceptions; + +import org.apache.dolphinscheduler.api.controller.AccessTokenController; +import org.apache.dolphinscheduler.api.controller.ProcessDefinitionController; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; +import org.junit.Assert; +import org.junit.Test; +import org.springframework.web.method.HandlerMethod; + +import javax.servlet.http.HttpServletResponse; +import java.lang.reflect.Method; + +import static org.junit.Assert.*; + +public class ApiExceptionHandlerTest { + + @Test + public void exceptionHandler() throws NoSuchMethodException { + ApiExceptionHandler handler = new ApiExceptionHandler(); + AccessTokenController controller = new AccessTokenController(); + Method method = controller.getClass().getMethod("createToken", User.class, int.class, String.class, String.class); + HandlerMethod hm = new HandlerMethod(controller, method); + Result result = handler.exceptionHandler(new RuntimeException("test exception"), hm); + Assert.assertEquals(Status.CREATE_ACCESS_TOKEN_ERROR.getCode(),result.getCode().intValue()); + } + + @Test + public void exceptionHandlerRuntime() throws NoSuchMethodException { + ApiExceptionHandler handler = new ApiExceptionHandler(); + ProcessDefinitionController controller = new ProcessDefinitionController(); + Method method = controller.getClass().getMethod("exportProcessDefinitionById", User.class, String.class, Integer.class, HttpServletResponse.class); + HandlerMethod hm = new HandlerMethod(controller, method); + Result result = handler.exceptionHandler(new RuntimeException("test exception"), hm); + Assert.assertEquals(Status.INTERNAL_SERVER_ERROR_ARGS.getCode(),result.getCode().intValue()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ServiceExceptionTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ServiceExceptionTest.java new file mode 100644 index 0000000000..a574253d1d --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ServiceExceptionTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.exceptions; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.junit.Assert; +import org.junit.Test; + +public class ServiceExceptionTest { + @Test + public void getCodeTest(){ + ServiceException serviceException = new ServiceException(); + Assert.assertNull(serviceException.getCode()); + + serviceException = new ServiceException(Status.ALERT_GROUP_EXIST); + Assert.assertNotNull(serviceException.getCode()); + + serviceException = new ServiceException(10012, "alarm group already exists"); + Assert.assertNotNull(serviceException.getCode()); + } + @Test + public void getMessageTest(){ + ServiceException serviceException = new ServiceException(); + Assert.assertNull(serviceException.getMessage()); + + serviceException = new ServiceException(Status.ALERT_GROUP_EXIST); + Assert.assertNotNull(serviceException.getMessage()); + + serviceException = new ServiceException(10012, "alarm group already exists"); + Assert.assertNotNull(serviceException.getMessage()); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java index 4a31902af9..ab7dac4d60 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java @@ -18,9 +18,12 @@ package org.apache.dolphinscheduler.api.service; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; -import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AlertType; import org.apache.dolphinscheduler.common.enums.UserType; @@ -31,9 +34,12 @@ import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper; import org.junit.After; import org.junit.Assert; +import static org.junit.Assert.assertEquals; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import static org.mockito.ArgumentMatchers.*; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; @@ -41,14 +47,6 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; - @RunWith(MockitoJUnitRunner.class) public class AlertGroupServiceTest { @@ -60,6 +58,8 @@ public class AlertGroupServiceTest { private AlertGroupMapper alertGroupMapper; @Mock private UserAlertGroupMapper userAlertGroupMapper; + @Mock + UserAlertGroupService userAlertGroupService; private String groupName = "AlertGroupServiceTest"; @@ -160,25 +160,34 @@ public class AlertGroupServiceTest { } + @Test - public void testGrantUser(){ + public void testGrantUser() { + + Integer groupId = 1; + + ArgumentCaptor groupArgument = ArgumentCaptor.forClass(Integer.class); + + Mockito.when(userAlertGroupService.deleteByAlertGroupId(anyInt())).thenReturn(true); + + Map result = alertGroupService.grantUser(getLoginUser(), groupId, "123,321"); + Mockito.verify(userAlertGroupService).deleteByAlertGroupId(groupArgument.capture()); - Map result = alertGroupService.grantUser(getLoginUser(),1,"123,321"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + assertEquals(groupArgument.getValue(), groupId); + assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } + @Test - public void testVerifyGroupName(){ + public void testVerifyGroupName() { //group name not exist - Result result = alertGroupService.verifyGroupName(getLoginUser(), groupName); - logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); + boolean result = alertGroupService.existGroupName(groupName); + Assert.assertFalse(result); Mockito.when(alertGroupMapper.queryByGroupName(groupName)).thenReturn(getList()); //group name exist - result = alertGroupService.verifyGroupName(getLoginUser(), groupName); - logger.info(result.toString()); - Assert.assertEquals(Status.ALERT_GROUP_EXIST.getMsg(),result.getMsg()); + result = alertGroupService.existGroupName(groupName); + Assert.assertTrue(result); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index 6f308e7b17..14612fcef8 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -28,8 +28,6 @@ import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -47,7 +45,6 @@ import java.util.List; import java.util.Map; @RunWith(PowerMockRunner.class) -@PrepareForTest({TaskQueueFactory.class}) public class DataAnalysisServiceTest { @InjectMocks @@ -74,8 +71,7 @@ public class DataAnalysisServiceTest { @Mock TaskInstanceMapper taskInstanceMapper; - @Mock - ITaskQueue taskQueue; + @Mock ProcessService processService; @@ -118,9 +114,6 @@ public class DataAnalysisServiceTest { Map result = dataAnalysisService.countTaskStateByProject(user, 2, startDate, endDate); Assert.assertTrue(result.isEmpty()); - // task instance state count error - result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate); - Assert.assertEquals(Status.TASK_INSTANCE_STATE_COUNT_ERROR,result.get(Constants.STATUS)); //SUCCESS Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(DateUtils.getScheduleDate(startDate), @@ -141,10 +134,6 @@ public class DataAnalysisServiceTest { Map result = dataAnalysisService.countProcessInstanceStateByProject(user,2,startDate,endDate); Assert.assertTrue(result.isEmpty()); - //COUNT_PROCESS_INSTANCE_STATE_ERROR - result = dataAnalysisService.countProcessInstanceStateByProject(user,1,startDate,endDate); - Assert.assertEquals(Status.COUNT_PROCESS_INSTANCE_STATE_ERROR,result.get(Constants.STATUS)); - //SUCCESS Mockito.when(processInstanceMapper.countInstanceStateByUser(DateUtils.getScheduleDate(startDate), DateUtils.getScheduleDate(endDate), new Integer[]{1})).thenReturn(getTaskInstanceStateCounts()); @@ -183,30 +172,6 @@ public class DataAnalysisServiceTest { } - @Test - public void testCountQueueState(){ - - PowerMockito.mockStatic(TaskQueueFactory.class); - List taskQueueList = new ArrayList<>(1); - taskQueueList.add("1_0_1_1_-1"); - List taskKillList = new ArrayList<>(1); - taskKillList.add("1-0"); - PowerMockito.when(taskQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE)).thenReturn(taskQueueList); - PowerMockito.when(taskQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_KILL)).thenReturn(taskKillList); - PowerMockito.when(TaskQueueFactory.getTaskQueueInstance()).thenReturn(taskQueue); - //checkProject false - Map result = dataAnalysisService.countQueueState(user,2); - Assert.assertTrue(result.isEmpty()); - - result = dataAnalysisService.countQueueState(user,1); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); - //admin - user.setUserType(UserType.ADMIN_USER); - result = dataAnalysisService.countQueueState(user,1); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); - - } - /** * get list * @return diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java index 07d7477930..a8777541b7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java @@ -117,7 +117,7 @@ public class ExecutorService2Test { null, null, null, null, 0, "", "", RunMode.RUN_MODE_SERIAL, - Priority.LOW, 0, 110); + Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); }catch (Exception e){ @@ -138,7 +138,7 @@ public class ExecutorService2Test { null, null, null, null, 0, "", "", RunMode.RUN_MODE_SERIAL, - Priority.LOW, 0, 110); + Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS)); verify(processService, times(0)).createCommand(any(Command.class)); }catch (Exception e){ @@ -159,7 +159,7 @@ public class ExecutorService2Test { null, null, null, null, 0, "", "", RunMode.RUN_MODE_SERIAL, - Priority.LOW, 0, 110); + Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(1)).createCommand(any(Command.class)); }catch (Exception e){ @@ -180,7 +180,7 @@ public class ExecutorService2Test { null, null, null, null, 0, "", "", RunMode.RUN_MODE_PARALLEL, - Priority.LOW, 0, 110); + Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(31)).createCommand(any(Command.class)); }catch (Exception e){ @@ -201,7 +201,7 @@ public class ExecutorService2Test { null, null, null, null, 0, "", "", RunMode.RUN_MODE_PARALLEL, - Priority.LOW, 0, 110); + Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); verify(processService, times(15)).createCommand(any(Command.class)); }catch (Exception e){ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index 20571577e3..4e41ed39b0 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -52,12 +52,17 @@ public class LoggerServiceTest { //TASK_INSTANCE_NOT_FOUND Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); - //HOST NOT FOUND - result = loggerService.queryLog(1,1,1); + try { + //HOST NOT FOUND OR ILLEGAL + result = loggerService.queryLog(1, 1, 1); + } catch (RuntimeException e) { + Assert.assertTrue(true); + logger.error("testQueryDataSourceList error {}", e.getMessage()); + } Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); //SUCCESS - taskInstance.setHost("127.0.0.1"); + taskInstance.setHost("127.0.0.1:8080"); taskInstance.setLogPath("/temp/log"); Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); result = loggerService.queryLog(1,1,1); @@ -87,7 +92,7 @@ public class LoggerServiceTest { } //success - taskInstance.setHost("127.0.0.1"); + taskInstance.setHost("127.0.0.1:8080"); taskInstance.setLogPath("/temp/log"); //if use @RunWith(PowerMockRunner.class) mock object,sonarcloud will not calculate the coverage, // so no assert will be added here diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index 1e6ee13c57..5a03cdb268 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.api.service; +import com.alibaba.druid.pool.DruidDataSource; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.api.ApiApplicationServer; @@ -28,7 +29,9 @@ import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.apache.http.entity.ContentType; import org.json.JSONException; import org.junit.Assert; @@ -38,10 +41,12 @@ import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; +import org.quartz.Scheduler; import org.skyscreamer.jsonassert.JSONAssert; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.context.ApplicationContext; import org.springframework.mock.web.MockMultipartFile; import org.springframework.web.multipart.MultipartFile; @@ -110,7 +115,7 @@ public class ProcessDefinitionServiceTest { "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; @Test - public void testQueryProccessDefinitionList() { + public void testQueryProcessDefinitionList() { String projectName = "project_test1"; Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); @@ -124,7 +129,7 @@ public class ProcessDefinitionServiceTest { //project not found Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); + Map map = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success @@ -133,7 +138,7 @@ public class ProcessDefinitionServiceTest { List resourceList = new ArrayList<>(); resourceList.add(getProcessDefinition()); Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList); - Map checkSuccessRes = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); + Map checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser,"project_test1"); Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS)); } @@ -174,7 +179,7 @@ public class ProcessDefinitionServiceTest { //project check auth fail Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProccessDefinitionById(loginUser, + Map map = processDefinitionService.queryProcessDefinitionById(loginUser, "project_test1", 1); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); @@ -182,17 +187,58 @@ public class ProcessDefinitionServiceTest { putMsg(result, Status.SUCCESS, projectName); Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); Mockito.when(processDefineMapper.selectById(1)).thenReturn(null); - Map instanceNotexitRes = processDefinitionService.queryProccessDefinitionById(loginUser, + Map instanceNotexitRes = processDefinitionService.queryProcessDefinitionById(loginUser, "project_test1", 1); Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, instanceNotexitRes.get(Constants.STATUS)); //instance exit Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition()); - Map successRes = processDefinitionService.queryProccessDefinitionById(loginUser, + Map successRes = processDefinitionService.queryProcessDefinitionById(loginUser, "project_test1", 46); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } + @Test + public void testCopyProcessDefinition() throws Exception{ + String projectName = "project_test1"; + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + + Project project = getProject(projectName); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + Map result = new HashMap<>(5); + //project check auth success, instance not exist + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + + ProcessDefinition definition = getProcessDefinition(); + definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}"); + definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}"); + definition.setConnects("[]"); + //instance exit + Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition); + + Map createProcessResult = new HashMap<>(5); + putMsg(result, Status.SUCCESS); + + Mockito.when(processDefinitionService.createProcessDefinition( + loginUser, + definition.getProjectName(), + definition.getName(), + definition.getProcessDefinitionJson(), + definition.getDescription(), + definition.getLocations(), + definition.getConnects())).thenReturn(createProcessResult); + + Map successRes = processDefinitionService.copyProcessDefinition(loginUser, + "project_test1", 46); + + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + @Test public void deleteProcessDefinitionByIdTest() throws Exception { String projectName = "project_test1"; @@ -274,6 +320,7 @@ public class ProcessDefinitionServiceTest { @Test public void testReleaseProcessDefinition() { + String projectName = "project_test1"; Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); @@ -298,20 +345,21 @@ public class ProcessDefinitionServiceTest { 46, ReleaseState.ONLINE.getCode()); Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS)); - //process definition offline - List schedules = new ArrayList<>(); - Schedule schedule = getSchedule(); - schedules.add(schedule); - Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules); - Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1); - Map offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", - 46, ReleaseState.OFFLINE.getCode()); - Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS)); - //release error code Map failRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", - 46, 2); + 46, 2); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS)); + + //FIXME has function exit code 1 when exception + //process definition offline +// List schedules = new ArrayList<>(); +// Schedule schedule = getSchedule(); +// schedules.add(schedule); +// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules); +// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1); +// Map offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", +// 46, ReleaseState.OFFLINE.getCode()); +// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS)); } @Test @@ -328,20 +376,20 @@ public class ProcessDefinitionServiceTest { Map result = new HashMap<>(5); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.verifyProccessDefinitionName(loginUser, + Map map = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); //project check auth success, process not exist putMsg(result, Status.SUCCESS, projectName); Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(null); - Map processNotExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser, + Map processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS)); //process exist Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(getProcessDefinition()); - Map processExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser, + Map processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, processExistRes.get(Constants.STATUS)); } @@ -411,14 +459,14 @@ public class ProcessDefinitionServiceTest { } @Test - public void testQueryProccessDefinitionAllByProjectId() { + public void testQueryProcessDefinitionAllByProjectId() { int projectId = 1; ProcessDefinition processDefinition = getProcessDefinition(); processDefinition.setProcessDefinitionJson(shellJson); List processDefinitionList = new ArrayList<>(); processDefinitionList.add(processDefinition); Mockito.when(processDefineMapper.queryAllDefinitionList(projectId)).thenReturn(processDefinitionList); - Map successRes = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId); + Map successRes = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @@ -763,12 +811,14 @@ public class ProcessDefinitionServiceTest { * @return ProcessDefinition */ private ProcessDefinition getProcessDefinition(){ + ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setId(46); processDefinition.setName("test_pdf"); processDefinition.setProjectId(2); processDefinition.setTenantId(1); processDefinition.setDescription(""); + return processDefinition; } @@ -803,7 +853,7 @@ public class ProcessDefinitionServiceTest { schedule.setProcessInstancePriority(Priority.MEDIUM); schedule.setWarningType(WarningType.NONE); schedule.setWarningGroupId(1); - schedule.setWorkerGroupId(-1); + schedule.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); return schedule; } @@ -822,7 +872,6 @@ public class ProcessDefinitionServiceTest { processMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy())); processMeta.setScheduleReleaseState(String.valueOf(schedule.getReleaseState())); processMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority())); - processMeta.setScheduleWorkerGroupId(schedule.getWorkerGroupId()); processMeta.setScheduleWorkerGroupName("workgroup1"); return processMeta; } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index 959dca21bf..a1b1246df1 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -163,7 +163,6 @@ public class ProcessInstanceServiceTest { //project auth success ProcessInstance processInstance = getProcessInstance(); - processInstance.setWorkerGroupId(-1); processInstance.setReceivers("xxx@qq.com"); processInstance.setReceiversCc("xxx@qq.com"); processInstance.setProcessDefinitionId(46); @@ -178,16 +177,11 @@ public class ProcessInstanceServiceTest { Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); //worker group null - processInstance.setWorkerGroupId(1); - when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null); Map workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS)); //worker group exist WorkerGroup workerGroup = getWorkGroup(); - when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(workerGroup); - processInstance.setWorkerGroupId(1); - when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null); Map workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS)); } @@ -265,7 +259,7 @@ public class ProcessInstanceServiceTest { //task not sub process TaskInstance taskInstance = getTaskInstance(); - taskInstance.setTaskType(TaskType.HTTP.getDescp()); + taskInstance.setTaskType(TaskType.HTTP.toString()); taskInstance.setProcessInstanceId(1); when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); Map notSubprocessRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); @@ -273,7 +267,7 @@ public class ProcessInstanceServiceTest { //sub process not exist TaskInstance subTask = getTaskInstance(); - subTask.setTaskType(TaskType.SUB_PROCESS.getDescp()); + subTask.setTaskType(TaskType.SUB_PROCESS.toString()); subTask.setProcessInstanceId(1); when(processService.findTaskInstanceById(subTask.getId())).thenReturn(subTask); when(processService.findSubProcessInstance(subTask.getProcessInstanceId(), subTask.getId())).thenReturn(null); @@ -394,8 +388,6 @@ public class ProcessInstanceServiceTest { //project auth fail when(projectMapper.queryByName(projectName)).thenReturn(null); when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); - Map proejctAuthFailRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any()); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); //process instance null Project project = getProject(projectName); @@ -403,8 +395,6 @@ public class ProcessInstanceServiceTest { when(projectMapper.queryByName(projectName)).thenReturn(project); when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); when(processService.findProcessInstanceDetailById(1)).thenReturn(null); - Map processInstanceNullRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any()); - Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS)); } @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java index 6d07ebd99c..4f9176d699 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java @@ -24,10 +24,7 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.FileUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; @@ -40,6 +37,7 @@ import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.omg.CORBA.Any; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; @@ -73,6 +71,8 @@ public class ResourcesServiceTest { private UserMapper userMapper; @Mock private UdfFuncMapper udfFunctionMapper; + @Mock + private ProcessDefinitionMapper processDefinitionMapper; @Before public void setUp() { @@ -96,14 +96,14 @@ public class ResourcesServiceTest { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); //HDFS_NOT_STARTUP - Result result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null); + Result result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); //RESOURCE_FILE_IS_EMPTY MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf",new String().getBytes()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); - result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile); + result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(),result.getMsg()); @@ -111,31 +111,42 @@ public class ResourcesServiceTest { mockMultipartFile = new MockMultipartFile("test.pdf","test.pdf","pdf",new String("test").getBytes()); PowerMockito.when(FileUtils.suffix("test.pdf")).thenReturn("pdf"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile); + result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(),result.getMsg()); //UDF_RESOURCE_SUFFIX_NOT_JAR mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf","ResourcesServiceTest.pdf","pdf",new String("test").getBytes()); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.pdf")).thenReturn("pdf"); - result = resourcesService.createResource(user,"ResourcesServiceTest.pdf","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile); + result = resourcesService.createResource(user,"ResourcesServiceTest.pdf","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile,-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(),result.getMsg()); - //UDF_RESOURCE_SUFFIX_NOT_JAR - Mockito.when(tenantMapper.queryById(0)).thenReturn(getTenant()); - Mockito.when(resourcesMapper.queryResourceList("ResourcesServiceTest.jar", 0, 1)).thenReturn(getResourceList()); - mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.jar","ResourcesServiceTest.jar","pdf",new String("test").getBytes()); - result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile); + } + + @Test + public void testCreateDirecotry(){ + + PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); + User user = new User(); + //HDFS_NOT_STARTUP + Result result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); + Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); - //SUCCESS - Mockito.when(resourcesMapper.queryResourceList("ResourcesServiceTest.jar", 0, 1)).thenReturn(new ArrayList<>()); - result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile); + //PARENT_RESOURCE_NOT_EXIST + PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); + Mockito.when(resourcesMapper.selectById(Mockito.anyInt())).thenReturn(null); + result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,1,"/"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); + Assert.assertEquals(Status.PARENT_RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); + //RESOURCE_EXIST + PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); + Mockito.when(resourcesMapper.queryResourceList("/directoryTest", 0, 0)).thenReturn(getResourceList()); + result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); + logger.info(result.toString()); + Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); } @@ -163,41 +174,46 @@ public class ResourcesServiceTest { //SUCCESS user.setId(1); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest.jar",ResourceType.FILE); + Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser()); + Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); + + result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); //RESOURCE_EXIST - Mockito.when(resourcesMapper.queryResourceList("ResourcesServiceTest1.jar", 0, 0)).thenReturn(getResourceList()); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.FILE); + Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest1.jar", 0, 0)).thenReturn(getResourceList()); + result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.FILE); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); //USER_NOT_EXIST - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF); + Mockito.when(userMapper.queryDetailsById(Mockito.anyInt())).thenReturn(null); + result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); //TENANT_NOT_EXIST Mockito.when(userMapper.queryDetailsById(1)).thenReturn(getUser()); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF); + Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null); + result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg()); //RESOURCE_NOT_EXIST Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - PowerMockito.when(HadoopUtils.getHdfsFilename(Mockito.any(), Mockito.any())).thenReturn("test1"); + PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test1"); try { Mockito.when(hadoopUtils.exists("test")).thenReturn(true); } catch (IOException e) { e.printStackTrace(); } - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF); + result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); //SUCCESS - PowerMockito.when(HadoopUtils.getHdfsFilename(Mockito.any(), Mockito.any())).thenReturn("test"); + PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test"); result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); @@ -212,8 +228,8 @@ public class ResourcesServiceTest { resourcePage.setTotal(1); resourcePage.setRecords(getResourceList()); Mockito.when(resourcesMapper.queryResourcePaging(Mockito.any(Page.class), - Mockito.eq(0), Mockito.eq(0), Mockito.eq("test"))).thenReturn(resourcePage); - Map result = resourcesService.queryResourceListPaging(loginUser,ResourceType.FILE,"test",1,10); + Mockito.eq(0),Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"))).thenReturn(resourcePage); + Map result = resourcesService.queryResourceListPaging(loginUser,-1,ResourceType.FILE,"test",1,10); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); @@ -226,7 +242,7 @@ public class ResourcesServiceTest { User loginUser = new User(); loginUser.setId(0); loginUser.setUserType(UserType.ADMIN_USER); - Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0)).thenReturn(getResourceList()); + Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0,0)).thenReturn(getResourceList()); Map result = resourcesService.queryResourceList(loginUser, ResourceType.FILE); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -263,6 +279,7 @@ public class ResourcesServiceTest { //TENANT_NOT_EXIST loginUser.setUserType(UserType.ADMIN_USER); loginUser.setTenantId(2); + Mockito.when(userMapper.queryDetailsById(Mockito.anyInt())).thenReturn(loginUser); result = resourcesService.delete(loginUser,1); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); @@ -285,14 +302,20 @@ public class ResourcesServiceTest { User user = new User(); user.setId(1); - Mockito.when(resourcesMapper.queryResourceList("test", 0, 0)).thenReturn(getResourceList()); - Result result = resourcesService.verifyResourceName("test",ResourceType.FILE,user); + Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest.jar", 0, 0)).thenReturn(getResourceList()); + Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); //TENANT_NOT_EXIST Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - result = resourcesService.verifyResourceName("test1",ResourceType.FILE,user); + String unExistFullName = "/test.jar"; + try { + Mockito.when(hadoopUtils.exists(unExistFullName)).thenReturn(false); + } catch (IOException e) { + logger.error("hadoop error",e); + } + result = resourcesService.verifyResourceName("/test.jar",ResourceType.FILE,user); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); @@ -304,10 +327,10 @@ public class ResourcesServiceTest { } catch (IOException e) { logger.error("hadoop error",e); } - PowerMockito.when(HadoopUtils.getHdfsFilename("123", "test1")).thenReturn("test"); - result = resourcesService.verifyResourceName("test1",ResourceType.FILE,user); + PowerMockito.when(HadoopUtils.getHdfsResourceFileName("123", "test1")).thenReturn("test"); + result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); logger.info(result.toString()); - Assert.assertTrue(Status.RESOURCE_FILE_EXIST.getCode()==result.getCode()); + Assert.assertTrue(Status.RESOURCE_EXIST.getCode()==result.getCode()); //SUCCESS result = resourcesService.verifyResourceName("test2",ResourceType.FILE,user); @@ -389,14 +412,14 @@ public class ResourcesServiceTest { PowerMockito.when(HadoopUtils.getHdfsUdfDir("udfDir")).thenReturn("udfDir"); User user = getUser(); //HDFS_NOT_STARTUP - Result result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content"); + Result result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); //RESOURCE_SUFFIX_NOT_SUPPORT_VIEW PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); - result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content"); + result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); @@ -404,7 +427,7 @@ public class ResourcesServiceTest { try { PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content"); + result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content",-1,"/"); }catch (RuntimeException ex){ logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), ex.getMessage()); @@ -413,7 +436,7 @@ public class ResourcesServiceTest { //SUCCESS Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content"); + result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); @@ -584,13 +607,26 @@ public class ResourcesServiceTest { private Resource getResource(){ Resource resource = new Resource(); + resource.setPid(-1); resource.setUserId(1); resource.setDescription("ResourcesServiceTest.jar"); resource.setAlias("ResourcesServiceTest.jar"); + resource.setFullName("/ResourcesServiceTest.jar"); resource.setType(ResourceType.FILE); return resource; } + private Resource getUdfResource(){ + + Resource resource = new Resource(); + resource.setUserId(1); + resource.setDescription("udfTest"); + resource.setAlias("udfTest.jar"); + resource.setFullName("/udfTest.jar"); + resource.setType(ResourceType.UDF); + return resource; + } + private UdfFunc getUdfFunc(){ UdfFunc udfFunc = new UdfFunc(); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UserAlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UserAlertGroupServiceTest.java new file mode 100644 index 0000000000..24b1d5a98b --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UserAlertGroupServiceTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper; +import static org.junit.Assert.assertEquals; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * + */ +@RunWith(MockitoJUnitRunner.class) +public class UserAlertGroupServiceTest { + + @InjectMocks + UserAlertGroupService userAlertGroupService; + + @Mock + UserAlertGroupMapper userAlertGroupMapper; + + @Test + public void deleteByAlertGroupId() { + + Integer groupId = 1; + userAlertGroupService.deleteByAlertGroupId(groupId); + ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(Integer.class); + + Mockito.verify(userAlertGroupMapper).deleteByAlertgroupId(argumentCaptor.capture()); + assertEquals(argumentCaptor.getValue(), groupId); + + } + +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index efe9022ad7..58ee6fdf6c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -18,13 +18,16 @@ package org.apache.dolphinscheduler.api.service; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.avro.generic.GenericData; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.EncryptionUtils; +import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.*; @@ -68,6 +71,8 @@ public class UsersServiceTest { private DataSourceUserMapper datasourceUserMapper; @Mock private AlertGroupMapper alertGroupMapper; + @Mock + private ResourceMapper resourceMapper; private String queueName ="UsersServiceTestQueue"; @@ -301,9 +306,13 @@ public class UsersServiceTest { logger.info(result.toString()); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success + when(resourceMapper.queryAuthorizedResourceList(1)).thenReturn(new ArrayList()); + + when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource()); result = usersService.grantResources(loginUser, 1, resourceIds); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + } @@ -476,11 +485,30 @@ public class UsersServiceTest { return user; } - + /** + * get tenant + * @return tenant + */ private Tenant getTenant(){ Tenant tenant = new Tenant(); tenant.setId(1); return tenant; } + /** + * get resource + * @return resource + */ + private Resource getResource(){ + + Resource resource = new Resource(); + resource.setPid(-1); + resource.setUserId(1); + resource.setDescription("ResourcesServiceTest.jar"); + resource.setAlias("ResourcesServiceTest.jar"); + resource.setFullName("/ResourcesServiceTest.jar"); + resource.setType(ResourceType.FILE); + return resource; + } + } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java index 2c535054a7..454e0de72e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java @@ -27,12 +27,15 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.WorkerGroupMapper; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; +import org.mockito.internal.matchers.Any; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,11 +55,11 @@ public class WorkerGroupServiceTest { private WorkerGroupMapper workerGroupMapper; @Mock private ProcessInstanceMapper processInstanceMapper; - + @Mock + private ZookeeperCachedOperator zookeeperCachedOperator; private String groupName="groupName000001"; - /** * create or update a worker group */ @@ -129,8 +132,14 @@ public class WorkerGroupServiceTest { } @Test - public void testQueryAllGroup(){ - Mockito.when(workerGroupMapper.queryAllWorkerGroup()).thenReturn(getList()); + public void testQueryAllGroup() throws Exception { + ZookeeperConfig zookeeperConfig = new ZookeeperConfig(); + zookeeperConfig.setDsRoot("/ds"); + Mockito.when(zookeeperCachedOperator.getZookeeperConfig()).thenReturn(zookeeperConfig); + List workerGroupStrList = new ArrayList<>(); + workerGroupStrList.add("workerGroup1"); + Mockito.when(zookeeperCachedOperator.getChildrenKeys(Mockito.anyString())).thenReturn(workerGroupStrList); + Map result = workerGroupService.queryAllGroup(); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(),(String)result.get(Constants.MSG)); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java index 24a0ed31d6..ccc231fcf6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java @@ -43,6 +43,7 @@ import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.ArrayList; import java.util.Map; import static org.junit.Assert.*; @@ -173,7 +174,11 @@ public class CheckUtilsTest { // MapreduceParameters MapreduceParameters mapreduceParameters = new MapreduceParameters(); assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(mapreduceParameters), TaskType.MR.toString())); - mapreduceParameters.setMainJar(new ResourceInfo()); + + ResourceInfo resourceInfoMapreduce = new ResourceInfo(); + resourceInfoMapreduce.setId(1); + resourceInfoMapreduce.setRes(""); + mapreduceParameters.setMainJar(resourceInfoMapreduce); mapreduceParameters.setProgramType(ProgramType.JAVA); assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(mapreduceParameters), TaskType.MR.toString())); @@ -211,6 +216,7 @@ public class CheckUtilsTest { // DataxParameters DataxParameters dataxParameters = new DataxParameters(); assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(dataxParameters), TaskType.DATAX.toString())); + dataxParameters.setCustomConfig(0); dataxParameters.setDataSource(111); dataxParameters.setDataTarget(333); dataxParameters.setSql("sql"); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ResultTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ResultTest.java new file mode 100644 index 0000000000..01fb75cdf7 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/ResultTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.api.utils; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.junit.Assert; +import org.junit.Test; + +import java.util.HashMap; + +import static org.junit.Assert.*; + +public class ResultTest { + + @Test + public void success() { + HashMap map = new HashMap<>(); + map.put("testdata", "test"); + Result ret = Result.success(map); + Assert.assertEquals(Status.SUCCESS.getCode(), ret.getCode().intValue()); + } + + @Test + public void error() { + Result ret = Result.error(Status.ACCESS_TOKEN_NOT_EXIST); + Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST.getCode(), ret.getCode().intValue()); + } + + @Test + public void errorWithArgs() { + Result ret = Result.errorWithArgs(Status.INTERNAL_SERVER_ERROR_ARGS, "test internal server error"); + Assert.assertEquals(Status.INTERNAL_SERVER_ERROR_ARGS.getCode(), ret.getCode().intValue()); + } +} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 6af0e6445f..853ab95d1c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -25,9 +25,45 @@ import java.util.regex.Pattern; * Constants */ public final class Constants { + private Constants() { throw new IllegalStateException("Constants class"); } + + /** + * quartz config + */ + public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass"; + public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName"; + public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId"; + public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon"; + public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties"; + public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class"; + public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount"; + public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons"; + public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority"; + public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class"; + public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix"; + public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered"; + public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold"; + public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval"; + public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock"; + public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource"; + public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class"; + + /** + * quartz config default value + */ + public static final String QUARTZ_TABLE_PREFIX = "QRTZ_"; + public static final String QUARTZ_MISFIRETHRESHOLD = "60000"; + public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000"; + public static final String QUARTZ_DATASOURCE = "myDs"; + public static final String QUARTZ_THREADCOUNT = "25"; + public static final String QUARTZ_THREADPRIORITY = "5"; + public static final String QUARTZ_INSTANCENAME = "DolphinScheduler"; + public static final String QUARTZ_INSTANCEID = "AUTO"; + public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true"; + /** * common properties path */ @@ -56,9 +92,11 @@ public final class Constants { /** - * yarn.resourcemanager.ha.rm.idsfs.defaultFS + * yarn.resourcemanager.ha.rm.ids */ public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids"; + public static final String YARN_RESOURCEMANAGER_HA_XX = "xx"; + /** * yarn.application.status.address @@ -72,31 +110,25 @@ public final class Constants { public static final String HDFS_ROOT_USER = "hdfs.root.user"; /** - * hdfs configuration - * data.store2hdfs.basepath + * hdfs/s3 configuration + * resource.upload.path */ - public static final String DATA_STORE_2_HDFS_BASEPATH = "data.store2hdfs.basepath"; + public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path"; /** - * data.basedir.path + * data basedir path */ public static final String DATA_BASEDIR_PATH = "data.basedir.path"; - /** - * data.download.basedir.path - */ - public static final String DATA_DOWNLOAD_BASEDIR_PATH = "data.download.basedir.path"; - - /** - * process.exec.basepath - */ - public static final String PROCESS_EXEC_BASEPATH = "process.exec.basepath"; - /** * dolphinscheduler.env.path */ public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path"; + /** + * environment properties default path + */ + public static final String ENV_PATH = "env/dolphinscheduler_env.sh"; /** * python home @@ -108,30 +140,38 @@ public final class Constants { */ public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs"; + public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties"; + /** * development.state */ public static final String DEVELOPMENT_STATE = "development.state"; + public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true"; + + /** + * string true + */ + public static final String STRING_TRUE = "true"; /** - * res.upload.startup.type + * string false */ - public static final String RES_UPLOAD_STARTUP_TYPE = "res.upload.startup.type"; + public static final String STRING_FALSE = "false"; /** - * zookeeper quorum + * resource storage type */ - public static final String ZOOKEEPER_QUORUM = "zookeeper.quorum"; + public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type"; /** * MasterServer directory registered in zookeeper */ - public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/masters"; + public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master"; /** * WorkerServer directory registered in zookeeper */ - public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/workers"; + public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker"; /** * all servers directory registered in zookeeper @@ -143,10 +183,6 @@ public final class Constants { */ public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters"; - /** - * WorkerServer lock directory registered in zookeeper - */ - public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_WORKERS = "/lock/workers"; /** * MasterServer failover directory registered in zookeeper @@ -163,16 +199,17 @@ public final class Constants { */ public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters"; - /** - * need send warn times when master server or worker server failover - */ - public static final int DOLPHINSCHEDULER_WARN_TIMES_FAILOVER = 3; /** * comma , */ public static final String COMMA = ","; + /** + * slash / + */ + public static final String SLASH = "/"; + /** * COLON : */ @@ -198,37 +235,6 @@ public final class Constants { */ public static final String EQUAL_SIGN = "="; - /** - * ZOOKEEPER_SESSION_TIMEOUT - */ - public static final String ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout"; - - public static final String ZOOKEEPER_CONNECTION_TIMEOUT = "zookeeper.connection.timeout"; - - public static final String ZOOKEEPER_RETRY_SLEEP = "zookeeper.retry.sleep"; - public static final String ZOOKEEPER_RETRY_BASE_SLEEP = "zookeeper.retry.base.sleep"; - public static final String ZOOKEEPER_RETRY_MAX_SLEEP = "zookeeper.retry.max.sleep"; - - public static final String ZOOKEEPER_RETRY_MAXTIME = "zookeeper.retry.maxtime"; - - - public static final String MASTER_HEARTBEAT_INTERVAL = "master.heartbeat.interval"; - - public static final String MASTER_EXEC_THREADS = "master.exec.threads"; - - public static final String MASTER_EXEC_TASK_THREADS = "master.exec.task.number"; - - - public static final String MASTER_COMMIT_RETRY_TIMES = "master.task.commit.retryTimes"; - - public static final String MASTER_COMMIT_RETRY_INTERVAL = "master.task.commit.interval"; - - - public static final String WORKER_EXEC_THREADS = "worker.exec.threads"; - - public static final String WORKER_HEARTBEAT_INTERVAL = "worker.heartbeat.interval"; - - public static final String WORKER_FETCH_TASK_NUM = "worker.fetch.task.num"; public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg"; @@ -239,21 +245,6 @@ public final class Constants { public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory"; - /** - * dolphinscheduler tasks queue - */ - public static final String DOLPHINSCHEDULER_TASKS_QUEUE = "tasks_queue"; - - /** - * dolphinscheduler need kill tasks queue - */ - public static final String DOLPHINSCHEDULER_TASKS_KILL = "tasks_kill"; - - public static final String ZOOKEEPER_DOLPHINSCHEDULER_ROOT = "zookeeper.dolphinscheduler.root"; - - public static final String SCHEDULER_QUEUE_IMPL = "dolphinscheduler.queue.impl"; - - /** * date format of yyyy-MM-dd HH:mm:ss */ @@ -345,26 +336,6 @@ public final class Constants { public static final int MAX_TASK_TIMEOUT = 24 * 3600; - /** - * heartbeat threads number - */ - public static final int DEFAUL_WORKER_HEARTBEAT_THREAD_NUM = 1; - - /** - * heartbeat interval - */ - public static final int DEFAULT_WORKER_HEARTBEAT_INTERVAL = 60; - - /** - * worker fetch task number - */ - public static final int DEFAULT_WORKER_FETCH_TASK_NUM = 1; - - /** - * worker execute threads number - */ - public static final int DEFAULT_WORKER_EXEC_THREAD_NUM = 10; - /** * master cpu load */ @@ -386,16 +357,6 @@ public final class Constants { public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10; - /** - * master execute threads number - */ - public static final int DEFAULT_MASTER_EXEC_THREAD_NUM = 100; - - - /** - * default master concurrent task execute num - */ - public static final int DEFAULT_MASTER_TASK_EXEC_NUM = 20; /** * default log cache rows num,output when reach the number @@ -403,33 +364,11 @@ public final class Constants { public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16; /** - * log flush interval,output when reach the interval + * log flush interval?output when reach the interval */ public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000; - /** - * default master heartbeat thread number - */ - public static final int DEFAULT_MASTER_HEARTBEAT_THREAD_NUM = 1; - - - /** - * default master heartbeat interval - */ - public static final int DEFAULT_MASTER_HEARTBEAT_INTERVAL = 60; - - /** - * default master commit retry times - */ - public static final int DEFAULT_MASTER_COMMIT_RETRY_TIMES = 5; - - - /** - * default master commit retry interval - */ - public static final int DEFAULT_MASTER_COMMIT_RETRY_INTERVAL = 3000; - /** * time unit secong to minutes */ @@ -448,9 +387,9 @@ public final class Constants { public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN"; /** - * task record configuration path + * datasource configuration path */ - public static final String APPLICATION_PROPERTIES = "application.properties"; + public static final String DATASOURCE_PROPERTIES = "/datasource.properties"; public static final String TASK_RECORD_URL = "task.record.datasource.url"; @@ -568,7 +507,7 @@ public final class Constants { /** * heartbeat for zk info length */ - public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 7; + public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 5; /** @@ -813,6 +752,11 @@ public final class Constants { */ public static final String KERBEROS = "kerberos"; + /** + * kerberos expire time + */ + public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time"; + /** * java.security.krb5.conf */ @@ -859,7 +803,7 @@ public final class Constants { */ public static final String HIVE_CONF = "hiveconf:"; - //flink 任务 + //flink ?? public static final String FLINK_YARN_CLUSTER = "yarn-cluster"; public static final String FLINK_RUN_MODE = "-m"; public static final String FLINK_YARN_SLOT = "-ys"; @@ -894,26 +838,20 @@ public final class Constants { /** * data total - * 数据总数 */ public static final String COUNT = "count"; /** * page size - * 每页数据条数 */ public static final String PAGE_SIZE = "pageSize"; /** * current page no - * 当前页码 */ public static final String PAGE_NUMBER = "pageNo"; - /** - * result - */ - public static final String RESULT = "result"; + /** * @@ -967,7 +905,8 @@ public final class Constants { public static final String JDBC_POSTGRESQL = "jdbc:postgresql://"; public static final String JDBC_HIVE_2 = "jdbc:hive2://"; public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://"; - public static final String JDBC_ORACLE = "jdbc:oracle:thin:@//"; + public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@"; + public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//"; public static final String JDBC_SQLSERVER = "jdbc:sqlserver://"; public static final String JDBC_DB2 = "jdbc:db2://"; @@ -994,4 +933,25 @@ public final class Constants { * dataSource sensitive param */ public static final String DATASOURCE_PASSWORD_REGEX = "(?<=(\"password\":\")).*?(?=(\"))"; + + /** + * default worker group + */ + public static final String DEFAULT_WORKER_GROUP = "default"; + + public static final Integer TASK_INFO_LENGTH = 5; + + /** + * new + * schedule time + */ + public static final String PARAMETER_SHECDULE_TIME = "schedule.time"; + /** + * authorize writable perm + */ + public static final int AUTHORIZE_WRITABLE_PERM=7; + /** + * authorize readable perm + */ + public static final int AUTHORIZE_READABLE_PERM=4; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java index 1c371e799e..633f5f9623 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/AuthorizationType.java @@ -23,13 +23,17 @@ import com.baomidou.mybatisplus.annotation.EnumValue; */ public enum AuthorizationType { /** - * 0 RESOURCE_FILE; + * 0 RESOURCE_FILE_ID; + * 0 RESOURCE_FILE_NAME; + * 1 UDF_FILE; * 1 DATASOURCE; * 2 UDF; */ - RESOURCE_FILE(0, "resource file"), - DATASOURCE(1, "data source"), - UDF(2, "udf function"); + RESOURCE_FILE_ID(0, "resource file id"), + RESOURCE_FILE_NAME(1, "resource file name"), + UDF_FILE(2, "udf file"), + DATASOURCE(3, "data source"), + UDF(4, "udf function"); AuthorizationType(int code, String descp){ this.code = code; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java index 1ee79156dc..56fdd078d7 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/CommandType.java @@ -65,4 +65,13 @@ public enum CommandType { public String getDescp() { return descp; } + + public static CommandType of(Integer status){ + for(CommandType cmdType : values()){ + if(cmdType.getCode() == status){ + return cmdType; + } + } + throw new IllegalArgumentException("invalid status : " + status); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbConnectType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbConnectType.java new file mode 100644 index 0000000000..ef0f454ff6 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbConnectType.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.enums; + +import com.baomidou.mybatisplus.annotation.EnumValue; + +public enum DbConnectType { + + ORACLE_SERVICE_NAME(0, "Oracle Service Name"), + ORACLE_SID(1, "Oracle SID"); + + DbConnectType(int code, String descp) { + this.code = code; + this.descp = descp; + } + + @EnumValue + private final int code; + + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java index 5fb245afef..cc3a29565b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/DbType.java @@ -57,4 +57,14 @@ public enum DbType { public String getDescp() { return descp; } + + + public static DbType of(int type){ + for(DbType ty : values()){ + if(ty.getCode() == type){ + return ty; + } + } + throw new IllegalArgumentException("invalid type : " + type); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java index 12702527f0..1c336c89a1 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java @@ -128,4 +128,13 @@ public enum ExecutionStatus { public String getDescp() { return descp; } + + public static ExecutionStatus of(int status){ + for(ExecutionStatus es : values()){ + if(es.getCode() == status){ + return es; + } + } + throw new IllegalArgumentException("invalid status : " + status); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java index 695f0fd880..200f90709a 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java @@ -60,7 +60,7 @@ public enum TaskStateType { default: break; } - return null; + return new int[0]; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java index 557d9b8b77..a8bd3255de 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskTimeoutStrategy.java @@ -16,14 +16,45 @@ */ package org.apache.dolphinscheduler.common.enums; +import com.baomidou.mybatisplus.annotation.EnumValue; + /** * task timeout strategy */ -public enum TaskTimeoutStrategy { +public enum TaskTimeoutStrategy { /** * 0 warn * 1 failed * 2 warn+failed */ - WARN, FAILED, WARNFAILED + WARN(0, "warn"), + FAILED(1,"failed"), + WARNFAILED(2,"warnfailed"); + + + TaskTimeoutStrategy(int code, String descp){ + this.code = code; + this.descp = descp; + } + + @EnumValue + private final int code; + private final String descp; + + public int getCode() { + return code; + } + + public String getDescp() { + return descp; + } + + public static TaskTimeoutStrategy of(int status){ + for(TaskTimeoutStrategy es : values()){ + if(es.getCode() == status){ + return es; + } + } + throw new IllegalArgumentException("invalid status : " + status); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java index 1f85432bd2..31e457f105 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java @@ -39,7 +39,7 @@ public enum TaskType { */ SHELL(0, "shell"), SQL(1, "sql"), - SUB_PROCESS(2, "sub process"), + SUB_PROCESS(2, "sub_process"), PROCEDURE(3, "procedure"), MR(4, "mr"), SPARK(5, "spark"), diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java index 22f6752689..2351cca40b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/UdfType.java @@ -44,4 +44,15 @@ public enum UdfType { public String getDescp() { return descp; } + + public static UdfType of(int type){ + for(UdfType ut : values()){ + if(ut.getCode() == type){ + return ut; + } + } + throw new IllegalArgumentException("invalid type : " + type); + } + + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java index 8982c2a838..b4b3c59321 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ZKNodeType.java @@ -22,10 +22,10 @@ package org.apache.dolphinscheduler.common.enums; public enum ZKNodeType { /** - * 0 do not send warning; - * 1 send if process success; - * 2 send if process failed; - * 3 send if process ending; + * 0 master node; + * 1 worker node; + * 2 dead_server node; + * 3 task_queue node; */ MASTER, WORKER, DEAD_SERVER, TASK_QUEUE; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java index b45bd8aeb8..35767a0a46 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.model; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; @@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import com.alibaba.fastjson.JSONObject; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; @@ -120,9 +120,9 @@ public class TaskNode { private Priority taskInstancePriority; /** - * worker group id + * worker group */ - private int workerGroupId; + private String workerGroup; /** @@ -236,8 +236,9 @@ public class TaskNode { Objects.equals(extras, taskNode.extras) && Objects.equals(runFlag, taskNode.runFlag) && Objects.equals(dependence, taskNode.dependence) && + Objects.equals(workerGroup, taskNode.workerGroup) && Objects.equals(conditionResult, taskNode.conditionResult) && - Objects.equals(workerGroupId, taskNode.workerGroupId) && + CollectionUtils.equalLists(depList, taskNode.depList); } @@ -288,19 +289,19 @@ public class TaskNode { /** * get task time out parameter - * @return + * @return task time out parameter */ public TaskTimeoutParameter getTaskTimeoutParameter() { if(StringUtils.isNotEmpty(this.getTimeout())){ String formatStr = String.format("%s,%s", TaskTimeoutStrategy.WARN.name(), TaskTimeoutStrategy.FAILED.name()); - String timeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name()); - return JSONObject.parseObject(timeout,TaskTimeoutParameter.class); + String taskTimeout = this.getTimeout().replace(formatStr,TaskTimeoutStrategy.WARNFAILED.name()); + return JSON.parseObject(taskTimeout,TaskTimeoutParameter.class); } return new TaskTimeoutParameter(false); } public boolean isConditionsTask(){ - return this.getType().toUpperCase().equals(TaskType.CONDITIONS.toString()); + return TaskType.CONDITIONS.toString().equalsIgnoreCase(this.getType()); } @Override @@ -321,16 +322,16 @@ public class TaskNode { ", dependence='" + dependence + '\'' + ", taskInstancePriority=" + taskInstancePriority + ", timeout='" + timeout + '\'' + - ", workerGroupId='" + workerGroupId + '\'' + + ", workerGroup='" + workerGroup + '\'' + '}'; } - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; } public String getConditionResult() { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java index a0c7a928a1..9ec9b1ae57 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/Property.java @@ -20,9 +20,10 @@ package org.apache.dolphinscheduler.common.process; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; +import java.io.Serializable; import java.util.Objects; -public class Property { +public class Property implements Serializable { /** * key */ diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java index 3c95ac648b..a7fc0839eb 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java @@ -23,6 +23,16 @@ public class ResourceInfo { /** * res the name of the resource that was uploaded */ + private int id; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + private String res; public String getRes() { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java index 2d0322a6d7..929516c86b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/AbstractParameters.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.common.task; import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import java.util.LinkedHashMap; import java.util.List; @@ -31,7 +32,7 @@ public abstract class AbstractParameters implements IParameters { public abstract boolean checkParameters(); @Override - public abstract List getResourceFilesList(); + public abstract List getResourceFilesList(); /** * local parameters @@ -40,7 +41,7 @@ public abstract class AbstractParameters implements IParameters { /** * get local parameters list - * @return + * @return Property list */ public List getLocalParams() { return localParams; @@ -52,7 +53,7 @@ public abstract class AbstractParameters implements IParameters { /** * get local parameters map - * @return + * @return parameters map */ public Map getLocalParametersMap() { if (localParams != null) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java index 8fb49eb1fa..63c2aa04cd 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/IParameters.java @@ -16,6 +16,8 @@ */ package org.apache.dolphinscheduler.common.task; +import org.apache.dolphinscheduler.common.process.ResourceInfo; + import java.util.List; /** @@ -25,7 +27,7 @@ public interface IParameters { /** * check parameters is valid * - * @return + * @return result */ boolean checkParameters(); @@ -34,5 +36,5 @@ public interface IParameters { * * @return resource files list */ - List getResourceFilesList(); + List getResourceFilesList(); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java index 5714b5ef3e..7f0f2c8079 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.common.task.conditions; import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import java.util.List; @@ -41,7 +42,7 @@ public class ConditionsParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return null; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java index 95dd505c02..872b3aa174 100755 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java @@ -20,6 +20,7 @@ import java.util.ArrayList; import java.util.List; import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; /** @@ -27,6 +28,16 @@ import org.apache.dolphinscheduler.common.task.AbstractParameters; */ public class DataxParameters extends AbstractParameters { + /** + * if custom json config,eg 0, 1 + */ + private Integer customConfig; + + /** + * if customConfig eq 1 ,then json is usable + */ + private String json; + /** * data source type,eg MYSQL, POSTGRES ... */ @@ -77,6 +88,22 @@ public class DataxParameters extends AbstractParameters { */ private int jobSpeedRecord; + public Integer getCustomConfig() { + return customConfig; + } + + public void setCustomConfig(Integer customConfig) { + this.customConfig = customConfig; + } + + public String getJson() { + return json; + } + + public void setJson(String json) { + this.json = json; + } + public String getDsType() { return dsType; } @@ -157,27 +184,31 @@ public class DataxParameters extends AbstractParameters { this.jobSpeedRecord = jobSpeedRecord; } + @Override public boolean checkParameters() { - if (!(dataSource != 0 - && dataTarget != 0 - && StringUtils.isNotEmpty(sql) - && StringUtils.isNotEmpty(targetTable))) { - return false; + if (customConfig == null) return false; + if (customConfig == 0) { + return dataSource != 0 + && dataTarget != 0 + && StringUtils.isNotEmpty(sql) + && StringUtils.isNotEmpty(targetTable); + } else { + return StringUtils.isNotEmpty(json); } - - return true; } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } @Override public String toString() { return "DataxParameters{" + - "dsType='" + dsType + '\'' + + "customConfig=" + customConfig + + ", json='" + json + '\'' + + ", dsType='" + dsType + '\'' + ", dataSource=" + dataSource + ", dtType='" + dtType + '\'' + ", dataTarget=" + dataTarget + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java index 9ff1405722..5f2e0e1853 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dependent/DependentParameters.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.common.task.dependent; import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import java.util.ArrayList; @@ -36,7 +37,7 @@ public class DependentParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java index 1fbd9ab354..05cbb1d794 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java @@ -19,10 +19,10 @@ package org.apache.dolphinscheduler.common.task.flink; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import java.util.Collections; +import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; /** * spark parameters @@ -50,35 +50,35 @@ public class FlinkParameters extends AbstractParameters { private String mainArgs; /** - * slot个数 + * slot count */ private int slot; /** - *Yarn application的名字 + *Yarn application name */ private String appName; /** - * taskManager 数量 + * taskManager count */ private int taskManager; /** - * jobManagerMemory 内存大小 + * job manager memory */ private String jobManagerMemory ; /** - * taskManagerMemory内存大小 + * task manager memory */ private String taskManagerMemory; /** * resource list */ - private List resourceList; + private List resourceList = new ArrayList<>(); /** * The YARN queue to submit to @@ -207,16 +207,11 @@ public class FlinkParameters extends AbstractParameters { @Override - public List getResourceFilesList() { - if(resourceList != null ) { - List resourceFiles = resourceList.stream() - .map(ResourceInfo::getRes).collect(Collectors.toList()); - if(mainJar != null) { - resourceFiles.add(mainJar.getRes()); - } - return resourceFiles; + public List getResourceFilesList() { + if (mainJar != null && !resourceList.contains(mainJar)) { + resourceList.add(mainJar); } - return Collections.emptyList(); + return resourceList; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java index 00b01afce3..54284bd8b0 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/http/HttpParameters.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.common.task.http; import org.apache.dolphinscheduler.common.enums.HttpCheckCondition; import org.apache.dolphinscheduler.common.enums.HttpMethod; import org.apache.dolphinscheduler.common.process.HttpProperty; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.commons.lang.StringUtils; @@ -62,7 +63,7 @@ public class HttpParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java index b8fd6ebcbf..5126e82e85 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/mr/MapreduceParameters.java @@ -19,9 +19,10 @@ package org.apache.dolphinscheduler.common.task.mr; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; public class MapreduceParameters extends AbstractParameters { @@ -53,7 +54,7 @@ public class MapreduceParameters extends AbstractParameters { /** * resource list */ - private List resourceList; + private List resourceList = new ArrayList<>(); /** * program type @@ -124,13 +125,12 @@ public class MapreduceParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { - if (resourceList != null) { - this.resourceList.add(mainJar); - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); + public List getResourceFilesList() { + if (mainJar != null && !resourceList.contains(mainJar)) { + resourceList.add(mainJar); } - return null; + + return resourceList; } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java index 56ae65547d..2811f10380 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/procedure/ProcedureParameters.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.task.procedure; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.commons.lang.StringUtils; @@ -74,7 +75,7 @@ public class ProcedureParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java index ae9cb4c7da..35dbd8ed86 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/python/PythonParameters.java @@ -21,7 +21,6 @@ import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import java.util.List; -import java.util.stream.Collectors; public class PythonParameters extends AbstractParameters { /** @@ -56,12 +55,7 @@ public class PythonParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { - if (resourceList != null) { - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - - return null; + public List getResourceFilesList() { + return this.resourceList; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java index 85b8acb46a..e11e59600b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/shell/ShellParameters.java @@ -59,12 +59,7 @@ public class ShellParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { - if (resourceList != null) { - return resourceList.stream() - .map(p -> p.getRes()).collect(Collectors.toList()); - } - - return null; + public List getResourceFilesList() { + return resourceList; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java index dbafddfddd..4e58201bf3 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java @@ -19,9 +19,10 @@ package org.apache.dolphinscheduler.common.task.spark; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; /** * spark parameters @@ -77,7 +78,7 @@ public class SparkParameters extends AbstractParameters { /** * resource list */ - private List resourceList; + private List resourceList = new ArrayList<>(); /** * The YARN queue to submit to @@ -218,15 +219,12 @@ public class SparkParameters extends AbstractParameters { return mainJar != null && programType != null && sparkVersion != null; } - @Override - public List getResourceFilesList() { - if(resourceList !=null ) { - this.resourceList.add(mainJar); - return resourceList.stream() - .map(ResourceInfo::getRes).collect(Collectors.toList()); + public List getResourceFilesList() { + if (mainJar != null && !resourceList.contains(mainJar)) { + resourceList.add(mainJar); } - return null; + return resourceList; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java index d65204a386..4604234e8f 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sql/SqlParameters.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.task.sql; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.commons.lang.StringUtils; @@ -189,7 +190,7 @@ public class SqlParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java index fb65df6c1b..7f02f42387 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.task.sqoop; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.utils.StringUtils; @@ -111,7 +112,7 @@ public class SqoopParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java index c7784de8dd..46f0e8510c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java @@ -15,6 +15,7 @@ * limitations under the License. */ package org.apache.dolphinscheduler.common.task.subprocess; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import java.util.ArrayList; @@ -42,7 +43,7 @@ public class SubProcessParameters extends AbstractParameters { } @Override - public List getResourceFilesList() { + public List getResourceFilesList() { return new ArrayList<>(); } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java index cad6914cb8..57e8af4221 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/Stopper.java @@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class Stopper { - private static volatile AtomicBoolean signal = new AtomicBoolean(false); + private static AtomicBoolean signal = new AtomicBoolean(false); public static final boolean isStopped(){ return signal.get(); @@ -34,6 +34,6 @@ public class Stopper { } public static final void stop(){ - signal.getAndSet(true); + signal.set(true); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java index f88ea6d127..198028b534 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadPoolExecutors.java @@ -71,24 +71,24 @@ public class ThreadPoolExecutors { * Executes the given task sometime in the future. The task may execute in a new thread or in an existing pooled thread. * If the task cannot be submitted for execution, either because this executor has been shutdown or because its capacity has been reached, * the task is handled by the current RejectedExecutionHandler. - * @param event + * @param event event */ public void execute(final Runnable event) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot execute [" + event + "] because the executor is missing."); + Executor eventExecutor = getExecutor(); + if (eventExecutor == null) { + logger.error("Cannot execute [{}}] because the executor is missing.", event); } else { - executor.execute(event); + eventExecutor.execute(event); } } public Future submit(Runnable event) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot submit [" + event + "] because the executor is missing."); + Executor eventExecutor = getExecutor(); + if (eventExecutor == null) { + logger.error("Cannot submit [{}}] because the executor is missing.", event); } else { - return executor.submit(event); + return eventExecutor.submit(event); } return null; @@ -97,11 +97,11 @@ public class ThreadPoolExecutors { public Future submit(Callable task) { - Executor executor = getExecutor(); - if (executor == null) { - logger.error("Cannot submit [" + task + "] because the executor is missing."); + Executor taskExecutor = getExecutor(); + if (taskExecutor == null) { + logger.error("Cannot submit [{}] because the executor is missing.", task); } else { - return executor.submit(task); + return taskExecutor.submit(task); } return null; @@ -110,8 +110,8 @@ public class ThreadPoolExecutors { public void printStatus() { - Executor executor = getExecutor(); - executor.getStatus().dumpInfo(); + Executor printExecutor = getExecutor(); + printExecutor.getStatus().dumpInfo(); } @@ -125,7 +125,7 @@ public class ThreadPoolExecutors { List wasRunning = executor.threadPoolExecutor .shutdownNow(); if (!wasRunning.isEmpty()) { - logger.info(executor + " had " + wasRunning + " on shutdown"); + logger.info("{} had {} on shutdown", executor, wasRunning); } } } @@ -138,7 +138,7 @@ public class ThreadPoolExecutors { /** * how long to retain excess threads */ - final long keepAliveTimeInMillis = 1000; + static final long KEEP_ALIVE_TIME_IN_MILLIS = 1000; /** * the thread pool executor that services the requests */ @@ -146,7 +146,7 @@ public class ThreadPoolExecutors { /** * work queue to use - unbounded queue */ - final BlockingQueue q = new LinkedBlockingQueue(); + final BlockingQueue q = new LinkedBlockingQueue<>(); private final String name; private static final AtomicLong seqids = new AtomicLong(0); private final long id; @@ -156,7 +156,7 @@ public class ThreadPoolExecutors { this.name = name; //create the thread pool executor this.threadPoolExecutor = new TrackingThreadPoolExecutor( - maxThreads, maxThreads, keepAliveTimeInMillis, + maxThreads, maxThreads, KEEP_ALIVE_TIME_IN_MILLIS, TimeUnit.MILLISECONDS, q); // name the threads for this threadpool ThreadFactoryBuilder tfb = new ThreadFactoryBuilder(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java index 0a4ed9b5ac..a9a124547a 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java @@ -33,10 +33,11 @@ public class ThreadUtils { private static final int STACK_DEPTH = 20; /** - Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a + * Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a * unique, sequentially assigned integer. - * @param prefix - * @return + * + * @param prefix prefix + * @return ThreadPoolExecutor */ public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix){ ThreadFactory threadFactory = namedThreadFactory(prefix); @@ -45,8 +46,8 @@ public class ThreadUtils { /** * Create a thread factory that names threads with a prefix and also sets the threads to daemon. - * @param prefix - * @return + * @param prefix prefix + * @return ThreadFactory */ private static ThreadFactory namedThreadFactory(String prefix) { return new ThreadFactoryBuilder().setDaemon(true).setNameFormat(prefix + "-%d").build(); @@ -56,10 +57,10 @@ public class ThreadUtils { /** * Create a cached thread pool whose max number of threads is `maxThreadNumber`. Thread names * are formatted as prefix-ID, where ID is a unique, sequentially assigned integer. - * @param prefix - * @param maxThreadNumber - * @param keepAliveSeconds - * @return + * @param prefix prefix + * @param maxThreadNumber maxThreadNumber + * @param keepAliveSeconds keepAliveSeconds + * @return ThreadPoolExecutor */ public static ThreadPoolExecutor newDaemonCachedThreadPool(String prefix , int maxThreadNumber, @@ -82,9 +83,9 @@ public class ThreadUtils { /** * Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a * unique, sequentially assigned integer. - * @param nThreads - * @param prefix - * @return + * @param nThreads nThreads + * @param prefix prefix + * @return ThreadPoolExecutor */ public static ThreadPoolExecutor newDaemonFixedThreadPool(int nThreads , String prefix){ ThreadFactory threadFactory = namedThreadFactory(prefix); @@ -93,8 +94,8 @@ public class ThreadUtils { /** * Wrapper over newSingleThreadExecutor. - * @param threadName - * @return + * @param threadName threadName + * @return ExecutorService */ public static ExecutorService newDaemonSingleThreadExecutor(String threadName){ ThreadFactory threadFactory = new ThreadFactoryBuilder() @@ -106,26 +107,37 @@ public class ThreadUtils { /** * Wrapper over newDaemonFixedThreadExecutor. - * @param threadName - * @param threadsNum - * @return + * @param threadName threadName + * @param threadsNum threadsNum + * @return ExecutorService */ public static ExecutorService newDaemonFixedThreadExecutor(String threadName,int threadsNum){ ThreadFactory threadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat(threadName) .build(); - return Executors.newFixedThreadPool(threadsNum,threadFactory); + return Executors.newFixedThreadPool(threadsNum, threadFactory); + } + /** + * Wrapper over ScheduledThreadPoolExecutor + * @param threadName threadName + * @param corePoolSize corePoolSize + * @return ScheduledExecutorService + */ + public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName, int corePoolSize) { + return newThreadScheduledExecutor(threadName, corePoolSize, true); } /** * Wrapper over ScheduledThreadPoolExecutor - * @param corePoolSize - * @return + * @param threadName threadName + * @param corePoolSize corePoolSize + * @param isDaemon isDaemon + * @return ScheduledThreadPoolExecutor */ - public static ScheduledExecutorService newDaemonThreadScheduledExecutor(String threadName,int corePoolSize) { + public static ScheduledExecutorService newThreadScheduledExecutor(String threadName, int corePoolSize, boolean isDaemon) { ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) + .setDaemon(isDaemon) .setNameFormat(threadName) .build(); ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(corePoolSize, threadFactory); @@ -135,7 +147,11 @@ public class ThreadUtils { return executor; } - + /** + * get thread info + * @param t t + * @return thread info + */ public static ThreadInfo getThreadInfo(Thread t) { long tid = t.getId(); return threadBean.getThreadInfo(tid, STACK_DEPTH); @@ -144,7 +160,9 @@ public class ThreadUtils { /** * Format the given ThreadInfo object as a String. - * @param indent a prefix for each line, used for nested indentation + * @param threadInfo threadInfo + * @param indent indent + * @return threadInfo */ public static String formatThreadInfo(ThreadInfo threadInfo, String indent) { StringBuilder sb = new StringBuilder(); @@ -156,9 +174,9 @@ public class ThreadUtils { /** * Print all of the thread's information and stack traces. * - * @param sb - * @param info - * @param indent + * @param sb StringBuilder + * @param info ThreadInfo + * @param indent indent */ public static void appendThreadInfo(StringBuilder sb, ThreadInfo info, @@ -193,10 +211,26 @@ public class ThreadUtils { } } + /** + * getTaskName + * @param id id + * @param name name + * @return task name + */ private static String getTaskName(long id, String name) { if (name == null) { return Long.toString(id); } return id + " (" + name + ")"; } + + /** + * sleep + * @param millis millis + */ + public static void sleep(final long millis) { + try { + Thread.sleep(millis); + } catch (final InterruptedException ignore) {} + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java index b4b89bfe26..731cdaa719 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java @@ -20,13 +20,18 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResUploadType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; +import java.net.URL; /** * common utils */ -public class CommonUtils { +public class CommonUtils { + private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class); + private CommonUtils() { throw new IllegalStateException("CommonUtils class"); } @@ -37,25 +42,25 @@ public class CommonUtils { public static String getSystemEnvPath() { String envPath = PropertyUtils.getString(Constants.DOLPHINSCHEDULER_ENV_PATH); if (StringUtils.isEmpty(envPath)) { - envPath = System.getProperty("user.home") + File.separator + ".bash_profile"; + URL envDefaultPath = CommonUtils.class.getClassLoader().getResource(Constants.ENV_PATH); + + if (envDefaultPath != null){ + envPath = envDefaultPath.getPath(); + logger.debug("env path :{}", envPath); + }else{ + envPath = System.getProperty("user.home") + File.separator + ".bash_profile"; + } } return envPath; } - /** - * @return get queue implementation name - */ - public static String getQueueImplValue(){ - return PropertyUtils.getString(Constants.SCHEDULER_QUEUE_IMPL); - } - /** * * @return is develop mode */ public static boolean isDevelopMode() { - return PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE); + return PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE, true); } @@ -65,9 +70,9 @@ public class CommonUtils { * @return true if upload resource is HDFS and kerberos startup */ public static boolean getKerberosStartupState(){ - String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); + String resUploadStartupType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE); ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); - Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE); + Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false); return resUploadType == ResUploadType.HDFS && kerberosStartupState; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java index c1c3ff5d57..f8ea0e7188 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java @@ -16,86 +16,35 @@ */ package org.apache.dolphinscheduler.common.utils; +import java.util.Arrays; +import java.util.Objects; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.*; - public class ConnectionUtils { - public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class); - - private static ConnectionUtils instance; - - ConnectionUtils() { - } - - public static ConnectionUtils getInstance() { - if (null == instance) { - syncInit(); - } - return instance; - } - - private static synchronized void syncInit() { - if (instance == null) { - instance = new ConnectionUtils(); - } - } - - public void release(ResultSet rs, Statement stmt, Connection conn) { - try { - if (rs != null) { - rs.close(); - rs = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } finally { - try { - if (stmt != null) { - stmt.close(); - stmt = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } finally { - try { - if (conn != null) { - conn.close(); - conn = null; - } - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - } - } - - public static void releaseResource(ResultSet rs, PreparedStatement ps, Connection conn) { - ConnectionUtils.getInstance().release(rs,ps,conn); - if (null != rs) { - try { - rs.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - - if (null != ps) { - try { - ps.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - - if (null != conn) { - try { - conn.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } - } + public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class); + + private ConnectionUtils() { + throw new IllegalStateException("ConnectionUtils class"); + } + + /** + * release resource + * @param resources resources + */ + public static void releaseResource(AutoCloseable... resources) { + + if (resources == null || resources.length == 0) { + return; + } + Arrays.stream(resources).filter(Objects::nonNull) + .forEach(resource -> { + try { + resource.close(); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + }); + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java index ec060d486b..68c1792346 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java @@ -1 +1 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Calendar; import java.util.Date; /** * date utils */ public class DateUtils { private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); /** * date to local datetime * * @param date date * @return local datetime */ private static LocalDateTime date2LocalDateTime(Date date) { return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault()); } /** * local datetime to date * * @param localDateTime local datetime * @return date */ private static Date localDateTime2Date(LocalDateTime localDateTime) { Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); return Date.from(instant); } /** * get current date str * * @return date string */ public static String getCurrentTime() { return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); } /** * get the date string in the specified format of the current time * * @param format date format * @return date string */ public static String getCurrentTime(String format) { return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); } /** * get the formatted date string * * @param date date * @param format e.g. yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(Date date, String format) { return format(date2LocalDateTime(date), format); } /** * get the formatted date string * * @param localDateTime local data time * @param format yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(LocalDateTime localDateTime, String format) { return localDateTime.format(DateTimeFormatter.ofPattern(format)); } /** * convert time to yyyy-MM-dd HH:mm:ss format * * @param date date * @return date string */ public static String dateToString(Date date) { return format(date, Constants.YYYY_MM_DD_HH_MM_SS); } /** * convert string to date and time * * @param date date * @param format format * @return date */ public static Date parse(String date, String format) { try { LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); return localDateTime2Date(ldt); } catch (Exception e) { logger.error("error while parse date:" + date, e); } return null; } /** * convert date str to yyyy-MM-dd HH:mm:ss format * * @param str date string * @return yyyy-MM-dd HH:mm:ss format */ public static Date stringToDate(String str) { return parse(str, Constants.YYYY_MM_DD_HH_MM_SS); } /** * get seconds between two dates * * @param d1 date1 * @param d2 date2 * @return differ seconds */ public static long differSec(Date d1, Date d2) { if(d1 == null || d2 == null){ return 0; } return (long) Math.ceil(differMs(d1, d2) / 1000.0); } /** * get ms between two dates * * @param d1 date1 * @param d2 date2 * @return differ ms */ public static long differMs(Date d1, Date d2) { return Math.abs(d1.getTime() - d2.getTime()); } /** * get hours between two dates * * @param d1 date1 * @param d2 date2 * @return differ hours */ public static long diffHours(Date d1, Date d2) { return (long) Math.ceil(diffMin(d1, d2) / 60.0); } /** * get minutes between two dates * * @param d1 date1 * @param d2 date2 * @return differ minutes */ public static long diffMin(Date d1, Date d2) { return (long) Math.ceil(differSec(d1, d2) / 60.0); } /** * get the date of the specified date in the days before and after * * @param date date * @param day day * @return the date of the specified date in the days before and after */ public static Date getSomeDay(Date date, int day) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); calendar.add(Calendar.DATE, day); return calendar.getTime(); } /** * compare two dates * * @param future future date * @param old old date * @return true if future time greater than old time */ public static boolean compare(Date future, Date old) { return future.getTime() > old.getTime(); } /** * convert schedule string to date * * @param schedule schedule * @return convert schedule string to date */ public static Date getScheduleDate(String schedule) { return stringToDate(schedule); } /** * format time to readable * * @param ms ms * @return format time */ public static String format2Readable(long ms) { long days = ms / (1000 * 60 * 60 * 24); long hours = (ms % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60); long minutes = (ms % (1000 * 60 * 60)) / (1000 * 60); long seconds = (ms % (1000 * 60)) / 1000; return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } /** * get monday * * note: Set the first day of the week to Monday, the default is Sunday * @param date date * @return get monday */ public static Date getMonday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); return cal.getTime(); } /** * get sunday * * note: Set the first day of the week to Monday, the default is Sunday * @param date date * @return get sunday */ public static Date getSunday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY); return cal.getTime(); } /** * get first day of month * * @param date date * @return first day of month * */ public static Date getFirstDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.set(Calendar.DAY_OF_MONTH, 1); return cal.getTime(); } /** * get some hour of day * * @param date date * @param offsetHour hours * @return some hour of day * */ public static Date getSomeHourOfDay(Date date, int offsetHour) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + offsetHour); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * get last day of month * * @param date date * @return get last day of month */ public static Date getLastDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.MONTH, 1); cal.set(Calendar.DAY_OF_MONTH, 1); cal.add(Calendar.DAY_OF_MONTH, -1); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay date * @return start day */ public static Date getStartOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of day */ public static Date getEndOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 23); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay day * @return start of hour */ public static Date getStartOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of hour */ public static Date getEndOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * get current date * @return current date */ public static Date getCurrentDate() { return DateUtils.parse(DateUtils.getCurrentTime(), Constants.YYYY_MM_DD_HH_MM_SS); } /** * get date * @return result date */ public static Date add(final Date date, final int calendarField, final int amount) { if (date == null) { throw new IllegalArgumentException("The date must not be null"); } final Calendar c = Calendar.getInstance(); c.setTime(date); c.add(calendarField, amount); return c.getTime(); } } \ No newline at end of file +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Calendar; import java.util.Date; /** * date utils */ public class DateUtils { private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); /** * date to local datetime * * @param date date * @return local datetime */ private static LocalDateTime date2LocalDateTime(Date date) { return LocalDateTime.ofInstant(date.toInstant(), ZoneId.systemDefault()); } /** * local datetime to date * * @param localDateTime local datetime * @return date */ private static Date localDateTime2Date(LocalDateTime localDateTime) { Instant instant = localDateTime.atZone(ZoneId.systemDefault()).toInstant(); return Date.from(instant); } /** * get current date str * * @return date string */ public static String getCurrentTime() { return getCurrentTime(Constants.YYYY_MM_DD_HH_MM_SS); } /** * get the date string in the specified format of the current time * * @param format date format * @return date string */ public static String getCurrentTime(String format) { return LocalDateTime.now().format(DateTimeFormatter.ofPattern(format)); } /** * get the formatted date string * * @param date date * @param format e.g. yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(Date date, String format) { return format(date2LocalDateTime(date), format); } /** * get the formatted date string * * @param localDateTime local data time * @param format yyyy-MM-dd HH:mm:ss * @return date string */ public static String format(LocalDateTime localDateTime, String format) { return localDateTime.format(DateTimeFormatter.ofPattern(format)); } /** * convert time to yyyy-MM-dd HH:mm:ss format * * @param date date * @return date string */ public static String dateToString(Date date) { return format(date, Constants.YYYY_MM_DD_HH_MM_SS); } /** * convert string to date and time * * @param date date * @param format format * @return date */ public static Date parse(String date, String format) { try { LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); return localDateTime2Date(ldt); } catch (Exception e) { logger.error("error while parse date:" + date, e); } return null; } /** * convert date str to yyyy-MM-dd HH:mm:ss format * * @param str date string * @return yyyy-MM-dd HH:mm:ss format */ public static Date stringToDate(String str) { return parse(str, Constants.YYYY_MM_DD_HH_MM_SS); } /** * get seconds between two dates * * @param d1 date1 * @param d2 date2 * @return differ seconds */ public static long differSec(Date d1, Date d2) { if(d1 == null || d2 == null){ return 0; } return (long) Math.ceil(differMs(d1, d2) / 1000.0); } /** * get ms between two dates * * @param d1 date1 * @param d2 date2 * @return differ ms */ public static long differMs(Date d1, Date d2) { return Math.abs(d1.getTime() - d2.getTime()); } /** * get hours between two dates * * @param d1 date1 * @param d2 date2 * @return differ hours */ public static long diffHours(Date d1, Date d2) { return (long) Math.ceil(diffMin(d1, d2) / 60.0); } /** * get minutes between two dates * * @param d1 date1 * @param d2 date2 * @return differ minutes */ public static long diffMin(Date d1, Date d2) { return (long) Math.ceil(differSec(d1, d2) / 60.0); } /** * get the date of the specified date in the days before and after * * @param date date * @param day day * @return the date of the specified date in the days before and after */ public static Date getSomeDay(Date date, int day) { Calendar calendar = Calendar.getInstance(); calendar.setTime(date); calendar.add(Calendar.DATE, day); return calendar.getTime(); } /** * compare two dates * * @param future future date * @param old old date * @return true if future time greater than old time */ public static boolean compare(Date future, Date old) { return future.getTime() > old.getTime(); } /** * convert schedule string to date * * @param schedule schedule * @return convert schedule string to date */ public static Date getScheduleDate(String schedule) { return stringToDate(schedule); } /** * format time to readable * * @param ms ms * @return format time */ public static String format2Readable(long ms) { long days = ms / (1000 * 60 * 60 * 24); long hours = (ms % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60); long minutes = (ms % (1000 * 60 * 60)) / (1000 * 60); long seconds = (ms % (1000 * 60)) / 1000; return String.format("%02d %02d:%02d:%02d", days, hours, minutes, seconds); } /** * get monday * * note: Set the first day of the week to Monday, the default is Sunday * @param date date * @return get monday */ public static Date getMonday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.MONDAY); return cal.getTime(); } /** * get sunday * * note: Set the first day of the week to Monday, the default is Sunday * @param date date * @return get sunday */ public static Date getSunday(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.setFirstDayOfWeek(Calendar.MONDAY); cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY); return cal.getTime(); } /** * get first day of month * * @param date date * @return first day of month * */ public static Date getFirstDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.set(Calendar.DAY_OF_MONTH, 1); return cal.getTime(); } /** * get some hour of day * * @param date date * @param offsetHour hours * @return some hour of day * */ public static Date getSomeHourOfDay(Date date, int offsetHour) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.set(Calendar.HOUR_OF_DAY, cal.get(Calendar.HOUR_OF_DAY) + offsetHour); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * get last day of month * * @param date date * @return get last day of month */ public static Date getLastDayOfMonth(Date date) { Calendar cal = Calendar.getInstance(); cal.setTime(date); cal.add(Calendar.MONTH, 1); cal.set(Calendar.DAY_OF_MONTH, 1); cal.add(Calendar.DAY_OF_MONTH, -1); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay date * @return start day */ public static Date getStartOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of day */ public static Date getEndOfDay(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.HOUR_OF_DAY, 23); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * return YYYY-MM-DD 00:00:00 * * @param inputDay day * @return start of hour */ public static Date getStartOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); return cal.getTime(); } /** * return YYYY-MM-DD 23:59:59 * * @param inputDay day * @return end of hour */ public static Date getEndOfHour(Date inputDay) { Calendar cal = Calendar.getInstance(); cal.setTime(inputDay); cal.set(Calendar.MINUTE, 59); cal.set(Calendar.SECOND, 59); cal.set(Calendar.MILLISECOND, 999); return cal.getTime(); } /** * get current date * @return current date */ public static Date getCurrentDate() { return DateUtils.parse(DateUtils.getCurrentTime(), Constants.YYYY_MM_DD_HH_MM_SS); } /** * get date * @param date date * @param calendarField calendarField * @param amount amount * @return date */ public static Date add(final Date date, final int calendarField, final int amount) { if (date == null) { throw new IllegalArgumentException("The date must not be null"); } final Calendar c = Calendar.getInstance(); c.setTime(date); c.add(calendarField, amount); return c.getTime(); } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java index dc60b04c59..bae8f7f9bd 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -34,6 +34,8 @@ import static org.apache.dolphinscheduler.common.Constants.*; public class FileUtils { public static final Logger logger = LoggerFactory.getLogger(FileUtils.class); + public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH,"/tmp/dolphinscheduler"); + /** * get file suffix * @@ -59,7 +61,14 @@ public class FileUtils { * @return download file name */ public static String getDownloadFilename(String filename) { - return String.format("%s/%s/%s", PropertyUtils.getString(DATA_DOWNLOAD_BASEDIR_PATH), DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); + String fileName = String.format("%s/download/%s/%s", DATA_BASEDIR, DateUtils.getCurrentTime(YYYYMMDDHHMMSS), filename); + + File file = new File(fileName); + if (!file.getParentFile().exists()){ + file.getParentFile().mkdirs(); + } + + return fileName; } /** @@ -70,7 +79,13 @@ public class FileUtils { * @return local file path */ public static String getUploadFilename(String tenantCode, String filename) { - return String.format("%s/%s/resources/%s", PropertyUtils.getString(DATA_BASEDIR_PATH), tenantCode, filename); + String fileName = String.format("%s/%s/resources/%s", DATA_BASEDIR, tenantCode, filename); + File file = new File(fileName); + if (!file.getParentFile().exists()){ + file.getParentFile().mkdirs(); + } + + return fileName; } /** @@ -82,9 +97,14 @@ public class FileUtils { * @return directory of process execution */ public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId, int taskInstanceId) { - - return String.format("%s/process/%s/%s/%s/%s", PropertyUtils.getString(PROCESS_EXEC_BASEPATH), Integer.toString(projectId), + String fileName = String.format("%s/exec/process/%s/%s/%s/%s", DATA_BASEDIR, Integer.toString(projectId), Integer.toString(processDefineId), Integer.toString(processInstanceId),Integer.toString(taskInstanceId)); + File file = new File(fileName); + if (!file.getParentFile().exists()){ + file.getParentFile().mkdirs(); + } + + return fileName; } /** @@ -95,15 +115,21 @@ public class FileUtils { * @return directory of process instances */ public static String getProcessExecDir(int projectId, int processDefineId, int processInstanceId) { - return String.format("%s/process/%s/%s/%s", PropertyUtils.getString(PROCESS_EXEC_BASEPATH), Integer.toString(projectId), + String fileName = String.format("%s/exec/process/%s/%s/%s", DATA_BASEDIR, Integer.toString(projectId), Integer.toString(processDefineId), Integer.toString(processInstanceId)); + File file = new File(fileName); + if (!file.getParentFile().exists()){ + file.getParentFile().mkdirs(); + } + + return fileName; } /** * @return get suffixes for resource files that support online viewing */ public static String getResourceViewSuffixs() { - return PropertyUtils.getString(RESOURCE_VIEW_SUFFIXS); + return PropertyUtils.getString(RESOURCE_VIEW_SUFFIXS, RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE); } /** diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java index 6cb58a4324..02f00ce330 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java @@ -16,6 +16,9 @@ */ package org.apache.dolphinscheduler.common.utils; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ResUploadType; @@ -23,6 +26,7 @@ import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONException; import com.alibaba.fastjson.JSONObject; import org.apache.commons.io.IOUtils; +import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.FileSystem; @@ -37,9 +41,12 @@ import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; +import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH; + /** * hadoop utils * single instance @@ -48,39 +55,51 @@ public class HadoopUtils implements Closeable { private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class); - private static HadoopUtils instance = new HadoopUtils(); - private static Configuration configuration; - private static FileSystem fs; + private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); + public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler"); + public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS); + public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); + + private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY"; + + private static final LoadingCache cache = CacheBuilder + .newBuilder() + .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 7), TimeUnit.DAYS) + .build(new CacheLoader() { + @Override + public HadoopUtils load(String key) throws Exception { + return new HadoopUtils(); + } + }); - private String hdfsUser; + private static volatile boolean yarnEnabled = false; - private HadoopUtils(){ - hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER); + private Configuration configuration; + private FileSystem fs; + + private HadoopUtils() { init(); initHdfsPath(); } - public static HadoopUtils getInstance(){ - // if kerberos startup , renew HadoopUtils - if (CommonUtils.getKerberosStartupState()){ - return new HadoopUtils(); - } - return instance; + public static HadoopUtils getInstance() { + + return cache.getUnchecked(HADOOP_UTILS_KEY); } /** * init dolphinscheduler root path in hdfs */ - private void initHdfsPath(){ - String hdfsPath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH); - Path path = new Path(hdfsPath); + + private void initHdfsPath() { + Path path = new Path(resourceUploadPath); try { if (!fs.exists(path)) { fs.mkdirs(path); } } catch (Exception e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } } @@ -89,81 +108,68 @@ public class HadoopUtils implements Closeable { * init hadoop configuration */ private void init() { - if (configuration == null) { - synchronized (HadoopUtils.class) { - if (configuration == null) { - try { - configuration = new Configuration(); - - String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); - ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); - - if (resUploadType == ResUploadType.HDFS){ - if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){ - System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, - PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); - configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos"); - UserGroupInformation.setConfiguration(configuration); - UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), - PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); - } + try { + configuration = new Configuration(); + + String resourceStorageType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE); + ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType); + + if (resUploadType == ResUploadType.HDFS){ + if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false)){ + System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, + PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH)); + configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos"); + hdfsUser = ""; + UserGroupInformation.setConfiguration(configuration); + UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME), + PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH)); + } - String defaultFS = configuration.get(Constants.FS_DEFAULTFS); - //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file - // the default is the local file system - if(defaultFS.startsWith("file")){ - String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS); - if(StringUtils.isNotBlank(defaultFSProp)){ - Map fsRelatedProps = PropertyUtils.getPrefixedProperties("fs."); - configuration.set(Constants.FS_DEFAULTFS,defaultFSProp); - fsRelatedProps.forEach((key, value) -> configuration.set(key, value)); - }else{ - logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS ); - throw new RuntimeException( - String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS) - ); - } - }else{ - logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS); - } + String defaultFS = configuration.get(Constants.FS_DEFAULTFS); + //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file + // the default is the local file system + if (defaultFS.startsWith("file")) { + String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS); + if (StringUtils.isNotBlank(defaultFSProp)) { + Map fsRelatedProps = PropertyUtils.getPrefixedProperties("fs."); + configuration.set(Constants.FS_DEFAULTFS, defaultFSProp); + fsRelatedProps.forEach((key, value) -> configuration.set(key, value)); + } else { + logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS); + throw new RuntimeException( + String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS) + ); + } + } else { + logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS); + } - if (fs == null) { - if(StringUtils.isNotEmpty(hdfsUser)){ - UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); - ugi.doAs(new PrivilegedExceptionAction() { - @Override - public Boolean run() throws Exception { - fs = FileSystem.get(configuration); - return true; - } - }); - }else{ - logger.warn("hdfs.root.user is not set value!"); - fs = FileSystem.get(configuration); - } + if (fs == null) { + if (StringUtils.isNotEmpty(hdfsUser)) { + UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser); + ugi.doAs(new PrivilegedExceptionAction() { + @Override + public Boolean run() throws Exception { + fs = FileSystem.get(configuration); + return true; } - }else if (resUploadType == ResUploadType.S3){ - configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS)); - configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT)); - configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); - configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); - fs = FileSystem.get(configuration); - } - - - String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS); - String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS); - if (!StringUtils.isEmpty(rmHaIds)) { - appAddress = getAppAddress(appAddress, rmHaIds); - logger.info("appAddress : {}", appAddress); - } - configuration.set(Constants.YARN_APPLICATION_STATUS_ADDRESS, appAddress); - } catch (Exception e) { - logger.error(e.getMessage(), e); + }); + } else { + logger.warn("hdfs.root.user is not set value!"); + fs = FileSystem.get(configuration); } - } + } else if (resUploadType == ResUploadType.S3) { + configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS)); + configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT)); + configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY)); + configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY)); + fs = FileSystem.get(configuration); } + + + } catch (Exception e) { + logger.error(e.getMessage(), e); } } @@ -181,20 +187,42 @@ public class HadoopUtils implements Closeable { * @return url of application */ public String getApplicationUrl(String applicationId) { - return String.format(configuration.get(Constants.YARN_APPLICATION_STATUS_ADDRESS), applicationId); + /** + * if rmHaIds contains xx, it signs not use resourcemanager + * otherwise: + * if rmHaIds is empty, single resourcemanager enabled + * if rmHaIds not empty: resourcemanager HA enabled + */ + String appUrl = ""; + //not use resourcemanager + if (rmHaIds.contains(Constants.YARN_RESOURCEMANAGER_HA_XX)){ + + yarnEnabled = false; + logger.warn("should not step here"); + } else if (!StringUtils.isEmpty(rmHaIds)) { + //resourcemanager HA enabled + appUrl = getAppAddress(appAddress, rmHaIds); + yarnEnabled = true; + logger.info("application url : {}", appUrl); + } else { + //single resourcemanager enabled + yarnEnabled = true; + } + + return String.format(appUrl, applicationId); } /** * cat file on hdfs * - * @param hdfsFilePath hdfs file path + * @param hdfsFilePath hdfs file path * @return byte[] byte array * @throws IOException errors */ public byte[] catFile(String hdfsFilePath) throws IOException { - if(StringUtils.isBlank(hdfsFilePath)){ - logger.error("hdfs file path:{} is blank",hdfsFilePath); + if (StringUtils.isBlank(hdfsFilePath)) { + logger.error("hdfs file path:{} is blank", hdfsFilePath); return new byte[0]; } @@ -203,29 +231,28 @@ public class HadoopUtils implements Closeable { } - /** * cat file on hdfs * - * @param hdfsFilePath hdfs file path - * @param skipLineNums skip line numbers - * @param limit read how many lines + * @param hdfsFilePath hdfs file path + * @param skipLineNums skip line numbers + * @param limit read how many lines * @return content of file * @throws IOException errors */ public List catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { - if (StringUtils.isBlank(hdfsFilePath)){ - logger.error("hdfs file path:{} is blank",hdfsFilePath); + if (StringUtils.isBlank(hdfsFilePath)) { + logger.error("hdfs file path:{} is blank", hdfsFilePath); return Collections.emptyList(); } - try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))){ + try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) { BufferedReader br = new BufferedReader(new InputStreamReader(in)); Stream stream = br.lines().skip(skipLineNums).limit(limit); return stream.collect(Collectors.toList()); } - + } /** @@ -258,17 +285,17 @@ public class HadoopUtils implements Closeable { /** * the src file is on the local disk. Add it to FS at * the given dst name. - - * @param srcFile local file - * @param dstHdfsPath destination hdfs path - * @param deleteSource whether to delete the src - * @param overwrite whether to overwrite an existing file + * + * @param srcFile local file + * @param dstHdfsPath destination hdfs path + * @param deleteSource whether to delete the src + * @param overwrite whether to overwrite an existing file * @return if success or not * @throws IOException errors */ public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException { Path srcPath = new Path(srcFile); - Path dstPath= new Path(dstHdfsPath); + Path dstPath = new Path(dstHdfsPath); fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath); @@ -278,10 +305,10 @@ public class HadoopUtils implements Closeable { /** * copy hdfs file to local * - * @param srcHdfsFilePath source hdfs file path - * @param dstFile destination file - * @param deleteSource delete source - * @param overwrite overwrite + * @param srcHdfsFilePath source hdfs file path + * @param dstFile destination file + * @param deleteSource delete source + * @param overwrite overwrite * @return result of copy hdfs file to local * @throws IOException errors */ @@ -299,7 +326,7 @@ public class HadoopUtils implements Closeable { } } - if(!dstPath.getParentFile().exists()){ + if (!dstPath.getParentFile().exists()) { dstPath.getParentFile().mkdirs(); } @@ -307,14 +334,13 @@ public class HadoopUtils implements Closeable { } /** - * * delete a file * * @param hdfsFilePath the path to delete. - * @param recursive if path is a directory and set to - * true, the directory is deleted else throws an exception. In - * case of a file the recursive can be set to either true or false. - * @return true if delete is successful else false. + * @param recursive if path is a directory and set to + * true, the directory is deleted else throws an exception. In + * case of a file the recursive can be set to either true or false. + * @return true if delete is successful else false. * @throws IOException errors */ public boolean delete(String hdfsFilePath, boolean recursive) throws IOException { @@ -339,7 +365,7 @@ public class HadoopUtils implements Closeable { * @return {@link FileStatus} file status * @throws Exception errors */ - public FileStatus[] listFileStatus(String filePath)throws Exception{ + public FileStatus[] listFileStatus(String filePath) throws Exception { try { return fs.listStatus(new Path(filePath)); } catch (IOException e) { @@ -351,15 +377,23 @@ public class HadoopUtils implements Closeable { /** * Renames Path src to Path dst. Can take place on local fs * or remote DFS. + * * @param src path to be renamed * @param dst new path after rename - * @throws IOException on failure * @return true if rename is successful + * @throws IOException on failure */ public boolean rename(String src, String dst) throws IOException { return fs.rename(new Path(src), new Path(dst)); } + /** + * hadoop resourcemanager enabled or not + * @return result + */ + public boolean isYarnEnabled() { + return yarnEnabled; + } /** * get the state of an application @@ -400,17 +434,32 @@ public class HadoopUtils implements Closeable { } /** - * + * get data hdfs path * @return data hdfs path */ public static String getHdfsDataBasePath() { - String basePath = PropertyUtils.getString(Constants.DATA_STORE_2_HDFS_BASEPATH); - if ("/".equals(basePath)) { + if ("/".equals(resourceUploadPath)) { // if basepath is configured to /, the generated url may be //default/resources (with extra leading /) return ""; } else { - return basePath; + return resourceUploadPath; + } + } + + /** + * hdfs resource dir + * + * @param tenantCode tenant code + * @return hdfs resource dir + */ + public static String getHdfsDir(ResourceType resourceType,String tenantCode) { + String hdfsDir = ""; + if (resourceType.equals(ResourceType.FILE)) { + hdfsDir = getHdfsResDir(tenantCode); + } else if (resourceType.equals(ResourceType.UDF)) { + hdfsDir = getHdfsUdfDir(tenantCode); } + return hdfsDir; } /** @@ -427,11 +476,11 @@ public class HadoopUtils implements Closeable { * hdfs user dir * * @param tenantCode tenant code - * @param userId user id + * @param userId user id * @return hdfs resource dir */ - public static String getHdfsUserDir(String tenantCode,int userId) { - return String.format("%s/home/%d", getHdfsTenantDir(tenantCode),userId); + public static String getHdfsUserDir(String tenantCode, int userId) { + return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId); } /** @@ -444,26 +493,39 @@ public class HadoopUtils implements Closeable { return String.format("%s/udfs", getHdfsTenantDir(tenantCode)); } + + /** + * get hdfs file name + * + * @param resourceType resource type + * @param tenantCode tenant code + * @param fileName file name + * @return hdfs file name + */ + public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) { + return String.format("%s/%s", getHdfsDir(resourceType,tenantCode), fileName); + } + /** - * get absolute path and name for file on hdfs + * get absolute path and name for resource file on hdfs * * @param tenantCode tenant code - * @param filename file name + * @param fileName file name * @return get absolute path and name for file on hdfs */ - public static String getHdfsFilename(String tenantCode, String filename) { - return String.format("%s/%s", getHdfsResDir(tenantCode), filename); + public static String getHdfsResourceFileName(String tenantCode, String fileName) { + return String.format("%s/%s", getHdfsResDir(tenantCode), fileName); } /** * get absolute path and name for udf file on hdfs * * @param tenantCode tenant code - * @param filename file name + * @param fileName file name * @return get absolute path and name for udf file on hdfs */ - public static String getHdfsUdfFilename(String tenantCode, String filename) { - return String.format("%s/%s", getHdfsUdfDir(tenantCode), filename); + public static String getHdfsUdfFileName(String tenantCode, String fileName) { + return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName); } /** @@ -479,7 +541,7 @@ public class HadoopUtils implements Closeable { * getAppAddress * * @param appAddress app address - * @param rmHa resource manager ha + * @param rmHa resource manager ha * @return app address */ public static String getAppAddress(String appAddress, String rmHa) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java index 7de198f28b..98d9cf16ec 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java @@ -40,7 +40,7 @@ public class HttpUtils { /** * get http request content * @param url url - * @return http response + * @return http get request response content */ public static String get(String url){ CloseableHttpClient httpclient = HttpClients.createDefault(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java index 73df158aa3..ce551d8405 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IOUtils.java @@ -19,26 +19,17 @@ package org.apache.dolphinscheduler.common.utils; +import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; public class IOUtils { - public static void closeQuietly(InputStream fis){ - if(fis != null){ + public static void closeQuietly(Closeable closeable){ + if(closeable != null){ try { - fis.close(); - } catch (IOException ignore) { - } - } - } - - public static void closeQuietly(InputStreamReader reader){ - if(reader != null){ - try { - reader.close(); + closeable.close(); } catch (IOException ignore) { + // nothing need to do } } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java index e7e0b34bdd..3b068c60d2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/IpUtils.java @@ -17,16 +17,11 @@ package org.apache.dolphinscheduler.common.utils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - /** * http utils */ public class IpUtils { - private static final Logger logger = LoggerFactory.getLogger(IpUtils.class); public static final String DOT = "."; /** diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index ec523b1ff2..f0aed91a0d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.utils; +import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.TypeReference; @@ -41,12 +42,6 @@ public class JSONUtils { */ private static final ObjectMapper objectMapper = new ObjectMapper(); - /** - * init - */ - private static final JSONUtils instance = new JSONUtils(); - - private JSONUtils() { //Feature that determines whether encountering of unknown properties, false means not analyzer unknown properties objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false).setTimeZone(TimeZone.getDefault()); @@ -59,7 +54,7 @@ public class JSONUtils { */ public static String toJson(Object object) { try{ - return JSONObject.toJSONString(object,false); + return JSON.toJSONString(object,false); } catch (Exception e) { logger.error("object to json exception!",e); } @@ -89,7 +84,7 @@ public class JSONUtils { } try { - return JSONObject.parseObject(json, clazz); + return JSON.parseObject(json, clazz); } catch (Exception e) { logger.error("parse object exception!",e); } @@ -178,7 +173,7 @@ public class JSONUtils { } try { - return JSONObject.parseObject(json, new TypeReference>(){}); + return JSON.parseObject(json, new TypeReference>(){}); } catch (Exception e) { logger.error("json to map exception!",e); } @@ -203,7 +198,7 @@ public class JSONUtils { } try { - return JSONObject.parseObject(json, new TypeReference>() {}); + return JSON.parseObject(json, new TypeReference>() {}); } catch (Exception e) { logger.error("json to map exception!",e); } @@ -218,23 +213,23 @@ public class JSONUtils { */ public static String toJsonString(Object object) { try{ - return JSONObject.toJSONString(object,false); + return JSON.toJSONString(object,false); } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); + throw new RuntimeException("Object json deserialization exception.", e); } } public static JSONObject parseObject(String text) { try{ - return JSONObject.parseObject(text); + return JSON.parseObject(text); } catch (Exception e) { - throw new RuntimeException("Json deserialization exception.", e); + throw new RuntimeException("String json deserialization exception.", e); } } public static JSONArray parseArray(String text) { try{ - return JSONObject.parseArray(text); + return JSON.parseArray(text); } catch (Exception e) { throw new RuntimeException("Json deserialization exception.", e); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java index fc08eb645b..191df335c5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/LoggerUtils.java @@ -79,7 +79,7 @@ public class LoggerUtils { */ public static List getAppIds(String log, Logger logger) { - List appIds = new ArrayList(); + List appIds = new ArrayList<>(); Matcher matcher = APPLICATION_REGEX.matcher(log); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java index b011c0bc4e..3505e59fb5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -352,13 +352,7 @@ public class OSUtils { return sb.toString(); } finally { - if (br != null) { - try { - br.close(); - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } + IOUtils.closeQuietly(br); } } @@ -408,7 +402,7 @@ public class OSUtils { * whether is windows * @return true if windows */ - public static boolean isWindows() { ; + public static boolean isWindows() { return getOSName().startsWith("Windows"); } @@ -422,16 +416,18 @@ public class OSUtils { /** * check memory and cpu usage + * @param systemCpuLoad systemCpuLoad + * @param systemReservedMemory systemReservedMemory * @return check memory and cpu usage */ public static Boolean checkResource(double systemCpuLoad, double systemReservedMemory){ - // judging usage + // system load average double loadAverage = OSUtils.loadAverage(); - // + // system available physical memory double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); if(loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory){ - logger.warn("load or availablePhysicalMemorySize(G) is too high, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage); + logger.warn("load is too high or availablePhysicalMemorySize(G) is too low, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage); return false; }else{ return true; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java index 9492b49cb1..270e0c4696 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.utils; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; @@ -23,7 +24,6 @@ import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils; -import com.alibaba.fastjson.JSONObject; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.DateUtils; import org.slf4j.Logger; @@ -78,6 +78,45 @@ public class ParameterUtils { return parameterString; } + /** + * new + * convert parameters place holders + * + * @param parameterString parameter + * @param parameterMap parameter map + * @return convert parameters place holders + */ + public static String convertParameterPlaceholders2(String parameterString, Map parameterMap) { + if (StringUtils.isEmpty(parameterString)) { + return parameterString; + } + //Get current time, schedule execute time + String cronTimeStr = parameterMap.get(Constants.PARAMETER_SHECDULE_TIME); + Date cronTime = null; + + if (StringUtils.isNotEmpty(cronTimeStr)) { + try { + cronTime = DateUtils.parseDate(cronTimeStr, new String[]{Constants.PARAMETER_FORMAT_TIME}); + + } catch (ParseException e) { + logger.error(String.format("parse %s exception", cronTimeStr), e); + } + } else { + cronTime = new Date(); + } + + // replace variable ${} form,refers to the replacement of system variables and custom variables + parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); + + // replace time $[...] form, eg. $[yyyyMMdd] + if (cronTime != null) { + parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true); + + } + return parameterString; + } + + /** * set in parameter * @param index index @@ -157,7 +196,7 @@ public class ParameterUtils { property.setValue(val); } } - return JSONObject.toJSONString(globalParamList); + return JSON.toJSONString(globalParamList); } @@ -173,4 +212,44 @@ public class ParameterUtils { } return inputString; } + + /** + * new + * $[yyyyMMdd] replace scheduler time + * @param text + * @param paramsMap + * @return + */ + public static String replaceScheduleTime(String text, Date scheduleTime, Map paramsMap) { + if (paramsMap != null) { + //if getScheduleTime null ,is current date + if (null == scheduleTime) { + scheduleTime = new Date(); + } + String dateTime = org.apache.dolphinscheduler.common.utils.DateUtils.format(scheduleTime, Constants.PARAMETER_FORMAT_TIME); + Property p = new Property(); + p.setValue(dateTime); + p.setProp(Constants.PARAMETER_SHECDULE_TIME); + paramsMap.put(Constants.PARAMETER_SHECDULE_TIME, p); + text = ParameterUtils.convertParameterPlaceholders2(text, convert(paramsMap)); + } + return text; + } + + + /** + * format convert + * @param paramsMap params map + * @return Map of converted + * see org.apache.dolphinscheduler.server.utils.ParamUtils.convert + */ + public static Map convert(Map paramsMap){ + Map map = new HashMap<>(); + Iterator> iter = paramsMap.entrySet().iterator(); + while (iter.hasNext()){ + Map.Entry en = iter.next(); + map.put(en.getKey(),en.getValue().getValue()); + } + return map; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java index e59cbd1b96..32fd298a7d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java @@ -34,10 +34,9 @@ public final class Preconditions { * Ensures that the given object reference is not null. * Upon violation, a {@code NullPointerException} with no message is thrown. * - * @param reference The object reference - * @return The object reference itself (generically typed). - * - * @throws NullPointerException Thrown, if the passed reference was null. + * @param reference reference + * @param T + * @return T */ public static T checkNotNull(T reference) { if (reference == null) { @@ -49,12 +48,10 @@ public final class Preconditions { /** * Ensures that the given object reference is not null. * Upon violation, a {@code NullPointerException} with the given message is thrown. - * - * @param reference The object reference - * @param errorMessage The message for the {@code NullPointerException} that is thrown if the check fails. - * @return The object reference itself (generically typed). - * - * @throws NullPointerException Thrown, if the passed reference was null. + * @param reference reference + * @param errorMessage errorMessage + * @param T + * @return T */ public static T checkNotNull(T reference, String errorMessage) { if (reference == null) { @@ -78,9 +75,8 @@ public final class Preconditions { * @param errorMessageArgs The arguments for the error message, to be inserted into the * message template for the {@code %s} placeholders. * + * @param T * @return The object reference itself (generically typed). - * - * @throws NullPointerException Thrown, if the passed reference was null. */ public static T checkNotNull(T reference, String errorMessageTemplate, diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java index 5c1011cd8b..ba1fcd6926 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -43,13 +43,11 @@ public class PropertyUtils { private static final Properties properties = new Properties(); - private static final PropertyUtils propertyUtils = new PropertyUtils(); - - private PropertyUtils(){ - init(); + private PropertyUtils() { + throw new IllegalStateException("PropertyUtils class"); } - private void init(){ + static { String[] propertyFiles = new String[]{COMMON_PROPERTIES_PATH}; for (String fileName : propertyFiles) { InputStream fis = null; @@ -74,7 +72,7 @@ public class PropertyUtils { * @return judge whether resource upload startup */ public static Boolean getResUploadStartupState(){ - String resUploadStartupType = PropertyUtils.getString(Constants.RES_UPLOAD_STARTUP_TYPE); + String resUploadStartupType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE); ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType); return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3; } @@ -89,6 +87,18 @@ public class PropertyUtils { return properties.getProperty(key.trim()); } + /** + * get property value + * + * @param key property name + * @param defaultVal default value + * @return property value + */ + public static String getString(String key, String defaultVal) { + String val = properties.getProperty(key.trim()); + return val == null ? defaultVal : val; + } + /** * get property value * @@ -134,6 +144,22 @@ public class PropertyUtils { return false; } + /** + * get property value + * + * @param key property name + * @param defaultValue default value + * @return property value + */ + public static Boolean getBoolean(String key, boolean defaultValue) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return defaultValue; + } + /** * get property long value * @param key key diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java index aa8d44fa42..feadb68ee6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java @@ -89,45 +89,6 @@ public class ResInfo { } - /** - * get heart beat info - * @param now now - * @return heart beat info - */ - public static String getHeartBeatInfo(Date now){ - return buildHeartbeatForZKInfo(OSUtils.getHost(), - OSUtils.getProcessID(), - OSUtils.cpuUsage(), - OSUtils.memoryUsage(), - OSUtils.loadAverage(), - DateUtils.dateToString(now), - DateUtils.dateToString(now)); - - } - - /** - * build heartbeat info for zk - * @param host host - * @param port port - * @param cpuUsage cpu usage - * @param memoryUsage memory usage - * @param loadAverage load average - * @param createTime create time - * @param lastHeartbeatTime last heartbeat time - * @return heartbeat info - */ - public static String buildHeartbeatForZKInfo(String host , int port , - double cpuUsage , double memoryUsage,double loadAverage, - String createTime,String lastHeartbeatTime){ - - return host + Constants.COMMA + port + Constants.COMMA - + cpuUsage + Constants.COMMA - + memoryUsage + Constants.COMMA - + loadAverage + Constants.COMMA - + createTime + Constants.COMMA - + lastHeartbeatTime; - } - /** * parse heartbeat info for zk * @param heartBeatInfo heartbeat info @@ -143,13 +104,11 @@ public class ResInfo { } Server masterServer = new Server(); - masterServer.setHost(masterArray[0]); - masterServer.setPort(Integer.parseInt(masterArray[1])); - masterServer.setResInfo(getResInfoJson(Double.parseDouble(masterArray[2]), - Double.parseDouble(masterArray[3]), - Double.parseDouble(masterArray[4]))); - masterServer.setCreateTime(DateUtils.stringToDate(masterArray[5])); - masterServer.setLastHeartbeatTime(DateUtils.stringToDate(masterArray[6])); + masterServer.setResInfo(getResInfoJson(Double.parseDouble(masterArray[0]), + Double.parseDouble(masterArray[1]), + Double.parseDouble(masterArray[2]))); + masterServer.setCreateTime(DateUtils.stringToDate(masterArray[3])); + masterServer.setLastHeartbeatTime(DateUtils.stringToDate(masterArray[4])); return masterServer; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java index 103e75fb61..32d1e41fa5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/dependent/DependentDateUtils.java @@ -27,9 +27,9 @@ public class DependentDateUtils { /** * get last day interval list - * @param businessDate - * @param hourNumber - * @return + * @param businessDate businessDate + * @param hourNumber hourNumber + * @return DateInterval list */ public static List getLastHoursInterval(Date businessDate, int hourNumber){ List dateIntervals = new ArrayList<>(); @@ -44,8 +44,8 @@ public class DependentDateUtils { /** * get today day interval list - * @param businessDate - * @return + * @param businessDate businessDate + * @return DateInterval list */ public static List getTodayInterval(Date businessDate){ @@ -59,9 +59,9 @@ public class DependentDateUtils { /** * get last day interval list - * @param businessDate - * @param someDay - * @return + * @param businessDate businessDate + * @param someDay someDay + * @return DateInterval list */ public static List getLastDayInterval(Date businessDate, int someDay){ @@ -78,8 +78,8 @@ public class DependentDateUtils { /** * get interval between this month first day and businessDate - * @param businessDate - * @return + * @param businessDate businessDate + * @return DateInterval list */ public static List getThisMonthInterval(Date businessDate) { Date firstDay = DateUtils.getFirstDayOfMonth(businessDate); @@ -88,8 +88,8 @@ public class DependentDateUtils { /** * get interval between last month first day and last day - * @param businessDate - * @return + * @param businessDate businessDate + * @return DateInterval list */ public static List getLastMonthInterval(Date businessDate) { @@ -102,11 +102,12 @@ public class DependentDateUtils { /** * get interval on first/last day of the last month - * @param businessDate - * @param isBeginDay - * @return + * @param businessDate businessDate + * @param isBeginDay isBeginDay + * @return DateInterval list */ - public static List getLastMonthBeginInterval(Date businessDate, boolean isBeginDay) { + public static List getLastMonthBeginInterval(Date businessDate, + boolean isBeginDay) { Date firstDayThisMonth = DateUtils.getFirstDayOfMonth(businessDate); Date lastDay = DateUtils.getSomeDay(firstDayThisMonth, -1); @@ -120,8 +121,8 @@ public class DependentDateUtils { /** * get interval between monday to businessDate of this week - * @param businessDate - * @return + * @param businessDate businessDate + * @return DateInterval list */ public static List getThisWeekInterval(Date businessDate) { Date mondayThisWeek = DateUtils.getMonday(businessDate); @@ -131,8 +132,8 @@ public class DependentDateUtils { /** * get interval between monday to sunday of last week * default set monday the first day of week - * @param businessDate - * @return + * @param businessDate businessDate + * @return DateInterval list */ public static List getLastWeekInterval(Date businessDate) { Date mondayThisWeek = DateUtils.getMonday(businessDate); @@ -144,9 +145,9 @@ public class DependentDateUtils { /** * get interval on the day of last week * default set monday the first day of week - * @param businessDate + * @param businessDate businessDate * @param dayOfWeek monday:1,tuesday:2,wednesday:3,thursday:4,friday:5,saturday:6,sunday:7 - * @return + * @return DateInterval list */ public static List getLastWeekOneDayInterval(Date businessDate, int dayOfWeek) { Date mondayThisWeek = DateUtils.getMonday(businessDate); @@ -156,6 +157,12 @@ public class DependentDateUtils { return getDateIntervalListBetweenTwoDates(destDay, destDay); } + /** + * get date interval list between two dates + * @param firstDay firstDay + * @param lastDay lastDay + * @return DateInterval list + */ public static List getDateIntervalListBetweenTwoDates(Date firstDay, Date lastDay) { List dateIntervals = new ArrayList<>(); while(!firstDay.after(lastDay)){ diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java index 0c756cb0b3..39b59a04d6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java @@ -31,23 +31,27 @@ public class PlaceholderUtils { /** * Prefix of the position to be replaced */ - public static final String placeholderPrefix = "${"; + public static final String PLACEHOLDER_PREFIX = "${"; /** * The suffix of the position to be replaced */ - public static final String placeholderSuffix = "}"; + + public static final String PLACEHOLDER_SUFFIX = "}"; /** * Replaces all placeholders of format {@code ${name}} with the value returned * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}. * - * @param value the value containing the placeholders to be replaced - * @param paramsMap placeholder data dictionary + * @param value the value containing the placeholders to be replaced + * @param paramsMap placeholder data dictionary + * @param ignoreUnresolvablePlaceholders ignoreUnresolvablePlaceholders * @return the supplied value with placeholders replaced inline */ - public static String replacePlaceholders(String value, Map paramsMap, boolean ignoreUnresolvablePlaceholders) { + public static String replacePlaceholders(String value, + Map paramsMap, + boolean ignoreUnresolvablePlaceholders) { //replacement tool, parameter key will be replaced by value,if can't match , will throw an exception PropertyPlaceholderHelper strictHelper = getPropertyPlaceholderHelper(false); @@ -68,7 +72,7 @@ public class PlaceholderUtils { */ public static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { - return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders); + return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders); } /** diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java index 15e3282d38..35cb018399 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java @@ -35,12 +35,12 @@ public class TimePlaceholderUtils { /** * Prefix of the position to be replaced */ - public static final String placeholderPrefix = "$["; + public static final String PLACEHOLDER_PREFIX = "$["; /** * The suffix of the position to be replaced */ - public static final String placeholderSuffix = "]"; + public static final String PLACEHOLDER_SUFFIX = "]"; /** * Replaces all placeholders of format {@code ${name}} with the value returned @@ -66,7 +66,7 @@ public class TimePlaceholderUtils { * be ignored ({@code true}) or cause an exception ({@code false}) */ private static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { - return new PropertyPlaceholderHelper(placeholderPrefix, placeholderSuffix, null, ignoreUnresolvablePlaceholders); + return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders); } /** @@ -503,7 +503,7 @@ public class TimePlaceholderUtils { * @return calculate need minutes */ public static Integer calcMinutes(String minuteExpression) { - int index = minuteExpression.indexOf("/"); + int index = minuteExpression.indexOf('/'); String calcExpression; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java index 4fb5f94616..eee456d019 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java @@ -426,6 +426,7 @@ public class ProcessBuilderForWin32 { static final ProcessBuilderForWin32.NullInputStream INSTANCE = new ProcessBuilderForWin32.NullInputStream(); private NullInputStream() {} public int read() { return -1; } + @Override public int available() { return 0; } } @@ -462,7 +463,7 @@ public class ProcessBuilderForWin32 { * * @since 1.7 */ - public static abstract class Redirect { + public abstract static class Redirect { /** * The type of a {@link ProcessBuilderForWin32.Redirect}. */ @@ -494,7 +495,7 @@ public class ProcessBuilderForWin32 { * {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)}. */ APPEND - }; + } /** * Returns the type of this {@code Redirect}. @@ -568,6 +569,7 @@ public class ProcessBuilderForWin32 { throw new NullPointerException(); return new ProcessBuilderForWin32.Redirect() { public Type type() { return Type.READ; } + @Override public File file() { return file; } public String toString() { return "redirect to read from file \"" + file + "\""; @@ -595,10 +597,12 @@ public class ProcessBuilderForWin32 { throw new NullPointerException(); return new ProcessBuilderForWin32.Redirect() { public Type type() { return Type.WRITE; } + @Override public File file() { return file; } public String toString() { return "redirect to write to file \"" + file + "\""; } + @Override boolean append() { return false; } }; } @@ -626,10 +630,12 @@ public class ProcessBuilderForWin32 { throw new NullPointerException(); return new ProcessBuilderForWin32.Redirect() { public Type type() { return Type.APPEND; } + @Override public File file() { return file; } public String toString() { return "redirect to append to file \"" + file + "\""; } + @Override boolean append() { return true; } }; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java index 3dbe7cb50f..39fddfbad9 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java @@ -46,22 +46,23 @@ final class ProcessEnvironmentForWin32 extends HashMap { return (String) o; } + @Override public String put(String key, String value) { return super.put(validateName(key), validateValue(value)); } - + @Override public String get(Object key) { return super.get(nonNullString(key)); } - + @Override public boolean containsKey(Object key) { return super.containsKey(nonNullString(key)); } - + @Override public boolean containsValue(Object value) { return super.containsValue(nonNullString(value)); } - + @Override public String remove(Object key) { return super.remove(nonNullString(key)); } @@ -92,6 +93,7 @@ final class ProcessEnvironmentForWin32 extends HashMap { public Entry next() { return new CheckedEntry(i.next()); } + @Override public void remove() { i.remove();} }; } @@ -110,10 +112,14 @@ final class ProcessEnvironmentForWin32 extends HashMap { private final Collection c; public CheckedValues(Collection c) {this.c = c;} public int size() {return c.size();} + @Override public boolean isEmpty() {return c.isEmpty();} + @Override public void clear() { c.clear();} public Iterator iterator() {return c.iterator();} + @Override public boolean contains(Object o) {return c.contains(nonNullString(o));} + @Override public boolean remove(Object o) {return c.remove(nonNullString(o));} } @@ -127,15 +133,15 @@ final class ProcessEnvironmentForWin32 extends HashMap { public boolean contains(Object o) {return s.contains(nonNullString(o));} public boolean remove(Object o) {return s.remove(nonNullString(o));} } - + @Override public Set keySet() { return new CheckedKeySet(super.keySet()); } - + @Override public Collection values() { return new CheckedValues(super.values()); } - + @Override public Set> entrySet() { return new CheckedEntrySet(super.entrySet()); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java index 9f2716a096..4f6d719ef3 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java @@ -32,6 +32,7 @@ import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static com.sun.jna.platform.win32.WinBase.INVALID_HANDLE_VALUE; import static com.sun.jna.platform.win32.WinBase.STILL_ACTIVE; import static java.util.Objects.requireNonNull; @@ -112,7 +113,7 @@ public class ProcessImplForWin32 extends Process { // System-dependent portion of ProcessBuilderForWindows.start() static Process start(String username, String password, - String cmdarray[], + String[] cmdarray, java.util.Map environment, String dir, ProcessBuilderForWin32.Redirect[] redirects, @@ -177,10 +178,10 @@ public class ProcessImplForWin32 extends Process { private static class LazyPattern { // Escape-support version: - // "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)"; + // "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)" private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\""); - }; + } /* Parses the command string parameter into the executable name and * program arguments. @@ -203,7 +204,7 @@ public class ProcessImplForWin32 extends Process { private static final int VERIFICATION_LEGACY = 3; // See Command shell overview for documentation of special characters. // https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-xp/bb490954(v=technet.10) - private static final char ESCAPE_VERIFICATION[][] = { + private static final char[][] ESCAPE_VERIFICATION = { // We guarantee the only command file execution for implicit [cmd.exe] run. // http://technet.microsoft.com/en-us/library/bb490954.aspx {' ', '\t', '<', '>', '&', '|', '^'}, @@ -214,7 +215,7 @@ public class ProcessImplForWin32 extends Process { private static String createCommandLine(int verificationType, final String executablePath, - final String cmd[]) + final String[] cmd) { StringBuilder cmdbuf = new StringBuilder(80); @@ -309,7 +310,7 @@ public class ProcessImplForWin32 extends Process { } if (!argIsQuoted) { - char testEscape[] = ESCAPE_VERIFICATION[verificationType]; + char[] testEscape = ESCAPE_VERIFICATION[verificationType]; for (int i = 0; i < testEscape.length; ++i) { if (arg.indexOf(testEscape[i]) >= 0) { return true; @@ -390,14 +391,14 @@ public class ProcessImplForWin32 extends Process { private static final char BACKSLASH = '\\'; private WinNT.HANDLE handle; - private OutputStream stdin_stream; - private InputStream stdout_stream; - private InputStream stderr_stream; + private OutputStream stdinStream; + private InputStream stdoutStream; + private InputStream stderrStream; private ProcessImplForWin32( String username, String password, - String cmd[], + String[] cmd, final String envblock, final String path, final long[] stdHandles, @@ -472,44 +473,44 @@ public class ProcessImplForWin32 extends Process { new PrivilegedAction() { public Void run() { if (stdHandles[0] == -1L) - stdin_stream = ProcessBuilderForWin32.NullOutputStream.INSTANCE; + stdinStream = ProcessBuilderForWin32.NullOutputStream.INSTANCE; else { - FileDescriptor stdin_fd = new FileDescriptor(); - setHandle(stdin_fd, stdHandles[0]); - stdin_stream = new BufferedOutputStream( - new FileOutputStream(stdin_fd)); + FileDescriptor stdinFd = new FileDescriptor(); + setHandle(stdinFd, stdHandles[0]); + stdinStream = new BufferedOutputStream( + new FileOutputStream(stdinFd)); } if (stdHandles[1] == -1L) - stdout_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE; + stdoutStream = ProcessBuilderForWin32.NullInputStream.INSTANCE; else { - FileDescriptor stdout_fd = new FileDescriptor(); - setHandle(stdout_fd, stdHandles[1]); - stdout_stream = new BufferedInputStream( - new FileInputStream(stdout_fd)); + FileDescriptor stdoutFd = new FileDescriptor(); + setHandle(stdoutFd, stdHandles[1]); + stdoutStream = new BufferedInputStream( + new FileInputStream(stdoutFd)); } if (stdHandles[2] == -1L) - stderr_stream = ProcessBuilderForWin32.NullInputStream.INSTANCE; + stderrStream = ProcessBuilderForWin32.NullInputStream.INSTANCE; else { - FileDescriptor stderr_fd = new FileDescriptor(); - setHandle(stderr_fd, stdHandles[2]); - stderr_stream = new FileInputStream(stderr_fd); + FileDescriptor stderrFd = new FileDescriptor(); + setHandle(stderrFd, stdHandles[2]); + stderrStream = new FileInputStream(stderrFd); } return null; }}); } public OutputStream getOutputStream() { - return stdin_stream; + return stdinStream; } public InputStream getInputStream() { - return stdout_stream; + return stdoutStream; } public InputStream getErrorStream() { - return stderr_stream; + return stderrStream; } protected void finalize() { @@ -557,11 +558,12 @@ public class ProcessImplForWin32 extends Process { public void destroy() { terminateProcess(handle); } + @Override public Process destroyForcibly() { destroy(); return this; } - + @Override public boolean isAlive() { return isProcessAlive(handle); } @@ -582,7 +584,7 @@ public class ProcessImplForWin32 extends Process { pjhandles.setValue(thisProcessEnd); } } - Kernel32.INSTANCE.SetHandleInformation(phStd.getValue(), Kernel32.HANDLE_FLAG_INHERIT, Kernel32.HANDLE_FLAG_INHERIT); + Kernel32.INSTANCE.SetHandleInformation(phStd.getValue(), WinBase.HANDLE_FLAG_INHERIT, WinBase.HANDLE_FLAG_INHERIT); return true; } @@ -596,17 +598,17 @@ public class ProcessImplForWin32 extends Process { private static void prepareIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) { for(int i = 0; i < HANDLE_STORAGE_SIZE; ++i) { WinNT.HANDLE hstd = stdIOE[i]; - if (!Kernel32.INVALID_HANDLE_VALUE.equals(hstd)) { + if (!WinBase.INVALID_HANDLE_VALUE.equals(hstd)) { inherit[i] = Boolean.TRUE; - Kernel32.INSTANCE.SetHandleInformation(hstd, Kernel32.HANDLE_FLAG_INHERIT, 0); + Kernel32.INSTANCE.SetHandleInformation(hstd, WinBase.HANDLE_FLAG_INHERIT, 0); } } } private static void restoreIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) { for (int i = HANDLE_STORAGE_SIZE - 1; i >= 0; --i) { - if (!Kernel32.INVALID_HANDLE_VALUE.equals(stdIOE[i])) { - Kernel32.INSTANCE.SetHandleInformation(stdIOE[i], Kernel32.HANDLE_FLAG_INHERIT, inherit[i] ? Kernel32.HANDLE_FLAG_INHERIT : 0); + if (!WinBase.INVALID_HANDLE_VALUE.equals(stdIOE[i])) { + Kernel32.INSTANCE.SetHandleInformation(stdIOE[i], WinBase.HANDLE_FLAG_INHERIT, Boolean.TRUE.equals(inherit[i]) ? WinBase.HANDLE_FLAG_INHERIT : 0); } } } @@ -621,12 +623,12 @@ public class ProcessImplForWin32 extends Process { WinNT.HANDLE ret = new WinNT.HANDLE(Pointer.createConstant(0)); WinNT.HANDLE[] stdIOE = new WinNT.HANDLE[] { - Kernel32.INVALID_HANDLE_VALUE, Kernel32.INVALID_HANDLE_VALUE, Kernel32.INVALID_HANDLE_VALUE, + WinBase.INVALID_HANDLE_VALUE, WinBase.INVALID_HANDLE_VALUE, WinBase.INVALID_HANDLE_VALUE, stdHandles[0].getValue(), stdHandles[1].getValue(), stdHandles[2].getValue() }; - stdIOE[0] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_INPUT_HANDLE); - stdIOE[1] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_OUTPUT_HANDLE); - stdIOE[2] = Kernel32.INSTANCE.GetStdHandle(Kernel32.STD_ERROR_HANDLE); + stdIOE[0] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_INPUT_HANDLE); + stdIOE[1] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_OUTPUT_HANDLE); + stdIOE[2] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_ERROR_HANDLE); Boolean[] inherit = new Boolean[] { Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, @@ -638,17 +640,17 @@ public class ProcessImplForWin32 extends Process { // input WinNT.HANDLEByReference hStdInput = new WinNT.HANDLEByReference(); WinNT.HANDLEByReference[] pipeIn = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) }; + new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; // output WinNT.HANDLEByReference hStdOutput = new WinNT.HANDLEByReference(); WinNT.HANDLEByReference[] pipeOut = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) }; + new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; // error WinNT.HANDLEByReference hStdError = new WinNT.HANDLEByReference(); WinNT.HANDLEByReference[] pipeError = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(Kernel32.INVALID_HANDLE_VALUE) }; + new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; boolean success; if (initHolder(stdHandles[0], pipeIn, OFFSET_READ, hStdInput)) { @@ -668,8 +670,8 @@ public class ProcessImplForWin32 extends Process { if (success) { WTypes.LPSTR lpEnvironment = envblock == null ? new WTypes.LPSTR() : new WTypes.LPSTR(envblock); - Kernel32.PROCESS_INFORMATION pi = new WinBase.PROCESS_INFORMATION(); - si.dwFlags = Kernel32.STARTF_USESTDHANDLES; + WinBase.PROCESS_INFORMATION pi = new WinBase.PROCESS_INFORMATION(); + si.dwFlags = WinBase.STARTF_USESTDHANDLES; if (!Advapi32.INSTANCE.CreateProcessWithLogonW( username , null @@ -677,7 +679,7 @@ public class ProcessImplForWin32 extends Process { , Advapi32.LOGON_WITH_PROFILE , null , cmd - , Kernel32.CREATE_NO_WINDOW + , WinBase.CREATE_NO_WINDOW , lpEnvironment.getPointer() , path , si @@ -709,13 +711,11 @@ public class ProcessImplForWin32 extends Process { for (int i = 0; i < stdHandles.length; i++) { handles[i] = new WinNT.HANDLEByReference(new WinNT.HANDLE(Pointer.createConstant(stdHandles[i]))); } - - if (cmd != null) { - if (username != null && password != null) { - ret = processCreate(username, password, cmd, envblock, path, handles, redirectErrorStream); - } + + if (cmd != null && username != null && password != null) { + ret = processCreate(username, password, cmd, envblock, path, handles, redirectErrorStream); } - + for (int i = 0; i < stdHandles.length; i++) { stdHandles[i] = handles[i].getPointer().getLong(0); } @@ -742,7 +742,9 @@ public class ProcessImplForWin32 extends Process { } private static void closeHandle(WinNT.HANDLE handle) { - Kernel32Util.closeHandle(handle); + if (!handle.equals(INVALID_HANDLE_VALUE)) { + Kernel32Util.closeHandle(handle); + } } /** @@ -753,15 +755,15 @@ public class ProcessImplForWin32 extends Process { * @return the native HANDLE */ private static long openForAtomicAppend(String path) throws IOException { - int access = Kernel32.GENERIC_READ | Kernel32.GENERIC_WRITE; - int sharing = Kernel32.FILE_SHARE_READ | Kernel32.FILE_SHARE_WRITE; - int disposition = Kernel32.OPEN_ALWAYS; - int flagsAndAttributes = Kernel32.FILE_ATTRIBUTE_NORMAL; + int access = WinNT.GENERIC_READ | WinNT.GENERIC_WRITE; + int sharing = WinNT.FILE_SHARE_READ | WinNT.FILE_SHARE_WRITE; + int disposition = WinNT.OPEN_ALWAYS; + int flagsAndAttributes = WinNT.FILE_ATTRIBUTE_NORMAL; if (path == null || path.isEmpty()) { return -1; } else { WinNT.HANDLE handle = Kernel32.INSTANCE.CreateFile(path, access, sharing, null, disposition, flagsAndAttributes, null); - if (handle == Kernel32.INVALID_HANDLE_VALUE) { + if (handle == WinBase.INVALID_HANDLE_VALUE) { throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); } return handle.getPointer().getLong(0); @@ -769,15 +771,15 @@ public class ProcessImplForWin32 extends Process { } private static void waitForInterruptibly(WinNT.HANDLE handle) { - int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, Kernel32.INFINITE); - if (result == Kernel32.WAIT_FAILED) { + int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, WinBase.INFINITE); + if (result == WinBase.WAIT_FAILED) { throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); } } private static void waitForTimeoutInterruptibly(WinNT.HANDLE handle, long timeout) { int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, (int) timeout); - if (result == Kernel32.WAIT_FAILED) { + if (result == WinBase.WAIT_FAILED) { throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); } } diff --git a/dolphinscheduler-common/src/main/resources/common.properties b/dolphinscheduler-common/src/main/resources/common.properties index 5a4aa1441f..db3b241ca9 100644 --- a/dolphinscheduler-common/src/main/resources/common.properties +++ b/dolphinscheduler-common/src/main/resources/common.properties @@ -15,40 +15,14 @@ # limitations under the License. # -#task queue implementation, default "zookeeper" -dolphinscheduler.queue.impl=zookeeper +# resource storage type : HDFS,S3,NONE +resource.storage.type=NONE -#zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181 -zookeeper.quorum=localhost:2181 - -#dolphinscheduler root directory -zookeeper.dolphinscheduler.root=/dolphinscheduler - -#dolphinscheduler failover directory -zookeeper.session.timeout=300 -zookeeper.connection.timeout=300 -zookeeper.retry.base.sleep=100 -zookeeper.retry.max.sleep=30000 -zookeeper.retry.maxtime=5 - -# resource upload startup type : HDFS,S3,NONE -res.upload.startup.type=NONE - -# Users who have permission to create directories under the HDFS root path -hdfs.root.user=hdfs - -# data base dir, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended -data.store2hdfs.basepath=/dolphinscheduler - -# user data directory path, self configuration, please make sure the directory exists and have read write permissions -data.basedir.path=/tmp/dolphinscheduler - -# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions -data.download.basedir.path=/tmp/dolphinscheduler/download - -# process execute directory. self configuration, please make sure the directory exists and have read write permissions -process.exec.basepath=/tmp/dolphinscheduler/exec +# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended +resource.upload.path=/dolphinscheduler +# user data local directory path, please make sure the directory exists and have read write permissions +#data.basedir.path=/tmp/dolphinscheduler # whether kerberos starts hadoop.security.authentication.startup.state=false @@ -56,39 +30,37 @@ hadoop.security.authentication.startup.state=false # java.security.krb5.conf path java.security.krb5.conf.path=/opt/krb5.conf -# loginUserFromKeytab user +# login user from keytab username login.user.keytab.username=hdfs-mycluster@ESZ.COM # loginUserFromKeytab path login.user.keytab.path=/opt/hdfs.headless.keytab -# system env path. self configuration, please make sure the directory and file exists and have read write execute permissions -dolphinscheduler.env.path=/opt/dolphinscheduler_env.sh - #resource.view.suffixs -resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties - -# is development state? default "false" -development.state=true +#resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties +# if resource.storage.type=HDFS, the user need to have permission to create directories under the HDFS root path +hdfs.root.user=hdfs -# ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml -# to the conf directory,support s3,for example : s3a://dolphinscheduler +# if resource.storage.type=S3,the value like: s3a://dolphinscheduler ; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir fs.defaultFS=hdfs://mycluster:8020 -# s3 need,s3 endpoint +# if resource.storage.type=S3,s3 endpoint fs.s3a.endpoint=http://192.168.199.91:9010 -# s3 need,s3 access key +# if resource.storage.type=S3,s3 access key fs.s3a.access.key=A3DXS30FO22544RE -# s3 need,s3 secret key +# if resource.storage.type=S3,s3 secret key fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK -#resourcemanager ha note this need ips , this empty if single +# if not use hadoop resourcemanager, please keep default value; if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx -# If it is a single resourcemanager, you only need to configure one host name. If it is resourcemanager HA, the default configuration is fine +# if resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname. yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s +# system env path +#dolphinscheduler.env.path=env/dolphinscheduler_env.sh +kerberos.expire.time=7 \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/resources/logback.xml b/dolphinscheduler-common/src/main/resources/logback.xml deleted file mode 100644 index 7f634da975..0000000000 --- a/dolphinscheduler-common/src/main/resources/logback.xml +++ /dev/null @@ -1,169 +0,0 @@ - - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - ${log.base}/dolphinscheduler-master.log - - - ${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - - - - INFO - - - - taskAppId - ${log.base} - - - - ${log.base}/${taskAppId}.log - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n - - UTF-8 - - true - - - - - ${log.base}/dolphinscheduler-worker.log - - INFO - - - - ${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n - - UTF-8 - - - - - - - - ${log.base}/dolphinscheduler-alert.log - - ${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log - 20 - 64MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - - ${log.base}/dolphinscheduler-api-server.log - - INFO - - - ${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log - 168 - 64MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java index 2670eebc20..1815e48f84 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/os/OSUtilsTest.java @@ -67,7 +67,7 @@ public class OSUtilsTest { @Test public void cpuUsage() throws Exception { logger.info("cpuUsage : {}", OSUtils.cpuUsage()); - Thread.sleep(1000l); + Thread.sleep(1000L); logger.info("cpuUsage : {}", OSUtils.cpuUsage()); double cpuUsage = OSUtils.cpuUsage(); diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java index 7ce00e875a..cd7b4f2200 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/FlinkParametersTest.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.common.task; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.junit.Assert; import org.junit.Test; @@ -28,8 +29,7 @@ public class FlinkParametersTest { @Test public void getResourceFilesList() { FlinkParameters flinkParameters = new FlinkParameters(); - Assert.assertNotNull(flinkParameters.getResourceFilesList()); - Assert.assertTrue(flinkParameters.getResourceFilesList().isEmpty()); + Assert.assertTrue(CollectionUtils.isEmpty(flinkParameters.getResourceFilesList())); ResourceInfo mainResource = new ResourceInfo(); mainResource.setRes("testFlinkMain-1.0.0-SNAPSHOT.jar"); @@ -41,15 +41,17 @@ public class FlinkParametersTest { resourceInfos.add(resourceInfo1); flinkParameters.setResourceList(resourceInfos); - Assert.assertNotNull(flinkParameters.getResourceFilesList()); - Assert.assertEquals(2, flinkParameters.getResourceFilesList().size()); + List resourceFilesList = flinkParameters.getResourceFilesList(); + Assert.assertNotNull(resourceFilesList); + Assert.assertEquals(2, resourceFilesList.size()); ResourceInfo resourceInfo2 = new ResourceInfo(); resourceInfo2.setRes("testFlinkParameters2.jar"); resourceInfos.add(resourceInfo2); flinkParameters.setResourceList(resourceInfos); - Assert.assertNotNull(flinkParameters.getResourceFilesList()); - Assert.assertEquals(3, flinkParameters.getResourceFilesList().size()); + resourceFilesList = flinkParameters.getResourceFilesList(); + Assert.assertNotNull(resourceFilesList); + Assert.assertEquals(3, resourceFilesList.size()); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java index 42c9958810..c720013125 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java @@ -35,11 +35,6 @@ public class CommonUtilsTest { Assert.assertTrue(true); } @Test - public void getQueueImplValue(){ - logger.info(CommonUtils.getQueueImplValue()); - Assert.assertTrue(true); - } - @Test public void isDevelopMode() { logger.info("develop mode: {}",CommonUtils.isDevelopMode()); Assert.assertTrue(true); diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java index 43745c4e3c..a3ee26e18b 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/DependentUtilsTest.java @@ -32,6 +32,7 @@ import java.util.List; public class DependentUtilsTest { private static final Logger logger = LoggerFactory.getLogger(ShellExecutorTest.class); + @Test public void getDependResultForRelation() { //failed diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java index 8948e69f74..00b8f1c5c6 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java @@ -16,73 +16,187 @@ */ package org.apache.dolphinscheduler.common.utils; -import org.junit.Ignore; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.hadoop.conf.Configuration; +import org.junit.Assert; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.List; -@Ignore +@RunWith(MockitoJUnitRunner.class) +//todo there is no hadoop environment public class HadoopUtilsTest { private static final Logger logger = LoggerFactory.getLogger(HadoopUtilsTest.class); + private HadoopUtils hadoopUtils = HadoopUtils.getInstance(); @Test public void getActiveRMTest() { - logger.info(HadoopUtils.getAppAddress("http://ark1:8088/ws/v1/cluster/apps/%s","192.168.xx.xx,192.168.xx.xx")); + try{ + hadoopUtils.getAppAddress("http://ark1:8088/ws/v1/cluster/apps/%s","192.168.xx.xx,192.168.xx.xx"); + } catch (Exception e) { + logger.error(e.getMessage(),e); + } } @Test - public void getApplicationStatusAddressTest(){ - logger.info(HadoopUtils.getInstance().getApplicationUrl("application_1548381297012_0030")); + public void rename() { + + boolean result = false; + try { + result = hadoopUtils.rename("/dolphinscheduler/hdfs1","/dolphinscheduler/hdfs2"); + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + Assert.assertEquals(false, result); } + @Test - public void test() throws IOException { - HadoopUtils.getInstance().copyLocalToHdfs("/root/teamviewer_13.1.8286.x86_64.rpm", "/journey", true, true); + public void getConfiguration(){ + Configuration conf = hadoopUtils.getConfiguration(); + } @Test - public void readFileTest(){ + public void mkdir() { + boolean result = false; try { - byte[] bytes = HadoopUtils.getInstance().catFile("/dolphinscheduler/hdfs/resources/35435.sh"); - logger.info(new String(bytes)); + result = hadoopUtils.mkdir("/dolphinscheduler/hdfs"); } catch (Exception e) { + logger.error(e.getMessage(), e); + } + Assert.assertEquals(false, result); + } + + @Test + public void delete() { + boolean result = false; + try { + result = hadoopUtils.delete("/dolphinscheduler/hdfs",true); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + Assert.assertEquals(false, result); + } + @Test + public void exists() { + boolean result = false; + try { + result = hadoopUtils.exists("/dolphinscheduler/hdfs"); + } catch (Exception e) { + logger.error(e.getMessage(), e); } + Assert.assertEquals(false, result); + } + + @Test + public void getHdfsDataBasePath() { + String result = hadoopUtils.getHdfsDataBasePath(); + Assert.assertEquals("/dolphinscheduler", result); } + @Test - public void testCapacity(){ + public void getHdfsResDir() { + String result = hadoopUtils.getHdfsResDir("11000"); + Assert.assertEquals("/dolphinscheduler/11000/resources", result); + } + @Test + public void getHdfsUserDir() { + String result = hadoopUtils.getHdfsUserDir("11000",1000); + Assert.assertEquals("/dolphinscheduler/11000/home/1000", result); } + @Test - public void testMove(){ - HadoopUtils instance = HadoopUtils.getInstance(); + public void getHdfsUdfDir() { + String result = hadoopUtils.getHdfsUdfDir("11000"); + Assert.assertEquals("/dolphinscheduler/11000/udfs", result); + } + + @Test + public void getHdfsFileName() { + String result = hadoopUtils.getHdfsFileName(ResourceType.FILE,"11000","aa.txt"); + Assert.assertEquals("/dolphinscheduler/11000/resources/aa.txt", result); + } + + @Test + public void isYarnEnabled() { + boolean result = hadoopUtils.isYarnEnabled(); + Assert.assertEquals(false, result); + } + + @Test + public void test() { try { - instance.copy("/opt/apptest/test.dat","/opt/apptest/test.dat.back",true,true); + hadoopUtils.copyLocalToHdfs("/root/teamviewer_13.1.8286.x86_64.rpm", "/journey", true, true); } catch (Exception e) { logger.error(e.getMessage(), e); } + } + + @Test + public void readFileTest(){ + try { + byte[] bytes = hadoopUtils.catFile("/dolphinscheduler/hdfs/resources/35435.sh"); + logger.info(new String(bytes)); + } catch (Exception e) { + logger.error(e.getMessage(),e); + } + } + @Test + public void testMove(){ + try { + hadoopUtils.copy("/opt/apptest/test.dat","/opt/apptest/test.dat.back",true,true); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } @Test public void getApplicationStatus() { - logger.info(HadoopUtils.getInstance().getApplicationStatus("application_1542010131334_0029").toString()); + try { + logger.info(hadoopUtils.getApplicationStatus("application_1542010131334_0029").toString()); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } } @Test public void getApplicationUrl(){ - String application_1516778421218_0042 = HadoopUtils.getInstance().getApplicationUrl("application_1529051418016_0167"); + String application_1516778421218_0042 = hadoopUtils.getApplicationUrl("application_1529051418016_0167"); logger.info(application_1516778421218_0042); } @Test - public void catFileTest()throws Exception{ - List stringList = HadoopUtils.getInstance().catFile("/dolphinscheduler/hdfs/resources/WCSparkPython.py", 0, 1000); - logger.info(String.join(",",stringList)); + public void catFileWithLimitTest() { + List stringList = new ArrayList<>(); + try { + stringList = hadoopUtils.catFile("/dolphinscheduler/hdfs/resources/WCSparkPython.py", 0, 1000); + logger.info(String.join(",",stringList)); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + + @Test + public void catFileTest() { + byte[] content = new byte[0]; + try { + content = hadoopUtils.catFile("/dolphinscheduler/hdfs/resources/WCSparkPython.py"); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + logger.info(Arrays.toString(content)); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java index e65bcd219b..ec6ffa35a7 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/IpUtilsTest.java @@ -29,13 +29,13 @@ public class IpUtilsTest { long longNumber = IpUtils.ipToLong(ip); long longNumber2 = IpUtils.ipToLong(ip2); System.out.println(longNumber); - Assert.assertEquals(longNumber, 3232263681L); - Assert.assertEquals(longNumber2, 0L); + Assert.assertEquals(3232263681L, longNumber); + Assert.assertEquals(0L, longNumber2); String ip3 = "255.255.255.255"; long longNumber3 = IpUtils.ipToLong(ip3); System.out.println(longNumber3); - Assert.assertEquals(longNumber3, 4294967295L); + Assert.assertEquals(4294967295L, longNumber3); } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java index bd924e4852..8ce60349ed 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java @@ -16,10 +16,10 @@ */ package org.apache.dolphinscheduler.common.utils; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; -import com.alibaba.fastjson.JSONObject; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import org.junit.Assert; @@ -40,8 +40,8 @@ public class JSONUtilsTest { String jsonStr = "{\"id\":\"1001\",\"name\":\"Jobs\"}"; Map models = JSONUtils.toMap(jsonStr); - Assert.assertEquals(models.get("id"), "1001"); - Assert.assertEquals(models.get("name"), "Jobs"); + Assert.assertEquals("1001", models.get("id")); + Assert.assertEquals("Jobs", models.get("name")); } @@ -53,9 +53,9 @@ public class JSONUtilsTest { property.setType(DataType.VARCHAR); property.setValue("sssssss"); String str = "{\"direct\":\"IN\",\"prop\":\"ds\",\"type\":\"VARCHAR\",\"value\":\"sssssss\"}"; - Property property1 = JSONObject.parseObject(str, Property.class); + Property property1 = JSON.parseObject(str, Property.class); Direct direct = property1.getDirect(); - Assert.assertEquals(direct , Direct.IN); + Assert.assertEquals(Direct.IN, direct); } @@ -66,12 +66,12 @@ public class JSONUtilsTest { List maps = JSONUtils.toList(str, LinkedHashMap.class); - Assert.assertEquals(maps.size(), 1); - Assert.assertEquals(maps.get(0).get("mysql service name"), "mysql200"); - Assert.assertEquals(maps.get(0).get("mysql address"), "192.168.xx.xx"); - Assert.assertEquals(maps.get(0).get("port"), "3306"); - Assert.assertEquals(maps.get(0).get("no index of number"), "80"); - Assert.assertEquals(maps.get(0).get("database client connections"), "190"); + Assert.assertEquals(1, maps.size()); + Assert.assertEquals("mysql200", maps.get(0).get("mysql service name")); + Assert.assertEquals("192.168.xx.xx", maps.get(0).get("mysql address")); + Assert.assertEquals("3306", maps.get(0).get("port")); + Assert.assertEquals("80", maps.get(0).get("no index of number")); + Assert.assertEquals("190", maps.get(0).get("database client connections")); } public String list2String(){ diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java index 8bb64b03c8..abdc15cc6e 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.common.utils; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import org.apache.commons.lang.time.DateUtils; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; @@ -91,13 +91,13 @@ public class ParameterUtilsTest { globalParamList.add(property); String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime); - Assert.assertEquals(result2, JSONObject.toJSONString(globalParamList)); + Assert.assertEquals(result2, JSON.toJSONString(globalParamList)); String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null); - Assert.assertEquals(result3, JSONObject.toJSONString(globalParamList)); + Assert.assertEquals(result3, JSON.toJSONString(globalParamList)); String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime); - Assert.assertEquals(result4, JSONObject.toJSONString(globalParamList)); + Assert.assertEquals(result4, JSON.toJSONString(globalParamList)); //test var $ startsWith globalParamMap.put("bizDate","${system.biz.date}"); diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java index 99a2cf05bc..b14be21e60 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringTest.java @@ -24,12 +24,6 @@ import java.util.List; public class StringTest { - - @Test - public void test1(){ - System.out.println(String.format("%s_%010d_%010d", String.valueOf(1), Long.valueOf(3), Integer.valueOf(4))); - } - @Test public void stringCompareTest(){ diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java index ee0a8aafe3..d204dfd4de 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java @@ -32,37 +32,37 @@ public class TimePlaceholderUtilsTest { date = DateUtils.parse("20170101010101","yyyyMMddHHmmss"); } - @Test - public void replacePlaceholdersT() { - Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", TimePlaceholderUtils.replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", - date, true)); - - Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", TimePlaceholderUtils.replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," - + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," - + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," - + "$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," - + "$[timestamp(week_end(yyyyMMdd000000, -1))]," - + "$[timestamp(yyyyMMddHHmmss)]", - date, true)); - } - - - - @Test - public void calcMinutesT() { - Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", TimePlaceholderUtils.calcMinutes("yyyy", date).toString()); - Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+7*1", date).toString()); - Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-7*1", date).toString()); - Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+1", date).toString()); - Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-1", date).toString()); - Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH+1/24", date).toString()); - Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH-1/24", date).toString()); - } - - @Test - public void calcMonthsT() { - Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); - Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); - } +// @Test +// public void replacePlaceholdersT() { +// Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", TimePlaceholderUtils.replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", +// date, true)); +// +// Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", TimePlaceholderUtils.replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," +// + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," +// + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," +// + "$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," +// + "$[timestamp(week_end(yyyyMMdd000000, -1))]," +// + "$[timestamp(yyyyMMddHHmmss)]", +// date, true)); +// } +// +// +// +// @Test +// public void calcMinutesT() { +// Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", TimePlaceholderUtils.calcMinutes("yyyy", date).toString()); +// Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+7*1", date).toString()); +// Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-7*1", date).toString()); +// Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+1", date).toString()); +// Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-1", date).toString()); +// Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH+1/24", date).toString()); +// Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH-1/24", date).toString()); +// } +// +// @Test +// public void calcMonthsT() { +// Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); +// Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); +// } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 1a8c09e611..49b8c01ece 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -50,8 +50,8 @@ public class AlertDao extends AbstractBaseDao { @Override protected void init() { - alertMapper = ConnectionFactory.getMapper(AlertMapper.class); - userAlertGroupMapper = ConnectionFactory.getMapper(UserAlertGroupMapper.class); + alertMapper = ConnectionFactory.getInstance().getMapper(AlertMapper.class); + userAlertGroupMapper = ConnectionFactory.getInstance().getMapper(UserAlertGroupMapper.class); } /** @@ -99,13 +99,7 @@ public class AlertDao extends AbstractBaseDao { String content = String.format("[{'type':'%s','host':'%s','event':'server down','warning level':'serious'}]", serverType, host); alert.setTitle("Fault tolerance warning"); - alert.setShowType(ShowType.TABLE); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(alertgroupId); - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); + saveTaskTimeoutAlert(alert, content, alertgroupId, null, null); } /** @@ -121,6 +115,11 @@ public class AlertDao extends AbstractBaseDao { String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]", processInstance.getId(), processInstance.getName()); alert.setTitle("Process Timeout Warn"); + saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc); + } + + private void saveTaskTimeoutAlert(Alert alert, String content, int alertgroupId, + String receivers, String receiversCc){ alert.setShowType(ShowType.TABLE); alert.setContent(content); alert.setAlertType(AlertType.EMAIL); @@ -136,11 +135,14 @@ public class AlertDao extends AbstractBaseDao { alertMapper.insert(alert); } + /** * task timeout warn * @param alertgroupId alertgroupId * @param receivers receivers * @param receiversCc receiversCc + * @param processInstanceId processInstanceId + * @param processInstanceName processInstanceName * @param taskId taskId * @param taskName taskName */ @@ -150,19 +152,7 @@ public class AlertDao extends AbstractBaseDao { String content = String.format("[{'process instance id':'%d','task name':'%s','task id':'%d','task name':'%s'," + "'event':'timeout','warnLevel':'middle'}]", processInstanceId, processInstanceName, taskId, taskName); alert.setTitle("Task Timeout Warn"); - alert.setShowType(ShowType.TABLE); - alert.setContent(content); - alert.setAlertType(AlertType.EMAIL); - alert.setAlertGroupId(alertgroupId); - if (StringUtils.isNotEmpty(receivers)) { - alert.setReceivers(receivers); - } - if (StringUtils.isNotEmpty(receiversCc)) { - alert.setReceiversCc(receiversCc); - } - alert.setCreateTime(new Date()); - alert.setUpdateTime(new Date()); - alertMapper.insert(alert); + saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc); } /** @@ -182,5 +172,11 @@ public class AlertDao extends AbstractBaseDao { return userAlertGroupMapper.listUserByAlertgroupId(alertgroupId); } - + /** + * for test + * @return AlertMapper + */ + public AlertMapper getAlertMapper() { + return alertMapper; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java index 51f60666d1..53366777f7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java @@ -18,10 +18,10 @@ package org.apache.dolphinscheduler.dao; import com.alibaba.druid.pool.DruidDataSource; import java.sql.Connection; -import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.apache.dolphinscheduler.dao.utils.MysqlPerformance; import org.apache.dolphinscheduler.dao.utils.PostgrePerformance; @@ -61,15 +61,9 @@ public class MonitorDBDao { return new PostgrePerformance().getMonitorRecord(conn); } }catch (Exception e) { - logger.error("SQLException " + e); + logger.error("SQLException: {}", e.getMessage(), e); }finally { - try { - if (conn != null) { - conn.close(); - } - } catch (SQLException e) { - logger.error("SQLException ", e); - } + ConnectionUtils.releaseResource(conn); } return monitorRecord; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java index f7ba1054a5..1592e607f9 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/TaskRecordDao.java @@ -16,15 +16,17 @@ */ package org.apache.dolphinscheduler.dao; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.TaskRecord; -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.dolphinscheduler.dao.utils.PropertyUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,149 +44,132 @@ public class TaskRecordDao { private static Logger logger = LoggerFactory.getLogger(TaskRecordDao.class.getName()); - /** - * load conf - */ - private static Configuration conf; - - static { - try { - conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES); - }catch (ConfigurationException e){ - logger.error("load configuration exception",e); - System.exit(1); - } - } - /** * get task record flag * @return whether startup taskrecord */ public static boolean getTaskRecordFlag(){ - return conf.getBoolean(Constants.TASK_RECORD_FLAG); + return PropertyUtils.getBoolean(Constants.TASK_RECORD_FLAG,false); } + /** * create connection + * * @return connection */ private static Connection getConn() { - if(!getTaskRecordFlag()){ + if (!getTaskRecordFlag()) { return null; } String driver = "com.mysql.jdbc.Driver"; - String url = conf.getString(Constants.TASK_RECORD_URL); - String username = conf.getString(Constants.TASK_RECORD_USER); - String password = conf.getString(Constants.TASK_RECORD_PWD); + String url = PropertyUtils.getString(Constants.TASK_RECORD_URL); + String username = PropertyUtils.getString(Constants.TASK_RECORD_USER); + String password = PropertyUtils.getString(Constants.TASK_RECORD_PWD); Connection conn = null; try { //classLoader,load driver Class.forName(driver); conn = DriverManager.getConnection(url, username, password); } catch (ClassNotFoundException e) { - logger.error("Exception ", e); + logger.error("Class not found Exception ", e); } catch (SQLException e) { - logger.error("Exception ", e); + logger.error("SQL Exception ", e); } return conn; } /** * generate where sql string + * * @param filterMap filterMap * @return sql string */ private static String getWhereString(Map filterMap) { - if(filterMap.size() ==0){ + if (filterMap.size() == 0) { return ""; } String result = " where 1=1 "; Object taskName = filterMap.get("taskName"); - if(taskName != null && StringUtils.isNotEmpty(taskName.toString())){ + if (taskName != null && StringUtils.isNotEmpty(taskName.toString())) { result += " and PROC_NAME like concat('%', '" + taskName.toString() + "', '%') "; } Object taskDate = filterMap.get("taskDate"); - if(taskDate != null && StringUtils.isNotEmpty(taskDate.toString())){ + if (taskDate != null && StringUtils.isNotEmpty(taskDate.toString())) { result += " and PROC_DATE='" + taskDate.toString() + "'"; } Object state = filterMap.get("state"); - if(state != null && StringUtils.isNotEmpty(state.toString())){ + if (state != null && StringUtils.isNotEmpty(state.toString())) { result += " and NOTE='" + state.toString() + "'"; } Object sourceTable = filterMap.get("sourceTable"); - if(sourceTable!= null && StringUtils.isNotEmpty(sourceTable.toString())){ - result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString()+ "', '%')"; + if (sourceTable != null && StringUtils.isNotEmpty(sourceTable.toString())) { + result += " and SOURCE_TAB like concat('%', '" + sourceTable.toString() + "', '%')"; } Object targetTable = filterMap.get("targetTable"); - if(sourceTable!= null && StringUtils.isNotEmpty(targetTable.toString())){ - result += " and TARGET_TAB like concat('%', '"+ targetTable.toString()+"', '%') " ; + if (sourceTable != null && StringUtils.isNotEmpty(targetTable.toString())) { + result += " and TARGET_TAB like concat('%', '" + targetTable.toString() + "', '%') "; } Object start = filterMap.get("startTime"); - if(start != null && StringUtils.isNotEmpty(start.toString())){ + if (start != null && StringUtils.isNotEmpty(start.toString())) { result += " and STARTDATE>='" + start.toString() + "'"; } Object end = filterMap.get("endTime"); - if(end != null && StringUtils.isNotEmpty(end.toString())){ - result += " and ENDDATE>='" + end.toString()+ "'"; + if (end != null && StringUtils.isNotEmpty(end.toString())) { + result += " and ENDDATE>='" + end.toString() + "'"; } return result; } /** * count task record + * * @param filterMap filterMap - * @param table table + * @param table table * @return task record count */ - public static int countTaskRecord(Map filterMap, String table){ + public static int countTaskRecord(Map filterMap, String table) { int count = 0; Connection conn = null; PreparedStatement pstmt = null; + ResultSet rs = null; try { conn = getConn(); - if(conn == null){ + if (conn == null) { return count; } String sql = String.format("select count(1) as count from %s", table); sql += getWhereString(filterMap); pstmt = conn.prepareStatement(sql); - ResultSet rs = pstmt.executeQuery(); - while(rs.next()){ + rs = pstmt.executeQuery(); + while (rs.next()){ count = rs.getInt("count"); break; } } catch (SQLException e) { logger.error("Exception ", e); }finally { - try { - if(pstmt != null) { - pstmt.close(); - } - if(conn != null){ - conn.close(); - } - } catch (SQLException e) { - logger.error("Exception ", e); - } + ConnectionUtils.releaseResource(rs, pstmt, conn); } return count; } /** * query task record by filter map paging + * * @param filterMap filterMap - * @param table table + * @param table table * @return task record list */ - public static List queryAllTaskRecord(Map filterMap , String table) { + public static List queryAllTaskRecord(Map filterMap, String table) { String sql = String.format("select * from %s", table); sql += getWhereString(filterMap); @@ -194,9 +179,9 @@ public class TaskRecordDao { sql += String.format(" order by STARTDATE desc limit %d,%d", offset, pageSize); List recordList = new ArrayList<>(); - try{ + try { recordList = getQueryResult(sql); - }catch (Exception e){ + } catch (Exception e) { logger.error("Exception ", e); } return recordList; @@ -204,6 +189,7 @@ public class TaskRecordDao { /** * convert result set to task record + * * @param resultSet resultSet * @return task record * @throws SQLException if error throws SQLException @@ -232,6 +218,7 @@ public class TaskRecordDao { /** * query task list by select sql + * * @param selectSql select sql * @return task record list */ @@ -239,65 +226,57 @@ public class TaskRecordDao { List recordList = new ArrayList<>(); Connection conn = null; PreparedStatement pstmt = null; + ResultSet rs = null; try { conn = getConn(); - if(conn == null){ + if (conn == null) { return recordList; } pstmt = conn.prepareStatement(selectSql); - ResultSet rs = pstmt.executeQuery(); + rs = pstmt.executeQuery(); - while(rs.next()){ + while (rs.next()) { TaskRecord taskRecord = convertToTaskRecord(rs); recordList.add(taskRecord); } } catch (SQLException e) { logger.error("Exception ", e); }finally { - try { - if(pstmt != null) { - pstmt.close(); - } - if(conn != null){ - conn.close(); - } - } catch (SQLException e) { - logger.error("Exception ", e); - } + ConnectionUtils.releaseResource(rs, pstmt, conn); } return recordList; } /** * according to procname and procdate query task record + * * @param procName procName * @param procDate procDate * @return task record status */ - public static TaskRecordStatus getTaskRecordState(String procName,String procDate){ + public static TaskRecordStatus getTaskRecordState(String procName, String procDate) { String sql = String.format("SELECT * FROM eamp_hive_log_hd WHERE PROC_NAME='%s' and PROC_DATE like '%s'" - ,procName,procDate + "%"); + , procName, procDate + "%"); List taskRecordList = getQueryResult(sql); // contains no record and sql exception - if (CollectionUtils.isEmpty(taskRecordList)){ + if (CollectionUtils.isEmpty(taskRecordList)) { // exception return TaskRecordStatus.EXCEPTION; - }else if (taskRecordList.size() > 1){ + } else if (taskRecordList.size() > 1) { return TaskRecordStatus.EXCEPTION; - }else { + } else { TaskRecord taskRecord = taskRecordList.get(0); - if (taskRecord == null){ + if (taskRecord == null) { return TaskRecordStatus.EXCEPTION; } Long targetRowCount = taskRecord.getTargetRowCount(); - if (targetRowCount <= 0){ + if (targetRowCount <= 0) { return TaskRecordStatus.FAILURE; - }else { + } else { return TaskRecordStatus.SUCCESS; } } } - } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java index a46e5aabcc..1132147faf 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java @@ -16,10 +16,21 @@ */ package org.apache.dolphinscheduler.dao.datasource; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * data source base class */ public abstract class BaseDataSource { + + private static final Logger logger = LoggerFactory.getLogger(BaseDataSource.class); + /** * user name */ @@ -57,17 +68,106 @@ public abstract class BaseDataSource { public void setPrincipal(String principal) { this.principal = principal; } + /** - * test whether the data source can be connected successfully - * @throws Exception + * @return driver class + */ + public abstract String driverClassSelector(); + + /** + * @return db type */ - public abstract void isConnectable() throws Exception; + public abstract DbType dbTypeSelector(); /** * gets the JDBC url for the data source connection - * @return + * @return getJdbcUrl + */ + public String getJdbcUrl() { + StringBuilder jdbcUrl = new StringBuilder(getAddress()); + + appendDatabase(jdbcUrl); + appendPrincipal(jdbcUrl); + appendOther(jdbcUrl); + + return jdbcUrl.toString(); + } + + /** + * append database + * @param jdbcUrl jdbc url */ - public abstract String getJdbcUrl(); + private void appendDatabase(StringBuilder jdbcUrl) { + if (dbTypeSelector() == DbType.SQLSERVER) { + jdbcUrl.append(";databaseName=").append(getDatabase()); + } else { + if (getAddress().lastIndexOf('/') != (jdbcUrl.length() - 1)) { + jdbcUrl.append("/"); + } + jdbcUrl.append(getDatabase()); + } + } + + /** + * append principal + * @param jdbcUrl jdbc url + */ + private void appendPrincipal(StringBuilder jdbcUrl) { + boolean tag = dbTypeSelector() == DbType.HIVE || dbTypeSelector() == DbType.SPARK; + if (tag && StringUtils.isNotEmpty(getPrincipal())) { + jdbcUrl.append(";principal=").append(getPrincipal()); + } + } + + /** + * append other + * @param jdbcUrl jdbc url + */ + private void appendOther(StringBuilder jdbcUrl) { + if (StringUtils.isNotEmpty(getOther())) { + String separator = ""; + switch (dbTypeSelector()) { + case CLICKHOUSE: + case MYSQL: + case ORACLE: + case POSTGRESQL: + separator = "?"; + break; + case DB2: + separator = ":"; + break; + case HIVE: + case SPARK: + case SQLSERVER: + separator = ";"; + break; + default: + logger.error("Db type mismatch!"); + } + jdbcUrl.append(separator).append(getOther()); + } + } + + /** + * test whether the data source can be connected successfully + */ + public void isConnectable() { + Connection con = null; + try { + Class.forName(driverClassSelector()); + con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } catch (ClassNotFoundException | SQLException e) { + logger.error("Get connection error: {}", e.getMessage()); + } finally { + if (con != null) { + try { + con.close(); + } catch (SQLException e) { + logger.error(e.getMessage(), e); + } + } + } + } public String getUser() { return user; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java index e159f81d2e..ba34ff82d6 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java @@ -17,59 +17,26 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of ClickHouse */ public class ClickHouseDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(ClickHouseDataSource.class); /** - * gets the JDBC url for the data source connection - * @return + * @return driver class */ @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" + getOther(); - } - - return jdbcUrl; + public String driverClassSelector() { + return Constants.COM_CLICKHOUSE_JDBC_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("ClickHouse datasource try conn close conn error", e); - } - } - } - + public DbType dbTypeSelector() { + return DbType.CLICKHOUSE; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java index a3bc6a0150..2664273724 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java @@ -34,120 +34,108 @@ import javax.sql.DataSource; /** - * not spring manager connection, only use for init db, and alert module for non-spring application + * not spring manager connection, only use for init db, and alert module for non-spring application * data source connection factory */ -public class ConnectionFactory extends SpringConnectionFactory{ +public class ConnectionFactory extends SpringConnectionFactory { private static final Logger logger = LoggerFactory.getLogger(ConnectionFactory.class); + private static class ConnectionFactoryHolder { + private static final ConnectionFactory connectionFactory = new ConnectionFactory(); + } + + public static ConnectionFactory getInstance() { + return ConnectionFactoryHolder.connectionFactory; + } + + private ConnectionFactory() { + try { + dataSource = buildDataSource(); + sqlSessionFactory = getSqlSessionFactory(); + sqlSessionTemplate = getSqlSessionTemplate(); + } catch (Exception e) { + logger.error("Initializing ConnectionFactory error", e); + throw new RuntimeException(e); + } + } /** * sql session factory */ - private static SqlSessionFactory sqlSessionFactory; + private SqlSessionFactory sqlSessionFactory; /** * sql session template */ - private static SqlSessionTemplate sqlSessionTemplate; + private SqlSessionTemplate sqlSessionTemplate; + + private DataSource dataSource; + + public DataSource getDataSource() { + return dataSource; + } /** * get the data source + * * @return druid dataSource */ - public static DruidDataSource getDataSource() { - - DruidDataSource druidDataSource = new DruidDataSource(); - - druidDataSource.setDriverClassName(conf.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); - druidDataSource.setUrl(conf.getString(Constants.SPRING_DATASOURCE_URL)); - druidDataSource.setUsername(conf.getString(Constants.SPRING_DATASOURCE_USERNAME)); - druidDataSource.setPassword(conf.getString(Constants.SPRING_DATASOURCE_PASSWORD)); - druidDataSource.setValidationQuery(conf.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY)); - - druidDataSource.setPoolPreparedStatements(conf.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS)); - druidDataSource.setTestWhileIdle(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE)); - druidDataSource.setTestOnBorrow(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW)); - druidDataSource.setTestOnReturn(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN)); - druidDataSource.setKeepAlive(conf.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE)); - - druidDataSource.setMinIdle(conf.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE)); - druidDataSource.setMaxActive(conf.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE)); - druidDataSource.setMaxWait(conf.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT)); - druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(conf.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE)); - druidDataSource.setInitialSize(conf.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE)); - druidDataSource.setTimeBetweenEvictionRunsMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS)); - druidDataSource.setTimeBetweenConnectErrorMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS)); - druidDataSource.setMinEvictableIdleTimeMillis(conf.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS)); - druidDataSource.setValidationQueryTimeout(conf.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT)); - //auto commit - druidDataSource.setDefaultAutoCommit(conf.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT)); + private DataSource buildDataSource() { + DruidDataSource druidDataSource = dataSource(); return druidDataSource; } /** * * get sql session factory + * * @return sqlSessionFactory * @throws Exception sqlSessionFactory exception */ - public static SqlSessionFactory getSqlSessionFactory() throws Exception { - if (sqlSessionFactory == null) { - synchronized (ConnectionFactory.class) { - if (sqlSessionFactory == null) { - DataSource dataSource = getDataSource(); - TransactionFactory transactionFactory = new JdbcTransactionFactory(); - - Environment environment = new Environment("development", transactionFactory, dataSource); - - MybatisConfiguration configuration = new MybatisConfiguration(); - configuration.setEnvironment(environment); - configuration.setLazyLoadingEnabled(true); - configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); - configuration.addInterceptor(new PaginationInterceptor()); - - MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); - sqlSessionFactoryBean.setConfiguration(configuration); - sqlSessionFactoryBean.setDataSource(dataSource); - - sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); - sqlSessionFactory = sqlSessionFactoryBean.getObject(); - } - } - } + private SqlSessionFactory getSqlSessionFactory() throws Exception { + TransactionFactory transactionFactory = new JdbcTransactionFactory(); + + Environment environment = new Environment("development", transactionFactory, getDataSource()); + + MybatisConfiguration configuration = new MybatisConfiguration(); + configuration.setEnvironment(environment); + configuration.setLazyLoadingEnabled(true); + configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); + configuration.addInterceptor(new PaginationInterceptor()); + + MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); + sqlSessionFactoryBean.setConfiguration(configuration); + sqlSessionFactoryBean.setDataSource(getDataSource()); + + sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); + sqlSessionFactory = sqlSessionFactoryBean.getObject(); return sqlSessionFactory; +} + + private SqlSessionTemplate getSqlSessionTemplate() { + sqlSessionTemplate = new SqlSessionTemplate(sqlSessionFactory); + return sqlSessionTemplate; } /** * get sql session + * * @return sqlSession */ - public static SqlSession getSqlSession() { - if (sqlSessionTemplate == null) { - synchronized (ConnectionFactory.class) { - if (sqlSessionTemplate == null) { - try { - sqlSessionTemplate = new SqlSessionTemplate(getSqlSessionFactory()); - return sqlSessionTemplate; - } catch (Exception e) { - logger.error("getSqlSession error", e); - throw new RuntimeException(e); - } - } - } - } + public SqlSession getSqlSession() { return sqlSessionTemplate; } /** * get mapper + * * @param type target class - * @param generic + * @param generic * @return target object */ - public static T getMapper(Class type) { + public T getMapper(Class type) { try { return getSqlSession().getMapper(type); } catch (Exception e) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java index 3c2366b5b0..29448a0fdd 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java @@ -17,58 +17,27 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of DB2 Server */ public class DB2ServerDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(DB2ServerDataSource.class); /** - * gets the JDBC url for the data source connection - * @return + * gets the JDBC url for the data source connection + * @return jdbc url */ @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ":" + getOther(); - } - return jdbcUrl; + public String driverClassSelector() { + return Constants.COM_DB2_JDBC_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.COM_DB2_JDBC_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("DB2 Server datasource try conn close conn error", e); - } - } - } - + public DbType dbTypeSelector() { + return DbType.DB2; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java index 9571f9c9f6..cca1fa041d 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java @@ -29,6 +29,12 @@ public class DataSourceFactory { private static final Logger logger = LoggerFactory.getLogger(DataSourceFactory.class); + /** + * getDatasource + * @param dbType dbType + * @param parameter parameter + * @return getDatasource + */ public static BaseDataSource getDatasource(DbType dbType, String parameter) { try { switch (dbType) { diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java index 4dfb8817a8..055937b49c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java @@ -17,63 +17,27 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of hive */ public class HiveDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(HiveDataSource.class); - /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getPrincipal())){ - jdbcUrl += ";principal=" + getPrincipal(); - } - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ";" + getOther(); - } - - return jdbcUrl; + public String driverClassSelector() { + return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("hive datasource try conn close conn error", e); - } - } - } + public DbType dbTypeSelector() { + return DbType.HIVE; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java index 6e2fbe3dd8..94a4895df9 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java @@ -17,57 +17,28 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of mySQL */ public class MySQLDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(MySQLDataSource.class); - /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override - public String getJdbcUrl() { - String address = getAddress(); - if (address.lastIndexOf("/") != (address.length() - 1)) { - address += "/"; - } - String jdbcUrl = address + getDatabase(); - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" + getOther(); - } - return jdbcUrl; + public String driverClassSelector() { + return Constants.COM_MYSQL_JDBC_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Mysql datasource try conn close conn error", e); - } - } - } + public DbType dbTypeSelector() { + return DbType.MYSQL; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java index cefaf879b5..c457583075 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java @@ -17,23 +17,37 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.enums.DbConnectType; +import org.apache.dolphinscheduler.common.enums.DbType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; - /** * data source of Oracle */ public class OracleDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(OracleDataSource.class); + + private DbConnectType type; + + public DbConnectType getType() { + return type; + } + + public void setType(DbConnectType type) { + this.type = type; + } + + /** + * @return driver class + */ + @Override + public String driverClassSelector() { + return Constants.COM_ORACLE_JDBC_DRIVER; + } /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override public String getJdbcUrl() { @@ -41,35 +55,15 @@ public class OracleDataSource extends BaseDataSource { if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { jdbcUrl += "/"; } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" + getOther(); - } - return jdbcUrl; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.COM_ORACLE_JDBC_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Oracle datasource try conn close conn error", e); - } - } - } - + public DbType dbTypeSelector() { + return DbType.ORACLE; } + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java index 176cba2587..5a71976c53 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java @@ -17,61 +17,27 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of postgreSQL */ public class PostgreDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(PostgreDataSource.class); - - /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += "?" + getOther(); - } - - return jdbcUrl; + public String driverClassSelector() { + return Constants.ORG_POSTGRESQL_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.ORG_POSTGRESQL_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Postgre datasource try conn close conn error", e); - } - } - } - + public DbType dbTypeSelector() { + return DbType.POSTGRESQL; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java index 07770c06a7..e4b8f4bf13 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -29,11 +30,12 @@ import java.sql.SQLException; * data source of SQL Server */ public class SQLServerDataSource extends BaseDataSource { + private static final Logger logger = LoggerFactory.getLogger(SQLServerDataSource.class); /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override public String getJdbcUrl() { @@ -49,14 +51,15 @@ public class SQLServerDataSource extends BaseDataSource { /** * test whether the data source can be connected successfully - * @throws Exception */ @Override - public void isConnectable() throws Exception { + public void isConnectable() { Connection con = null; try { Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER); con = DriverManager.getConnection(getJdbcUrl(), getUser(), getPassword()); + } catch (Exception e) { + logger.error("error", e); } finally { if (con != null) { try { @@ -66,6 +69,20 @@ public class SQLServerDataSource extends BaseDataSource { } } } - } + /** + * @return driver class + */ + @Override + public String driverClassSelector() { + return Constants.COM_SQLSERVER_JDBC_DRIVER; + } + + /** + * @return db type + */ + @Override + public DbType dbTypeSelector() { + return DbType.SQLSERVER; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java index 81a5ac6f04..0329ef8400 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java @@ -17,64 +17,27 @@ package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; +import org.apache.dolphinscheduler.common.enums.DbType; /** * data source of spark */ public class SparkDataSource extends BaseDataSource { - private static final Logger logger = LoggerFactory.getLogger(SparkDataSource.class); - /** * gets the JDBC url for the data source connection - * @return + * @return jdbc url */ @Override - public String getJdbcUrl() { - String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { - jdbcUrl += "/"; - } - - jdbcUrl += getDatabase(); - - if (StringUtils.isNotEmpty(getPrincipal())){ - jdbcUrl += ";principal=" + getPrincipal(); - } - - if (StringUtils.isNotEmpty(getOther())) { - jdbcUrl += ";" + getOther(); - } - - return jdbcUrl; + public String driverClassSelector() { + return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER; } /** - * test whether the data source can be connected successfully - * @throws Exception + * @return db type */ @Override - public void isConnectable() throws Exception { - Connection con = null; - try { - Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); - con = DriverManager.getConnection(getJdbcUrl(), getUser(), ""); - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error("Spark datasource try conn close conn error", e); - } - } - } - + public DbType dbTypeSelector() { + return DbType.SPARK; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java index 8eb1a2bb97..9e27d949aa 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SpringConnectionFactory.java @@ -17,20 +17,26 @@ package org.apache.dolphinscheduler.dao.datasource; import com.alibaba.druid.pool.DruidDataSource; +import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.core.MybatisConfiguration; +import com.baomidou.mybatisplus.core.config.GlobalConfig; import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.dao.utils.PropertyUtils; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; +import org.apache.ibatis.type.JdbcType; import org.mybatis.spring.SqlSessionTemplate; import org.mybatis.spring.annotation.MapperScan; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; +import org.springframework.core.io.support.ResourcePatternResolver; import org.springframework.jdbc.datasource.DataSourceTransactionManager; @@ -43,19 +49,6 @@ public class SpringConnectionFactory { private static final Logger logger = LoggerFactory.getLogger(SpringConnectionFactory.class); - /** - * Load configuration file - */ - protected static org.apache.commons.configuration.Configuration conf; - - static { - try { - conf = new PropertiesConfiguration(Constants.APPLICATION_PROPERTIES); - } catch (ConfigurationException e) { - logger.error("load configuration exception", e); - System.exit(1); - } - } /** * pagination interceptor @@ -70,35 +63,34 @@ public class SpringConnectionFactory { * get the data source * @return druid dataSource */ - @Bean + @Bean(destroyMethod="") public DruidDataSource dataSource() { DruidDataSource druidDataSource = new DruidDataSource(); - druidDataSource.setDriverClassName(conf.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); - druidDataSource.setUrl(conf.getString(Constants.SPRING_DATASOURCE_URL)); - druidDataSource.setUsername(conf.getString(Constants.SPRING_DATASOURCE_USERNAME)); - druidDataSource.setPassword(conf.getString(Constants.SPRING_DATASOURCE_PASSWORD)); - druidDataSource.setValidationQuery(conf.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY)); - - druidDataSource.setPoolPreparedStatements(conf.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS)); - druidDataSource.setTestWhileIdle(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE)); - druidDataSource.setTestOnBorrow(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW)); - druidDataSource.setTestOnReturn(conf.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN)); - druidDataSource.setKeepAlive(conf.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE)); - - druidDataSource.setMinIdle(conf.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE)); - druidDataSource.setMaxActive(conf.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE)); - druidDataSource.setMaxWait(conf.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT)); - druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(conf.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE)); - druidDataSource.setInitialSize(conf.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE)); - druidDataSource.setTimeBetweenEvictionRunsMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS)); - druidDataSource.setTimeBetweenConnectErrorMillis(conf.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS)); - druidDataSource.setMinEvictableIdleTimeMillis(conf.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS)); - druidDataSource.setValidationQueryTimeout(conf.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT)); + druidDataSource.setDriverClassName(PropertyUtils.getString(Constants.SPRING_DATASOURCE_DRIVER_CLASS_NAME)); + druidDataSource.setUrl(PropertyUtils.getString(Constants.SPRING_DATASOURCE_URL)); + druidDataSource.setUsername(PropertyUtils.getString(Constants.SPRING_DATASOURCE_USERNAME)); + druidDataSource.setPassword(PropertyUtils.getString(Constants.SPRING_DATASOURCE_PASSWORD)); + druidDataSource.setValidationQuery(PropertyUtils.getString(Constants.SPRING_DATASOURCE_VALIDATION_QUERY,"SELECT 1")); + + druidDataSource.setPoolPreparedStatements(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS,true)); + druidDataSource.setTestWhileIdle(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_WHILE_IDLE,true)); + druidDataSource.setTestOnBorrow(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_BORROW,true)); + druidDataSource.setTestOnReturn(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_TEST_ON_RETURN,true)); + druidDataSource.setKeepAlive(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_KEEP_ALIVE,true)); + + druidDataSource.setMinIdle(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MIN_IDLE,5)); + druidDataSource.setMaxActive(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_ACTIVE,50)); + druidDataSource.setMaxWait(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_WAIT,60000)); + druidDataSource.setMaxPoolPreparedStatementPerConnectionSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE,20)); + druidDataSource.setInitialSize(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_INITIAL_SIZE,5)); + druidDataSource.setTimeBetweenEvictionRunsMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS,60000)); + druidDataSource.setTimeBetweenConnectErrorMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS,60000)); + druidDataSource.setMinEvictableIdleTimeMillis(PropertyUtils.getLong(Constants.SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS,300000)); + druidDataSource.setValidationQueryTimeout(PropertyUtils.getInt(Constants.SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT,3)); //auto commit - druidDataSource.setDefaultAutoCommit(conf.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT)); - + druidDataSource.setDefaultAutoCommit(PropertyUtils.getBoolean(Constants.SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT,true)); return druidDataSource; } @@ -119,20 +111,31 @@ public class SpringConnectionFactory { @Bean public SqlSessionFactory sqlSessionFactory() throws Exception { MybatisConfiguration configuration = new MybatisConfiguration(); - configuration.addMappers("org.apache.dolphinscheduler.dao.mapper"); + configuration.setMapUnderscoreToCamelCase(true); + configuration.setCacheEnabled(false); + configuration.setCallSettersOnNulls(true); + configuration.setJdbcTypeForNull(JdbcType.NULL); configuration.addInterceptor(paginationInterceptor()); - MybatisSqlSessionFactoryBean sqlSessionFactoryBean = new MybatisSqlSessionFactoryBean(); sqlSessionFactoryBean.setConfiguration(configuration); sqlSessionFactoryBean.setDataSource(dataSource()); + GlobalConfig.DbConfig dbConfig = new GlobalConfig.DbConfig(); + dbConfig.setIdType(IdType.AUTO); + GlobalConfig globalConfig = new GlobalConfig(); + globalConfig.setDbConfig(dbConfig); + sqlSessionFactoryBean.setGlobalConfig(globalConfig); + sqlSessionFactoryBean.setTypeAliasesPackage("org.apache.dolphinscheduler.dao.entity"); + ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); + sqlSessionFactoryBean.setMapperLocations(resolver.getResources("org/apache/dolphinscheduler/dao/mapper/*Mapper.xml")); sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); return sqlSessionFactoryBean.getObject(); } /** * get sql session - * @return sqlSession + * @return SqlSession + * @throws Exception */ @Bean public SqlSession sqlSession() throws Exception{ diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java index 25667924ac..7d52dc93f3 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java @@ -108,13 +108,11 @@ public class Command { @TableField("update_time") private Date updateTime; - /** - * + * worker group */ - @TableField("worker_group_id") - private int workerGroupId; - + @TableField("worker_group") + private String workerGroup; public Command() { this.taskDependType = TaskDependType.TASK_POST; @@ -254,13 +252,12 @@ public class Command { this.updateTime = updateTime; } - - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; } @Override @@ -283,7 +280,7 @@ public class Command { if (executorId != command.executorId) { return false; } - if (workerGroupId != command.workerGroupId) { + if (workerGroup != null ? workerGroup.equals(command.workerGroup) : command.workerGroup == null) { return false; } if (commandType != command.commandType) { @@ -332,10 +329,9 @@ public class Command { result = 31 * result + (startTime != null ? startTime.hashCode() : 0); result = 31 * result + (processInstancePriority != null ? processInstancePriority.hashCode() : 0); result = 31 * result + (updateTime != null ? updateTime.hashCode() : 0); - result = 31 * result + workerGroupId; + result = 31 * result + (workerGroup != null ? workerGroup.hashCode() : 0); return result; } - @Override public String toString() { return "Command{" + @@ -352,7 +348,7 @@ public class Command { ", startTime=" + startTime + ", processInstancePriority=" + processInstancePriority + ", updateTime=" + updateTime + - ", workerGroupId=" + workerGroupId + + ", workerGroup='" + workerGroup + '\'' + '}'; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java index 7f3eb38760..127c5b7322 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ErrorCommand.java @@ -101,9 +101,9 @@ public class ErrorCommand { private String message; /** - * worker group id + * worker group */ - private int workerGroupId; + private String workerGroup; public ErrorCommand(){} @@ -257,17 +257,25 @@ public class ErrorCommand { this.updateTime = updateTime; } - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; } @Override public String toString() { - return "Command{" + + return "ErrorCommand{" + "id=" + id + ", commandType=" + commandType + ", processDefinitionId=" + processDefinitionId + @@ -281,17 +289,8 @@ public class ErrorCommand { ", startTime=" + startTime + ", processInstancePriority=" + processInstancePriority + ", updateTime=" + updateTime + - ", message=" + message + + ", message='" + message + '\'' + + ", workerGroup='" + workerGroup + '\'' + '}'; } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java index dbb880c025..e29de897ef 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessDefinition.java @@ -16,10 +16,10 @@ */ package org.apache.dolphinscheduler.dao.entity; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.process.Property; -import com.alibaba.fastjson.JSONObject; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; import com.baomidou.mybatisplus.annotation.TableId; @@ -163,6 +163,11 @@ public class ProcessDefinition { */ private String modifyBy; + /** + * resource ids + */ + private String resourceIds; + public String getName() { return name; @@ -266,7 +271,7 @@ public class ProcessDefinition { } public void setGlobalParams(String globalParams) { - this.globalParamList = JSONObject.parseArray(globalParams, Property.class); + this.globalParamList = JSON.parseArray(globalParams, Property.class); this.globalParams = globalParams; } @@ -275,7 +280,7 @@ public class ProcessDefinition { } public void setGlobalParamList(List globalParamList) { - this.globalParams = JSONObject.toJSONString(globalParamList); + this.globalParams = JSON.toJSONString(globalParamList); this.globalParamList = globalParamList; } @@ -283,7 +288,7 @@ public class ProcessDefinition { List propList; if (globalParamMap == null && StringUtils.isNotEmpty(globalParams)) { - propList = JSONObject.parseArray(globalParams, Property.class); + propList = JSON.parseArray(globalParams, Property.class); globalParamMap = propList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); } @@ -334,6 +339,14 @@ public class ProcessDefinition { this.scheduleReleaseState = scheduleReleaseState; } + public String getResourceIds() { + return resourceIds; + } + + public void setResourceIds(String resourceIds) { + this.resourceIds = resourceIds; + } + public int getTimeout() { return timeout; } @@ -393,6 +406,8 @@ public class ProcessDefinition { ", timeout=" + timeout + ", tenantId=" + tenantId + ", modifyBy='" + modifyBy + '\'' + + ", resourceIds='" + resourceIds + '\'' + '}'; } + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java index fb5d0cda42..2fa8e64451 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java @@ -195,9 +195,9 @@ public class ProcessInstance { private Priority processInstancePriority; /** - * worker group id + * worker group */ - private int workerGroupId; + private String workerGroup; /** * process timeout for warning @@ -209,12 +209,6 @@ public class ProcessInstance { */ private int tenantId; - /** - * worker group name. for api. - */ - @TableField(exist = false) - private String workerGroupName; - /** * receivers for api */ @@ -366,7 +360,7 @@ public class ProcessInstance { } - public boolean IsProcessInstanceStop(){ + public boolean isProcessInstanceStop(){ return this.state.typeIsFinished(); } @@ -506,8 +500,8 @@ public class ProcessInstance { * check this process is start complement data * @return whether complement data */ - public Boolean isComplementData(){ - if(!StringUtils.isNotEmpty(this.historyCmd)){ + public boolean isComplementData(){ + if(StringUtils.isEmpty(this.historyCmd)){ return false; } return historyCmd.startsWith(CommandType.COMPLEMENT_DATA.toString()); @@ -541,12 +535,12 @@ public class ProcessInstance { this.duration = duration; } - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; } public int getTimeout() { @@ -566,14 +560,6 @@ public class ProcessInstance { return this.tenantId ; } - public String getWorkerGroupName() { - return workerGroupName; - } - - public void setWorkerGroupName(String workerGroupName) { - this.workerGroupName = workerGroupName; - } - public String getReceivers() { return receivers; } @@ -624,10 +610,9 @@ public class ProcessInstance { ", dependenceScheduleTimes='" + dependenceScheduleTimes + '\'' + ", duration=" + duration + ", processInstancePriority=" + processInstancePriority + - ", workerGroupId=" + workerGroupId + + ", workerGroup='" + workerGroup + '\'' + ", timeout=" + timeout + ", tenantId=" + tenantId + - ", workerGroupName='" + workerGroupName + '\'' + ", receivers='" + receivers + '\'' + ", receiversCc='" + receiversCc + '\'' + '}'; @@ -635,8 +620,12 @@ public class ProcessInstance { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } ProcessInstance that = (ProcessInstance) o; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java index 934be4ba3d..16d94914fd 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Resource.java @@ -32,11 +32,26 @@ public class Resource { @TableId(value="id", type=IdType.AUTO) private int id; + /** + * parent id + */ + private int pid; + /** * resource alias */ private String alias; + /** + * full name + */ + private String fullName; + + /** + * is directory + */ + private boolean isDirectory=false; + /** * description */ @@ -89,7 +104,15 @@ public class Resource { this.updateTime = updateTime; } - public Resource(String alias, String fileName, String description, int userId, ResourceType type, long size, Date createTime, Date updateTime) { + public Resource(int id, int pid, String alias, String fullName, boolean isDirectory) { + this.id = id; + this.pid = pid; + this.alias = alias; + this.fullName = fullName; + this.isDirectory = isDirectory; + } + + /*public Resource(String alias, String fileName, String description, int userId, ResourceType type, long size, Date createTime, Date updateTime) { this.alias = alias; this.fileName = fileName; this.description = description; @@ -98,6 +121,20 @@ public class Resource { this.size = size; this.createTime = createTime; this.updateTime = updateTime; + }*/ + + public Resource(int pid, String alias, String fullName, boolean isDirectory, String description, String fileName, int userId, ResourceType type, long size, Date createTime, Date updateTime) { + this.pid = pid; + this.alias = alias; + this.fullName = fullName; + this.isDirectory = isDirectory; + this.description = description; + this.fileName = fileName; + this.userId = userId; + this.type = type; + this.size = size; + this.createTime = createTime; + this.updateTime = updateTime; } public int getId() { @@ -116,6 +153,30 @@ public class Resource { this.alias = alias; } + public int getPid() { + return pid; + } + + public void setPid(int pid) { + this.pid = pid; + } + + public String getFullName() { + return fullName; + } + + public void setFullName(String fullName) { + this.fullName = fullName; + } + + public boolean isDirectory() { + return isDirectory; + } + + public void setDirectory(boolean directory) { + isDirectory = directory; + } + public String getFileName() { return fileName; } @@ -177,9 +238,12 @@ public class Resource { public String toString() { return "Resource{" + "id=" + id + + ", pid=" + pid + ", alias='" + alias + '\'' + - ", fileName='" + fileName + '\'' + + ", fullName='" + fullName + '\'' + + ", isDirectory=" + isDirectory + ", description='" + description + '\'' + + ", fileName='" + fileName + '\'' + ", userId=" + userId + ", type=" + type + ", size=" + size + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java index cfda49df6e..0cb41080b2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Schedule.java @@ -122,9 +122,9 @@ public class Schedule { private Priority processInstancePriority; /** - * worker group id + * worker group */ - private int workerGroupId; + private String workerGroup; public int getWarningGroupId() { return warningGroupId; @@ -265,13 +265,12 @@ public class Schedule { this.processInstancePriority = processInstancePriority; } - - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; } @Override @@ -294,7 +293,7 @@ public class Schedule { ", releaseState=" + releaseState + ", warningGroupId=" + warningGroupId + ", processInstancePriority=" + processInstancePriority + - ", workerGroupId=" + workerGroupId + + ", workerGroup='" + workerGroup + '\'' + '}'; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java index c692575e3a..53b56e54b2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java @@ -27,13 +27,15 @@ import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableId; import com.baomidou.mybatisplus.annotation.TableName; +import java.io.Serializable; import java.util.Date; +import java.util.List; /** * task instance */ @TableName("t_ds_task_instance") -public class TaskInstance { +public class TaskInstance implements Serializable { /** * id @@ -46,6 +48,8 @@ public class TaskInstance { */ private String name; + + /** * task type */ @@ -154,20 +158,17 @@ public class TaskInstance { /** * duration - * @return */ @TableField(exist = false) private Long duration; /** * max retry times - * @return */ private int maxRetryTimes; /** * task retry interval, unit: minute - * @return */ private int retryInterval; @@ -184,17 +185,16 @@ public class TaskInstance { /** * dependent state - * @return */ @TableField(exist = false) private String dependentResult; /** - * worker group id - * @return + * workerGroup */ - private int workerGroupId; + private String workerGroup; + /** * executor id @@ -208,8 +208,12 @@ public class TaskInstance { private String executorName; + @TableField(exist = false) + private List resources; + - public void init(String host,Date startTime,String executePath){ + + public void init(String host,Date startTime,String executePath){ this.host = host; this.startTime = startTime; this.executePath = executePath; @@ -373,9 +377,6 @@ public class TaskInstance { } - public Boolean isSubProcess(){ - return TaskType.SUB_PROCESS.getDescp().equals(this.taskType); - } public String getDependency(){ @@ -442,13 +443,34 @@ public class TaskInstance { this.executorName = executorName; } - public Boolean isTaskComplete() { + public boolean isTaskComplete() { return this.getState().typeIsPause() || this.getState().typeIsSuccess() || this.getState().typeIsCancel() || (this.getState().typeIsFailure() && !taskCanRetry()); } + + public List getResources() { + return resources; + } + + public boolean isSubProcess(){ + return TaskType.SUB_PROCESS.equals(TaskType.valueOf(this.taskType)); + } + + public boolean isDependTask(){ + return TaskType.DEPENDENT.equals(TaskType.valueOf(this.taskType)); + } + + public boolean isConditionsTask(){ + return TaskType.CONDITIONS.equals(TaskType.valueOf(this.taskType)); + } + + public void setResources(List resources) { + this.resources = resources; + } + /** * determine if you can try again * @return can try result @@ -485,12 +507,12 @@ public class TaskInstance { this.processInstancePriority = processInstancePriority; } - public int getWorkerGroupId() { - return workerGroupId; + public String getWorkerGroup() { + return workerGroup; } - public void setWorkerGroupId(int workerGroupId) { - this.workerGroupId = workerGroupId; + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; } public String getDependentResult() { @@ -532,7 +554,7 @@ public class TaskInstance { ", taskInstancePriority=" + taskInstancePriority + ", processInstancePriority=" + processInstancePriority + ", dependentResult='" + dependentResult + '\'' + - ", workerGroupId=" + workerGroupId + + ", workerGroup='" + workerGroup + '\'' + ", executorId=" + executorId + ", executorName='" + executorName + '\'' + '}'; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java index 3518676337..e14255be77 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java @@ -185,24 +185,6 @@ public class UdfFunc { this.updateTime = updateTime; } - @Override - public String toString() { - return "UdfFunc{" + - "id=" + id + - ", userId=" + userId + - ", funcName='" + funcName + '\'' + - ", className='" + className + '\'' + - ", argTypes='" + argTypes + '\'' + - ", database='" + database + '\'' + - ", description='" + description + '\'' + - ", resourceId=" + resourceId + - ", resourceName='" + resourceName + '\'' + - ", type=" + type + - ", createTime=" + createTime + - ", updateTime=" + updateTime + - '}'; - } - @Override public boolean equals(Object o) { if (this == o) { @@ -227,4 +209,22 @@ public class UdfFunc { result = 31 * result + (funcName != null ? funcName.hashCode() : 0); return result; } + + @Override + public String toString() { + return "UdfFunc{" + + "id=" + id + + ", userId=" + userId + + ", funcName='" + funcName + '\'' + + ", className='" + className + '\'' + + ", argTypes='" + argTypes + '\'' + + ", database='" + database + '\'' + + ", description='" + description + '\'' + + ", resourceId=" + resourceId + + ", resourceName='" + resourceName + '\'' + + ", type=" + type + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java index f95fbc7a4d..0c3238a5c5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.java @@ -79,8 +79,10 @@ public interface DataSourceMapper extends BaseMapper { /** * list authorized UDF function + * * @param userId userId * @param dataSourceIds data source id array + * @param T * @return UDF function list */ List listAuthorizedDataSource(@Param("userId") int userId,@Param("dataSourceIds")T[] dataSourceIds); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java index 9f9225cb04..b75bb58b7d 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -20,9 +20,11 @@ import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; +import org.apache.ibatis.annotations.MapKey; import org.apache.ibatis.annotations.Param; import java.util.List; +import java.util.Map; /** * process definition mapper interface @@ -83,7 +85,7 @@ public interface ProcessDefinitionMapper extends BaseMapper { List queryDefinitionListByTenant(@Param("tenantId") int tenantId); /** - * count process definition group by user + * count process definition group by user * @param userId userId * @param projectIds projectIds * @param isAdmin isAdmin @@ -93,4 +95,11 @@ public interface ProcessDefinitionMapper extends BaseMapper { @Param("userId") Integer userId, @Param("projectIds") Integer[] projectIds, @Param("isAdmin") boolean isAdmin); + + /** + * list all resource ids + * @return resource ids list + */ + @MapKey("id") + List> listResources(); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java index 9a5f261254..5ca192811e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -78,6 +78,20 @@ public interface ProcessInstanceMapper extends BaseMapper { * @param endTime endTime * @return process instance IPage */ + + /** + * process instance page + * @param page page + * @param projectId projectId + * @param processDefinitionId processDefinitionId + * @param searchVal searchVal + * @param executorId executorId + * @param statusArray statusArray + * @param host host + * @param startTime startTime + * @param endTime endTime + * @return process instance page + */ IPage queryProcessInstanceListPaging(Page page, @Param("projectId") int projectId, @Param("processDefinitionId") Integer processDefinitionId, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java index cf65e5d08a..f58cc7d496 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.java @@ -30,12 +30,12 @@ public interface ResourceMapper extends BaseMapper { /** * query resource list - * @param alias alias + * @param fullName full name * @param userId userId * @param type type * @return resource list */ - List queryResourceList(@Param("alias") String alias, + List queryResourceList(@Param("fullName") String fullName, @Param("userId") int userId, @Param("type") int type); @@ -43,22 +43,27 @@ public interface ResourceMapper extends BaseMapper { * query resource list * @param userId userId * @param type type + * @param perm perm * @return resource list */ List queryResourceListAuthored( @Param("userId") int userId, - @Param("type") int type); + @Param("type") int type, + @Param("perm") int perm); + /** * resource page * @param page page - * @param userId query all if 0, then query the authed resources + * @param userId userId + * @param id id * @param type type * @param searchVal searchVal - * @return resource list + * @return resource page */ IPage queryResourcePaging(IPage page, @Param("userId") int userId, + @Param("id") int id, @Param("type") int type, @Param("searchVal") String searchVal); @@ -76,19 +81,67 @@ public interface ResourceMapper extends BaseMapper { */ List queryResourceExceptUserId(@Param("userId") int userId); - /** * query tenant code by name * @param resName resource name + * @param resType resource type * @return tenant code */ - String queryTenantCodeByResourceName(@Param("resName") String resName); + String queryTenantCodeByResourceName(@Param("resName") String resName,@Param("resType") int resType); /** * list authorized resource * @param userId userId - * @param resNames resource names + * @param resNames resNames + * @param T * @return resource list */ List listAuthorizedResource(@Param("userId") int userId,@Param("resNames")T[] resNames); + + + + /** + * list authorized resource + * @param userId userId + * @param resIds resIds + * @param T + * @return resource list + */ + List listAuthorizedResourceById(@Param("userId") int userId,@Param("resIds")T[] resIds); + + /** + * delete resource by id array + * @param resIds resource id array + * @return delete num + */ + int deleteIds(@Param("resIds")Integer[] resIds); + + /** + * list children + * @param direcotyId directory id + * @return resource id array + */ + List listChildren(@Param("direcotyId") int direcotyId); + + /** + * query resource by full name or pid + * @param fullName full name + * @param type resource type + * @return resource + */ + List queryResource(@Param("fullName") String fullName,@Param("type") int type); + + /** + * list resource by id array + * @param resIds resource id array + * @return resource list + */ + List listResourceByIds(@Param("resIds")Integer[] resIds); + + /** + * update resource + * @param resourceList resource list + * @return update num + */ + int batchUpdateResource(@Param("resourceList") List resourceList); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.java index 6e973d7cc0..176f7d8eb4 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.java @@ -34,4 +34,13 @@ public interface ResourceUserMapper extends BaseMapper { int deleteResourceUser(@Param("userId") int userId, @Param("resourceId") int resourceId); + /** + * delete resource user relation + * @param userId userId + * @param resIds resource Ids + * @return delete result + */ + int deleteResourceUserArray(@Param("userId") int userId, + @Param("resIds") Integer[] resIds); + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java index 8a49c8ff4f..225677d152 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.java @@ -62,8 +62,8 @@ public interface ScheduleMapper extends BaseMapper { /** * query schedule list by process definition id - * @param processDefinitionId - * @return + * @param processDefinitionId processDefinitionId + * @return schedule list */ List queryReleaseSchedulerListByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java index 5a8734233c..a2ce6b29b8 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java @@ -86,4 +86,19 @@ public interface UdfFuncMapper extends BaseMapper { */ List listAuthorizedUdfFunc (@Param("userId") int userId,@Param("udfIds")T[] udfIds); + /** + * list UDF by resource id + * @param resourceIds resource id array + * @return UDF function list + */ + List listUdfByResourceId(@Param("resourceIds") Integer[] resourceIds); + + /** + * list authorized UDF by resource id + * @param resourceIds resource id array + * @return UDF function list + */ + List listAuthorizedUdfByResourceId(@Param("userId") int userId,@Param("resourceIds") int[] resourceIds); + + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java index 58bd673fc5..255f1cf081 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/MysqlUpgradeDao.java @@ -29,15 +29,7 @@ import java.sql.SQLException; */ public class MysqlUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); - - /** - * init - */ - @Override - protected void init() { - - } + public static final Logger logger = LoggerFactory.getLogger(MysqlUpgradeDao.class); /** * mysql upgrade dao holder @@ -69,17 +61,12 @@ public class MysqlUpgradeDao extends UpgradeDao { try { conn = dataSource.getConnection(); rs = conn.getMetaData().getTables(null, null, tableName, null); - if (rs.next()) { - return true; - } else { - return false; - } - + return rs.next(); } catch (SQLException e) { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(rs, null, conn); + ConnectionUtils.releaseResource(rs, conn); } } @@ -96,17 +83,13 @@ public class MysqlUpgradeDao extends UpgradeDao { try { conn = dataSource.getConnection(); ResultSet rs = conn.getMetaData().getColumns(null,null,tableName,columnName); - if (rs.next()) { - return true; - } else { - return false; - } + return rs.next(); } catch (SQLException e) { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(null, null, conn); + ConnectionUtils.releaseResource(conn); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java index 5db273642a..b4049450ab 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/PostgresqlUpgradeDao.java @@ -30,16 +30,8 @@ import java.sql.SQLException; */ public class PostgresqlUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); - private static final String schema = getSchema(); - - /** - * init - */ - @Override - protected void init() { - - } + public static final Logger logger = LoggerFactory.getLogger(PostgresqlUpgradeDao.class); + private static final String SCHEMA = getSchema(); /** * postgresql upgrade dao holder @@ -58,16 +50,6 @@ public class PostgresqlUpgradeDao extends UpgradeDao { return PostgresqlUpgradeDaoHolder.INSTANCE; } - - /** - * init schema - * @param initSqlPath initSqlPath - */ - @Override - public void initSchema(String initSqlPath) { - super.initSchema(initSqlPath); - } - /** * getSchema * @return schema @@ -107,18 +89,14 @@ public class PostgresqlUpgradeDao extends UpgradeDao { try { conn = dataSource.getConnection(); - rs = conn.getMetaData().getTables(null, schema, tableName, null); - if (rs.next()) { - return true; - } else { - return false; - } + rs = conn.getMetaData().getTables(null, SCHEMA, tableName, null); + return rs.next(); } catch (SQLException e) { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(rs, null, conn); + ConnectionUtils.releaseResource(rs, conn); } } @@ -135,18 +113,13 @@ public class PostgresqlUpgradeDao extends UpgradeDao { ResultSet rs = null; try { conn = dataSource.getConnection(); - rs = conn.getMetaData().getColumns(null,schema,tableName,columnName); - if (rs.next()) { - return true; - } else { - return false; - } - + rs = conn.getMetaData().getColumns(null, SCHEMA,tableName,columnName); + return rs.next(); } catch (SQLException e) { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(rs, null, conn); + ConnectionUtils.releaseResource(rs, conn); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java index aed93038f5..e708620f8a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java @@ -27,6 +27,7 @@ import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.sql.DataSource; import java.io.*; import java.sql.Connection; import java.sql.PreparedStatement; @@ -40,7 +41,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { private static final String T_VERSION_NAME = "t_escheduler_version"; private static final String T_NEW_VERSION_NAME = "t_ds_version"; private static final String rootDir = System.getProperty("user.dir"); - protected static final DruidDataSource dataSource = getDataSource(); + protected static final DataSource dataSource = getDataSource(); private static final DbType dbType = getCurrentDbType(); @Override @@ -52,13 +53,8 @@ public abstract class UpgradeDao extends AbstractBaseDao { * get datasource * @return DruidDataSource */ - public static DruidDataSource getDataSource(){ - DruidDataSource dataSource = ConnectionFactory.getDataSource(); - dataSource.setInitialSize(2); - dataSource.setMinIdle(2); - dataSource.setMaxActive(2); - - return dataSource; + public static DataSource getDataSource(){ + return ConnectionFactory.getInstance().getDataSource(); } /** @@ -83,7 +79,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); return null; }finally { - ConnectionUtils.releaseResource(null, null, conn); + ConnectionUtils.releaseResource(conn); } } @@ -164,7 +160,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(null, null, conn); + ConnectionUtils.releaseResource(conn); } @@ -197,7 +193,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(null, null, conn); + ConnectionUtils.releaseResource(conn); } @@ -333,7 +329,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); + ConnectionUtils.releaseResource(pstmt, conn); } } @@ -376,7 +372,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); throw new RuntimeException(e.getMessage(),e); } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); + ConnectionUtils.releaseResource(pstmt, conn); } } @@ -405,7 +401,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { logger.error(e.getMessage(),e); throw new RuntimeException("sql: " + upgradeSQL, e); } finally { - ConnectionUtils.releaseResource(null, pstmt, conn); + ConnectionUtils.releaseResource(pstmt, conn); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java index 7a4dc655f7..1133cadbe7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java @@ -361,4 +361,48 @@ public class DagHelper { processDag.setNodes(taskNodeList); return processDag; } + + /** + * is there have conditions after the parent node + * @param parentNodeName + * @return + */ + public static boolean haveConditionsAfterNode(String parentNodeName, + DAG dag + ){ + boolean result = false; + Set subsequentNodes = dag.getSubsequentNodes(parentNodeName); + if(CollectionUtils.isEmpty(subsequentNodes)){ + return result; + } + for(String nodeName : subsequentNodes){ + TaskNode taskNode = dag.getNode(nodeName); + List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); + if(preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()){ + return true; + } + } + return result; + } + + /** + * is there have conditions after the parent node + * @param parentNodeName + * @return + */ + public static boolean haveConditionsAfterNode(String parentNodeName, + List taskNodes + ){ + boolean result = false; + if(CollectionUtils.isEmpty(taskNodes)){ + return result; + } + for(TaskNode taskNode : taskNodes){ + List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); + if(preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()){ + return true; + } + } + return result; + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java index 40d12ab36f..1e1ee78036 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MysqlPerformance.java @@ -27,7 +27,6 @@ import java.util.Date; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.dao.MonitorDBDao; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +36,7 @@ import org.slf4j.LoggerFactory; */ public class MysqlPerformance extends BaseDBPerformance{ - private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); + private static Logger logger = LoggerFactory.getLogger(MysqlPerformance.class); /** diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java index 031fd00681..b1cdf6f179 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgrePerformance.java @@ -24,7 +24,6 @@ import java.util.Date; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.dao.MonitorDBDao; import org.apache.dolphinscheduler.dao.entity.MonitorRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,7 +33,7 @@ import org.slf4j.LoggerFactory; */ public class PostgrePerformance extends BaseDBPerformance { - private static Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); + private static Logger logger = LoggerFactory.getLogger(PostgrePerformance.class); /** * get monitor record diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java index cdd481a5d7..47cfadbf9a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PropertyUtils.java @@ -49,7 +49,7 @@ public class PropertyUtils { * init */ private void init(){ - String[] propertyFiles = new String[]{Constants.APPLICATION_PROPERTIES}; + String[] propertyFiles = new String[]{Constants.DATASOURCE_PROPERTIES}; for (String fileName : propertyFiles) { InputStream fis = null; try { @@ -77,6 +77,17 @@ public class PropertyUtils { return properties.getProperty(key); } + /** + * get property value + * + * @param key property name + * @param defaultVal default value + * @return property value + */ + public static String getString(String key, String defaultVal) { + String val = properties.getProperty(key.trim()); + return val == null ? defaultVal : val; + } /** * get property value @@ -106,4 +117,46 @@ public class PropertyUtils { } return defaultValue; } + + /** + * get property value + * + * @param key property name + * @return property value + */ + public static Boolean getBoolean(String key) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return false; + } + + /** + * get property value + * + * @param key property name + * @param defaultValue default value + * @return property value + */ + public static Boolean getBoolean(String key, boolean defaultValue) { + String value = properties.getProperty(key.trim()); + if(null != value){ + return Boolean.parseBoolean(value); + } + + return defaultValue; + } + + /** + * get property long value + * @param key key + * @param defaultVal default value + * @return property value + */ + public static long getLong(String key, long defaultVal) { + String val = getString(key); + return val == null ? defaultVal : Long.parseLong(val); + } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java new file mode 100644 index 0000000000..b334603a1a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtils.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.utils; + +import org.apache.dolphinscheduler.common.utils.CollectionUtils; + +import java.util.*; +import java.util.stream.Collectors; + +/** + * resource process definition utils + */ +public class ResourceProcessDefinitionUtils { + /** + * get resource process map key is resource id,value is the set of process definition + * @param list the map key is process definition id and value is resource_ids + * @return resource process definition map + */ + public static Map> getResourceProcessDefinitionMap(List> list) { + Map map = new HashMap<>(); + Map> result = new HashMap<>(); + if (CollectionUtils.isNotEmpty(list)) { + for (Map tempMap : list) { + + map.put((Integer) tempMap.get("id"), (String)tempMap.get("resource_ids")); + } + } + + for (Map.Entry entry : map.entrySet()) { + Integer mapKey = entry.getKey(); + String[] arr = entry.getValue().split(","); + Set mapValues = Arrays.stream(arr).map(Integer::parseInt).collect(Collectors.toSet()); + for (Integer value : mapValues) { + if (result.containsKey(value)) { + Set set = result.get(value); + set.add(mapKey); + result.put(value, set); + } else { + Set set = new HashSet<>(); + set.add(mapKey); + result.put(value, set); + } + } + } + return result; + } +} diff --git a/dolphinscheduler-dao/src/main/resources/application.properties b/dolphinscheduler-dao/src/main/resources/application.properties deleted file mode 100644 index 06b0ee94d5..0000000000 --- a/dolphinscheduler-dao/src/main/resources/application.properties +++ /dev/null @@ -1,149 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# base spring data source configuration -spring.datasource.type=com.alibaba.druid.pool.DruidDataSource -# postgre -spring.datasource.driver-class-name=org.postgresql.Driver -spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler -# mysql -#spring.datasource.driver-class-name=com.mysql.jdbc.Driver -#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 -# h2 -#spring.datasource.driver-class-name=org.h2.Driver -#spring.datasource.url=jdbc:h2:file:../sql/h2;AUTO_SERVER=TRUE - -spring.datasource.username=test -spring.datasource.password=test - -# connection configuration -spring.datasource.initialSize=5 -# min connection number -spring.datasource.minIdle=5 -# max connection number -spring.datasource.maxActive=50 - -# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. -# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. -spring.datasource.maxWait=60000 - -# milliseconds for check to close free connections -spring.datasource.timeBetweenEvictionRunsMillis=60000 - -# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. -spring.datasource.timeBetweenConnectErrorMillis=60000 - -# the longest time a connection remains idle without being evicted, in milliseconds -spring.datasource.minEvictableIdleTimeMillis=300000 - -#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. -spring.datasource.validationQuery=SELECT 1 - -#check whether the connection is valid for timeout, in seconds -spring.datasource.validationQueryTimeout=3 - -# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, -# validation Query is performed to check whether the connection is valid -spring.datasource.testWhileIdle=true - -#execute validation to check if the connection is valid when applying for a connection -spring.datasource.testOnBorrow=true -#execute validation to check if the connection is valid when the connection is returned -spring.datasource.testOnReturn=false -spring.datasource.defaultAutoCommit=true -spring.datasource.keepAlive=true - -# open PSCache, specify count PSCache for every connection -spring.datasource.poolPreparedStatements=true -spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 - -spring.datasource.spring.datasource.filters=stat,wall,log4j -spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 - -#mybatis -mybatis-plus.mapper-locations=classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml - -mybatis-plus.typeEnumsPackage=org.apache.dolphinscheduler.*.enums - -#Entity scan, where multiple packages are separated by a comma or semicolon -mybatis-plus.typeAliasesPackage=org.apache.dolphinscheduler.dao.entity - -#Primary key type AUTO:" database ID AUTO ", INPUT:" user INPUT ID", ID_WORKER:" global unique ID (numeric type unique ID)", UUID:" global unique ID UUID"; -mybatis-plus.global-config.db-config.id-type=AUTO - -#Field policy IGNORED:" ignore judgment ",NOT_NULL:" not NULL judgment "),NOT_EMPTY:" not NULL judgment" -mybatis-plus.global-config.db-config.field-strategy=NOT_NULL - -#The hump underline is converted -mybatis-plus.global-config.db-config.column-underline=true -mybatis-plus.global-config.db-config.logic-delete-value=-1 -mybatis-plus.global-config.db-config.logic-not-delete-value=0 -mybatis-plus.global-config.db-config.banner=false -#The original configuration -mybatis-plus.configuration.map-underscore-to-camel-case=true -mybatis-plus.configuration.cache-enabled=false -mybatis-plus.configuration.call-setters-on-nulls=true -mybatis-plus.configuration.jdbc-type-for-null=null - -# master settings -# master execute thread num -master.exec.threads=100 - -# master execute task number in parallel -master.exec.task.num=20 - -# master heartbeat interval -master.heartbeat.interval=10 - -# master commit task retry times -master.task.commit.retryTimes=5 - -# master commit task interval -master.task.commit.interval=1000 - - -# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 -master.max.cpuload.avg=100 - -# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. -master.reserved.memory=0.1 - -# worker settings -# worker execute thread num -worker.exec.threads=100 - -# worker heartbeat interval -worker.heartbeat.interval=10 - -# submit the number of tasks at a time -worker.fetch.task.num = 3 - -# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 -worker.max.cpuload.avg=100 - -# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G. -worker.reserved.memory=0.1 - -# data quality analysis is not currently in use. please ignore the following configuration -# task record -task.record.flag=false -task.record.datasource.url=jdbc:mysql://192.168.xx.xx:3306/etl?characterEncoding=UTF-8 -task.record.datasource.username=xx -task.record.datasource.password=xx - -# Logger Config -#logging.level.org.apache.dolphinscheduler.dao=debug diff --git a/dolphinscheduler-dao/src/main/resources/datasource.properties b/dolphinscheduler-dao/src/main/resources/datasource.properties new file mode 100644 index 0000000000..2f28ca2b0b --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/datasource.properties @@ -0,0 +1,67 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +# postgre +#spring.datasource.driver-class-name=org.postgresql.Driver +#spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler +# mysql +spring.datasource.driver-class-name=org.postgresql.Driver +spring.datasource.url=jdbc:postgresql://localhost:5432/dolphinscheduler +spring.datasource.username=test +spring.datasource.password=test + +# connection configuration +#spring.datasource.initialSize=5 +# min connection number +#spring.datasource.minIdle=5 +# max connection number +#spring.datasource.maxActive=50 + +# max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. +# If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. +#spring.datasource.maxWait=60000 + +# milliseconds for check to close free connections +#spring.datasource.timeBetweenEvictionRunsMillis=60000 + +# the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. +#spring.datasource.timeBetweenConnectErrorMillis=60000 + +# the longest time a connection remains idle without being evicted, in milliseconds +#spring.datasource.minEvictableIdleTimeMillis=300000 + +#the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. +#spring.datasource.validationQuery=SELECT 1 + +#check whether the connection is valid for timeout, in seconds +#spring.datasource.validationQueryTimeout=3 + +# when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, +# validation Query is performed to check whether the connection is valid +#spring.datasource.testWhileIdle=true + +#execute validation to check if the connection is valid when applying for a connection +#spring.datasource.testOnBorrow=true +#execute validation to check if the connection is valid when the connection is returned +#spring.datasource.testOnReturn=false +#spring.datasource.defaultAutoCommit=true +#spring.datasource.keepAlive=true + +# open PSCache, specify count PSCache for every connection +#spring.datasource.poolPreparedStatements=true +#spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml index f2157783e8..0cabf800cd 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml @@ -29,7 +29,9 @@ and pd.name = #{processDefinitionName} + + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml index 2146d1ac20..6b1c9b7e34 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml @@ -22,8 +22,8 @@ select * from t_ds_resources where 1= 1 - - and alias = #{alias} + + and full_name = #{fullName} and type = #{type} @@ -39,7 +39,11 @@ and type=#{type} - + + and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId} and perm=#{perm} + union select id as resources_id from t_ds_resources where user_id=#{userId}) + + and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId} union select id as resources_id from t_ds_resources where user_id=#{userId}) @@ -47,7 +51,7 @@ select r.* from t_ds_resources r,t_ds_relation_resources_user rel - where r.id = rel.resources_id AND rel.user_id = #{userId} + where r.id = rel.resources_id AND rel.user_id = #{userId} and perm=7 select tenant_code from t_ds_tenant t, t_ds_user u, t_ds_resources res - where t.id = u.tenant_id and u.id = res.user_id and res.type=0 - and res.alias= #{resName} + where t.id = u.tenant_id and u.id = res.user_id and res.type=#{resType} + and res.full_name= #{resName} + + + + + + delete from t_ds_resources where id in + + #{i} + + + + + + + + + + update t_ds_resources + + full_name=#{resource.fullName}, + update_time=#{resource.updateTime} + + + id=#{resource.id} + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml index 6a89e47c2f..7fdd09fecc 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml @@ -29,4 +29,17 @@ and resources_id = #{resourceId} + + + delete + from t_ds_relation_resources_user + where 1 = 1 + + and user_id = #{userId} + + and resources_id in + + #{i} + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml index 0aa10607c4..e38d1637d6 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml @@ -87,4 +87,28 @@ + + \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java new file mode 100644 index 0000000000..ef3f0ffbb9 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/AlertDaoTest.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao; + +import org.apache.dolphinscheduler.common.enums.AlertStatus; +import org.apache.dolphinscheduler.common.enums.AlertType; +import org.apache.dolphinscheduler.common.enums.ShowType; +import org.apache.dolphinscheduler.dao.entity.Alert; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.List; + +public class AlertDaoTest { + @Test + public void testAlertDao(){ + AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class); + Alert alert = new Alert(); + alert.setTitle("Mysql Exception"); + alert.setShowType(ShowType.TEXT); + alert.setContent("[\"alarm time:2018-02-05\", \"service name:MYSQL_ALTER\", \"alarm name:MYSQL_ALTER_DUMP\", " + + "\"get the alarm exception.!,interface error,exception information:timed out\", \"request address:http://blog.csdn.net/dreamInTheWorld/article/details/78539286\"]"); + alert.setAlertType(AlertType.EMAIL); + alert.setAlertGroupId(1); + alert.setAlertStatus(AlertStatus.WAIT_EXECUTION); + alertDao.addAlert(alert); + + + List alerts = alertDao.listWaitExecutionAlert(); + Assert.assertNotNull(alerts); + Assert.assertNotEquals(0, alerts.size()); + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSourceTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSourceTest.java new file mode 100644 index 0000000000..6c44c3e329 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSourceTest.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.dao.datasource; + +import org.apache.dolphinscheduler.common.Constants; +import org.junit.Assert; +import org.junit.Test; + +public class BaseDataSourceTest { + + @Test + public void testDriverClassSelector() { + String mysqlDriverClass = new MySQLDataSource().driverClassSelector(); + Assert.assertEquals(Constants.COM_MYSQL_JDBC_DRIVER, mysqlDriverClass); + + String clickHouseDriverClass = new ClickHouseDataSource().driverClassSelector(); + Assert.assertEquals(Constants.COM_CLICKHOUSE_JDBC_DRIVER, clickHouseDriverClass); + + String db2ServerDriverClass = new DB2ServerDataSource().driverClassSelector(); + Assert.assertEquals(Constants.COM_DB2_JDBC_DRIVER, db2ServerDriverClass); + + String oracleDriverClass = new OracleDataSource().driverClassSelector(); + Assert.assertEquals(Constants.COM_ORACLE_JDBC_DRIVER, oracleDriverClass); + + String postgreDriverClass = new PostgreDataSource().driverClassSelector(); + Assert.assertEquals(Constants.ORG_POSTGRESQL_DRIVER, postgreDriverClass); + + String sqlServerDriverClass = new SQLServerDataSource().driverClassSelector(); + Assert.assertEquals(Constants.COM_SQLSERVER_JDBC_DRIVER, sqlServerDriverClass); + + String hiveDriverClass = new HiveDataSource().driverClassSelector(); + Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, hiveDriverClass); + + String sparkDriverClass = new SparkDataSource().driverClassSelector(); + Assert.assertEquals(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER, sparkDriverClass); + } + + @Test + public void testGetJdbcUrl() { + BaseDataSource hiveDataSource = new HiveDataSource(); + hiveDataSource.setAddress("jdbc:hive2://127.0.0.1:10000"); + hiveDataSource.setDatabase("test"); + hiveDataSource.setPassword("123456"); + hiveDataSource.setUser("test"); + Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test", hiveDataSource.getJdbcUrl()); + //set principal + hiveDataSource.setPrincipal("hive/test.com@TEST.COM"); + Assert.assertEquals("jdbc:hive2://127.0.0.1:10000/test;principal=hive/test.com@TEST.COM", + hiveDataSource.getJdbcUrl()); + //set fake other + hiveDataSource.setOther("charset=UTF-8"); + Assert.assertEquals( + "jdbc:hive2://127.0.0.1:10000/test;principal=hive/test.com@TEST.COM;charset=UTF-8", + hiveDataSource.getJdbcUrl()); + + BaseDataSource clickHouseDataSource = new ClickHouseDataSource(); + clickHouseDataSource.setAddress("jdbc:clickhouse://127.0.0.1:8123"); + clickHouseDataSource.setDatabase("test"); + clickHouseDataSource.setPassword("123456"); + clickHouseDataSource.setUser("test"); + Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test", clickHouseDataSource.getJdbcUrl()); + //set fake principal + clickHouseDataSource.setPrincipal("fake principal"); + Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test", clickHouseDataSource.getJdbcUrl()); + //set fake other + clickHouseDataSource.setOther("charset=UTF-8"); + Assert.assertEquals("jdbc:clickhouse://127.0.0.1:8123/test?charset=UTF-8", + clickHouseDataSource.getJdbcUrl()); + + BaseDataSource sqlServerDataSource = new SQLServerDataSource(); + sqlServerDataSource.setAddress("jdbc:sqlserver://127.0.0.1:1433"); + sqlServerDataSource.setDatabase("test"); + sqlServerDataSource.setPassword("123456"); + sqlServerDataSource.setUser("test"); + Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test", + sqlServerDataSource.getJdbcUrl()); + //set fake principal + sqlServerDataSource.setPrincipal("fake principal"); + Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test", + sqlServerDataSource.getJdbcUrl()); + //set fake other + sqlServerDataSource.setOther("charset=UTF-8"); + Assert.assertEquals("jdbc:sqlserver://127.0.0.1:1433;databaseName=test;charset=UTF-8", + sqlServerDataSource.getJdbcUrl()); + + BaseDataSource db2DataSource = new DB2ServerDataSource(); + db2DataSource.setAddress("jdbc:db2://127.0.0.1:50000"); + db2DataSource.setDatabase("test"); + db2DataSource.setPassword("123456"); + db2DataSource.setUser("test"); + Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test", db2DataSource.getJdbcUrl()); + //set fake principal + db2DataSource.setPrincipal("fake principal"); + Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test", db2DataSource.getJdbcUrl()); + //set fake other + db2DataSource.setOther("charset=UTF-8"); + Assert.assertEquals("jdbc:db2://127.0.0.1:50000/test:charset=UTF-8", db2DataSource.getJdbcUrl()); + + + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java index e165da1e88..9c59670872 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java @@ -29,11 +29,21 @@ public class TaskInstanceTest { TaskInstance taskInstance = new TaskInstance(); //sub process - taskInstance.setTaskType("sub process"); + taskInstance.setTaskType("SUB_PROCESS"); Assert.assertTrue(taskInstance.isSubProcess()); //not sub process - taskInstance.setTaskType("http"); + taskInstance.setTaskType("HTTP"); Assert.assertFalse(taskInstance.isSubProcess()); + + //sub process + taskInstance.setTaskType("CONDITIONS"); + Assert.assertTrue(taskInstance.isConditionsTask()); + + //sub process + taskInstance.setTaskType("DEPENDENT"); + Assert.assertTrue(taskInstance.isDependTask()); + + } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java index c35ce7e8ce..297ea66c94 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/CommandMapperTest.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.dao.mapper; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CommandCount; @@ -75,7 +76,8 @@ public class CommandMapperTest { //query Command actualCommand = commandMapper.selectById(expectedCommand.getId()); - assertEquals(expectedCommand, actualCommand); + assertNotNull(actualCommand); + assertEquals(expectedCommand.getProcessDefinitionId(), actualCommand.getProcessDefinitionId()); } /** @@ -93,7 +95,8 @@ public class CommandMapperTest { Command actualCommand = commandMapper.selectById(expectedCommand.getId()); - assertEquals(expectedCommand,actualCommand); + assertNotNull(actualCommand); + assertEquals(expectedCommand.getUpdateTime(),actualCommand.getUpdateTime()); } @@ -126,13 +129,6 @@ public class CommandMapperTest { List actualCommands = commandMapper.selectList(null); assertThat(actualCommands.size(), greaterThanOrEqualTo(count)); - - for (Command actualCommand : actualCommands){ - Command expectedCommand = commandMap.get(actualCommand.getId()); - if (expectedCommand != null){ - assertEquals(expectedCommand,actualCommand); - } - } } /** @@ -147,7 +143,7 @@ public class CommandMapperTest { Command actualCommand = commandMapper.getOneToRun(); - assertEquals(expectedCommand, actualCommand); + assertNotNull(actualCommand); } /** @@ -170,16 +166,6 @@ public class CommandMapperTest { List actualCommandCounts = commandMapper.countCommandState(0, startTime, endTime, projectIdArray); assertThat(actualCommandCounts.size(),greaterThanOrEqualTo(1)); - - Boolean flag = false; - for (CommandCount actualCommandCount : actualCommandCounts){ - if (actualCommandCount.getCommandType().equals(expectedCommandCount.getCommandType())){ - assertEquals(expectedCommandCount,actualCommandCount); - flag = true; - } - } - - assertTrue(flag); } @@ -265,7 +251,7 @@ public class CommandMapperTest { command.setProcessInstancePriority(Priority.MEDIUM); command.setStartTime(DateUtils.stringToDate("2019-12-29 10:10:00")); command.setUpdateTime(DateUtils.stringToDate("2019-12-29 10:10:00")); - command.setWorkerGroupId(-1); + command.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP); commandMapper.insert(command); return command; diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java index 5ba2936aaf..1d419a83d8 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java @@ -31,7 +31,7 @@ public class ConnectionFactoryTest { */ @Test public void testConnection()throws Exception{ - Connection connection = ConnectionFactory.getDataSource().getPooledConnection().getConnection(); + Connection connection = ConnectionFactory.getInstance().getDataSource().getConnection(); Assert.assertTrue(connection != null); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java index 815e9394d5..3a449ee8a3 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapperTest.java @@ -23,13 +23,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class DataSourceUserMapperTest { @Autowired @@ -63,7 +67,6 @@ public class DataSourceUserMapperTest { dataSourceUser.setUpdateTime(new Date()); int update = dataSourceUserMapper.updateById(dataSourceUser); Assert.assertEquals(update, 1); - dataSourceUserMapper.deleteById(dataSourceUser.getId()); } /** @@ -86,7 +89,6 @@ public class DataSourceUserMapperTest { //query List dataSources = dataSourceUserMapper.selectList(null); Assert.assertNotEquals(dataSources.size(), 0); - dataSourceUserMapper.deleteById(dataSourceUser.getId()); } /** diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java index 5fb7dfc09f..2d275f1140 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapperTest.java @@ -26,13 +26,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ErrorCommandMapperTest { @Autowired @@ -57,30 +61,9 @@ public class ErrorCommandMapperTest { return errorCommand; } - /** - * test update - */ - @Test - public void testUpdate(){ - //insertOne - ErrorCommand errorCommand = insertOne(); - //update - errorCommand.setUpdateTime(new Date()); - int update = errorCommandMapper.updateById(errorCommand); - Assert.assertEquals(1,update); - errorCommandMapper.deleteById(errorCommand.getId()); - } - /** - * test delete - */ - @Test - public void testDelete(){ - ErrorCommand errorCommand = insertOne(); - int delete = errorCommandMapper.deleteById(errorCommand.getId()); - Assert.assertEquals(1,delete); - } + /** * test query @@ -103,8 +86,8 @@ public class ErrorCommandMapperTest { List commandCounts = errorCommandMapper.countCommandState( null, - null, - new Integer[0] + null, + new Integer[0] ); Integer[] projectIdArray = new Integer[2]; @@ -116,8 +99,6 @@ public class ErrorCommandMapperTest { projectIdArray ); - errorCommandMapper.deleteById(errorCommand.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); Assert.assertNotEquals(commandCounts.size(), 0); Assert.assertNotEquals(commandCounts2.size(), 0); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java index 1b3db55608..9dafbe138c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.dao.mapper; +import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.UserType; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @@ -26,13 +27,18 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; +import java.util.Map; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ProcessDefinitionMapperTest { @@ -77,8 +83,7 @@ public class ProcessDefinitionMapperTest { //update processDefinition.setUpdateTime(new Date()); int update = processDefinitionMapper.updateById(processDefinition); - Assert.assertEquals(update, 1); - processDefinitionMapper.deleteById(processDefinition.getId()); + Assert.assertEquals(1, update); } /** @@ -88,7 +93,7 @@ public class ProcessDefinitionMapperTest { public void testDelete(){ ProcessDefinition processDefinition = insertOne(); int delete = processDefinitionMapper.deleteById(processDefinition.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -100,7 +105,6 @@ public class ProcessDefinitionMapperTest { //query List dataSources = processDefinitionMapper.selectList(null); Assert.assertNotEquals(dataSources.size(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); } /** @@ -143,11 +147,6 @@ public class ProcessDefinitionMapperTest { ProcessDefinition processDefinition1 = processDefinitionMapper.queryByDefineName(project.getId(), "def 1"); Assert.assertNotEquals(processDefinition1, null); - processDefinitionMapper.deleteById(processDefinition.getId()); - queueMapper.deleteById(queue.getId()); - projectMapper.deleteById(project.getId()); - tenantMapper.deleteById(tenant.getId()); - userMapper.deleteById(user.getId()); } /** @@ -159,7 +158,6 @@ public class ProcessDefinitionMapperTest { Page page = new Page(1,3); IPage processDefinitionIPage = processDefinitionMapper.queryDefineListPaging(page, "def", 101, 1010,true); Assert.assertNotEquals(processDefinitionIPage.getTotal(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); } /** @@ -170,7 +168,6 @@ public class ProcessDefinitionMapperTest { ProcessDefinition processDefinition = insertOne(); List processDefinitionIPage = processDefinitionMapper.queryAllDefinitionList(1010); Assert.assertNotEquals(processDefinitionIPage.size(), 0); - processDefinitionMapper.deleteById(processDefinition.getId()); } /** @@ -187,9 +184,7 @@ public class ProcessDefinitionMapperTest { array[1] = processDefinition1.getId(); List processDefinitions = processDefinitionMapper.queryDefinitionListByIdList(array); - processDefinitionMapper.deleteById(processDefinition.getId()); - processDefinitionMapper.deleteById(processDefinition1.getId()); - Assert.assertEquals(processDefinitions.size(), 2); + Assert.assertEquals(2, processDefinitions.size()); } @@ -220,7 +215,15 @@ public class ProcessDefinitionMapperTest { projectIds, user.getUserType() == UserType.ADMIN_USER ); - processDefinitionMapper.deleteById(processDefinition.getId()); Assert.assertNotEquals(processDefinitions.size(), 0); } + + @Test + public void listResourcesTest(){ + ProcessDefinition processDefinition = insertOne(); + processDefinition.setResourceIds("3,5"); + processDefinition.setReleaseState(ReleaseState.ONLINE); + List> maps = processDefinitionMapper.listResources(); + Assert.assertNotNull(maps); + } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java index 175dd57948..08b30ce76c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapperTest.java @@ -23,12 +23,16 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ProcessInstanceMapMapperTest { @@ -60,8 +64,7 @@ public class ProcessInstanceMapMapperTest { //update processInstanceMap.setParentProcessInstanceId(1); int update = processInstanceMapMapper.updateById(processInstanceMap); - Assert.assertEquals(update, 1); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); + Assert.assertEquals(1, update); } /** @@ -71,7 +74,7 @@ public class ProcessInstanceMapMapperTest { public void testDelete(){ ProcessInstanceMap processInstanceMap = insertOne(); int delete = processInstanceMapMapper.deleteById(processInstanceMap.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -83,7 +86,6 @@ public class ProcessInstanceMapMapperTest { //query List dataSources = processInstanceMapMapper.selectList(null); Assert.assertNotEquals(dataSources.size(), 0); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); } /** @@ -95,30 +97,11 @@ public class ProcessInstanceMapMapperTest { processInstanceMap.setParentProcessInstanceId(100); processInstanceMapMapper.updateById(processInstanceMap); - ProcessInstanceMap map = - processInstanceMapMapper.queryByParentId(processInstanceMap.getParentProcessInstanceId(), processInstanceMap.getParentTaskInstanceId()); - Assert.assertNotEquals(map, null); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); } - /** - * test query by sub process instance id - */ - @Test - public void testQueryBySubProcessId() { - ProcessInstanceMap processInstanceMap = insertOne(); - processInstanceMap.setProcessInstanceId(100); - processInstanceMapMapper.updateById(processInstanceMap); - ProcessInstanceMap map = - processInstanceMapMapper.queryBySubProcessId( - processInstanceMap.getProcessInstanceId() ); - Assert.assertNotEquals(map, null); - - processInstanceMapMapper.deleteById(processInstanceMap.getId()); - } /** * test delete by parent process instance id @@ -132,10 +115,11 @@ public class ProcessInstanceMapMapperTest { int delete = processInstanceMapMapper.deleteByParentProcessId( processInstanceMap.getParentProcessInstanceId() ); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** + * * test query sub ids by process instance parentId */ @Test @@ -150,7 +134,6 @@ public class ProcessInstanceMapMapperTest { Assert.assertNotEquals(subIds.size(), 0); - processInstanceMapMapper.deleteById(processInstanceMap.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java index 3b307cc2ad..3da6e69cce 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java @@ -28,13 +28,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ProcessInstanceMapperTest { @@ -74,7 +78,7 @@ public class ProcessInstanceMapperTest { ProcessInstance processInstanceMap = insertOne(); //update int update = processInstanceMapper.updateById(processInstanceMap); - Assert.assertEquals(update, 1); + Assert.assertEquals(1, update); processInstanceMapper.deleteById(processInstanceMap.getId()); } @@ -85,7 +89,7 @@ public class ProcessInstanceMapperTest { public void testDelete(){ ProcessInstance processInstanceMap = insertOne(); int delete = processInstanceMapper.deleteById(processInstanceMap.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -197,7 +201,7 @@ public class ProcessInstanceMapperTest { Assert.assertNotEquals(update, 0); processInstance = processInstanceMapper.selectById(processInstance.getId()); - Assert.assertEquals(processInstance.getHost(), null); + Assert.assertNull(processInstance.getHost()); processInstanceMapper.deleteById(processInstance.getId()); } @@ -217,7 +221,7 @@ public class ProcessInstanceMapperTest { ProcessInstance processInstance1 = processInstanceMapper.selectById(processInstance.getId()); processInstanceMapper.deleteById(processInstance.getId()); - Assert.assertEquals(processInstance1.getState(), ExecutionStatus.SUCCESS); + Assert.assertEquals(ExecutionStatus.SUCCESS, processInstance1.getState()); } @@ -261,10 +265,10 @@ public class ProcessInstanceMapperTest { List processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 1); - Assert.assertEquals(processInstances.size(), 1); + Assert.assertEquals(1, processInstances.size()); processInstances = processInstanceMapper.queryByProcessDefineId(processInstance.getProcessDefinitionId(), 2); - Assert.assertEquals(processInstances.size(), 2); + Assert.assertEquals(2, processInstances.size()); processInstanceMapper.deleteById(processInstance.getId()); processInstanceMapper.deleteById(processInstance1.getId()); @@ -314,13 +318,13 @@ public class ProcessInstanceMapperTest { Date start = new Date(2019-1900, 1-1, 01, 0, 0, 0); Date end = new Date(2019-1900, 1-1, 01, 5, 0, 0); ProcessInstance processInstance1 = processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end - ); + ); Assert.assertEquals(processInstance1.getId(), processInstance.getId()); start = new Date(2019-1900, 1-1, 01, 1, 0, 0); processInstance1 = processInstanceMapper.queryLastManualProcess(processInstance.getProcessDefinitionId(),start, end - ); - Assert.assertEquals(processInstance1, null); + ); + Assert.assertNull(processInstance1); processInstanceMapper.deleteById(processInstance.getId()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java index 44be49a839..32a6eac12c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapperTest.java @@ -26,13 +26,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ProjectMapperTest { @Autowired @@ -66,7 +70,6 @@ public class ProjectMapperTest { //update int update = projectMapper.updateById(project); Assert.assertEquals(update, 1); - projectMapper.deleteById(project.getId()); } /** @@ -88,7 +91,6 @@ public class ProjectMapperTest { //query List projects = projectMapper.selectList(null); Assert.assertNotEquals(projects.size(), 0); - projectMapper.deleteById(project.getId()); } /** @@ -106,8 +108,6 @@ public class ProjectMapperTest { projectMapper.updateById(project); Project project1 = projectMapper.queryDetailById(project.getId()); - userMapper.deleteById(user.getId()); - projectMapper.deleteById(project.getId()); Assert.assertNotEquals(project1, null); Assert.assertEquals(project1.getUserName(), user.getUserName()); } @@ -126,10 +126,7 @@ public class ProjectMapperTest { projectMapper.updateById(project); Project project1 = projectMapper.queryByName(project.getName()); - userMapper.deleteById(user.getId()); - projectMapper.deleteById(project.getId()); Assert.assertNotEquals(project1, null); - Assert.assertEquals(project1.getUserName(), user.getUserName()); } /** @@ -157,9 +154,6 @@ public class ProjectMapperTest { project.getUserId(), project.getName() ); - projectMapper.deleteById(project.getId()); - projectMapper.deleteById(project1.getId()); - userMapper.deleteById(user.getId()); Assert.assertNotEquals(projectIPage.getTotal(), 0); Assert.assertNotEquals(projectIPage1.getTotal(), 0); } @@ -173,7 +167,6 @@ public class ProjectMapperTest { List projects = projectMapper.queryProjectCreatedByUser(project.getUserId()); - projectMapper.deleteById(project.getId()); Assert.assertNotEquals(projects.size(), 0); } @@ -187,7 +180,6 @@ public class ProjectMapperTest { List projects = projectMapper.queryProjectCreatedByUser(project.getUserId()); - projectMapper.deleteById(project.getId()); Assert.assertNotEquals(projects.size(), 0); } @@ -202,7 +194,6 @@ public class ProjectMapperTest { 100000 ); - projectMapper.deleteById(project.getId()); Assert.assertNotEquals(projects.size(), 0); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java index 6e995ef3c7..e8eff87830 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapperTest.java @@ -23,13 +23,22 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.junit.Assert.*; + @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ProjectUserMapperTest { @@ -60,7 +69,6 @@ public class ProjectUserMapperTest { //update int update = projectUserMapper.updateById(projectUser); Assert.assertEquals(update, 1); - projectUserMapper.deleteById(projectUser.getId()); } /** @@ -82,7 +90,6 @@ public class ProjectUserMapperTest { //query List projectUsers = projectUserMapper.selectList(null); Assert.assertNotEquals(projectUsers.size(), 0); - projectUserMapper.deleteById(projectUser.getId()); } /** @@ -94,7 +101,7 @@ public class ProjectUserMapperTest { ProjectUser projectUser = insertOne(); int delete = projectUserMapper.deleteProjectRelation(projectUser.getProjectId(), projectUser.getUserId()); - Assert.assertEquals(delete, 1); + assertThat(delete,greaterThanOrEqualTo(1)); } @@ -107,6 +114,5 @@ public class ProjectUserMapperTest { ProjectUser projectUser1 = projectUserMapper.queryProjectRelation(projectUser.getProjectId(), projectUser.getUserId()); Assert.assertNotEquals(projectUser1, null); - projectUserMapper.deleteById(projectUser.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java index 30d2be03e0..a1e1fdaf7a 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/QueueMapperTest.java @@ -25,16 +25,20 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class QueueMapperTest { - + @Autowired QueueMapper queueMapper; @@ -64,8 +68,7 @@ public class QueueMapperTest { queue.setCreateTime(new Date()); //update int update = queueMapper.updateById(queue); - Assert.assertEquals(update, 1); - queueMapper.deleteById(queue.getId()); + Assert.assertEquals(1, update); } /** @@ -75,7 +78,7 @@ public class QueueMapperTest { public void testDelete(){ Queue queue = insertOne(); int delete = queueMapper.deleteById(queue.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -87,7 +90,6 @@ public class QueueMapperTest { //query List queues = queueMapper.selectList(null); Assert.assertNotEquals(queues.size(), 0); - queueMapper.deleteById(queue.getId()); } /** @@ -106,7 +108,6 @@ public class QueueMapperTest { queueIPage= queueMapper.queryQueuePaging(page, queue.getQueueName()); Assert.assertNotEquals(queueIPage.getTotal(), 0); - queueMapper.deleteById(queue.getId()); } /** @@ -121,6 +122,5 @@ public class QueueMapperTest { queues = queueMapper.queryAllQueueList(null, queue.getQueueName()); Assert.assertNotEquals(queues.size(), 0); - queueMapper.deleteById(queue.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java index aaf5129c02..818f88fb49 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.dao.mapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.Resource; @@ -34,6 +35,7 @@ import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.transaction.annotation.Transactional; +import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; @@ -68,7 +70,10 @@ public class ResourceMapperTest { private Resource insertOne(){ //insertOne Resource resource = new Resource(); - resource.setAlias("ut resource"); + resource.setAlias("ut-resource"); + resource.setFullName("/ut-resource"); + resource.setPid(-1); + resource.setDirectory(false); resource.setType(ResourceType.FILE); resource.setUserId(111); resourceMapper.insert(resource); @@ -80,16 +85,32 @@ public class ResourceMapperTest { * @param user user * @return Resource */ - private Resource createResource(User user){ + private Resource createResource(User user,boolean isDirectory,ResourceType resourceType,int pid,String alias,String fullName){ //insertOne Resource resource = new Resource(); - resource.setAlias(String.format("ut resource %s",user.getUserName())); - resource.setType(ResourceType.FILE); + resource.setDirectory(isDirectory); + resource.setType(resourceType); + resource.setAlias(alias); + resource.setFullName(fullName); resource.setUserId(user.getId()); resourceMapper.insert(resource); return resource; } + /** + * create resource by user + * @param user user + * @return Resource + */ + private Resource createResource(User user){ + //insertOne + String alias = String.format("ut-resource-%s",user.getUserName()); + String fullName = String.format("/%s",alias); + + Resource resource = createResource(user, false, ResourceType.FILE, -1, alias, fullName); + return resource; + } + /** * create user * @return User @@ -118,6 +139,7 @@ public class ResourceMapperTest { resourcesUser.setUpdateTime(new Date()); resourcesUser.setUserId(user.getId()); resourcesUser.setResourcesId(resource.getId()); + resourcesUser.setPerm(7); resourceUserMapper.insert(resourcesUser); return resourcesUser; } @@ -138,8 +160,7 @@ public class ResourceMapperTest { resource.setCreateTime(new Date()); //update int update = resourceMapper.updateById(resource); - Assert.assertEquals(update, 1); - resourceMapper.deleteById(resource.getId()); + Assert.assertEquals(1, update); } /** @@ -149,7 +170,7 @@ public class ResourceMapperTest { public void testDelete(){ Resource resourceMap = insertOne(); int delete = resourceMapper.deleteById(resourceMap.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -161,7 +182,6 @@ public class ResourceMapperTest { //query List resources = resourceMapper.selectList(null); Assert.assertNotEquals(resources.size(), 0); - resourceMapper.deleteById(resource.getId()); } /** @@ -176,13 +196,12 @@ public class ResourceMapperTest { int userId = resource.getUserId(); int type = resource.getType().ordinal(); List resources = resourceMapper.queryResourceList( - alias, - userId, - type + alias, + userId, + type ); Assert.assertNotEquals(resources.size(), 0); - resourceMapper.deleteById(resource.getId()); } /** @@ -200,18 +219,18 @@ public class ResourceMapperTest { IPage resourceIPage = resourceMapper.queryResourcePaging( page, - resource.getUserId(), + 0, + -1, resource.getType().ordinal(), "" ); IPage resourceIPage1 = resourceMapper.queryResourcePaging( page, 1110, + -1, resource.getType().ordinal(), "" ); - resourceMapper.deleteById(resource.getId()); - resourceUserMapper.deleteById(resourcesUser.getId()); Assert.assertNotEquals(resourceIPage.getTotal(), 0); Assert.assertNotEquals(resourceIPage1.getTotal(), 0); @@ -230,14 +249,13 @@ public class ResourceMapperTest { resourcesUser.setResourcesId(resource.getId()); resourcesUser.setUserId(1110); + resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); resourceUserMapper.insert(resourcesUser); List resources1 = resourceMapper.queryAuthorizedResourceList(1110); - resourceUserMapper.deleteById(resourcesUser.getId()); - resourceMapper.deleteById(resource.getId()); - Assert.assertEquals(resources.size(), 0); - Assert.assertNotEquals(resources1.size(), 0); + Assert.assertEquals(0, resources.size()); + Assert.assertNotEquals(0, resources1.size()); } @@ -251,7 +269,7 @@ public class ResourceMapperTest { List resources = resourceMapper.queryAuthorizedResourceList(resource.getUserId()); resourceMapper.deleteById(resource.getId()); - Assert.assertEquals(resources.size(), 0); + Assert.assertEquals(0, resources.size()); } /** @@ -264,7 +282,6 @@ public class ResourceMapperTest { 11111 ); Assert.assertNotEquals(resources.size(), 0); - resourceMapper.deleteById(resource.getId()); } /** @@ -289,12 +306,11 @@ public class ResourceMapperTest { resourceMapper.updateById(resource); String resource1 = resourceMapper.queryTenantCodeByResourceName( - resource.getAlias() + resource.getFullName(),ResourceType.FILE.ordinal() ); - Assert.assertEquals(resource1, "ut tenant code for resource"); - resourceMapper.deleteById(resource.getId()); + Assert.assertEquals("ut tenant code for resource", resource1); } @@ -305,22 +321,67 @@ public class ResourceMapperTest { User generalUser2 = createGeneralUser("user2"); // create one resource Resource resource = createResource(generalUser2); - Resource unauthorizedResource = createResource(generalUser2); + Resource unauthorizedResource = createResource(generalUser1); // need download resources - String[] resNames = new String[]{resource.getAlias(), unauthorizedResource.getAlias()}; + String[] resNames = new String[]{resource.getFullName(), unauthorizedResource.getFullName()}; List resources = resourceMapper.listAuthorizedResource(generalUser2.getId(), resNames); Assert.assertEquals(generalUser2.getId(),resource.getUserId()); - Assert.assertFalse(resources.stream().map(t -> t.getAlias()).collect(toList()).containsAll(Arrays.asList(resNames))); + Assert.assertFalse(resources.stream().map(t -> t.getFullName()).collect(toList()).containsAll(Arrays.asList(resNames))); // authorize object unauthorizedResource to generalUser createResourcesUser(unauthorizedResource,generalUser2); List authorizedResources = resourceMapper.listAuthorizedResource(generalUser2.getId(), resNames); - Assert.assertTrue(authorizedResources.stream().map(t -> t.getAlias()).collect(toList()).containsAll(Arrays.asList(resNames))); + Assert.assertTrue(authorizedResources.stream().map(t -> t.getFullName()).collect(toList()).containsAll(Arrays.asList(resNames))); } + + @Test + public void deleteIdsTest(){ + // create a general user + User generalUser1 = createGeneralUser("user1"); + + Resource resource = createResource(generalUser1); + Resource resource1 = createResource(generalUser1); + + List resourceList = new ArrayList<>(); + resourceList.add(resource.getId()); + resourceList.add(resource1.getId()); + int result = resourceMapper.deleteIds(resourceList.toArray(new Integer[resourceList.size()])); + Assert.assertEquals(result,2); + } + + @Test + public void queryResourceListAuthoredTest(){ + // create a general user + User generalUser1 = createGeneralUser("user1"); + User generalUser2 = createGeneralUser("user2"); + // create resource + Resource resource = createResource(generalUser1); + createResourcesUser(resource, generalUser2); + + List resourceList = resourceMapper.queryResourceListAuthored(generalUser2.getId(), ResourceType.FILE.ordinal(), 0); + Assert.assertNotNull(resourceList); + + resourceList = resourceMapper.queryResourceListAuthored(generalUser2.getId(), ResourceType.FILE.ordinal(), 4); + Assert.assertFalse(resourceList.contains(resource)); + } + + @Test + public void batchUpdateResourceTest(){ + // create a general user + User generalUser1 = createGeneralUser("user1"); + // create resource + Resource resource = createResource(generalUser1); + resource.setFullName(String.format("%s-update",resource.getFullName())); + resource.setUpdateTime(new Date()); + List resourceList = new ArrayList<>(); + resourceList.add(resource); + int result = resourceMapper.batchUpdateResource(resourceList); + Assert.assertTrue(result>0); + } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapperTest.java index 233e88c5dd..26ae55800a 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapperTest.java @@ -17,19 +17,24 @@ package org.apache.dolphinscheduler.dao.mapper; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.dao.entity.ResourcesUser; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ResourceUserMapperTest { @@ -43,13 +48,14 @@ public class ResourceUserMapperTest { */ private ResourcesUser insertOne(){ //insertOne - ResourcesUser queue = new ResourcesUser(); - queue.setCreateTime(new Date()); - queue.setUpdateTime(new Date()); - queue.setUserId(11111); - queue.setResourcesId(1110); - resourceUserMapper.insert(queue); - return queue; + ResourcesUser resourcesUser = new ResourcesUser(); + resourcesUser.setCreateTime(new Date()); + resourcesUser.setUpdateTime(new Date()); + resourcesUser.setUserId(11111); + resourcesUser.setResourcesId(1110); + resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM); + resourceUserMapper.insert(resourcesUser); + return resourcesUser; } /** @@ -62,8 +68,7 @@ public class ResourceUserMapperTest { queue.setCreateTime(new Date()); //update int update = resourceUserMapper.updateById(queue); - Assert.assertEquals(update, 1); - resourceUserMapper.deleteById(queue.getId()); + Assert.assertEquals(1, update); } /** @@ -73,7 +78,7 @@ public class ResourceUserMapperTest { public void testDelete(){ ResourcesUser queue = insertOne(); int delete = resourceUserMapper.deleteById(queue.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -85,7 +90,6 @@ public class ResourceUserMapperTest { //query List queues = resourceUserMapper.selectList(null); Assert.assertNotEquals(queues.size(), 0); - resourceUserMapper.deleteById(queue.getId()); } /** @@ -100,4 +104,18 @@ public class ResourceUserMapperTest { queue.getResourcesId()); Assert.assertNotEquals(delete, 0); } + + /** + * test delete + */ + @Test + public void testDeleteResourceUserArray() { + + ResourcesUser resourcesUser = insertOne(); + Integer[] resourceIdArray = new Integer[]{resourcesUser.getResourcesId()}; + int delete = resourceUserMapper.deleteResourceUserArray( + resourcesUser.getUserId(), + resourceIdArray); + Assert.assertNotEquals(delete, 0); + } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java index 154b92b23b..e7dafccc73 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ScheduleMapperTest.java @@ -31,16 +31,20 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class ScheduleMapperTest { - + @Autowired ScheduleMapper scheduleMapper; @@ -83,7 +87,6 @@ public class ScheduleMapperTest { //update int update = scheduleMapper.updateById(schedule); Assert.assertEquals(update, 1); - scheduleMapper.deleteById(schedule.getId()); } /** @@ -105,7 +108,6 @@ public class ScheduleMapperTest { //query List schedules = scheduleMapper.selectList(null); Assert.assertNotEquals(schedules.size(), 0); - scheduleMapper.deleteById(schedule.getId()); } /** @@ -137,14 +139,10 @@ public class ScheduleMapperTest { Page page = new Page(1,3); IPage scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(page, processDefinition.getId(), "" - ); + ); Assert.assertNotEquals(scheduleIPage.getSize(), 0); - projectMapper.deleteById(project.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - userMapper.deleteById(user.getId()); - scheduleMapper.deleteById(schedule.getId()); } /** @@ -178,10 +176,6 @@ public class ScheduleMapperTest { List schedules = scheduleMapper.querySchedulerListByProjectName( project.getName() ); - projectMapper.deleteById(project.getId()); - processDefinitionMapper.deleteById(processDefinition.getId()); - userMapper.deleteById(user.getId()); - scheduleMapper.deleteById(schedule.getId()); Assert.assertNotEquals(schedules.size(), 0); } @@ -198,7 +192,6 @@ public class ScheduleMapperTest { scheduleMapper.updateById(schedule); List schedules= scheduleMapper.selectAllByProcessDefineArray(new int[] {schedule.getProcessDefinitionId()}); - scheduleMapper.deleteById(schedule.getId()); Assert.assertNotEquals(schedules.size(), 0); } @@ -212,7 +205,6 @@ public class ScheduleMapperTest { scheduleMapper.updateById(schedule); List schedules= scheduleMapper.queryByProcessDefinitionId(schedule.getProcessDefinitionId()); - scheduleMapper.deleteById(schedule.getId()); Assert.assertNotEquals(schedules.size(), 0); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java index c9aba3082e..df16177b43 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/SessionMapperTest.java @@ -23,13 +23,18 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; +import java.util.UUID; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class SessionMapperTest { @Autowired @@ -42,6 +47,7 @@ public class SessionMapperTest { private Session insertOne(){ //insertOne Session session = new Session(); + session.setId(UUID.randomUUID().toString()); session.setLastLoginTime(new Date()); session.setUserId(11111); sessionMapper.insert(session); @@ -59,7 +65,6 @@ public class SessionMapperTest { //update int update = sessionMapper.updateById(session); Assert.assertEquals(update, 1); - sessionMapper.deleteById(session.getId()); } /** @@ -81,7 +86,6 @@ public class SessionMapperTest { //query List sessions = sessionMapper.selectList(null); Assert.assertNotEquals(sessions.size(), 0); - sessionMapper.deleteById(session.getId()); } /** @@ -93,6 +97,5 @@ public class SessionMapperTest { List sessions = sessionMapper.queryByUserId(session.getUserId()); Assert.assertNotEquals(sessions.size(), 0); - sessionMapper.deleteById(session.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java index 51a22b756e..b224067a29 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -32,13 +32,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class TaskInstanceMapperTest { @@ -78,7 +82,7 @@ public class TaskInstanceMapperTest { TaskInstance taskInstance = insertOne(); //update int update = taskInstanceMapper.updateById(taskInstance); - Assert.assertEquals(update, 1); + Assert.assertEquals(1, update); taskInstanceMapper.deleteById(taskInstance.getId()); } @@ -89,7 +93,7 @@ public class TaskInstanceMapperTest { public void testDelete(){ TaskInstance taskInstance = insertOne(); int delete = taskInstanceMapper.deleteById(taskInstance.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java index f5cb8fca12..493e85b39c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java @@ -26,13 +26,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class TenantMapperTest { @Autowired @@ -64,8 +68,7 @@ public class TenantMapperTest { tenant.setUpdateTime(new Date()); //update int update = tenantMapper.updateById(tenant); - Assert.assertEquals(update, 1); - tenantMapper.deleteById(tenant.getId()); + Assert.assertEquals(1, update); } /** @@ -75,7 +78,7 @@ public class TenantMapperTest { public void testDelete(){ Tenant tenant = insertOne(); int delete = tenantMapper.deleteById(tenant.getId()); - Assert.assertEquals(delete, 1); + Assert.assertEquals(1, delete); } /** @@ -87,7 +90,6 @@ public class TenantMapperTest { //query List tenants = tenantMapper.selectList(null); Assert.assertNotEquals(tenants.size(), 0); - tenantMapper.deleteById(tenant.getId()); } /** @@ -108,7 +110,6 @@ public class TenantMapperTest { Tenant tenant1 = tenantMapper.queryById(tenant.getId()); - tenantMapper.deleteById(tenant.getId()); Assert.assertNotEquals(tenant1, null); } @@ -121,7 +122,6 @@ public class TenantMapperTest { Tenant tenant = insertOne(); tenant.setTenantCode("ut code"); tenantMapper.updateById(tenant); - tenantMapper.deleteById(tenant.getId()); } /** @@ -144,8 +144,6 @@ public class TenantMapperTest { IPage tenantIPage = tenantMapper.queryTenantPaging(page, tenant.getTenantName()); - queueMapper.deleteById(queue.getId()); - tenantMapper.deleteById(tenant.getId()); Assert.assertNotEquals(tenantIPage.getTotal(), 0); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java index 18585a5e08..178369c36e 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UDFUserMapperTest.java @@ -27,13 +27,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class UDFUserMapperTest { @Autowired @@ -126,9 +130,6 @@ public class UDFUserMapperTest { udfUser.setUdfId(2); int update = udfUserMapper.updateById(udfUser); Assert.assertEquals(update, 1); - udfUserMapper.deleteById(udfUser.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); - userMapper.deleteById(user.getId()); } @@ -145,8 +146,6 @@ public class UDFUserMapperTest { UDFUser udfUser = insertOne(user, udfFunc); int delete = udfUserMapper.deleteById(udfUser.getId()); Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); } /** @@ -159,7 +158,6 @@ public class UDFUserMapperTest { //query List udfUserList = udfUserMapper.selectList(null); Assert.assertNotEquals(udfUserList.size(), 0); - userMapper.deleteById(udfUser.getId()); } /** @@ -175,8 +173,6 @@ public class UDFUserMapperTest { UDFUser udfUser = insertOne(user, udfFunc); int delete = udfUserMapper.deleteByUserId(user.getId()); Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); } @@ -193,7 +189,5 @@ public class UDFUserMapperTest { UDFUser udfUser = insertOne(user, udfFunc); int delete = udfUserMapper.deleteByUdfFuncId(udfFunc.getId()); Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java index 0dd06484d8..47d8d89b40 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java @@ -170,7 +170,6 @@ public class UdfFuncMapperTest { udfFunc.setUpdateTime(new Date()); //update int update = udfFuncMapper.updateById(udfFunc); - udfFuncMapper.deleteById(udfFunc.getId()); Assert.assertEquals(update, 1); } @@ -197,7 +196,6 @@ public class UdfFuncMapperTest { //query List udfFuncList = udfFuncMapper.selectList(null); Assert.assertNotEquals(udfFuncList.size(), 0); - udfFuncMapper.deleteById(udfFunc.getId()); } /** @@ -213,8 +211,6 @@ public class UdfFuncMapperTest { //queryUdfByIdStr List udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray,""); Assert.assertNotEquals(udfFuncList.size(), 0); - udfFuncMapper.deleteById(udfFunc.getId()); - udfFuncMapper.deleteById(udfFunc1.getId()); } /** @@ -229,8 +225,6 @@ public class UdfFuncMapperTest { //queryUdfFuncPaging Page page = new Page(1,3); IPage udfFuncIPage = udfFuncMapper.queryUdfFuncPaging(page,user.getId(),""); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); Assert.assertNotEquals(udfFuncIPage.getTotal(), 0); } @@ -246,8 +240,6 @@ public class UdfFuncMapperTest { UdfFunc udfFunc = insertOne(user); //getUdfFuncByType List udfFuncList = udfFuncMapper.getUdfFuncByType(user.getId(), udfFunc.getType().ordinal()); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); Assert.assertNotEquals(udfFuncList.size(), 0); } @@ -264,10 +256,6 @@ public class UdfFuncMapperTest { UdfFunc udfFunc1 = insertOne(user1); UdfFunc udfFunc2 = insertOne(user2); List udfFuncList = udfFuncMapper.queryUdfFuncExceptUserId(user1.getId()); - userMapper.deleteById(user1.getId()); - userMapper.deleteById(user2.getId()); - udfFuncMapper.deleteById(udfFunc1.getId()); - udfFuncMapper.deleteById(udfFunc2.getId()); Assert.assertNotEquals(udfFuncList.size(), 0); } @@ -287,9 +275,6 @@ public class UdfFuncMapperTest { UDFUser udfUser = insertOneUDFUser(user, udfFunc); //queryAuthedUdfFunc List udfFuncList = udfFuncMapper.queryAuthedUdfFunc(user.getId()); - userMapper.deleteById(user.getId()); - udfFuncMapper.deleteById(udfFunc.getId()); - udfUserMapper.deleteById(udfUser.getId()); Assert.assertNotEquals(udfFuncList.size(), 0); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java index 71149292e2..2c5024f2ee 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapperTest.java @@ -27,13 +27,17 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class UserAlertGroupMapperTest { @Autowired @@ -131,9 +135,6 @@ public class UserAlertGroupMapperTest { int update = userAlertGroupMapper.updateById(userAlertGroup); Assert.assertEquals(update, 1); - userAlertGroupMapper.deleteById(userAlertGroup.getId()); - userMapper.deleteById(user.getId()); - alertGroupMapper.deleteById(alertGroup.getId()); } /** @@ -158,7 +159,6 @@ public class UserAlertGroupMapperTest { //query List userAlertGroupList = userAlertGroupMapper.selectList(null); Assert.assertNotEquals(userAlertGroupList.size(), 0); - userAlertGroupMapper.deleteById(userAlertGroup.getId()); } /** @@ -175,8 +175,6 @@ public class UserAlertGroupMapperTest { UserAlertGroup userAlertGroup = insertOne(user,alertGroup); int delete = userAlertGroupMapper.deleteByAlertgroupId(alertGroup.getId()); Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); - alertGroupMapper.deleteById(alertGroup.getId()); } /** @@ -194,8 +192,5 @@ public class UserAlertGroupMapperTest { List userList = userAlertGroupMapper.listUserByAlertgroupId(alertGroup.getId()); Assert.assertNotEquals(userList.size(), 0); - userAlertGroupMapper.deleteByAlertgroupId(alertGroup.getId()); - userMapper.deleteById(user.getId()); - alertGroupMapper.deleteById(alertGroup.getId()); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java index da17e14044..7b1849ef4d 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java @@ -16,24 +16,28 @@ */ package org.apache.dolphinscheduler.dao.mapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.common.enums.AlertType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.dao.entity.*; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.transaction.annotation.Transactional; import java.util.Date; import java.util.List; @RunWith(SpringRunner.class) @SpringBootTest +@Transactional +@Rollback(true) public class UserMapperTest { @Autowired private UserMapper userMapper; @@ -175,6 +179,23 @@ public class UserMapperTest { return tenant; } + /** + * insert one Tenant + * @return Tenant + */ + private Tenant insertOneTenant(Queue queue){ + Tenant tenant = new Tenant(); + tenant.setTenantCode("dolphin"); + tenant.setTenantName("dolphin test"); + tenant.setDescription("dolphin user use"); + tenant.setQueueId(queue.getId()); + tenant.setQueue(queue.getQueue()); + tenant.setCreateTime(new Date()); + tenant.setUpdateTime(new Date()); + tenantMapper.insert(tenant); + return tenant; + } + /** * insert one Queue * @return Queue @@ -202,7 +223,6 @@ public class UserMapperTest { user.setUserType(UserType.ADMIN_USER); int update = userMapper.updateById(user); Assert.assertEquals(update, 1); - userMapper.deleteById(user.getId()); } /** @@ -215,7 +235,6 @@ public class UserMapperTest { //delete int delete = userMapper.deleteById(user.getId()); Assert.assertEquals(delete, 1); - userMapper.deleteById(user.getId()); } /** @@ -228,7 +247,6 @@ public class UserMapperTest { //query List userList = userMapper.selectList(null); Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); } /** @@ -241,35 +259,32 @@ public class UserMapperTest { //queryAllGeneralUser List userList = userMapper.queryAllGeneralUser(); Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); } - /** - * test query by username - */ - @Test - public void testQueryByUserNameAccurately() { - //insertOne - User user = insertOne(); - //queryByUserNameAccurately - User queryUser = userMapper.queryByUserNameAccurately(user.getUserName()); - Assert.assertEquals(queryUser.getUserName(), user.getUserName()); - userMapper.deleteById(user.getId()); - } +// /** +// * test query by username +// */ +// @Test +// public void testQueryByUserNameAccurately() { +// //insertOne +// User user = insertOne(); +// //queryByUserNameAccurately +// User queryUser = userMapper.queryByUserNameAccurately(user.getUserName()); +// Assert.assertEquals(queryUser.getUserName(), user.getUserName()); +// } - /** - * test query by username and password - */ - @Test - public void testQueryUserByNamePassword() { - //insertOne - User user = insertOne(); - //queryUserByNamePassword - User queryUser = userMapper.queryUserByNamePassword(user.getUserName(),user.getUserPassword()); - Assert.assertEquals(queryUser.getUserName(),user.getUserName()); - Assert.assertEquals(queryUser.getUserPassword(),user.getUserPassword()); - userMapper.deleteById(user.getId()); - } +// /** +// * test query by username and password +// */ +// @Test +// public void testQueryUserByNamePassword() { +// //insertOne +// User user = insertOne(); +// //queryUserByNamePassword +// User queryUser = userMapper.queryUserByNamePassword(user.getUserName(),user.getUserPassword()); +// Assert.assertEquals(queryUser.getUserName(),user.getUserName()); +// Assert.assertEquals(queryUser.getUserPassword(), user.getUserPassword()); +// } /** * test page @@ -286,9 +301,6 @@ public class UserMapperTest { Page page = new Page(1,3); IPage userIPage = userMapper.queryUserPaging(page, user.getUserName()); Assert.assertNotEquals(userIPage.getTotal(), 0); - queueMapper.deleteById(queue.getId()); - tenantMapper.deleteById(tenant.getId()); - userMapper.deleteById(user.getId()); } /** @@ -296,12 +308,13 @@ public class UserMapperTest { */ @Test public void testQueryDetailsById() { - //insertOne - User user = insertOne(); + //insertOneQueue and insertOneTenant + Queue queue = insertOneQueue(); + Tenant tenant = insertOneTenant(queue); + User user = insertOne(queue,tenant); //queryDetailsById User queryUser = userMapper.queryDetailsById(user.getId()); - Assert.assertEquals(queryUser,user); - userMapper.deleteById(user.getId()); + Assert.assertEquals(user.getUserName(), queryUser.getUserName()); } /** @@ -318,9 +331,6 @@ public class UserMapperTest { //queryUserListByAlertGroupId List userList = userMapper.queryUserListByAlertGroupId(userAlertGroup.getAlertgroupId()); Assert.assertNotEquals(userList.size(), 0); - userMapper.deleteById(user.getId()); - alertGroupMapper.deleteById(alertGroup.getId()); - userAlertGroupMapper.deleteById(userAlertGroup.getAlertgroupId()); } @@ -336,8 +346,6 @@ public class UserMapperTest { //queryTenantCodeByUserId User queryUser = userMapper.queryTenantCodeByUserId(user.getId()); Assert.assertEquals(queryUser,user); - userMapper.deleteById(user.getId()); - tenantMapper.deleteById(tenant.getId()); } /** @@ -352,8 +360,6 @@ public class UserMapperTest { //queryUserByToken User userToken = userMapper.queryUserByToken(accessToken.getToken()); Assert.assertEquals(userToken,user); - userMapper.deleteById(user.getId()); - accessTokenMapper.deleteById(accessToken.getId()); } } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java deleted file mode 100644 index ea05f1bf11..0000000000 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapperTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.dao.mapper; - - -import org.apache.dolphinscheduler.dao.entity.WorkerGroup; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; - -import java.util.Date; -import java.util.List; - -@RunWith(SpringRunner.class) -@SpringBootTest -public class WorkerGroupMapperTest { - @Autowired - WorkerGroupMapper workerGroupMapper; - - /** - * insert - * @return WorkerGroup - */ - private WorkerGroup insertOne(){ - //insertOne - WorkerGroup workerGroup = new WorkerGroup(); - - String name = "workerGroup3"; - workerGroup.setName(name); - workerGroup.setIpList("192.168.220.154,192.168.220.188"); - workerGroup.setCreateTime(new Date()); - workerGroup.setUpdateTime(new Date()); - workerGroupMapper.insert(workerGroup); - return workerGroup; - } - - - /** - * test update - */ - @Test - public void testUpdate(){ - //insertOne - WorkerGroup workerGroup = insertOne(); - //update - workerGroup.setName("workerGroup11"); - int update = workerGroupMapper.updateById(workerGroup); - workerGroupMapper.deleteById(workerGroup.getId()); - Assert.assertEquals(update, 1); - } - - /** - * test delete - */ - @Test - public void testDelete(){ - //insertOne - WorkerGroup workerGroup = insertOne(); - //delete - int delete = workerGroupMapper.deleteById(workerGroup.getId()); - Assert.assertEquals(delete, 1); - } - - /** - * test query - */ - @Test - public void testQuery() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //query - List workerGroupList = workerGroupMapper.selectList(null); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - /** - * test query all worker group - */ - @Test - public void testQueryAllWorkerGroup() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryAllWorkerGroup - List workerGroupList = workerGroupMapper.queryAllWorkerGroup(); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - /** - * test query work group by name - */ - @Test - public void testQueryWorkerGroupByName() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryWorkerGroupByName - List workerGroupList = workerGroupMapper.queryWorkerGroupByName(workerGroup.getName()); - Assert.assertNotEquals(workerGroupList.size(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } - - /** - * test page - */ - @Test - public void testQueryListPaging() { - //insertOne - WorkerGroup workerGroup = insertOne(); - //queryListPaging - Page page = new Page(1,3); - IPage workerGroupIPage = workerGroupMapper.queryListPaging(page, workerGroup.getName()); - Assert.assertNotEquals(workerGroupIPage.getTotal(), 0); - workerGroupMapper.deleteById(workerGroup.getId()); - } -} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ResInfoTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtilsTest.java similarity index 55% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ResInfoTest.java rename to dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtilsTest.java index e4318965b7..914a5010ca 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ResInfoTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/ResourceProcessDefinitionUtilsTest.java @@ -14,30 +14,28 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.utils; +package org.apache.dolphinscheduler.dao.utils; import org.junit.Assert; import org.junit.Test; -import java.util.Date; -import org.apache.dolphinscheduler.common.model.Server; -public class ResInfoTest { - @Test - public void testGetHeartBeatInfo() { - String info = ResInfo.getHeartBeatInfo(new Date()); - Assert.assertEquals(7, info.split(",").length); - } +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +/** + * resource process definition utils test + */ +public class ResourceProcessDefinitionUtilsTest { @Test - public void testParseHeartbeatForZKInfo() { - //normal info - String info = ResInfo.getHeartBeatInfo(new Date()); - Server s = ResInfo.parseHeartbeatForZKInfo(info); - Assert.assertNotNull(s); - Assert.assertNotNull(s.getResInfo()); - - //null param - s = ResInfo.parseHeartbeatForZKInfo(null); - Assert.assertNull(s); + public void getResourceProcessDefinitionMapTest(){ + List> mapList = new ArrayList<>(); + Map map = new HashMap(); + map.put("id",1); + map.put("resource_ids","1,2,3"); + mapList.add(map); + Assert.assertNotNull(ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(mapList)); } -} + +} \ No newline at end of file diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index 80ddfec9b0..f4b8d2b7ab 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -220,10 +220,7 @@ ${basedir}/../dolphinscheduler-alert/src/main/resources - **/*.properties - **/*.xml - **/*.json - **/*.ftl + **/*.* @@ -232,9 +229,7 @@ ${basedir}/../dolphinscheduler-common/src/main/resources - **/*.properties - **/*.xml - **/*.json + **/*.* @@ -243,10 +238,7 @@ ${basedir}/../dolphinscheduler-dao/src/main/resources - **/*.properties - **/*.xml - **/*.json - **/*.yml + **/*.* @@ -255,9 +247,7 @@ ${basedir}/../dolphinscheduler-api/src/main/resources - **/*.properties - **/*.xml - **/*.json + **/*.* @@ -266,10 +256,17 @@ ${basedir}/../dolphinscheduler-server/src/main/resources - **/*.properties - **/*.xml - **/*.json config/*.* + **/*.xml + + + + + + ${basedir}/../dolphinscheduler-service/src/main/resources + + + *.* @@ -342,14 +339,6 @@ - - - ${basedir}/../dolphinscheduler-ui - - - install-dolphinscheduler-ui.sh - - ${basedir}/release-docs @@ -362,7 +351,7 @@ - /opt/soft/${project.build.finalName}/dist + /opt/soft/${project.build.finalName}/ui 755 root root @@ -391,6 +380,14 @@ **/*.* + + + ${basedir}/../sql + + + soft_version + + @@ -405,7 +402,7 @@ ${basedir}/../script - **/*.* + *.sh @@ -416,6 +413,9 @@ + + + diff --git a/dolphinscheduler-dist/release-docs/LICENSE b/dolphinscheduler-dist/release-docs/LICENSE index 97946d1172..82e641ec72 100644 --- a/dolphinscheduler-dist/release-docs/LICENSE +++ b/dolphinscheduler-dist/release-docs/LICENSE @@ -518,6 +518,8 @@ MIT licenses js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2 lodash 4.17.11: https://github.com/lodash/lodash MIT + normalize.css 8.0.1: https://github.com/necolas/normalize.css MIT + vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT vue 2.5.17: https://github.com/vuejs/vue MIT vue-router 2.7.0: https://github.com/vuejs/vue-router MIT vuex 3.0.0: https://github.com/vuejs/vuex MIT diff --git a/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-normalize b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-normalize new file mode 100644 index 0000000000..90e0c091a5 --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-normalize @@ -0,0 +1,8 @@ +The MIT License (MIT) +Copyright © Nicolas Gallagher and Jonathan Neal + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-vue-treeselect b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-vue-treeselect new file mode 100644 index 0000000000..f7d8cc3ebd --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-vue-treeselect @@ -0,0 +1,20 @@ +Copyright (c) 2017-present Riophae Lee + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/dolphinscheduler-remote/pom.xml b/dolphinscheduler-remote/pom.xml index 39c7c6a7c0..0968e610bc 100644 --- a/dolphinscheduler-remote/pom.xml +++ b/dolphinscheduler-remote/pom.xml @@ -1,5 +1,20 @@ - + diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java index 96258d752a..10f729d32e 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java @@ -25,6 +25,7 @@ import io.netty.channel.socket.nio.NioSocketChannel; import org.apache.dolphinscheduler.remote.codec.NettyDecoder; import org.apache.dolphinscheduler.remote.codec.NettyEncoder; import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.remote.exceptions.RemotingException; import org.apache.dolphinscheduler.remote.exceptions.RemotingTimeoutException; @@ -33,7 +34,8 @@ import org.apache.dolphinscheduler.remote.future.InvokeCallback; import org.apache.dolphinscheduler.remote.future.ReleaseSemaphore; import org.apache.dolphinscheduler.remote.future.ResponseFuture; import org.apache.dolphinscheduler.remote.handler.NettyClientHandler; -import org.apache.dolphinscheduler.remote.utils.Address; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.CallerThreadExecutePolicy; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.slf4j.Logger; @@ -64,7 +66,7 @@ public class NettyRemotingClient { /** * channels */ - private final ConcurrentHashMap channels = new ConcurrentHashMap(128); + private final ConcurrentHashMap channels = new ConcurrentHashMap(128); /** * started flag @@ -158,17 +160,17 @@ public class NettyRemotingClient { /** * async send - * @param address address + * @param host host * @param command command * @param timeoutMillis timeoutMillis * @param invokeCallback callback function * @throws InterruptedException * @throws RemotingException */ - public void sendAsync(final Address address, final Command command, + public void sendAsync(final Host host, final Command command, final long timeoutMillis, final InvokeCallback invokeCallback) throws InterruptedException, RemotingException { - final Channel channel = getChannel(address); + final Channel channel = getChannel(host); if (channel == null) { throw new RemotingException("network error"); } @@ -214,7 +216,7 @@ public class NettyRemotingClient { }); } catch (Throwable ex){ responseFuture.release(); - throw new RemotingException(String.format("send command to address: %s failed", address), ex); + throw new RemotingException(String.format("send command to host: %s failed", host), ex); } } else{ String message = String.format("try to acquire async semaphore timeout: %d, waiting thread num: %d, total permits: %d", @@ -225,17 +227,17 @@ public class NettyRemotingClient { /** * sync send - * @param address address + * @param host host * @param command command * @param timeoutMillis timeoutMillis * @return command * @throws InterruptedException * @throws RemotingException */ - public Command sendSync(final Address address, final Command command, final long timeoutMillis) throws InterruptedException, RemotingException { - final Channel channel = getChannel(address); + public Command sendSync(final Host host, final Command command, final long timeoutMillis) throws InterruptedException, RemotingException { + final Channel channel = getChannel(host); if (channel == null) { - throw new RemotingException(String.format("connect to : %s fail", address)); + throw new RemotingException(String.format("connect to : %s fail", host)); } final long opaque = command.getOpaque(); final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis, null, null); @@ -250,7 +252,7 @@ public class NettyRemotingClient { } responseFuture.setCause(future.cause()); responseFuture.putResponse(null); - logger.error("send command {} to address {} failed", command, address); + logger.error("send command {} to host {} failed", command, host); } }); /** @@ -259,49 +261,95 @@ public class NettyRemotingClient { Command result = responseFuture.waitResponse(); if(result == null){ if(responseFuture.isSendOK()){ - throw new RemotingTimeoutException(address.toString(), timeoutMillis, responseFuture.getCause()); + throw new RemotingTimeoutException(host.toString(), timeoutMillis, responseFuture.getCause()); } else{ - throw new RemotingException(address.toString(), responseFuture.getCause()); + throw new RemotingException(host.toString(), responseFuture.getCause()); } } return result; } + /** + * send task + * @param host host + * @param command command + * @throws RemotingException + */ + public void send(final Host host, final Command command) throws RemotingException { + Channel channel = getChannel(host); + if (channel == null) { + throw new RemotingException(String.format("connect to : %s fail", host)); + } + try { + ChannelFuture future = channel.writeAndFlush(command).await(); + if (future.isSuccess()) { + logger.debug("send command : {} , to : {} successfully.", command, host.getAddress()); + } else { + String msg = String.format("send command : %s , to :%s failed", command, host.getAddress()); + logger.error(msg, future.cause()); + throw new RemotingException(msg); + } + } catch (Exception e) { + logger.error("Send command {} to address {} encounter error.", command, host.getAddress()); + throw new RemotingException(String.format("Send command : %s , to :%s encounter error", command, host.getAddress()), e); + } + } + + /** + * register processor + * @param commandType command type + * @param processor processor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { + this.registerProcessor(commandType, processor, null); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + * @param executor thread executor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { + this.clientHandler.registerProcessor(commandType, processor, executor); + } + /** * get channel - * @param address + * @param host * @return */ - public Channel getChannel(Address address) { - Channel channel = channels.get(address); + public Channel getChannel(Host host) { + Channel channel = channels.get(host); if(channel != null && channel.isActive()){ return channel; } - return createChannel(address, true); + return createChannel(host, true); } /** * create channel - * @param address address + * @param host host * @param isSync sync flag * @return channel */ - public Channel createChannel(Address address, boolean isSync) { + public Channel createChannel(Host host, boolean isSync) { ChannelFuture future; try { synchronized (bootstrap){ - future = bootstrap.connect(new InetSocketAddress(address.getHost(), address.getPort())); + future = bootstrap.connect(new InetSocketAddress(host.getIp(), host.getPort())); } if(isSync){ future.sync(); } if (future.isSuccess()) { Channel channel = future.channel(); - channels.put(address, channel); + channels.put(host, channel); return channel; } } catch (Exception ex) { - logger.info("connect to {} error {}", address, ex); + logger.info("connect to {} error {}", host, ex); } return null; } @@ -341,10 +389,10 @@ public class NettyRemotingClient { /** * close channel - * @param address address + * @param host host */ - public void closeChannel(Address address){ - Channel channel = this.channels.remove(address); + public void closeChannel(Host host){ + Channel channel = this.channels.remove(host); if(channel != null){ channel.close(); } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java index b1b24d3303..c8d56597ee 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java @@ -1 +1 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; public enum CommandType { /** * roll view log request */ ROLL_VIEW_LOG_REQUEST, /** * roll view log response */ ROLL_VIEW_LOG_RESPONSE, /** * view whole log request */ VIEW_WHOLE_LOG_REQUEST, /** * view whole log response */ VIEW_WHOLE_LOG_RESPONSE, /** * get log bytes request */ GET_LOG_BYTES_REQUEST, /** * get log bytes response */ GET_LOG_BYTES_RESPONSE, WORKER_REQUEST, MASTER_RESPONSE, /** * execute task request */ EXECUTE_TASK_REQUEST, /** * execute task response */ EXECUTE_TASK_RESPONSE, /** * ping */ PING, /** * pong */ PONG; } \ No newline at end of file +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; public enum CommandType { /** * roll view log request */ ROLL_VIEW_LOG_REQUEST, /** * roll view log response */ ROLL_VIEW_LOG_RESPONSE, /** * view whole log request */ VIEW_WHOLE_LOG_REQUEST, /** * view whole log response */ VIEW_WHOLE_LOG_RESPONSE, /** * get log bytes request */ GET_LOG_BYTES_REQUEST, /** * get log bytes response */ GET_LOG_BYTES_RESPONSE, WORKER_REQUEST, MASTER_RESPONSE, /** * execute task request */ TASK_EXECUTE_REQUEST, /** * execute task ack */ TASK_EXECUTE_ACK, /** * execute task response */ TASK_EXECUTE_RESPONSE, /** * kill task */ TASK_KILL_REQUEST, /** * kill task response */ TASK_KILL_RESPONSE, /** * ping */ PING, /** * pong */ PONG; } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java deleted file mode 100644 index 93c536c347..0000000000 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java +++ /dev/null @@ -1 +0,0 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.List; /** * execute task request command */ public class ExecuteTaskRequestCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * application name */ private String applicationName; /** * group name */ private String groupName; /** * task name */ private String taskName; /** * connector port */ private int connectorPort; /** * description info */ private String description; /** * class name */ private String className; /** * method name */ private String methodName; /** * parameters */ private String params; /** * shard itemds */ private List shardItems; public List getShardItems() { return shardItems; } public void setShardItems(List shardItems) { this.shardItems = shardItems; } public String getParams() { return params; } public void setParams(String params) { this.params = params; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getGroupName() { return groupName; } public void setGroupName(String groupName) { this.groupName = groupName; } public String getTaskName() { return taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public int getConnectorPort() { return connectorPort; } public void setConnectorPort(int connectorPort) { this.connectorPort = connectorPort; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getClassName() { return className; } public void setClassName(String className) { this.className = className; } public String getMethodName() { return methodName; } public void setMethodName(String methodName) { this.methodName = methodName; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java deleted file mode 100644 index 7543fc3d0e..0000000000 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java +++ /dev/null @@ -1 +0,0 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; /** * execute task response command */ public class ExecuteTaskResponseCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * return result */ private Object result; /** * received time */ private long receivedTime; /** * execute count */ private int executeCount; /** * execute time */ private long executeTime; public String getAttemptId() { return attemptId; } public void setAttemptId(String attemptId) { this.attemptId = attemptId; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public Object getResult() { return result; } public void setResult(Object result) { this.result = result; } public long getReceivedTime() { return receivedTime; } public void setReceivedTime(long receivedTime) { this.receivedTime = receivedTime; } public int getExecuteCount() { return executeCount; } public void setExecuteCount(int executeCount) { this.executeCount = executeCount; } public long getExecuteTime() { return executeTime; } public void setExecuteTime(long executeTime) { this.executeTime = executeTime; } public Command convert2Command(long opaque){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java index c5e4d075af..f90d3fff18 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java @@ -30,12 +30,12 @@ public class Ping implements Serializable { /** * ping body */ - protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; + protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; /** * request command body */ - private static byte[] EMPTY_BODY_ARRAY = new byte[0]; + private static final byte[] EMPTY_BODY_ARRAY = new byte[0]; private static final ByteBuf PING_BUF; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java index e52cef6d92..1b51373bff 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java @@ -30,12 +30,12 @@ public class Pong implements Serializable { /** * pong body */ - protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; + protected static final ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; /** * pong command body */ - private static byte[] EMPTY_BODY_ARRAY = new byte[0]; + private static final byte[] EMPTY_BODY_ARRAY = new byte[0]; /** * ping byte buffer diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java new file mode 100644 index 0000000000..0b3d901a3c --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.Date; /** * execute task request command */ public class TaskExecuteAckCommand implements Serializable { /** * taskInstanceId */ private int taskInstanceId; /** * startTime */ private Date startTime; /** * host */ private String host; /** * status */ private int status; /** * logPath */ private String logPath; /** * executePath */ private String executePath; public Date getStartTime() { return startTime; } public void setStartTime(Date startTime) { this.startTime = startTime; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public int getTaskInstanceId() { return taskInstanceId; } public void setTaskInstanceId(int taskInstanceId) { this.taskInstanceId = taskInstanceId; } public String getLogPath() { return logPath; } public void setLogPath(String logPath) { this.logPath = logPath; } public String getExecutePath() { return executePath; } public void setExecutePath(String executePath) { this.executePath = executePath; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_ACK); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } @Override public String toString() { return "TaskExecuteAckCommand{" + "taskInstanceId=" + taskInstanceId + ", startTime=" + startTime + ", host='" + host + '\'' + ", status=" + status + ", logPath='" + logPath + '\'' + ", executePath='" + executePath + '\'' + '}'; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java new file mode 100644 index 0000000000..637724f49d --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; /** * execute task request command */ public class TaskExecuteRequestCommand implements Serializable { /** * task execution context */ private String taskExecutionContext; public String getTaskExecutionContext() { return taskExecutionContext; } public void setTaskExecutionContext(String taskExecutionContext) { this.taskExecutionContext = taskExecutionContext; } public TaskExecuteRequestCommand() { } public TaskExecuteRequestCommand(String taskExecutionContext) { this.taskExecutionContext = taskExecutionContext; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } @Override public String toString() { return "TaskExecuteRequestCommand{" + "taskExecutionContext='" + taskExecutionContext + '\'' + '}'; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java new file mode 100644 index 0000000000..deb6f5dd8f --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.Date; /** * execute task response command */ public class TaskExecuteResponseCommand implements Serializable { public TaskExecuteResponseCommand() { } public TaskExecuteResponseCommand(int taskInstanceId) { this.taskInstanceId = taskInstanceId; } /** * task instance id */ private int taskInstanceId; /** * status */ private int status; /** * end time */ private Date endTime; /** * processId */ private int processId; /** * appIds */ private String appIds; public int getTaskInstanceId() { return taskInstanceId; } public void setTaskInstanceId(int taskInstanceId) { this.taskInstanceId = taskInstanceId; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public Date getEndTime() { return endTime; } public void setEndTime(Date endTime) { this.endTime = endTime; } public int getProcessId() { return processId; } public void setProcessId(int processId) { this.processId = processId; } public String getAppIds() { return appIds; } public void setAppIds(String appIds) { this.appIds = appIds; } /** * package response command * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } @Override public String toString() { return "TaskExecuteResponseCommand{" + "taskInstanceId=" + taskInstanceId + ", status=" + status + ", endTime=" + endTime + ", processId=" + processId + ", appIds='" + appIds + '\'' + '}'; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskInfo.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskInfo.java new file mode 100644 index 0000000000..3fb58fe3da --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskInfo.java @@ -0,0 +1,250 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command; + +import java.io.Serializable; +import java.util.Date; + +/** + * master/worker task transport + */ +public class TaskInfo implements Serializable{ + + /** + * task instance id + */ + private Integer taskId; + + + /** + * taks name + */ + private String taskName; + + /** + * task start time + */ + private Date startTime; + + /** + * task type + */ + private String taskType; + + /** + * task execute path + */ + private String executePath; + + /** + * task json + */ + private String taskJson; + + + /** + * process instance id + */ + private Integer processInstanceId; + + + /** + * process instance schedule time + */ + private Date scheduleTime; + + /** + * process instance global parameters + */ + private String globalParams; + + + /** + * execute user id + */ + private Integer executorId; + + + /** + * command type if complement + */ + private Integer cmdTypeIfComplement; + + + /** + * tenant code + */ + private String tenantCode; + + /** + * task queue + */ + private String queue; + + + /** + * process define id + */ + private Integer processDefineId; + + /** + * project id + */ + private Integer projectId; + + public Integer getTaskId() { + return taskId; + } + + public void setTaskId(Integer taskId) { + this.taskId = taskId; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + public String getExecutePath() { + return executePath; + } + + public void setExecutePath(String executePath) { + this.executePath = executePath; + } + + public String getTaskJson() { + return taskJson; + } + + public void setTaskJson(String taskJson) { + this.taskJson = taskJson; + } + + public Integer getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(Integer processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public Date getScheduleTime() { + return scheduleTime; + } + + public void setScheduleTime(Date scheduleTime) { + this.scheduleTime = scheduleTime; + } + + public String getGlobalParams() { + return globalParams; + } + + public void setGlobalParams(String globalParams) { + this.globalParams = globalParams; + } + + public String getTenantCode() { + return tenantCode; + } + + public void setTenantCode(String tenantCode) { + this.tenantCode = tenantCode; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public Integer getProcessDefineId() { + return processDefineId; + } + + public void setProcessDefineId(Integer processDefineId) { + this.processDefineId = processDefineId; + } + + public Integer getProjectId() { + return projectId; + } + + public void setProjectId(Integer projectId) { + this.projectId = projectId; + } + + public Integer getExecutorId() { + return executorId; + } + + public void setExecutorId(Integer executorId) { + this.executorId = executorId; + } + + public Integer getCmdTypeIfComplement() { + return cmdTypeIfComplement; + } + + public void setCmdTypeIfComplement(Integer cmdTypeIfComplement) { + this.cmdTypeIfComplement = cmdTypeIfComplement; + } + + @Override + public String toString() { + return "TaskInfo{" + + "taskId=" + taskId + + ", taskName='" + taskName + '\'' + + ", startTime=" + startTime + + ", taskType='" + taskType + '\'' + + ", executePath='" + executePath + '\'' + + ", taskJson='" + taskJson + '\'' + + ", processInstanceId=" + processInstanceId + + ", scheduleTime=" + scheduleTime + + ", globalParams='" + globalParams + '\'' + + ", executorId=" + executorId + + ", cmdTypeIfComplement=" + cmdTypeIfComplement + + ", tenantCode='" + tenantCode + '\'' + + ", queue='" + queue + '\'' + + ", processDefineId=" + processDefineId + + ", projectId=" + projectId + + '}'; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java new file mode 100644 index 0000000000..b8e02dd057 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; /** * kill task request command */ public class TaskKillRequestCommand implements Serializable { /** * task id */ private int taskInstanceId; public int getTaskInstanceId() { return taskInstanceId; } public void setTaskInstanceId(int taskInstanceId) { this.taskInstanceId = taskInstanceId; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.TASK_KILL_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } @Override public String toString() { return "TaskKillRequestCommand{" + "taskInstanceId=" + taskInstanceId + '}'; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java new file mode 100644 index 0000000000..2ca2330c80 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.Date; import java.util.List; /** * kill task response command */ public class TaskKillResponseCommand implements Serializable { /** * taskInstanceId */ private int taskInstanceId; /** * host */ private String host; /** * status */ private int status; /** * processId */ private int processId; /** * other resource manager appId , for example : YARN etc */ protected List appIds; public int getTaskInstanceId() { return taskInstanceId; } public void setTaskInstanceId(int taskInstanceId) { this.taskInstanceId = taskInstanceId; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public int getProcessId() { return processId; } public void setProcessId(int processId) { this.processId = processId; } public List getAppIds() { return appIds; } public void setAppIds(List appIds) { this.appIds = appIds; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.TASK_KILL_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } @Override public String toString() { return "TaskKillResponseCommand{" + "taskInstanceId=" + taskInstanceId + ", host='" + host + '\'' + ", status=" + status + ", processId=" + processId + ", appIds=" + appIds + '}'; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java index ca304646e4..2e3954f4bc 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java @@ -32,9 +32,9 @@ import java.util.concurrent.*; */ public class ResponseFuture { - private final static Logger LOGGER = LoggerFactory.getLogger(ResponseFuture.class); + private static final Logger LOGGER = LoggerFactory.getLogger(ResponseFuture.class); - private final static ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); + private static final ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); /** * request unique identification @@ -63,11 +63,11 @@ public class ResponseFuture { /** * response command */ - private volatile Command responseCommand; + private Command responseCommand; private volatile boolean sendOk = true; - private volatile Throwable cause; + private Throwable cause; public ResponseFuture(long opaque, long timeoutMillis, InvokeCallback invokeCallback, ReleaseSemaphore releaseSemaphore) { this.opaque = opaque; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java index 80e561d05c..48d78d9ad6 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java @@ -19,12 +19,19 @@ package org.apache.dolphinscheduler.remote.handler; import io.netty.channel.*; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.future.ResponseFuture; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.ChannelUtils; +import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.remote.utils.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.RejectedExecutionException; /** * netty client request handler @@ -44,9 +51,20 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { */ private final ExecutorService callbackExecutor; + /** + * processors + */ + private final ConcurrentHashMap> processors; + + /** + * default executor + */ + private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS); + public NettyClientHandler(NettyRemotingClient nettyRemotingClient, ExecutorService callbackExecutor){ this.nettyRemotingClient = nettyRemotingClient; this.callbackExecutor = callbackExecutor; + this.processors = new ConcurrentHashMap(); } /** @@ -71,18 +89,43 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { */ @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - processReceived((Command)msg); + processReceived(ctx.channel(), (Command)msg); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { + this.registerProcessor(commandType, processor, null); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + * @param executor thread executor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { + ExecutorService executorRef = executor; + if(executorRef == null){ + executorRef = defaultExecutor; + } + this.processors.putIfAbsent(commandType, new Pair<>(processor, executorRef)); } /** * process received logic * - * @param responseCommand responseCommand + * @param command command */ - private void processReceived(final Command responseCommand) { - ResponseFuture future = ResponseFuture.getFuture(responseCommand.getOpaque()); + private void processReceived(final Channel channel, final Command command) { + ResponseFuture future = ResponseFuture.getFuture(command.getOpaque()); if(future != null){ - future.setResponseCommand(responseCommand); + future.setResponseCommand(command); future.release(); if(future.getInvokeCallback() != null){ this.callbackExecutor.submit(new Runnable() { @@ -92,10 +135,30 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { } }); } else{ - future.putResponse(responseCommand); + future.putResponse(command); } } else{ - logger.warn("receive response {}, but not matched any request ", responseCommand); + processByCommandType(channel, command); + } + } + + public void processByCommandType(final Channel channel, final Command command) { + final Pair pair = processors.get(command.getType()); + if (pair != null) { + Runnable run = () -> { + try { + pair.getLeft().process(channel, command); + } catch (Throwable e) { + logger.error(String.format("process command %s exception", command), e); + } + }; + try { + pair.getRight().submit(run); + } catch (RejectedExecutionException e) { + logger.warn("thread pool is full, discard command {} from {}", command, ChannelUtils.getRemoteAddress(channel)); + } + } else { + logger.warn("receive response {}, but not matched any request ", command); } } @@ -107,35 +170,9 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { */ @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - logger.error("exceptionCaught : {}",cause.getMessage(), cause); + logger.error("exceptionCaught : {}", cause); nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel())); ctx.channel().close(); } - /** - * channel write changed - * - * @param ctx channel handler context - * @throws Exception - */ - @Override - public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { - Channel ch = ctx.channel(); - ChannelConfig config = ch.config(); - - if (!ch.isWritable()) { - if (logger.isWarnEnabled()) { - logger.warn("{} is not writable, over high water level : {}", - new Object[]{ch, config.getWriteBufferHighWaterMark()}); - } - - config.setAutoRead(false); - } else { - if (logger.isWarnEnabled()) { - logger.warn("{} is writable, to low water : {}", - new Object[]{ch, config.getWriteBufferLowWaterMark()}); - } - config.setAutoRead(true); - } - } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java index 22251c35c1..da2a6ff8bf 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java @@ -98,7 +98,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { if(executorRef == null){ executorRef = nettyRemotingServer.getDefaultExecutor(); } - this.processors.putIfAbsent(commandType, new Pair(processor, executorRef)); + this.processors.putIfAbsent(commandType, new Pair<>(processor, executorRef)); } /** @@ -117,7 +117,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { try { pair.getLeft().process(channel, msg); } catch (Throwable ex) { - logger.error("process msg {} error : {}", msg, ex); + logger.error("process msg {} error", msg, ex); } } }; @@ -158,14 +158,14 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { if (!ch.isWritable()) { if (logger.isWarnEnabled()) { logger.warn("{} is not writable, over high water level : {}", - new Object[]{ch, config.getWriteBufferHighWaterMark()}); + ch, config.getWriteBufferHighWaterMark()); } config.setAutoRead(false); } else { if (logger.isWarnEnabled()) { logger.warn("{} is writable, to low water : {}", - new Object[]{ch, config.getWriteBufferLowWaterMark()}); + ch, config.getWriteBufferLowWaterMark()); } config.setAutoRead(true); } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java deleted file mode 100644 index f61dcd615c..0000000000 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.remote.utils; - -import java.io.Serializable; - -/** - * server address - */ -public class Address implements Serializable { - - /** - * host - */ - private String host; - - /** - * port - */ - private int port; - - public Address(){ - //NOP - } - - public Address(String host, int port){ - this.host = host; - this.port = port; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public int getPort() { - return port; - } - - public void setPort(int port) { - this.port = port; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((host == null) ? 0 : host.hashCode()); - result = prime * result + port; - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - Address other = (Address) obj; - if (host == null) { - if (other.host != null) { - return false; - } - } else if (!host.equals(other.host)) { - return false; - } - return port == other.port; - } - - @Override - public String toString() { - return "Address [host=" + host + ", port=" + port + "]"; - } -} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java index d7af5fe165..138a8f0bdf 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java @@ -49,9 +49,9 @@ public class ChannelUtils { * @param channel channel * @return address */ - public static Address toAddress(Channel channel){ + public static Host toAddress(Channel channel){ InetSocketAddress socketAddress = ((InetSocketAddress)channel.remoteAddress()); - return new Address(socketAddress.getAddress().getHostAddress(), socketAddress.getPort()); + return new Host(socketAddress.getAddress().getHostAddress(), socketAddress.getPort()); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java index 5733b17790..48736ca694 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.remote.utils; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; /** @@ -31,11 +32,14 @@ public class Constants { /** * charset */ - public static final Charset UTF8 = Charset.forName("UTF-8"); + public static final Charset UTF8 = StandardCharsets.UTF_8; /** * cpus */ public static final int CPUS = Runtime.getRuntime().availableProcessors(); + + public static final String LOCAL_ADDRESS = IPUtils.getFirstNoLoopbackIP4Address(); + } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java index e96796a05c..a18b8d5a7c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java @@ -54,7 +54,7 @@ public class FastJsonSerializer { * @return deserialize type */ public static T deserialize(byte[] src, Class clazz) { - return JSON.parseObject(new String(src, Constants.UTF8), clazz); + return JSON.parseObject(src, clazz); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java new file mode 100644 index 0000000000..e9eaabcad6 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Host.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.utils; + +import java.io.Serializable; +import java.util.Objects; + +/** + * server address + */ +public class Host implements Serializable { + + /** + * address + */ + private String address; + + /** + * ip + */ + private String ip; + + /** + * port + */ + private int port; + + public Host() { + } + + public Host(String ip, int port) { + this.ip = ip; + this.port = port; + this.address = ip + ":" + port; + } + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public String getIp() { + return ip; + } + + public void setIp(String ip) { + this.ip = ip; + this.address = ip + ":" + port; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + this.address = ip + ":" + port; + } + + /** + * address convert host + * @param address address + * @return host + */ + public static Host of(String address){ + if(address == null) { + throw new IllegalArgumentException("Host : address is null."); + } + String[] parts = address.split(":"); + if (parts.length != 2) { + throw new IllegalArgumentException(String.format("Host : %s illegal.", address)); + } + Host host = new Host(parts[0], Integer.parseInt(parts[1])); + return host; + } + + /** + * whether old version + * @param address address + * @return old version is true , otherwise is false + */ + public static Boolean isOldVersion(String address){ + String[] parts = address.split(":"); + return parts.length != 2 ? true : false; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Host host = (Host) o; + return Objects.equals(getAddress(), host.getAddress()); + } + + @Override + public int hashCode() { + return Objects.hash(getAddress()); + } + + @Override + public String toString() { + return "Host{" + + "address='" + address + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/IPUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/IPUtils.java new file mode 100644 index 0000000000..2fa82fd5ba --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/IPUtils.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Enumeration; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class IPUtils { + + private static final Logger logger = LoggerFactory.getLogger(IPUtils.class); + + private static String IP_REGEX = "([1-9]|[1-9]\\d|1\\d{2}|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d{2}|2[0-4]\\d|25[0-5])){3}"; + + private static String LOCAL_HOST = "unknown"; + + static { + String host = System.getenv("HOSTNAME"); + if (isNotEmpty(host)) { + LOCAL_HOST = host; + } else { + + try { + String hostName = InetAddress.getLocalHost().getHostName(); + if (isNotEmpty(hostName)) { + LOCAL_HOST = hostName; + } + } catch (UnknownHostException e) { + logger.error("get hostName error!", e); + } + } + } + + public static String getLocalHost() { + return LOCAL_HOST; + } + + + public static String getFirstNoLoopbackIP4Address() { + Collection allNoLoopbackIP4Addresses = getNoLoopbackIP4Addresses(); + if (allNoLoopbackIP4Addresses.isEmpty()) { + return null; + } + return allNoLoopbackIP4Addresses.iterator().next(); + } + + public static Collection getNoLoopbackIP4Addresses() { + Collection noLoopbackIP4Addresses = new ArrayList<>(); + Collection allInetAddresses = getAllHostAddress(); + + for (InetAddress address : allInetAddresses) { + if (!address.isLoopbackAddress() && !address.isSiteLocalAddress() + && !Inet6Address.class.isInstance(address)) { + noLoopbackIP4Addresses.add(address.getHostAddress()); + } + } + if (noLoopbackIP4Addresses.isEmpty()) { + for (InetAddress address : allInetAddresses) { + if (!address.isLoopbackAddress() && !Inet6Address.class.isInstance(address)) { + noLoopbackIP4Addresses.add(address.getHostAddress()); + } + } + } + return noLoopbackIP4Addresses; + } + + public static Collection getAllHostAddress() { + try { + Enumeration networkInterfaces = NetworkInterface.getNetworkInterfaces(); + Collection addresses = new ArrayList<>(); + + while (networkInterfaces.hasMoreElements()) { + NetworkInterface networkInterface = networkInterfaces.nextElement(); + Enumeration inetAddresses = networkInterface.getInetAddresses(); + while (inetAddresses.hasMoreElements()) { + InetAddress inetAddress = inetAddresses.nextElement(); + addresses.add(inetAddress); + } + } + + return addresses; + } catch (SocketException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + public static String getIpByHostName(String host) { + InetAddress address = null; + try { + address = InetAddress.getByName(host); + } catch (UnknownHostException e) { + logger.error("get IP error", e); + } + if (address == null) { + return ""; + } + return address.getHostAddress(); + + } + + private static boolean isEmpty(final CharSequence cs) { + return cs == null || cs.length() == 0; + } + + private static boolean isNotEmpty(final CharSequence cs) { + return !isEmpty(cs); + } + + public static boolean isIp(String addr) { + if (addr.length() < 7 || addr.length() > 15 || "".equals(addr)) { + return false; + } + + Pattern pat = Pattern.compile(IP_REGEX); + + Matcher mat = pat.matcher(addr); + + boolean ipAddress = mat.find(); + + return ipAddress; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java index 2f0d05ebd4..be84f0f221 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java @@ -52,8 +52,8 @@ public class NamedThreadFactory implements ThreadFactory { */ @Override public Thread newThread(Runnable r) { - final String threadName = count > 0 ? String.format(name + "_%d_%d", count, increment.getAndIncrement()) - : String.format(name + "_%d", increment.getAndIncrement()); + final String threadName = count > 0 ? String.format("%s_%d_%d", name, count, increment.getAndIncrement()) + : String.format("%s_%d", name, increment.getAndIncrement()); Thread t = new Thread(r, threadName); t.setDaemon(true); return t; diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java index 2042191486..33bf8ca7c3 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java @@ -50,4 +50,8 @@ public class Pair { public void setRight(R right) { this.right = right; } + + public static Pair of(L left, R right){ + return new Pair(left, right); + } } diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java index ef46c2c781..cfc10b2acb 100644 --- a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java @@ -27,7 +27,7 @@ import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.remote.future.InvokeCallback; import org.apache.dolphinscheduler.remote.future.ResponseFuture; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.Address; +import org.apache.dolphinscheduler.remote.utils.Host; import org.junit.Assert; import org.junit.Test; @@ -62,7 +62,7 @@ public class NettyRemotingClientTest { NettyRemotingClient client = new NettyRemotingClient(clientConfig); Command commandPing = Ping.create(); try { - Command response = client.sendSync(new Address("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000); + Command response = client.sendSync(new Host("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000); Assert.assertEquals(commandPing.getOpaque(), response.getOpaque()); } catch (Exception e) { e.printStackTrace(); @@ -93,7 +93,7 @@ public class NettyRemotingClientTest { Command commandPing = Ping.create(); try { final AtomicLong opaque = new AtomicLong(0); - client.sendAsync(new Address("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000, new InvokeCallback() { + client.sendAsync(new Host("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000, new InvokeCallback() { @Override public void operationComplete(ResponseFuture responseFuture) { opaque.set(responseFuture.getOpaque()); diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index e8e84297e2..891d918c26 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -36,10 +36,6 @@ org.apache.dolphinscheduler dolphinscheduler-common - - protobuf-java - com.google.protobuf - io.netty netty @@ -138,7 +134,11 @@ mockito-core test - + + org.springframework + spring-test + + diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java new file mode 100644 index 0000000000..535c274989 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java @@ -0,0 +1,140 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.builder; + +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.server.entity.*; + +/** + * TaskExecutionContext builder + */ +public class TaskExecutionContextBuilder { + + public static TaskExecutionContextBuilder get(){ + return new TaskExecutionContextBuilder(); + } + + private TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); + + /** + * build taskInstance related info + * + * @param taskInstance taskInstance + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildTaskInstanceRelatedInfo(TaskInstance taskInstance){ + taskExecutionContext.setTaskInstanceId(taskInstance.getId()); + taskExecutionContext.setTaskName(taskInstance.getName()); + taskExecutionContext.setStartTime(taskInstance.getStartTime()); + taskExecutionContext.setTaskType(taskInstance.getTaskType()); + taskExecutionContext.setLogPath(taskInstance.getLogPath()); + taskExecutionContext.setExecutePath(taskInstance.getExecutePath()); + taskExecutionContext.setTaskJson(taskInstance.getTaskJson()); + taskExecutionContext.setWorkerGroup(taskInstance.getWorkerGroup()); + taskExecutionContext.setHost(taskInstance.getHost()); + taskExecutionContext.setResources(taskInstance.getResources()); + return this; + } + + + /** + * build processInstance related info + * + * @param processInstance processInstance + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildProcessInstanceRelatedInfo(ProcessInstance processInstance){ + taskExecutionContext.setProcessInstanceId(processInstance.getId()); + taskExecutionContext.setScheduleTime(processInstance.getScheduleTime()); + taskExecutionContext.setGlobalParams(processInstance.getGlobalParams()); + taskExecutionContext.setExecutorId(processInstance.getExecutorId()); + taskExecutionContext.setCmdTypeIfComplement(processInstance.getCmdTypeIfComplement().getCode()); + taskExecutionContext.setTenantCode(processInstance.getTenantCode()); + taskExecutionContext.setQueue(processInstance.getQueue()); + return this; + } + + /** + * build processDefinition related info + * + * @param processDefinition processDefinition + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildProcessDefinitionRelatedInfo(ProcessDefinition processDefinition){ + taskExecutionContext.setProcessDefineId(processDefinition.getId()); + taskExecutionContext.setProjectId(processDefinition.getProjectId()); + return this; + } + + + /** + * build SQLTask related info + * + * @param sqlTaskExecutionContext sqlTaskExecutionContext + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildSQLTaskRelatedInfo(SQLTaskExecutionContext sqlTaskExecutionContext){ + taskExecutionContext.setSqlTaskExecutionContext(sqlTaskExecutionContext); + return this; + } + + + /** + * build DataxTask related info + * + * @param dataxTaskExecutionContext dataxTaskExecutionContext + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildDataxTaskRelatedInfo(DataxTaskExecutionContext dataxTaskExecutionContext){ + taskExecutionContext.setDataxTaskExecutionContext(dataxTaskExecutionContext); + return this; + } + + /** + * build procedureTask related info + * + * @param procedureTaskExecutionContext procedureTaskExecutionContext + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildProcedureTaskRelatedInfo(ProcedureTaskExecutionContext procedureTaskExecutionContext){ + taskExecutionContext.setProcedureTaskExecutionContext(procedureTaskExecutionContext); + return this; + } + + /** + * build sqoopTask related info + * + * @param sqoopTaskExecutionContext sqoopTaskExecutionContext + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildSqoopTaskRelatedInfo(SqoopTaskExecutionContext sqoopTaskExecutionContext){ + taskExecutionContext.setSqoopTaskExecutionContext(sqoopTaskExecutionContext); + return this; + } + + + /** + * create + * + * @return taskExecutionContext + */ + public TaskExecutionContext create(){ + return taskExecutionContext; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DataxTaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DataxTaskExecutionContext.java new file mode 100644 index 0000000000..dd8d64698f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DataxTaskExecutionContext.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import java.io.Serializable; + +/** + * master/worker task transport + */ +public class DataxTaskExecutionContext implements Serializable{ + + /** + * dataSourceId + */ + private int dataSourceId; + + /** + * sourcetype + */ + private int sourcetype; + + /** + * sourceConnectionParams + */ + private String sourceConnectionParams; + + /** + * dataTargetId + */ + private int dataTargetId; + + /** + * targetType + */ + private int targetType; + + /** + * targetConnectionParams + */ + private String targetConnectionParams; + + public int getDataSourceId() { + return dataSourceId; + } + + public void setDataSourceId(int dataSourceId) { + this.dataSourceId = dataSourceId; + } + + public int getSourcetype() { + return sourcetype; + } + + public void setSourcetype(int sourcetype) { + this.sourcetype = sourcetype; + } + + public String getSourceConnectionParams() { + return sourceConnectionParams; + } + + public void setSourceConnectionParams(String sourceConnectionParams) { + this.sourceConnectionParams = sourceConnectionParams; + } + + public int getDataTargetId() { + return dataTargetId; + } + + public void setDataTargetId(int dataTargetId) { + this.dataTargetId = dataTargetId; + } + + public int getTargetType() { + return targetType; + } + + public void setTargetType(int targetType) { + this.targetType = targetType; + } + + public String getTargetConnectionParams() { + return targetConnectionParams; + } + + public void setTargetConnectionParams(String targetConnectionParams) { + this.targetConnectionParams = targetConnectionParams; + } + + @Override + public String toString() { + return "DataxTaskExecutionContext{" + + "dataSourceId=" + dataSourceId + + ", sourcetype=" + sourcetype + + ", sourceConnectionParams='" + sourceConnectionParams + '\'' + + ", dataTargetId=" + dataTargetId + + ", targetType=" + targetType + + ", targetConnectionParams='" + targetConnectionParams + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DependenceTaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DependenceTaskExecutionContext.java new file mode 100644 index 0000000000..953f2940de --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/DependenceTaskExecutionContext.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import java.io.Serializable; + +/** + * master/worker task transport + */ +public class DependenceTaskExecutionContext implements Serializable{ + + private String dependence; + + public String getDependence() { + return dependence; + } + + public void setDependence(String dependence) { + this.dependence = dependence; + } + + @Override + public String toString() { + return "DependenceTaskExecutionContext{" + + "dependence='" + dependence + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/ProcedureTaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/ProcedureTaskExecutionContext.java new file mode 100644 index 0000000000..d5fc97c8de --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/ProcedureTaskExecutionContext.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import java.io.Serializable; + +/** + * master/worker task transport + */ +public class ProcedureTaskExecutionContext implements Serializable{ + + /** + * connectionParams + */ + private String connectionParams; + + public String getConnectionParams() { + return connectionParams; + } + + public void setConnectionParams(String connectionParams) { + this.connectionParams = connectionParams; + } + + @Override + public String toString() { + return "ProcedureTaskExecutionContext{" + + "connectionParams='" + connectionParams + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java new file mode 100644 index 0000000000..97afb4f6d9 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import org.apache.dolphinscheduler.dao.entity.UdfFunc; + +import java.io.Serializable; +import java.util.List; + +/** + * SQL Task ExecutionContext + */ +public class SQLTaskExecutionContext implements Serializable { + + + /** + * warningGroupId + */ + private int warningGroupId; + + /** + * connectionParams + */ + private String connectionParams; + /** + * udf function list + */ + private List udfFuncList; + + + public int getWarningGroupId() { + return warningGroupId; + } + + public void setWarningGroupId(int warningGroupId) { + this.warningGroupId = warningGroupId; + } + + public List getUdfFuncList() { + return udfFuncList; + } + + public void setUdfFuncList(List udfFuncList) { + this.udfFuncList = udfFuncList; + } + + public String getConnectionParams() { + return connectionParams; + } + + public void setConnectionParams(String connectionParams) { + this.connectionParams = connectionParams; + } + + @Override + public String toString() { + return "SQLTaskExecutionContext{" + + "warningGroupId=" + warningGroupId + + ", connectionParams='" + connectionParams + '\'' + + ", udfFuncList=" + udfFuncList + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SqoopTaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SqoopTaskExecutionContext.java new file mode 100644 index 0000000000..c74414bb21 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SqoopTaskExecutionContext.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import java.io.Serializable; + +/** + * master/worker task transport + */ +public class SqoopTaskExecutionContext implements Serializable{ + + /** + * dataSourceId + */ + private int dataSourceId; + + /** + * sourcetype + */ + private int sourcetype; + + /** + * sourceConnectionParams + */ + private String sourceConnectionParams; + + /** + * dataTargetId + */ + private int dataTargetId; + + /** + * targetType + */ + private int targetType; + + /** + * targetConnectionParams + */ + private String targetConnectionParams; + + public int getDataSourceId() { + return dataSourceId; + } + + public void setDataSourceId(int dataSourceId) { + this.dataSourceId = dataSourceId; + } + + public int getSourcetype() { + return sourcetype; + } + + public void setSourcetype(int sourcetype) { + this.sourcetype = sourcetype; + } + + public String getSourceConnectionParams() { + return sourceConnectionParams; + } + + public void setSourceConnectionParams(String sourceConnectionParams) { + this.sourceConnectionParams = sourceConnectionParams; + } + + public int getDataTargetId() { + return dataTargetId; + } + + public void setDataTargetId(int dataTargetId) { + this.dataTargetId = dataTargetId; + } + + public int getTargetType() { + return targetType; + } + + public void setTargetType(int targetType) { + this.targetType = targetType; + } + + public String getTargetConnectionParams() { + return targetConnectionParams; + } + + public void setTargetConnectionParams(String targetConnectionParams) { + this.targetConnectionParams = targetConnectionParams; + } + + @Override + public String toString() { + return "SqoopTaskExecutionContext{" + + "dataSourceId=" + dataSourceId + + ", sourcetype=" + sourcetype + + ", sourceConnectionParams='" + sourceConnectionParams + '\'' + + ", dataTargetId=" + dataTargetId + + ", targetType=" + targetType + + ", targetConnectionParams='" + targetConnectionParams + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java new file mode 100644 index 0000000000..563f5c8459 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java @@ -0,0 +1,499 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; +import java.util.Date; +import java.util.List; +import java.util.Map; + +/** + * master/worker task transport + */ +public class TaskExecutionContext implements Serializable{ + + /** + * task id + */ + private int taskInstanceId; + + + /** + * taks name + */ + private String taskName; + + /** + * task start time + */ + private Date startTime; + + /** + * task type + */ + private String taskType; + + /** + * host + */ + private String host; + + /** + * task execute path + */ + private String executePath; + + /** + * log path + */ + private String logPath; + + /** + * task json + */ + private String taskJson; + + /** + * processId + */ + private int processId; + + /** + * appIds + */ + private String appIds; + + /** + * process instance id + */ + private int processInstanceId; + + + /** + * process instance schedule time + */ + private Date scheduleTime; + + /** + * process instance global parameters + */ + private String globalParams; + + + /** + * execute user id + */ + private int executorId; + + + /** + * command type if complement + */ + private int cmdTypeIfComplement; + + + /** + * tenant code + */ + private String tenantCode; + + /** + * task queue + */ + private String queue; + + + /** + * process define id + */ + private int processDefineId; + + /** + * project id + */ + private int projectId; + + /** + * taskParams + */ + private String taskParams; + + /** + * envFile + */ + private String envFile; + + /** + * definedParams + */ + private Map definedParams; + + /** + * task AppId + */ + private String taskAppId; + + /** + * task timeout strategy + */ + private int taskTimeoutStrategy; + + /** + * task timeout + */ + private int taskTimeout; + + /** + * worker group + */ + private String workerGroup; + + /** + * resources full name + */ + private List resources; + + /** + * sql TaskExecutionContext + */ + private SQLTaskExecutionContext sqlTaskExecutionContext; + + /** + * datax TaskExecutionContext + */ + private DataxTaskExecutionContext dataxTaskExecutionContext; + + /** + * dependence TaskExecutionContext + */ + private DependenceTaskExecutionContext dependenceTaskExecutionContext; + + /** + * sqoop TaskExecutionContext + */ + private SqoopTaskExecutionContext sqoopTaskExecutionContext; + + /** + * procedure TaskExecutionContext + */ + private ProcedureTaskExecutionContext procedureTaskExecutionContext; + + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getExecutePath() { + return executePath; + } + + public void setExecutePath(String executePath) { + this.executePath = executePath; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String getTaskJson() { + return taskJson; + } + + public void setTaskJson(String taskJson) { + this.taskJson = taskJson; + } + + public int getProcessId() { + return processId; + } + + public void setProcessId(int processId) { + this.processId = processId; + } + + public String getAppIds() { + return appIds; + } + + public void setAppIds(String appIds) { + this.appIds = appIds; + } + + public int getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public Date getScheduleTime() { + return scheduleTime; + } + + public void setScheduleTime(Date scheduleTime) { + this.scheduleTime = scheduleTime; + } + + public String getGlobalParams() { + return globalParams; + } + + public void setGlobalParams(String globalParams) { + this.globalParams = globalParams; + } + + public int getExecutorId() { + return executorId; + } + + public void setExecutorId(int executorId) { + this.executorId = executorId; + } + + public int getCmdTypeIfComplement() { + return cmdTypeIfComplement; + } + + public void setCmdTypeIfComplement(int cmdTypeIfComplement) { + this.cmdTypeIfComplement = cmdTypeIfComplement; + } + + public String getTenantCode() { + return tenantCode; + } + + public void setTenantCode(String tenantCode) { + this.tenantCode = tenantCode; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + this.queue = queue; + } + + public int getProcessDefineId() { + return processDefineId; + } + + public void setProcessDefineId(int processDefineId) { + this.processDefineId = processDefineId; + } + + public int getProjectId() { + return projectId; + } + + public void setProjectId(int projectId) { + this.projectId = projectId; + } + + public String getTaskParams() { + return taskParams; + } + + public void setTaskParams(String taskParams) { + this.taskParams = taskParams; + } + + public String getEnvFile() { + return envFile; + } + + public void setEnvFile(String envFile) { + this.envFile = envFile; + } + + public Map getDefinedParams() { + return definedParams; + } + + public void setDefinedParams(Map definedParams) { + this.definedParams = definedParams; + } + + public String getTaskAppId() { + return taskAppId; + } + + public void setTaskAppId(String taskAppId) { + this.taskAppId = taskAppId; + } + + public int getTaskTimeoutStrategy() { + return taskTimeoutStrategy; + } + + public void setTaskTimeoutStrategy(int taskTimeoutStrategy) { + this.taskTimeoutStrategy = taskTimeoutStrategy; + } + + public int getTaskTimeout() { + return taskTimeout; + } + + public void setTaskTimeout(int taskTimeout) { + this.taskTimeout = taskTimeout; + } + + public String getWorkerGroup() { + return workerGroup; + } + + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; + } + + public SQLTaskExecutionContext getSqlTaskExecutionContext() { + return sqlTaskExecutionContext; + } + + public void setSqlTaskExecutionContext(SQLTaskExecutionContext sqlTaskExecutionContext) { + this.sqlTaskExecutionContext = sqlTaskExecutionContext; + } + + public DataxTaskExecutionContext getDataxTaskExecutionContext() { + return dataxTaskExecutionContext; + } + + public void setDataxTaskExecutionContext(DataxTaskExecutionContext dataxTaskExecutionContext) { + this.dataxTaskExecutionContext = dataxTaskExecutionContext; + } + + public ProcedureTaskExecutionContext getProcedureTaskExecutionContext() { + return procedureTaskExecutionContext; + } + + public void setProcedureTaskExecutionContext(ProcedureTaskExecutionContext procedureTaskExecutionContext) { + this.procedureTaskExecutionContext = procedureTaskExecutionContext; + } + + public Command toCommand(){ + TaskExecuteRequestCommand requestCommand = new TaskExecuteRequestCommand(); + requestCommand.setTaskExecutionContext(FastJsonSerializer.serializeToString(this)); + return requestCommand.convert2Command(); + } + + public DependenceTaskExecutionContext getDependenceTaskExecutionContext() { + return dependenceTaskExecutionContext; + } + + public void setDependenceTaskExecutionContext(DependenceTaskExecutionContext dependenceTaskExecutionContext) { + this.dependenceTaskExecutionContext = dependenceTaskExecutionContext; + } + + public List getResources() { + return resources; + } + + public void setResources(List resources) { + this.resources = resources; + } + + public SqoopTaskExecutionContext getSqoopTaskExecutionContext() { + return sqoopTaskExecutionContext; + } + + public void setSqoopTaskExecutionContext(SqoopTaskExecutionContext sqoopTaskExecutionContext) { + this.sqoopTaskExecutionContext = sqoopTaskExecutionContext; + } + + @Override + public String toString() { + return "TaskExecutionContext{" + + "taskInstanceId=" + taskInstanceId + + ", taskName='" + taskName + '\'' + + ", startTime=" + startTime + + ", taskType='" + taskType + '\'' + + ", host='" + host + '\'' + + ", executePath='" + executePath + '\'' + + ", logPath='" + logPath + '\'' + + ", taskJson='" + taskJson + '\'' + + ", processId=" + processId + + ", appIds='" + appIds + '\'' + + ", processInstanceId=" + processInstanceId + + ", scheduleTime=" + scheduleTime + + ", globalParams='" + globalParams + '\'' + + ", executorId=" + executorId + + ", cmdTypeIfComplement=" + cmdTypeIfComplement + + ", tenantCode='" + tenantCode + '\'' + + ", queue='" + queue + '\'' + + ", processDefineId=" + processDefineId + + ", projectId=" + projectId + + ", taskParams='" + taskParams + '\'' + + ", envFile='" + envFile + '\'' + + ", definedParams=" + definedParams + + ", taskAppId='" + taskAppId + '\'' + + ", taskTimeoutStrategy=" + taskTimeoutStrategy + + ", taskTimeout=" + taskTimeout + + ", workerGroup='" + workerGroup + '\'' + + ", resources=" + resources + + ", sqlTaskExecutionContext=" + sqlTaskExecutionContext + + ", dataxTaskExecutionContext=" + dataxTaskExecutionContext + + ", dependenceTaskExecutionContext=" + dependenceTaskExecutionContext + + ", sqoopTaskExecutionContext=" + sqoopTaskExecutionContext + + ", procedureTaskExecutionContext=" + procedureTaskExecutionContext + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java new file mode 100644 index 0000000000..991eeed493 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.entity; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * task priority info + */ +public class TaskPriority { + + /** + * processInstancePriority + */ + private int processInstancePriority; + + /** + * processInstanceId + */ + private int processInstanceId; + + /** + * taskInstancePriority + */ + private int taskInstancePriority; + + /** + * taskId + */ + private int taskId; + + /** + * groupName + */ + private String groupName; + + /** + * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${groupName} + */ + private String taskPriorityInfo; + + public TaskPriority(){} + + public TaskPriority(int processInstancePriority, + int processInstanceId, + int taskInstancePriority, + int taskId, String groupName) { + this.processInstancePriority = processInstancePriority; + this.processInstanceId = processInstanceId; + this.taskInstancePriority = taskInstancePriority; + this.taskId = taskId; + this.groupName = groupName; + this.taskPriorityInfo = this.processInstancePriority + + UNDERLINE + + this.processInstanceId + + UNDERLINE + + this.taskInstancePriority + + UNDERLINE + + this.taskId + + UNDERLINE + + this.groupName; + } + + public int getProcessInstancePriority() { + return processInstancePriority; + } + + public void setProcessInstancePriority(int processInstancePriority) { + this.processInstancePriority = processInstancePriority; + } + + public int getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public int getTaskInstancePriority() { + return taskInstancePriority; + } + + public void setTaskInstancePriority(int taskInstancePriority) { + this.taskInstancePriority = taskInstancePriority; + } + + public int getTaskId() { + return taskId; + } + + public void setTaskId(int taskId) { + this.taskId = taskId; + } + + public String getGroupName() { + return groupName; + } + + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + public String getTaskPriorityInfo() { + return taskPriorityInfo; + } + + public void setTaskPriorityInfo(String taskPriorityInfo) { + this.taskPriorityInfo = taskPriorityInfo; + } + + /** + * taskPriorityInfo convert taskPriority + * + * @param taskPriorityInfo taskPriorityInfo + * @return TaskPriority + */ + public static TaskPriority of(String taskPriorityInfo){ + String[] parts = taskPriorityInfo.split(UNDERLINE); + + if (parts.length != 5) { + throw new IllegalArgumentException(String.format("TaskPriority : %s illegal.", taskPriorityInfo)); + } + TaskPriority taskPriority = new TaskPriority( + Integer.parseInt(parts[0]), + Integer.parseInt(parts[1]), + Integer.parseInt(parts[2]), + Integer.parseInt(parts[3]), + parts[4]); + return taskPriority; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java index 4e4404ea1c..44ec68f89f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.log; import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.utils.IOUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.log.*; @@ -59,14 +60,14 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { */ final CommandType commandType = command.getType(); switch (commandType){ - case GET_LOG_BYTES_REQUEST: - GetLogBytesRequestCommand getLogRequest = FastJsonSerializer.deserialize( - command.getBody(), GetLogBytesRequestCommand.class); - byte[] bytes = getFileContentBytes(getLogRequest.getPath()); - GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes); - channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque())); - break; - case VIEW_WHOLE_LOG_REQUEST: + case GET_LOG_BYTES_REQUEST: + GetLogBytesRequestCommand getLogRequest = FastJsonSerializer.deserialize( + command.getBody(), GetLogBytesRequestCommand.class); + byte[] bytes = getFileContentBytes(getLogRequest.getPath()); + GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes); + channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque())); + break; + case VIEW_WHOLE_LOG_REQUEST: ViewLogRequestCommand viewLogRequest = FastJsonSerializer.deserialize( command.getBody(), ViewLogRequestCommand.class); String msg = readWholeFileContent(viewLogRequest.getPath()); @@ -116,16 +117,8 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { }catch (IOException e){ logger.error("get file bytes error",e); }finally { - if (bos != null){ - try { - bos.close(); - } catch (IOException ignore) {} - } - if (in != null){ - try { - in.close(); - } catch (IOException ignore) {} - } + IOUtils.closeQuietly(bos); + IOUtils.closeQuietly(in); } return new byte[0]; } @@ -146,7 +139,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { } catch (IOException e) { logger.error("read file error",e); } - return Collections.EMPTY_LIST; + return Collections.emptyList(); } /** @@ -168,11 +161,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { }catch (IOException e){ logger.error("read file error",e); }finally { - try { - if (br != null){ - br.close(); - } - } catch (IOException ignore) {} + IOUtils.closeQuietly(br); } return ""; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/MasterLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java similarity index 96% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/MasterLogFilter.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java index 7b5d53a032..575571d9ac 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/MasterLogFilter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/MasterLogFilter.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java similarity index 98% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverter.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java index 971ce7149c..16101c01ae 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverter.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.pattern.MessageConverter; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminator.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java index fd2b0766a8..029242f534 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminator.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.sift.AbstractDiscriminator; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogFilter.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java index ac258daf20..954341659b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/TaskLogFilter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogFilter.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/WorkerLogFilter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/WorkerLogFilter.java similarity index 96% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/WorkerLogFilter.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/WorkerLogFilter.java index 6240ed9a2e..1a75e594cf 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/log/WorkerLogFilter.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/WorkerLogFilter.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java index 6b5063cba4..d86374244f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -17,18 +17,19 @@ package org.apache.dolphinscheduler.server.master; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.server.master.config.MasterConfig; -import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread; +import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; +import org.apache.dolphinscheduler.server.master.registry.MasterRegistry; +import org.apache.dolphinscheduler.server.master.runner.MasterSchedulerService; +import org.apache.dolphinscheduler.server.worker.WorkerServer; import org.apache.dolphinscheduler.server.zk.ZKMasterClient; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.quartz.SchedulerException; import org.slf4j.Logger; @@ -37,17 +38,17 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.WebApplicationType; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.FilterType; import javax.annotation.PostConstruct; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; -/** - * master server - */ -@ComponentScan("org.apache.dolphinscheduler") -public class MasterServer implements IStoppable { + + + +@ComponentScan(value = "org.apache.dolphinscheduler", excludeFilters = { + @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = {WorkerServer.class}) +}) +public class MasterServer { /** * logger of MasterServer @@ -55,41 +56,40 @@ public class MasterServer implements IStoppable { private static final Logger logger = LoggerFactory.getLogger(MasterServer.class); /** - * zk master client + * master config */ @Autowired - private ZKMasterClient zkMasterClient = null; + private MasterConfig masterConfig; /** - * heartbeat thread pool + * spring application context + * only use it for initialization */ - private ScheduledExecutorService heartbeatMasterService; + @Autowired + private SpringApplicationContext springApplicationContext; /** - * process service + * netty remote server */ - @Autowired - protected ProcessService processService; + private NettyRemotingServer nettyRemotingServer; /** - * master exec thread pool + * master registry */ - private ExecutorService masterSchedulerService; + @Autowired + private MasterRegistry masterRegistry; /** - * master config + * zk master client */ @Autowired - private MasterConfig masterConfig; - + private ZKMasterClient zkMasterClient; /** - * spring application context - * only use it for initialization + * scheduler service */ @Autowired - private SpringApplicationContext springApplicationContext; - + private MasterSchedulerService masterSchedulerService; /** * master server startup @@ -100,7 +100,6 @@ public class MasterServer implements IStoppable { public static void main(String[] args) { Thread.currentThread().setName(Constants.THREAD_NAME_MASTER_SERVER); new SpringApplicationBuilder(MasterServer.class).web(WebApplicationType.NONE).run(args); - } /** @@ -108,36 +107,29 @@ public class MasterServer implements IStoppable { */ @PostConstruct public void run(){ - zkMasterClient.init(); - - masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread"); - - heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM); - // heartbeat thread implement - Runnable heartBeatThread = heartBeatThread(); + //init remoting server + NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(masterConfig.getListenPort()); + this.nettyRemotingServer = new NettyRemotingServer(serverConfig); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, new TaskResponseProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, new TaskAckProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_RESPONSE, new TaskKillResponseProcessor()); + this.nettyRemotingServer.start(); - zkMasterClient.setStoppable(this); + // register + this.masterRegistry.registry(); - // regular heartbeat - // delay 5 seconds, send heartbeat every 30 seconds - heartbeatMasterService. - scheduleAtFixedRate(heartBeatThread, 5, masterConfig.getMasterHeartbeatInterval(), TimeUnit.SECONDS); + // self tolerant + this.zkMasterClient.start(); - // master scheduler thread - MasterSchedulerThread masterSchedulerThread = new MasterSchedulerThread( - zkMasterClient, - processService, - masterConfig.getMasterExecThreads()); - - // submit master scheduler thread - masterSchedulerService.execute(masterSchedulerThread); + // + masterSchedulerService.start(); // start QuartzExecutors // what system should do if exception try { logger.info("start Quartz server..."); - ProcessScheduleJob.init(processService); QuartzExecutors.getInstance().start(); } catch (Exception e) { try { @@ -148,29 +140,23 @@ public class MasterServer implements IStoppable { logger.error("start Quartz failed", e); } - /** * register hooks, which are called before the process exits */ Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { - if (zkMasterClient.getActiveMasterNum() <= 1) { - zkMasterClient.getAlertDao().sendServerStopedAlert( - 1, OSUtils.getHost(), "Master-Server"); - } - stop("shutdownhook"); + close("shutdownHook"); } })); - } + } /** - * gracefully stop - * @param cause why stopping + * gracefully close + * @param cause close cause */ - @Override - public synchronized void stop(String cause) { + public void close(String cause) { try { //execute only once @@ -184,81 +170,27 @@ public class MasterServer implements IStoppable { Stopper.stop(); try { - //thread sleep 3 seconds for thread quitely stop + //thread sleep 3 seconds for thread quietly stop Thread.sleep(3000L); }catch (Exception e){ logger.warn("thread sleep exception ", e); } - try { - heartbeatMasterService.shutdownNow(); - }catch (Exception e){ - logger.warn("heartbeat service stopped exception"); - } - - logger.info("heartbeat service stopped"); - + // + this.masterSchedulerService.close(); + this.nettyRemotingServer.close(); + this.masterRegistry.unRegistry(); + this.zkMasterClient.close(); //close quartz try{ QuartzExecutors.getInstance().shutdown(); + logger.info("Quartz service stopped"); }catch (Exception e){ logger.warn("Quartz service stopped exception:{}",e.getMessage()); } - - logger.info("Quartz service stopped"); - - try { - ThreadPoolExecutors.getInstance().shutdown(); - }catch (Exception e){ - logger.warn("threadpool service stopped exception:{}",e.getMessage()); - } - - logger.info("threadpool service stopped"); - - try { - masterSchedulerService.shutdownNow(); - }catch (Exception e){ - logger.warn("master scheduler service stopped exception:{}",e.getMessage()); - } - - logger.info("master scheduler service stopped"); - - try { - zkMasterClient.close(); - }catch (Exception e){ - logger.warn("zookeeper service stopped exception:{}",e.getMessage()); - } - - logger.info("zookeeper service stopped"); - - } catch (Exception e) { logger.error("master server stop exception ", e); System.exit(-1); } } - - - /** - * heartbeat thread implement - * @return - */ - private Runnable heartBeatThread(){ - logger.info("start master heart beat thread..."); - Runnable heartBeatThread = new Runnable() { - @Override - public void run() { - if(Stopper.isRunning()) { - // send heartbeat to zk - if (StringUtils.isBlank(zkMasterClient.getMasterZNode())) { - logger.error("master send heartbeat to zk failed: can't find zookeeper path of master server"); - return; - } - - zkMasterClient.heartBeatForZk(zkMasterClient.getMasterZNode(), Constants.MASTER_PREFIX); - } - } - }; - return heartBeatThread; - } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java new file mode 100644 index 0000000000..031d8b2b94 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/TaskInstanceCacheManager.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.cache; + +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; + +/** + * task instance state manager + */ +public interface TaskInstanceCacheManager { + + /** + * get taskInstance by taskInstance id + * + * @param taskInstanceId taskInstanceId + * @return taskInstance + */ + TaskInstance getByTaskInstanceId(Integer taskInstanceId); + + /** + * cache taskInstance + * + * @param taskExecutionContext taskExecutionContext + */ + void cacheTaskInstance(TaskExecutionContext taskExecutionContext); + + /** + * cache taskInstance + * + * @param taskAckCommand taskAckCommand + */ + void cacheTaskInstance(TaskExecuteAckCommand taskAckCommand); + + /** + * cache taskInstance + * + * @param taskExecuteResponseCommand taskExecuteResponseCommand + */ + void cacheTaskInstance(TaskExecuteResponseCommand taskExecuteResponseCommand); + + /** + * remove taskInstance by taskInstanceId + * @param taskInstanceId taskInstanceId + */ + void removeByTaskInstanceId(Integer taskInstanceId); +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java new file mode 100644 index 0000000000..c149ac3335 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/cache/impl/TaskInstanceCacheManagerImpl.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.cache.impl; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * taskInstance state manager + */ +@Component +public class TaskInstanceCacheManagerImpl implements TaskInstanceCacheManager { + + /** + * taskInstance caceh + */ + private Map taskInstanceCache = new ConcurrentHashMap<>(); + + /** + * process service + */ + @Autowired + private ProcessService processService; + + + /** + * get taskInstance by taskInstance id + * + * @param taskInstanceId taskInstanceId + * @return taskInstance + */ + @Override + public TaskInstance getByTaskInstanceId(Integer taskInstanceId) { + TaskInstance taskInstance = taskInstanceCache.get(taskInstanceId); + if (taskInstance == null){ + taskInstance = processService.findTaskInstanceById(taskInstanceId); + taskInstanceCache.put(taskInstanceId,taskInstance); + } + return taskInstance; + } + + /** + * cache taskInstance + * + * @param taskExecutionContext taskExecutionContext + */ + @Override + public void cacheTaskInstance(TaskExecutionContext taskExecutionContext) { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(taskExecutionContext.getTaskInstanceId()); + taskInstance.setName(taskExecutionContext.getTaskName()); + taskInstance.setStartTime(taskExecutionContext.getStartTime()); + taskInstance.setTaskType(taskInstance.getTaskType()); + taskInstance.setExecutePath(taskInstance.getExecutePath()); + taskInstance.setTaskJson(taskInstance.getTaskJson()); + taskInstanceCache.put(taskExecutionContext.getTaskInstanceId(), taskInstance); + } + + /** + * cache taskInstance + * + * @param taskAckCommand taskAckCommand + */ + @Override + public void cacheTaskInstance(TaskExecuteAckCommand taskAckCommand) { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setState(ExecutionStatus.of(taskAckCommand.getStatus())); + taskInstance.setStartTime(taskAckCommand.getStartTime()); + taskInstance.setHost(taskAckCommand.getHost()); + taskInstance.setExecutePath(taskAckCommand.getExecutePath()); + taskInstance.setLogPath(taskAckCommand.getLogPath()); + taskInstanceCache.put(taskAckCommand.getTaskInstanceId(), taskInstance); + } + + /** + * cache taskInstance + * + * @param taskExecuteResponseCommand taskExecuteResponseCommand + */ + @Override + public void cacheTaskInstance(TaskExecuteResponseCommand taskExecuteResponseCommand) { + TaskInstance taskInstance = getByTaskInstanceId(taskExecuteResponseCommand.getTaskInstanceId()); + taskInstance.setState(ExecutionStatus.of(taskExecuteResponseCommand.getStatus())); + taskInstance.setEndTime(taskExecuteResponseCommand.getEndTime()); + } + + /** + * remove taskInstance by taskInstanceId + * @param taskInstanceId taskInstanceId + */ + @Override + public void removeByTaskInstanceId(Integer taskInstanceId) { + taskInstanceCache.remove(taskInstanceId); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java index efb7cff1a7..5b4b5daef1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java @@ -16,33 +16,58 @@ */ package org.apache.dolphinscheduler.server.master.config; +import org.apache.dolphinscheduler.common.Constants; import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.PropertySource; import org.springframework.stereotype.Component; @Component +@PropertySource(value = "master.properties") public class MasterConfig { - @Value("${master.exec.threads}") + @Value("${master.exec.threads:100}") private int masterExecThreads; - @Value("${master.exec.task.num}") + @Value("${master.exec.task.num:20}") private int masterExecTaskNum; - @Value("${master.heartbeat.interval}") + @Value("${master.heartbeat.interval:10}") private int masterHeartbeatInterval; - @Value("${master.task.commit.retryTimes}") + @Value("${master.task.commit.retryTimes:5}") private int masterTaskCommitRetryTimes; - @Value("${master.task.commit.interval}") + @Value("${master.task.commit.interval:1000}") private int masterTaskCommitInterval; - @Value("${master.max.cpuload.avg}") + @Value("${master.max.cpuload.avg:-1}") private double masterMaxCpuloadAvg; - @Value("${master.reserved.memory}") + @Value("${master.reserved.memory:0.3}") private double masterReservedMemory; + @Value("${master.host.selector:lowerWeight}") + private String hostSelector; + + @Value("${master.listen.port:5678}") + private int listenPort; + + public int getListenPort() { + return listenPort; + } + + public void setListenPort(int listenPort) { + this.listenPort = listenPort; + } + + public String getHostSelector() { + return hostSelector; + } + + public void setHostSelector(String hostSelector) { + this.hostSelector = hostSelector; + } + public int getMasterExecThreads() { return masterExecThreads; } @@ -84,6 +109,9 @@ public class MasterConfig { } public double getMasterMaxCpuloadAvg() { + if (masterMaxCpuloadAvg == -1){ + return Constants.DEFAULT_MASTER_CPU_LOAD; + } return masterMaxCpuloadAvg; } @@ -98,4 +126,4 @@ public class MasterConfig { public void setMasterReservedMemory(double masterReservedMemory) { this.masterReservedMemory = masterReservedMemory; } -} +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java new file mode 100644 index 0000000000..480d6657c2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java @@ -0,0 +1,365 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.consumer; + +import com.alibaba.fastjson.JSONObject; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.UdfType; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.datax.DataxParameters; +import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; +import org.apache.dolphinscheduler.server.entity.*; +import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; + +/** + * TaskUpdateQueue consumer + */ +@Component +public class TaskPriorityQueueConsumer extends Thread{ + + /** + * logger of TaskUpdateQueueConsumer + */ + private static final Logger logger = LoggerFactory.getLogger(TaskPriorityQueueConsumer.class); + + /** + * taskUpdateQueue + */ + @Autowired + private TaskPriorityQueue taskPriorityQueue; + + /** + * processService + */ + @Autowired + private ProcessService processService; + + /** + * executor dispatcher + */ + @Autowired + private ExecutorDispatcher dispatcher; + + @PostConstruct + public void init(){ + super.setName("TaskUpdateQueueConsumerThread"); + super.start(); + } + + @Override + public void run() { + while (Stopper.isRunning()){ + try { + // if not task , blocking here + String taskPriorityInfo = taskPriorityQueue.take(); + + TaskPriority taskPriority = TaskPriority.of(taskPriorityInfo); + + dispatch(taskPriority.getTaskId()); + }catch (Exception e){ + logger.error("dispatcher task error",e); + } + } + } + + + /** + * dispatch task + * + * @param taskInstanceId taskInstanceId + * @return result + */ + private Boolean dispatch(int taskInstanceId){ + TaskExecutionContext context = getTaskExecutionContext(taskInstanceId); + ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER, context.getWorkerGroup()); + Boolean result = false; + while (Stopper.isRunning()){ + try { + result = dispatcher.dispatch(executionContext); + } catch (ExecuteException e) { + logger.error("dispatch error",e); + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + + if (result){ + break; + } + } + return result; + } + + /** + * get TaskExecutionContext + * @param taskInstanceId taskInstanceId + * @return TaskExecutionContext + */ + protected TaskExecutionContext getTaskExecutionContext(int taskInstanceId){ + TaskInstance taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstanceId); + + // task type + TaskType taskType = TaskType.valueOf(taskInstance.getTaskType()); + + // task node + TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class); + + Integer userId = taskInstance.getProcessDefine() == null ? 0 : taskInstance.getProcessDefine().getUserId(); + Tenant tenant = processService.getTenantForProcess(taskInstance.getProcessInstance().getTenantId(), userId); + + // verify tenant is null + if (verifyTenantIsNull(tenant, taskInstance)) { + processService.changeTaskState(ExecutionStatus.FAILURE, + taskInstance.getStartTime(), + taskInstance.getHost(), + null, + null, + taskInstance.getId()); + return null; + } + // set queue for process instance, user-specified queue takes precedence over tenant queue + String userQueue = processService.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); + taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue); + taskInstance.getProcessInstance().setTenantCode(tenant.getTenantCode()); + taskInstance.setExecutePath(getExecLocalPath(taskInstance)); + taskInstance.setResources(getResourceFullNames(taskNode)); + + + SQLTaskExecutionContext sqlTaskExecutionContext = new SQLTaskExecutionContext(); + DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); + ProcedureTaskExecutionContext procedureTaskExecutionContext = new ProcedureTaskExecutionContext(); + SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); + + + // SQL task + if (taskType == TaskType.SQL){ + setSQLTaskRelation(sqlTaskExecutionContext, taskNode); + + } + + // DATAX task + if (taskType == TaskType.DATAX){ + setDataxTaskRelation(dataxTaskExecutionContext, taskNode); + } + + + // procedure task + if (taskType == TaskType.PROCEDURE){ + setProcedureTaskRelation(procedureTaskExecutionContext, taskNode); + } + + if (taskType == TaskType.SQOOP){ + setSqoopTaskRelation(sqoopTaskExecutionContext,taskNode); + } + + + return TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance()) + .buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine()) + .buildSQLTaskRelatedInfo(sqlTaskExecutionContext) + .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) + .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) + .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) + .create(); + } + + /** + * set procedure task relation + * @param procedureTaskExecutionContext procedureTaskExecutionContext + * @param taskNode taskNode + */ + private void setProcedureTaskRelation(ProcedureTaskExecutionContext procedureTaskExecutionContext, TaskNode taskNode) { + ProcedureParameters procedureParameters = JSONObject.parseObject(taskNode.getParams(), ProcedureParameters.class); + int datasourceId = procedureParameters.getDatasource(); + DataSource datasource = processService.findDataSourceById(datasourceId); + procedureTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); + } + + /** + * set datax task relation + * @param dataxTaskExecutionContext dataxTaskExecutionContext + * @param taskNode taskNode + */ + private void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) { + DataxParameters dataxParameters = JSONObject.parseObject(taskNode.getParams(), DataxParameters.class); + + DataSource dataSource = processService.findDataSourceById(dataxParameters.getDataSource()); + DataSource dataTarget = processService.findDataSourceById(dataxParameters.getDataTarget()); + + + if (dataSource != null){ + dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource()); + dataxTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); + dataxTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); + } + + if (dataTarget != null){ + dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget()); + dataxTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); + dataxTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); + } + } + + + /** + * set datax task relation + * @param sqoopTaskExecutionContext sqoopTaskExecutionContext + * @param taskNode taskNode + */ + private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) { + SqoopParameters sqoopParameters = JSONObject.parseObject(taskNode.getParams(), SqoopParameters.class); + + SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); + TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); + + DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); + DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); + + if (dataSource != null){ + sqoopTaskExecutionContext.setDataSourceId(dataSource.getId()); + sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); + sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); + } + + if (dataTarget != null){ + sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId()); + sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); + sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); + } + } + + /** + * set SQL task relation + * @param sqlTaskExecutionContext sqlTaskExecutionContext + * @param taskNode taskNode + */ + private void setSQLTaskRelation(SQLTaskExecutionContext sqlTaskExecutionContext, TaskNode taskNode) { + SqlParameters sqlParameters = JSONObject.parseObject(taskNode.getParams(), SqlParameters.class); + int datasourceId = sqlParameters.getDatasource(); + DataSource datasource = processService.findDataSourceById(datasourceId); + sqlTaskExecutionContext.setConnectionParams(datasource.getConnectionParams()); + + // whether udf type + boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) + && StringUtils.isNotEmpty(sqlParameters.getUdfs()); + + if (udfTypeFlag){ + String[] udfFunIds = sqlParameters.getUdfs().split(","); + int[] udfFunIdsArray = new int[udfFunIds.length]; + for(int i = 0 ; i < udfFunIds.length;i++){ + udfFunIdsArray[i]=Integer.parseInt(udfFunIds[i]); + } + + List udfFuncList = processService.queryUdfFunListByids(udfFunIdsArray); + sqlTaskExecutionContext.setUdfFuncList(udfFuncList); + } + } + + /** + * get execute local path + * + * @return execute local path + */ + private String getExecLocalPath(TaskInstance taskInstance){ + return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(), + taskInstance.getProcessDefine().getId(), + taskInstance.getProcessInstance().getId(), + taskInstance.getId()); + } + + + /** + * whehter tenant is null + * @param tenant tenant + * @param taskInstance taskInstance + * @return result + */ + private boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { + if(tenant == null){ + logger.error("tenant not exists,process instance id : {},task instance id : {}", + taskInstance.getProcessInstance().getId(), + taskInstance.getId()); + return true; + } + return false; + } + + /** + * get resource full name list + */ + private List getResourceFullNames(TaskNode taskNode) { + List resourceFullNameList = new ArrayList<>(); + AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); + + if (baseParam != null) { + List projectResourceFiles = baseParam.getResourceFilesList(); + if (projectResourceFiles != null) { + + // filter the resources that the resource id equals 0 + Set oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == 0).collect(Collectors.toSet()); + if (CollectionUtils.isNotEmpty(oldVersionResources)) { + resourceFullNameList.addAll(oldVersionResources.stream().map(resource -> resource.getRes()).collect(Collectors.toSet())); + } + + // get the resource id in order to get the resource names in batch + Stream resourceIdStream = projectResourceFiles.stream().map(resourceInfo -> resourceInfo.getId()); + Set resourceIdsSet = resourceIdStream.collect(Collectors.toSet()); + + if (CollectionUtils.isNotEmpty(resourceIdsSet)) { + Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]); + + List resources = processService.listResourceByIds(resourceIds); + resourceFullNameList.addAll(resources.stream() + .map(resourceInfo -> resourceInfo.getFullName()) + .collect(Collectors.toList())); + } + } + } + + return resourceFullNameList; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java new file mode 100644 index 0000000000..605297aadf --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch; + + +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; +import org.apache.dolphinscheduler.server.master.dispatch.executor.ExecutorManager; +import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; +import org.apache.dolphinscheduler.server.master.dispatch.host.HostManager; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.concurrent.ConcurrentHashMap; + +/** + * executor dispatcher + */ +@Service +public class ExecutorDispatcher implements InitializingBean { + + /** + * netty executor manager + */ + @Autowired + private NettyExecutorManager nettyExecutorManager; + + /** + * round robin host manager + */ + @Autowired + private HostManager hostManager; + + /** + * executor manager + */ + private final ConcurrentHashMap> executorManagers; + + /** + * constructor + */ + public ExecutorDispatcher(){ + this.executorManagers = new ConcurrentHashMap<>(); + } + + /** + * task dispatch + * + * @param context context + * @return result + * @throws ExecuteException if error throws ExecuteException + */ + public Boolean dispatch(final ExecutionContext context) throws ExecuteException { + /** + * get executor manager + */ + ExecutorManager executorManager = this.executorManagers.get(context.getExecutorType()); + if(executorManager == null){ + throw new ExecuteException("no ExecutorManager for type : " + context.getExecutorType()); + } + + /** + * host select + */ + Host host = hostManager.select(context); + if (StringUtils.isEmpty(host.getAddress())) { + throw new ExecuteException(String.format("fail to execute : %s due to no worker ", context.getCommand())); + } + context.setHost(host); + executorManager.beforeExecute(context); + try { + /** + * task execute + */ + return executorManager.execute(context); + } finally { + executorManager.afterExecute(context); + } + } + + /** + * register init + * @throws Exception if error throws Exception + */ + @Override + public void afterPropertiesSet() throws Exception { + register(ExecutorType.WORKER, nettyExecutorManager); + register(ExecutorType.CLIENT, nettyExecutorManager); + } + + /** + * register + * @param type executor type + * @param executorManager executorManager + */ + public void register(ExecutorType type, ExecutorManager executorManager){ + executorManagers.put(type, executorManager); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java new file mode 100644 index 0000000000..fd673ca678 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/context/ExecutionContext.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.context; + + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; + +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; + +/** + * execution context + */ +public class ExecutionContext { + + /** + * host + */ + private Host host; + + /** + * command + */ + private final Command command; + + /** + * executor type : worker or client + */ + private final ExecutorType executorType; + + /** + * worker group + */ + private String workerGroup; + + + public ExecutionContext(Command command, ExecutorType executorType) { + this(command, executorType, DEFAULT_WORKER_GROUP); + } + + public ExecutionContext(Command command, ExecutorType executorType, String workerGroup) { + this.command = command; + this.executorType = executorType; + this.workerGroup = workerGroup; + } + + public Command getCommand() { + return command; + } + + public ExecutorType getExecutorType() { + return executorType; + } + + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; + } + + + public String getWorkerGroup(){ + return this.workerGroup; + } + + public Host getHost() { + return host; + } + + public void setHost(Host host) { + this.host = host; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/enums/ExecutorType.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/enums/ExecutorType.java new file mode 100644 index 0000000000..03be62e701 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/enums/ExecutorType.java @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.enums; + +/** + * executor type + */ +public enum ExecutorType { + + WORKER, + + CLIENT; +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/exceptions/ExecuteException.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/exceptions/ExecuteException.java new file mode 100644 index 0000000000..8a441b9de1 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/exceptions/ExecuteException.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.exceptions; + +/** + * execute exception + */ +public class ExecuteException extends Exception{ + + public ExecuteException() { + super(); + } + + /** + * Constructs a new exception with the specified detail message. The + * cause is not initialized, and may subsequently be initialized by + * a call to {@link #initCause}. + * + * @param message the detail message. The detail message is saved for + * later retrieval by the {@link #getMessage()} method. + */ + public ExecuteException(String message) { + super(message); + } + + /** + * Constructs a new exception with the specified detail message and + * cause.

Note that the detail message associated with + * {@code cause} is not automatically incorporated in + * this exception's detail message. + * + * @param message the detail message (which is saved for later retrieval + * by the {@link #getMessage()} method). + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). (A null value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public ExecuteException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Constructs a new exception with the specified cause and a detail + * message of (cause==null ? null : cause.toString()) (which + * typically contains the class and detail message of cause). + * This constructor is useful for exceptions that are little more than + * wrappers for other throwables (for example, {@link + * java.security.PrivilegedActionException}). + * + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). (A null value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public ExecuteException(Throwable cause) { + super(cause); + } + + /** + * Constructs a new exception with the specified detail message, + * cause, suppression enabled or disabled, and writable stack + * trace enabled or disabled. + * + * @param message the detail message. + * @param cause the cause. (A {@code null} value is permitted, + * and indicates that the cause is nonexistent or unknown.) + * @param enableSuppression whether or not suppression is enabled + * or disabled + * @param writableStackTrace whether or not the stack trace should + * be writable + * @since 1.7 + */ + protected ExecuteException(String message, Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/AbstractExecutorManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/AbstractExecutorManager.java new file mode 100644 index 0000000000..d6a7720db6 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/AbstractExecutorManager.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.executor; + +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; + +/** + * abstract executor manager + */ +public abstract class AbstractExecutorManager implements ExecutorManager{ + + /** + * before execute , add time monitor , timeout + * + * @param context context + * @throws ExecuteException if error throws ExecuteException + */ + @Override + public void beforeExecute(ExecutionContext context) throws ExecuteException { + } + + /** + * after execute , add dispatch monitor + * @param context context + * @throws ExecuteException if error throws ExecuteException + */ + @Override + public void afterExecute(ExecutionContext context) throws ExecuteException { + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/ExecutorManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/ExecutorManager.java new file mode 100644 index 0000000000..1e7754082c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/ExecutorManager.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.executor; + +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; + +/** + * executor manager + */ +public interface ExecutorManager { + + /** + * before execute + * + * @param executeContext executeContext + * @throws ExecuteException if error throws ExecuteException + */ + void beforeExecute(ExecutionContext executeContext) throws ExecuteException; + + /** + * execute task + * @param context context + * @return T + * @throws ExecuteException if error throws ExecuteException + */ + T execute(ExecutionContext context) throws ExecuteException; + + /** + * execute task directly without retry + * @param context context + * @throws ExecuteException if error throws ExecuteException + */ + void executeDirectly(ExecutionContext context) throws ExecuteException; + + /** + * after execute + * @param context context + * @throws ExecuteException if error throws ExecuteException + */ + void afterExecute(ExecutionContext context) throws ExecuteException; +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java new file mode 100644 index 0000000000..7ded3b0056 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.executor; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; +import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * netty executor manager + */ +@Service +public class NettyExecutorManager extends AbstractExecutorManager{ + + private final Logger logger = LoggerFactory.getLogger(NettyExecutorManager.class); + + /** + * zookeeper node manager + */ + @Autowired + private ZookeeperNodeManager zookeeperNodeManager; + + /** + * netty remote client + */ + private final NettyRemotingClient nettyRemotingClient; + + /** + * constructor + */ + public NettyExecutorManager(){ + final NettyClientConfig clientConfig = new NettyClientConfig(); + this.nettyRemotingClient = new NettyRemotingClient(clientConfig); + } + + @PostConstruct + public void init(){ + /** + * register EXECUTE_TASK_RESPONSE command type TaskResponseProcessor + * register EXECUTE_TASK_ACK command type TaskAckProcessor + */ + this.nettyRemotingClient.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, new TaskResponseProcessor()); + this.nettyRemotingClient.registerProcessor(CommandType.TASK_EXECUTE_ACK, new TaskAckProcessor()); + this.nettyRemotingClient.registerProcessor(CommandType.TASK_KILL_RESPONSE, new TaskKillResponseProcessor()); + } + + /** + * execute logic + * @param context context + * @return result + * @throws ExecuteException if error throws ExecuteException + */ + @Override + public Boolean execute(ExecutionContext context) throws ExecuteException { + + /** + * all nodes + */ + Set allNodes = getAllNodes(context); + + /** + * fail nodes + */ + Set failNodeSet = new HashSet<>(); + + /** + * build command accord executeContext + */ + Command command = context.getCommand(); + + /** + * execute task host + */ + Host host = context.getHost(); + boolean success = false; + while (!success) { + try { + doExecute(host,command); + success = true; + context.setHost(host); + } catch (ExecuteException ex) { + logger.error(String.format("execute command : %s error", command), ex); + try { + failNodeSet.add(host.getAddress()); + Set tmpAllIps = new HashSet<>(allNodes); + Collection remained = CollectionUtils.subtract(tmpAllIps, failNodeSet); + if (remained != null && remained.size() > 0) { + host = Host.of(remained.iterator().next()); + logger.error("retry execute command : {} host : {}", command, host); + } else { + throw new ExecuteException("fail after try all nodes"); + } + } catch (Throwable t) { + throw new ExecuteException("fail after try all nodes"); + } + } + } + + return success; + } + + @Override + public void executeDirectly(ExecutionContext context) throws ExecuteException { + Host host = context.getHost(); + doExecute(host, context.getCommand()); + } + + /** + * execute logic + * @param host host + * @param command command + * @throws ExecuteException if error throws ExecuteException + */ + private void doExecute(final Host host, final Command command) throws ExecuteException { + /** + * retry count,default retry 3 + */ + int retryCount = 3; + boolean success = false; + do { + try { + nettyRemotingClient.send(host, command); + success = true; + } catch (Exception ex) { + logger.error(String.format("send command : %s to %s error", command, host), ex); + retryCount--; + try { + Thread.sleep(100); + } catch (InterruptedException ignore) {} + } + } while (retryCount >= 0 && !success); + + if (!success) { + throw new ExecuteException(String.format("send command : %s to %s error", command, host)); + } + } + + /** + * get all nodes + * @param context context + * @return nodes + */ + private Set getAllNodes(ExecutionContext context){ + Set nodes = Collections.EMPTY_SET; + /** + * executor type + */ + ExecutorType executorType = context.getExecutorType(); + switch (executorType){ + case WORKER: + nodes = zookeeperNodeManager.getWorkerGroupNodes(context.getWorkerGroup()); + break; + case CLIENT: + break; + default: + throw new IllegalArgumentException("invalid executor type : " + executorType); + + } + return nodes; + } + + public NettyRemotingClient getNettyRemotingClient() { + return nettyRemotingClient; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java new file mode 100644 index 0000000000..58006bf7f7 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/CommonHostManager.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + + +/** + * round robin host manager + */ +public abstract class CommonHostManager implements HostManager { + + private final Logger logger = LoggerFactory.getLogger(CommonHostManager.class); + + /** + * zookeeperNodeManager + */ + @Autowired + protected ZookeeperNodeManager zookeeperNodeManager; + + /** + * select host + * @param context context + * @return host + */ + @Override + public Host select(ExecutionContext context){ + Host host = new Host(); + Collection nodes = null; + /** + * executor type + */ + ExecutorType executorType = context.getExecutorType(); + switch (executorType){ + case WORKER: + nodes = zookeeperNodeManager.getWorkerGroupNodes(context.getWorkerGroup()); + break; + case CLIENT: + break; + default: + throw new IllegalArgumentException("invalid executorType : " + executorType); + + } + if(CollectionUtils.isEmpty(nodes)){ + return host; + } + List candidateHosts = new ArrayList<>(nodes.size()); + nodes.stream().forEach(node -> candidateHosts.add(Host.of(node))); + + return select(candidateHosts); + } + + protected abstract Host select(Collection nodes); + + public void setZookeeperNodeManager(ZookeeperNodeManager zookeeperNodeManager) { + this.zookeeperNodeManager = zookeeperNodeManager; + } + + public ZookeeperNodeManager getZookeeperNodeManager() { + return zookeeperNodeManager; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManager.java new file mode 100644 index 0000000000..ec65cabb0b --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManager.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + + +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; + +/** + * host manager + */ +public interface HostManager { + + /** + * select host + * @param context context + * @return host + */ + Host select(ExecutionContext context); + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManagerConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManagerConfig.java new file mode 100644 index 0000000000..458a1ee036 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/HostManagerConfig.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostSelector; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * host manager config + */ +@Configuration +public class HostManagerConfig { + + private AutowireCapableBeanFactory beanFactory; + + @Autowired + private MasterConfig masterConfig; + + @Autowired + public HostManagerConfig(AutowireCapableBeanFactory beanFactory) { + this.beanFactory = beanFactory; + } + + @Bean + public HostManager hostManager() { + String hostSelector = masterConfig.getHostSelector(); + HostSelector selector = HostSelector.of(hostSelector); + HostManager hostManager; + switch (selector){ + case RANDOM: + hostManager = new RandomHostManager(); + break; + case ROUNDROBIN: + hostManager = new RoundRobinHostManager(); + break; + case LOWERWEIGHT: + hostManager = new LowerWeightHostManager(); + break; + default: + throw new IllegalArgumentException("unSupport selector " + hostSelector); + } + beanFactory.autowireBean(hostManager); + return hostManager; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java new file mode 100644 index 0000000000..99cae6954c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java @@ -0,0 +1,171 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWeight; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.LowerWeightRoundRobin; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import static org.apache.dolphinscheduler.common.Constants.COMMA; + + +/** + * round robin host manager + */ +public class LowerWeightHostManager extends CommonHostManager { + + private final Logger logger = LoggerFactory.getLogger(LowerWeightHostManager.class); + + /** + * zookeeper registry center + */ + @Autowired + private ZookeeperRegistryCenter registryCenter; + + /** + * round robin host manager + */ + private RoundRobinHostManager roundRobinHostManager; + + /** + * selector + */ + private LowerWeightRoundRobin selector; + + /** + * worker host weights + */ + private ConcurrentHashMap> workerHostWeights; + + /** + * worker group host lock + */ + private Lock lock; + + /** + * executor service + */ + private ScheduledExecutorService executorService; + + @PostConstruct + public void init(){ + this.selector = new LowerWeightRoundRobin(); + this.workerHostWeights = new ConcurrentHashMap<>(); + this.lock = new ReentrantLock(); + this.executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("LowerWeightHostManagerExecutor")); + this.executorService.scheduleWithFixedDelay(new RefreshResourceTask(),35, 40, TimeUnit.SECONDS); + this.roundRobinHostManager = new RoundRobinHostManager(); + this.roundRobinHostManager.setZookeeperNodeManager(getZookeeperNodeManager()); + } + + @PreDestroy + public void close(){ + this.executorService.shutdownNow(); + } + + /** + * select host + * @param context context + * @return host + */ + @Override + public Host select(ExecutionContext context){ + Set workerHostWeights = getWorkerHostWeights(context.getWorkerGroup()); + if(CollectionUtils.isNotEmpty(workerHostWeights)){ + return selector.select(workerHostWeights).getHost(); + } else{ + return roundRobinHostManager.select(context); + } + } + + @Override + public Host select(Collection nodes) { + throw new UnsupportedOperationException("not support"); + } + + private void syncWorkerHostWeight(Map> workerHostWeights){ + lock.lock(); + try { + workerHostWeights.clear(); + workerHostWeights.putAll(workerHostWeights); + } finally { + lock.unlock(); + } + } + + private Set getWorkerHostWeights(String workerGroup){ + lock.lock(); + try { + return workerHostWeights.get(workerGroup); + } finally { + lock.unlock(); + } + } + + class RefreshResourceTask implements Runnable{ + + @Override + public void run() { + try { + Map> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes(); + Set>> entries = workerGroupNodes.entrySet(); + Map> workerHostWeights = new HashMap<>(); + for(Map.Entry> entry : entries){ + String workerGroup = entry.getKey(); + Set nodes = entry.getValue(); + String workerGroupPath = registryCenter.getWorkerGroupPath(workerGroup); + Set hostWeights = new HashSet<>(nodes.size()); + for(String node : nodes){ + String heartbeat = registryCenter.getZookeeperCachedOperator().get(workerGroupPath + "/" + node); + if(StringUtils.isNotEmpty(heartbeat) && heartbeat.contains(COMMA) && heartbeat.split(COMMA).length == 5){ + String[] parts = heartbeat.split(COMMA); + double cpu = Double.parseDouble(parts[0]); + double memory = Double.parseDouble(parts[1]); + double loadAverage = Double.parseDouble(parts[2]); + HostWeight hostWeight = new HostWeight(Host.of(node), cpu, memory, loadAverage); + hostWeights.add(hostWeight); + } + } + workerHostWeights.put(workerGroup, hostWeights); + } + syncWorkerHostWeight(workerHostWeights); + } catch (Throwable ex){ + logger.error("RefreshResourceTask error", ex); + } + } + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java new file mode 100644 index 0000000000..ef2b6fd22f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RandomHostManager.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.RandomSelector; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.Selector; + +import java.util.Collection; + + +/** + * round robin host manager + */ +public class RandomHostManager extends CommonHostManager { + + /** + * selector + */ + private final Selector selector; + + /** + * set round robin + */ + public RandomHostManager(){ + this.selector = new RandomSelector<>(); + } + + @Override + public Host select(Collection nodes) { + return selector.select(nodes); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java new file mode 100644 index 0000000000..e9fef49ecf --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManager.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host; + +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.RoundRobinSelector; +import org.apache.dolphinscheduler.server.master.dispatch.host.assign.Selector; + +import java.util.Collection; + + +/** + * round robin host manager + */ +public class RoundRobinHostManager extends CommonHostManager { + + /** + * selector + */ + private final Selector selector; + + /** + * set round robin + */ + public RoundRobinHostManager(){ + this.selector = new RoundRobinSelector<>(); + } + + @Override + public Host select(Collection nodes) { + return selector.select(nodes); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostSelector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostSelector.java new file mode 100644 index 0000000000..145393e1f0 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostSelector.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +/** + * host selector + */ +public enum HostSelector { + + RANDOM, + + ROUNDROBIN, + + LOWERWEIGHT; + + public static HostSelector of(String selector){ + for(HostSelector hs : values()){ + if(hs.name().equalsIgnoreCase(selector)){ + return hs; + } + } + throw new IllegalArgumentException("invalid host selector : " + selector); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java new file mode 100644 index 0000000000..ebceea7b13 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/HostWeight.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import org.apache.dolphinscheduler.remote.utils.Host; + +/** + * host weight + */ +public class HostWeight { + + private final int CPU_FACTOR = 10; + + private final int MEMORY_FACTOR = 20; + + private final int LOAD_AVERAGE_FACTOR = 70; + + private final Host host; + + private final int weight; + + private int currentWeight; + + public HostWeight(Host host, double cpu, double memory, double loadAverage) { + this.weight = calculateWeight(cpu, memory, loadAverage); + this.host = host ; + this.currentWeight = weight ; + } + + public int getCurrentWeight() { + return currentWeight; + } + + public int getWeight() { + return weight; + } + + public void setCurrentWeight(int currentWeight) { + this.currentWeight = currentWeight; + } + + public Host getHost() { + return host; + } + + @Override + public String toString() { + return "HostWeight{" + + "host=" + host + + ", weight=" + weight + + ", currentWeight=" + currentWeight + + '}'; + } + + private int calculateWeight(double cpu, double memory, double loadAverage){ + return (int)(cpu * CPU_FACTOR + memory * MEMORY_FACTOR + loadAverage * LOAD_AVERAGE_FACTOR); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java new file mode 100644 index 0000000000..bdf0f412f4 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobin.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import java.util.Collection; + +/** + * lower weight round robin + */ +public class LowerWeightRoundRobin implements Selector{ + + /** + * select + * @param sources sources + * @return HostWeight + */ + @Override + public HostWeight select(Collection sources){ + int totalWeight = 0; + int lowWeight = 0; + HostWeight lowerNode = null; + for (HostWeight hostWeight : sources) { + totalWeight += hostWeight.getWeight(); + hostWeight.setCurrentWeight(hostWeight.getCurrentWeight() + hostWeight.getWeight()); + if (lowerNode == null || lowWeight > hostWeight.getCurrentWeight() ) { + lowerNode = hostWeight; + lowWeight = hostWeight.getCurrentWeight(); + } + } + lowerNode.setCurrentWeight(lowerNode.getCurrentWeight() + totalWeight); + return lowerNode; + + } +} + + + diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/BaseTaskQueueTest.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java similarity index 50% rename from dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/BaseTaskQueueTest.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java index 17e2ae4056..be52fcb1cf 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/BaseTaskQueueTest.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelector.java @@ -14,36 +14,41 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.service.queue; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; -import org.apache.dolphinscheduler.service.zk.ZKServer; -import org.junit.*; +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import java.util.Collection; +import java.util.Random; /** - * base task queue test for only start zk server once + * random selector + * @param T */ -@Ignore -public class BaseTaskQueueTest { +public class RandomSelector implements Selector { - protected static ITaskQueue tasksQueue = null; + private final Random random = new Random(); - @BeforeClass - public static void setup() { - ZKServer.start(); - tasksQueue = TaskQueueFactory.getTaskQueueInstance(); - //clear all data - tasksQueue.delete(); - } + @Override + public T select(final Collection source) { - @AfterClass - public static void tearDown() { - tasksQueue.delete(); - ZKServer.stop(); - } - @Test - public void tasksQueueNotNull(){ - Assert.assertNotNull(tasksQueue); + if (source == null || source.size() == 0) { + throw new IllegalArgumentException("Empty source."); + } + + /** + * if only one , return directly + */ + if (source.size() == 1) { + return (T) source.toArray()[0]; + } + + int size = source.size(); + /** + * random select + */ + int randomIndex = random.nextInt(size); + + return (T) source.toArray()[randomIndex]; } + } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java new file mode 100644 index 0000000000..1eb30c8d5a --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelector.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import org.springframework.stereotype.Service; + +import java.util.Collection; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * round robin selector + * @param T + */ +@Service +public class RoundRobinSelector implements Selector { + + private final AtomicInteger index = new AtomicInteger(0); + + @Override + public T select(Collection source) { + if (source == null || source.size() == 0) { + throw new IllegalArgumentException("Empty source."); + } + + /** + * if only one , return directly + */ + if (source.size() == 1) { + return (T)source.toArray()[0]; + } + + int size = source.size(); + /** + * round robin + */ + return (T) source.toArray()[index.getAndIncrement() % size]; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/Selector.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/Selector.java new file mode 100644 index 0000000000..08649819a0 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/Selector.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import java.util.Collection; + + +/** + * selector + * @param T + */ +public interface Selector { + + /** + * select + * @param source source + * @return T + */ + T select(Collection source); +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/future/TaskFuture.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/future/TaskFuture.java new file mode 100644 index 0000000000..918ed6764b --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/future/TaskFuture.java @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.future; + + +import org.apache.dolphinscheduler.remote.command.Command; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +/** + * task fulture + */ +public class TaskFuture { + + private final static Logger LOGGER = LoggerFactory.getLogger(TaskFuture.class); + + private final static ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); + + /** + * request unique identification + */ + private final long opaque; + + /** + * timeout + */ + private final long timeoutMillis; + + private final CountDownLatch latch = new CountDownLatch(1); + + private final long beginTimestamp = System.currentTimeMillis(); + + /** + * response command + */ + private volatile Command responseCommand; + + private volatile boolean sendOk = true; + + private volatile Throwable cause; + + public TaskFuture(long opaque, long timeoutMillis) { + this.opaque = opaque; + this.timeoutMillis = timeoutMillis; + FUTURE_TABLE.put(opaque, this); + } + + /** + * wait for response + * @return command + * @throws InterruptedException if error throws InterruptedException + */ + public Command waitResponse() throws InterruptedException { + this.latch.await(timeoutMillis, TimeUnit.MILLISECONDS); + return this.responseCommand; + } + + /** + * put response + * + * @param responseCommand responseCommand + */ + public void putResponse(final Command responseCommand) { + this.responseCommand = responseCommand; + this.latch.countDown(); + FUTURE_TABLE.remove(opaque); + } + + /** + * whether timeout + * @return timeout + */ + public boolean isTimeout() { + long diff = System.currentTimeMillis() - this.beginTimestamp; + return diff > this.timeoutMillis; + } + + public static void notify(final Command responseCommand){ + TaskFuture taskFuture = FUTURE_TABLE.remove(responseCommand.getOpaque()); + if(taskFuture != null){ + taskFuture.putResponse(responseCommand); + } + } + + + public boolean isSendOK() { + return sendOk; + } + + public void setSendOk(boolean sendOk) { + this.sendOk = sendOk; + } + + public void setCause(Throwable cause) { + this.cause = cause; + } + + public Throwable getCause() { + return cause; + } + + public long getOpaque() { + return opaque; + } + + public long getTimeoutMillis() { + return timeoutMillis; + } + + public long getBeginTimestamp() { + return beginTimestamp; + } + + public Command getResponseCommand() { + return responseCommand; + } + + public void setResponseCommand(Command responseCommand) { + this.responseCommand = responseCommand; + } + + + /** + * scan future table + */ + public static void scanFutureTable(){ + final List futureList = new LinkedList<>(); + Iterator> it = FUTURE_TABLE.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry next = it.next(); + TaskFuture future = next.getValue(); + if ((future.getBeginTimestamp() + future.getTimeoutMillis() + 1000) <= System.currentTimeMillis()) { + futureList.add(future); + it.remove(); + LOGGER.warn("remove timeout request : {}", future); + } + } + } + + @Override + public String toString() { + return "TaskFuture{" + + "opaque=" + opaque + + ", timeoutMillis=" + timeoutMillis + + ", latch=" + latch + + ", beginTimestamp=" + beginTimestamp + + ", responseCommand=" + responseCommand + + ", sendOk=" + sendOk + + ", cause=" + cause + + '}'; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java new file mode 100644 index 0000000000..3460248dfb --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.ChannelUtils; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * task ack processor + */ +public class TaskAckProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskAckProcessor.class); + + /** + * process service + */ + private final TaskResponseService taskResponseService; + + /** + * taskInstance cache manager + */ + private final TaskInstanceCacheManager taskInstanceCacheManager; + + + /** + * processService + */ + private ProcessService processService; + + public TaskAckProcessor(){ + this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); + this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); + } + + /** + * task ack process + * @param channel channel channel + * @param command command TaskExecuteAckCommand + */ + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_EXECUTE_ACK == command.getType(), String.format("invalid command type : %s", command.getType())); + TaskExecuteAckCommand taskAckCommand = FastJsonSerializer.deserialize(command.getBody(), TaskExecuteAckCommand.class); + logger.info("taskAckCommand : {}", taskAckCommand); + + taskInstanceCacheManager.cacheTaskInstance(taskAckCommand); + + String workerAddress = ChannelUtils.toAddress(channel).getAddress(); + + ExecutionStatus ackStatus = ExecutionStatus.of(taskAckCommand.getStatus()); + + // TaskResponseEvent + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newAck(ackStatus, + taskAckCommand.getStartTime(), + workerAddress, + taskAckCommand.getExecutePath(), + taskAckCommand.getLogPath(), + taskAckCommand.getTaskInstanceId()); + + taskResponseService.addResponse(taskResponseEvent); + + while (Stopper.isRunning()){ + TaskInstance taskInstance = processService.findTaskInstanceById(taskAckCommand.getTaskInstanceId()); + + if (taskInstance != null && ackStatus.typeIsRunning()){ + break; + } + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java new file mode 100644 index 0000000000..3e8cdfdadc --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * task response processor + */ +public class TaskKillResponseProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskKillResponseProcessor.class); + + /** + * task final result response + * need master process , state persistence + * + * @param channel channel + * @param command command + */ + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_KILL_RESPONSE == command.getType(), String.format("invalid command type : %s", command.getType())); + + TaskKillResponseCommand responseCommand = FastJsonSerializer.deserialize(command.getBody(), TaskKillResponseCommand.class); + logger.info("received task kill response command : {}", responseCommand); + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java new file mode 100644 index 0000000000..721b146d86 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * task response processor + */ +public class TaskResponseProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskResponseProcessor.class); + + /** + * process service + */ + private final TaskResponseService taskResponseService; + + /** + * taskInstance cache manager + */ + private final TaskInstanceCacheManager taskInstanceCacheManager; + + /** + * processService + */ + private ProcessService processService; + + public TaskResponseProcessor(){ + this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); + this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); + } + + /** + * task final result response + * need master process , state persistence + * + * @param channel channel + * @param command command + */ + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_EXECUTE_RESPONSE == command.getType(), String.format("invalid command type : %s", command.getType())); + + TaskExecuteResponseCommand responseCommand = FastJsonSerializer.deserialize(command.getBody(), TaskExecuteResponseCommand.class); + logger.info("received command : {}", responseCommand); + + taskInstanceCacheManager.cacheTaskInstance(responseCommand); + + ExecutionStatus responseStatus = ExecutionStatus.of(responseCommand.getStatus()); + + // TaskResponseEvent + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newResult(ExecutionStatus.of(responseCommand.getStatus()), + responseCommand.getEndTime(), + responseCommand.getProcessId(), + responseCommand.getAppIds(), + responseCommand.getTaskInstanceId()); + + taskResponseService.addResponse(taskResponseEvent); + + while (Stopper.isRunning()){ + TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); + + if (taskInstance != null && responseStatus.typeIsFinished()){ + break; + } + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java new file mode 100644 index 0000000000..9e8813fd7f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor.queue; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; + +import java.util.Date; + +/** + * task event + */ +public class TaskResponseEvent { + + /** + * taskInstanceId + */ + private int taskInstanceId; + + /** + * worker address + */ + private String workerAddress; + + /** + * state + */ + private ExecutionStatus state; + + /** + * start time + */ + private Date startTime; + + /** + * end time + */ + private Date endTime; + + /** + * execute path + */ + private String executePath; + + /** + * log path + */ + private String logPath; + + /** + * processId + */ + private int processId; + + /** + * appIds + */ + private String appIds; + + /** + * ack / response + */ + private Event event; + + public static TaskResponseEvent newAck(ExecutionStatus state, Date startTime, String workerAddress, String executePath, String logPath, int taskInstanceId){ + TaskResponseEvent event = new TaskResponseEvent(); + event.setState(state); + event.setStartTime(startTime); + event.setWorkerAddress(workerAddress); + event.setExecutePath(executePath); + event.setLogPath(logPath); + event.setTaskInstanceId(taskInstanceId); + event.setEvent(Event.ACK); + return event; + } + + public static TaskResponseEvent newResult(ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstanceId){ + TaskResponseEvent event = new TaskResponseEvent(); + event.setState(state); + event.setEndTime(endTime); + event.setProcessId(processId); + event.setAppIds(appIds); + event.setTaskInstanceId(taskInstanceId); + event.setEvent(Event.RESULT); + return event; + } + + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public String getWorkerAddress() { + return workerAddress; + } + + public void setWorkerAddress(String workerAddress) { + this.workerAddress = workerAddress; + } + + public ExecutionStatus getState() { + return state; + } + + public void setState(ExecutionStatus state) { + this.state = state; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getExecutePath() { + return executePath; + } + + public void setExecutePath(String executePath) { + this.executePath = executePath; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public int getProcessId() { + return processId; + } + + public void setProcessId(int processId) { + this.processId = processId; + } + + public String getAppIds() { + return appIds; + } + + public void setAppIds(String appIds) { + this.appIds = appIds; + } + + public Event getEvent() { + return event; + } + + public void setEvent(Event event) { + this.event = event; + } + + public enum Event{ + ACK, + RESULT; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java new file mode 100644 index 0000000000..b9772ca523 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor.queue; + +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; + +/** + * task manager + */ +@Component +public class TaskResponseService { + + /** + * logger + */ + private final Logger logger = LoggerFactory.getLogger(TaskResponseService.class); + + /** + * attemptQueue + */ + private final BlockingQueue eventQueue = new LinkedBlockingQueue<>(5000); + + + /** + * process service + */ + @Autowired + private ProcessService processService; + + /** + * task response worker + */ + private Thread taskResponseWorker; + + + @PostConstruct + public void start(){ + this.taskResponseWorker = new TaskResponseWorker(); + this.taskResponseWorker.setName("TaskResponseWorker"); + this.taskResponseWorker.start(); + } + + @PreDestroy + public void stop(){ + this.taskResponseWorker.interrupt(); + if(!eventQueue.isEmpty()){ + List remainEvents = new ArrayList<>(eventQueue.size()); + eventQueue.drainTo(remainEvents); + for(TaskResponseEvent event : remainEvents){ + this.persist(event); + } + } + } + + /** + * put task to attemptQueue + * + * @param taskResponseEvent taskResponseEvent + */ + public void addResponse(TaskResponseEvent taskResponseEvent){ + try { + eventQueue.put(taskResponseEvent); + } catch (InterruptedException e) { + logger.error("put task : {} error :{}", taskResponseEvent,e); + } + } + + + /** + * task worker thread + */ + class TaskResponseWorker extends Thread { + + @Override + public void run() { + + while (Stopper.isRunning()){ + try { + // if not task , blocking here + TaskResponseEvent taskResponseEvent = eventQueue.take(); + persist(taskResponseEvent); + } catch (InterruptedException e){ + break; + } catch (Exception e){ + logger.error("persist task error",e); + } + } + logger.info("TaskResponseWorker stopped"); + } + } + + /** + * persist taskResponseEvent + * @param taskResponseEvent taskResponseEvent + */ + private void persist(TaskResponseEvent taskResponseEvent){ + TaskResponseEvent.Event event = taskResponseEvent.getEvent(); + + switch (event){ + case ACK: + processService.changeTaskState(taskResponseEvent.getState(), + taskResponseEvent.getStartTime(), + taskResponseEvent.getWorkerAddress(), + taskResponseEvent.getExecutePath(), + taskResponseEvent.getLogPath(), + taskResponseEvent.getTaskInstanceId()); + break; + case RESULT: + processService.changeTaskState(taskResponseEvent.getState(), + taskResponseEvent.getEndTime(), + taskResponseEvent.getProcessId(), + taskResponseEvent.getAppIds(), + taskResponseEvent.getTaskInstanceId()); + break; + default: + throw new IllegalArgumentException("invalid event type : " + event); + } + } + + public BlockingQueue getEventQueue() { + return eventQueue; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java new file mode 100644 index 0000000000..b6582981f2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.registry; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import java.util.Date; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA; + +/** + * master registry + */ +@Service +public class MasterRegistry { + + private final Logger logger = LoggerFactory.getLogger(MasterRegistry.class); + + /** + * zookeeper registry center + */ + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + /** + * master config + */ + @Autowired + private MasterConfig masterConfig; + + /** + * heartbeat executor + */ + private ScheduledExecutorService heartBeatExecutor; + + /** + * worker start time + */ + private String startTime; + + + @PostConstruct + public void init(){ + this.startTime = DateUtils.dateToString(new Date()); + this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor")); + } + + /** + * registry + */ + public void registry() { + String address = OSUtils.getHost(); + String localNodePath = getMasterPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, ""); + zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() { + @Override + public void stateChanged(CuratorFramework client, ConnectionState newState) { + if(newState == ConnectionState.LOST){ + logger.error("master : {} connection lost from zookeeper", address); + } else if(newState == ConnectionState.RECONNECTED){ + logger.info("master : {} reconnected to zookeeper", address); + zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, ""); + } else if(newState == ConnectionState.SUSPENDED){ + logger.warn("master : {} connection SUSPENDED ", address); + } + } + }); + int masterHeartbeatInterval = masterConfig.getMasterHeartbeatInterval(); + this.heartBeatExecutor.scheduleAtFixedRate(new HeartBeatTask(), masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS); + logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval); + } + + /** + * remove registry info + */ + public void unRegistry() { + String address = getLocalAddress(); + String localNodePath = getMasterPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath); + logger.info("master node : {} unRegistry to ZK.", address); + } + + /** + * get master path + * @return + */ + private String getMasterPath() { + String address = getLocalAddress(); + String localNodePath = this.zookeeperRegistryCenter.getMasterPath() + "/" + address; + return localNodePath; + } + + /** + * get local address + * @return + */ + private String getLocalAddress(){ + return OSUtils.getHost() + ":" + masterConfig.getListenPort(); + } + + /** + * hear beat task + */ + class HeartBeatTask implements Runnable{ + + @Override + public void run() { + try { + StringBuilder builder = new StringBuilder(100); + builder.append(OSUtils.cpuUsage()).append(COMMA); + builder.append(OSUtils.memoryUsage()).append(COMMA); + builder.append(OSUtils.loadAverage()).append(COMMA); + builder.append(startTime).append(COMMA); + builder.append(DateUtils.dateToString(new Date())); + String masterPath = getMasterPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().update(masterPath, builder.toString()); + } catch (Throwable ex){ + logger.error("error write master heartbeat info", ex); + } + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java similarity index 51% rename from dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java index cbe82ce20a..7e3950df1f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java @@ -14,31 +14,28 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.worker.task.conditions; +package org.apache.dolphinscheduler.server.master.runner; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.DependentItem; import org.apache.dolphinscheduler.common.model.DependentTaskModel; -import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import org.apache.dolphinscheduler.common.utils.DependentUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -public class ConditionsTask extends AbstractTask { +public class ConditionsTaskExecThread extends MasterBaseTaskExecThread { /** @@ -47,60 +44,51 @@ public class ConditionsTask extends AbstractTask { private DependentParameters dependentParameters; /** - * process dao + * complete task map */ - private ProcessService processService; - - /** - * taskInstance - */ - private TaskInstance taskInstance; - - /** - * processInstance - */ - private ProcessInstance processInstance; + private Map completeTaskList = new ConcurrentHashMap<>(); /** - * + * condition result */ - private Map completeTaskList = new ConcurrentHashMap<>(); + private DependResult conditionResult; /** - * constructor + * constructor of MasterBaseTaskExecThread * - * @param taskProps task props - * @param logger logger + * @param taskInstance task instance */ - public ConditionsTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); + public ConditionsTaskExecThread(TaskInstance taskInstance) { + super(taskInstance); } @Override - public void init() throws Exception { - logger.info("conditions task initialize"); - - this.processService = SpringApplicationContext.getBean(ProcessService.class); - - this.dependentParameters = JSONUtils.parseObject(this.taskProps.getDependence(), DependentParameters.class); - - this.taskInstance = processService.findTaskInstanceById(taskProps.getTaskInstId()); - - if(taskInstance == null){ - throw new Exception("cannot find the task instance!"); - } - - List taskInstanceList = processService.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); - for(TaskInstance task : taskInstanceList){ - this.completeTaskList.putIfAbsent(task.getName(), task.getState()); + public Boolean submitWaitComplete() { + try{ + this.taskInstance = submit(); + logger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskInstance.getProcessDefinitionId(), + taskInstance.getProcessInstanceId(), + taskInstance.getId())); + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, processService.formatTaskAppId(this.taskInstance)); + Thread.currentThread().setName(threadLoggerInfoName); + initTaskParameters(); + logger.info("dependent task start"); + waitTaskQuit(); + updateTaskState(); + }catch (Exception e){ + logger.error("conditions task run exception" , e); } + return true; } - @Override - public void handle() throws Exception { - - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); + private void waitTaskQuit() { + List taskInstances = processService.findValidTaskListByProcessId( + taskInstance.getProcessInstanceId() + ); + for(TaskInstance task : taskInstances){ + completeTaskList.putIfAbsent(task.getName(), task.getState()); + } List modelResultList = new ArrayList<>(); for(DependentTaskModel dependentTaskModel : dependentParameters.getDependTaskList()){ @@ -112,14 +100,43 @@ public class ConditionsTask extends AbstractTask { DependResult modelResult = DependentUtils.getDependResultForRelation(dependentTaskModel.getRelation(), itemDependResult); modelResultList.add(modelResult); } - DependResult result = DependentUtils.getDependResultForRelation( + conditionResult = DependentUtils.getDependResultForRelation( dependentParameters.getRelation(), modelResultList ); - logger.info("the conditions task depend result : {}", result); - exitStatusCode = (result == DependResult.SUCCESS) ? - Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; + logger.info("the conditions task depend result : {}", conditionResult); } + /** + * + */ + private void updateTaskState() { + ExecutionStatus status; + if(this.cancel){ + status = ExecutionStatus.KILL; + }else{ + status = (conditionResult == DependResult.SUCCESS) ? ExecutionStatus.SUCCESS : ExecutionStatus.FAILURE; + } + taskInstance.setState(status); + taskInstance.setEndTime(new Date()); + processService.updateTaskInstance(taskInstance); + } + + private void initTaskParameters() { + this.taskInstance.setLogPath(getTaskLogPath(taskInstance)); + this.taskInstance.setHost(OSUtils.getHost() + Constants.COLON + masterConfig.getListenPort()); + taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + taskInstance.setStartTime(new Date()); + this.processService.saveTaskInstance(taskInstance); + + this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(), DependentParameters.class); + } + + + /** + * depend result for depend item + * @param item + * @return + */ private DependResult getDependResultForItem(DependentItem item){ DependResult dependResult = DependResult.SUCCESS; @@ -130,16 +147,13 @@ public class ConditionsTask extends AbstractTask { } ExecutionStatus executionStatus = completeTaskList.get(item.getDepTasks()); if(executionStatus != item.getStatus()){ - logger.info("depend item : {} expect status: {}, actual status: {}" ,item.getDepTasks(), item.getStatus().toString(), executionStatus.toString()); + logger.info("depend item : {} expect status: {}, actual status: {}" ,item.getDepTasks(), item.getStatus(), executionStatus); dependResult = DependResult.FAILED; } - logger.info("depend item: {}, depend result: {}", - item.getDepTasks(), dependResult); + logger.info("dependent item complete {} {},{}", + Constants.DEPENDENT_SPLIT, item.getDepTasks(), dependResult); return dependResult; } - @Override - public AbstractParameters getParameters() { - return null; - } -} \ No newline at end of file + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java new file mode 100644 index 0000000000..015c20024c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java @@ -0,0 +1,217 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.runner; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.DependentUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.utils.DependentExecute; +import org.slf4j.LoggerFactory; + +import java.util.*; + +import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; + +public class DependentTaskExecThread extends MasterBaseTaskExecThread { + + private DependentParameters dependentParameters; + + /** + * dependent task list + */ + private List dependentTaskList = new ArrayList<>(); + + /** + * depend item result map + * save the result to log file + */ + private Map dependResultMap = new HashMap<>(); + + + /** + * dependent date + */ + private Date dependentDate; + + /** + * constructor of MasterBaseTaskExecThread + * + * @param taskInstance task instance + */ + public DependentTaskExecThread(TaskInstance taskInstance) { + super(taskInstance); + } + + + @Override + public Boolean submitWaitComplete() { + try{ + logger.info("dependent task start"); + this.taskInstance = submit(); + logger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskInstance.getProcessDefinitionId(), + taskInstance.getProcessInstanceId(), + taskInstance.getId())); + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, processService.formatTaskAppId(this.taskInstance)); + Thread.currentThread().setName(threadLoggerInfoName); + initTaskParameters(); + initDependParameters(); + waitTaskQuit(); + updateTaskState(); + }catch (Exception e){ + logger.error("dependent task run exception" , e); + } + return true; + } + + /** + * init dependent parameters + */ + private void initDependParameters() { + + this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(), + DependentParameters.class); + + for(DependentTaskModel taskModel : dependentParameters.getDependTaskList()){ + this.dependentTaskList.add(new DependentExecute( + taskModel.getDependItemList(), taskModel.getRelation())); + } + if(this.processInstance.getScheduleTime() != null){ + this.dependentDate = this.processInstance.getScheduleTime(); + }else{ + this.dependentDate = new Date(); + } + } + + /** + * + */ + private void updateTaskState() { + ExecutionStatus status; + if(this.cancel){ + status = ExecutionStatus.KILL; + }else{ + DependResult result = getTaskDependResult(); + status = (result == DependResult.SUCCESS) ? ExecutionStatus.SUCCESS : ExecutionStatus.FAILURE; + } + taskInstance.setState(status); + taskInstance.setEndTime(new Date()); + processService.saveTaskInstance(taskInstance); + } + + /** + * wait dependent tasks quit + */ + private Boolean waitTaskQuit() { + logger.info("wait depend task : {} complete", this.taskInstance.getName()); + if (taskInstance.getState().typeIsFinished()) { + logger.info("task {} already complete. task state:{}", + this.taskInstance.getName(), + this.taskInstance.getState()); + return true; + } + while (Stopper.isRunning()) { + try{ + if(this.processInstance == null){ + logger.error("process instance not exists , master task exec thread exit"); + return true; + } + if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ + cancelTaskInstance(); + break; + } + + if ( allDependentTaskFinish() || taskInstance.getState().typeIsFinished()){ + break; + } + // updateProcessInstance task instance + taskInstance = processService.findTaskInstanceById(taskInstance.getId()); + processInstance = processService.findProcessInstanceById(processInstance.getId()); + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + } catch (Exception e) { + logger.error("exception",e); + if (processInstance != null) { + logger.error("wait task quit failed, instance id:{}, task id:{}", + processInstance.getId(), taskInstance.getId()); + } + } + } + return true; + } + + /** + * cancel dependent task + */ + private void cancelTaskInstance() { + this.cancel = true; + } + + private void initTaskParameters() { + taskInstance.setLogPath(getTaskLogPath(taskInstance)); + taskInstance.setHost(OSUtils.getHost() + Constants.COLON + masterConfig.getListenPort()); + taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + taskInstance.setStartTime(new Date()); + processService.updateTaskInstance(taskInstance); + } + + /** + * judge all dependent tasks finish + * @return whether all dependent tasks finish + */ + private boolean allDependentTaskFinish(){ + boolean finish = true; + for(DependentExecute dependentExecute : dependentTaskList){ + for(Map.Entry entry: dependentExecute.getDependResultMap().entrySet()) { + if(!dependResultMap.containsKey(entry.getKey())){ + dependResultMap.put(entry.getKey(), entry.getValue()); + //save depend result to log + logger.info("dependent item complete {} {},{}", + DEPENDENT_SPLIT, entry.getKey(), entry.getValue()); + } + } + if(!dependentExecute.finish(dependentDate)){ + finish = false; + } + } + return finish; + } + + /** + * get dependent result + * @return DependResult + */ + private DependResult getTaskDependResult(){ + List dependResultList = new ArrayList<>(); + for(DependentExecute dependentExecute : dependentTaskList){ + DependResult dependResult = dependentExecute.getModelDependResult(dependentDate); + dependResultList.add(dependResult); + } + DependResult result = DependentUtils.getDependResultForRelation( + this.dependentParameters.getRelation(), dependResultList + ); + logger.info("dependent task completed, dependent result:{}", result); + return result; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java index f8fcb1456d..3226d82304 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java @@ -16,20 +16,27 @@ */ package org.apache.dolphinscheduler.server.master.runner; +import ch.qos.logback.classic.LoggerContext; +import ch.qos.logback.classic.sift.SiftingAppender; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.utils.BeanContext; +import org.apache.dolphinscheduler.server.log.TaskLogDiscriminator; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; +import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.apache.dolphinscheduler.common.Constants.*; import java.util.concurrent.Callable; + /** * master task exec base class */ @@ -38,7 +45,8 @@ public class MasterBaseTaskExecThread implements Callable { /** * logger of MasterBaseTaskExecThread */ - private static final Logger logger = LoggerFactory.getLogger(MasterBaseTaskExecThread.class); + protected Logger logger = LoggerFactory.getLogger(getClass()); + /** * process service @@ -60,11 +68,6 @@ public class MasterBaseTaskExecThread implements Callable { */ protected TaskInstance taskInstance; - /** - * task queue - */ - protected ITaskQueue taskQueue; - /** * whether need cancel */ @@ -73,21 +76,23 @@ public class MasterBaseTaskExecThread implements Callable { /** * master config */ - private MasterConfig masterConfig; + protected MasterConfig masterConfig; + /** + * taskUpdateQueue + */ + private TaskPriorityQueue taskUpdateQueue; /** * constructor of MasterBaseTaskExecThread * @param taskInstance task instance - * @param processInstance process instance */ - public MasterBaseTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - this.processService = BeanContext.getBean(ProcessService.class); - this.alertDao = BeanContext.getBean(AlertDao.class); - this.processInstance = processInstance; - this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); + public MasterBaseTaskExecThread(TaskInstance taskInstance){ + this.processService = SpringApplicationContext.getBean(ProcessService.class); + this.alertDao = SpringApplicationContext.getBean(AlertDao.class); this.cancel = false; this.taskInstance = taskInstance; this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); + this.taskUpdateQueue = SpringApplicationContext.getBean(TaskPriorityQueueImpl.class); } /** @@ -115,38 +120,109 @@ public class MasterBaseTaskExecThread implements Callable { int retryTimes = 1; boolean submitDB = false; - boolean submitQueue = false; + boolean submitTask = false; TaskInstance task = null; while (retryTimes <= commitRetryTimes){ try { if(!submitDB){ // submit task to db - task = processService.submitTask(taskInstance, processInstance); + task = processService.submitTask(taskInstance); if(task != null && task.getId() != 0){ submitDB = true; } } - if(submitDB && !submitQueue){ - // submit task to queue - submitQueue = processService.submitTaskToQueue(task); + if(submitDB && !submitTask){ + // dispatch task + submitTask = dispatchTask(task); } - if(submitDB && submitQueue){ + if(submitDB && submitTask){ return task; } if(!submitDB){ logger.error("task commit to db failed , taskId {} has already retry {} times, please check the database", taskInstance.getId(), retryTimes); - }else if(!submitQueue){ - logger.error("task commit to queue failed , taskId {} has already retry {} times, please check the queue", taskInstance.getId(), retryTimes); + }else if(!submitTask){ + logger.error("task commit failed , taskId {} has already retry {} times, please check", taskInstance.getId(), retryTimes); } Thread.sleep(commitRetryInterval); } catch (Exception e) { - logger.error("task commit to mysql and queue failed",e); + logger.error("task commit to mysql and dispatcht task failed",e); } retryTimes += 1; } return task; } + + + /** + * dispatcht task + * @param taskInstance taskInstance + * @return whether submit task success + */ + public Boolean dispatchTask(TaskInstance taskInstance) { + + try{ + if(taskInstance.isConditionsTask() + || taskInstance.isDependTask() + || taskInstance.isSubProcess()){ + return true; + } + if(taskInstance.getState().typeIsFinished()){ + logger.info(String.format("submit task , but task [%s] state [%s] is already finished. ", taskInstance.getName(), taskInstance.getState().toString())); + return true; + } + // task cannot submit when running + if(taskInstance.getState() == ExecutionStatus.RUNNING_EXEUTION){ + logger.info(String.format("submit to task, but task [%s] state already be running. ", taskInstance.getName())); + return true; + } + logger.info("task ready to submit: {}", taskInstance); + + /** + * taskPriorityInfo + */ + String taskPriorityInfo = buildTaskPriorityInfo(processInstance.getProcessInstancePriority().getCode(), + processInstance.getId(), + taskInstance.getProcessInstancePriority().getCode(), + taskInstance.getId(), + org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP); + taskUpdateQueue.put(taskPriorityInfo); + logger.info(String.format("master submit success, task : %s", taskInstance.getName()) ); + return true; + }catch (Exception e){ + logger.error("submit task Exception: ", e); + logger.error("task error : %s", JSONUtils.toJson(taskInstance)); + return false; + } + } + + + /** + * buildTaskPriorityInfo + * + * @param processInstancePriority processInstancePriority + * @param processInstanceId processInstanceId + * @param taskInstancePriority taskInstancePriority + * @param taskInstanceId taskInstanceId + * @param workerGroup workerGroup + * @return TaskPriorityInfo + */ + private String buildTaskPriorityInfo(int processInstancePriority, + int processInstanceId, + int taskInstancePriority, + int taskInstanceId, + String workerGroup){ + return processInstancePriority + + UNDERLINE + + processInstanceId + + UNDERLINE + + taskInstancePriority + + UNDERLINE + + taskInstanceId + + UNDERLINE + + workerGroup; + } + /** * submit wait complete * @return true @@ -162,7 +238,39 @@ public class MasterBaseTaskExecThread implements Callable { */ @Override public Boolean call() throws Exception { + this.processInstance = processService.findProcessInstanceById(taskInstance.getProcessInstanceId()); return submitWaitComplete(); } + /** + * get task log path + * @return log path + */ + public String getTaskLogPath(TaskInstance task) { + String logPath; + try{ + String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) + .getLogger("ROOT") + .getAppender("TASKLOGFILE")) + .getDiscriminator()).getLogBase(); + if (baseLog.startsWith(Constants.SINGLE_SLASH)){ + logPath = baseLog + Constants.SINGLE_SLASH + + task.getProcessDefinitionId() + Constants.SINGLE_SLASH + + task.getProcessInstanceId() + Constants.SINGLE_SLASH + + task.getId() + ".log"; + }else{ + logPath = System.getProperty("user.dir") + Constants.SINGLE_SLASH + + baseLog + Constants.SINGLE_SLASH + + task.getProcessDefinitionId() + Constants.SINGLE_SLASH + + task.getProcessInstanceId() + Constants.SINGLE_SLASH + + task.getId() + ".log"; + } + }catch (Exception e){ + logger.error("logger", e); + logPath = ""; + } + return logPath; + } + + } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java index 0b81a30191..e0110adfcd 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.master.runner; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import com.google.common.collect.Lists; import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.common.Constants; @@ -33,6 +33,7 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.AlertManager; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; @@ -68,7 +69,7 @@ public class MasterExecThread implements Runnable { /** * runing TaskNode */ - private final Map> activeTaskNode = new ConcurrentHashMap>(); + private final Map> activeTaskNode = new ConcurrentHashMap<>(); /** * task exec service @@ -78,7 +79,7 @@ public class MasterExecThread implements Runnable { /** * submit failure nodes */ - private Boolean taskFailedSubmit = false; + private boolean taskFailedSubmit = false; /** * recover node id list @@ -140,12 +141,18 @@ public class MasterExecThread implements Runnable { */ private MasterConfig masterConfig; + /** + * + */ + private NettyRemotingClient nettyRemotingClient; + /** * constructor of MasterExecThread - * @param processInstance process instance - * @param processService process dao + * @param processInstance processInstance + * @param processService processService + * @param nettyRemotingClient nettyRemotingClient */ - public MasterExecThread(ProcessInstance processInstance, ProcessService processService){ + public MasterExecThread(ProcessInstance processInstance, ProcessService processService, NettyRemotingClient nettyRemotingClient){ this.processService = processService; this.processInstance = processInstance; @@ -153,9 +160,12 @@ public class MasterExecThread implements Runnable { int masterTaskExecNum = masterConfig.getMasterExecTaskNum(); this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", masterTaskExecNum); + this.nettyRemotingClient = nettyRemotingClient; } + + @Override public void run() { @@ -355,7 +365,6 @@ public class MasterExecThread implements Runnable { } // generate process dag dag = DagHelper.buildDagGraph(processDag); - } /** @@ -408,9 +417,13 @@ public class MasterExecThread implements Runnable { private TaskInstance submitTaskExec(TaskInstance taskInstance) { MasterBaseTaskExecThread abstractExecThread = null; if(taskInstance.isSubProcess()){ - abstractExecThread = new SubProcessTaskExecThread(taskInstance, processInstance); + abstractExecThread = new SubProcessTaskExecThread(taskInstance); + }else if(taskInstance.isDependTask()){ + abstractExecThread = new DependentTaskExecThread(taskInstance); + }else if(taskInstance.isConditionsTask()){ + abstractExecThread = new ConditionsTaskExecThread(taskInstance); }else { - abstractExecThread = new MasterTaskExecThread(taskInstance, processInstance); + abstractExecThread = new MasterTaskExecThread(taskInstance); } Future future = taskExecService.submit(abstractExecThread); activeTaskNode.putIfAbsent(abstractExecThread, future); @@ -454,7 +467,7 @@ public class MasterExecThread implements Runnable { // process instance id taskInstance.setProcessInstanceId(processInstance.getId()); // task instance node json - taskInstance.setTaskJson(JSONObject.toJSONString(taskNode)); + taskInstance.setTaskJson(JSON.toJSONString(taskNode)); // task instance type taskInstance.setTaskType(taskNode.getType()); // task instance whether alert @@ -482,34 +495,19 @@ public class MasterExecThread implements Runnable { taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); } - int workerGroupId = taskNode.getWorkerGroupId(); - taskInstance.setWorkerGroupId(workerGroupId); + String processWorkerGroup = processInstance.getWorkerGroup(); + String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? processWorkerGroup : taskNode.getWorkerGroup(); + if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) { + taskInstance.setWorkerGroup(processWorkerGroup); + }else { + taskInstance.setWorkerGroup(taskWorkerGroup); + } } return taskInstance; } - /** - * is there have conditions after the parent node - * @param parentNodeName - * @return - */ - private boolean haveConditionsAfterNode(String parentNodeName){ - boolean result = false; - Collection startVertex = DagHelper.getStartVertex(parentNodeName, dag, completeTaskList); - if(startVertex == null){ - return result; - } - for(String nodeName : startVertex){ - TaskNode taskNode = dag.getNode(nodeName); - if(taskNode.getType().equals(TaskType.CONDITIONS.toString())){ - result = true; - break; - } - } - return result; - } /** * if all of the task dependence are skip, skip it too. @@ -652,7 +650,7 @@ public class MasterExecThread implements Runnable { continue; } if(task.getState().typeIsPause() || task.getState().typeIsCancel()){ - logger.info("task {} stopped, the state is {}", task.getName(), task.getState().toString()); + logger.info("task {} stopped, the state is {}", task.getName(), task.getState()); }else{ addTaskToStandByList(task); } @@ -674,9 +672,9 @@ public class MasterExecThread implements Runnable { TaskNode taskNode = dag.getNode(taskName); List depNameList = taskNode.getDepList(); for(String depsNode : depNameList ){ - - if(forbiddenTaskList.containsKey(depsNode) || - skipTaskNodeList.containsKey(depsNode)){ + if(!dag.containsNode(depsNode) + || forbiddenTaskList.containsKey(depsNode) + || skipTaskNodeList.containsKey(depsNode)){ continue; } // dependencies must be fully completed @@ -685,11 +683,12 @@ public class MasterExecThread implements Runnable { } ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); // conditions task would not return failed. - if(depTaskState.typeIsFailure()){ - if(!haveConditionsAfterNode(depsNode) && !dag.getNode(depsNode).isConditionsTask()){ - return DependResult.FAILED; - } + if(depTaskState.typeIsFailure() + && !DagHelper.haveConditionsAfterNode(depsNode, dag ) + && !dag.getNode(depsNode).isConditionsTask()){ + return DependResult.FAILED; } + if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){ return DependResult.WAITING; } @@ -737,7 +736,7 @@ public class MasterExecThread implements Runnable { * * @return Boolean whether has failed task */ - private Boolean hasFailedTask(){ + private boolean hasFailedTask(){ if(this.taskFailedSubmit){ return true; @@ -753,7 +752,7 @@ public class MasterExecThread implements Runnable { * * @return Boolean whether process instance failed */ - private Boolean processFailed(){ + private boolean processFailed(){ if(hasFailedTask()) { if(processInstance.getFailureStrategy() == FailureStrategy.END){ return true; @@ -769,9 +768,9 @@ public class MasterExecThread implements Runnable { * whether task for waiting thread * @return Boolean whether has waiting thread task */ - private Boolean hasWaitingThreadTask(){ + private boolean hasWaitingThreadTask(){ List waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); - return waitingList.size() > 0; + return CollectionUtils.isNotEmpty(waitingList); } /** @@ -787,7 +786,7 @@ public class MasterExecThread implements Runnable { } List pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); - if(pauseList.size() > 0 + if(CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd() || readyToSubmitTaskList.size() > 0){ return ExecutionStatus.PAUSE; @@ -805,7 +804,7 @@ public class MasterExecThread implements Runnable { ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ExecutionStatus state = instance.getState(); - if(activeTaskNode.size() > 0){ + if(activeTaskNode.size() > 0 || haveRetryTaskStandBy()){ return runningState(state); } // process failure @@ -827,7 +826,8 @@ public class MasterExecThread implements Runnable { if(state == ExecutionStatus.READY_STOP){ List stopList = getCompleteTaskByState(ExecutionStatus.STOP); List killList = getCompleteTaskByState(ExecutionStatus.KILL); - if(stopList.size() > 0 || killList.size() > 0 || !isComplementEnd()){ + if(CollectionUtils.isNotEmpty(stopList) + || CollectionUtils.isNotEmpty(killList) || !isComplementEnd()){ return ExecutionStatus.STOP; }else{ return ExecutionStatus.SUCCESS; @@ -848,11 +848,29 @@ public class MasterExecThread implements Runnable { return state; } + /** + * whether standby task list have retry tasks + * @return + */ + private boolean haveRetryTaskStandBy() { + + boolean result = false; + + for(String taskName : readyToSubmitTaskList.keySet()){ + TaskInstance task = readyToSubmitTaskList.get(taskName); + if(task.getState().typeIsFailure()){ + result = true; + break; + } + } + return result; + } + /** * whether complement end * @return Boolean whether is complement end */ - private Boolean isComplementEnd() { + private boolean isComplementEnd() { if(!processInstance.isComplementData()){ return true; } @@ -877,9 +895,9 @@ public class MasterExecThread implements Runnable { logger.info( "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", processInstance.getId(), processInstance.getName(), - processInstance.getState().toString(), state.toString(), - processInstance.getCommandType().toString()); - processInstance.setState(state); + processInstance.getState(), state, + processInstance.getCommandType()); + ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); instance.setState(state); instance.setProcessDefinition(processInstance.getProcessDefinition()); @@ -894,8 +912,7 @@ public class MasterExecThread implements Runnable { * @return DependResult */ private DependResult getDependResultForTask(TaskInstance taskInstance){ - DependResult inner = isTaskDepsComplete(taskInstance.getName()); - return inner; + return isTaskDepsComplete(taskInstance.getName()); } /** @@ -920,7 +937,7 @@ public class MasterExecThread implements Runnable { * has retry task in standby * @return Boolean whether has retry task in standby */ - private Boolean hasRetryTaskInStandBy(){ + private boolean hasRetryTaskInStandBy(){ for (Map.Entry entry: readyToSubmitTaskList.entrySet()) { if(entry.getValue().getState().typeIsFailure()){ return true; @@ -936,10 +953,10 @@ public class MasterExecThread implements Runnable { // submit start node submitPostNode(null); boolean sendTimeWarning = false; - while(!processInstance.IsProcessInstanceStop()){ + while(!processInstance.isProcessInstanceStop()){ // send warning email if process time out. - if( !sendTimeWarning && checkProcessTimeOut(processInstance) ){ + if(!sendTimeWarning && checkProcessTimeOut(processInstance) ){ alertManager.sendProcessTimeoutAlert(processInstance, processService.findProcessDefineById(processInstance.getProcessDefinitionId())); sendTimeWarning = true; @@ -951,14 +968,23 @@ public class MasterExecThread implements Runnable { if(!future.isDone()){ continue; } + // node monitor thread complete - activeTaskNode.remove(entry.getKey()); + task = this.processService.findTaskInstanceById(task.getId()); + if(task == null){ this.taskFailedSubmit = true; + activeTaskNode.remove(entry.getKey()); continue; } + + // node monitor thread complete + if(task.getState().typeIsFinished()){ + activeTaskNode.remove(entry.getKey()); + } + logger.info("task :{}, id:{} complete, state is {} ", - task.getName(), task.getId(), task.getState().toString()); + task.getName(), task.getId(), task.getState()); // node success , post node submit if(task.getState() == ExecutionStatus.SUCCESS){ completeTaskList.put(task.getName(), task); @@ -974,8 +1000,8 @@ public class MasterExecThread implements Runnable { addTaskToStandByList(task); }else{ completeTaskList.put(task.getName(), task); - if( task.getTaskType().equals(TaskType.CONDITIONS.toString()) || - haveConditionsAfterNode(task.getName())) { + if( task.isConditionsTask() + || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { submitPostNode(task.getName()); }else{ errorTaskList.put(task.getName(), task); @@ -990,7 +1016,7 @@ public class MasterExecThread implements Runnable { completeTaskList.put(task.getName(), task); } // send alert - if(this.recoverToleranceFaultTaskList.size() > 0){ + if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){ alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); this.recoverToleranceFaultTaskList.clear(); } @@ -1034,10 +1060,7 @@ public class MasterExecThread implements Runnable { Date now = new Date(); long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); - if(runningTime > processInstance.getTimeout()){ - return true; - } - return false; + return runningTime > processInstance.getTimeout(); } /** @@ -1062,7 +1085,7 @@ public class MasterExecThread implements Runnable { TaskInstance taskInstance = taskExecThread.getTaskInstance(); taskInstance = processService.findTaskInstanceById(taskInstance.getId()); - if(taskInstance.getState().typeIsFinished()){ + if(taskInstance != null && taskInstance.getState().typeIsFinished()){ continue; } @@ -1081,22 +1104,19 @@ public class MasterExecThread implements Runnable { * @param taskInstance task instance * @return Boolean */ - private Boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ + private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ if(taskInstance.getState() != ExecutionStatus.FAILURE){ - return Boolean.TRUE; + return true; } if(taskInstance.getId() == 0 || taskInstance.getMaxRetryTimes() ==0 || taskInstance.getRetryInterval() == 0 ){ - return Boolean.TRUE; + return true; } Date now = new Date(); long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime()); // task retry does not over time, return false - if(taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT >= failedTimeInterval){ - return Boolean.FALSE; - } - return Boolean.TRUE; + return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval; } /** @@ -1189,7 +1209,7 @@ public class MasterExecThread implements Runnable { */ private List getRecoveryNodeNameList(){ List recoveryNodeNameList = new ArrayList<>(); - if(recoverNodeIdList.size() > 0) { + if(CollectionUtils.isNotEmpty(recoverNodeIdList)) { for (TaskInstance task : recoverNodeIdList) { recoveryNodeNameList.add(task.getName()); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java similarity index 60% rename from dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java index cc5a7e76e4..87e16596b4 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java @@ -24,65 +24,87 @@ import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.zk.ZKMasterClient; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; -import java.util.concurrent.ExecutorService; +import javax.annotation.PostConstruct; import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; /** * master scheduler thread */ -public class MasterSchedulerThread implements Runnable { +@Service +public class MasterSchedulerService extends Thread { /** * logger of MasterSchedulerThread */ - private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerThread.class); + private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerService.class); /** - * master exec service + * dolphinscheduler database interface */ - private final ExecutorService masterExecService; + @Autowired + private ProcessService processService; /** - * dolphinscheduler database interface + * zookeeper master client */ - private final ProcessService processService; + @Autowired + private ZKMasterClient zkMasterClient; /** - * zookeeper master client + * master config */ - private final ZKMasterClient zkMasterClient ; + @Autowired + private MasterConfig masterConfig; /** - * master exec thread num + * netty remoting client */ - private int masterExecThreadNum; + private NettyRemotingClient nettyRemotingClient; /** - * master config + * master exec service */ - private MasterConfig masterConfig; + private ThreadPoolExecutor masterExecService; /** * constructor of MasterSchedulerThread - * @param zkClient zookeeper master client - * @param processService process service - * @param masterExecThreadNum master exec thread num */ - public MasterSchedulerThread(ZKMasterClient zkClient, ProcessService processService, int masterExecThreadNum){ - this.processService = processService; - this.zkMasterClient = zkClient; - this.masterExecThreadNum = masterExecThreadNum; - this.masterExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread",masterExecThreadNum); - this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); + @PostConstruct + public void init(){ + this.masterExecService = (ThreadPoolExecutor)ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread", masterConfig.getMasterExecThreads()); + NettyClientConfig clientConfig = new NettyClientConfig(); + this.nettyRemotingClient = new NettyRemotingClient(clientConfig); + } + + @Override + public void start(){ + super.setName("MasterSchedulerThread"); + super.start(); + } + + public void close() { + masterExecService.shutdown(); + boolean terminated = false; + try { + terminated = masterExecService.awaitTermination(5, TimeUnit.SECONDS); + } catch (InterruptedException ignore) {} + if(!terminated){ + logger.warn("masterExecService shutdown without terminated, increase await time"); + } + nettyRemotingClient.close(); + logger.info("master schedule service stopped..."); } /** @@ -90,15 +112,10 @@ public class MasterSchedulerThread implements Runnable { */ @Override public void run() { - logger.info("master scheduler start successfully..."); + logger.info("master scheduler started"); while (Stopper.isRunning()){ - - // process instance - ProcessInstance processInstance = null; - InterProcessMutex mutex = null; try { - boolean runCheckFlag = OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory()); if(!runCheckFlag) { Thread.sleep(Constants.SLEEP_TIME_MILLIS); @@ -106,24 +123,22 @@ public class MasterSchedulerThread implements Runnable { } if (zkMasterClient.getZkClient().getState() == CuratorFrameworkState.STARTED) { - // create distributed lock with the root node path of the lock space as /dolphinscheduler/lock/masters - String znodeLock = zkMasterClient.getMasterLockPath(); - - mutex = new InterProcessMutex(zkMasterClient.getZkClient(), znodeLock); - mutex.acquire(); + mutex = zkMasterClient.blockAcquireMutex(); - ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) masterExecService; - int activeCount = poolExecutor.getActiveCount(); + int activeCount = masterExecService.getActiveCount(); // make sure to scan and delete command table in one transaction Command command = processService.findOneCommand(); if (command != null) { logger.info("find one command: id: {}, type: {}", command.getId(),command.getCommandType()); try{ - processInstance = processService.handleCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount, command); + + ProcessInstance processInstance = processService.handleCommand(logger, + getLocalAddress(), + this.masterConfig.getMasterExecThreads() - activeCount, command); if (processInstance != null) { logger.info("start master exec thread , split DAG ..."); - masterExecService.execute(new MasterExecThread(processInstance, processService)); + masterExecService.execute(new MasterExecThread(processInstance, processService, nettyRemotingClient)); } }catch (Exception e){ logger.error("scan command error ", e); @@ -134,14 +149,15 @@ public class MasterSchedulerThread implements Runnable { Thread.sleep(Constants.SLEEP_TIME_MILLIS); } } - }catch (Exception e){ - logger.error("master scheduler thread exception",e); - }finally{ - AbstractZKClient.releaseMutex(mutex); + } catch (Exception e){ + logger.error("master scheduler thread error",e); + } finally{ + zkMasterClient.releaseMutex(mutex); } } - logger.info("master server stopped..."); } - + private String getLocalAddress(){ + return OSUtils.getHost() + ":" + masterConfig.getListenPort(); + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java index 37262c05e2..9986b07319 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java @@ -16,6 +16,10 @@ */ package org.apache.dolphinscheduler.server.master.runner; + + +import com.alibaba.fastjson.JSON; + import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; @@ -23,15 +27,18 @@ import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import com.alibaba.fastjson.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.util.Date; -import static org.apache.dolphinscheduler.common.Constants.DOLPHINSCHEDULER_TASKS_KILL; /** * master task exec thread @@ -39,17 +46,21 @@ import static org.apache.dolphinscheduler.common.Constants.DOLPHINSCHEDULER_TASK public class MasterTaskExecThread extends MasterBaseTaskExecThread { /** - * logger of MasterTaskExecThread + * taskInstance state manager */ - private static final Logger logger = LoggerFactory.getLogger(MasterTaskExecThread.class); + private TaskInstanceCacheManager taskInstanceCacheManager; + + + private NettyExecutorManager nettyExecutorManager; /** * constructor of MasterTaskExecThread * @param taskInstance task instance - * @param processInstance process instance */ - public MasterTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - super(taskInstance, processInstance); + public MasterTaskExecThread(TaskInstance taskInstance){ + super(taskInstance); + this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); + this.nettyExecutorManager = SpringApplicationContext.getBean(NettyExecutorManager.class); } /** @@ -64,10 +75,11 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { /** * whether already Killed,default false */ - private Boolean alreadyKilled = false; + private boolean alreadyKilled = false; /** * submit task instance and wait complete + * * @return true is task quit is true */ @Override @@ -89,6 +101,8 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { } /** + * polling db + * * wait task quit * @return true if task quit success */ @@ -98,7 +112,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { logger.info("wait task: process id: {}, task id:{}, task name:{} complete", this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName()); // task time out - Boolean checkTimeout = false; + boolean checkTimeout = false; TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter(); if(taskTimeoutParameter.getEnable()){ TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy(); @@ -119,6 +133,8 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { } // task instance finished if (taskInstance.getState().typeIsFinished()){ + // if task is final result , then remove taskInstance from cache + taskInstanceCacheManager.removeByTaskInstanceId(taskInstance.getId()); break; } if(checkTimeout){ @@ -153,18 +169,21 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { /** * task instance add queue , waiting worker to kill */ - private void cancelTaskInstance(){ + private void cancelTaskInstance() throws Exception{ if(alreadyKilled){ return ; } alreadyKilled = true; - String host = taskInstance.getHost(); - if(host == null){ - host = Constants.NULL; - } - String queueValue = String.format("%s-%d", - host, taskInstance.getId()); - taskQueue.sadd(DOLPHINSCHEDULER_TASKS_KILL, queueValue); + + TaskKillRequestCommand killCommand = new TaskKillRequestCommand(); + killCommand.setTaskInstanceId(taskInstance.getId()); + + ExecutionContext executionContext = new ExecutionContext(killCommand.convert2Command(), ExecutorType.WORKER); + + Host host = Host.of(taskInstance.getHost()); + executionContext.setHost(host); + + nettyExecutorManager.executeDirectly(executionContext); logger.info("master add kill task :{} id:{} to kill queue", taskInstance.getName(), taskInstance.getId() ); @@ -176,20 +195,19 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { */ private TaskTimeoutParameter getTaskTimeoutParameter(){ String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class); return taskNode.getTaskTimeoutParameter(); } /** - * get remain time(s) + * get remain time?s? * * @return remain time */ private long getRemaintime(long timeoutSeconds) { Date startTime = taskInstance.getStartTime(); long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; - long remainTime = timeoutSeconds - usedTime; - return remainTime; + return timeoutSeconds - usedTime; } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java index fc16b5112b..ee290487b7 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java @@ -21,8 +21,6 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.Date; @@ -31,11 +29,6 @@ import java.util.Date; */ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { - /** - * logger of SubProcessTaskExecThread - */ - private static final Logger logger = LoggerFactory.getLogger(SubProcessTaskExecThread.class); - /** * sub process instance */ @@ -44,10 +37,9 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { /** * sub process task exec thread * @param taskInstance task instance - * @param processInstance process instance */ - public SubProcessTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - super(taskInstance, processInstance); + public SubProcessTaskExecThread(TaskInstance taskInstance){ + super(taskInstance); } @Override @@ -95,7 +87,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { * set task instance state * @return */ - private Boolean setTaskInstanceState(){ + private boolean setTaskInstanceState(){ subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); if(subProcessInstance == null || taskInstance.getState().typeIsFinished()){ return false; @@ -131,8 +123,8 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { if (taskInstance.getState().typeIsFinished()) { logger.info("sub work flow task {} already complete. task state:{}, parent work flow instance state:{}", this.taskInstance.getName(), - this.taskInstance.getState().toString(), - this.processInstance.getState().toString()); + this.taskInstance.getState(), + this.processInstance.getState()); return; } while (Stopper.isRunning()) { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/Monitor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/Monitor.java index 3ee9488a3e..8d7bf0bb89 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/Monitor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/Monitor.java @@ -23,6 +23,11 @@ public interface Monitor { /** * monitor server and restart + * + * @param masterPath masterPath + * @param workerPath workerPath + * @param port port + * @param installPath installPath */ void monitor(String masterPath, String workerPath, Integer port, String installPath); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java index ac549bc386..a1f43add6e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/MonitorServer.java @@ -30,7 +30,7 @@ import org.springframework.context.annotation.ComponentScan; @ComponentScan("org.apache.dolphinscheduler") public class MonitorServer implements CommandLineRunner { - private static Integer ARGS_LENGTH = 4; + private static final Integer ARGS_LENGTH = 4; private static final Logger logger = LoggerFactory.getLogger(MonitorServer.class); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManager.java new file mode 100644 index 0000000000..278da60867 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManager.java @@ -0,0 +1,272 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.registry; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.curator.framework.CuratorFramework; + +import org.apache.curator.framework.recipes.cache.TreeCacheEvent; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.service.zk.AbstractListener; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; + +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; + +/** + * zookeeper node manager + */ +@Service +public class ZookeeperNodeManager implements InitializingBean { + + private final Logger logger = LoggerFactory.getLogger(ZookeeperNodeManager.class); + + /** + * master lock + */ + private final Lock masterLock = new ReentrantLock(); + + /** + * worker group lock + */ + private final Lock workerGroupLock = new ReentrantLock(); + + /** + * worker group nodes + */ + private final ConcurrentHashMap> workerGroupNodes = new ConcurrentHashMap<>(); + + /** + * master nodes + */ + private final Set masterNodes = new HashSet<>(); + + /** + * zookeeper registry center + */ + @Autowired + private ZookeeperRegistryCenter registryCenter; + + /** + * alert dao + */ + @Autowired + private AlertDao alertDao; + + /** + * init listener + * @throws Exception if error throws Exception + */ + @Override + public void afterPropertiesSet() throws Exception { + /** + * load nodes from zookeeper + */ + load(); + /** + * init MasterNodeListener listener + */ + registryCenter.getZookeeperCachedOperator().addListener(new MasterNodeListener()); + /** + * init WorkerNodeListener listener + */ + registryCenter.getZookeeperCachedOperator().addListener(new WorkerGroupNodeListener()); + } + + /** + * load nodes from zookeeper + */ + private void load(){ + /** + * master nodes from zookeeper + */ + Set masterNodes = registryCenter.getMasterNodesDirectly(); + syncMasterNodes(masterNodes); + + /** + * worker group nodes from zookeeper + */ + Set workerGroups = registryCenter.getWorkerGroupDirectly(); + for(String workerGroup : workerGroups){ + syncWorkerGroupNodes(workerGroup, registryCenter.getWorkerGroupNodesDirectly(workerGroup)); + } + } + + /** + * worker group node listener + */ + class WorkerGroupNodeListener extends AbstractListener { + + @Override + protected void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) { + if(registryCenter.isWorkerPath(path)){ + try { + if (event.getType() == TreeCacheEvent.Type.NODE_ADDED) { + logger.info("worker group node : {} added.", path); + String group = parseGroup(path); + Set workerNodes = workerGroupNodes.getOrDefault(group, new HashSet<>()); + Set previousNodes = new HashSet<>(workerNodes); + Set currentNodes = registryCenter.getWorkerGroupNodesDirectly(group); + logger.info("currentNodes : {}", currentNodes); + syncWorkerGroupNodes(group, currentNodes); + } else if (event.getType() == TreeCacheEvent.Type.NODE_REMOVED) { + logger.info("worker group node : {} down.", path); + String group = parseGroup(path); + Set workerNodes = workerGroupNodes.getOrDefault(group, new HashSet<>()); + Set previousNodes = new HashSet<>(workerNodes); + Set currentNodes = registryCenter.getWorkerGroupNodesDirectly(group); + syncWorkerGroupNodes(group, currentNodes); + alertDao.sendServerStopedAlert(1, path, "WORKER"); + } + } catch (IllegalArgumentException ignore) { + logger.warn(ignore.getMessage()); + } catch (Exception ex) { + logger.error("WorkerGroupListener capture data change and get data failed", ex); + } + } + } + + private String parseGroup(String path){ + String[] parts = path.split("\\/"); + if(parts.length != 6){ + throw new IllegalArgumentException(String.format("worker group path : %s is not valid, ignore", path)); + } + String group = parts[4]; + return group; + } + } + + + /** + * master node listener + */ + class MasterNodeListener extends AbstractListener { + + @Override + protected void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) { + if (registryCenter.isMasterPath(path)) { + try { + if (event.getType() == TreeCacheEvent.Type.NODE_ADDED) { + logger.info("master node : {} added.", path); + Set previousNodes = new HashSet<>(masterNodes); + Set currentNodes = registryCenter.getMasterNodesDirectly(); + syncMasterNodes(currentNodes); + } else if (event.getType() == TreeCacheEvent.Type.NODE_REMOVED) { + logger.info("master node : {} down.", path); + Set previousNodes = new HashSet<>(masterNodes); + Set currentNodes = registryCenter.getMasterNodesDirectly(); + syncMasterNodes(currentNodes); + alertDao.sendServerStopedAlert(1, path, "MASTER"); + } + } catch (Exception ex) { + logger.error("MasterNodeListener capture data change and get data failed.", ex); + } + } + } + } + + /** + * get master nodes + * @return master nodes + */ + public Set getMasterNodes() { + masterLock.lock(); + try { + return Collections.unmodifiableSet(masterNodes); + } finally { + masterLock.unlock(); + } + } + + /** + * sync master nodes + * @param nodes master nodes + */ + private void syncMasterNodes(Set nodes){ + masterLock.lock(); + try { + masterNodes.clear(); + masterNodes.addAll(nodes); + } finally { + masterLock.unlock(); + } + } + + /** + * sync worker group nodes + * @param workerGroup worker group + * @param nodes worker nodes + */ + private void syncWorkerGroupNodes(String workerGroup, Set nodes){ + workerGroupLock.lock(); + try { + workerGroup = workerGroup.toLowerCase(); + Set workerNodes = workerGroupNodes.getOrDefault(workerGroup, new HashSet<>()); + workerNodes.clear(); + workerNodes.addAll(nodes); + workerGroupNodes.put(workerGroup, workerNodes); + } finally { + workerGroupLock.unlock(); + } + } + + public Map> getWorkerGroupNodes(){ + return Collections.unmodifiableMap(workerGroupNodes); + } + + /** + * get worker group nodes + * @param workerGroup workerGroup + * @return worker nodes + */ + public Set getWorkerGroupNodes(String workerGroup){ + workerGroupLock.lock(); + try { + if(StringUtils.isEmpty(workerGroup)){ + workerGroup = DEFAULT_WORKER_GROUP; + } + workerGroup = workerGroup.toLowerCase(); + Set nodes = workerGroupNodes.get(workerGroup); + if(CollectionUtils.isNotEmpty(nodes)){ + return Collections.unmodifiableSet(nodes); + } + return nodes; + } finally { + workerGroupLock.unlock(); + } + } + + /** + * close + */ + public void close(){ + registryCenter.close(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperRegistryCenter.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperRegistryCenter.java new file mode 100644 index 0000000000..3ca62bee6a --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperRegistryCenter.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.registry; + +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * zookeeper register center + */ +@Service +public class ZookeeperRegistryCenter implements InitializingBean { + + private final AtomicBoolean isStarted = new AtomicBoolean(false); + + + @Autowired + protected ZookeeperCachedOperator zookeeperCachedOperator; + + @Autowired + private ZookeeperConfig zookeeperConfig; + + /** + * nodes namespace + */ + public String NODES; + + /** + * master path + */ + public String MASTER_PATH; + + /** + * worker path + */ + public String WORKER_PATH; + + public final String EMPTY = ""; + + @Override + public void afterPropertiesSet() throws Exception { + NODES = zookeeperConfig.getDsRoot() + "/nodes"; + MASTER_PATH = NODES + "/master"; + WORKER_PATH = NODES + "/worker"; + + init(); + } + + /** + * init node persist + */ + public void init() { + if (isStarted.compareAndSet(false, true)) { + initNodes(); + } + } + + /** + * init nodes + */ + private void initNodes() { + zookeeperCachedOperator.persist(MASTER_PATH, EMPTY); + zookeeperCachedOperator.persist(WORKER_PATH, EMPTY); + } + + /** + * close + */ + public void close() { + if (isStarted.compareAndSet(true, false)) { + if (zookeeperCachedOperator != null) { + zookeeperCachedOperator.close(); + } + } + } + + /** + * get master path + * @return master path + */ + public String getMasterPath() { + return MASTER_PATH; + } + + /** + * get worker path + * @return worker path + */ + public String getWorkerPath() { + return WORKER_PATH; + } + + /** + * get master nodes directly + * @return master nodes + */ + public Set getMasterNodesDirectly() { + List masters = getChildrenKeys(MASTER_PATH); + return new HashSet<>(masters); + } + + /** + * get worker nodes directly + * @return master nodes + */ + public Set getWorkerNodesDirectly() { + List workers = getChildrenKeys(WORKER_PATH); + return new HashSet<>(workers); + } + + /** + * get worker group directly + * @return worker group nodes + */ + public Set getWorkerGroupDirectly() { + List workers = getChildrenKeys(getWorkerPath()); + return new HashSet<>(workers); + } + + /** + * get worker group nodes + * @param workerGroup + * @return + */ + public Set getWorkerGroupNodesDirectly(String workerGroup) { + List workers = getChildrenKeys(getWorkerGroupPath(workerGroup)); + return new HashSet<>(workers); + } + + /** + * whether worker path + * @param path path + * @return result + */ + public boolean isWorkerPath(String path) { + return path != null && path.contains(WORKER_PATH); + } + + /** + * whether master path + * @param path path + * @return result + */ + public boolean isMasterPath(String path) { + return path != null && path.contains(MASTER_PATH); + } + + /** + * get worker group path + * @param workerGroup workerGroup + * @return worker group path + */ + public String getWorkerGroupPath(String workerGroup) { + return WORKER_PATH + "/" + workerGroup; + } + + /** + * get children nodes + * @param key key + * @return children nodes + */ + public List getChildrenKeys(final String key) { + return zookeeperCachedOperator.getChildrenKeys(key); + } + + /** + * get zookeeperCachedOperator + * @return zookeeperCachedOperator + */ + public ZookeeperCachedOperator getZookeeperCachedOperator() { + return zookeeperCachedOperator; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java similarity index 67% rename from dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java rename to dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java index b08cabc2e9..235954632e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.worker.task.dependent; +package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DependResult; @@ -22,9 +22,12 @@ import org.apache.dolphinscheduler.common.enums.DependentRelation; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.DependentItem; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DependentUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.utils.DagHelper; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; @@ -82,7 +85,7 @@ public class DependentExecute { * @param currentTime current time * @return DependResult */ - public DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){ + private DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){ List dateIntervals = DependentUtils.getDateIntervalList(currentTime, dependentItem.getDateValue()); return calculateResultForTasks(dependentItem, dateIntervals ); } @@ -94,7 +97,8 @@ public class DependentExecute { * @return dateIntervals */ private DependResult calculateResultForTasks(DependentItem dependentItem, - List dateIntervals) { + List dateIntervals) { + DependResult result = DependResult.FAILED; for(DateInterval dateInterval : dateIntervals){ ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), @@ -104,25 +108,11 @@ public class DependentExecute { dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() ); return DependResult.FAILED; } + // need to check workflow for updates, so get all task and check the task state if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ - result = getDependResultByState(processInstance.getState()); + result = dependResultByProcessInstance(processInstance); }else{ - TaskInstance taskInstance = null; - List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); - - for(TaskInstance task : taskInstanceList){ - if(task.getName().equals(dependentItem.getDepTasks())){ - taskInstance = task; - break; - } - } - if(taskInstance == null){ - // cannot find task in the process instance - // maybe because process instance is running or failed. - result = getDependResultByState(processInstance.getState()); - }else{ - result = getDependResultByState(taskInstance.getState()); - } + result = getDependTaskResult(dependentItem.getDepTasks(),processInstance); } if(result != DependResult.SUCCESS){ break; @@ -131,6 +121,61 @@ public class DependentExecute { return result; } + /** + * depend type = depend_all + * skip the condition tasks. + * judge all the task + * @return + */ + private DependResult dependResultByProcessInstance(ProcessInstance processInstance){ + DependResult result = DependResult.FAILED; + List taskNodes = + processService.getTaskNodeListByDefinitionId(processInstance.getProcessDefinitionId()); + if(CollectionUtils.isEmpty(taskNodes)) { + return result; + } + for(TaskNode taskNode:taskNodes){ + if(taskNode.isConditionsTask() + || DagHelper.haveConditionsAfterNode(taskNode.getName(), taskNodes)){ + continue; + } + DependResult tmpResult = getDependTaskResult(taskNode.getName(),processInstance); + if(DependResult.SUCCESS != tmpResult){ + return tmpResult; + } + } + return DependResult.SUCCESS; + } + + /** + * get depend task result + * @param taskName + * @param processInstance + * @return + */ + private DependResult getDependTaskResult(String taskName, ProcessInstance processInstance) { + DependResult result; + TaskInstance taskInstance = null; + List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); + + for(TaskInstance task : taskInstanceList){ + if(task.getName().equals(taskName)){ + taskInstance = task; + break; + } + } + + if(taskInstance == null){ + // cannot find task in the process instance + // maybe because process instance is running or failed. + result = getDependResultByProcessStateWhenTaskNull(processInstance.getState()); + }else{ + result = getDependResultByState(taskInstance.getState()); + } + + return result; + } + /** * find the last one process instance that : * 1. manual run and finish between the interval @@ -141,7 +186,7 @@ public class DependentExecute { */ private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) { - ProcessInstance runningProcess = processService.findLastRunningProcess(definitionId, dateInterval); + ProcessInstance runningProcess = processService.findLastRunningProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime()); if(runningProcess != null){ return runningProcess; } @@ -172,7 +217,9 @@ public class DependentExecute { */ private DependResult getDependResultByState(ExecutionStatus state) { - if(state.typeIsRunning() || state == ExecutionStatus.SUBMITTED_SUCCESS || state == ExecutionStatus.WAITTING_THREAD){ + if(state.typeIsRunning() + || state == ExecutionStatus.SUBMITTED_SUCCESS + || state == ExecutionStatus.WAITTING_THREAD){ return DependResult.WAITING; }else if(state.typeIsSuccess()){ return DependResult.SUCCESS; @@ -181,6 +228,22 @@ public class DependentExecute { } } + /** + * get dependent result by task instance state when task instance is null + * @param state state + * @return DependResult + */ + private DependResult getDependResultByProcessStateWhenTaskNull(ExecutionStatus state) { + + if(state.typeIsRunning() + || state == ExecutionStatus.SUBMITTED_SUCCESS + || state == ExecutionStatus.WAITTING_THREAD){ + return DependResult.WAITING; + }else{ + return DependResult.FAILED; + } + } + /** * judge depend item finished * @param currentTime current time @@ -222,7 +285,7 @@ public class DependentExecute { * @param currentTime current time * @return DependResult */ - public DependResult getDependResultForItem(DependentItem item, Date currentTime){ + private DependResult getDependResultForItem(DependentItem item, Date currentTime){ String key = item.getKey(); if(dependResultMap.containsKey(key)){ return dependResultMap.get(key); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java index 1d7a80daf0..125bd965f7 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ParamUtils.java @@ -17,8 +17,11 @@ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; import java.util.Date; @@ -70,17 +73,16 @@ public class ParamUtils { Map.Entry en = iter.next(); Property property = en.getValue(); - if (property.getValue() != null && property.getValue().length() > 0){ - if (property.getValue().startsWith("$")){ - /** - * local parameter refers to global parameter with the same name - * note: the global parameters of the process instance here are solidified parameters, - * and there are no variables in them. - */ - String val = property.getValue(); - val = ParameterUtils.convertParameterPlaceholders(val, timeParams); - property.setValue(val); - } + if (StringUtils.isNotEmpty(property.getValue()) + && property.getValue().startsWith("$")){ + /** + * local parameter refers to global parameter with the same name + * note: the global parameters of the process instance here are solidified parameters, + * and there are no variables in them. + */ + String val = property.getValue(); + val = ParameterUtils.convertParameterPlaceholders(val, timeParams); + property.setValue(val); } } @@ -105,4 +107,24 @@ public class ParamUtils { } return map; } + + + /** + * get parameters map + * @param definedParams definedParams + * @return parameters map + */ + public static Map getUserDefParamsMap(Map definedParams) { + if (definedParams != null) { + Map userDefParamsMaps = new HashMap<>(); + Iterator> iter = definedParams.entrySet().iterator(); + while (iter.hasNext()){ + Map.Entry en = iter.next(); + Property property = new Property(en.getKey(), Direct.IN, DataType.VARCHAR , en.getValue()); + userDefParamsMaps.put(property.getProp(),property); + } + return userDefParamsMaps; + } + return null; + } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java index e0c00c55d9..5074a5e0f5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java @@ -16,20 +16,21 @@ */ package org.apache.dolphinscheduler.server.utils; +import java.nio.charset.StandardCharsets; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.commons.io.FileUtils; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.service.log.LogClientService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; -import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; @@ -60,7 +61,7 @@ public class ProcessUtils { allowAmbiguousCommands = true; String value = System.getProperty("jdk.lang.Process.allowAmbiguousCommands"); if (value != null) { - allowAmbiguousCommands = !"false".equalsIgnoreCase(value); + allowAmbiguousCommands = !"false".equalsIgnoreCase(value); } } if (allowAmbiguousCommands) { @@ -68,7 +69,7 @@ public class ProcessUtils { String executablePath = new File(cmd[0]).getPath(); if (needsEscaping(VERIFICATION_LEGACY, executablePath)) { - executablePath = quoteString(executablePath); + executablePath = quoteString(executablePath); } cmdstr = createCommandLine( @@ -81,7 +82,7 @@ public class ProcessUtils { StringBuilder join = new StringBuilder(); for (String s : cmd) { - join.append(s).append(' '); + join.append(s).append(' '); } cmd = getTokensFromCommand(join.toString()); @@ -89,7 +90,7 @@ public class ProcessUtils { // Check new executable name once more if (security != null) { - security.checkExec(executablePath); + security.checkExec(executablePath); } } @@ -147,7 +148,7 @@ public class ProcessUtils { ArrayList matchList = new ArrayList<>(8); Matcher regexMatcher = LazyPattern.PATTERN.matcher(command); while (regexMatcher.find()) { - matchList.add(regexMatcher.group()); + matchList.add(regexMatcher.group()); } return matchList.toArray(new String[matchList.size()]); } @@ -273,15 +274,15 @@ public class ProcessUtils { * @param appIds app id list * @param logger logger * @param tenantCode tenant code - * @param workDir work dir + * @param executePath execute path * @throws IOException io exception */ - public static void cancelApplication(List appIds, Logger logger, String tenantCode,String workDir) + public static void cancelApplication(List appIds, Logger logger, String tenantCode,String executePath) throws IOException { if (appIds.size() > 0) { String appid = appIds.get(appIds.size() - 1); String commandFile = String - .format("%s/%s.kill", workDir, appid); + .format("%s/%s.kill", executePath, appid); String cmd = "yarn application -kill " + appid; try { StringBuilder sb = new StringBuilder(); @@ -297,7 +298,7 @@ public class ProcessUtils { File f = new File(commandFile); if (!f.exists()) { - FileUtils.writeStringToFile(new File(commandFile), sb.toString(), Charset.forName("UTF-8")); + FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); } String runCmd = "sh " + commandFile; @@ -309,7 +310,7 @@ public class ProcessUtils { Runtime.getRuntime().exec(runCmd); } catch (Exception e) { - logger.error("kill application failed", e); + logger.error("kill application error", e); } } } @@ -317,15 +318,15 @@ public class ProcessUtils { /** * kill tasks according to different task types * - * @param taskInstance task instance + * @param taskExecutionContext taskExecutionContext */ - public static void kill(TaskInstance taskInstance) { + public static void kill(TaskExecutionContext taskExecutionContext) { try { - int processId = taskInstance.getPid(); + int processId = taskExecutionContext.getProcessId(); if(processId == 0 ){ - logger.error("process kill failed, process id :{}, task id:{}", - processId, taskInstance.getId()); - return ; + logger.error("process kill failed, process id :{}, task id:{}", + processId, taskExecutionContext.getTaskInstanceId()); + return ; } String cmd = String.format("sudo kill -9 %s", getPidsStr(processId)); @@ -335,7 +336,7 @@ public class ProcessUtils { OSUtils.exeCmd(cmd); // find log and kill yarn job - killYarnJob(taskInstance); + killYarnJob(taskExecutionContext); } catch (Exception e) { logger.error("kill task failed", e); @@ -370,16 +371,18 @@ public class ProcessUtils { /** * find logs and kill yarn tasks * - * @param taskInstance task instance + * @param taskExecutionContext taskExecutionContext */ - public static void killYarnJob(TaskInstance taskInstance) { + public static void killYarnJob(TaskExecutionContext taskExecutionContext) { try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); LogClientService logClient = null; String log = null; try { logClient = new LogClientService(); - log = logClient.viewLog(taskInstance.getHost(), Constants.RPC_PORT, taskInstance.getLogPath()); + log = logClient.viewLog(Host.of(taskExecutionContext.getHost()).getIp(), + Constants.RPC_PORT, + taskExecutionContext.getLogPath()); } finally { if(logClient != null){ logClient.close(); @@ -387,13 +390,13 @@ public class ProcessUtils { } if (StringUtils.isNotEmpty(log)) { List appIds = LoggerUtils.getAppIds(log, logger); - String workerDir = taskInstance.getExecutePath(); + String workerDir = taskExecutionContext.getExecutePath(); if (StringUtils.isEmpty(workerDir)) { logger.error("task instance work dir is empty"); throw new RuntimeException("task instance work dir is empty"); } if (appIds.size() > 0) { - cancelApplication(appIds, logger, taskInstance.getProcessInstance().getTenantCode(), taskInstance.getExecutePath()); + cancelApplication(appIds, logger, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java index 5550e750b5..caec6e78a8 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java @@ -28,7 +28,7 @@ import org.springframework.context.annotation.ComponentScan; @ComponentScan("org.apache.dolphinscheduler") public class RemoveZKNode implements CommandLineRunner { - private static Integer ARGS_LENGTH = 1; + private static final Integer ARGS_LENGTH = 1; private static final Logger logger = LoggerFactory.getLogger(RemoveZKNode.class); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java index 5e2e535cdb..63efb24a3e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.UdfFunc; @@ -48,6 +49,11 @@ public class UDFUtils { * @return create function list */ public static List createFuncs(List udfFuncs, String tenantCode,Logger logger){ + + if (CollectionUtils.isEmpty(udfFuncs)){ + logger.info("can't find udf function resource"); + return null; + } // get hive udf jar path String hiveUdfJarPath = HadoopUtils.getHdfsUdfDir(tenantCode); logger.info("hive udf jar path : {}" , hiveUdfJarPath); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java index ace93079ff..c9052750e8 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java @@ -16,102 +16,50 @@ */ package org.apache.dolphinscheduler.server.worker; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.IStoppable; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; -import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread; -import org.apache.dolphinscheduler.server.zk.ZKWorkerClient; +import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor; +import org.apache.dolphinscheduler.server.worker.processor.TaskKillProcessor; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; -import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.WebApplicationType; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.context.annotation.ComponentScan; import javax.annotation.PostConstruct; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; /** * worker server */ @ComponentScan("org.apache.dolphinscheduler") -public class WorkerServer implements IStoppable { +public class WorkerServer { /** * logger */ private static final Logger logger = LoggerFactory.getLogger(WorkerServer.class); - /** - * zk worker client + * netty remote server */ - @Autowired - private ZKWorkerClient zkWorkerClient = null; - + private NettyRemotingServer nettyRemotingServer; /** - * process service + * worker registry */ @Autowired - private ProcessService processService; + private WorkerRegistry workerRegistry; /** - * alert database access + * worker config */ - @Autowired - private AlertDao alertDao; - - /** - * heartbeat thread pool - */ - private ScheduledExecutorService heartbeatWorkerService; - - /** - * task queue impl - */ - protected ITaskQueue taskQueue; - - /** - * kill executor service - */ - private ExecutorService killExecutorService; - - /** - * fetch task executor service - */ - private ExecutorService fetchTaskExecutorService; - - /** - * CountDownLatch latch - */ - private CountDownLatch latch; - - @Value("${server.is-combined-server:false}") - private Boolean isCombinedServer; - @Autowired private WorkerConfig workerConfig; @@ -141,36 +89,16 @@ public class WorkerServer implements IStoppable { public void run(){ logger.info("start worker server..."); - zkWorkerClient.init(); - - this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); - - this.killExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Kill-Thread-Executor"); - - this.fetchTaskExecutorService = ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor"); - - heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM); + //init remoting server + NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(workerConfig.getListenPort()); + this.nettyRemotingServer = new NettyRemotingServer(serverConfig); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, new TaskExecuteProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_REQUEST, new TaskKillProcessor()); + this.nettyRemotingServer.start(); - // heartbeat thread implement - Runnable heartBeatThread = heartBeatThread(); - - zkWorkerClient.setStoppable(this); - - // regular heartbeat - // delay 5 seconds, send heartbeat every 30 seconds - heartbeatWorkerService.scheduleAtFixedRate(heartBeatThread, 5, workerConfig.getWorkerHeartbeatInterval(), TimeUnit.SECONDS); - - // kill process thread implement - Runnable killProcessThread = getKillProcessThread(); - - // submit kill process thread - killExecutorService.execute(killProcessThread); - - // new fetch task thread - FetchTaskThread fetchTaskThread = new FetchTaskThread(zkWorkerClient, processService, taskQueue); - - // submit fetch task thread - fetchTaskExecutorService.execute(fetchTaskThread); + // worker registry + this.workerRegistry.registry(); /** * register hooks, which are called before the process exits @@ -178,26 +106,12 @@ public class WorkerServer implements IStoppable { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { - // worker server exit alert - if (zkWorkerClient.getActiveMasterNum() <= 1) { - alertDao.sendServerStopedAlert(1, OSUtils.getHost(), "Worker-Server"); - } - stop("shutdownhook"); + close("shutdownHook"); } })); - - //let the main thread await - latch = new CountDownLatch(1); - if (!isCombinedServer) { - try { - latch.await(); - } catch (InterruptedException ignore) { - } - } } - @Override - public synchronized void stop(String cause) { + public void close(String cause) { try { //execute only once @@ -217,42 +131,8 @@ public class WorkerServer implements IStoppable { logger.warn("thread sleep exception", e); } - try { - heartbeatWorkerService.shutdownNow(); - }catch (Exception e){ - logger.warn("heartbeat service stopped exception"); - } - logger.info("heartbeat service stopped"); - - try { - ThreadPoolExecutors.getInstance().shutdown(); - }catch (Exception e){ - logger.warn("threadpool service stopped exception:{}",e.getMessage()); - } - - logger.info("threadpool service stopped"); - - try { - killExecutorService.shutdownNow(); - }catch (Exception e){ - logger.warn("worker kill executor service stopped exception:{}",e.getMessage()); - } - logger.info("worker kill executor service stopped"); - - try { - fetchTaskExecutorService.shutdownNow(); - }catch (Exception e){ - logger.warn("worker fetch task service stopped exception:{}",e.getMessage()); - } - logger.info("worker fetch task service stopped"); - - try{ - zkWorkerClient.close(); - }catch (Exception e){ - logger.warn("zookeeper service stopped exception:{}",e.getMessage()); - } - latch.countDown(); - logger.info("zookeeper service stopped"); + this.nettyRemotingServer.close(); + this.workerRegistry.unRegistry(); } catch (Exception e) { logger.error("worker server stop exception ", e); @@ -260,131 +140,4 @@ public class WorkerServer implements IStoppable { } } - - /** - * heartbeat thread implement - * - * @return - */ - private Runnable heartBeatThread(){ - logger.info("start worker heart beat thread..."); - Runnable heartBeatThread = new Runnable() { - @Override - public void run() { - // send heartbeat to zk - if (StringUtils.isEmpty(zkWorkerClient.getWorkerZNode())){ - logger.error("worker send heartbeat to zk failed"); - } - - zkWorkerClient.heartBeatForZk(zkWorkerClient.getWorkerZNode() , Constants.WORKER_PREFIX); - } - }; - return heartBeatThread; - } - - - /** - * kill process thread implement - * - * @return kill process thread - */ - private Runnable getKillProcessThread(){ - Runnable killProcessThread = new Runnable() { - @Override - public void run() { - logger.info("start listening kill process thread..."); - while (Stopper.isRunning()){ - Set taskInfoSet = taskQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_KILL); - if (CollectionUtils.isNotEmpty(taskInfoSet)){ - for (String taskInfo : taskInfoSet){ - killTask(taskInfo, processService); - removeKillInfoFromQueue(taskInfo); - } - } - try { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } catch (InterruptedException e) { - logger.error("interrupted exception",e); - Thread.currentThread().interrupt(); - } - } - } - }; - return killProcessThread; - } - - /** - * kill task - * - * @param taskInfo task info - * @param pd process dao - */ - private void killTask(String taskInfo, ProcessService pd) { - logger.info("get one kill command from tasks kill queue: " + taskInfo); - String[] taskInfoArray = taskInfo.split("-"); - if(taskInfoArray.length != 2){ - logger.error("error format kill info: " + taskInfo); - return ; - } - String host = taskInfoArray[0]; - int taskInstanceId = Integer.parseInt(taskInfoArray[1]); - TaskInstance taskInstance = pd.getTaskInstanceDetailByTaskId(taskInstanceId); - if(taskInstance == null){ - logger.error("cannot find the kill task :" + taskInfo); - return; - } - - if(host.equals(Constants.NULL) && StringUtils.isEmpty(taskInstance.getHost())){ - deleteTaskFromQueue(taskInstance, pd); - taskInstance.setState(ExecutionStatus.KILL); - pd.saveTaskInstance(taskInstance); - }else{ - if(taskInstance.getTaskType().equals(TaskType.DEPENDENT.toString())){ - taskInstance.setState(ExecutionStatus.KILL); - pd.saveTaskInstance(taskInstance); - }else if(!taskInstance.getState().typeIsFinished()){ - ProcessUtils.kill(taskInstance); - }else{ - logger.info("the task aleady finish: task id: " + taskInstance.getId() - + " state: " + taskInstance.getState().toString()); - } - } - } - - /** - * delete task from queue - * - * @param taskInstance - * @param pd process dao - */ - private void deleteTaskFromQueue(TaskInstance taskInstance, ProcessService pd){ - // creating distributed locks, lock path /dolphinscheduler/lock/worker - InterProcessMutex mutex = null; - logger.info("delete task from tasks queue: " + taskInstance.getId()); - - try { - mutex = zkWorkerClient.acquireZkLock(zkWorkerClient.getZkClient(), - zkWorkerClient.getWorkerLockPath()); - if(pd.checkTaskExistsInTaskQueue(taskInstance)){ - String taskQueueStr = pd.taskZkInfo(taskInstance); - taskQueue.removeNode(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, taskQueueStr); - } - - } catch (Exception e){ - logger.error("remove task thread failure" ,e); - }finally { - AbstractZKClient.releaseMutex(mutex); - } - } - - /** - * remove Kill info from queue - * - * @param taskInfo task info - */ - private void removeKillInfoFromQueue(String taskInfo){ - taskQueue.srem(Constants.DOLPHINSCHEDULER_TASKS_KILL,taskInfo); - } - -} - +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java new file mode 100644 index 0000000000..7df8e01b3d --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.cache; + + +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; + +/** + * TaskExecutionContextCacheManager + */ +public interface TaskExecutionContextCacheManager { + + /** + * get taskInstance by taskInstance id + * + * @param taskInstanceId taskInstanceId + * @return taskInstance + */ + TaskExecutionContext getByTaskInstanceId(Integer taskInstanceId); + + /** + * cache taskInstance + * + * @param taskExecutionContext taskExecutionContext + */ + void cacheTaskExecutionContext(TaskExecutionContext taskExecutionContext); + + /** + * remove taskInstance by taskInstanceId + * @param taskInstanceId taskInstanceId + */ + void removeByTaskInstanceId(Integer taskInstanceId); +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java new file mode 100644 index 0000000000..009332f05c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.cache.impl; + +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; +import org.springframework.stereotype.Service; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * TaskExecutionContextCache + */ +@Service +public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContextCacheManager { + + + /** + * taskInstance caceh + */ + private Map taskExecutionContextCache = new ConcurrentHashMap<>(); + + /** + * get taskInstance by taskInstance id + * + * @param taskInstanceId taskInstanceId + * @return taskInstance + */ + @Override + public TaskExecutionContext getByTaskInstanceId(Integer taskInstanceId) { + return taskExecutionContextCache.get(taskInstanceId); + } + + /** + * cache taskInstance + * + * @param taskExecutionContext taskExecutionContext + */ + @Override + public void cacheTaskExecutionContext(TaskExecutionContext taskExecutionContext) { + taskExecutionContextCache.put(taskExecutionContext.getTaskInstanceId(),taskExecutionContext); + } + + /** + * remove taskInstance by taskInstanceId + * @param taskInstanceId taskInstanceId + */ + @Override + public void removeByTaskInstanceId(Integer taskInstanceId) { + taskExecutionContextCache.remove(taskInstanceId); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java index c4d4b61af5..7f4d93fdf8 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java @@ -1,3 +1,4 @@ + /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with @@ -16,27 +17,52 @@ */ package org.apache.dolphinscheduler.server.worker.config; +import org.apache.dolphinscheduler.common.Constants; import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.PropertySource; import org.springframework.stereotype.Component; @Component +@PropertySource(value = "worker.properties") public class WorkerConfig { - @Value("${worker.exec.threads}") + @Value("${worker.exec.threads:100}") private int workerExecThreads; - @Value("${worker.heartbeat.interval}") + @Value("${worker.heartbeat.interval:10}") private int workerHeartbeatInterval; - @Value("${worker.fetch.task.num}") + @Value("${worker.fetch.task.num:3}") private int workerFetchTaskNum; - @Value("${worker.max.cpuload.avg}") + @Value("${worker.max.cpuload.avg:-1}") private int workerMaxCpuloadAvg; - @Value("${master.reserved.memory}") + @Value("${worker.reserved.memory:0.5}") private double workerReservedMemory; + @Value("${worker.group: default}") + private String workerGroup; + + @Value("${worker.listen.port: 1234}") + private int listenPort; + + public int getListenPort() { + return listenPort; + } + + public void setListenPort(int listenPort) { + this.listenPort = listenPort; + } + + public String getWorkerGroup() { + return workerGroup; + } + + public void setWorkerGroup(String workerGroup) { + this.workerGroup = workerGroup; + } + public int getWorkerExecThreads() { return workerExecThreads; } @@ -70,10 +96,13 @@ public class WorkerConfig { } public int getWorkerMaxCpuloadAvg() { + if (workerMaxCpuloadAvg == -1){ + return Constants.DEFAULT_WORKER_CPU_LOAD; + } return workerMaxCpuloadAvg; } public void setWorkerMaxCpuloadAvg(int workerMaxCpuloadAvg) { this.workerMaxCpuloadAvg = workerMaxCpuloadAvg; } -} +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java new file mode 100644 index 0000000000..cbb8972a33 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.utils.ChannelUtils; +import org.apache.dolphinscheduler.remote.utils.Host; + +/** + * callback channel + */ +public class NettyRemoteChannel { + + /** + * channel + */ + private final Channel channel; + + /** + * equest unique identification + */ + private final long opaque; + + /** + * master host + */ + private final Host host; + + + public NettyRemoteChannel(Channel channel, long opaque) { + this.channel = channel; + this.host = ChannelUtils.toAddress(channel); + this.opaque = opaque; + } + + public Channel getChannel() { + return channel; + } + + public long getOpaque() { + return opaque; + } + + public Host getHost() { + return host; + } + + public boolean isActive(){ + return this.channel.isActive(); + } + + public ChannelFuture writeAndFlush(Command command){ + return this.channel.writeAndFlush(command); + } + + public void close(){ + this.channel.close(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java new file mode 100644 index 0000000000..1e8bf9d0e7 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + + +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelFutureListener; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; + +import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS; + +/** + * taks callback service + */ +@Service +public class TaskCallbackService { + + private final Logger logger = LoggerFactory.getLogger(TaskCallbackService.class); + + /** + * remote channels + */ + private static final ConcurrentHashMap REMOTE_CHANNELS = new ConcurrentHashMap<>(); + + /** + * zookeeper register center + */ + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + /** + * netty remoting client + */ + private final NettyRemotingClient nettyRemotingClient; + + + public TaskCallbackService(){ + final NettyClientConfig clientConfig = new NettyClientConfig(); + this.nettyRemotingClient = new NettyRemotingClient(clientConfig); + } + + /** + * add callback channel + * @param taskInstanceId taskInstanceId + * @param channel channel + */ + public void addRemoteChannel(int taskInstanceId, NettyRemoteChannel channel){ + REMOTE_CHANNELS.put(taskInstanceId, channel); + } + + /** + * get callback channel + * @param taskInstanceId taskInstanceId + * @return callback channel + */ + private NettyRemoteChannel getRemoteChannel(int taskInstanceId){ + NettyRemoteChannel nettyRemoteChannel = REMOTE_CHANNELS.get(taskInstanceId); + if(nettyRemoteChannel == null){ + throw new IllegalArgumentException("nettyRemoteChannel is empty, should call addRemoteChannel first"); + } + if(nettyRemoteChannel.isActive()){ + return nettyRemoteChannel; + } + Channel newChannel = nettyRemotingClient.getChannel(nettyRemoteChannel.getHost()); + if(newChannel != null){ + return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId); + } + logger.warn("original master : {} is not reachable, random select master", nettyRemoteChannel.getHost()); + Set masterNodes = null; + while (Stopper.isRunning()) { + masterNodes = zookeeperRegistryCenter.getMasterNodesDirectly(); + if (CollectionUtils.isEmpty(masterNodes)) { + logger.error("no available master node"); + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + }else { + break; + } + } + for(String masterNode : masterNodes){ + newChannel = nettyRemotingClient.getChannel(Host.of(masterNode)); + if(newChannel != null){ + return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId); + } + } + throw new IllegalStateException(String.format("all available master nodes : %s are not reachable", masterNodes)); + } + + private NettyRemoteChannel getRemoteChannel(Channel newChannel, long opaque, int taskInstanceId){ + NettyRemoteChannel remoteChannel = new NettyRemoteChannel(newChannel, opaque); + addRemoteChannel(taskInstanceId, remoteChannel); + return remoteChannel; + } + + /** + * remove callback channels + * @param taskInstanceId taskInstanceId + */ + public void remove(int taskInstanceId){ + REMOTE_CHANNELS.remove(taskInstanceId); + } + + /** + * send ack + * @param taskInstanceId taskInstanceId + * @param command command + */ + public void sendAck(int taskInstanceId, Command command){ + NettyRemoteChannel nettyRemoteChannel = getRemoteChannel(taskInstanceId); + nettyRemoteChannel.writeAndFlush(command); + } + + /** + * send result + * + * @param taskInstanceId taskInstanceId + * @param command command + */ + public void sendResult(int taskInstanceId, Command command){ + NettyRemoteChannel nettyRemoteChannel = getRemoteChannel(taskInstanceId); + nettyRemoteChannel.writeAndFlush(command).addListener(new ChannelFutureListener(){ + + @Override + public void operationComplete(ChannelFuture future) throws Exception { + if(future.isSuccess()){ + remove(taskInstanceId); + return; + } + } + }); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java new file mode 100644 index 0000000000..ed476133ca --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java @@ -0,0 +1,169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import ch.qos.logback.classic.LoggerContext; +import ch.qos.logback.classic.sift.SiftingAppender; +import com.alibaba.fastjson.JSONObject; +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.server.log.TaskLogDiscriminator; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Date; +import java.util.concurrent.ExecutorService; + +/** + * worker request processor + */ +public class TaskExecuteProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskExecuteProcessor.class); + + + /** + * thread executor service + */ + private final ExecutorService workerExecService; + + /** + * worker config + */ + private final WorkerConfig workerConfig; + + /** + * task callback service + */ + private final TaskCallbackService taskCallbackService; + + public TaskExecuteProcessor(){ + this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); + this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); + this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", workerConfig.getWorkerExecThreads()); + } + + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_EXECUTE_REQUEST == command.getType(), + String.format("invalid command type : %s", command.getType())); + + TaskExecuteRequestCommand taskRequestCommand = FastJsonSerializer.deserialize( + command.getBody(), TaskExecuteRequestCommand.class); + + logger.info("received command : {}", taskRequestCommand); + + String contextJson = taskRequestCommand.getTaskExecutionContext(); + + TaskExecutionContext taskExecutionContext = JSONObject.parseObject(contextJson, TaskExecutionContext.class); + taskExecutionContext.setHost(OSUtils.getHost() + ":" + workerConfig.getListenPort()); + + // local execute path + String execLocalPath = getExecLocalPath(taskExecutionContext); + logger.info("task instance local execute path : {} ", execLocalPath); + + try { + FileUtils.createWorkDirAndUserIfAbsent(execLocalPath, taskExecutionContext.getTenantCode()); + } catch (Exception ex){ + logger.error(String.format("create execLocalPath : %s", execLocalPath), ex); + } + taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(), + new NettyRemoteChannel(channel, command.getOpaque())); + + this.doAck(taskExecutionContext); + // submit task + workerExecService.submit(new TaskExecuteThread(taskExecutionContext,taskCallbackService)); + } + + private void doAck(TaskExecutionContext taskExecutionContext){ + // tell master that task is in executing + TaskExecuteAckCommand ackCommand = buildAckCommand(taskExecutionContext); + taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command()); + } + + /** + * get task log path + * @return log path + */ + private String getTaskLogPath(TaskExecutionContext taskExecutionContext) { + String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) + .getLogger("ROOT") + .getAppender("TASKLOGFILE")) + .getDiscriminator()).getLogBase(); + if (baseLog.startsWith(Constants.SINGLE_SLASH)){ + return baseLog + Constants.SINGLE_SLASH + + taskExecutionContext.getProcessDefineId() + Constants.SINGLE_SLASH + + taskExecutionContext.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskExecutionContext.getTaskInstanceId() + ".log"; + } + return System.getProperty("user.dir") + Constants.SINGLE_SLASH + + baseLog + Constants.SINGLE_SLASH + + taskExecutionContext.getProcessDefineId() + Constants.SINGLE_SLASH + + taskExecutionContext.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskExecutionContext.getTaskInstanceId() + ".log"; + } + + /** + * build ack command + * @param taskExecutionContext taskExecutionContext + * @return TaskExecuteAckCommand + */ + private TaskExecuteAckCommand buildAckCommand(TaskExecutionContext taskExecutionContext) { + TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); + ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + ackCommand.setStatus(ExecutionStatus.RUNNING_EXEUTION.getCode()); + ackCommand.setLogPath(getTaskLogPath(taskExecutionContext)); + ackCommand.setHost(taskExecutionContext.getHost()); + ackCommand.setStartTime(new Date()); + if(taskExecutionContext.getTaskType().equals(TaskType.SQL.name()) || taskExecutionContext.getTaskType().equals(TaskType.PROCEDURE.name())){ + ackCommand.setExecutePath(null); + }else{ + ackCommand.setExecutePath(taskExecutionContext.getExecutePath()); + } + taskExecutionContext.setLogPath(ackCommand.getLogPath()); + return ackCommand; + } + + /** + * get execute local path + * @param taskExecutionContext taskExecutionContext + * @return execute local path + */ + private String getExecLocalPath(TaskExecutionContext taskExecutionContext){ + return FileUtils.getProcessExecDir(taskExecutionContext.getProjectId(), + taskExecutionContext.getProcessDefineId(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId()); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java new file mode 100644 index 0000000000..b6f58279b1 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; +import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.remote.utils.Pair; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.log.LogClientService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collections; +import java.util.List; + +/** + * task kill processor + */ +public class TaskKillProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(TaskKillProcessor.class); + + /** + * worker config + */ + private final WorkerConfig workerConfig; + + /** + * task callback service + */ + private final TaskCallbackService taskCallbackService; + + /** + * taskExecutionContextCacheManager + */ + private TaskExecutionContextCacheManager taskExecutionContextCacheManager; + + + public TaskKillProcessor(){ + this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); + this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); + this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); + } + + /** + * task kill process + * + * @param channel channel channel + * @param command command command + */ + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.TASK_KILL_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType())); + TaskKillRequestCommand killCommand = FastJsonSerializer.deserialize(command.getBody(), TaskKillRequestCommand.class); + logger.info("received kill command : {}", killCommand); + + Pair> result = doKill(killCommand); + + taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(), + new NettyRemoteChannel(channel, command.getOpaque())); + + TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand,result); + taskCallbackService.sendResult(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command()); + } + + /** + * do kill + * @param killCommand + * @return kill result + */ + private Pair> doKill(TaskKillRequestCommand killCommand){ + List appIds = Collections.EMPTY_LIST; + try { + TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); + + Integer processId = taskExecutionContext.getProcessId(); + + if (processId == null || processId.equals(0)){ + logger.error("process kill failed, process id :{}, task id:{}", processId, killCommand.getTaskInstanceId()); + return Pair.of(false, appIds); + } + + String cmd = String.format("sudo kill -9 %s", ProcessUtils.getPidsStr(taskExecutionContext.getProcessId())); + + logger.info("process id:{}, cmd:{}", taskExecutionContext.getProcessId(), cmd); + + OSUtils.exeCmd(cmd); + + // find log and kill yarn job + appIds = killYarnJob(Host.of(taskExecutionContext.getHost()).getIp(), + taskExecutionContext.getLogPath(), + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTenantCode()); + + return Pair.of(true, appIds); + } catch (Exception e) { + logger.error("kill task error", e); + } + return Pair.of(false, appIds); + } + + /** + * build TaskKillResponseCommand + * + * @param killCommand kill command + * @param result exe result + * @return build TaskKillResponseCommand + */ + private TaskKillResponseCommand buildKillTaskResponseCommand(TaskKillRequestCommand killCommand, + Pair> result) { + TaskKillResponseCommand taskKillResponseCommand = new TaskKillResponseCommand(); + taskKillResponseCommand.setStatus(result.getLeft() ? ExecutionStatus.SUCCESS.getCode() : ExecutionStatus.FAILURE.getCode()); + taskKillResponseCommand.setAppIds(result.getRight()); + TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); + if(taskExecutionContext != null){ + taskKillResponseCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + taskKillResponseCommand.setHost(taskExecutionContext.getHost()); + taskKillResponseCommand.setProcessId(taskExecutionContext.getProcessId()); + } + return taskKillResponseCommand; + } + + /** + * kill yarn job + * + * @param host host + * @param logPath logPath + * @param executePath executePath + * @param tenantCode tenantCode + * @return List appIds + */ + private List killYarnJob(String host, String logPath, String executePath, String tenantCode) { + LogClientService logClient = null; + try { + logClient = new LogClientService(); + logger.info("view log host : {},logPath : {}", host,logPath); + String log = logClient.viewLog(host, Constants.RPC_PORT, logPath); + + if (StringUtils.isNotEmpty(log)) { + List appIds = LoggerUtils.getAppIds(log, logger); + if (StringUtils.isEmpty(executePath)) { + logger.error("task instance execute path is empty"); + throw new RuntimeException("task instance execute path is empty"); + } + if (appIds.size() > 0) { + ProcessUtils.cancelApplication(appIds, logger, tenantCode, executePath); + return appIds; + } + } + } catch (Exception e) { + logger.error("kill yarn job error",e); + } finally { + if(logClient != null){ + logClient.close(); + } + } + return Collections.EMPTY_LIST; + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java new file mode 100644 index 0000000000..4d723404a5 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java @@ -0,0 +1,168 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.registry; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.state.ConnectionState; +import org.apache.curator.framework.state.ConnectionStateListener; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import java.util.Date; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import static org.apache.dolphinscheduler.common.Constants.COMMA; +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; +import static org.apache.dolphinscheduler.common.Constants.SLASH; + + +/** + * worker registry + */ +@Service +public class WorkerRegistry { + + private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class); + + /** + * zookeeper registry center + */ + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + /** + * worker config + */ + @Autowired + private WorkerConfig workerConfig; + + /** + * heartbeat executor + */ + private ScheduledExecutorService heartBeatExecutor; + + /** + * worker start time + */ + private String startTime; + + + private String workerGroup; + + @PostConstruct + public void init(){ + this.workerGroup = workerConfig.getWorkerGroup(); + this.startTime = DateUtils.dateToString(new Date()); + this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor")); + } + + /** + * registry + */ + public void registry() { + String address = OSUtils.getHost(); + String localNodePath = getWorkerPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, ""); + zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() { + @Override + public void stateChanged(CuratorFramework client, ConnectionState newState) { + if(newState == ConnectionState.LOST){ + logger.error("worker : {} connection lost from zookeeper", address); + } else if(newState == ConnectionState.RECONNECTED){ + logger.info("worker : {} reconnected to zookeeper", address); + zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, ""); + } else if(newState == ConnectionState.SUSPENDED){ + logger.warn("worker : {} connection SUSPENDED ", address); + } + } + }); + int workerHeartbeatInterval = workerConfig.getWorkerHeartbeatInterval(); + this.heartBeatExecutor.scheduleAtFixedRate(new HeartBeatTask(), workerHeartbeatInterval, workerHeartbeatInterval, TimeUnit.SECONDS); + logger.info("worker node : {} registry to ZK successfully with heartBeatInterval : {}s", address, workerHeartbeatInterval); + + } + + /** + * remove registry info + */ + public void unRegistry() { + String address = getLocalAddress(); + String localNodePath = getWorkerPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath); + this.heartBeatExecutor.shutdownNow(); + logger.info("worker node : {} unRegistry to ZK.", address); + } + + /** + * get worker path + * @return + */ + private String getWorkerPath() { + String address = getLocalAddress(); + StringBuilder builder = new StringBuilder(100); + String workerPath = this.zookeeperRegistryCenter.getWorkerPath(); + builder.append(workerPath).append(SLASH); + if(StringUtils.isEmpty(workerGroup)){ + workerGroup = DEFAULT_WORKER_GROUP; + } + //trim and lower case is need + builder.append(workerGroup.trim().toLowerCase()).append(SLASH); + builder.append(address); + return builder.toString(); + } + + /** + * get local address + * @return + */ + private String getLocalAddress(){ + return OSUtils.getHost() + ":" + workerConfig.getListenPort(); + } + + /** + * hear beat task + */ + class HeartBeatTask implements Runnable{ + + @Override + public void run() { + try { + StringBuilder builder = new StringBuilder(100); + builder.append(OSUtils.cpuUsage()).append(COMMA); + builder.append(OSUtils.memoryUsage()).append(COMMA); + builder.append(OSUtils.loadAverage()).append(COMMA); + builder.append(startTime).append(COMMA); + builder.append(DateUtils.dateToString(new Date())); + String workerPath = getWorkerPath(); + zookeeperRegistryCenter.getZookeeperCachedOperator().update(workerPath, builder.toString()); + } catch (Throwable ex){ + logger.error("error write worker heartbeat info", ex); + } + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java deleted file mode 100644 index 013db83761..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java +++ /dev/null @@ -1,365 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.worker.runner; - -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.entity.Tenant; -import org.apache.dolphinscheduler.dao.entity.WorkerGroup; -import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; -import org.apache.dolphinscheduler.server.zk.ZKWorkerClient; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; -import org.apache.dolphinscheduler.service.zk.AbstractZKClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadPoolExecutor; - -/** - * fetch task thread - */ -public class FetchTaskThread implements Runnable{ - - private static final Logger logger = LoggerFactory.getLogger(FetchTaskThread.class); - /** - * set worker concurrent tasks - */ - private final int taskNum; - - /** - * zkWorkerClient - */ - private final ZKWorkerClient zkWorkerClient; - - /** - * task queue impl - */ - protected ITaskQueue taskQueue; - - /** - * process database access - */ - private final ProcessService processService; - - /** - * worker thread pool executor - */ - private final ExecutorService workerExecService; - - /** - * worker exec nums - */ - private int workerExecNums; - - /** - * task instance - */ - private TaskInstance taskInstance; - - /** - * task instance id - */ - Integer taskInstId; - - /** - * worker config - */ - private WorkerConfig workerConfig; - - public FetchTaskThread(ZKWorkerClient zkWorkerClient, - ProcessService processService, - ITaskQueue taskQueue){ - this.zkWorkerClient = zkWorkerClient; - this.processService = processService; - this.taskQueue = taskQueue; - this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); - this.taskNum = workerConfig.getWorkerFetchTaskNum(); - this.workerExecNums = workerConfig.getWorkerExecThreads(); - // worker thread pool executor - this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Fetch-Task-Thread", workerExecNums); - this.taskInstance = null; - } - - /** - * Check if the task runs on this worker - * @param taskInstance - * @param host - * @return - */ - private boolean checkWorkerGroup(TaskInstance taskInstance, String host){ - - int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance); - - if(taskWorkerGroupId <= 0){ - return true; - } - WorkerGroup workerGroup = processService.queryWorkerGroupById(taskWorkerGroupId); - if(workerGroup == null ){ - logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); - return true; - } - String ips = workerGroup.getIpList(); - if(StringUtils.isBlank(ips)){ - logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers", - taskInstance.getId(), workerGroup.getId()); - } - String[] ipArray = ips.split(Constants.COMMA); - List ipList = Arrays.asList(ipArray); - return ipList.contains(host); - } - - - - - @Override - public void run() { - logger.info("worker start fetch tasks..."); - while (Stopper.isRunning()){ - InterProcessMutex mutex = null; - String currentTaskQueueStr = null; - try { - ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) workerExecService; - //check memory and cpu usage and threads - boolean runCheckFlag = OSUtils.checkResource(workerConfig.getWorkerMaxCpuloadAvg(), workerConfig.getWorkerReservedMemory()) && checkThreadCount(poolExecutor); - - if(!runCheckFlag) { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - continue; - } - - //whether have tasks, if no tasks , no need lock //get all tasks - boolean hasTask = taskQueue.hasTask(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - - if (!hasTask){ - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - continue; - } - // creating distributed locks, lock path /dolphinscheduler/lock/worker - mutex = zkWorkerClient.acquireZkLock(zkWorkerClient.getZkClient(), - zkWorkerClient.getWorkerLockPath()); - - - // task instance id str - List taskQueueStrArr = taskQueue.poll(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, taskNum); - - for(String taskQueueStr : taskQueueStrArr){ - - currentTaskQueueStr = taskQueueStr; - - if (StringUtils.isEmpty(taskQueueStr)) { - continue; - } - - if (!checkThreadCount(poolExecutor)) { - break; - } - - // get task instance id - taskInstId = getTaskInstanceId(taskQueueStr); - - // mainly to wait for the master insert task to succeed - waitForTaskInstance(); - - taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstId); - - // verify task instance is null - if (verifyTaskInstanceIsNull(taskInstance)) { - logger.warn("remove task queue : {} due to taskInstance is null", taskQueueStr); - processErrorTask(taskQueueStr); - continue; - } - - if(!checkWorkerGroup(taskInstance, OSUtils.getHost())){ - continue; - } - - // if process definition is null ,process definition already deleted - int userId = taskInstance.getProcessDefine() == null ? 0 : taskInstance.getProcessDefine().getUserId(); - - Tenant tenant = processService.getTenantForProcess( - taskInstance.getProcessInstance().getTenantId(), - userId); - - // verify tenant is null - if (verifyTenantIsNull(tenant)) { - logger.warn("remove task queue : {} due to tenant is null", taskQueueStr); - processErrorTask(taskQueueStr); - continue; - } - - // set queue for process instance, user-specified queue takes precedence over tenant queue - String userQueue = processService.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); - taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue); - taskInstance.getProcessInstance().setTenantCode(tenant.getTenantCode()); - - logger.info("worker fetch taskId : {} from queue ", taskInstId); - - // local execute path - String execLocalPath = getExecLocalPath(); - - logger.info("task instance local execute path : {} ", execLocalPath); - - // init task - taskInstance.init(OSUtils.getHost(), - new Date(), - execLocalPath); - - // check and create users - FileUtils.createWorkDirAndUserIfAbsent(execLocalPath, - tenant.getTenantCode()); - - logger.info("task : {} ready to submit to task scheduler thread",taskInstId); - // submit task - workerExecService.submit(new TaskScheduleThread(taskInstance, processService)); - - // remove node from zk - removeNodeFromTaskQueue(taskQueueStr); - } - - }catch (Exception e){ - processErrorTask(currentTaskQueueStr); - logger.error("fetch task thread failure" ,e); - }finally { - AbstractZKClient.releaseMutex(mutex); - } - } - } - - /** - * process error task - * - * @param taskQueueStr task queue str - */ - private void processErrorTask(String taskQueueStr){ - // remove from zk - removeNodeFromTaskQueue(taskQueueStr); - - if (taskInstance != null){ - processService.changeTaskState(ExecutionStatus.FAILURE, - taskInstance.getStartTime(), - taskInstance.getHost(), - null, - null, - taskInstId); - } - - } - - /** - * remove node from task queue - * - * @param taskQueueStr task queue - */ - private void removeNodeFromTaskQueue(String taskQueueStr){ - taskQueue.removeNode(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, taskQueueStr); - } - - /** - * verify task instance is null - * @param taskInstance - * @return true if task instance is null - */ - private boolean verifyTaskInstanceIsNull(TaskInstance taskInstance) { - if (taskInstance == null ) { - logger.error("task instance is null. task id : {} ", taskInstId); - return true; - } - return false; - } - - /** - * verify tenant is null - * - * @param tenant tenant - * @return true if tenant is null - */ - private boolean verifyTenantIsNull(Tenant tenant) { - if(tenant == null){ - logger.error("tenant not exists,process instance id : {},task instance id : {}", - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); - return true; - } - return false; - } - - /** - * get execute local path - * - * @return execute local path - */ - private String getExecLocalPath(){ - return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(), - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); - } - - /** - * check thread count - * - * @param poolExecutor pool executor - * @return true if active count < worker exec nums - */ - private boolean checkThreadCount(ThreadPoolExecutor poolExecutor) { - int activeCount = poolExecutor.getActiveCount(); - if (activeCount >= workerExecNums) { - logger.info("thread insufficient , activeCount : {} , " + - "workerExecNums : {}, will sleep : {} millis for thread resource", - activeCount, - workerExecNums, - Constants.SLEEP_TIME_MILLIS); - return false; - } - return true; - } - - /** - * wait for task instance exists, because of db action would be delayed. - * - * @throws Exception exception - */ - private void waitForTaskInstance()throws Exception{ - int retryTimes = 30; - while (taskInstance == null && retryTimes > 0) { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - taskInstance = processService.findTaskInstanceById(taskInstId); - retryTimes--; - } - } - - /** - * get task instance id - * - * @param taskQueueStr task queue - * @return task instance id - */ - private int getTaskInstanceId(String taskQueueStr){ - return Integer.parseInt(taskQueueStr.split(Constants.UNDERLINE)[3]); - } -} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java new file mode 100644 index 0000000000..8cdbf60503 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java @@ -0,0 +1,235 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.runner; + + +import com.alibaba.fastjson.JSONObject; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; +import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.util.*; +import java.util.stream.Collectors; + + +/** + * task scheduler thread + */ +public class TaskExecuteThread implements Runnable { + + /** + * logger + */ + private final Logger logger = LoggerFactory.getLogger(TaskExecuteThread.class); + + /** + * task instance + */ + private TaskExecutionContext taskExecutionContext; + + /** + * abstract task + */ + private AbstractTask task; + + /** + * task callback service + */ + private TaskCallbackService taskCallbackService; + + /** + * constructor + * @param taskExecutionContext taskExecutionContext + * @param taskCallbackService taskCallbackService + */ + public TaskExecuteThread(TaskExecutionContext taskExecutionContext, TaskCallbackService taskCallbackService){ + this.taskExecutionContext = taskExecutionContext; + this.taskCallbackService = taskCallbackService; + } + + @Override + public void run() { + + TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(taskExecutionContext.getTaskInstanceId()); + try { + logger.info("script path : {}", taskExecutionContext.getExecutePath()); + // task node + TaskNode taskNode = JSONObject.parseObject(taskExecutionContext.getTaskJson(), TaskNode.class); + + // copy hdfs/minio file to local + downloadResource(taskExecutionContext.getExecutePath(), + taskExecutionContext.getResources(), + taskExecutionContext.getTenantCode(), + logger); + + taskExecutionContext.setTaskParams(taskNode.getParams()); + taskExecutionContext.setEnvFile(CommonUtils.getSystemEnvPath()); + taskExecutionContext.setDefinedParams(getGlobalParamsMap()); + + // set task timeout + setTaskTimeout(taskExecutionContext, taskNode); + + taskExecutionContext.setTaskAppId(String.format("%s_%s_%s", + taskExecutionContext.getProcessDefineId(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId())); + + // custom logger + Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, + taskExecutionContext.getProcessDefineId(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId())); + + + + task = TaskManager.newTask(taskExecutionContext, + taskLogger); + + // task init + task.init(); + + // task handle + task.handle(); + + // task result process + task.after(); + + responseCommand.setStatus(task.getExitStatus().getCode()); + responseCommand.setEndTime(new Date()); + responseCommand.setProcessId(task.getProcessId()); + responseCommand.setAppIds(task.getAppIds()); + logger.info("task instance id : {},task final status : {}", taskExecutionContext.getTaskInstanceId(), task.getExitStatus()); + }catch (Exception e){ + logger.error("task scheduler failure", e); + kill(); + responseCommand.setStatus(ExecutionStatus.FAILURE.getCode()); + responseCommand.setEndTime(new Date()); + responseCommand.setProcessId(task.getProcessId()); + responseCommand.setAppIds(task.getAppIds()); + } finally { + taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); + } + } + + /** + * get global paras map + * @return + */ + private Map getGlobalParamsMap() { + Map globalParamsMap = new HashMap<>(16); + + // global params string + String globalParamsStr = taskExecutionContext.getGlobalParams(); + if (globalParamsStr != null) { + List globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class); + globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); + } + return globalParamsMap; + } + + /** + * set task timeout + * @param taskExecutionContext TaskExecutionContext + * @param taskNode + */ + private void setTaskTimeout(TaskExecutionContext taskExecutionContext, TaskNode taskNode) { + // the default timeout is the maximum value of the integer + taskExecutionContext.setTaskTimeout(Integer.MAX_VALUE); + TaskTimeoutParameter taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); + if (taskTimeoutParameter.getEnable()){ + // get timeout strategy + taskExecutionContext.setTaskTimeoutStrategy(taskTimeoutParameter.getStrategy().getCode()); + switch (taskTimeoutParameter.getStrategy()){ + case WARN: + break; + case FAILED: + if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { + taskExecutionContext.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); + } + break; + case WARNFAILED: + if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { + taskExecutionContext.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); + } + break; + default: + logger.error("not support task timeout strategy: {}", taskTimeoutParameter.getStrategy()); + throw new IllegalArgumentException("not support task timeout strategy"); + + } + } + } + + + /** + * kill task + */ + public void kill(){ + if (task != null){ + try { + task.cancelApplication(true); + }catch (Exception e){ + logger.error(e.getMessage(),e); + } + } + } + + + /** + * download resource file + * + * @param execLocalPath + * @param projectRes + * @param logger + */ + private void downloadResource(String execLocalPath, + List projectRes, + String tenantCode, + Logger logger) throws Exception { + if (CollectionUtils.isEmpty(projectRes)){ + return; + } + + for (String resource : projectRes) { + File resFile = new File(execLocalPath, resource); + if (!resFile.exists()) { + try { + // query the tenant code of the resource according to the name of the resource + String resHdfsPath = HadoopUtils.getHdfsResourceFileName(tenantCode, resource); + + logger.info("get resource file from hdfs :{}", resHdfsPath); + HadoopUtils.getInstance().copyHdfsToLocal(resHdfsPath, execLocalPath + File.separator + resource, false, true); + }catch (Exception e){ + logger.error(e.getMessage(),e); + throw new RuntimeException(e.getMessage()); + } + } else { + logger.info("file : {} exists ", resFile.getName()); + } + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java deleted file mode 100644 index 21ee1dfa40..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java +++ /dev/null @@ -1,345 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.worker.runner; - - -import ch.qos.logback.classic.LoggerContext; -import ch.qos.logback.classic.sift.SiftingAppender; -import com.alibaba.fastjson.JSONObject; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.AuthorizationType; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.task.AbstractParameters; -import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; -import org.apache.dolphinscheduler.common.log.TaskLogDiscriminator; -import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskManager; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.permission.PermissionCheck; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.*; -import java.util.stream.Collectors; - - -/** - * task scheduler thread - */ -public class TaskScheduleThread implements Runnable { - - /** - * logger - */ - private final Logger logger = LoggerFactory.getLogger(TaskScheduleThread.class); - - /** - * task instance - */ - private TaskInstance taskInstance; - - /** - * process service - */ - private final ProcessService processService; - - /** - * abstract task - */ - private AbstractTask task; - - /** - * constructor - * - * @param taskInstance task instance - * @param processService process dao - */ - public TaskScheduleThread(TaskInstance taskInstance, ProcessService processService){ - this.processService = processService; - this.taskInstance = taskInstance; - } - - @Override - public void run() { - - try { - // update task state is running according to task type - updateTaskState(taskInstance.getTaskType()); - - logger.info("script path : {}", taskInstance.getExecutePath()); - // task node - TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class); - - // get resource files - List resourceFiles = createProjectResFiles(taskNode); - // copy hdfs/minio file to local - downloadResource( - taskInstance.getExecutePath(), - resourceFiles, - logger); - - - // get process instance according to tak instance - ProcessInstance processInstance = taskInstance.getProcessInstance(); - - // set task props - TaskProps taskProps = new TaskProps(taskNode.getParams(), - taskInstance.getExecutePath(), - processInstance.getScheduleTime(), - taskInstance.getName(), - taskInstance.getTaskType(), - taskInstance.getId(), - CommonUtils.getSystemEnvPath(), - processInstance.getTenantCode(), - processInstance.getQueue(), - taskInstance.getStartTime(), - getGlobalParamsMap(), - taskInstance.getDependency(), - processInstance.getCmdTypeIfComplement()); - // set task timeout - setTaskTimeout(taskProps, taskNode); - - taskProps.setTaskAppId(String.format("%s_%s_%s", - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId())); - - // custom logger - Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId())); - - task = TaskManager.newTask(taskInstance.getTaskType(), - taskProps, - taskLogger); - - // task init - task.init(); - - // task handle - task.handle(); - - // task result process - task.after(); - - }catch (Exception e){ - logger.error("task scheduler failure", e); - kill(); - // update task instance state - processService.changeTaskState(ExecutionStatus.FAILURE, - new Date(), - taskInstance.getId()); - } - - logger.info("task instance id : {},task final status : {}", - taskInstance.getId(), - task.getExitStatus()); - // update task instance state - processService.changeTaskState(task.getExitStatus(), - new Date(), - taskInstance.getId()); - } - /** - * get global paras map - * @return - */ - private Map getGlobalParamsMap() { - Map globalParamsMap = new HashMap<>(16); - - // global params string - String globalParamsStr = taskInstance.getProcessInstance().getGlobalParams(); - - if (globalParamsStr != null) { - List globalParamsList = JSONObject.parseArray(globalParamsStr, Property.class); - globalParamsMap.putAll(globalParamsList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue))); - } - return globalParamsMap; - } - - /** - * update task state according to task type - * @param taskType - */ - private void updateTaskState(String taskType) { - // update task status is running - if(taskType.equals(TaskType.SQL.name()) || - taskType.equals(TaskType.PROCEDURE.name())){ - processService.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, - taskInstance.getStartTime(), - taskInstance.getHost(), - null, - getTaskLogPath(), - taskInstance.getId()); - }else{ - processService.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, - taskInstance.getStartTime(), - taskInstance.getHost(), - taskInstance.getExecutePath(), - getTaskLogPath(), - taskInstance.getId()); - } - } - - /** - * get task log path - * @return log path - */ - private String getTaskLogPath() { - String logPath; - try{ - String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) - .getLogger("ROOT") - .getAppender("TASKLOGFILE")) - .getDiscriminator()).getLogBase(); - if (baseLog.startsWith(Constants.SINGLE_SLASH)){ - logPath = baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; - }else{ - logPath = System.getProperty("user.dir") + Constants.SINGLE_SLASH + - baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; - } - }catch (Exception e){ - logger.error("logger" + e); - logPath = ""; - } - return logPath; - } - - /** - * set task timeout - * @param taskProps - * @param taskNode - */ - private void setTaskTimeout(TaskProps taskProps, TaskNode taskNode) { - // the default timeout is the maximum value of the integer - taskProps.setTaskTimeout(Integer.MAX_VALUE); - TaskTimeoutParameter taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); - if (taskTimeoutParameter.getEnable()){ - // get timeout strategy - taskProps.setTaskTimeoutStrategy(taskTimeoutParameter.getStrategy()); - switch (taskTimeoutParameter.getStrategy()){ - case WARN: - break; - case FAILED: - if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { - taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); - } - break; - case WARNFAILED: - if (Integer.MAX_VALUE > taskTimeoutParameter.getInterval() * 60) { - taskProps.setTaskTimeout(taskTimeoutParameter.getInterval() * 60); - } - break; - default: - logger.error("not support task timeout strategy: {}", taskTimeoutParameter.getStrategy()); - throw new IllegalArgumentException("not support task timeout strategy"); - - } - } - } - - - - - /** - * kill task - */ - public void kill(){ - if (task != null){ - try { - task.cancelApplication(true); - }catch (Exception e){ - logger.error(e.getMessage(),e); - } - } - } - - - /** - * create project resource files - */ - private List createProjectResFiles(TaskNode taskNode) throws Exception{ - - Set projectFiles = new HashSet<>(); - AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); - - if (baseParam != null) { - List projectResourceFiles = baseParam.getResourceFilesList(); - projectFiles.addAll(projectResourceFiles); - } - - return new ArrayList<>(projectFiles); - } - - /** - * download resource file - * - * @param execLocalPath - * @param projectRes - * @param logger - */ - private void downloadResource(String execLocalPath, List projectRes, Logger logger) throws Exception { - checkDownloadPermission(projectRes); - for (String res : projectRes) { - File resFile = new File(execLocalPath, res); - if (!resFile.exists()) { - try { - // query the tenant code of the resource according to the name of the resource - String tentnCode = processService.queryTenantCodeByResName(res); - String resHdfsPath = HadoopUtils.getHdfsFilename(tentnCode, res); - - logger.info("get resource file from hdfs :{}", resHdfsPath); - HadoopUtils.getInstance().copyHdfsToLocal(resHdfsPath, execLocalPath + File.separator + res, false, true); - }catch (Exception e){ - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage()); - } - } else { - logger.info("file : {} exists ", resFile.getName()); - } - } - } - - /** - * check download resource permission - * @param projectRes resource name list - * @throws Exception exception - */ - private void checkDownloadPermission(List projectRes) throws Exception { - int userId = taskInstance.getProcessInstance().getExecutorId(); - String[] resNames = projectRes.toArray(new String[projectRes.size()]); - PermissionCheck permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE, processService,resNames,userId,logger); - permissionCheck.checkPermission(); - } -} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java index 8e0ccee16c..7224d349f5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java @@ -16,35 +16,33 @@ */ package org.apache.dolphinscheduler.server.worker.task; -import com.sun.jna.platform.win32.Kernel32; -import com.sun.jna.platform.win32.WinNT; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; -import org.apache.dolphinscheduler.common.utils.LoggerUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.common.utils.process.ProcessBuilderForWin32; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; -import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; + import org.slf4j.Logger; import java.io.*; import java.lang.reflect.Field; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; +import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static org.apache.dolphinscheduler.common.Constants.*; + /** * abstract command executor */ @@ -65,218 +63,139 @@ public abstract class AbstractCommandExecutor { protected Consumer> logHandler; /** - * task dir - */ - protected final String taskDir; - - /** - * task appId - */ - protected final String taskAppId; - - /** - * task appId - */ - protected final int taskInstId; - - /** - * tenant code , execute task linux user - */ - protected final String tenantCode; - - /** - * env file - */ - protected final String envFile; - - /** - * start time + * logger */ - protected final Date startTime; + protected Logger logger; /** - * timeout + * log list */ - protected int timeout; + protected final List logBuffer; /** - * logger + * taskExecutionContext */ - protected Logger logger; + protected TaskExecutionContext taskExecutionContext; /** - * log list + * taskExecutionContextCacheManager */ - protected final List logBuffer; - + private TaskExecutionContextCacheManager taskExecutionContextCacheManager; public AbstractCommandExecutor(Consumer> logHandler, - String taskDir, String taskAppId,int taskInstId,String tenantCode, String envFile, - Date startTime, int timeout, Logger logger){ + TaskExecutionContext taskExecutionContext , + Logger logger){ this.logHandler = logHandler; - this.taskDir = taskDir; - this.taskAppId = taskAppId; - this.taskInstId = taskInstId; - this.tenantCode = tenantCode; - this.envFile = envFile; - this.startTime = startTime; - this.timeout = timeout; + this.taskExecutionContext = taskExecutionContext; this.logger = logger; this.logBuffer = Collections.synchronizedList(new ArrayList<>()); + this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); + } + + /** + * build process + * + * @param commandFile command file + * @throws IOException IO Exception + */ + private void buildProcess(String commandFile) throws IOException { + //init process builder + ProcessBuilder processBuilder = new ProcessBuilder(); + // setting up a working directory + processBuilder.directory(new File(taskExecutionContext.getExecutePath())); + // merge error information to standard output stream + processBuilder.redirectErrorStream(true); + // setting up user to run commands + List command = new LinkedList<>(); + command.add("sudo"); + command.add("-u"); + command.add(taskExecutionContext.getTenantCode()); + command.add(commandInterpreter()); + command.addAll(commandOptions()); + command.add(commandFile); + processBuilder.command(command); + + process = processBuilder.start(); + + // print command + printCommand(processBuilder); } /** * task specific execution logic * - * @param execCommand exec command - * @param processService process dao - * @return exit status code + * @param execCommand execCommand + * @return CommandExecuteResult + * @throws Exception if error throws Exception */ - public int run(String execCommand, ProcessService processService) { - int exitStatusCode; + public CommandExecuteResult run(String execCommand) throws Exception{ - try { - if (StringUtils.isEmpty(execCommand)) { - exitStatusCode = 0; - return exitStatusCode; - } + CommandExecuteResult result = new CommandExecuteResult(); - String commandFilePath = buildCommandFilePath(); - // create command file if not exists - createCommandFileIfNotExists(execCommand, commandFilePath); + if (StringUtils.isEmpty(execCommand)) { + return result; + } - //build process - buildProcess(commandFilePath); + String commandFilePath = buildCommandFilePath(); - // parse process output - parseProcessOutput(process); + // create command file if not exists + createCommandFileIfNotExists(execCommand, commandFilePath); - // get process id - int pid = getProcessId(process); + //build process + buildProcess(commandFilePath); - processService.updatePidByTaskInstId(taskInstId, pid, ""); + // parse process output + parseProcessOutput(process); - logger.info("process start, process id is: {}", pid); - // if timeout occurs, exit directly - long remainTime = getRemaintime(); + Integer processId = getProcessId(process); - // waiting for the run to finish - boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); + result.setProcessId(processId); - if (status) { - exitStatusCode = process.exitValue(); - logger.info("process has exited, work dir:{}, pid:{} ,exitStatusCode:{}", taskDir, pid,exitStatusCode); - //update process state to db - exitStatusCode = updateState(processService, exitStatusCode, pid, taskInstId); + // cache processId + taskExecutionContext.setProcessId(processId); + taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext); - } else { - TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - if (taskInstance == null) { - logger.error("task instance id:{} not exist", taskInstId); - } else { - ProcessUtils.kill(taskInstance); - } - exitStatusCode = -1; - logger.warn("process timeout, work dir:{}, pid:{}", taskDir, pid); - } + // print process id + logger.info("process start, process id is: {}", processId); - } catch (InterruptedException e) { - exitStatusCode = -1; - logger.error("interrupt exception: {}, task may be cancelled or killed", e.getMessage(), e); - throw new RuntimeException("interrupt exception. exitCode is : " + exitStatusCode); - } catch (Exception e) { - exitStatusCode = -1; - logger.error(e.getMessage(), e); - throw new RuntimeException("process error . exitCode is : " + exitStatusCode); - } + // if timeout occurs, exit directly + long remainTime = getRemaintime(); - return exitStatusCode; - } + // waiting for the run to finish + boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); - /** - * build process - * - * @param commandFile command file - * @throws IOException IO Exception - */ - private void buildProcess(String commandFile) throws IOException { - // command list - List command = new ArrayList<>(); - //init process builder - if (OSUtils.isWindows()) { - ProcessBuilderForWin32 processBuilder = new ProcessBuilderForWin32(); - // setting up a working directory - processBuilder.directory(new File(taskDir)); - processBuilder.user(tenantCode, StringUtils.EMPTY); - // merge error information to standard output stream - processBuilder.redirectErrorStream(true); - - // setting up user to run commands - command.add(commandInterpreter()); - command.add("/c"); - command.addAll(commandOptions()); - command.add(commandFile); - - // setting commands - processBuilder.command(command); - process = processBuilder.start(); - } else { - ProcessBuilder processBuilder = new ProcessBuilder(); - // setting up a working directory - processBuilder.directory(new File(taskDir)); - // merge error information to standard output stream - processBuilder.redirectErrorStream(true); - - // setting up user to run commands - command.add("sudo"); - command.add("-u"); - command.add(tenantCode); - command.add(commandInterpreter()); - command.addAll(commandOptions()); - command.add(commandFile); - - // setting commands - processBuilder.command(command); - process = processBuilder.start(); - } + logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{}", + taskExecutionContext.getExecutePath(), + processId + , result.getExitStatusCode()); - // print command - printCommand(command); - } + // if SHELL task exit + if (status) { + // set appIds + List appIds = getAppIds(taskExecutionContext.getLogPath()); + result.setAppIds(String.join(Constants.COMMA, appIds)); - /** - * update process state to db - * - * @param processService process dao - * @param exitStatusCode exit status code - * @param pid process id - * @param taskInstId task instance id - * @return exit status code - */ - private int updateState(ProcessService processService, int exitStatusCode, int pid, int taskInstId) { - //get yarn state by log - if (exitStatusCode == 0) { - TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); - logger.info("process id is {}", pid); - - List appIds = getAppLinks(taskInstance.getLogPath()); - if (appIds.size() > 0) { - String appUrl = String.join(Constants.COMMA, appIds); - logger.info("yarn log url:{}",appUrl); - processService.updatePidByTaskInstId(taskInstId, pid, appUrl); - } + // SHELL task state + result.setExitStatusCode(process.exitValue()); - // check if all operations are completed - if (!isSuccessOfYarnState(appIds)) { - exitStatusCode = -1; + // if yarn task , yarn state is final state + if (process.exitValue() == 0){ + result.setExitStatusCode(isSuccessOfYarnState(appIds) ? EXIT_CODE_SUCCESS : EXIT_CODE_FAILURE); } + } else { + logger.error("process has failure , exitStatusCode : {} , ready to kill ...", result.getExitStatusCode()); + ProcessUtils.kill(taskExecutionContext); + result.setExitStatusCode(EXIT_CODE_FAILURE); } - return exitStatusCode; + + + return result; } + /** * cancel application * @throws Exception exception @@ -320,7 +239,7 @@ public abstract class AbstractCommandExecutor { // sudo -u user command to run command String cmd = String.format("sudo kill %d", processId); - logger.info("soft kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); + logger.info("soft kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { @@ -340,7 +259,7 @@ public abstract class AbstractCommandExecutor { try { String cmd = String.format("sudo kill -9 %d", processId); - logger.info("hard kill task:{}, process id:{}, cmd:{}", taskAppId, processId, cmd); + logger.info("hard kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd); Runtime.getRuntime().exec(cmd); } catch (IOException e) { @@ -351,13 +270,13 @@ public abstract class AbstractCommandExecutor { /** * print command - * @param command command + * @param processBuilder process builder */ - private void printCommand(List command) { + private void printCommand(ProcessBuilder processBuilder) { String cmdStr; try { - cmdStr = ProcessUtils.buildCommandStr(command); + cmdStr = ProcessUtils.buildCommandStr(processBuilder.command()); logger.info("task run command:\n{}", cmdStr); } catch (IOException e) { logger.error(e.getMessage(), e); @@ -381,7 +300,7 @@ public abstract class AbstractCommandExecutor { * @param process process */ private void parseProcessOutput(Process process) { - String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskAppId); + String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskExecutionContext.getTaskAppId()); ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); parseProcessOutputExecutorService.submit(new Runnable(){ @Override @@ -389,11 +308,7 @@ public abstract class AbstractCommandExecutor { BufferedReader inReader = null; try { - if (OSUtils.isWindows()) { - inReader = new BufferedReader(new InputStreamReader(process.getInputStream(), Charset.forName("GBK"))); - } else { - inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); - } + inReader = new BufferedReader(new InputStreamReader(process.getInputStream())); String line; long lastFlushTime = System.currentTimeMillis(); @@ -413,10 +328,6 @@ public abstract class AbstractCommandExecutor { parseProcessOutputExecutorService.shutdown(); } - public int getPid() { - return getProcessId(process); - } - /** * check yarn state * @@ -424,11 +335,10 @@ public abstract class AbstractCommandExecutor { * @return is success of yarn task state */ public boolean isSuccessOfYarnState(List appIds) { - boolean result = true; try { for (String appId : appIds) { - while(true){ + while(Stopper.isRunning()){ ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); logger.info("appId:{}, final state:{}",appId,applicationStatus.name()); if (applicationStatus.equals(ExecutionStatus.FAILURE) || @@ -443,24 +353,29 @@ public abstract class AbstractCommandExecutor { } } } catch (Exception e) { - logger.error("yarn applications: {} status failed ", appIds,e); + logger.error(String.format("yarn applications: %s status failed ", appIds.toString()),e); result = false; } return result; } + public int getProcessId() { + return getProcessId(process); + } + /** * get app links - * @param fileName file name + * + * @param logPath log path * @return app id list */ - private List getAppLinks(String fileName) { - List logs = convertFile2List(fileName); + private List getAppIds(String logPath) { + List logs = convertFile2List(logPath); - List appIds = new ArrayList(); + List appIds = new ArrayList<>(); /** - * analysis log,get submited yarn application id + * analysis log?get submited yarn application id */ for (String log : logs) { String appId = findAppId(log); @@ -493,7 +408,7 @@ public abstract class AbstractCommandExecutor { lineList.add(line); } } catch (Exception e) { - logger.error("read file: {} failed",filename,e); + logger.error(String.format("read file: %s failed : ",filename),e); } finally { if(br != null){ try { @@ -522,13 +437,13 @@ public abstract class AbstractCommandExecutor { /** - * get remain time(s) + * get remain time?s? * * @return remain time */ private long getRemaintime() { - long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; - long remainTime = timeout - usedTime; + long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000; + long remainTime = taskExecutionContext.getTaskTimeout() - usedTime; if (remainTime < 0) { throw new RuntimeException("task execution time out"); @@ -545,15 +460,12 @@ public abstract class AbstractCommandExecutor { */ private int getProcessId(Process process) { int processId = 0; + try { Field f = process.getClass().getDeclaredField(Constants.PID); f.setAccessible(true); - if (OSUtils.isWindows()) { - WinNT.HANDLE handle = (WinNT.HANDLE) f.get(process); - processId = Kernel32.INSTANCE.GetProcessId(handle); - } else { - processId = f.getInt(process); - } + + processId = f.getInt(process); } catch (Throwable e) { logger.error(e.getMessage(), e); } @@ -603,6 +515,5 @@ public abstract class AbstractCommandExecutor { } protected abstract String buildCommandFilePath(); protected abstract String commandInterpreter(); - protected abstract boolean checkFindApp(String line); protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; -} +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java index 3795506b78..36b974b97a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java @@ -17,9 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; -import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; @@ -34,10 +32,13 @@ import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.TaskRecordDao; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; +import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; @@ -47,9 +48,9 @@ import java.util.Map; public abstract class AbstractTask { /** - * task props + * taskExecutionContext **/ - protected TaskProps taskProps; + TaskExecutionContext taskExecutionContext; /** * log record @@ -57,6 +58,17 @@ public abstract class AbstractTask { protected Logger logger; + /** + * SHELL process pid + */ + protected int processId; + + /** + * other resource manager appId , for example : YARN etc + */ + protected String appIds; + + /** * cancel */ @@ -69,11 +81,11 @@ public abstract class AbstractTask { /** * constructor - * @param taskProps task props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - protected AbstractTask(TaskProps taskProps, Logger logger) { - this.taskProps = taskProps; + protected AbstractTask(TaskExecutionContext taskExecutionContext, Logger logger) { + this.taskExecutionContext = taskExecutionContext; this.logger = logger; } @@ -121,6 +133,22 @@ public abstract class AbstractTask { this.exitStatusCode = exitStatusCode; } + public String getAppIds() { + return appIds; + } + + public void setAppIds(String appIds) { + this.appIds = appIds; + } + + public int getProcessId() { + return processId; + } + + public void setProcessId(int processId) { + this.processId = processId; + } + /** * get task parameters * @return AbstractParameters @@ -128,6 +156,7 @@ public abstract class AbstractTask { public abstract AbstractParameters getParameters(); + /** * result processing */ @@ -135,20 +164,20 @@ public abstract class AbstractTask { if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ // task recor flat : if true , start up qianfan if (TaskRecordDao.getTaskRecordFlag() - && TaskType.typeIsNormalTask(taskProps.getTaskType())){ - AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskProps.getTaskParams(), getCurTaskParamsClass()); + && TaskType.typeIsNormalTask(taskExecutionContext.getTaskType())){ + AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskExecutionContext.getTaskParams(), getCurTaskParamsClass()); // replace placeholder - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), params.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); if (paramsMap != null && !paramsMap.isEmpty() && paramsMap.containsKey("v_proc_date")){ String vProcDate = paramsMap.get("v_proc_date").getValue(); if (!StringUtils.isEmpty(vProcDate)){ - TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskProps.getNodeName(), vProcDate); + TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskExecutionContext.getTaskName(), vProcDate); logger.info("task record status : {}",taskRecordState); if (taskRecordState == TaskRecordStatus.FAILURE){ setExitStatusCode(Constants.EXIT_CODE_FAILURE); @@ -174,7 +203,7 @@ public abstract class AbstractTask { private Class getCurTaskParamsClass(){ Class paramsClass = null; // get task type - TaskType taskType = TaskType.valueOf(taskProps.getTaskType()); + TaskType taskType = TaskType.valueOf(taskExecutionContext.getTaskType()); switch (taskType){ case SHELL: paramsClass = ShellParameters.class; @@ -232,4 +261,5 @@ public abstract class AbstractTask { } return status; } + } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java index 39f4dfbb97..07b8f80847 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -26,11 +27,6 @@ import org.slf4j.Logger; * abstract yarn task */ public abstract class AbstractYarnTask extends AbstractTask { - - /** - * process instance - */ - /** * process task */ @@ -43,28 +39,25 @@ public abstract class AbstractYarnTask extends AbstractTask { /** * Abstract Yarn Task - * @param taskProps task rops + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public AbstractYarnTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); + public AbstractYarnTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); this.processService = SpringApplicationContext.getBean(ProcessService.class); this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, - taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), + taskExecutionContext, logger); } @Override public void handle() throws Exception { try { - // construct process - exitStatusCode = shellCommandExecutor.run(buildCommand(), processService); + // SHELL task exit code + CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(buildCommand()); + setExitStatusCode(commandExecuteResult.getExitStatusCode()); + setAppIds(commandExecuteResult.getAppIds()); + setProcessId(commandExecuteResult.getProcessId()); } catch (Exception e) { logger.error("yarn process failure", e); exitStatusCode = -1; @@ -82,9 +75,9 @@ public abstract class AbstractYarnTask extends AbstractTask { cancel = true; // cancel process shellCommandExecutor.cancelApplication(); - TaskInstance taskInstance = processService.findTaskInstanceById(taskProps.getTaskInstId()); + TaskInstance taskInstance = processService.findTaskInstanceById(taskExecutionContext.getTaskInstanceId()); if (status && taskInstance != null){ - ProcessUtils.killYarnJob(taskInstance); + ProcessUtils.killYarnJob(taskExecutionContext); } } @@ -94,4 +87,9 @@ public abstract class AbstractYarnTask extends AbstractTask { * @throws Exception exception */ protected abstract String buildCommand() throws Exception; + + /** + * set main jar name + */ + protected abstract void setMainJarName(); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/CommandExecuteResult.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/CommandExecuteResult.java new file mode 100644 index 0000000000..5d1afe5ebd --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/CommandExecuteResult.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.task; + +/** + * command execute result + */ +public class CommandExecuteResult { + + /** + * command exit code + */ + private Integer exitStatusCode; + + /** + * appIds + */ + private String appIds; + + /** + * process id + */ + private Integer processId; + + + public CommandExecuteResult(){ + this.exitStatusCode = 0; + } + + + public Integer getExitStatusCode() { + return exitStatusCode; + } + + public void setExitStatusCode(Integer exitStatusCode) { + this.exitStatusCode = exitStatusCode; + } + + public String getAppIds() { + return appIds; + } + + public void setAppIds(String appIds) { + this.appIds = appIds; + } + + public Integer getProcessId() { + return processId; + } + + public void setProcessId(Integer processId) { + this.processId = processId; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java index a673134488..344d00fa88 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/PythonCommandExecutor.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,25 +51,13 @@ public class PythonCommandExecutor extends AbstractCommandExecutor { /** * constructor * @param logHandler log handler - * @param taskDir task dir - * @param taskAppId task app id - * @param taskInstId task instance id - * @param tenantCode tenant code - * @param envFile env file - * @param startTime start time - * @param timeout timeout + * @param taskExecutionContext taskExecutionContext * @param logger logger */ public PythonCommandExecutor(Consumer> logHandler, - String taskDir, - String taskAppId, - int taskInstId, - String tenantCode, - String envFile, - Date startTime, - int timeout, + TaskExecutionContext taskExecutionContext, Logger logger) { - super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); + super(logHandler,taskExecutionContext,logger); } @@ -79,7 +68,7 @@ public class PythonCommandExecutor extends AbstractCommandExecutor { */ @Override protected String buildCommandFilePath() { - return String.format("%s/py_%s.command", taskDir, taskAppId); + return String.format("%s/py_%s.command", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); } /** @@ -90,7 +79,7 @@ public class PythonCommandExecutor extends AbstractCommandExecutor { */ @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { - logger.info("tenantCode :{}, task dir:{}", tenantCode, taskDir); + logger.info("tenantCode :{}, task dir:{}", taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); if (!Files.exists(Paths.get(commandFile))) { logger.info("generate command file:{}", commandFile); @@ -125,22 +114,13 @@ public class PythonCommandExecutor extends AbstractCommandExecutor { */ @Override protected String commandInterpreter() { - String pythonHome = getPythonHome(envFile); + String pythonHome = getPythonHome(taskExecutionContext.getEnvFile()); if (StringUtils.isEmpty(pythonHome)){ return PYTHON; } return pythonHome; } - /** - * check find yarn application id - * @param line line - * @return boolean - */ - @Override - protected boolean checkFindApp(String line) { - return true; - } /** diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java index 5d14e6b2a7..21418104a0 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/ShellCommandExecutor.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.commons.io.FileUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.slf4j.Logger; @@ -25,7 +26,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; -import java.util.Date; import java.util.List; import java.util.function.Consumer; @@ -46,33 +46,21 @@ public class ShellCommandExecutor extends AbstractCommandExecutor { /** * constructor - * @param logHandler log handler - * @param taskDir task dir - * @param taskAppId task app id - * @param taskInstId task instance id - * @param tenantCode tenant code - * @param envFile env file - * @param startTime start time - * @param timeout timeout - * @param logger logger + * @param logHandler logHandler + * @param taskExecutionContext taskExecutionContext + * @param logger logger */ public ShellCommandExecutor(Consumer> logHandler, - String taskDir, - String taskAppId, - int taskInstId, - String tenantCode, - String envFile, - Date startTime, - int timeout, + TaskExecutionContext taskExecutionContext, Logger logger) { - super(logHandler,taskDir,taskAppId,taskInstId,tenantCode, envFile, startTime, timeout, logger); + super(logHandler,taskExecutionContext,logger); } @Override protected String buildCommandFilePath() { // command file - return String.format("%s/%s.%s", taskDir, taskAppId, OSUtils.isWindows() ? "bat" : "command"); + return String.format("%s/%s.command", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); } /** @@ -84,15 +72,6 @@ public class ShellCommandExecutor extends AbstractCommandExecutor { return OSUtils.isWindows() ? CMD : SH; } - /** - * check find yarn application id - * @param line line - * @return true if line contains task app id - */ - @Override - protected boolean checkFindApp(String line) { - return line.contains(taskAppId); - } /** * create command file if not exists @@ -102,7 +81,8 @@ public class ShellCommandExecutor extends AbstractCommandExecutor { */ @Override protected void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException { - logger.info("tenantCode user:{}, task dir:{}", tenantCode, taskAppId); + logger.info("tenantCode user:{}, task dir:{}", taskExecutionContext.getTenantCode(), + taskExecutionContext.getTaskAppId()); // create if non existence if (!Files.exists(Paths.get(commandFile))) { @@ -112,15 +92,15 @@ public class ShellCommandExecutor extends AbstractCommandExecutor { if (OSUtils.isWindows()) { sb.append("@echo off\n"); sb.append("cd /d %~dp0\n"); - if (envFile != null) { - sb.append("call ").append(envFile).append("\n"); + if (taskExecutionContext.getEnvFile() != null) { + sb.append("call ").append(taskExecutionContext.getEnvFile()).append("\n"); } } else { sb.append("#!/bin/sh\n"); sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); sb.append("cd $BASEDIR\n"); - if (envFile != null) { - sb.append("source ").append(envFile).append("\n"); + if (taskExecutionContext.getEnvFile() != null) { + sb.append("source ").append(taskExecutionContext.getEnvFile()).append("\n"); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java index ad62b77655..19ba9c9a21 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java @@ -19,8 +19,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.utils.EnumUtils; -import org.apache.dolphinscheduler.server.worker.task.conditions.ConditionsTask; -import org.apache.dolphinscheduler.server.worker.task.dependent.DependentTask; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.datax.DataxTask; import org.apache.dolphinscheduler.server.worker.task.flink.FlinkTask; import org.apache.dolphinscheduler.server.worker.task.http.HttpTask; @@ -38,44 +37,39 @@ import org.slf4j.Logger; */ public class TaskManager { - /** * create new task - * @param taskType task type - * @param props props + * @param taskExecutionContext taskExecutionContext * @param logger logger * @return AbstractTask * @throws IllegalArgumentException illegal argument exception */ - public static AbstractTask newTask(String taskType, TaskProps props, Logger logger) + public static AbstractTask newTask(TaskExecutionContext taskExecutionContext, + Logger logger) throws IllegalArgumentException { - switch (EnumUtils.getEnum(TaskType.class,taskType)) { + switch (EnumUtils.getEnum(TaskType.class,taskExecutionContext.getTaskType())) { case SHELL: - return new ShellTask(props, logger); + return new ShellTask(taskExecutionContext, logger); case PROCEDURE: - return new ProcedureTask(props, logger); + return new ProcedureTask(taskExecutionContext, logger); case SQL: - return new SqlTask(props, logger); + return new SqlTask(taskExecutionContext, logger); case MR: - return new MapReduceTask(props, logger); + return new MapReduceTask(taskExecutionContext, logger); case SPARK: - return new SparkTask(props, logger); + return new SparkTask(taskExecutionContext, logger); case FLINK: - return new FlinkTask(props, logger); + return new FlinkTask(taskExecutionContext, logger); case PYTHON: - return new PythonTask(props, logger); - case DEPENDENT: - return new DependentTask(props, logger); + return new PythonTask(taskExecutionContext, logger); case HTTP: - return new HttpTask(props, logger); + return new HttpTask(taskExecutionContext, logger); case DATAX: - return new DataxTask(props, logger); + return new DataxTask(taskExecutionContext, logger); case SQOOP: - return new SqoopTask(props, logger); - case CONDITIONS: - return new ConditionsTask(props, logger); + return new SqoopTask(taskExecutionContext, logger); default: - logger.error("unsupport task type: {}", taskType); + logger.error("unsupport task type: {}", taskExecutionContext.getTaskType()); throw new IllegalArgumentException("not support task type"); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java index edec419384..00e78d37d1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskProps.java @@ -35,12 +35,12 @@ public class TaskProps { /** * task node name **/ - private String nodeName; + private String taskName; /** * task instance id **/ - private int taskInstId; + private int taskInstanceId; /** * tenant code , execute task linux user @@ -57,11 +57,6 @@ public class TaskProps { **/ private String taskParams; - /** - * task dir - **/ - private String taskDir; - /** * queue **/ @@ -111,6 +106,22 @@ public class TaskProps { */ private CommandType cmdTypeIfComplement; + + /** + * host + */ + private String host; + + /** + * log path + */ + private String logPath; + + /** + * execute path + */ + private String executePath; + /** * constructor */ @@ -118,39 +129,42 @@ public class TaskProps { /** * constructor - * @param taskParams task params - * @param taskDir task dir - * @param scheduleTime schedule time - * @param nodeName node name - * @param taskType task type - * @param taskInstId task instance id - * @param envFile env file - * @param tenantCode tenant code - * @param queue queue - * @param taskStartTime task start time - * @param definedParams defined params - * @param dependence dependence - * @param cmdTypeIfComplement cmd type if complement + * @param taskParams taskParams + * @param scheduleTime scheduleTime + * @param nodeName nodeName + * @param taskType taskType + * @param taskInstanceId taskInstanceId + * @param envFile envFile + * @param tenantCode tenantCode + * @param queue queue + * @param taskStartTime taskStartTime + * @param definedParams definedParams + * @param dependence dependence + * @param cmdTypeIfComplement cmdTypeIfComplement + * @param host host + * @param logPath logPath + * @param executePath executePath */ public TaskProps(String taskParams, - String taskDir, Date scheduleTime, String nodeName, String taskType, - int taskInstId, + int taskInstanceId, String envFile, String tenantCode, String queue, Date taskStartTime, Map definedParams, String dependence, - CommandType cmdTypeIfComplement){ + CommandType cmdTypeIfComplement, + String host, + String logPath, + String executePath){ this.taskParams = taskParams; - this.taskDir = taskDir; this.scheduleTime = scheduleTime; - this.nodeName = nodeName; + this.taskName = nodeName; this.taskType = taskType; - this.taskInstId = taskInstId; + this.taskInstanceId = taskInstanceId; this.envFile = envFile; this.tenantCode = tenantCode; this.queue = queue; @@ -158,7 +172,9 @@ public class TaskProps { this.definedParams = definedParams; this.dependence = dependence; this.cmdTypeIfComplement = cmdTypeIfComplement; - + this.host = host; + this.logPath = logPath; + this.executePath = executePath; } public String getTenantCode() { @@ -177,12 +193,12 @@ public class TaskProps { this.taskParams = taskParams; } - public String getTaskDir() { - return taskDir; + public String getExecutePath() { + return executePath; } - public void setTaskDir(String taskDir) { - this.taskDir = taskDir; + public void setExecutePath(String executePath) { + this.executePath = executePath; } public Map getDefinedParams() { @@ -202,20 +218,20 @@ public class TaskProps { } - public String getNodeName() { - return nodeName; + public String getTaskName() { + return taskName; } - public void setNodeName(String nodeName) { - this.nodeName = nodeName; + public void setTaskName(String taskName) { + this.taskName = taskName; } - public int getTaskInstId() { - return taskInstId; + public int getTaskInstanceId() { + return taskInstanceId; } - public void setTaskInstId(int taskInstId) { - this.taskInstId = taskInstId; + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; } public String getQueue() { @@ -291,6 +307,22 @@ public class TaskProps { this.cmdTypeIfComplement = cmdTypeIfComplement; } + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + /** * get parameters map * @return user defined params map diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java index 7537ca2edc..218906d91c 100755 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java @@ -18,7 +18,7 @@ package org.apache.dolphinscheduler.server.worker.task.datax; import java.io.File; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @@ -38,6 +38,8 @@ import java.util.Set; import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; @@ -50,11 +52,13 @@ import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; @@ -96,40 +100,28 @@ public class DataxTask extends AbstractTask { */ private DataxParameters dataXParameters; - /** - * task dir - */ - private String taskDir; - /** * shell command executor */ private ShellCommandExecutor shellCommandExecutor; /** - * process dao + * taskExecutionContext */ - private ProcessService processService; + private TaskExecutionContext taskExecutionContext; /** * constructor - * - * @param props - * props - * @param logger - * logger + * @param taskExecutionContext taskExecutionContext + * @param logger logger */ - public DataxTask(TaskProps props, Logger logger) { - super(props, logger); - - this.taskDir = props.getTaskDir(); - logger.info("task dir : {}", taskDir); + public DataxTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, props.getTaskDir(), props.getTaskAppId(), - props.getTaskInstId(), props.getTenantCode(), props.getEnvFile(), props.getTaskStartTime(), - props.getTaskTimeout(), logger); - this.processService = SpringApplicationContext.getBean(ProcessService.class); + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, + taskExecutionContext,logger); } /** @@ -137,8 +129,8 @@ public class DataxTask extends AbstractTask { */ @Override public void init() { - logger.info("datax task params {}", taskProps.getTaskParams()); - dataXParameters = JSONUtils.parseObject(taskProps.getTaskParams(), DataxParameters.class); + logger.info("datax task params {}", taskExecutionContext.getTaskParams()); + dataXParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DataxParameters.class); if (!dataXParameters.checkParameters()) { throw new RuntimeException("datax task params is not valid"); @@ -147,33 +139,37 @@ public class DataxTask extends AbstractTask { /** * run DataX process - * - * @throws Exception + * + * @throws Exception if error throws Exception */ @Override - public void handle() - throws Exception { + public void handle() throws Exception { try { // set the name of the current thread - String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskProps.getTaskAppId()); + String threadLoggerInfoName = String.format("TaskLogInfo-%s", taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); // run datax process String jsonFilePath = buildDataxJsonFile(); String shellCommandFilePath = buildShellCommandFile(jsonFilePath); - exitStatusCode = shellCommandExecutor.run(shellCommandFilePath, processService); + CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(shellCommandFilePath); + + setExitStatusCode(commandExecuteResult.getExitStatusCode()); + setAppIds(commandExecuteResult.getAppIds()); + setProcessId(commandExecuteResult.getProcessId()); } catch (Exception e) { - exitStatusCode = -1; + logger.error("datax task failure", e); + setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } } /** * cancel DataX process - * - * @param cancelApplication - * @throws Exception + * + * @param cancelApplication cancelApplication + * @throws Exception if error throws Exception */ @Override public void cancelApplication(boolean cancelApplication) @@ -185,49 +181,74 @@ public class DataxTask extends AbstractTask { /** * build datax configuration file * - * @return - * @throws Exception + * @return datax json file name + * @throws Exception if error throws Exception */ private String buildDataxJsonFile() throws Exception { // generate json - String fileName = String.format("%s/%s_job.json", taskDir, taskProps.getTaskAppId()); + String fileName = String.format("%s/%s_job.json", + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTaskAppId()); + String json; Path path = new File(fileName).toPath(); if (Files.exists(path)) { return fileName; } - JSONObject job = new JSONObject(); - job.put("content", buildDataxJobContentJson()); - job.put("setting", buildDataxJobSettingJson()); - JSONObject root = new JSONObject(); - root.put("job", job); - root.put("core", buildDataxCoreJson()); - logger.debug("datax job json : {}", root.toString()); + if (dataXParameters.getCustomConfig() == 1){ + + json = dataXParameters.getJson().replaceAll("\\r\\n", "\n"); + + /** + * combining local and global parameters + */ + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), + dataXParameters.getLocalParametersMap(), + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); + if (paramsMap != null){ + json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap)); + } + + }else { + + JSONObject job = new JSONObject(); + job.put("content", buildDataxJobContentJson()); + job.put("setting", buildDataxJobSettingJson()); + + JSONObject root = new JSONObject(); + root.put("job", job); + root.put("core", buildDataxCoreJson()); + json = root.toString(); + } + + logger.debug("datax job json : {}", json); // create datax json file - FileUtils.writeStringToFile(new File(fileName), root.toString(), Charset.forName("UTF-8")); + FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8); return fileName; } /** * build datax job config * - * @return - * @throws SQLException + * @return collection of datax job config JSONObject + * @throws SQLException if error throws SQLException */ - private List buildDataxJobContentJson() - throws SQLException { - DataSource dataSource = processService.findDataSourceById(dataXParameters.getDataSource()); - BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); + private List buildDataxJobContentJson() throws SQLException { + DataxTaskExecutionContext dataxTaskExecutionContext = taskExecutionContext.getDataxTaskExecutionContext(); + + + BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getSourcetype()), + dataxTaskExecutionContext.getSourceConnectionParams()); - DataSource dataTarget = processService.findDataSourceById(dataXParameters.getDataTarget()); - BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(dataTarget.getType(), - dataTarget.getConnectionParams()); + BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(DbType.of(dataxTaskExecutionContext.getTargetType()), + dataxTaskExecutionContext.getTargetConnectionParams()); List readerConnArr = new ArrayList<>(); JSONObject readerConn = new JSONObject(); @@ -241,7 +262,7 @@ public class DataxTask extends AbstractTask { readerParam.put("connection", readerConnArr); JSONObject reader = new JSONObject(); - reader.put("name", DataxUtils.getReaderPluginName(dataSource.getType())); + reader.put("name", DataxUtils.getReaderPluginName(DbType.of(dataxTaskExecutionContext.getSourcetype()))); reader.put("parameter", readerParam); List writerConnArr = new ArrayList<>(); @@ -254,7 +275,9 @@ public class DataxTask extends AbstractTask { writerParam.put("username", dataTargetCfg.getUser()); writerParam.put("password", dataTargetCfg.getPassword()); writerParam.put("column", - parsingSqlColumnNames(dataSource.getType(), dataTarget.getType(), dataSourceCfg, dataXParameters.getSql())); + parsingSqlColumnNames(DbType.of(dataxTaskExecutionContext.getSourcetype()), + DbType.of(dataxTaskExecutionContext.getTargetType()), + dataSourceCfg, dataXParameters.getSql())); writerParam.put("connection", writerConnArr); if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) { @@ -266,7 +289,7 @@ public class DataxTask extends AbstractTask { } JSONObject writer = new JSONObject(); - writer.put("name", DataxUtils.getWriterPluginName(dataTarget.getType())); + writer.put("name", DataxUtils.getWriterPluginName(DbType.of(dataxTaskExecutionContext.getTargetType()))); writer.put("parameter", writerParam); List contentList = new ArrayList<>(); @@ -281,7 +304,7 @@ public class DataxTask extends AbstractTask { /** * build datax setting config * - * @return + * @return datax setting config JSONObject */ private JSONObject buildDataxJobSettingJson() { JSONObject speed = new JSONObject(); @@ -333,13 +356,16 @@ public class DataxTask extends AbstractTask { /** * create command * - * @return - * @throws Exception + * @return shell command file name + * @throws Exception if error throws Exception */ private String buildShellCommandFile(String jobConfigFilePath) throws Exception { // generate scripts - String fileName = String.format("%s/%s_node.%s", taskDir, taskProps.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); + String fileName = String.format("%s/%s_node.sh", + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTaskAppId()); + Path path = new File(fileName).toPath(); if (Files.exists(path)) { @@ -355,13 +381,13 @@ public class DataxTask extends AbstractTask { sbr.append(jobConfigFilePath); String dataxCommand = sbr.toString(); - // find process instance by task id - ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - // combining local and global parameters - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), dataXParameters.getLocalParametersMap(), - processInstance.getCmdTypeIfComplement(), processInstance.getScheduleTime()); + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), + dataXParameters.getLocalParametersMap(), + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); if (paramsMap != null) { dataxCommand = ParameterUtils.convertParameterPlaceholders(dataxCommand, ParamUtils.convert(paramsMap)); } @@ -394,7 +420,7 @@ public class DataxTask extends AbstractTask { * the database connection parameters of the data source * @param sql * sql for data synchronization - * @return + * @return Keyword converted column names */ private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource dataSourceCfg, String sql) { String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(dsType, sql); @@ -417,7 +443,7 @@ public class DataxTask extends AbstractTask { * @param sql * sql for data synchronization * @return column name array - * @throws RuntimeException + * @throws RuntimeException if error throws RuntimeException */ private String[] tryGrammaticalAnalysisSqlColumnNames(DbType dbType, String sql) { String[] columnNames; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java deleted file mode 100644 index f074d57e6c..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.worker.task.dependent; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DependResult; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.model.DependentTaskModel; -import org.apache.dolphinscheduler.common.task.AbstractParameters; -import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.DependentUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; - -import java.util.*; - -import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; - -/** - * Dependent Task - */ -public class DependentTask extends AbstractTask { - - /** - * dependent task list - */ - private List dependentTaskList = new ArrayList<>(); - - /** - * depend item result map - * save the result to log file - */ - private Map dependResultMap = new HashMap<>(); - - /** - * dependent parameters - */ - private DependentParameters dependentParameters; - - /** - * dependent date - */ - private Date dependentDate; - - /** - * process service - */ - private ProcessService processService; - - /** - * constructor - * @param props props - * @param logger logger - */ - public DependentTask(TaskProps props, Logger logger) { - super(props, logger); - } - - @Override - public void init(){ - logger.info("dependent task initialize"); - - this.dependentParameters = JSONUtils.parseObject(this.taskProps.getDependence(), - DependentParameters.class); - - for(DependentTaskModel taskModel : dependentParameters.getDependTaskList()){ - this.dependentTaskList.add(new DependentExecute( - taskModel.getDependItemList(), taskModel.getRelation())); - } - - this.processService = SpringApplicationContext.getBean(ProcessService.class); - - if(taskProps.getScheduleTime() != null){ - this.dependentDate = taskProps.getScheduleTime(); - }else{ - this.dependentDate = taskProps.getTaskStartTime(); - } - - } - - @Override - public void handle() throws Exception { - // set the name of the current thread - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); - Thread.currentThread().setName(threadLoggerInfoName); - - try{ - TaskInstance taskInstance = null; - while(Stopper.isRunning()){ - taskInstance = processService.findTaskInstanceById(this.taskProps.getTaskInstId()); - - if(taskInstance == null){ - exitStatusCode = -1; - break; - } - - if(taskInstance.getState() == ExecutionStatus.KILL){ - this.cancel = true; - } - - if(this.cancel || allDependentTaskFinish()){ - break; - } - - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - - if(cancel){ - exitStatusCode = Constants.EXIT_CODE_KILL; - }else{ - DependResult result = getTaskDependResult(); - exitStatusCode = (result == DependResult.SUCCESS) ? - Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; - } - }catch (Exception e){ - logger.error(e.getMessage(),e); - exitStatusCode = -1; - throw e; - } - } - - /** - * get dependent result - * @return DependResult - */ - private DependResult getTaskDependResult(){ - List dependResultList = new ArrayList<>(); - for(DependentExecute dependentExecute : dependentTaskList){ - DependResult dependResult = dependentExecute.getModelDependResult(dependentDate); - dependResultList.add(dependResult); - } - DependResult result = DependentUtils.getDependResultForRelation( - this.dependentParameters.getRelation(), dependResultList - ); - return result; - } - - /** - * judge all dependent tasks finish - * @return whether all dependent tasks finish - */ - private boolean allDependentTaskFinish(){ - boolean finish = true; - for(DependentExecute dependentExecute : dependentTaskList){ - for(Map.Entry entry: dependentExecute.getDependResultMap().entrySet()) { - if(!dependResultMap.containsKey(entry.getKey())){ - dependResultMap.put(entry.getKey(), entry.getValue()); - //save depend result to log - logger.info("dependent item complete {} {},{}", - DEPENDENT_SPLIT, entry.getKey(), entry.getValue().toString()); - } - } - if(!dependentExecute.finish(dependentDate)){ - finish = false; - } - } - return finish; - } - - - @Override - public void cancelApplication(boolean cancelApplication) throws Exception { - // cancel process - this.cancel = true; - } - - @Override - public AbstractParameters getParameters() { - return null; - } -} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java index c562fbe4dd..c377d5fa68 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java @@ -16,17 +16,19 @@ */ package org.apache.dolphinscheduler.server.worker.task.flink; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.server.utils.FlinkArgsUtils; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.slf4j.Logger; import java.util.ArrayList; @@ -49,35 +51,40 @@ public class FlinkTask extends AbstractYarnTask { */ private FlinkParameters flinkParameters; - public FlinkTask(TaskProps props, Logger logger) { - super(props, logger); + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + + public FlinkTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; } @Override public void init() { - logger.info("flink task params {}", taskProps.getTaskParams()); + logger.info("flink task params {}", taskExecutionContext.getTaskParams()); - flinkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), FlinkParameters.class); + flinkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), FlinkParameters.class); if (!flinkParameters.checkParameters()) { throw new RuntimeException("flink task params is not valid"); } - flinkParameters.setQueue(taskProps.getQueue()); + flinkParameters.setQueue(taskExecutionContext.getQueue()); + setMainJarName(); + if (StringUtils.isNotEmpty(flinkParameters.getMainArgs())) { String args = flinkParameters.getMainArgs(); - // get process instance by task instance id - ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + + + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), flinkParameters.getLocalParametersMap(), - processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); logger.info("param Map : {}", paramsMap); if (paramsMap != null ){ @@ -104,13 +111,35 @@ public class FlinkTask extends AbstractYarnTask { args.addAll(FlinkArgsUtils.buildArgs(flinkParameters)); String command = ParameterUtils - .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); + .convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); logger.info("flink task command : {}", command); return command; } + @Override + protected void setMainJarName() { + // main jar + ResourceInfo mainJar = flinkParameters.getMainJar(); + if (mainJar != null) { + int resourceId = mainJar.getId(); + String resourceName; + if (resourceId == 0) { + resourceName = mainJar.getRes(); + } else { + Resource resource = processService.getResourceById(flinkParameters.getMainJar().getId()); + if (resource == null) { + logger.error("resource id: {} not exist", resourceId); + throw new RuntimeException(String.format("resource id: %d not exist", resourceId)); + } + resourceName = resource.getFullName().replaceFirst("/", ""); + } + mainJar.setRes(resourceName); + flinkParameters.setMainJar(mainJar); + } + } + @Override public AbstractParameters getParameters() { return flinkParameters; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java index 85c8d2723c..ef1ccdd09a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java @@ -21,6 +21,7 @@ import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import org.apache.commons.io.Charsets; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.HttpMethod; import org.apache.dolphinscheduler.common.enums.HttpParametersType; import org.apache.dolphinscheduler.common.process.HttpProperty; @@ -31,12 +32,9 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.http.HttpEntity; import org.apache.http.ParseException; import org.apache.http.client.config.RequestConfig; @@ -68,10 +66,7 @@ public class HttpTask extends AbstractTask { */ private HttpParameters httpParameters; - /** - * process service - */ - private ProcessService processService; + /** * Convert mill seconds to second unit @@ -88,20 +83,26 @@ public class HttpTask extends AbstractTask { */ protected String output; + + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + /** * constructor - * @param props props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public HttpTask(TaskProps props, Logger logger) { - super(props, logger); - this.processService = SpringApplicationContext.getBean(ProcessService.class); + public HttpTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; } @Override public void init() { - logger.info("http task params {}", taskProps.getTaskParams()); - this.httpParameters = JSON.parseObject(taskProps.getTaskParams(), HttpParameters.class); + logger.info("http task params {}", taskExecutionContext.getTaskParams()); + this.httpParameters = JSONObject.parseObject(taskExecutionContext.getTaskParams(), HttpParameters.class); if (!httpParameters.checkParameters()) { throw new RuntimeException("http task params is not valid"); @@ -110,7 +111,7 @@ public class HttpTask extends AbstractTask { @Override public void handle() throws Exception { - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); long startTime = System.currentTimeMillis(); @@ -141,13 +142,13 @@ public class HttpTask extends AbstractTask { */ protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException { RequestBuilder builder = createRequestBuilder(); - ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), httpParameters.getLocalParametersMap(), - processInstance.getCmdTypeIfComplement(), - processInstance.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); List httpPropertyList = new ArrayList<>(); if(CollectionUtils.isNotEmpty(httpParameters.getHttpParams() )){ for (HttpProperty httpProperty: httpParameters.getHttpParams()) { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java index b86ff9952e..fed7b27739 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/mr/MapReduceTask.java @@ -17,16 +17,19 @@ package org.apache.dolphinscheduler.server.worker.task.mr; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.slf4j.Logger; import java.util.ArrayList; @@ -44,35 +47,44 @@ public class MapReduceTask extends AbstractYarnTask { */ private MapreduceParameters mapreduceParameters; + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + /** * constructor - * @param props task props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public MapReduceTask(TaskProps props, Logger logger) { - super(props, logger); + public MapReduceTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; } @Override public void init() { - logger.info("mapreduce task params {}", taskProps.getTaskParams()); + logger.info("mapreduce task params {}", taskExecutionContext.getTaskParams()); - this.mapreduceParameters = JSONUtils.parseObject(taskProps.getTaskParams(), MapreduceParameters.class); + this.mapreduceParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), MapreduceParameters.class); // check parameters if (!mapreduceParameters.checkParameters()) { throw new RuntimeException("mapreduce task params is not valid"); } - mapreduceParameters.setQueue(taskProps.getQueue()); + mapreduceParameters.setQueue(taskExecutionContext.getQueue()); + setMainJarName(); + // replace placeholder - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), mapreduceParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); + if (paramsMap != null){ String args = ParameterUtils.convertParameterPlaceholders(mapreduceParameters.getMainArgs(), ParamUtils.convert(paramsMap)); mapreduceParameters.setMainArgs(args); @@ -93,12 +105,34 @@ public class MapReduceTask extends AbstractYarnTask { List parameterList = buildParameters(mapreduceParameters); String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", parameterList), - taskProps.getDefinedParams()); + taskExecutionContext.getDefinedParams()); logger.info("mapreduce task command: {}", command); return command; } + @Override + protected void setMainJarName() { + // main jar + ResourceInfo mainJar = mapreduceParameters.getMainJar(); + if (mainJar != null) { + int resourceId = mainJar.getId(); + String resourceName; + if (resourceId == 0) { + resourceName = mainJar.getRes(); + } else { + Resource resource = processService.getResourceById(mapreduceParameters.getMainJar().getId()); + if (resource == null) { + logger.error("resource id: {} not exist", resourceId); + throw new RuntimeException(String.format("resource id: %d not exist", resourceId)); + } + resourceName = resource.getFullName().replaceFirst("/", ""); + } + mainJar.setRes(resourceName); + mapreduceParameters.setMainJar(mainJar); + } + } + @Override public AbstractParameters getParameters() { return mapreduceParameters; @@ -122,22 +156,19 @@ public class MapReduceTask extends AbstractYarnTask { } // main class - if(mapreduceParameters.getProgramType() !=null ){ - if(mapreduceParameters.getProgramType()!= ProgramType.PYTHON){ - if(StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){ - result.add(mapreduceParameters.getMainClass()); - } - } + if(!ProgramType.PYTHON.equals(mapreduceParameters.getProgramType()) + && StringUtils.isNotEmpty(mapreduceParameters.getMainClass())){ + result.add(mapreduceParameters.getMainClass()); } // others if (StringUtils.isNotEmpty(mapreduceParameters.getOthers())) { String others = mapreduceParameters.getOthers(); - if(!others.contains(Constants.MR_QUEUE)){ - if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { - result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); - } + if (!others.contains(Constants.MR_QUEUE) + && StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { + result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); } + result.add(mapreduceParameters.getOthers()); }else if (StringUtils.isNotEmpty(mapreduceParameters.getQueue())) { result.add(String.format("%s %s=%s", Constants.D, Constants.MR_QUEUE, mapreduceParameters.getQueue())); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java index fb881453e9..72d5616e5b 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java @@ -16,12 +16,11 @@ */ package org.apache.dolphinscheduler.server.worker.task.processdure; +import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import com.cronutils.utils.StringUtils; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DataType; -import org.apache.dolphinscheduler.common.enums.Direct; -import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; @@ -29,12 +28,9 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; -import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.sql.*; @@ -56,69 +52,59 @@ public class ProcedureTask extends AbstractTask { private ProcedureParameters procedureParameters; /** - * process service + * base datasource */ - private ProcessService processService; + private BaseDataSource baseDataSource; + /** - * base datasource + * taskExecutionContext */ - private BaseDataSource baseDataSource; + private TaskExecutionContext taskExecutionContext; /** * constructor - * @param taskProps task props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public ProcedureTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); + public ProcedureTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + + this.taskExecutionContext = taskExecutionContext; - logger.info("procedure task params {}", taskProps.getTaskParams()); + logger.info("procedure task params {}", taskExecutionContext.getTaskParams()); + + this.procedureParameters = JSONObject.parseObject(taskExecutionContext.getTaskParams(), ProcedureParameters.class); - this.procedureParameters = JSONObject.parseObject(taskProps.getTaskParams(), ProcedureParameters.class); // check parameters if (!procedureParameters.checkParameters()) { throw new RuntimeException("procedure task params is not valid"); } - - this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override public void handle() throws Exception { // set the name of the current thread - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); - logger.info("processdure type : {}, datasource : {}, method : {} , localParams : {}", + logger.info("procedure type : {}, datasource : {}, method : {} , localParams : {}", procedureParameters.getType(), procedureParameters.getDatasource(), procedureParameters.getMethod(), procedureParameters.getLocalParams()); - DataSource dataSource = processService.findDataSourceById(procedureParameters.getDatasource()); - if (dataSource == null){ - logger.error("datasource not exists"); - exitStatusCode = -1; - throw new IllegalArgumentException("datasource not found"); - } - - logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", - dataSource.getName(), - dataSource.getType(), - dataSource.getNote(), - dataSource.getUserId(), - dataSource.getConnectionParams()); - Connection connection = null; CallableStatement stmt = null; try { // load class - DataSourceFactory.loadClass(dataSource.getType()); + DataSourceFactory.loadClass(DbType.valueOf(procedureParameters.getType())); + // get datasource - baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); + baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(procedureParameters.getType()), + taskExecutionContext.getProcedureTaskExecutionContext().getConnectionParams()); + // get jdbc connection connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), @@ -128,11 +114,11 @@ public class ProcedureTask extends AbstractTask { // combining local and global parameters - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), procedureParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); Collection userDefParamsList = null; @@ -141,87 +127,149 @@ public class ProcedureTask extends AbstractTask { userDefParamsList = procedureParameters.getLocalParametersMap().values(); } - String method = ""; - // no parameters - if (CollectionUtils.isEmpty(userDefParamsList)){ - method = "{call " + procedureParameters.getMethod() + "}"; - }else { // exists parameters - int size = userDefParamsList.size(); - StringBuilder parameter = new StringBuilder(); - parameter.append("("); - for (int i = 0 ;i < size - 1; i++){ - parameter.append("?,"); - } - parameter.append("?)"); - method = "{call " + procedureParameters.getMethod() + parameter.toString()+ "}"; - } + String method = getCallMethod(userDefParamsList); logger.info("call method : {}",method); + // call method stmt = connection.prepareCall(method); - if(taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED){ - stmt.setQueryTimeout(taskProps.getTaskTimeout()); - } - Map outParameterMap = new HashMap<>(); - if (userDefParamsList != null && userDefParamsList.size() > 0){ - int index = 1; - for (Property property : userDefParamsList){ - logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" - ,property.getProp(), - property.getDirect(), - property.getType(), - property.getValue()); - // set parameters - if (property.getDirect().equals(Direct.IN)){ - ParameterUtils.setInParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); - }else if (property.getDirect().equals(Direct.OUT)){ - setOutParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); - property.setValue(paramsMap.get(property.getProp()).getValue()); - outParameterMap.put(index,property); - } - index++; - } - } + + // set timeout + setTimeout(stmt); + + // outParameterMap + Map outParameterMap = getOutParameterMap(stmt, paramsMap, userDefParamsList); + stmt.executeUpdate(); /** * print the output parameters to the log */ - Iterator> iter = outParameterMap.entrySet().iterator(); - while (iter.hasNext()){ - Map.Entry en = iter.next(); - - int index = en.getKey(); - Property property = en.getValue(); - String prop = property.getProp(); - DataType dataType = property.getType(); - // get output parameter - getOutputParameter(stmt, index, prop, dataType); - } + printOutParameter(stmt, outParameterMap); - exitStatusCode = 0; + setExitStatusCode(Constants.EXIT_CODE_SUCCESS); }catch (Exception e){ - logger.error(e.getMessage(),e); - exitStatusCode = -1; - throw new RuntimeException(String.format("process interrupted. exit status code is %d",exitStatusCode)); + setExitStatusCode(Constants.EXIT_CODE_FAILURE); + logger.error("procedure task error",e); + throw e; } finally { - if (stmt != null) { - try { - stmt.close(); - } catch (SQLException e) { - exitStatusCode = -1; - logger.error(e.getMessage(),e); - } + close(stmt,connection); + } + } + + /** + * get call method + * @param userDefParamsList userDefParamsList + * @return method + */ + private String getCallMethod(Collection userDefParamsList) { + String method;// no parameters + if (CollectionUtils.isEmpty(userDefParamsList)){ + method = "{call " + procedureParameters.getMethod() + "}"; + }else { // exists parameters + int size = userDefParamsList.size(); + StringBuilder parameter = new StringBuilder(); + parameter.append("("); + for (int i = 0 ;i < size - 1; i++){ + parameter.append("?,"); } - if (connection != null) { - try { - connection.close(); - } catch (SQLException e) { - exitStatusCode = -1; - logger.error(e.getMessage(), e); + parameter.append("?)"); + method = "{call " + procedureParameters.getMethod() + parameter.toString()+ "}"; + } + return method; + } + + /** + * print outParameter + * @param stmt CallableStatement + * @param outParameterMap outParameterMap + * @throws SQLException + */ + private void printOutParameter(CallableStatement stmt, + Map outParameterMap) throws SQLException { + Iterator> iter = outParameterMap.entrySet().iterator(); + while (iter.hasNext()){ + Map.Entry en = iter.next(); + + int index = en.getKey(); + Property property = en.getValue(); + String prop = property.getProp(); + DataType dataType = property.getType(); + // get output parameter + getOutputParameter(stmt, index, prop, dataType); + } + } + + /** + * get output parameter + * + * @param stmt CallableStatement + * @param paramsMap paramsMap + * @param userDefParamsList userDefParamsList + * @return outParameterMap + * @throws Exception + */ + private Map getOutParameterMap(CallableStatement stmt, + Map paramsMap, + Collection userDefParamsList) throws Exception { + Map outParameterMap = new HashMap<>(); + if (userDefParamsList != null && userDefParamsList.size() > 0){ + int index = 1; + for (Property property : userDefParamsList){ + logger.info("localParams : prop : {} , dirct : {} , type : {} , value : {}" + ,property.getProp(), + property.getDirect(), + property.getType(), + property.getValue()); + // set parameters + if (property.getDirect().equals(Direct.IN)){ + ParameterUtils.setInParameter(index, stmt, property.getType(), paramsMap.get(property.getProp()).getValue()); + }else if (property.getDirect().equals(Direct.OUT)){ + setOutParameter(index,stmt,property.getType(),paramsMap.get(property.getProp()).getValue()); + property.setValue(paramsMap.get(property.getProp()).getValue()); + outParameterMap.put(index,property); } + index++; + } + } + return outParameterMap; + } + + /** + * set timtou + * @param stmt CallableStatement + * @throws SQLException + */ + private void setTimeout(CallableStatement stmt) throws SQLException { + Boolean failed = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED; + Boolean warnfailed = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED; + if(failed || warnfailed){ + stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout()); + } + } + + /** + * close jdbc resource + * + * @param stmt + * @param connection + */ + private void close(PreparedStatement stmt, + Connection connection){ + if (stmt != null) { + try { + stmt.close(); + } catch (SQLException e) { + + } + } + if (connection != null) { + try { + connection.close(); + } catch (SQLException e) { + } } } @@ -237,31 +285,31 @@ public class ProcedureTask extends AbstractTask { private void getOutputParameter(CallableStatement stmt, int index, String prop, DataType dataType) throws SQLException { switch (dataType){ case VARCHAR: - logger.info("out prameter key : {} , value : {}",prop,stmt.getString(index)); + logger.info("out prameter varchar key : {} , value : {}",prop,stmt.getString(index)); break; case INTEGER: - logger.info("out prameter key : {} , value : {}", prop, stmt.getInt(index)); + logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index)); break; case LONG: - logger.info("out prameter key : {} , value : {}",prop,stmt.getLong(index)); + logger.info("out prameter long key : {} , value : {}",prop,stmt.getLong(index)); break; case FLOAT: - logger.info("out prameter key : {} , value : {}",prop,stmt.getFloat(index)); + logger.info("out prameter float key : {} , value : {}",prop,stmt.getFloat(index)); break; case DOUBLE: - logger.info("out prameter key : {} , value : {}",prop,stmt.getDouble(index)); + logger.info("out prameter double key : {} , value : {}",prop,stmt.getDouble(index)); break; case DATE: - logger.info("out prameter key : {} , value : {}",prop,stmt.getDate(index)); + logger.info("out prameter date key : {} , value : {}",prop,stmt.getDate(index)); break; case TIME: - logger.info("out prameter key : {} , value : {}",prop,stmt.getTime(index)); + logger.info("out prameter time key : {} , value : {}",prop,stmt.getTime(index)); break; case TIMESTAMP: - logger.info("out prameter key : {} , value : {}",prop,stmt.getTimestamp(index)); + logger.info("out prameter timestamp key : {} , value : {}",prop,stmt.getTimestamp(index)); break; case BOOLEAN: - logger.info("out prameter key : {} , value : {}",prop, stmt.getBoolean(index)); + logger.info("out prameter boolean key : {} , value : {}",prop, stmt.getBoolean(index)); break; default: break; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java index fc212f866b..7a66227b8d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java @@ -17,17 +17,18 @@ package org.apache.dolphinscheduler.server.worker.task.python; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.python.PythonParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.PythonCommandExecutor; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.util.Map; @@ -53,37 +54,29 @@ public class PythonTask extends AbstractTask { private PythonCommandExecutor pythonCommandExecutor; /** - * process service + * taskExecutionContext */ - private ProcessService processService; + private TaskExecutionContext taskExecutionContext; /** * constructor - * @param taskProps task props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public PythonTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - this.taskDir = taskProps.getTaskDir(); + public PythonTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; this.pythonCommandExecutor = new PythonCommandExecutor(this::logHandle, - taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), + taskExecutionContext, logger); - this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override public void init() { - logger.info("python task params {}", taskProps.getTaskParams()); + logger.info("python task params {}", taskExecutionContext.getTaskParams()); - pythonParameters = JSONUtils.parseObject(taskProps.getTaskParams(), PythonParameters.class); + pythonParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), PythonParameters.class); if (!pythonParameters.checkParameters()) { throw new RuntimeException("python task params is not valid"); @@ -94,10 +87,15 @@ public class PythonTask extends AbstractTask { public void handle() throws Exception { try { // construct process - exitStatusCode = pythonCommandExecutor.run(buildCommand(), processService); - } catch (Exception e) { + CommandExecuteResult commandExecuteResult = pythonCommandExecutor.run(buildCommand()); + + setExitStatusCode(commandExecuteResult.getExitStatusCode()); + setAppIds(commandExecuteResult.getAppIds()); + setProcessId(commandExecuteResult.getProcessId()); + } + catch (Exception e) { logger.error("python task failure", e); - exitStatusCode = -1; + setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } } @@ -116,14 +114,12 @@ public class PythonTask extends AbstractTask { private String buildCommand() throws Exception { String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", "\n"); - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), pythonParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); if (paramsMap != null){ rawPythonScript = ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap)); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java index 90661a690a..f24aa54ffb 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java @@ -18,18 +18,19 @@ package org.apache.dolphinscheduler.server.worker.task.shell; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; +import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.io.File; @@ -52,47 +53,35 @@ public class ShellTask extends AbstractTask { */ private ShellParameters shellParameters; - /** - * task dir - */ - private String taskDir; - /** * shell command executor */ private ShellCommandExecutor shellCommandExecutor; /** - * process database access + * taskExecutionContext */ - private ProcessService processService; + private TaskExecutionContext taskExecutionContext; /** * constructor - * @param taskProps task props + * @param taskExecutionContext taskExecutionContext * @param logger logger */ - public ShellTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); - - this.taskDir = taskProps.getTaskDir(); - - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskProps.getTaskDir(), - taskProps.getTaskAppId(), - taskProps.getTaskInstId(), - taskProps.getTenantCode(), - taskProps.getEnvFile(), - taskProps.getTaskStartTime(), - taskProps.getTaskTimeout(), + public ShellTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + + this.taskExecutionContext = taskExecutionContext; + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, + taskExecutionContext, logger); - this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override public void init() { - logger.info("shell task params {}", taskProps.getTaskParams()); + logger.info("shell task params {}", taskExecutionContext.getTaskParams()); - shellParameters = JSONUtils.parseObject(taskProps.getTaskParams(), ShellParameters.class); + shellParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ShellParameters.class); if (!shellParameters.checkParameters()) { throw new RuntimeException("shell task params is not valid"); @@ -103,10 +92,13 @@ public class ShellTask extends AbstractTask { public void handle() throws Exception { try { // construct process - exitStatusCode = shellCommandExecutor.run(buildCommand(), processService); + CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(buildCommand()); + setExitStatusCode(commandExecuteResult.getExitStatusCode()); + setAppIds(commandExecuteResult.getAppIds()); + setProcessId(commandExecuteResult.getProcessId()); } catch (Exception e) { - logger.error("shell task failure", e); - exitStatusCode = -1; + logger.error("shell task error", e); + setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } } @@ -124,7 +116,10 @@ public class ShellTask extends AbstractTask { */ private String buildCommand() throws Exception { // generate scripts - String fileName = String.format("%s/%s_node.%s", taskDir, taskProps.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); + String fileName = String.format("%s/%s_node.sh", + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); + Path path = new File(fileName).toPath(); if (Files.exists(path)) { @@ -132,25 +127,34 @@ public class ShellTask extends AbstractTask { } String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); - - /** * combining local and global parameters */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), shellParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); if (paramsMap != null){ script = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); } - + // new + // replace variable TIME with $[YYYYmmddd...] in shell file when history run job and batch complement job + if (paramsMap != null) { + if (taskExecutionContext.getScheduleTime() != null) { + String dateTime = DateUtils.format(taskExecutionContext.getScheduleTime(), Constants.PARAMETER_FORMAT_TIME); + Property p = new Property(); + p.setValue(dateTime); + p.setProp(Constants.PARAMETER_SHECDULE_TIME); + paramsMap.put(Constants.PARAMETER_SHECDULE_TIME, p); + } + script = ParameterUtils.convertParameterPlaceholders2(script, ParamUtils.convert(paramsMap)); + } shellParameters.setRawScript(script); logger.info("raw script : {}", shellParameters.getRawScript()); - logger.info("task dir : {}", taskDir); + logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); Set perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); @@ -171,6 +175,4 @@ public class ShellTask extends AbstractTask { return shellParameters; } - - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java index 203c0fe146..505d88fb37 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java @@ -16,17 +16,20 @@ */ package org.apache.dolphinscheduler.server.worker.task.spark; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.SparkVersion; import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.SparkArgsUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.slf4j.Logger; import java.util.ArrayList; @@ -53,33 +56,40 @@ public class SparkTask extends AbstractYarnTask { */ private SparkParameters sparkParameters; - public SparkTask(TaskProps props, Logger logger) { - super(props, logger); + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + + public SparkTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.taskExecutionContext = taskExecutionContext; } @Override public void init() { - logger.info("spark task params {}", taskProps.getTaskParams()); + logger.info("spark task params {}", taskExecutionContext.getTaskParams()); - sparkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), SparkParameters.class); + sparkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SparkParameters.class); if (!sparkParameters.checkParameters()) { throw new RuntimeException("spark task params is not valid"); } - sparkParameters.setQueue(taskProps.getQueue()); + sparkParameters.setQueue(taskExecutionContext.getQueue()); + + setMainJarName(); if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) { String args = sparkParameters.getMainArgs(); - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), sparkParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); + if (paramsMap != null ){ args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); } @@ -108,13 +118,35 @@ public class SparkTask extends AbstractYarnTask { args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); String command = ParameterUtils - .convertParameterPlaceholders(String.join(" ", args), taskProps.getDefinedParams()); + .convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); logger.info("spark task command : {}", command); return command; } + @Override + protected void setMainJarName() { + // main jar + ResourceInfo mainJar = sparkParameters.getMainJar(); + if (mainJar != null) { + int resourceId = mainJar.getId(); + String resourceName; + if (resourceId == 0) { + resourceName = mainJar.getRes(); + } else { + Resource resource = processService.getResourceById(sparkParameters.getMainJar().getId()); + if (resource == null) { + logger.error("resource id: {} not exist", resourceId); + throw new RuntimeException(String.format("resource id: %d not exist", resourceId)); + } + resourceName = resource.getFullName().replaceFirst("/", ""); + } + mainJar.setRes(resourceName); + sparkParameters.setMainJar(mainJar); + } + } + @Override public AbstractParameters getParameters() { return sparkParameters; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java index aae11f5530..84e4e54a50 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java @@ -19,14 +19,13 @@ package org.apache.dolphinscheduler.server.worker.task.sql; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.serializer.SerializerFeature; -import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.alert.utils.MailUtils; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.AuthorizationType; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.ShowType; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; -import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.sql.SqlBinds; @@ -36,17 +35,13 @@ import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; -import org.apache.dolphinscheduler.dao.entity.DataSource; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; -import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.UDFUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.permission.PermissionCheck; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.sql.*; @@ -66,46 +61,46 @@ public class SqlTask extends AbstractTask { * sql parameters */ private SqlParameters sqlParameters; - - /** - * process service - */ - private ProcessService processService; - /** * alert dao */ private AlertDao alertDao; + /** + * base datasource + */ + private BaseDataSource baseDataSource; /** - * datasource + * taskExecutionContext */ - private DataSource dataSource; + private TaskExecutionContext taskExecutionContext; /** - * base datasource + * default query sql limit */ - private BaseDataSource baseDataSource; + private static final int LIMIT = 10000; + public SqlTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); - public SqlTask(TaskProps taskProps, Logger logger) { - super(taskProps, logger); + this.taskExecutionContext = taskExecutionContext; - logger.info("sql task params {}", taskProps.getTaskParams()); - this.sqlParameters = JSONObject.parseObject(taskProps.getTaskParams(), SqlParameters.class); + logger.info("sql task params {}", taskExecutionContext.getTaskParams()); + this.sqlParameters = JSONObject.parseObject(taskExecutionContext.getTaskParams(), SqlParameters.class); if (!sqlParameters.checkParameters()) { throw new RuntimeException("sql task params is not valid"); } - this.processService = SpringApplicationContext.getBean(ProcessService.class); + this.alertDao = SpringApplicationContext.getBean(AlertDao.class); } @Override public void handle() throws Exception { // set the name of the current thread - String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId()); Thread.currentThread().setName(threadLoggerInfoName); + logger.info("Full sql parameters: {}", sqlParameters); logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}", sqlParameters.getType(), @@ -115,38 +110,14 @@ public class SqlTask extends AbstractTask { sqlParameters.getUdfs(), sqlParameters.getShowType(), sqlParameters.getConnParams()); - - // not set data source - if (sqlParameters.getDatasource() == 0){ - logger.error("datasource id not exists"); - exitStatusCode = -1; - return; - } - - dataSource= processService.findDataSourceById(sqlParameters.getDatasource()); - - // data source is null - if (dataSource == null){ - logger.error("datasource not exists"); - exitStatusCode = -1; - return; - } - - logger.info("datasource name : {} , type : {} , desc : {} , user_id : {} , parameter : {}", - dataSource.getName(), - dataSource.getType(), - dataSource.getNote(), - dataSource.getUserId(), - dataSource.getConnectionParams()); - - Connection con = null; - List createFuncs = null; try { + SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext(); // load class - DataSourceFactory.loadClass(dataSource.getType()); + DataSourceFactory.loadClass(DbType.valueOf(sqlParameters.getType())); + // get datasource - baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); + baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(sqlParameters.getType()), + sqlTaskExecutionContext.getConnectionParams()); // ready to execute SQL and parameter entity Map SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql()); @@ -161,34 +132,19 @@ public class SqlTask extends AbstractTask { .map(this::getSqlAndSqlParamsMap) .collect(Collectors.toList()); - // determine if it is UDF - boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) - && StringUtils.isNotEmpty(sqlParameters.getUdfs()); - if(udfTypeFlag){ - String[] ids = sqlParameters.getUdfs().split(","); - int[] idsArray = new int[ids.length]; - for(int i=0;i udfFuncList = processService.queryUdfFunListByids(idsArray); - createFuncs = UDFUtils.createFuncs(udfFuncList, taskProps.getTenantCode(), logger); - } + List createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncList(), + taskExecutionContext.getTenantCode(), + logger); // execute sql task - con = executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); + executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); + + setExitStatusCode(Constants.EXIT_CODE_SUCCESS); + } catch (Exception e) { - logger.error(e.getMessage(), e); + setExitStatusCode(Constants.EXIT_CODE_FAILURE); + logger.error("sql task error", e); throw e; - } finally { - if (con != null) { - try { - con.close(); - } catch (SQLException e) { - logger.error(e.getMessage(),e); - } - } } } @@ -203,11 +159,11 @@ public class SqlTask extends AbstractTask { // find process instance by task id - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), sqlParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); // spell SQL according to the final user-defined variable if(paramsMap == null){ @@ -221,17 +177,19 @@ public class SqlTask extends AbstractTask { logger.info("SQL title : {}",title); sqlParameters.setTitle(title); } - + //new + //replace variable TIME with $[YYYYmmddd...] in sql when history run job and batch complement job + sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime(), paramsMap); // special characters need to be escaped, ${} needs to be escaped String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); // replace the ${} of the SQL statement with the Placeholder - String formatSql = sql.replaceAll(rgex,"?"); + String formatSql = sql.replaceAll(rgex, "?"); sqlBuilder.append(formatSql); // print repalce sql - printReplacedSql(sql,formatSql,rgex,sqlParamsMap); + printReplacedSql(sql, formatSql, rgex, sqlParamsMap); return new SqlBinds(sqlBuilder.toString(), sqlParamsMap); } @@ -246,109 +204,196 @@ public class SqlTask extends AbstractTask { * @param preStatementsBinds pre statements binds * @param postStatementsBinds post statements binds * @param createFuncs create functions - * @return Connection */ - public Connection executeFuncAndSql(SqlBinds mainSqlBinds, + public void executeFuncAndSql(SqlBinds mainSqlBinds, List preStatementsBinds, List postStatementsBinds, List createFuncs){ Connection connection = null; + PreparedStatement stmt = null; + ResultSet resultSet = null; try { // if upload resource is HDFS and kerberos startup CommonUtils.loadKerberosConf(); + // create connection + connection = createConnection(); + // create temp function + if (CollectionUtils.isNotEmpty(createFuncs)) { + createTempFunction(connection,createFuncs); + } - // if hive , load connection params if exists - if (HIVE == dataSource.getType()) { - Properties paramProp = new Properties(); - paramProp.setProperty(USER, baseDataSource.getUser()); - paramProp.setProperty(PASSWORD, baseDataSource.getPassword()); - Map connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(), - SEMICOLON, - HIVE_CONF); - paramProp.putAll(connParamMap); - - connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), - paramProp); - }else{ - connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), - baseDataSource.getUser(), - baseDataSource.getPassword()); + // pre sql + preSql(connection,preStatementsBinds); + stmt = prepareStatementAndBind(connection, mainSqlBinds); + + // decide whether to executeQuery or executeUpdate based on sqlType + if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) { + // query statements need to be convert to JsonArray and inserted into Alert to send + resultSet = stmt.executeQuery(); + resultProcess(resultSet); + + } else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) { + // non query statement + stmt.executeUpdate(); } - // create temp function - if (CollectionUtils.isNotEmpty(createFuncs)) { - try (Statement funcStmt = connection.createStatement()) { - for (String createFunc : createFuncs) { - logger.info("hive create function sql: {}", createFunc); - funcStmt.execute(createFunc); - } - } + postSql(connection,postStatementsBinds); + + } catch (Exception e) { + logger.error("execute sql error",e); + throw new RuntimeException("execute sql error"); + } finally { + close(resultSet,stmt,connection); + } + } + + /** + * result process + * + * @param resultSet resultSet + * @throws Exception + */ + private void resultProcess(ResultSet resultSet) throws Exception{ + JSONArray resultJSONArray = new JSONArray(); + ResultSetMetaData md = resultSet.getMetaData(); + int num = md.getColumnCount(); + + int rowCount = 0; + + while (rowCount < LIMIT && resultSet.next()) { + JSONObject mapOfColValues = new JSONObject(true); + for (int i = 1; i <= num; i++) { + mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i)); } + resultJSONArray.add(mapOfColValues); + rowCount++; + } + logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - for (SqlBinds sqlBind: preStatementsBinds) { - try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { - int result = stmt.executeUpdate(); - logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql()); - } + // if there is a result set + if (!resultJSONArray.isEmpty() ) { + if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { + sendAttachment(sqlParameters.getTitle(), + JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); + }else{ + sendAttachment(taskExecutionContext.getTaskName() + " query resultsets ", + JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); } + } + } + + /** + * pre sql + * + * @param connection connection + * @param preStatementsBinds preStatementsBinds + */ + private void preSql(Connection connection, + List preStatementsBinds) throws Exception{ + for (SqlBinds sqlBind: preStatementsBinds) { + try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){ + int result = pstmt.executeUpdate(); + logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql()); - try (PreparedStatement stmt = prepareStatementAndBind(connection, mainSqlBinds); - ResultSet resultSet = stmt.executeQuery()) { - // decide whether to executeQuery or executeUpdate based on sqlType - if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) { - // query statements need to be convert to JsonArray and inserted into Alert to send - JSONArray resultJSONArray = new JSONArray(); - ResultSetMetaData md = resultSet.getMetaData(); - int num = md.getColumnCount(); - - while (resultSet.next()) { - JSONObject mapOfColValues = new JSONObject(true); - for (int i = 1; i <= num; i++) { - mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i)); - } - resultJSONArray.add(mapOfColValues); - } - logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - - // if there is a result set - if ( !resultJSONArray.isEmpty() ) { - if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { - sendAttachment(sqlParameters.getTitle(), - JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - }else{ - sendAttachment(taskProps.getNodeName() + " query resultsets ", - JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue)); - } - } - - exitStatusCode = 0; - - } else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) { - // non query statement - stmt.executeUpdate(); - exitStatusCode = 0; - } } + } + } - for (SqlBinds sqlBind: postStatementsBinds) { - try (PreparedStatement stmt = prepareStatementAndBind(connection, sqlBind)) { - int result = stmt.executeUpdate(); - logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql()); - } + /** + * post psql + * + * @param connection connection + * @param postStatementsBinds postStatementsBinds + * @throws Exception + */ + private void postSql(Connection connection, + List postStatementsBinds) throws Exception{ + for (SqlBinds sqlBind: postStatementsBinds) { + try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){ + int result = pstmt.executeUpdate(); + logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql()); } - } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage()); - } finally { - try { - connection.close(); - } catch (Exception e) { - logger.error(e.getMessage(), e); + } + } + /** + * create temp function + * + * @param connection connection + * @param createFuncs createFuncs + * @throws Exception + */ + private void createTempFunction(Connection connection, + List createFuncs) throws Exception{ + try (Statement funcStmt = connection.createStatement()) { + for (String createFunc : createFuncs) { + logger.info("hive create function sql: {}", createFunc); + funcStmt.execute(createFunc); } } + } + /** + * create connection + * + * @return connection + * @throws Exception + */ + private Connection createConnection() throws Exception{ + // if hive , load connection params if exists + Connection connection = null; + if (HIVE == DbType.valueOf(sqlParameters.getType())) { + Properties paramProp = new Properties(); + paramProp.setProperty(USER, baseDataSource.getUser()); + paramProp.setProperty(PASSWORD, baseDataSource.getPassword()); + Map connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(), + SEMICOLON, + HIVE_CONF); + paramProp.putAll(connParamMap); + + connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), + paramProp); + }else{ + connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(), + baseDataSource.getUser(), + baseDataSource.getPassword()); + } return connection; } + /** + * close jdbc resource + * + * @param resultSet resultSet + * @param pstmt pstmt + * @param connection connection + */ + private void close(ResultSet resultSet, + PreparedStatement pstmt, + Connection connection){ + if (resultSet != null){ + try { + connection.close(); + } catch (SQLException e) { + + } + } + + if (pstmt != null){ + try { + connection.close(); + } catch (SQLException e) { + + } + } + + if (connection != null){ + try { + connection.close(); + } catch (SQLException e) { + + } + } + } + /** * preparedStatement bind * @param connection @@ -358,22 +403,21 @@ public class SqlTask extends AbstractTask { */ private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception { // is the timeout set - boolean timeoutFlag = taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.FAILED || - taskProps.getTaskTimeoutStrategy() == TaskTimeoutStrategy.WARNFAILED; - try (PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql())) { - if(timeoutFlag){ - stmt.setQueryTimeout(taskProps.getTaskTimeout()); - } - Map params = sqlBinds.getParamsMap(); - if(params != null) { - for (Map.Entry entry : params.entrySet()) { - Property prop = entry.getValue(); - ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue()); - } + boolean timeoutFlag = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED || + TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED; + PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql()); + if(timeoutFlag){ + stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout()); + } + Map params = sqlBinds.getParamsMap(); + if(params != null) { + for (Map.Entry entry : params.entrySet()) { + Property prop = entry.getValue(); + ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue()); } - logger.info("prepare statement replace sql : {} ", stmt); - return stmt; } + logger.info("prepare statement replace sql : {} ", stmt); + return stmt; } /** @@ -383,13 +427,10 @@ public class SqlTask extends AbstractTask { */ public void sendAttachment(String title,String content){ - // process instance - ProcessInstance instance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - - List users = alertDao.queryUserByAlertGroupId(instance.getWarningGroupId()); + List users = alertDao.queryUserByAlertGroupId(taskExecutionContext.getSqlTaskExecutionContext().getWarningGroupId()); // receiving group list - List receviersList = new ArrayList(); + List receviersList = new ArrayList<>(); for(User user:users){ receviersList.add(user.getEmail().trim()); } @@ -403,7 +444,7 @@ public class SqlTask extends AbstractTask { } // copy list - List receviersCcList = new ArrayList(); + List receviersCcList = new ArrayList<>(); // Custom Copier String receiversCc = sqlParameters.getReceiversCc(); if (StringUtils.isNotEmpty(receiversCc)){ @@ -417,7 +458,7 @@ public class SqlTask extends AbstractTask { if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){ Map mailResult = MailUtils.sendMails(receviersList, receviersCcList, title, content, ShowType.valueOf(showTypeName)); - if(!(Boolean) mailResult.get(STATUS)){ + if(!(boolean) mailResult.get(STATUS)){ throw new RuntimeException("send mail failed!"); } }else{ @@ -463,33 +504,4 @@ public class SqlTask extends AbstractTask { } logger.info("Sql Params are {}", logPrint); } - - /** - * check udf function permission - * @param udfFunIds udf functions - * @return if has download permission return true else false - */ - private void checkUdfPermission(Integer[] udfFunIds) throws Exception{ - // process instance - ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - int userId = processInstance.getExecutorId(); - - PermissionCheck permissionCheckUdf = new PermissionCheck(AuthorizationType.UDF, processService,udfFunIds,userId,logger); - permissionCheckUdf.checkPermission(); - } - - /** - * check data source permission - * @param dataSourceId data source id - * @return if has download permission return true else false - */ - private void checkDataSourcePermission(int dataSourceId) throws Exception{ - // process instance - ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); - int userId = processInstance.getExecutorId(); - - PermissionCheck permissionCheckDataSource = new PermissionCheck(AuthorizationType.DATASOURCE, processService,new Integer[]{dataSourceId},userId,logger); - permissionCheckDataSource.checkPermission(); - } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java index 64bc7924d2..9f54d089be 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java @@ -17,13 +17,14 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop; import com.alibaba.fastjson.JSON; +import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; import org.slf4j.Logger; import java.util.Map; @@ -35,15 +36,21 @@ public class SqoopTask extends AbstractYarnTask { private SqoopParameters sqoopParameters; - public SqoopTask(TaskProps props, Logger logger){ - super(props,logger); + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + + public SqoopTask(TaskExecutionContext taskExecutionContext, Logger logger){ + super(taskExecutionContext,logger); + this.taskExecutionContext = taskExecutionContext; } @Override public void init() throws Exception { - logger.info("sqoop task params {}", taskProps.getTaskParams()); + logger.info("sqoop task params {}", taskExecutionContext.getTaskParams()); sqoopParameters = - JSON.parseObject(taskProps.getTaskParams(),SqoopParameters.class); + JSON.parseObject(taskExecutionContext.getTaskParams(),SqoopParameters.class); if (!sqoopParameters.checkParameters()) { throw new RuntimeException("sqoop task params is not valid"); } @@ -54,13 +61,13 @@ public class SqoopTask extends AbstractYarnTask { protected String buildCommand() throws Exception { //get sqoop scripts SqoopJobGenerator generator = new SqoopJobGenerator(); - String script = generator.generateSqoopJob(sqoopParameters); + String script = generator.generateSqoopJob(sqoopParameters,taskExecutionContext); - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), sqoopParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); if(paramsMap != null){ String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); @@ -71,6 +78,10 @@ public class SqoopTask extends AbstractYarnTask { return null; } + @Override + protected void setMainJarName() { + } + @Override public AbstractParameters getParameters() { return sqoopParameters; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java index 6c1d1fdca8..841654b699 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; /** * Source Generator Interface @@ -25,8 +26,9 @@ public interface ISourceGenerator { /** * generate the source script - * @param sqoopParameters sqoop params - * @return + * @param sqoopParameters sqoopParameters + * @param taskExecutionContext taskExecutionContext + * @return source script */ - String generate(SqoopParameters sqoopParameters); + String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java index be307af5f2..7bdaf49e83 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; /** * Target Generator Interface @@ -24,9 +25,10 @@ import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; public interface ITargetGenerator { /** - * generate the target script - * @param sqoopParameters sqoop params - * @return + * generate the target script + * @param sqoopParameters sqoopParameters + * @param taskExecutionContext taskExecutionContext + * @return target script */ - String generate(SqoopParameters sqoopParameters); + String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java index 24c76e027d..4e9cb84ff3 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HdfsSourceGenerator; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HiveSourceGenerator; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.MysqlSourceGenerator; @@ -60,15 +61,15 @@ public class SqoopJobGenerator { * @param sqoopParameters * @return */ - public String generateSqoopJob(SqoopParameters sqoopParameters){ + public String generateSqoopJob(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext){ createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType()); if(sourceGenerator == null || targetGenerator == null){ return null; } return commonGenerator.generate(sqoopParameters) - + sourceGenerator.generate(sqoopParameters) - + targetGenerator.generate(sqoopParameters); + + sourceGenerator.generate(sqoopParameters,taskExecutionContext) + + targetGenerator.generate(sqoopParameters,taskExecutionContext); } /** diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java index 47b01363e6..41e56682ae 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java @@ -20,6 +20,7 @@ import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +33,7 @@ public class HdfsSourceGenerator implements ISourceGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder result = new StringBuilder(); try{ SourceHdfsParameter sourceHdfsParameter diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java index 91363e296a..ea12616825 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java @@ -20,6 +20,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +33,7 @@ public class HiveSourceGenerator implements ISourceGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder sb = new StringBuilder(); try{ SourceHiveParameter sourceHiveParameter diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java index 050fef7cc7..f8e3d57c7d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.QueryType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; @@ -24,10 +25,9 @@ import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParamete import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; -import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,17 +41,17 @@ public class MysqlSourceGenerator implements ISourceGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder result = new StringBuilder(); try { SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceMysqlParameter.class); + SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); + if(sourceMysqlParameter != null){ - ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); - DataSource dataSource= processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); - BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); + BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getSourcetype()), + sqoopTaskExecutionContext.getSourceConnectionParams()); if(baseDataSource != null){ result.append(" --connect ") .append(baseDataSource.getJdbcUrl()) @@ -69,17 +69,16 @@ public class MysqlSourceGenerator implements ISourceGenerator { result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns()); } - }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()){ - if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){ - - String srcQuery = sourceMysqlParameter.getSrcQuerySql(); - if(srcQuery.toLowerCase().contains("where")){ - srcQuery += " AND "+"$CONDITIONS"; - }else{ - srcQuery += " WHERE $CONDITIONS"; - } - result.append(" --query \'"+srcQuery+"\'"); + }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal() + && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){ + String srcQuery = sourceMysqlParameter.getSrcQuerySql(); + if(srcQuery.toLowerCase().contains("where")){ + srcQuery += " AND "+"$CONDITIONS"; + }else{ + srcQuery += " WHERE $CONDITIONS"; } + result.append(" --query \'"+srcQuery+"\'"); + } List mapColumnHive = sourceMysqlParameter.getMapColumnHive(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java index 411e9b4450..64ea75e742 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java @@ -20,6 +20,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +33,7 @@ public class HdfsTargetGenerator implements ITargetGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder result = new StringBuilder(); try{ TargetHdfsParameter targetHdfsParameter = diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java index ad59173ad0..dc5440b529 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java @@ -20,6 +20,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,7 +33,7 @@ public class HiveTargetGenerator implements ITargetGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder result = new StringBuilder(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java index 0733338812..aed8b9e24a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java @@ -17,12 +17,15 @@ package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -37,7 +40,7 @@ public class MysqlTargetGenerator implements ITargetGenerator { private Logger logger = LoggerFactory.getLogger(getClass()); @Override - public String generate(SqoopParameters sqoopParameters) { + public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { StringBuilder result = new StringBuilder(); try{ @@ -45,13 +48,13 @@ public class MysqlTargetGenerator implements ITargetGenerator { TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetMysqlParameter.class); + SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); + if(targetMysqlParameter != null && targetMysqlParameter.getTargetDatasource() != 0){ - ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); - DataSource dataSource= processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); // get datasource - BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), - dataSource.getConnectionParams()); + BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()), + sqoopTaskExecutionContext.getTargetConnectionParams()); if(baseDataSource != null){ result.append(" --connect ") @@ -75,12 +78,11 @@ public class MysqlTargetGenerator implements ITargetGenerator { result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'"); } - if(targetMysqlParameter.isUpdate()){ - if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())&& - StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){ - result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey()) - .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode()); - } + if(targetMysqlParameter.isUpdate() + && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey()) + && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){ + result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey()) + .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode()); } } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java index fe4ec9130a..69aecee444 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java @@ -16,20 +16,21 @@ */ package org.apache.dolphinscheduler.server.zk; +import org.apache.commons.lang.StringUtils; +import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; +import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ZKNodeType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.DaoFactory; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.locks.InterProcessMutex; -import org.apache.curator.utils.ThreadUtils; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; @@ -39,7 +40,8 @@ import org.springframework.stereotype.Component; import java.util.Date; import java.util.List; -import java.util.concurrent.ThreadFactory; + +import static org.apache.dolphinscheduler.common.Constants.*; /** @@ -55,100 +57,45 @@ public class ZKMasterClient extends AbstractZKClient { */ private static final Logger logger = LoggerFactory.getLogger(ZKMasterClient.class); - /** - * thread factory - */ - private static final ThreadFactory defaultThreadFactory = ThreadUtils.newGenericThreadFactory("Master-Main-Thread"); - - /** - * master znode - */ - private String masterZNode = null; - - /** - * alert database access - */ - private AlertDao alertDao = null; /** * process service */ @Autowired private ProcessService processService; - /** - * default constructor - */ - private ZKMasterClient(){} - - /** - * init - */ - public void init(){ - - logger.info("initialize master client..."); - - // init dao - this.initDao(); + public void start() { InterProcessMutex mutex = null; try { // create distributed lock with the root node path of the lock space as /dolphinscheduler/lock/failover/master String znodeLock = getMasterStartUpLockPath(); - mutex = new InterProcessMutex(zkClient, znodeLock); + mutex = new InterProcessMutex(getZkClient(), znodeLock); mutex.acquire(); // init system znode this.initSystemZNode(); - // register master - this.registerMaster(); + while (!checkZKNodeExists(OSUtils.getHost(), ZKNodeType.MASTER)){ + ThreadUtils.sleep(SLEEP_TIME_MILLIS); + } + - // check if fault tolerance is required,failure and tolerance + // self tolerant if (getActiveMasterNum() == 1) { failoverWorker(null, true); failoverMaster(null); } }catch (Exception e){ - logger.error("master start up exception",e); + logger.error("master start up exception",e); }finally { releaseMutex(mutex); } } - - /** - * init dao - */ - public void initDao(){ - this.alertDao = DaoFactory.getDaoInstance(AlertDao.class); - } - /** - * get alert dao - * - * @return AlertDao - */ - public AlertDao getAlertDao() { - return alertDao; - } - - - - - /** - * register master znode - */ - public void registerMaster(){ - try { - String serverPath = registerServer(ZKNodeType.MASTER); - if(StringUtils.isEmpty(serverPath)){ - System.exit(-1); - } - masterZNode = serverPath; - } catch (Exception e) { - logger.error("register master failure ",e); - System.exit(-1); - } + @Override + public void close(){ + super.close(); } /** @@ -159,13 +106,13 @@ public class ZKMasterClient extends AbstractZKClient { */ @Override protected void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) { - if(path.startsWith(getZNodeParentPath(ZKNodeType.MASTER)+Constants.SINGLE_SLASH)){ //monitor master + //monitor master + if(path.startsWith(getZNodeParentPath(ZKNodeType.MASTER)+Constants.SINGLE_SLASH)){ handleMasterEvent(event,path); - - }else if(path.startsWith(getZNodeParentPath(ZKNodeType.WORKER)+Constants.SINGLE_SLASH)){ //monitor worker + }else if(path.startsWith(getZNodeParentPath(ZKNodeType.WORKER)+Constants.SINGLE_SLASH)){ + //monitor worker handleWorkerEvent(event,path); } - //other path event, ignore } /** @@ -187,8 +134,6 @@ public class ZKMasterClient extends AbstractZKClient { String serverHost = getHostByEventDataPath(path); // handle dead server handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP); - //alert server down. - alertServerDown(serverHost, zkNodeType); //failover server if(failover){ failoverServerWhenDown(serverHost, zkNodeType); @@ -210,8 +155,8 @@ public class ZKMasterClient extends AbstractZKClient { * @throws Exception exception */ private void failoverServerWhenDown(String serverHost, ZKNodeType zkNodeType) throws Exception { - if(StringUtils.isEmpty(serverHost)){ - return ; + if(StringUtils.isEmpty(serverHost) || serverHost.startsWith(OSUtils.getHost())){ + return ; } switch (zkNodeType){ case MASTER: @@ -242,20 +187,10 @@ public class ZKMasterClient extends AbstractZKClient { } } - /** - * send alert when server down - * - * @param serverHost server host - * @param zkNodeType zookeeper node type - */ - private void alertServerDown(String serverHost, ZKNodeType zkNodeType) { - - String serverType = zkNodeType.toString(); - alertDao.sendServerStopedAlert(1, serverHost, serverType); - } - /** * monitor master + * @param event event + * @param path path */ public void handleMasterEvent(TreeCacheEvent event, String path){ switch (event.getType()) { @@ -263,10 +198,6 @@ public class ZKMasterClient extends AbstractZKClient { logger.info("master node added : {}", path); break; case NODE_REMOVED: - String serverHost = getHostByEventDataPath(path); - if (checkServerSelfDead(serverHost, ZKNodeType.MASTER)) { - return; - } removeZKNodePath(path, ZKNodeType.MASTER, true); break; default: @@ -276,6 +207,8 @@ public class ZKMasterClient extends AbstractZKClient { /** * monitor worker + * @param event event + * @param path path */ public void handleWorkerEvent(TreeCacheEvent event, String path){ switch (event.getType()) { @@ -291,19 +224,9 @@ public class ZKMasterClient extends AbstractZKClient { } } - - /** - * get master znode - * - * @return master zookeeper node - */ - public String getMasterZNode() { - return masterZNode; - } - /** * task needs failover if task start before worker starts - * + * * @param taskInstance task instance * @return true if task instance need fail over */ @@ -317,10 +240,10 @@ public class ZKMasterClient extends AbstractZKClient { } // if the worker node exists in zookeeper, we must check the task starts after the worker - if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){ - //if task start after worker starts, there is no need to failover the task. - if(checkTaskAfterWorkerStart(taskInstance)){ - taskNeedFailover = false; + if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){ + //if task start after worker starts, there is no need to failover the task. + if(checkTaskAfterWorkerStart(taskInstance)){ + taskNeedFailover = false; } } return taskNeedFailover; @@ -333,15 +256,15 @@ public class ZKMasterClient extends AbstractZKClient { * @return true if task instance start time after worker server start date */ private boolean checkTaskAfterWorkerStart(TaskInstance taskInstance) { - if(StringUtils.isEmpty(taskInstance.getHost())){ - return false; + if(StringUtils.isEmpty(taskInstance.getHost())){ + return false; } - Date workerServerStartDate = null; - List workerServers = getServersList(ZKNodeType.WORKER); - for(Server workerServer : workerServers){ - if(workerServer.getHost().equals(taskInstance.getHost())){ - workerServerStartDate = workerServer.getCreateTime(); - break; + Date workerServerStartDate = null; + List workerServers = getServersList(ZKNodeType.WORKER); + for(Server workerServer : workerServers){ + if(workerServer.getHost().equals(taskInstance.getHost())){ + workerServerStartDate = workerServer.getCreateTime(); + break; } } @@ -357,7 +280,7 @@ public class ZKMasterClient extends AbstractZKClient { * * 1. kill yarn job if there are yarn jobs in tasks. * 2. change task state from running to need failover. - * 3. failover all tasks when workerHost is null + * 3. failover all tasks when workerHost is null * @param workerHost worker host */ @@ -379,15 +302,20 @@ public class ZKMasterClient extends AbstractZKClient { if(needCheckWorkerAlive){ if(!checkTaskInstanceNeedFailover(taskInstance)){ continue; - } + } } - ProcessInstance instance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); - if(instance!=null){ - taskInstance.setProcessInstance(instance); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); + if(processInstance != null){ + taskInstance.setProcessInstance(processInstance); } + + TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(processInstance) + .create(); // only kill yarn job if exists , the local thread has exited - ProcessUtils.killYarnJob(taskInstance); + ProcessUtils.killYarnJob(taskExecutionContext); taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); processService.saveTaskInstance(taskInstance); @@ -413,4 +341,10 @@ public class ZKMasterClient extends AbstractZKClient { logger.info("master failover end"); } + public InterProcessMutex blockAcquireMutex() throws Exception { + InterProcessMutex mutex = new InterProcessMutex(getZkClient(), getMasterLockPath()); + mutex.acquire(); + return mutex; + } + } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java deleted file mode 100644 index 7ddee3b2a1..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.zk; - -import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.ZKNodeType; -import org.apache.commons.lang.StringUtils; -import org.apache.curator.framework.CuratorFramework; -import org.apache.dolphinscheduler.service.zk.AbstractZKClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.stereotype.Component; - - -/** - * zookeeper worker client - * single instance - */ -@Component -public class ZKWorkerClient extends AbstractZKClient { - - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(ZKWorkerClient.class); - - - /** - * worker znode - */ - private String workerZNode = null; - - - /** - * init - */ - public void init(){ - - logger.info("initialize worker client..."); - // init system znode - this.initSystemZNode(); - - // register worker - this.registWorker(); - } - - /** - * register worker - */ - private void registWorker(){ - try { - String serverPath = registerServer(ZKNodeType.WORKER); - if(StringUtils.isEmpty(serverPath)){ - System.exit(-1); - } - workerZNode = serverPath; - } catch (Exception e) { - logger.error("register worker failure",e); - System.exit(-1); - } - } - - /** - * handle path events that this class cares about - * @param client zkClient - * @param event path event - * @param path zk path - */ - @Override - protected void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) { - if(path.startsWith(getZNodeParentPath(ZKNodeType.WORKER)+Constants.SINGLE_SLASH)){ - handleWorkerEvent(event,path); - } - } - - /** - * monitor worker - */ - public void handleWorkerEvent(TreeCacheEvent event, String path){ - switch (event.getType()) { - case NODE_ADDED: - logger.info("worker node added : {}", path); - break; - case NODE_REMOVED: - //find myself dead - String serverHost = getHostByEventDataPath(path); - if(checkServerSelfDead(serverHost, ZKNodeType.WORKER)){ - return; - } - break; - default: - break; - } - } - - /** - * get worker znode - * @return worker zookeeper node - */ - public String getWorkerZNode() { - return workerZNode; - } - -} diff --git a/dolphinscheduler-server/src/main/resources/config/install_config.conf b/dolphinscheduler-server/src/main/resources/config/install_config.conf index 0378490abb..4671be7371 100644 --- a/dolphinscheduler-server/src/main/resources/config/install_config.conf +++ b/dolphinscheduler-server/src/main/resources/config/install_config.conf @@ -15,11 +15,126 @@ # limitations under the License. # -installPath=/data1_1T/dolphinscheduler -deployUser=dolphinscheduler -ips=ark0,ark1,ark2,ark3,ark4 -sshPort=22 -masters=ark0,ark1 -workers=ark2,ark3,ark4 -alertServer=ark3 -apiServers=ark1 + +# NOTICE : If the following config has special characters in the variable `.*[]^${}\+?|()@#&`, Please escape, for example, `[` escape to `\[` +# postgresql or mysql +dbtype="mysql" + +# db config +# db address and port +dbhost="192.168.xx.xx:3306" + +# db username +username="xx" + +# db passwprd +# NOTICE: if there are special characters, please use the \ to escape, for example, `[` escape to `\[` +password="xx" + +# zk cluster +zkQuorum="192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181" + +# Note: the target installation path for dolphinscheduler, please not config as the same as the current path (pwd) +installPath="/data1_1T/dolphinscheduler" + +# deployment user +# Note: the deployment user needs to have sudo privileges and permissions to operate hdfs. If hdfs is enabled, the root directory needs to be created by itself +deployUser="dolphinscheduler" + + +# alert config +# mail server host +mailServerHost="smtp.exmail.qq.com" + +# mail server port +# note: Different protocols and encryption methods correspond to different ports, when SSL/TLS is enabled, make sure the port is correct. +mailServerPort="25" + +# sender +mailSender="xxxxxxxxxx" + +# user +mailUser="xxxxxxxxxx" + +# sender password +# note: The mail.passwd is email service authorization code, not the email login password. +mailPassword="xxxxxxxxxx" + +# TLS mail protocol support +starttlsEnable="false" + +sslTrust="xxxxxxxxxx" + +# SSL mail protocol support +# note: The SSL protocol is enabled by default. +# only one of TLS and SSL can be in the true state. +sslEnable="true" + + +# resource storage type:HDFS,S3,NONE +resourceStorageType="NONE" + +# if resourceStorageType is HDFS,defaultFS write namenode address,HA you need to put core-site.xml and hdfs-site.xml in the conf directory. +# if S3,write S3 address,HA,for example :s3a://dolphinscheduler, +# Note,s3 be sure to create the root directory /dolphinscheduler +defaultFS="hdfs://mycluster:8020" + +# if resourceStorageType is S3, the following three configuration is required, otherwise please ignore +s3Endpoint="http://192.168.xx.xx:9010" +s3AccessKey="xxxxxxxxxx" +s3SecretKey="xxxxxxxxxx" + +# if not use hadoop resourcemanager, please keep default value; if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty +yarnHaIps="192.168.xx.xx,192.168.xx.xx" + +# if resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ark1 to actual resourcemanager hostname. +singleYarnIp="ark1" + +# resource store on HDFS/S3 path, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。/dolphinscheduler is recommended +resourceUploadPath="/dolphinscheduler" + +# who have permissions to create directory under HDFS/S3 root path +# Note: if kerberos is enabled, please config hdfsRootUser= +hdfsRootUser="hdfs" + +# kerberos config +# whether kerberos starts, if kerberos starts, following four items need to config, otherwise please ignore +kerberosStartUp="false" +# kdc krb5 config file path +krb5ConfPath="$installPath/conf/krb5.conf" +# keytab username +keytabUserName="hdfs-mycluster@ESZ.COM" +# username keytab path +keytabPath="$installPath/conf/hdfs.headless.keytab" + + +# api server port +apiServerPort="12345" + + +# install hosts +# Note: install the scheduled hostname list. If it is pseudo-distributed, just write a pseudo-distributed hostname +ips="ark0,ark1,ark2,ark3,ark4" + +# ssh port, default 22 +# Note: if ssh port is not default, modify here +sshPort="22" + +# run master machine +# Note: list of hosts hostname for deploying master +masters="ark0,ark1" + +# run worker machine +# note: list of machine hostnames for deploying workers +workers="ark2,ark3,ark4" + +# run alert machine +# note: list of machine hostnames for deploying alert server +alertServer="ark3" + +# run api machine +# note: list of machine hostnames for deploying api server +apiServers="ark1" + +# whether to start monitoring self-starting scripts +monitorServerState="false" diff --git a/dolphinscheduler-server/src/main/resources/logback-master.xml b/dolphinscheduler-server/src/main/resources/logback-master.xml new file mode 100644 index 0000000000..7410c01f05 --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/logback-master.xml @@ -0,0 +1,82 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + INFO + + + + taskAppId + ${log.base} + + + + ${log.base}/${taskAppId}.log + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + + UTF-8 + + true + + + + + + ${log.base}/dolphinscheduler-master.log + + + ${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log + 168 + 200MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/resources/logback-worker.xml b/dolphinscheduler-server/src/main/resources/logback-worker.xml new file mode 100644 index 0000000000..be1d0acde5 --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/logback-worker.xml @@ -0,0 +1,83 @@ + + + + + + + + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n + + UTF-8 + + + + + + + + INFO + + + + taskAppId + ${log.base} + + + + ${log.base}/${taskAppId}.log + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + + UTF-8 + + true + + + + + ${log.base}/dolphinscheduler-worker.log + + INFO + + + + ${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log + 168 + 200MB + + + + [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %messsage%n + + UTF-8 + + + + + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/resources/master.properties b/dolphinscheduler-server/src/main/resources/master.properties new file mode 100644 index 0000000000..2f75aa50ad --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/master.properties @@ -0,0 +1,41 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# master execute thread num +#master.exec.threads=100 + +# master execute task number in parallel +#master.exec.task.num=20 + +# master heartbeat interval +#master.heartbeat.interval=10 + +# master commit task retry times +#master.task.commit.retryTimes=5 + +# master commit task interval +#master.task.commit.interval=1000 + + +# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 +#master.max.cpuload.avg=100 + +# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. +#master.reserved.memory=0.3 + +# master listen port +#master.listen.port=5678 \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/resources/worker.properties b/dolphinscheduler-server/src/main/resources/worker.properties new file mode 100644 index 0000000000..d078f26ca6 --- /dev/null +++ b/dolphinscheduler-server/src/main/resources/worker.properties @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# worker execute thread num +#worker.exec.threads=100 + +# worker heartbeat interval +#worker.heartbeat.interval=10 + +# submit the number of tasks at a time +#worker.fetch.task.num = 3 + +# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 +#worker.max.cpuload.avg=100 + +# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G. +#worker.reserved.memory=0.3 + +# worker listener port +#worker.listen.port: 1234 + +# default worker group +#worker.group=default \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/MasterLogFilterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/MasterLogFilterTest.java similarity index 98% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/MasterLogFilterTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/MasterLogFilterTest.java index 8cf6cfc2df..1a546951d6 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/MasterLogFilterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/MasterLogFilterTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java similarity index 99% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java index 727ab41002..6319bf1ee4 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/SensitiveDataConverterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/SensitiveDataConverterTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminatorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java similarity index 98% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminatorTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java index ff298000f5..190847541c 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogDiscriminatorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogDiscriminatorTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogFilterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java similarity index 98% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogFilterTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java index 5cca6403c8..d8abb48d72 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/TaskLogFilterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/TaskLogFilterTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/WorkerLogFilterTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/WorkerLogFilterTest.java similarity index 98% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/WorkerLogFilterTest.java rename to dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/WorkerLogFilterTest.java index 90b154407f..dbcd4b8633 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/log/WorkerLogFilterTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/log/WorkerLogFilterTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.log; +package org.apache.dolphinscheduler.server.log; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java new file mode 100644 index 0000000000..299d4ba800 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master; + + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.runner.ConditionsTaskExecThread; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; + +import java.util.ArrayList; +import java.util.List; + +@RunWith(MockitoJUnitRunner.Silent.class) +public class ConditionsTaskTest { + + + private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class); + + private ProcessService processService; + private ApplicationContext applicationContext; + + + private MasterConfig config; + + @Before + public void before() { + config = new MasterConfig(); + config.setMasterTaskCommitRetryTimes(3); + config.setMasterTaskCommitInterval(1000); + processService = Mockito.mock(ProcessService.class); + applicationContext = Mockito.mock(ApplicationContext.class); + SpringApplicationContext springApplicationContext = new SpringApplicationContext(); + springApplicationContext.setApplicationContext(applicationContext); + Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); + Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config); + + Mockito.when(processService + .findTaskInstanceById(252612)) + .thenReturn(getTaskInstance()); + + Mockito.when(processService.saveTaskInstance(getTaskInstance())) + .thenReturn(true); + + Mockito.when(processService.findProcessInstanceById(10112)) + .thenReturn(getProcessInstance()); + + Mockito.when(processService + .findValidTaskListByProcessId(10112)) + .thenReturn(getTaskInstances()); + } + + @Test + public void testCondition(){ + TaskInstance taskInstance = getTaskInstance(); + String dependString = "{\"dependTaskList\":[{\"dependItemList\":[{\"depTasks\":\"1\",\"status\":\"SUCCESS\"}],\"relation\":\"AND\"}],\"relation\":\"AND\"}"; + String conditionResult = "{\"successNode\":[\"2\"],\"failedNode\":[\"3\"]}"; + + taskInstance.setDependency(dependString); + Mockito.when(processService.submitTask(taskInstance)) + .thenReturn(taskInstance); + ConditionsTaskExecThread conditions = + new ConditionsTaskExecThread(taskInstance); + + try { + conditions.call(); + } catch (Exception e) { + e.printStackTrace(); + } + + Assert.assertEquals(ExecutionStatus.SUCCESS, conditions.getTaskInstance().getState()); + } + + + private TaskInstance getTaskInstance(){ + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(252612); + taskInstance.setName("C"); + taskInstance.setTaskType("CONDITIONS"); + taskInstance.setProcessInstanceId(10112); + taskInstance.setProcessDefinitionId(100001); + return taskInstance; + } + + + + private List getTaskInstances(){ + List list = new ArrayList<>(); + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(199999); + taskInstance.setName("1"); + taskInstance.setState(ExecutionStatus.SUCCESS); + list.add(taskInstance); + return list; + } + + private ProcessInstance getProcessInstance(){ + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(10112); + processInstance.setProcessDefinitionId(100001); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + + return processInstance; + } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java new file mode 100644 index 0000000000..a65b0508d8 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.DateInterval; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.utils.dependent.DependentDateUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.runner.DependentTaskExecThread; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +@RunWith(MockitoJUnitRunner.Silent.class) +public class DependentTaskTest { + + private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class); + + private ProcessService processService; + private ApplicationContext applicationContext; + + + private MasterConfig config; + + @Before + public void before() throws Exception{ + + config = new MasterConfig(); + config.setMasterTaskCommitRetryTimes(3); + config.setMasterTaskCommitInterval(1000); + processService = Mockito.mock(ProcessService.class); + DateInterval dateInterval =DependentDateUtils.getTodayInterval(new Date()).get(0); + Mockito.when(processService + .findLastRunningProcess(4, dateInterval.getStartTime(), + dateInterval.getEndTime())) + .thenReturn(findLastProcessInterval()); + + + + Mockito.when(processService + .getTaskNodeListByDefinitionId(4)) + .thenReturn(getTaskNodes()); + Mockito.when(processService + .findValidTaskListByProcessId(11)) + .thenReturn(getTaskInstances()); + + Mockito.when(processService + .findTaskInstanceById(252612)) + .thenReturn(getTaskInstance()); + + + Mockito.when(processService.findProcessInstanceById(10111)) + .thenReturn(getProcessInstance()); + Mockito.when(processService.findProcessDefineById(0)) + .thenReturn(getProcessDefinition()); + Mockito.when(processService.saveTaskInstance(getTaskInstance())) + .thenReturn(true); + + applicationContext = Mockito.mock(ApplicationContext.class); + SpringApplicationContext springApplicationContext = new SpringApplicationContext(); + springApplicationContext.setApplicationContext(applicationContext); + Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); + Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config); + } + + @Test + public void test() throws Exception{ + + TaskInstance taskInstance = getTaskInstance(); + String dependString = "{\"dependTaskList\":[{\"dependItemList\":[{\"dateValue\":\"today\",\"depTasks\":\"ALL\",\"projectId\":1,\"definitionList\":[{\"label\":\"C\",\"value\":4},{\"label\":\"B\",\"value\":3},{\"label\":\"A\",\"value\":2}],\"cycle\":\"day\",\"definitionId\":4}],\"relation\":\"AND\"}],\"relation\":\"AND\"}"; + taskInstance.setDependency(dependString); + Mockito.when(processService.submitTask(taskInstance)) + .thenReturn(taskInstance); + DependentTaskExecThread dependentTask = + new DependentTaskExecThread(taskInstance); + + dependentTask.call(); + + Assert.assertEquals(ExecutionStatus.SUCCESS, dependentTask.getTaskInstance().getState()); + } + + private ProcessInstance findLastProcessInterval(){ + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(11); + processInstance.setProcessDefinitionId(4); + processInstance.setState(ExecutionStatus.SUCCESS); + return processInstance; + } + + private ProcessDefinition getProcessDefinition(){ + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(0); + return processDefinition; + } + + private ProcessInstance getProcessInstance(){ + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(10111); + processInstance.setProcessDefinitionId(0); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + + return processInstance; + } + + + private List getTaskNodes(){ + List list = new ArrayList<>(); + TaskNode taskNode = new TaskNode(); + taskNode.setName("C"); + taskNode.setType("SQL"); + list.add(taskNode); + return list; + } + + private List getTaskInstances(){ + List list = new ArrayList<>(); + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setName("C"); + taskInstance.setState(ExecutionStatus.SUCCESS); + taskInstance.setDependency("1231"); + list.add(taskInstance); + return list; + } + + private TaskInstance getTaskInstance(){ + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setTaskType("DEPENDENT"); + taskInstance.setId(252612); + taskInstance.setName("C"); + taskInstance.setProcessInstanceId(10111); + return taskInstance; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java index d2a0fb2407..4dbf9df70e 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.master; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.utils.DateUtils; @@ -85,13 +85,13 @@ public class MasterExecThreadTest { Map cmdParam = new HashMap<>(); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, "2020-01-01 00:00:00"); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, "2020-01-31 23:00:00"); - Mockito.when(processInstance.getCommandParam()).thenReturn(JSONObject.toJSONString(cmdParam)); + Mockito.when(processInstance.getCommandParam()).thenReturn(JSON.toJSONString(cmdParam)); ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setGlobalParamMap(Collections.EMPTY_MAP); processDefinition.setGlobalParamList(Collections.EMPTY_LIST); Mockito.when(processInstance.getProcessDefinition()).thenReturn(processDefinition); - masterExecThread = PowerMockito.spy(new MasterExecThread(processInstance, processService)); + masterExecThread = PowerMockito.spy(new MasterExecThread(processInstance, processService,null)); // prepareProcess init dag Field dag = MasterExecThread.class.getDeclaredField("dag"); dag.setAccessible(true); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java new file mode 100644 index 0000000000..958df01cf8 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch; + + +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; +import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; +import org.apache.dolphinscheduler.server.registry.DependencyConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.utils.ExecutionContextTestUtils; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +/** + * executor dispatch test + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, WorkerRegistry.class, + NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, WorkerConfig.class, + ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class}) +public class ExecutorDispatcherTest { + + @Autowired + private ExecutorDispatcher executorDispatcher; + + @Autowired + private WorkerRegistry workerRegistry; + + @Autowired + private WorkerConfig workerConfig; + + @Test(expected = ExecuteException.class) + public void testDispatchWithException() throws ExecuteException { + ExecutionContext executionContext = ExecutionContextTestUtils.getExecutionContext(10000); + executorDispatcher.dispatch(executionContext); + } + + @Test + public void testDispatch() throws ExecuteException { + int port = 30000; + final NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(port); + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(org.apache.dolphinscheduler.remote.command.CommandType.TASK_EXECUTE_REQUEST, Mockito.mock(TaskExecuteProcessor.class)); + nettyRemotingServer.start(); + // + workerConfig.setListenPort(port); + workerRegistry.registry(); + + ExecutionContext executionContext = ExecutionContextTestUtils.getExecutionContext(port); + executorDispatcher.dispatch(executionContext); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManagerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManagerTest.java new file mode 100644 index 0000000000..5955f46056 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManagerTest.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.executor; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; +import org.apache.dolphinscheduler.server.registry.DependencyConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +/** + * netty executor manager test + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, WorkerRegistry.class, + ZookeeperNodeManager.class, ZookeeperRegistryCenter.class, WorkerConfig.class, + ZookeeperCachedOperator.class, ZookeeperConfig.class, SpringApplicationContext.class, NettyExecutorManager.class}) +public class NettyExecutorManagerTest { + + @Autowired + private NettyExecutorManager nettyExecutorManager; + + + @Test + public void testExecute() throws ExecuteException{ + final NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(30000); + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(org.apache.dolphinscheduler.remote.command.CommandType.TASK_EXECUTE_REQUEST, new TaskExecuteProcessor()); + nettyRemotingServer.start(); + TaskInstance taskInstance = Mockito.mock(TaskInstance.class); + ProcessDefinition processDefinition = Mockito.mock(ProcessDefinition.class); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setCommandType(CommandType.COMPLEMENT_DATA); + taskInstance.setProcessInstance(processInstance); + TaskExecutionContext context = TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(processInstance) + .buildProcessDefinitionRelatedInfo(processDefinition) + .create(); + ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER); + executionContext.setHost(Host.of(OSUtils.getHost() + ":" + serverConfig.getListenPort())); + Boolean execute = nettyExecutorManager.execute(executionContext); + Assert.assertTrue(execute); + nettyRemotingServer.close(); + } + + @Test(expected = ExecuteException.class) + public void testExecuteWithException() throws ExecuteException{ + TaskInstance taskInstance = Mockito.mock(TaskInstance.class); + ProcessDefinition processDefinition = Mockito.mock(ProcessDefinition.class); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setCommandType(CommandType.COMPLEMENT_DATA); + taskInstance.setProcessInstance(processInstance); + TaskExecutionContext context = TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(processInstance) + .buildProcessDefinitionRelatedInfo(processDefinition) + .create(); + ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER); + executionContext.setHost(Host.of(OSUtils.getHost() + ":4444")); + nettyExecutorManager.execute(executionContext); + + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManagerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManagerTest.java new file mode 100644 index 0000000000..e223a762dd --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManagerTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.host; + + +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.registry.DependencyConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.utils.ExecutionContextTestUtils; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + + +/** + * round robin host manager test + */ +@RunWith(SpringRunner.class) +@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, WorkerRegistry.class, ZookeeperRegistryCenter.class, WorkerConfig.class, + ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class}) +public class RoundRobinHostManagerTest { + + + @Autowired + private ZookeeperNodeManager zookeeperNodeManager; + + @Autowired + private WorkerRegistry workerRegistry; + + @Autowired + private WorkerConfig workerConfig; + + @Test + public void testSelectWithEmptyResult(){ + RoundRobinHostManager roundRobinHostManager = new RoundRobinHostManager(); + roundRobinHostManager.setZookeeperNodeManager(zookeeperNodeManager); + ExecutionContext context = ExecutionContextTestUtils.getExecutionContext(10000); + Host emptyHost = roundRobinHostManager.select(context); + Assert.assertTrue(StringUtils.isEmpty(emptyHost.getAddress())); + } + + @Test + public void testSelectWithResult(){ + workerRegistry.registry(); + RoundRobinHostManager roundRobinHostManager = new RoundRobinHostManager(); + roundRobinHostManager.setZookeeperNodeManager(zookeeperNodeManager); + ExecutionContext context = ExecutionContextTestUtils.getExecutionContext(10000); + Host host = roundRobinHostManager.select(context); + Assert.assertTrue(StringUtils.isNotEmpty(host.getAddress())); + Assert.assertTrue(host.getAddress().equalsIgnoreCase(OSUtils.getHost() + ":" + workerConfig.getListenPort())); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java new file mode 100644 index 0000000000..fadaa84a69 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import org.apache.dolphinscheduler.remote.utils.Host; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Collection; + + +public class LowerWeightRoundRobinTest { + + + @Test + public void testSelect(){ + Collection sources = new ArrayList<>(); + sources.add(new HostWeight(Host.of("192.158.2.1:11"), 0.06, 0.44, 3.84)); + sources.add(new HostWeight(Host.of("192.158.2.1:22"), 0.06, 0.56, 3.24)); + sources.add(new HostWeight(Host.of("192.158.2.1:33"), 0.06, 0.80, 3.15)); + System.out.println(sources); + LowerWeightRoundRobin roundRobin = new LowerWeightRoundRobin(); + for(int i = 0; i < 100; i ++){ + System.out.println(roundRobin.select(sources)); + } + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java new file mode 100644 index 0000000000..a14ea32e4e --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RandomSelectorTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; + +/** + * random selector + */ +public class RandomSelectorTest { + + @Test(expected = IllegalArgumentException.class) + public void testSelectWithIllegalArgumentException(){ + RandomSelector selector = new RandomSelector(); + selector.select(Collections.EMPTY_LIST); + } + + @Test + public void testSelect1(){ + RandomSelector selector = new RandomSelector(); + String result = selector.select(Arrays.asList("1")); + Assert.assertTrue(StringUtils.isNotEmpty(result)); + Assert.assertTrue(result.equalsIgnoreCase("1")); + } + + @Test + public void testSelect(){ + RandomSelector selector = new RandomSelector(); + int result = selector.select(Arrays.asList(1,2,3,4,5,6,7)); + Assert.assertTrue(result >= 1 && result <= 7); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java new file mode 100644 index 0000000000..adc55a4774 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.dispatch.host.assign; + +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +/** + * round robin selector + */ +public class RoundRobinSelectorTest { + + @Test(expected = IllegalArgumentException.class) + public void testSelectWithIllegalArgumentException(){ + RoundRobinSelector selector = new RoundRobinSelector(); + selector.select(Collections.EMPTY_LIST); + } + + @Test + public void testSelect1(){ + RoundRobinSelector selector = new RoundRobinSelector(); + String result = selector.select(Arrays.asList("1")); + Assert.assertTrue(StringUtils.isNotEmpty(result)); + Assert.assertTrue(result.equalsIgnoreCase("1")); + } + + @Test + public void testSelect(){ + RoundRobinSelector selector = new RoundRobinSelector(); + List sources = Arrays.asList(1, 2, 3, 4, 5, 6, 7); + int result = selector.select(sources); + Assert.assertTrue(result == 1); + int result2 = selector.select(Arrays.asList(1,2,3,4,5,6,7)); + Assert.assertTrue(result2 == 2); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java new file mode 100644 index 0000000000..dcba83271c --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.master.processor.queue; + + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.server.registry.DependencyConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import java.util.Date; + +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, TaskResponseService.class, ZookeeperRegistryCenter.class, + ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class, TaskResponseService.class}) +public class TaskResponseServiceTest { + + @Autowired + private TaskResponseService taskResponseService; + + @Test + public void testAdd(){ + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXEUTION, new Date(), + "", "", "", 1); + taskResponseService.addResponse(taskResponseEvent); + Assert.assertTrue(taskResponseService.getEventQueue().size() == 1); + try { + Thread.sleep(10); + } catch (InterruptedException ignore) { + } + //after sleep, inner worker will take the event + Assert.assertTrue(taskResponseService.getEventQueue().size() == 0); + } + + @Test + public void testStop(){ + TaskResponseEvent taskResponseEvent = TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXEUTION, new Date(), + "", "", "", 1); + taskResponseService.addResponse(taskResponseEvent); + taskResponseService.stop(); + Assert.assertTrue(taskResponseService.getEventQueue().size() == 0); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java new file mode 100644 index 0000000000..a482029a1e --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.registry; + +import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.List; +import java.util.concurrent.TimeUnit; + + +/** + * master registry test + */ +@RunWith(SpringRunner.class) +@ContextConfiguration(classes={SpringZKServer.class, MasterRegistry.class,ZookeeperRegistryCenter.class, MasterConfig.class, ZookeeperCachedOperator.class, ZookeeperConfig.class}) +public class MasterRegistryTest { + + @Autowired + private MasterRegistry masterRegistry; + + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + @Autowired + private MasterConfig masterConfig; + + @Test + public void testRegistry() throws InterruptedException { + masterRegistry.registry(); + String masterPath = zookeeperRegistryCenter.getMasterPath(); + TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); //wait heartbeat info write into zk node + String masterNodePath = masterPath + "/" + (Constants.LOCAL_ADDRESS + ":" + masterConfig.getListenPort()); + String heartbeat = zookeeperRegistryCenter.getZookeeperCachedOperator().get(masterNodePath); + Assert.assertEquals(5, heartbeat.split(",").length); + } + + @Test + public void testUnRegistry() throws InterruptedException { + masterRegistry.registry(); + TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); //wait heartbeat info write into zk node + masterRegistry.unRegistry(); + String masterPath = zookeeperRegistryCenter.getMasterPath(); + List childrenKeys = zookeeperRegistryCenter.getZookeeperCachedOperator().getChildrenKeys(masterPath); + Assert.assertTrue(childrenKeys.isEmpty()); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java new file mode 100644 index 0000000000..0adea44cfd --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/DependencyConfig.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.registry; + +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.server.master.dispatch.host.HostManager; +import org.apache.dolphinscheduler.server.master.dispatch.host.RandomHostManager; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.mockito.Mockito; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * dependency config + */ +@Configuration +public class DependencyConfig { + + @Bean + public AlertDao alertDao() { + return new AlertDao(); + } + + @Bean + public AlertMapper alertMapper() { + return Mockito.mock(AlertMapper.class); + } + + @Bean + public UserAlertGroupMapper userAlertGroupMapper() { + return Mockito.mock(UserAlertGroupMapper.class); + } + + @Bean + public TaskInstanceCacheManagerImpl taskInstanceCacheManagerImpl(){ + return Mockito.mock(TaskInstanceCacheManagerImpl.class); + } + + @Bean + public ProcessService processService(){ + return Mockito.mock(ProcessService.class); + } + + @Bean + public UserMapper userMapper(){ + return Mockito.mock(UserMapper.class); + } + + @Bean + public ProcessDefinitionMapper processDefineMapper(){ + return Mockito.mock(ProcessDefinitionMapper.class); + } + + @Bean + public ProcessInstanceMapper processInstanceMapper(){ + return Mockito.mock(ProcessInstanceMapper.class); + } + + @Bean + public DataSourceMapper dataSourceMapper(){ + return Mockito.mock(DataSourceMapper.class); + } + + @Bean + public ProcessInstanceMapMapper processInstanceMapMapper(){ + return Mockito.mock(ProcessInstanceMapMapper.class); + } + + @Bean + public TaskInstanceMapper taskInstanceMapper(){ + return Mockito.mock(TaskInstanceMapper.class); + } + + @Bean + public CommandMapper commandMapper(){ + return Mockito.mock(CommandMapper.class); + } + + @Bean + public ScheduleMapper scheduleMapper(){ + return Mockito.mock(ScheduleMapper.class); + } + + @Bean + public UdfFuncMapper udfFuncMapper(){ + return Mockito.mock(UdfFuncMapper.class); + } + + @Bean + public ResourceMapper resourceMapper(){ + return Mockito.mock(ResourceMapper.class); + } + + @Bean + public WorkerGroupMapper workerGroupMapper(){ + return Mockito.mock(WorkerGroupMapper.class); + } + + @Bean + public ErrorCommandMapper errorCommandMapper(){ + return Mockito.mock(ErrorCommandMapper.class); + } + + @Bean + public TenantMapper tenantMapper(){ + return Mockito.mock(TenantMapper.class); + } + + @Bean + public ProjectMapper projectMapper(){ + return Mockito.mock(ProjectMapper.class); + } + + @Bean + public TaskCallbackService taskCallbackService(){ + return Mockito.mock(TaskCallbackService.class); + } + + @Bean + public HostManager hostManager(){ + return new RandomHostManager(); + } + + @Bean + public TaskResponseService taskResponseService(){ + return Mockito.mock(TaskResponseService.class); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManagerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManagerTest.java new file mode 100644 index 0000000000..c99dfc1c9f --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManagerTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.registry; + + +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.registry.MasterRegistry; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import java.util.Map; +import java.util.Set; + +/** + * zookeeper node manager test + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, MasterRegistry.class,WorkerRegistry.class, + ZookeeperRegistryCenter.class, MasterConfig.class, WorkerConfig.class, + ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class}) +public class ZookeeperNodeManagerTest { + + @Autowired + private ZookeeperNodeManager zookeeperNodeManager; + + @Autowired + private MasterRegistry masterRegistry; + + @Autowired + private WorkerRegistry workerRegistry; + + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + @Autowired + private WorkerConfig workerConfig; + + @Autowired + private MasterConfig masterConfig; + + @Test + public void testGetMasterNodes(){ + masterRegistry.registry(); + try { + //let the zookeeperNodeManager catch the registry event + Thread.sleep(2000); + } catch (InterruptedException ignore) { + } + Set masterNodes = zookeeperNodeManager.getMasterNodes(); + Assert.assertTrue(CollectionUtils.isNotEmpty(masterNodes)); + Assert.assertEquals(1, masterNodes.size()); + Assert.assertEquals(OSUtils.getHost() + ":" + masterConfig.getListenPort(), masterNodes.iterator().next()); + } + + @Test + public void testGetWorkerGroupNodes(){ + workerRegistry.registry(); + try { + //let the zookeeperNodeManager catch the registry event + Thread.sleep(2000); + } catch (InterruptedException ignore) { + } + Map> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes(); + Assert.assertEquals(1, workerGroupNodes.size()); + Assert.assertEquals("default".trim(), workerGroupNodes.keySet().iterator().next()); + } + + @Test + public void testGetWorkerGroupNodesWithParam(){ + workerRegistry.registry(); + try { + //let the zookeeperNodeManager catch the registry event + Thread.sleep(3000); + } catch (InterruptedException ignore) { + } + Map> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes(); + Set workerNodes = zookeeperNodeManager.getWorkerGroupNodes("default"); + Assert.assertTrue(CollectionUtils.isNotEmpty(workerNodes)); + Assert.assertEquals(1, workerNodes.size()); + Assert.assertEquals(OSUtils.getHost() + ":" + workerConfig.getListenPort(), workerNodes.iterator().next()); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ExecutionContextTestUtils.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ExecutionContextTestUtils.java new file mode 100644 index 0000000000..26d904f798 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ExecutionContextTestUtils.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.utils; + + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; +import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; +import org.mockito.Mockito; + +/** + * for test use only + */ +public class ExecutionContextTestUtils { + + + public static ExecutionContext getExecutionContext(int port){ + TaskInstance taskInstance = Mockito.mock(TaskInstance.class); + ProcessDefinition processDefinition = Mockito.mock(ProcessDefinition.class); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setCommandType(CommandType.COMPLEMENT_DATA); + taskInstance.setProcessInstance(processInstance); + TaskExecutionContext context = TaskExecutionContextBuilder.get() + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(processInstance) + .buildProcessDefinitionRelatedInfo(processDefinition) + .create(); + ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER); + executionContext.setHost(Host.of(OSUtils.getHost() + ":" + port)); + + return executionContext; + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java new file mode 100644 index 0000000000..78ba3a6b44 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java @@ -0,0 +1,215 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.remote.utils.Host; +import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; +import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.server.master.registry.MasterRegistry; +import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; + +import java.util.Date; + +/** + * test task call back service + */ +@RunWith(SpringJUnit4ClassRunner.class) +@ContextConfiguration(classes={TaskCallbackServiceTestConfig.class, SpringZKServer.class, SpringApplicationContext.class, MasterRegistry.class, WorkerRegistry.class, + ZookeeperRegistryCenter.class, MasterConfig.class, WorkerConfig.class, + ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class, TaskCallbackService.class, + TaskResponseService.class, TaskAckProcessor.class,TaskResponseProcessor.class}) +public class TaskCallbackServiceTest { + + @Autowired + private TaskCallbackService taskCallbackService; + + @Autowired + private MasterRegistry masterRegistry; + + @Autowired + private TaskAckProcessor taskAckProcessor; + + @Autowired + private TaskResponseProcessor taskResponseProcessor; + + /** + * send ack test + * @throws Exception + */ + @Test + public void testSendAck() throws Exception{ + final NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(30000); + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor); + nettyRemotingServer.start(); + + final NettyClientConfig clientConfig = new NettyClientConfig(); + NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); + Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); + taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); + TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); + ackCommand.setTaskInstanceId(1); + ackCommand.setStartTime(new Date()); + taskCallbackService.sendAck(1, ackCommand.convert2Command()); + + Thread.sleep(5000); + + Stopper.stop(); + + Thread.sleep(5000); + + nettyRemotingServer.close(); + nettyRemotingClient.close(); + } + + /** + * send result test + * @throws Exception + */ + @Test + public void testSendResult() throws Exception{ + final NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(30000); + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, taskResponseProcessor); + nettyRemotingServer.start(); + + final NettyClientConfig clientConfig = new NettyClientConfig(); + NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); + Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); + taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); + TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(); + responseCommand.setTaskInstanceId(1); + responseCommand.setEndTime(new Date()); + + taskCallbackService.sendResult(1, responseCommand.convert2Command()); + + Thread.sleep(5000); + + Stopper.stop(); + + Thread.sleep(5000); + + nettyRemotingServer.close(); + nettyRemotingClient.close(); + } + + @Test(expected = IllegalArgumentException.class) + public void testSendAckWithIllegalArgumentException(){ + TaskExecuteAckCommand ackCommand = Mockito.mock(TaskExecuteAckCommand.class); + taskCallbackService.sendAck(1, ackCommand.convert2Command()); + Stopper.stop(); + } + + @Test(expected = IllegalStateException.class) + public void testSendAckWithIllegalStateException1(){ + masterRegistry.registry(); + final NettyServerConfig serverConfig = new NettyServerConfig(); + serverConfig.setListenPort(30000); + NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); + nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor); + nettyRemotingServer.start(); + + final NettyClientConfig clientConfig = new NettyClientConfig(); + NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); + Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); + taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); + channel.close(); + TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); + ackCommand.setTaskInstanceId(1); + ackCommand.setStartTime(new Date()); + + nettyRemotingServer.close(); + + taskCallbackService.sendAck(1, ackCommand.convert2Command()); + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + + Stopper.stop(); + + try { + Thread.sleep(5000); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + +// @Test(expected = IllegalStateException.class) +// public void testSendAckWithIllegalStateException2(){ +// masterRegistry.registry(); +// final NettyServerConfig serverConfig = new NettyServerConfig(); +// serverConfig.setListenPort(30000); +// NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); +// nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor); +// nettyRemotingServer.start(); +// +// final NettyClientConfig clientConfig = new NettyClientConfig(); +// NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); +// Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); +// taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); +// channel.close(); +// TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); +// ackCommand.setTaskInstanceId(1); +// ackCommand.setStartTime(new Date()); +// +// nettyRemotingServer.close(); +// +// taskCallbackService.sendAck(1, ackCommand.convert2Command()); +// try { +// Thread.sleep(5000); +// } catch (InterruptedException e) { +// e.printStackTrace(); +// } +// +// Stopper.stop(); +// +// try { +// Thread.sleep(5000); +// } catch (InterruptedException e) { +// e.printStackTrace(); +// } +// } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java new file mode 100644 index 0000000000..e6dd8e721e --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTestConfig.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.mockito.Mockito; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * dependency config + */ +@Configuration +public class TaskCallbackServiceTestConfig { + + + @Bean + public AlertDao alertDao() { + return new AlertDao(); + } + + @Bean + public AlertMapper alertMapper() { + return Mockito.mock(AlertMapper.class); + } + + @Bean + public UserAlertGroupMapper userAlertGroupMapper() { + return Mockito.mock(UserAlertGroupMapper.class); + } + + @Bean + public TaskInstanceCacheManagerImpl taskInstanceCacheManagerImpl(){ + return Mockito.mock(TaskInstanceCacheManagerImpl.class); + } + + @Bean + public ProcessService processService(){ + return Mockito.mock(ProcessService.class); + } + + @Bean + public UserMapper userMapper(){ + return Mockito.mock(UserMapper.class); + } + + @Bean + public ProcessDefinitionMapper processDefineMapper(){ + return Mockito.mock(ProcessDefinitionMapper.class); + } + + @Bean + public ProcessInstanceMapper processInstanceMapper(){ + return Mockito.mock(ProcessInstanceMapper.class); + } + + @Bean + public DataSourceMapper dataSourceMapper(){ + return Mockito.mock(DataSourceMapper.class); + } + + @Bean + public ProcessInstanceMapMapper processInstanceMapMapper(){ + return Mockito.mock(ProcessInstanceMapMapper.class); + } + + @Bean + public TaskInstanceMapper taskInstanceMapper(){ + return Mockito.mock(TaskInstanceMapper.class); + } + + @Bean + public CommandMapper commandMapper(){ + return Mockito.mock(CommandMapper.class); + } + + @Bean + public ScheduleMapper scheduleMapper(){ + return Mockito.mock(ScheduleMapper.class); + } + + @Bean + public UdfFuncMapper udfFuncMapper(){ + return Mockito.mock(UdfFuncMapper.class); + } + + @Bean + public ResourceMapper resourceMapper(){ + return Mockito.mock(ResourceMapper.class); + } + + @Bean + public WorkerGroupMapper workerGroupMapper(){ + return Mockito.mock(WorkerGroupMapper.class); + } + + @Bean + public ErrorCommandMapper errorCommandMapper(){ + return Mockito.mock(ErrorCommandMapper.class); + } + + @Bean + public TenantMapper tenantMapper(){ + return Mockito.mock(TenantMapper.class); + } + + @Bean + public ProjectMapper projectMapper(){ + return Mockito.mock(ProjectMapper.class); + } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryTest.java new file mode 100644 index 0000000000..d5f836e403 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryTest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.registry; + +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.zk.SpringZKServer; +import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit4.SpringRunner; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; + +/** + * worker registry test + */ +@RunWith(SpringRunner.class) +@ContextConfiguration(classes={SpringZKServer.class, WorkerRegistry.class,ZookeeperRegistryCenter.class, WorkerConfig.class, ZookeeperCachedOperator.class, ZookeeperConfig.class}) + +public class WorkerRegistryTest { + + @Autowired + private WorkerRegistry workerRegistry; + + @Autowired + private ZookeeperRegistryCenter zookeeperRegistryCenter; + + @Autowired + private WorkerConfig workerConfig; + + @Test + public void testRegistry() throws InterruptedException { + workerRegistry.registry(); + String workerPath = zookeeperRegistryCenter.getWorkerPath(); + Assert.assertEquals(DEFAULT_WORKER_GROUP, workerConfig.getWorkerGroup().trim()); + String instancePath = workerPath + "/" + workerConfig.getWorkerGroup().trim() + "/" + (OSUtils.getHost() + ":" + workerConfig.getListenPort()); + TimeUnit.SECONDS.sleep(workerConfig.getWorkerHeartbeatInterval() + 2); //wait heartbeat info write into zk node + String heartbeat = zookeeperRegistryCenter.getZookeeperCachedOperator().get(instancePath); + Assert.assertEquals(5, heartbeat.split(",").length); + } + + @Test + public void testUnRegistry() throws InterruptedException { + workerRegistry.registry(); + TimeUnit.SECONDS.sleep(workerConfig.getWorkerHeartbeatInterval() + 2); //wait heartbeat info write into zk node + workerRegistry.unRegistry(); + String workerPath = zookeeperRegistryCenter.getWorkerPath(); + String workerGroupPath = workerPath + "/" + workerConfig.getWorkerGroup().trim(); + List childrenKeys = zookeeperRegistryCenter.getZookeeperCachedOperator().getChildrenKeys(workerGroupPath); + Assert.assertTrue(childrenKeys.isEmpty()); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java index 5d4263644b..acc7a22ff0 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.worker.shell; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.TaskNode; @@ -55,20 +55,20 @@ public class ShellCommandExecutorTest { TaskProps taskProps = new TaskProps(); // processDefineId_processInstanceId_taskInstanceId - taskProps.setTaskDir("/opt/soft/program/tmp/dolphinscheduler/exec/flow/5/36/2864/7657"); + taskProps.setExecutePath("/opt/soft/program/tmp/dolphinscheduler/exec/flow/5/36/2864/7657"); taskProps.setTaskAppId("36_2864_7657"); // set tenant -> task execute linux user taskProps.setTenantCode("hdfs"); taskProps.setTaskStartTime(new Date()); taskProps.setTaskTimeout(360000); - taskProps.setTaskInstId(7657); + taskProps.setTaskInstanceId(7657); TaskInstance taskInstance = processService.findTaskInstanceById(7657); String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class); taskProps.setTaskParams(taskNode.getParams()); @@ -79,7 +79,9 @@ public class ShellCommandExecutorTest { taskInstance.getId())); - AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); +// AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); + + AbstractTask task = null; logger.info("task info : {}", task); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java index c395eabe51..49301c3906 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.worker.sql; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; @@ -97,22 +97,22 @@ public class SqlExecutorTest { */ private void sharedTestSqlTask(String nodeName, String taskAppId, String tenantCode, int taskInstId) throws Exception { TaskProps taskProps = new TaskProps(); - taskProps.setTaskDir(""); + taskProps.setExecutePath(""); // processDefineId_processInstanceId_taskInstanceId taskProps.setTaskAppId(taskAppId); // set tenant -> task execute linux user taskProps.setTenantCode(tenantCode); taskProps.setTaskStartTime(new Date()); taskProps.setTaskTimeout(360000); - taskProps.setTaskInstId(taskInstId); - taskProps.setNodeName(nodeName); + taskProps.setTaskInstanceId(taskInstId); + taskProps.setTaskName(nodeName); taskProps.setCmdTypeIfComplement(CommandType.START_PROCESS); TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); String taskJson = taskInstance.getTaskJson(); - TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); + TaskNode taskNode = JSON.parseObject(taskJson, TaskNode.class); taskProps.setTaskParams(taskNode.getParams()); @@ -123,9 +123,10 @@ public class SqlExecutorTest { taskInstance.getId())); - AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); +// AbstractTask task = TaskManager.newTask(taskInstance.getTaskType(), taskProps, taskLogger); + AbstractTask task = null; - logger.info("task info : {}", task); + logger.info("task info : {}", task); // job init task.init(); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java index bd7f27530a..a2a46ef5a5 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java @@ -29,6 +29,8 @@ import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; @@ -44,6 +46,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; +import static org.apache.dolphinscheduler.common.enums.CommandType.START_PROCESS; + /** * DataxTask Tester. */ @@ -51,6 +55,8 @@ public class DataxTaskTest { private static final Logger logger = LoggerFactory.getLogger(DataxTaskTest.class); + private static final String CONNECTION_PARAMS = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"; + private DataxTask dataxTask; private ProcessService processService; @@ -59,6 +65,9 @@ public class DataxTaskTest { private ApplicationContext applicationContext; + private TaskExecutionContext taskExecutionContext; + private TaskProps props = new TaskProps(); + @Before public void before() throws Exception { @@ -71,38 +80,73 @@ public class DataxTaskTest { Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskProps props = new TaskProps(); - props.setTaskDir("/tmp"); + props.setExecutePath("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTaskInstId(1); + props.setTaskInstanceId(1); props.setTenantCode("1"); props.setEnvFile(".dolphinscheduler_env.sh"); props.setTaskStartTime(new Date()); props.setTaskTimeout(0); props.setTaskParams( "{\"targetTable\":\"test\",\"postStatements\":[],\"jobSpeedByte\":1024,\"jobSpeedRecord\":1000,\"dtType\":\"MYSQL\",\"datasource\":1,\"dsType\":\"MYSQL\",\"datatarget\":2,\"jobSpeedByte\":0,\"sql\":\"select 1 as test from dual\",\"preStatements\":[\"delete from test\"],\"postStatements\":[\"delete from test\"]}"); - dataxTask = PowerMockito.spy(new DataxTask(props, logger)); + + taskExecutionContext = Mockito.mock(TaskExecutionContext.class); + Mockito.when(taskExecutionContext.getTaskParams()).thenReturn(props.getTaskParams()); + Mockito.when(taskExecutionContext.getExecutePath()).thenReturn("/tmp"); + Mockito.when(taskExecutionContext.getTaskAppId()).thenReturn("1"); + Mockito.when(taskExecutionContext.getTenantCode()).thenReturn("root"); + Mockito.when(taskExecutionContext.getStartTime()).thenReturn(new Date()); + Mockito.when(taskExecutionContext.getTaskTimeout()).thenReturn(10000); + Mockito.when(taskExecutionContext.getLogPath()).thenReturn("/tmp/dx"); + + + DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); + dataxTaskExecutionContext.setSourcetype(0); + dataxTaskExecutionContext.setTargetType(0); + dataxTaskExecutionContext.setSourceConnectionParams(CONNECTION_PARAMS); + dataxTaskExecutionContext.setTargetConnectionParams(CONNECTION_PARAMS); + Mockito.when(taskExecutionContext.getDataxTaskExecutionContext()).thenReturn(dataxTaskExecutionContext); + + dataxTask = PowerMockito.spy(new DataxTask(taskExecutionContext, logger)); dataxTask.init(); + props.setCmdTypeIfComplement(START_PROCESS); + setTaskParems(0); Mockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); Mockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); - String fileName = String.format("%s/%s_node.sh", props.getTaskDir(), props.getTaskAppId()); - Mockito.when(shellCommandExecutor.run(fileName, processService)).thenReturn(0); + String fileName = String.format("%s/%s_node.sh", props.getExecutePath(), props.getTaskAppId()); + Mockito.when(shellCommandExecutor.run(fileName)).thenReturn(null); + } + + private void setTaskParems(Integer customConfig) { + if (customConfig == 1) { + props.setTaskParams( + "{\"customConfig\":1, \"localParams\":[{\"prop\":\"test\",\"value\":\"38294729\"}],\"json\":\"{\\\"job\\\":{\\\"setting\\\":{\\\"speed\\\":{\\\"byte\\\":1048576},\\\"errorLimit\\\":{\\\"record\\\":0,\\\"percentage\\\":0.02}},\\\"content\\\":[{\\\"reader\\\":{\\\"name\\\":\\\"rdbmsreader\\\",\\\"parameter\\\":{\\\"username\\\":\\\"xxx\\\",\\\"password\\\":\\\"${test}\\\",\\\"column\\\":[\\\"id\\\",\\\"name\\\"],\\\"splitPk\\\":\\\"pk\\\",\\\"connection\\\":[{\\\"querySql\\\":[\\\"SELECT * from dual\\\"],\\\"jdbcUrl\\\":[\\\"jdbc:dm://ip:port/database\\\"]}],\\\"fetchSize\\\":1024,\\\"where\\\":\\\"1 = 1\\\"}},\\\"writer\\\":{\\\"name\\\":\\\"streamwriter\\\",\\\"parameter\\\":{\\\"print\\\":true}}}]}}\"}"); + +// "{\"customConfig\":1,\"json\":\"{\\\"job\\\":{\\\"setting\\\":{\\\"speed\\\":{\\\"byte\\\":1048576},\\\"errorLimit\\\":{\\\"record\\\":0,\\\"percentage\\\":0.02}},\\\"content\\\":[{\\\"reader\\\":{\\\"name\\\":\\\"rdbmsreader\\\",\\\"parameter\\\":{\\\"username\\\":\\\"xxx\\\",\\\"password\\\":\\\"xxx\\\",\\\"column\\\":[\\\"id\\\",\\\"name\\\"],\\\"splitPk\\\":\\\"pk\\\",\\\"connection\\\":[{\\\"querySql\\\":[\\\"SELECT * from dual\\\"],\\\"jdbcUrl\\\":[\\\"jdbc:dm://ip:port/database\\\"]}],\\\"fetchSize\\\":1024,\\\"where\\\":\\\"1 = 1\\\"}},\\\"writer\\\":{\\\"name\\\":\\\"streamwriter\\\",\\\"parameter\\\":{\\\"print\\\":true}}}]}}\"}"); + } else { + props.setTaskParams( + "{\"customConfig\":0,\"targetTable\":\"test\",\"postStatements\":[],\"jobSpeedByte\":1024,\"jobSpeedRecord\":1000,\"dtType\":\"MYSQL\",\"datasource\":1,\"dsType\":\"MYSQL\",\"datatarget\":2,\"jobSpeedByte\":0,\"sql\":\"select 1 as test from dual\",\"preStatements\":[\"delete from test\"],\"postStatements\":[\"delete from test\"]}"); + + } + + dataxTask = PowerMockito.spy(new DataxTask(taskExecutionContext, logger)); + dataxTask.init(); } private DataSource getDataSource() { DataSource dataSource = new DataSource(); dataSource.setType(DbType.MYSQL); - dataSource.setConnectionParams( - "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"); + dataSource.setConnectionParams(CONNECTION_PARAMS); dataSource.setUserId(1); return dataSource; } private ProcessInstance getProcessInstance() { ProcessInstance processInstance = new ProcessInstance(); - processInstance.setCommandType(CommandType.START_PROCESS); + processInstance.setCommandType(START_PROCESS); processInstance.setScheduleTime(new Date()); return processInstance; } @@ -118,11 +162,11 @@ public class DataxTaskTest { public void testDataxTask() throws Exception { TaskProps props = new TaskProps(); - props.setTaskDir("/tmp"); + props.setExecutePath("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTaskInstId(1); + props.setTaskInstanceId(1); props.setTenantCode("1"); - Assert.assertNotNull(new DataxTask(props, logger)); + Assert.assertNotNull(new DataxTask(null, logger)); } /** @@ -144,13 +188,6 @@ public class DataxTaskTest { @Test public void testHandle() throws Exception { - try { - dataxTask.handle(); - } catch (RuntimeException e) { - if (e.getMessage().indexOf("process error . exitCode is : -1") < 0) { - Assert.fail(); - } - } } /** @@ -229,18 +266,24 @@ public class DataxTaskTest { */ @Test public void testBuildDataxJsonFile() - throws Exception { + throws Exception { try { - Method method = DataxTask.class.getDeclaredMethod("buildDataxJsonFile"); - method.setAccessible(true); - String filePath = (String) method.invoke(dataxTask, null); - Assert.assertNotNull(filePath); - } - catch (Exception e) { + setTaskParems(1); + buildDataJson(); + setTaskParems(0); + buildDataJson(); + } catch (Exception e) { Assert.fail(e.getMessage()); } } + public void buildDataJson() throws Exception { + Method method = DataxTask.class.getDeclaredMethod("buildDataxJsonFile"); + method.setAccessible(true); + String filePath = (String) method.invoke(dataxTask, null); + Assert.assertNotNull(filePath); + } + /** * Method: buildDataxJobContentJson() */ diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java deleted file mode 100644 index 272fb546da..0000000000 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTaskTest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.worker.task.dependent; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.junit.Assert; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DependentTaskTest { - - private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class); - - - @Test - public void testDependInit() throws Exception{ - - TaskProps taskProps = new TaskProps(); - - String dependString = "{\n" + - "\"dependTaskList\":[\n" + - " {\n" + - " \"dependItemList\":[\n" + - " {\n" + - " \"definitionId\": 101,\n" + - " \"depTasks\": \"ALL\",\n" + - " \"cycle\": \"day\",\n" + - " \"dateValue\": \"last1Day\"\n" + - " }\n" + - " ],\n" + - " \"relation\": \"AND\"\n" + - " }\n" + - " ],\n" + - "\"relation\":\"OR\"\n" + - "}"; - - taskProps.setTaskInstId(252612); - taskProps.setDependence(dependString); - DependentTask dependentTask = new DependentTask(taskProps, logger); - dependentTask.init(); - dependentTask.handle(); - Assert.assertEquals(dependentTask.getExitStatusCode(), Constants.EXIT_CODE_FAILURE ); - } - - - -} \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java index ebe90147d1..c30f33c683 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java @@ -21,17 +21,15 @@ import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.junit.After; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; +import org.junit.*; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; @@ -45,6 +43,7 @@ import java.util.Date; */ @RunWith(PowerMockRunner.class) @PrepareForTest(OSUtils.class) +@PowerMockIgnore({"javax.management.*"}) public class ShellTaskTest { private static final Logger logger = LoggerFactory.getLogger(ShellTaskTest.class); @@ -56,9 +55,12 @@ public class ShellTaskTest { private ShellCommandExecutor shellCommandExecutor; private ApplicationContext applicationContext; + private TaskExecutionContext taskExecutionContext; @Before public void before() throws Exception { + taskExecutionContext = new TaskExecutionContext(); + PowerMockito.mockStatic(OSUtils.class); processService = PowerMockito.mock(ProcessService.class); shellCommandExecutor = PowerMockito.mock(ShellCommandExecutor.class); @@ -69,23 +71,22 @@ public class ShellTaskTest { PowerMockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskProps props = new TaskProps(); - props.setTaskDir("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTaskInstId(1); props.setTenantCode("1"); props.setEnvFile(".dolphinscheduler_env.sh"); props.setTaskStartTime(new Date()); props.setTaskTimeout(0); props.setTaskParams("{\"rawScript\": \" echo 'hello world!'\"}"); - shellTask = new ShellTask(props, logger); + shellTask = new ShellTask(taskExecutionContext, logger); shellTask.init(); PowerMockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); PowerMockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); PowerMockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); - String fileName = String.format("%s/%s_node.%s", props.getTaskDir(), props.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); - PowerMockito.when(shellCommandExecutor.run(fileName, processService)).thenReturn(0); + String fileName = String.format("%s/%s_node.%s", taskExecutionContext.getExecutePath(), + props.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); + PowerMockito.when(shellCommandExecutor.run("")).thenReturn(null); } private DataSource getDataSource() { @@ -114,11 +115,9 @@ public class ShellTaskTest { public void testShellTask() throws Exception { TaskProps props = new TaskProps(); - props.setTaskDir("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTaskInstId(1); props.setTenantCode("1"); - ShellTask shellTaskTest = new ShellTask(props, logger); + ShellTask shellTaskTest = new ShellTask(taskExecutionContext, logger); Assert.assertNotNull(shellTaskTest); } @@ -136,6 +135,26 @@ public class ShellTaskTest { } } + @Test + public void testInitException() { + TaskProps props = new TaskProps(); + props.setTaskAppId(String.valueOf(System.currentTimeMillis())); + props.setTenantCode("1"); + props.setEnvFile(".dolphinscheduler_env.sh"); + props.setTaskStartTime(new Date()); + props.setTaskTimeout(0); + props.setTaskParams("{\"rawScript\": \"\"}"); + ShellTask shellTask = new ShellTask(taskExecutionContext, logger); + try { + shellTask.init(); + } catch (Exception e) { + logger.info(e.getMessage(), e); + if (e.getMessage().contains("shell task params is not valid")) { + Assert.assertTrue(true); + } + } + } + /** * Method: init for Windows */ @@ -157,7 +176,18 @@ public class ShellTaskTest { public void testHandleForUnix() throws Exception { try { PowerMockito.when(OSUtils.isWindows()).thenReturn(false); - shellTask.handle(); + TaskProps props = new TaskProps(); + props.setTaskAppId(String.valueOf(System.currentTimeMillis())); + props.setTenantCode("1"); + props.setEnvFile(".dolphinscheduler_env.sh"); + props.setTaskStartTime(new Date()); + props.setTaskTimeout(0); + props.setScheduleTime(new Date()); + props.setCmdTypeIfComplement(CommandType.START_PROCESS); + props.setTaskParams("{\"rawScript\": \" echo ${test}\", \"localParams\": [{\"prop\":\"test\", \"direct\":\"IN\", \"type\":\"VARCHAR\", \"value\":\"123\"}]}"); + ShellTask shellTask1 = new ShellTask(taskExecutionContext, logger); + shellTask1.init(); + shellTask1.handle(); Assert.assertTrue(true); } catch (Error | Exception e) { if (!e.getMessage().contains("process error . exitCode is : -1") @@ -174,7 +204,18 @@ public class ShellTaskTest { public void testHandleForWindows() throws Exception { try { Assume.assumeTrue(OSUtils.isWindows()); - shellTask.handle(); + TaskProps props = new TaskProps(); + props.setTaskAppId(String.valueOf(System.currentTimeMillis())); + props.setTenantCode("1"); + props.setEnvFile(".dolphinscheduler_env.sh"); + props.setTaskStartTime(new Date()); + props.setTaskTimeout(0); + props.setScheduleTime(new Date()); + props.setCmdTypeIfComplement(CommandType.START_PROCESS); + props.setTaskParams("{\"rawScript\": \" echo ${test}\", \"localParams\": [{\"prop\":\"test\", \"direct\":\"IN\", \"type\":\"VARCHAR\", \"value\":\"123\"}]}"); + ShellTask shellTask1 = new ShellTask(taskExecutionContext, logger); + shellTask1.init(); + shellTask1.handle(); Assert.assertTrue(true); } catch (Error | Exception e) { if (!e.getMessage().contains("process error . exitCode is : -1")) { diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java index a18e0b2a9d..f0bcd9ec27 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java @@ -135,7 +135,7 @@ public class SparkTaskTest { logger.info("spark task command : {}", sparkArgs); - Assert.assertEquals(sparkArgs.split(" ")[0], SPARK2_COMMAND ); + Assert.assertEquals(SPARK2_COMMAND, sparkArgs.split(" ")[0]); } } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java index 511102e4b5..bfc8205c2d 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java @@ -16,10 +16,11 @@ */ package org.apache.dolphinscheduler.server.worker.task.sqoop; -import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.JSON; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; @@ -58,47 +59,45 @@ public class SqoopTaskTest { Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskProps props = new TaskProps(); - props.setTaskDir("/tmp"); props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTaskInstId(1); props.setTenantCode("1"); props.setEnvFile(".dolphinscheduler_env.sh"); props.setTaskStartTime(new Date()); props.setTaskTimeout(0); props.setTaskParams("{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); - sqoopTask = new SqoopTask(props,logger); + sqoopTask = new SqoopTask(new TaskExecutionContext(),logger); sqoopTask.init(); } @Test public void testGenerator(){ String data1 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters sqoopParameters1 = JSONObject.parseObject(data1,SqoopParameters.class); + SqoopParameters sqoopParameters1 = JSON.parseObject(data1,SqoopParameters.class); SqoopJobGenerator generator = new SqoopJobGenerator(); - String script = generator.generateSqoopJob(sqoopParameters1); + String script = generator.generateSqoopJob(sqoopParameters1,new TaskExecutionContext()); String expected = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; Assert.assertEquals(expected, script); String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters sqoopParameters2 = JSONObject.parseObject(data2,SqoopParameters.class); + SqoopParameters sqoopParameters2 = JSON.parseObject(data2,SqoopParameters.class); - String script2 = generator.generateSqoopJob(sqoopParameters2); + String script2 = generator.generateSqoopJob(sqoopParameters2,new TaskExecutionContext()); String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; Assert.assertEquals(expected2, script2); String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters sqoopParameters3 = JSONObject.parseObject(data3,SqoopParameters.class); + SqoopParameters sqoopParameters3 = JSON.parseObject(data3,SqoopParameters.class); - String script3 = generator.generateSqoopJob(sqoopParameters3); + String script3 = generator.generateSqoopJob(sqoopParameters3,new TaskExecutionContext()); String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'"; Assert.assertEquals(expected3, script3); String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; - SqoopParameters sqoopParameters4 = JSONObject.parseObject(data4,SqoopParameters.class); + SqoopParameters sqoopParameters4 = JSON.parseObject(data4,SqoopParameters.class); - String script4 = generator.generateSqoopJob(sqoopParameters4); + String script4 = generator.generateSqoopJob(sqoopParameters4,new TaskExecutionContext()); String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; Assert.assertEquals(expected4, script4); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/SpringZKServer.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/SpringZKServer.java new file mode 100644 index 0000000000..ec42cad1ce --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/zk/SpringZKServer.java @@ -0,0 +1,178 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.zk; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.retry.ExponentialBackoffRetry; +import org.apache.zookeeper.server.ZooKeeperServerMain; +import org.apache.zookeeper.server.quorum.QuorumPeerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.PriorityOrdered; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import java.io.File; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + + +/** + * just for test + */ +@Service +public class SpringZKServer implements PriorityOrdered { + + private static final Logger logger = LoggerFactory.getLogger(SpringZKServer.class); + + private static volatile PublicZooKeeperServerMain zkServer = null; + + public static final int DEFAULT_ZK_TEST_PORT = 2181; + + public static final String DEFAULT_ZK_STR = "localhost:" + DEFAULT_ZK_TEST_PORT; + + private static String dataDir = null; + + private static final AtomicBoolean isStarted = new AtomicBoolean(false); + + @PostConstruct + public void start() { + try { + startLocalZkServer(DEFAULT_ZK_TEST_PORT); + } catch (Exception e) { + logger.error("Failed to start ZK: " + e); + } + } + + public static boolean isStarted(){ + return isStarted.get(); + } + + + @Override + public int getOrder() { + return PriorityOrdered.HIGHEST_PRECEDENCE; + } + + static class PublicZooKeeperServerMain extends ZooKeeperServerMain { + + @Override + public void initializeAndRun(String[] args) + throws QuorumPeerConfig.ConfigException, IOException { + super.initializeAndRun(args); + } + + @Override + public void shutdown() { + super.shutdown(); + } + } + + /** + * Starts a local Zk instance with a generated empty data directory + * + * @param port The port to listen on + */ + public void startLocalZkServer(final int port) { + startLocalZkServer(port, org.apache.commons.io.FileUtils.getTempDirectoryPath() + File.separator + "test-" + System.currentTimeMillis()); + } + + /** + * Starts a local Zk instance + * + * @param port The port to listen on + * @param dataDirPath The path for the Zk data directory + */ + private void startLocalZkServer(final int port, final String dataDirPath) { + if (zkServer != null) { + throw new RuntimeException("Zookeeper server is already started!"); + } + try { + zkServer = new PublicZooKeeperServerMain(); + logger.info("Zookeeper data path : {} ", dataDirPath); + dataDir = dataDirPath; + final String[] args = new String[]{Integer.toString(port), dataDirPath}; + Thread init = new Thread(new Runnable() { + @Override + public void run() { + try { + System.setProperty("zookeeper.jmx.log4j.disable", "true"); + zkServer.initializeAndRun(args); + } catch (QuorumPeerConfig.ConfigException e) { + logger.warn("Caught exception while starting ZK", e); + } catch (IOException e) { + logger.warn("Caught exception while starting ZK", e); + } + } + }, "init-zk-thread"); + init.start(); + } catch (Exception e) { + logger.warn("Caught exception while starting ZK", e); + throw new RuntimeException(e); + } + + CuratorFramework zkClient = CuratorFrameworkFactory.builder() + .connectString(DEFAULT_ZK_STR) + .retryPolicy(new ExponentialBackoffRetry(10,100)) + .sessionTimeoutMs(1000 * 30) + .connectionTimeoutMs(1000 * 30) + .build(); + + try { + zkClient.blockUntilConnected(10, TimeUnit.SECONDS); + zkClient.close(); + } catch (InterruptedException ignore) { + } + isStarted.compareAndSet(false, true); + logger.info("zk server started"); + } + + @PreDestroy + public void stop() { + try { + stopLocalZkServer(true); + logger.info("zk server stopped"); + + } catch (Exception e) { + logger.error("Failed to stop ZK ",e); + } + } + + /** + * Stops a local Zk instance. + * + * @param deleteDataDir Whether or not to delete the data directory + */ + private void stopLocalZkServer(final boolean deleteDataDir) { + if (zkServer != null) { + try { + zkServer.shutdown(); + zkServer = null; + if (deleteDataDir) { + org.apache.commons.io.FileUtils.deleteDirectory(new File(dataDir)); + } + isStarted.compareAndSet(true, false); + } catch (Exception e) { + logger.warn("Caught exception while stopping ZK server", e); + throw new RuntimeException(e); + } + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-service/pom.xml b/dolphinscheduler-service/pom.xml index 03bb94dc29..c150e834b9 100644 --- a/dolphinscheduler-service/pom.xml +++ b/dolphinscheduler-service/pom.xml @@ -1,5 +1,20 @@ - + diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java index 01a49910df..8e63c89405 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java @@ -20,7 +20,7 @@ import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.log.*; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; -import org.apache.dolphinscheduler.remote.utils.Address; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,7 +40,7 @@ public class LogClientService { /** * request time out */ - private final long logRequestTimeout = 10 * 1000; + private static final long LOG_REQUEST_TIMEOUT = 10 * 1000L; /** * construct client @@ -72,10 +72,10 @@ public class LogClientService { logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit); RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit); String result = ""; - final Address address = new Address(host, port); + final Host address = new Host(host, port); try { Command command = request.convert2Command(); - Command response = this.client.sendSync(address, command, logRequestTimeout); + Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if(response != null){ RollViewLogResponseCommand rollReviewLog = FastJsonSerializer.deserialize( response.getBody(), RollViewLogResponseCommand.class); @@ -100,10 +100,10 @@ public class LogClientService { logger.info("view log path {}", path); ViewLogRequestCommand request = new ViewLogRequestCommand(path); String result = ""; - final Address address = new Address(host, port); + final Host address = new Host(host, port); try { Command command = request.convert2Command(); - Command response = this.client.sendSync(address, command, logRequestTimeout); + Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if(response != null){ ViewLogResponseCommand viewLog = FastJsonSerializer.deserialize( response.getBody(), ViewLogResponseCommand.class); @@ -128,10 +128,10 @@ public class LogClientService { logger.info("log path {}", path); GetLogBytesRequestCommand request = new GetLogBytesRequestCommand(path); byte[] result = null; - final Address address = new Address(host, port); + final Host address = new Host(host, port); try { Command command = request.convert2Command(); - Command response = this.client.sendSync(address, command, logRequestTimeout); + Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if(response != null){ GetLogBytesResponseCommand getLog = FastJsonSerializer.deserialize( response.getBody(), GetLogBytesResponseCommand.class); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java index 027666f053..9f93f4ce3e 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.service.permission; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -45,6 +46,11 @@ public class PermissionCheck { */ private T[] needChecks; + /** + * resoruce info + */ + private List resourceList; + /** * user id */ @@ -90,6 +96,22 @@ public class PermissionCheck { this.logger = logger; } + /** + * permission check + * @param logger + * @param authorizationType + * @param processService + * @param resourceList + * @param userId + */ + public PermissionCheck(AuthorizationType authorizationType, ProcessService processService, List resourceList, int userId,Logger logger) { + this.authorizationType = authorizationType; + this.processService = processService; + this.resourceList = resourceList; + this.userId = userId; + this.logger = logger; + } + public AuthorizationType getAuthorizationType() { return authorizationType; } @@ -122,6 +144,14 @@ public class PermissionCheck { this.userId = userId; } + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + /** * has permission * @return true if has permission @@ -141,13 +171,14 @@ public class PermissionCheck { */ public void checkPermission() throws Exception{ if(this.needChecks.length > 0){ + // get user type in order to judge whether the user is admin User user = processService.getUserById(userId); if (user.getUserType() != UserType.ADMIN_USER){ List unauthorizedList = processService.listUnauthorized(userId,needChecks,authorizationType); // if exist unauthorized resource if(CollectionUtils.isNotEmpty(unauthorizedList)){ - logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList.toString()); + logger.error("user {} didn't has permission of {}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList); throw new RuntimeException(String.format("user %s didn't has permission of %s %s", user.getUserName(), authorizationType.getDescp(), unauthorizedList.get(0))); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 8bcd64f1fd..26462d2337 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.service.process; +import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import com.cronutils.model.Cron; import org.apache.commons.lang.ArrayUtils; @@ -29,7 +30,6 @@ import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; -import org.apache.dolphinscheduler.service.queue.ITaskQueue; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -98,11 +98,6 @@ public class ProcessService { @Autowired private ProjectMapper projectMapper; - /** - * task queue impl - */ - @Autowired - private ITaskQueue taskQueue; /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * @param logger logger @@ -124,6 +119,10 @@ public class ProcessService { logger.info("there is not enough thread for this command: {}", command); return setWaitingThreadProcess(command, processInstance); } + if (processInstance.getCommandType().equals(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS)){ + delCommandByid(command.getId()); + return null; + } processInstance.setCommandType(command.getCommandType()); processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); @@ -207,7 +206,7 @@ public class ProcessService { CommandType commandType = command.getCommandType(); if(cmdTypeMap.containsKey(commandType)){ - JSONObject cmdParamObj = (JSONObject) JSONObject.parse(command.getCommandParam()); + JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam()); JSONObject tempObj; int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING); @@ -215,7 +214,7 @@ public class ProcessService { // for all commands for (Command tmpCommand:commands){ if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){ - tempObj = (JSONObject) JSONObject.parse(tmpCommand.getCommandParam()); + tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam()); if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){ isNeedCreate = false; break; @@ -235,6 +234,30 @@ public class ProcessService { return processInstanceMapper.queryDetailById(processId); } + /** + * get task node list by definitionId + * @param defineId + * @return + */ + public List getTaskNodeListByDefinitionId(Integer defineId){ + ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); + if (processDefinition == null) { + logger.info("process define not exists"); + return null; + } + + String processDefinitionJson = processDefinition.getProcessDefinitionJson(); + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + + //process data check + if (null == processData) { + logger.error("process data is null"); + return null; + } + + return processData.getTasks(); + } + /** * find process instance by id * @param processId processId @@ -309,7 +332,7 @@ public class ProcessService { for (TaskNode taskNode : taskNodeList){ String parameter = taskNode.getParams(); if (parameter.contains(CMDPARAM_SUB_PROCESS_DEFINE_ID)){ - SubProcessParameters subProcessParam = JSONObject.parseObject(parameter, SubProcessParameters.class); + SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class); ids.add(subProcessParam.getProcessDefinitionId()); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids); } @@ -433,8 +456,8 @@ public class ProcessService { processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); // set process instance priority processInstance.setProcessInstancePriority(command.getProcessInstancePriority()); - int workerGroupId = command.getWorkerGroupId() == 0 ? -1 : command.getWorkerGroupId(); - processInstance.setWorkerGroupId(workerGroupId); + String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup(); + processInstance.setWorkerGroup(workerGroup); processInstance.setTimeout(processDefinition.getTimeout()); processInstance.setTenantId(processDefinition.getTenantId()); return processInstance; @@ -763,14 +786,13 @@ public class ProcessService { * submit task to db * submit sub process to command * @param taskInstance taskInstance - * @param processInstance processInstance * @return task instance */ @Transactional(rollbackFor = Exception.class) - public TaskInstance submitTask(TaskInstance taskInstance, ProcessInstance processInstance){ - logger.info("start submit task : {}, instance id:{}, state: {}, ", - taskInstance.getName(), processInstance.getId(), processInstance.getState() ); - processInstance = this.findProcessInstanceDetailById(processInstance.getId()); + public TaskInstance submitTask(TaskInstance taskInstance){ + ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); + logger.info("start submit task : {}, instance id:{}, state: {}", + taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); //submit to db TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if(task == null){ @@ -961,40 +983,6 @@ public class ProcessService { return taskInstance; } - /** - * submit task to queue - * @param taskInstance taskInstance - * @return whether submit task to queue success - */ - public Boolean submitTaskToQueue(TaskInstance taskInstance) { - - try{ - if(taskInstance.isSubProcess()){ - return true; - } - if(taskInstance.getState().typeIsFinished()){ - logger.info("submit to task queue, but task [{}] state [{}] is already finished. ", taskInstance.getName(), taskInstance.getState()); - return true; - } - // task cannot submit when running - if(taskInstance.getState() == ExecutionStatus.RUNNING_EXEUTION){ - logger.info("submit to task queue, but task [{}] state already be running. ", taskInstance.getName()); - return true; - } - if(checkTaskExistsInTaskQueue(taskInstance)){ - logger.info("submit to task queue, but task [{}] already exists in the queue.", taskInstance.getName()); - return true; - } - logger.info("task ready to queue: {}" , taskInstance); - boolean insertQueueResult = taskQueue.add(DOLPHINSCHEDULER_TASKS_QUEUE, taskZkInfo(taskInstance)); - logger.info("master insert into queue success, task : {}", taskInstance.getName()); - return insertQueueResult; - }catch (Exception e){ - logger.error("submit task to queue Exception: ", e); - logger.error("task queue error : {}", JSONUtils.toJson(taskInstance)); - return false; - } - } /** * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${task executed by ip1},${ip2}... @@ -1004,7 +992,7 @@ public class ProcessService { */ public String taskZkInfo(TaskInstance taskInstance) { - int taskWorkerGroupId = getTaskWorkerGroupId(taskInstance); + String taskWorkerGroup = getTaskWorkerGroup(taskInstance); ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); if(processInstance == null){ logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId()); @@ -1016,44 +1004,8 @@ public class ProcessService { sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE) .append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE) .append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE) - .append(taskInstance.getId()).append(Constants.UNDERLINE); - - if(taskWorkerGroupId > 0){ - //not to find data from db - WorkerGroup workerGroup = queryWorkerGroupById(taskWorkerGroupId); - if(workerGroup == null ){ - logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); - - sb.append(Constants.DEFAULT_WORKER_ID); - return sb.toString(); - } - - String ips = workerGroup.getIpList(); - - if(StringUtils.isBlank(ips)){ - logger.error("task:{} worker group:{} parameters(ip_list) is null, this task would be running on all workers", - taskInstance.getId(), workerGroup.getId()); - sb.append(Constants.DEFAULT_WORKER_ID); - return sb.toString(); - } - - StringBuilder ipSb = new StringBuilder(100); - String[] ipArray = ips.split(COMMA); - - for (String ip : ipArray) { - long ipLong = IpUtils.ipToLong(ip); - ipSb.append(ipLong).append(COMMA); - } - - if(ipSb.length() > 0) { - ipSb.deleteCharAt(ipSb.length() - 1); - } - - sb.append(ipSb); - }else{ - sb.append(Constants.DEFAULT_WORKER_ID); - } - + .append(taskInstance.getId()).append(Constants.UNDERLINE) + .append(taskInstance.getWorkerGroup()); return sb.toString(); } @@ -1128,7 +1080,7 @@ public class ProcessService { String taskZkInfo = taskZkInfo(taskInstance); - return taskQueue.checkTaskExists(DOLPHINSCHEDULER_TASKS_QUEUE, taskZkInfo); + return false; } /** @@ -1412,8 +1364,12 @@ public class ProcessService { */ public void changeTaskState(ExecutionStatus state, Date endTime, + int processId, + String appIds, int taskInstId) { TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); + taskInstance.setPid(processId); + taskInstance.setAppLink(appIds); taskInstance.setState(state); taskInstance.setEndTime(endTime); saveTaskInstance(taskInstance); @@ -1548,17 +1504,17 @@ public class ProcessService { * @return udf function list */ public List queryUdfFunListByids(int[] ids){ - return udfFuncMapper.queryUdfByIdStr(ids, null); } /** * find tenant code by resource name * @param resName resource name + * @param resourceType resource type * @return tenant code */ - public String queryTenantCodeByResName(String resName){ - return resourceMapper.queryTenantCodeByResourceName(resName); + public String queryTenantCodeByResName(String resName,ResourceType resourceType){ + return resourceMapper.queryTenantCodeByResourceName(resName, resourceType.ordinal()); } /** @@ -1684,13 +1640,14 @@ public class ProcessService { /** * find last running process instance * @param definitionId process definition id - * @param dateInterval dateInterval + * @param startTime start time + * @param endTime end time * @return process instance */ - public ProcessInstance findLastRunningProcess(int definitionId, DateInterval dateInterval) { + public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionId, - dateInterval.getStartTime(), - dateInterval.getEndTime(), + startTime, + endTime, stateArray); } @@ -1724,24 +1681,24 @@ public class ProcessService { } /** - * get task worker group id + * get task worker group * @param taskInstance taskInstance * @return workerGroupId */ - public int getTaskWorkerGroupId(TaskInstance taskInstance) { - int taskWorkerGroupId = taskInstance.getWorkerGroupId(); + public String getTaskWorkerGroup(TaskInstance taskInstance) { + String workerGroup = taskInstance.getWorkerGroup(); - if(taskWorkerGroupId > 0){ - return taskWorkerGroupId; + if(StringUtils.isNotBlank(workerGroup)){ + return workerGroup; } int processInstanceId = taskInstance.getProcessInstanceId(); ProcessInstance processInstance = findProcessInstanceById(processInstanceId); if(processInstance != null){ - return processInstance.getWorkerGroupId(); + return processInstance.getWorkerGroup(); } - logger.info("task : {} will use default worker group id", taskInstance.getId()); - return Constants.DEFAULT_WORKER_ID; + logger.info("task : {} will use default worker group", taskInstance.getId()); + return Constants.DEFAULT_WORKER_GROUP; } /** @@ -1790,10 +1747,18 @@ public class ProcessService { Set originResSet = new HashSet(Arrays.asList(needChecks)); switch (authorizationType){ - case RESOURCE_FILE: - Set authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getAlias()).collect(toSet()); + case RESOURCE_FILE_ID: + Set authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet()); + originResSet.removeAll(authorizedResourceFiles); + break; + case RESOURCE_FILE_NAME: + Set authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getFullName()).collect(toSet()); originResSet.removeAll(authorizedResources); break; + case UDF_FILE: + Set authorizedUdfFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet()); + originResSet.removeAll(authorizedUdfFiles); + break; case DATASOURCE: Set authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId,needChecks).stream().map(t -> t.getId()).collect(toSet()); originResSet.removeAll(authorizedDatasources); @@ -1819,5 +1784,41 @@ public class ProcessService { return userMapper.queryDetailsById(userId); } + /** + * get resource by resoruce id + * @param resoruceId resource id + * @return Resource + */ + public Resource getResourceById(int resoruceId){ + return resourceMapper.selectById(resoruceId); + } + + + /** + * list resources by ids + * @param resIds resIds + * @return resource list + */ + public List listResourceByIds(Integer[] resIds){ + return resourceMapper.listResourceByIds(resIds); + } + + /** + * format task app id in task instance + * @param taskInstance + * @return + */ + public String formatTaskAppId(TaskInstance taskInstance){ + ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId()); + ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); + + if(definition == null || processInstanceById == null){ + return ""; + } + return String.format("%s_%s_%s", + definition.getId(), + processInstanceById.getId(), + taskInstance.getId()); + } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java index d51e8e82bf..3ac6ccaedc 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.service.quartz; import com.alibaba.druid.pool.DruidDataSource; -import org.quartz.SchedulerException; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.quartz.utils.ConnectionProvider; import java.sql.Connection; @@ -28,196 +28,24 @@ import java.sql.SQLException; */ public class DruidConnectionProvider implements ConnectionProvider { - /** - * JDBC driver - */ - public String driver; + private final DruidDataSource dataSource; - /** - * JDBC URL - */ - public String URL; - - /** - * Database user name - */ - public String user; - - /** - * Database password - */ - public String password; - - /** - * Maximum number of database connections - */ - public int maxConnections; - - /** - * The query that validates the database connection - */ - public String validationQuery; - - /** - * Whether the database sql query to validate connections should be executed every time - * a connection is retrieved from the pool to ensure that it is still valid. If false, - * then validation will occur on check-in. Default is false. - */ - private boolean validateOnCheckout; - - /** - * The number of seconds between tests of idle connections - only enabled - * if the validation query property is set. Default is 50 seconds. - */ - private int idleConnectionValidationSeconds; - - /** - * The maximum number of prepared statements that will be cached per connection in the pool. - * Depending upon your JDBC Driver this may significantly help performance, or may slightly - * hinder performance. - * Default is 120, as Quartz uses over 100 unique statements. 0 disables the feature. - */ - public String maxCachedStatementsPerConnection; - - /** - * Discard connections after they have been idle this many seconds. 0 disables the feature. Default is 0. - */ - private String discardIdleConnectionsSeconds; - - /** - * Default maximum number of database connections in the pool. - */ - public static final int DEFAULT_DB_MAX_CONNECTIONS = 10; - - /** - * The maximum number of prepared statements that will be cached per connection in the pool. - */ - public static final int DEFAULT_DB_MAX_CACHED_STATEMENTS_PER_CONNECTION = 120; - - /** - * Druid connection pool - */ - private DruidDataSource datasource; + public DruidConnectionProvider(){ + this.dataSource = SpringApplicationContext.getBean(DruidDataSource.class); + } - /** - * get connection - * @return Connection - * @throws SQLException sql exception - */ @Override public Connection getConnection() throws SQLException { - return datasource.getConnection(); + return dataSource.getConnection(); } - /** - * shutdown data source - * @throws SQLException sql exception - */ @Override public void shutdown() throws SQLException { - datasource.close(); + dataSource.close(); } - /** - * data source initialize - * @throws SQLException sql exception - */ @Override - public void initialize() throws SQLException{ - if (this.URL == null) { - throw new SQLException("DBPool could not be created: DB URL cannot be null"); - } - if (this.driver == null) { - throw new SQLException("DBPool driver could not be created: DB driver class name cannot be null!"); - } - if (this.maxConnections < 0) { - throw new SQLException("DBPool maxConnectins could not be created: Max connections must be greater than zero!"); - } - datasource = new DruidDataSource(); - try{ - datasource.setDriverClassName(this.driver); - } catch (Exception e) { - try { - throw new SchedulerException("Problem setting driver class name on datasource", e); - } catch (SchedulerException e1) { - } - } - datasource.setUrl(this.URL); - datasource.setUsername(this.user); - datasource.setPassword(this.password); - datasource.setMaxActive(this.maxConnections); - datasource.setMinIdle(1); - datasource.setMaxWait(0); - datasource.setMaxPoolPreparedStatementPerConnectionSize(DEFAULT_DB_MAX_CONNECTIONS); - if (this.validationQuery != null) { - datasource.setValidationQuery(this.validationQuery); - if(!this.validateOnCheckout){ - datasource.setTestOnReturn(true); - } else { - datasource.setTestOnBorrow(true); - } - datasource.setValidationQueryTimeout(this.idleConnectionValidationSeconds); - } - } - - public String getDriver() { - return driver; - } - public void setDriver(String driver) { - this.driver = driver; - } - public String getURL() { - return URL; - } - public void setURL(String URL) { - this.URL = URL; - } - public String getUser() { - return user; - } - public void setUser(String user) { - this.user = user; - } - public String getPassword() { - return password; - } - public void setPassword(String password) { - this.password = password; - } - public int getMaxConnections() { - return maxConnections; - } - public void setMaxConnections(int maxConnections) { - this.maxConnections = maxConnections; - } - public String getValidationQuery() { - return validationQuery; - } - public void setValidationQuery(String validationQuery) { - this.validationQuery = validationQuery; - } - public boolean isValidateOnCheckout() { - return validateOnCheckout; - } - public void setValidateOnCheckout(boolean validateOnCheckout) { - this.validateOnCheckout = validateOnCheckout; - } - public int getIdleConnectionValidationSeconds() { - return idleConnectionValidationSeconds; - } - public void setIdleConnectionValidationSeconds(int idleConnectionValidationSeconds) { - this.idleConnectionValidationSeconds = idleConnectionValidationSeconds; - } - public DruidDataSource getDatasource() { - return datasource; - } - public void setDatasource(DruidDataSource datasource) { - this.datasource = datasource; - } - public String getDiscardIdleConnectionsSeconds() { - return discardIdleConnectionsSeconds; - } - public void setDiscardIdleConnectionsSeconds(String discardIdleConnectionsSeconds) { - this.discardIdleConnectionsSeconds = discardIdleConnectionsSeconds; + public void initialize() throws SQLException { + //NOP } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java index 69a80e65f5..c89b7affb8 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.quartz.Job; import org.quartz.JobDataMap; @@ -44,18 +45,8 @@ public class ProcessScheduleJob implements Job { */ private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class); - /** - * process service - */ - private static ProcessService processService; - - - /** - * init - * @param processService process dao - */ - public static void init(ProcessService processService) { - ProcessScheduleJob.processService = processService; + public ProcessService getProcessService(){ + return SpringApplicationContext.getBean(ProcessService.class); } /** @@ -67,7 +58,7 @@ public class ProcessScheduleJob implements Job { @Override public void execute(JobExecutionContext context) throws JobExecutionException { - Assert.notNull(processService, "please call init() method first"); + Assert.notNull(getProcessService(), "please call init() method first"); JobDataMap dataMap = context.getJobDetail().getJobDataMap(); @@ -83,7 +74,7 @@ public class ProcessScheduleJob implements Job { logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId); // query schedule - Schedule schedule = processService.querySchedule(scheduleId); + Schedule schedule = getProcessService().querySchedule(scheduleId); if (schedule == null) { logger.warn("process schedule does not exist in db,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); deleteJob(projectId, scheduleId); @@ -91,7 +82,7 @@ public class ProcessScheduleJob implements Job { } - ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); + ProcessDefinition processDefinition = getProcessService().findProcessDefineById(schedule.getProcessDefinitionId()); // release state : online/offline ReleaseState releaseState = processDefinition.getReleaseState(); if (processDefinition == null || releaseState == ReleaseState.OFFLINE) { @@ -107,11 +98,11 @@ public class ProcessScheduleJob implements Job { command.setScheduleTime(scheduledFireTime); command.setStartTime(fireTime); command.setWarningGroupId(schedule.getWarningGroupId()); - command.setWorkerGroupId(schedule.getWorkerGroupId()); + command.setWorkerGroup(schedule.getWorkerGroup()); command.setWarningType(schedule.getWarningType()); command.setProcessInstancePriority(schedule.getProcessInstancePriority()); - processService.createCommand(command); + getProcessService().createCommand(command); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java index 60cdb1dd97..69ca97a3d8 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java @@ -16,13 +16,19 @@ */ package org.apache.dolphinscheduler.service.quartz; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.lang.StringUtils; -import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.quartz.*; import org.quartz.impl.StdSchedulerFactory; +import org.quartz.impl.jdbcjobstore.JobStoreTX; +import org.quartz.impl.jdbcjobstore.PostgreSQLDelegate; +import org.quartz.impl.jdbcjobstore.StdJDBCDelegate; import org.quartz.impl.matchers.GroupMatcher; +import org.quartz.simpl.SimpleThreadPool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,6 +36,7 @@ import java.util.*; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import static org.apache.dolphinscheduler.common.Constants.*; import static org.quartz.CronScheduleBuilder.cronSchedule; import static org.quartz.JobBuilder.newJob; import static org.quartz.TriggerBuilder.newTrigger; @@ -59,7 +66,19 @@ public class QuartzExecutors { */ private static volatile QuartzExecutors INSTANCE = null; - private QuartzExecutors() {} + /** + * load conf + */ + private static Configuration conf; + + + private QuartzExecutors() { + try { + conf = new PropertiesConfiguration(QUARTZ_PROPERTIES_PATH); + }catch (ConfigurationException e){ + logger.warn("not loaded quartz configuration file, will used default value",e); + } + } /** * thread safe and performance promote @@ -70,9 +89,10 @@ public class QuartzExecutors { synchronized (QuartzExecutors.class) { // when more than two threads run into the first null check same time, to avoid instanced more than one time, it needs to be checked again. if (INSTANCE == null) { - INSTANCE = new QuartzExecutors(); + QuartzExecutors quartzExecutors = new QuartzExecutors(); //finish QuartzExecutors init - INSTANCE.init(); + quartzExecutors.init(); + INSTANCE = quartzExecutors; } } } @@ -87,7 +107,33 @@ public class QuartzExecutors { */ private void init() { try { - SchedulerFactory schedulerFactory = new StdSchedulerFactory(Constants.QUARTZ_PROPERTIES_PATH); + StdSchedulerFactory schedulerFactory = new StdSchedulerFactory(); + Properties properties = new Properties(); + + String dataSourceDriverClass = org.apache.dolphinscheduler.dao.utils.PropertyUtils.getString(SPRING_DATASOURCE_DRIVER_CLASS_NAME); + if (dataSourceDriverClass.equals(ORG_POSTGRESQL_DRIVER)){ + properties.setProperty(ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS,conf.getString(ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS, PostgreSQLDelegate.class.getName())); + } else { + properties.setProperty(ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS,conf.getString(ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS, StdJDBCDelegate.class.getName())); + } + properties.setProperty(ORG_QUARTZ_SCHEDULER_INSTANCENAME, conf.getString(ORG_QUARTZ_SCHEDULER_INSTANCENAME, QUARTZ_INSTANCENAME)); + properties.setProperty(ORG_QUARTZ_SCHEDULER_INSTANCEID, conf.getString(ORG_QUARTZ_SCHEDULER_INSTANCEID, QUARTZ_INSTANCEID)); + properties.setProperty(ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON,conf.getString(ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON,STRING_TRUE)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_USEPROPERTIES,conf.getString(ORG_QUARTZ_JOBSTORE_USEPROPERTIES,STRING_FALSE)); + properties.setProperty(ORG_QUARTZ_THREADPOOL_CLASS,conf.getString(ORG_QUARTZ_THREADPOOL_CLASS, SimpleThreadPool.class.getName())); + properties.setProperty(ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS,conf.getString(ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS,STRING_TRUE)); + properties.setProperty(ORG_QUARTZ_THREADPOOL_THREADCOUNT,conf.getString(ORG_QUARTZ_THREADPOOL_THREADCOUNT, QUARTZ_THREADCOUNT)); + properties.setProperty(ORG_QUARTZ_THREADPOOL_THREADPRIORITY,conf.getString(ORG_QUARTZ_THREADPOOL_THREADPRIORITY, QUARTZ_THREADPRIORITY)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_CLASS,conf.getString(ORG_QUARTZ_JOBSTORE_CLASS, JobStoreTX.class.getName())); + properties.setProperty(ORG_QUARTZ_JOBSTORE_TABLEPREFIX,conf.getString(ORG_QUARTZ_JOBSTORE_TABLEPREFIX, QUARTZ_TABLE_PREFIX)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_ISCLUSTERED,conf.getString(ORG_QUARTZ_JOBSTORE_ISCLUSTERED,STRING_TRUE)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD,conf.getString(ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD, QUARTZ_MISFIRETHRESHOLD)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL,conf.getString(ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL, QUARTZ_CLUSTERCHECKININTERVAL)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK,conf.getString(ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK, QUARTZ_ACQUIRETRIGGERSWITHINLOCK)); + properties.setProperty(ORG_QUARTZ_JOBSTORE_DATASOURCE,conf.getString(ORG_QUARTZ_JOBSTORE_DATASOURCE, QUARTZ_DATASOURCE)); + properties.setProperty(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS,conf.getString(ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS,DruidConnectionProvider.class.getName())); + + schedulerFactory.initialize(properties); scheduler = schedulerFactory.getScheduler(); } catch (SchedulerException e) { @@ -261,7 +307,7 @@ public class QuartzExecutors { */ public static String buildJobName(int processId) { StringBuilder sb = new StringBuilder(30); - sb.append(Constants.QUARTZ_JOB_PRIFIX).append(Constants.UNDERLINE).append(processId); + sb.append(QUARTZ_JOB_PRIFIX).append(UNDERLINE).append(processId); return sb.toString(); } @@ -272,7 +318,7 @@ public class QuartzExecutors { */ public static String buildJobGroupName(int projectId) { StringBuilder sb = new StringBuilder(30); - sb.append(Constants.QUARTZ_JOB_GROUP_PRIFIX).append(Constants.UNDERLINE).append(projectId); + sb.append(QUARTZ_JOB_GROUP_PRIFIX).append(UNDERLINE).append(projectId); return sb.toString(); } @@ -286,9 +332,9 @@ public class QuartzExecutors { */ public static Map buildDataMap(int projectId, int scheduleId, Schedule schedule) { Map dataMap = new HashMap<>(3); - dataMap.put(Constants.PROJECT_ID, projectId); - dataMap.put(Constants.SCHEDULE_ID, scheduleId); - dataMap.put(Constants.SCHEDULE, JSONUtils.toJson(schedule)); + dataMap.put(PROJECT_ID, projectId); + dataMap.put(SCHEDULE_ID, scheduleId); + dataMap.put(SCHEDULE, JSONUtils.toJson(schedule)); return dataMap; } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java deleted file mode 100644 index bed8a11247..0000000000 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.service.queue; - -import java.util.List; -import java.util.Set; - -public interface ITaskQueue { - - /** - * take out all the elements - * - * - * @param key - * @return - */ - List getAllTasks(String key); - - /** - * check if has a task - * @param key queue name - * @return true if has; false if not - */ - boolean hasTask(String key); - - /** - * check task exists in the task queue or not - * - * @param key queue name - * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * @return true if exists in the queue - */ - boolean checkTaskExists(String key, String task); - - /** - * add an element to the queue - * - * @param key queue name - * @param value - */ - boolean add(String key, String value); - - /** - * an element pops out of the queue - * - * @param key queue name - * @param n how many elements to poll - * @return - */ - List poll(String key, int n); - - /** - * remove a element from queue - * @param key - * @param value - */ - void removeNode(String key, String value); - - /** - * add an element to the set - * - * @param key - * @param value - */ - void sadd(String key, String value); - - /** - * delete the value corresponding to the key in the set - * - * @param key - * @param value - */ - void srem(String key, String value); - - /** - * gets all the elements of the set based on the key - * - * @param key - * @return - */ - Set smembers(String key); - - - /** - * clear the task queue for use by junit tests only - */ - void delete(); -} \ No newline at end of file diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java new file mode 100644 index 0000000000..3ad9aef6c5 --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.service.queue; + + +public interface TaskPriorityQueue { + + /** + * put task info + * + * @param taskInfo taskInfo + * @throws Exception + */ + void put(String taskInfo) throws Exception; + + /** + * take taskInfo + * @return taskInfo + * @throws Exception + */ + String take()throws Exception; + + /** + * size + * + * @return size + * @throws Exception + */ + int size() throws Exception; +} \ No newline at end of file diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java new file mode 100644 index 0000000000..0a0fb1b9b0 --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.service.queue; + + +import org.springframework.stereotype.Service; + +import java.util.*; +import java.util.concurrent.PriorityBlockingQueue; + +import static org.apache.dolphinscheduler.common.Constants.*; + +/** + * A singleton of a task queue implemented with zookeeper + * tasks queue implementation + */ +@Service +public class TaskPriorityQueueImpl implements TaskPriorityQueue { + /** + * queue size + */ + private static final Integer QUEUE_MAX_SIZE = 3000; + + /** + * queue + */ + private PriorityBlockingQueue queue = new PriorityBlockingQueue<>(QUEUE_MAX_SIZE, new TaskInfoComparator()); + + /** + * put task takePriorityInfo + * + * @param taskPriorityInfo takePriorityInfo + * @throws Exception + */ + @Override + public void put(String taskPriorityInfo) throws Exception { + queue.put(taskPriorityInfo); + } + + /** + * take taskInfo + * @return taskInfo + * @throws Exception + */ + @Override + public String take() throws Exception { + return queue.take(); + } + + /** + * queue size + * @return size + * @throws Exception + */ + @Override + public int size() throws Exception { + return queue.size(); + } + + /** + * TaskInfoComparator + */ + private class TaskInfoComparator implements Comparator{ + + /** + * compare o1 o2 + * @param o1 o1 + * @param o2 o2 + * @return compare result + */ + @Override + public int compare(String o1, String o2) { + String s1 = o1; + String s2 = o2; + String[] s1Array = s1.split(UNDERLINE); + if(s1Array.length > TASK_INFO_LENGTH){ + // warning: if this length > 5, need to be changed + s1 = s1.substring(0, s1.lastIndexOf(UNDERLINE) ); + } + + String[] s2Array = s2.split(UNDERLINE); + if(s2Array.length > TASK_INFO_LENGTH){ + // warning: if this length > 5, need to be changed + s2 = s2.substring(0, s2.lastIndexOf(UNDERLINE) ); + } + + return s1.compareTo(s2); + } + } +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java deleted file mode 100644 index 6be419f5a9..0000000000 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.service.queue; - -import org.apache.commons.lang.StringUtils; -import org.apache.dolphinscheduler.common.utils.CommonUtils; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * task queue factory - */ -public class TaskQueueFactory { - - private static final Logger logger = LoggerFactory.getLogger(TaskQueueFactory.class); - - - private TaskQueueFactory(){ - - } - - - /** - * get instance (singleton) - * - * @return instance - */ - public static ITaskQueue getTaskQueueInstance() { - String queueImplValue = CommonUtils.getQueueImplValue(); - if (StringUtils.isNotBlank(queueImplValue)) { - logger.info("task queue impl use zookeeper "); - return SpringApplicationContext.getBean(TaskQueueZkImpl.class); - }else{ - logger.error("property dolphinscheduler.queue.impl can't be blank, system will exit "); - System.exit(-1); - } - - return null; - } -} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java deleted file mode 100644 index 9c1d318ea5..0000000000 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java +++ /dev/null @@ -1,375 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.service.queue; - - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.IpUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.service.zk.ZookeeperOperator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Service; - -import java.util.*; - -/** - * A singleton of a task queue implemented with zookeeper - * tasks queue implementation - */ -@Service -public class TaskQueueZkImpl implements ITaskQueue { - - private static final Logger logger = LoggerFactory.getLogger(TaskQueueZkImpl.class); - - private final ZookeeperOperator zookeeperOperator; - - @Autowired - public TaskQueueZkImpl(ZookeeperOperator zookeeperOperator) { - this.zookeeperOperator = zookeeperOperator; - - try { - String tasksQueuePath = getTasksPath(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - String tasksKillPath = getTasksPath(Constants.DOLPHINSCHEDULER_TASKS_KILL); - - for (String key : new String[]{tasksQueuePath,tasksKillPath}){ - if (!zookeeperOperator.isExisted(key)){ - zookeeperOperator.persist(key, ""); - logger.info("create tasks queue parent node success : {}", key); - } - } - } catch (Exception e) { - logger.error("create tasks queue parent node failure", e); - } - } - - - /** - * get all tasks from tasks queue - * @param key task queue name - * @return - */ - @Override - public List getAllTasks(String key) { - try { - List list = zookeeperOperator.getChildrenKeys(getTasksPath(key)); - return list; - } catch (Exception e) { - logger.error("get all tasks from tasks queue exception",e); - } - return Collections.emptyList(); - } - - /** - * check if has a task - * @param key queue name - * @return true if has; false if not - */ - @Override - public boolean hasTask(String key) { - try { - return zookeeperOperator.hasChildren(getTasksPath(key)); - } catch (Exception e) { - logger.error("check has task in tasks queue exception",e); - } - return false; - } - - /** - * check task exists in the task queue or not - * - * @param key queue name - * @param task ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * @return true if exists in the queue - */ - @Override - public boolean checkTaskExists(String key, String task) { - String taskPath = getTasksPath(key) + Constants.SINGLE_SLASH + task; - - return zookeeperOperator.isExisted(taskPath); - - } - - - /** - * add task to tasks queue - * - * @param key task queue name - * @param value ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,... - */ - @Override - public boolean add(String key, String value){ - try { - String taskIdPath = getTasksPath(key) + Constants.SINGLE_SLASH + value; - zookeeperOperator.persist(taskIdPath, value); - return true; - } catch (Exception e) { - logger.error("add task to tasks queue exception",e); - return false; - } - - } - - - /** - * An element pops out of the queue

- * note: - * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_host1,host2,... - * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. - * - * @param key task queue name - * @param tasksNum how many elements to poll - * @return the task ids to be executed - */ - @Override - public List poll(String key, int tasksNum) { - try{ - List list = zookeeperOperator.getChildrenKeys(getTasksPath(key)); - - if(list != null && list.size() > 0){ - - String workerIp = OSUtils.getHost(); - String workerIpLongStr = String.valueOf(IpUtils.ipToLong(workerIp)); - - int size = list.size(); - - Set taskTreeSet = new TreeSet<>(new Comparator() { - @Override - public int compare(String o1, String o2) { - - String s1 = o1; - String s2 = o2; - String[] s1Array = s1.split(Constants.UNDERLINE); - if(s1Array.length>4){ - // warning: if this length > 5, need to be changed - s1 = s1.substring(0, s1.lastIndexOf(Constants.UNDERLINE) ); - } - - String[] s2Array = s2.split(Constants.UNDERLINE); - if(s2Array.length>4){ - // warning: if this length > 5, need to be changed - s2 = s2.substring(0, s2.lastIndexOf(Constants.UNDERLINE) ); - } - - return s1.compareTo(s2); - } - }); - - for (int i = 0; i < size; i++) { - - String taskDetail = list.get(i); - String[] taskDetailArrs = taskDetail.split(Constants.UNDERLINE); - - //forward compatibility - if(taskDetailArrs.length >= 4){ - - //format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - String formatTask = String.format("%s_%010d_%s_%010d", taskDetailArrs[0], Long.parseLong(taskDetailArrs[1]), taskDetailArrs[2], Long.parseLong(taskDetailArrs[3])); - if(taskDetailArrs.length > 4){ - String taskHosts = taskDetailArrs[4]; - - //task can assign to any worker host if equals default ip value of worker server - if(!taskHosts.equals(String.valueOf(Constants.DEFAULT_WORKER_ID))){ - String[] taskHostsArr = taskHosts.split(Constants.COMMA); - if(!Arrays.asList(taskHostsArr).contains(workerIpLongStr)){ - continue; - } - } - formatTask += Constants.UNDERLINE + taskDetailArrs[4]; - } - taskTreeSet.add(formatTask); - } - } - - List tasksList = getTasksListFromTreeSet(tasksNum, taskTreeSet); - - logger.info("consume tasks: {},there still have {} tasks need to be executed", Arrays.toString(tasksList.toArray()), size - tasksList.size()); - - return tasksList; - }else{ - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - } - - } catch (Exception e) { - logger.error("add task to tasks queue exception",e); - } - return Collections.emptyList(); - } - - - /** - * get task list from tree set - * - * @param tasksNum - * @param taskTreeSet - */ - public List getTasksListFromTreeSet(int tasksNum, Set taskTreeSet) { - Iterator iterator = taskTreeSet.iterator(); - int j = 0; - List tasksList = new ArrayList<>(tasksNum); - while(iterator.hasNext()){ - if(j++ >= tasksNum){ - break; - } - String task = iterator.next(); - tasksList.add(getOriginTaskFormat(task)); - } - return tasksList; - } - - /** - * format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - * processInstanceId and task id need to be convert to int. - * @param formatTask - * @return - */ - private String getOriginTaskFormat(String formatTask){ - String[] taskArray = formatTask.split(Constants.UNDERLINE); - if(taskArray.length< 4){ - return formatTask; - } - int processInstanceId = Integer.parseInt(taskArray[1]); - int taskId = Integer.parseInt(taskArray[3]); - - StringBuilder sb = new StringBuilder(50); - String destTask = String.format("%s_%s_%s_%s", taskArray[0], processInstanceId, taskArray[2], taskId); - - sb.append(destTask); - - if(taskArray.length > 4){ - for(int index = 4; index < taskArray.length; index++){ - sb.append(Constants.UNDERLINE).append(taskArray[index]); - } - } - return sb.toString(); - } - - @Override - public void removeNode(String key, String nodeValue){ - - String tasksQueuePath = getTasksPath(key) + Constants.SINGLE_SLASH; - String taskIdPath = tasksQueuePath + nodeValue; - logger.info("removeNode task {}", taskIdPath); - try{ - zookeeperOperator.remove(taskIdPath); - - }catch(Exception e){ - logger.error("delete task:{} from zookeeper fail, exception:" ,nodeValue ,e); - } - - } - - - - /** - * In order to be compatible with redis implementation - * - * To be compatible with the redis implementation, add an element to the set - * @param key The key is the kill/cancel queue path name - * @param value host-taskId The name of the zookeeper node - */ - @Override - public void sadd(String key,String value) { - try { - - if(value != null && value.trim().length() > 0){ - String path = getTasksPath(key) + Constants.SINGLE_SLASH; - if(!zookeeperOperator.isExisted(path + value)){ - zookeeperOperator.persist(path + value,value); - logger.info("add task:{} to tasks set ",value); - } else{ - logger.info("task {} exists in tasks set ",value); - } - - }else{ - logger.warn("add host-taskId:{} to tasks set is empty ",value); - } - - } catch (Exception e) { - logger.error("add task to tasks set exception",e); - } - } - - - /** - * delete the value corresponding to the key in the set - * @param key The key is the kill/cancel queue path name - * @param value host-taskId-taskType The name of the zookeeper node - */ - @Override - public void srem(String key, String value) { - try{ - String path = getTasksPath(key) + Constants.SINGLE_SLASH; - zookeeperOperator.remove(path + value); - - }catch(Exception e){ - logger.error("delete task:{} exception",value,e); - } - } - - - /** - * Gets all the elements of the set based on the key - * @param key The key is the kill/cancel queue path name - * @return - */ - @Override - public Set smembers(String key) { - try { - List list = zookeeperOperator.getChildrenKeys(getTasksPath(key)); - return new HashSet<>(list); - } catch (Exception e) { - logger.error("get all tasks from tasks queue exception",e); - } - return Collections.emptySet(); - } - - /** - * Clear the task queue of zookeeper node - */ - @Override - public void delete(){ - try { - String tasksQueuePath = getTasksPath(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - String tasksKillPath = getTasksPath(Constants.DOLPHINSCHEDULER_TASKS_KILL); - - for (String key : new String[]{tasksQueuePath,tasksKillPath}){ - if (zookeeperOperator.isExisted(key)){ - List list = zookeeperOperator.getChildrenKeys(key); - for (String task : list) { - zookeeperOperator.remove(key + Constants.SINGLE_SLASH + task); - logger.info("delete task from tasks queue : {}/{} ", key, task); - } - } - } - - } catch (Exception e) { - logger.error("delete all tasks in tasks queue failure", e); - } - } - - /** - * Get the task queue path - * @param key task queue name - * @return - */ - public String getTasksPath(String key){ - return zookeeperOperator.getZookeeperConfig().getDsRoot() + Constants.SINGLE_SLASH + key; - } - -} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractListener.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractListener.java new file mode 100644 index 0000000000..3e3e6c8c20 --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractListener.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service.zk; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.cache.TreeCacheEvent; +import org.apache.curator.framework.recipes.cache.TreeCacheListener; + +public abstract class AbstractListener implements TreeCacheListener { + + @Override + public final void childEvent(final CuratorFramework client, final TreeCacheEvent event) throws Exception { + String path = null == event.getData() ? "" : event.getData().getPath(); + if (path.isEmpty()) { + return; + } + dataChanged(client, event, path); + } + + protected abstract void dataChanged(final CuratorFramework client, final TreeCacheEvent event, final String path); +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java index fa1a0bfced..e75e20becb 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java @@ -16,19 +16,15 @@ */ package org.apache.dolphinscheduler.service.zk; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.imps.CuratorFrameworkState; import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.enums.ZKNodeType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ResInfo; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; import java.util.*; @@ -37,126 +33,30 @@ import static org.apache.dolphinscheduler.common.Constants.*; /** * abstract zookeeper client */ +@Component public abstract class AbstractZKClient extends ZookeeperCachedOperator { private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class); - /** - * server stop or not - */ - protected IStoppable stoppable = null; - - /** - * heartbeat for zookeeper - * @param znode zookeeper node - * @param serverType server type - */ - public void heartBeatForZk(String znode, String serverType){ - try { - - //check dead or not in zookeeper - if(zkClient.getState() == CuratorFrameworkState.STOPPED || checkIsDeadServer(znode, serverType)){ - stoppable.stop("i was judged to death, release resources and stop myself"); - return; - } - - String resInfoStr = super.get(znode); - String[] splits = resInfoStr.split(Constants.COMMA); - if (splits.length != Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH){ - return; - } - StringBuilder sb = new StringBuilder(); - sb.append(splits[0]).append(Constants.COMMA) - .append(splits[1]).append(Constants.COMMA) - .append(OSUtils.cpuUsage()).append(Constants.COMMA) - .append(OSUtils.memoryUsage()).append(Constants.COMMA) - .append(OSUtils.loadAverage()).append(Constants.COMMA) - .append(splits[5]).append(Constants.COMMA) - .append(DateUtils.dateToString(new Date())); - - zkClient.setData().forPath(znode, sb.toString().getBytes()); - - } catch (Exception e) { - logger.error("heartbeat for zk failed", e); - stoppable.stop("heartbeat for zk exception, release resources and stop myself"); - } - } /** - * check dead server or not , if dead, stop self - * - * @param zNode node path - * @param serverType master or worker prefix - * @return true if not exists - * @throws Exception errors + * remove dead server by host + * @param host host + * @param serverType serverType + * @throws Exception */ - protected boolean checkIsDeadServer(String zNode, String serverType) throws Exception{ - //ip_sequenceno - String[] zNodesPath = zNode.split("\\/"); - String ipSeqNo = zNodesPath[zNodesPath.length - 1]; - - String type = serverType.equals(MASTER_PREFIX) ? MASTER_PREFIX : WORKER_PREFIX; - String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + ipSeqNo; - - if(!isExisted(zNode) || isExisted(deadServerPath)){ - return true; - } - - - return false; - } - - public void removeDeadServerByHost(String host, String serverType) throws Exception { - List deadServers = super.getChildrenKeys(getDeadZNodeParentPath()); - for(String serverPath : deadServers){ - if(serverPath.startsWith(serverType+UNDERLINE+host)){ - String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath; - super.remove(server); + List deadServers = super.getChildrenKeys(getDeadZNodeParentPath()); + for(String serverPath : deadServers){ + if(serverPath.startsWith(serverType+UNDERLINE+host)){ + String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath; + super.remove(server); logger.info("{} server {} deleted from zk dead server path success" , serverType , host); - } - } - } - - /** - * create zookeeper path according the zk node type. - * @param zkNodeType zookeeper node type - * @return register zookeeper path - * @throws Exception - */ - private String createZNodePath(ZKNodeType zkNodeType, String host) throws Exception { - // specify the format of stored data in ZK nodes - String heartbeatZKInfo = ResInfo.getHeartBeatInfo(new Date()); - // create temporary sequence nodes for master znode - String registerPath= getZNodeParentPath(zkNodeType) + SINGLE_SLASH + host; - - super.persistEphemeral(registerPath, heartbeatZKInfo); - logger.info("register {} node {} success" , zkNodeType.toString(), registerPath); - return registerPath; - } - - /** - * register server, if server already exists, return null. - * @param zkNodeType zookeeper node type - * @return register server path in zookeeper - * @throws Exception errors - */ - public String registerServer(ZKNodeType zkNodeType) throws Exception { - String registerPath = null; - String host = OSUtils.getHost(); - if(checkZKNodeExists(host, zkNodeType)){ - logger.error("register failure , {} server already started on host : {}" , - zkNodeType.toString(), host); - return registerPath; + } } - registerPath = createZNodePath(zkNodeType, host); - - // handle dead server - handleDeadServer(registerPath, zkNodeType, Constants.DELETE_ZK_OP); - - return registerPath; } + /** * opType(add): if find dead server , then add to zk deadServerPath * opType(delete): delete path from zk @@ -188,16 +88,6 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { } - - - /** - * for stop server - * @param serverStoppable server stoppable interface - */ - public void setStoppable(IStoppable serverStoppable){ - this.stoppable = serverStoppable; - } - /** * get active master num * @return active master number @@ -236,7 +126,7 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { int i = 0; for (Map.Entry entry : masterMap.entrySet()) { Server masterServer = ResInfo.parseHeartbeatForZKInfo(entry.getValue()); - masterServer.setZkDirectory( parentPath + "/"+ entry.getKey()); + masterServer.setZkDirectory(parentPath + "/"+ entry.getKey()); masterServer.setId(i); i ++; masterServers.add(masterServer); @@ -255,8 +145,18 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { try { String path = getZNodeParentPath(zkNodeType); List serverList = super.getChildrenKeys(path); + if(zkNodeType == ZKNodeType.WORKER){ + List workerList = new ArrayList<>(); + for(String group : serverList){ + List groupServers = super.getChildrenKeys(path + Constants.SLASH + group); + for(String groupServer : groupServers){ + workerList.add(group + Constants.SLASH + groupServer); + } + } + serverList = workerList; + } for(String server : serverList){ - masterMap.putIfAbsent(server, super.get(path + "/" + server)); + masterMap.putIfAbsent(server, super.get(path + Constants.SLASH + server)); } } catch (Exception e) { logger.error("get server list failed", e); @@ -311,14 +211,6 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS; } - /** - * - * @return get master lock path - */ - public String getWorkerLockPath(){ - return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_WORKERS; - } - /** * * @param zkNodeType zookeeper node type @@ -375,7 +267,7 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { * release mutex * @param mutex mutex */ - public static void releaseMutex(InterProcessMutex mutex) { + public void releaseMutex(InterProcessMutex mutex) { if (mutex != null){ try { mutex.release(); @@ -405,23 +297,6 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { } } - /** - * server self dead, stop all threads - * @param serverHost server host - * @param zkNodeType zookeeper node type - * @return true if server dead and stop all threads - */ - protected boolean checkServerSelfDead(String serverHost, ZKNodeType zkNodeType) { - if (serverHost.equals(OSUtils.getHost())) { - logger.error("{} server({}) of myself dead , stopping...", - zkNodeType.toString(), serverHost); - stoppable.stop(String.format(" %s server %s of myself dead , stopping...", - zkNodeType.toString(), serverHost)); - return true; - } - return false; - } - /** * get host ip, string format: masterParentPath/ip * @param path path @@ -429,7 +304,7 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { */ protected String getHostByEventDataPath(String path) { if(StringUtils.isEmpty(path)){ - logger.error("empty path!"); + logger.error("empty path!"); return ""; } String[] pathArray = path.split(SINGLE_SLASH); @@ -440,18 +315,6 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { return pathArray[pathArray.length - 1]; } - /** - * acquire zk lock - * @param zkClient zk client - * @param zNodeLockPath zk lock path - * @return zk lock - * @throws Exception errors - */ - public InterProcessMutex acquireZkLock(CuratorFramework zkClient,String zNodeLockPath)throws Exception{ - InterProcessMutex mutex = new InterProcessMutex(zkClient, zNodeLockPath); - mutex.acquire(); - return mutex; - } @Override public String toString() { @@ -460,7 +323,6 @@ public abstract class AbstractZKClient extends ZookeeperCachedOperator { ", deadServerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.DEAD_SERVER) + '\'' + ", masterZNodeParentPath='" + getZNodeParentPath(ZKNodeType.MASTER) + '\'' + ", workerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.WORKER) + '\'' + - ", stoppable=" + stoppable + '}'; } -} +} \ No newline at end of file diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java index dccb768f8b..e71cb74e15 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java @@ -20,6 +20,7 @@ import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCache; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; +import org.apache.curator.framework.recipes.cache.TreeCacheListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; @@ -32,13 +33,13 @@ public class ZookeeperCachedOperator extends ZookeeperOperator { private final Logger logger = LoggerFactory.getLogger(ZookeeperCachedOperator.class); - TreeCache treeCache; + private TreeCache treeCache; /** * register a unified listener of /${dsRoot}, */ @Override protected void registerListener() { - treeCache = new TreeCache(zkClient, getZookeeperConfig().getDsRoot()); + treeCache = new TreeCache(zkClient, getZookeeperConfig().getDsRoot() + "/nodes"); logger.info("add listener to zk path: {}", getZookeeperConfig().getDsRoot()); try { treeCache.start(); @@ -72,6 +73,10 @@ public class ZookeeperCachedOperator extends ZookeeperOperator { return treeCache; } + public void addListener(TreeCacheListener listener){ + this.treeCache.getListenable().addListener(listener); + } + @Override public void close() { treeCache.close(); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java index c6bdfc3b02..5bdc6f8cd7 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java @@ -24,7 +24,7 @@ import org.springframework.stereotype.Component; * zookeeper conf */ @Component -@PropertySource("classpath:common.properties") +@PropertySource("classpath:zookeeper.properties") public class ZookeeperConfig { //zk connect config diff --git a/dolphinscheduler-service/src/main/resources/quartz.properties b/dolphinscheduler-service/src/main/resources/quartz.properties index b01be394c6..6e208f62d4 100644 --- a/dolphinscheduler-service/src/main/resources/quartz.properties +++ b/dolphinscheduler-service/src/main/resources/quartz.properties @@ -19,50 +19,36 @@ # Configure Main Scheduler Properties #============================================================================ #org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.StdJDBCDelegate -org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate -# postgre -org.quartz.dataSource.myDs.driver = org.postgresql.Driver -org.quartz.dataSource.myDs.URL = jdbc:postgresql://localhost:5432/dolphinscheduler?characterEncoding=utf8 -# mysql -#org.quartz.dataSource.myDs.driver = com.mysql.jdbc.Driver -#org.quartz.dataSource.myDs.URL = jdbc:mysql://localhost:3306/dolphinscheduler?characterEncoding=utf8 -#h2 -#org.quartz.dataSource.myDs.driver=org.h2.Driver -#org.quartz.dataSource.myDs.URL=jdbc:h2:file:/Users/stone/work/myworkspace/incubator-dolphinscheduler/h2;AUTO_SERVER=TRUE +#org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate -org.quartz.dataSource.myDs.user = test -org.quartz.dataSource.myDs.password = test - -org.quartz.scheduler.instanceName = DolphinScheduler -org.quartz.scheduler.instanceId = AUTO -org.quartz.scheduler.makeSchedulerThreadDaemon = true -org.quartz.jobStore.useProperties = false +#org.quartz.scheduler.instanceName = DolphinScheduler +#org.quartz.scheduler.instanceId = AUTO +#org.quartz.scheduler.makeSchedulerThreadDaemon = true +#org.quartz.jobStore.useProperties = false #============================================================================ # Configure ThreadPool #============================================================================ -org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool -org.quartz.threadPool.makeThreadsDaemons = true -org.quartz.threadPool.threadCount = 25 -org.quartz.threadPool.threadPriority = 5 +#org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool +#org.quartz.threadPool.makeThreadsDaemons = true +#org.quartz.threadPool.threadCount = 25 +#org.quartz.threadPool.threadPriority = 5 #============================================================================ # Configure JobStore #============================================================================ -org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX +#org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX -org.quartz.jobStore.tablePrefix = QRTZ_ -org.quartz.jobStore.isClustered = true -org.quartz.jobStore.misfireThreshold = 60000 -org.quartz.jobStore.clusterCheckinInterval = 5000 -org.quartz.jobStore.acquireTriggersWithinLock=true -org.quartz.jobStore.dataSource = myDs +#org.quartz.jobStore.tablePrefix = QRTZ_ +#org.quartz.jobStore.isClustered = true +#org.quartz.jobStore.misfireThreshold = 60000 +#org.quartz.jobStore.clusterCheckinInterval = 5000 +#org.quartz.jobStore.acquireTriggersWithinLock=true +#org.quartz.jobStore.dataSource = myDs #============================================================================ # Configure Datasources #============================================================================ -org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider -org.quartz.dataSource.myDs.maxConnections = 10 -org.quartz.dataSource.myDs.validationQuery = select 1 \ No newline at end of file +#org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider \ No newline at end of file diff --git a/dolphinscheduler-service/src/main/resources/zookeeper.properties b/dolphinscheduler-service/src/main/resources/zookeeper.properties new file mode 100644 index 0000000000..2204467ac9 --- /dev/null +++ b/dolphinscheduler-service/src/main/resources/zookeeper.properties @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181 +zookeeper.quorum=localhost:2181 + +# dolphinscheduler root directory +#zookeeper.dolphinscheduler.root=/dolphinscheduler + +# dolphinscheduler failover directory +#zookeeper.session.timeout=60000 +#zookeeper.connection.timeout=30000 +#zookeeper.retry.base.sleep=100 +#zookeeper.retry.max.sleep=30000 +#zookeeper.retry.maxtime=10 \ No newline at end of file diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/TaskQueueZKImplTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/TaskQueueZKImplTest.java deleted file mode 100644 index 5d464ac3c9..0000000000 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/queue/TaskQueueZKImplTest.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.service.queue; - -import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.utils.IpUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; - -import java.util.List; -import java.util.Random; - -import static org.junit.Assert.*; - -/** - * task queue test - */ -@Ignore -public class TaskQueueZKImplTest extends BaseTaskQueueTest { - - @Before - public void before(){ - - //clear all data - tasksQueue.delete(); - } - - @After - public void after(){ - //clear all data - tasksQueue.delete(); - } - - /** - * test take out all the elements - */ - @Test - public void getAllTasks(){ - - //add - init(); - // get all - List allTasks = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - assertEquals(allTasks.size(),2); - //delete all - tasksQueue.delete(); - allTasks = tasksQueue.getAllTasks(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - assertEquals(allTasks.size(),0); - } - @Test - public void hasTask(){ - init(); - boolean hasTask = tasksQueue.hasTask(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - assertTrue(hasTask); - //delete all - tasksQueue.delete(); - hasTask = tasksQueue.hasTask(Constants.DOLPHINSCHEDULER_TASKS_QUEUE); - assertFalse(hasTask); - } - /** - * test check task exists in the task queue or not - */ - @Test - public void checkTaskExists(){ - - String task= "1_0_1_1_-1"; - //add - init(); - // check Exist true - boolean taskExists = tasksQueue.checkTaskExists(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, task); - assertTrue(taskExists); - - //remove task - tasksQueue.removeNode(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - // check Exist false - taskExists = tasksQueue.checkTaskExists(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, task); - assertFalse(taskExists); - } - - /** - * test add element to the queue - */ - @Test - public void add(){ - - //add - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"1_0_1_1_-1"); - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"0_1_1_1_-1"); - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"1_2_1_1_" + IpUtils.ipToLong(OSUtils.getHost()) + 10); - - List tasks = tasksQueue.poll(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, 1); - - if(tasks.size() <= 0){ - return; - } - - //pop - String node1 = tasks.get(0); - assertEquals(node1,"0_0_0_1_" + IpUtils.ipToLong(OSUtils.getHost())); - } - - /** - * test element pops out of the queue - */ - @Test - public void poll(){ - - //add - init(); - List taskList = tasksQueue.poll(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, 2); - assertEquals(taskList.size(),2); - - assertEquals(taskList.get(0),"0_1_1_1_-1"); - assertEquals(taskList.get(1),"1_0_1_1_-1"); - } - - /** - * test remove element from queue - */ - @Test - public void removeNode(){ - String task = "1_0_1_1_-1"; - //add - init(); - tasksQueue.removeNode(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - assertFalse(tasksQueue.checkTaskExists(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task)); - } - - /** - * test add an element to the set - */ - @Test - public void sadd(){ - - String task = "1_0_1_1_-1"; - tasksQueue.sadd(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - //check size - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),1); - } - - - /** - * test delete the value corresponding to the key in the set - */ - @Test - public void srem(){ - - String task = "1_0_1_1_-1"; - tasksQueue.sadd(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - //check size - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),1); - //remove and get size - tasksQueue.srem(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),0); - } - - /** - * test gets all the elements of the set based on the key - */ - @Test - public void smembers(){ - - //first init - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),0); - //add - String task = "1_0_1_1_-1"; - tasksQueue.sadd(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - //check size - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),1); - //add - task = "0_1_1_1_"; - tasksQueue.sadd(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,task); - //check size - assertEquals(tasksQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_QUEUE).size(),2); - } - - - /** - * init data - */ - private void init(){ - //add - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"1_0_1_1_-1"); - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE,"0_1_1_1_-1"); - } - - - - /** - * test one million data from zookeeper queue - */ - @Ignore - @Test - public void extremeTest(){ - int total = 30 * 10000; - - for(int i = 0; i < total; i++) { - for(int j = 0; j < total; j++) { - //${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - //format ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId} - String formatTask = String.format("%s_%d_%s_%d", i, i + 1, j, j == 0 ? 0 : j + new Random().nextInt(100)); - tasksQueue.add(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, formatTask); - } - } - - String node1 = tasksQueue.poll(Constants.DOLPHINSCHEDULER_TASKS_QUEUE, 1).get(0); - assertEquals(node1,"0"); - - } - -} diff --git a/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java b/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java new file mode 100644 index 0000000000..ca6c083a67 --- /dev/null +++ b/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package queue; + +import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; +import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; +import org.junit.Test; + +import static org.junit.Assert.*; + +public class TaskUpdateQueueTest { + + /** + * test put + */ + @Test + public void testQueue() throws Exception{ + + // ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${groupName} + + /** + * 1_1_2_1_default + * 1_1_2_2_default + * 1_1_0_3_default + * 1_1_0_4_default + */ + + String taskInfo1 = "1_1_2_1_default"; + String taskInfo2 = "1_1_2_2_default"; + String taskInfo3 = "1_1_0_3_default"; + String taskInfo4 = "1_1_0_4_default"; + + TaskPriorityQueue queue = new TaskPriorityQueueImpl(); + queue.put(taskInfo1); + queue.put(taskInfo2); + queue.put(taskInfo3); + queue.put(taskInfo4); + + assertEquals("1_1_0_3_default", queue.take()); + assertEquals("1_1_0_4_default", queue.take()); + assertEquals("1_1_2_1_default",queue.take()); + assertEquals("1_1_2_2_default",queue.take()); + } +} diff --git a/dolphinscheduler-ui/package.json b/dolphinscheduler-ui/package.json index da15b722fc..b23969803b 100644 --- a/dolphinscheduler-ui/package.json +++ b/dolphinscheduler-ui/package.json @@ -11,7 +11,8 @@ "build:release": "npm run clean && cross-env NODE_ENV=production PUBLIC_PATH=/dolphinscheduler/ui webpack --config ./build/webpack.config.release.js" }, "dependencies": { - "ans-ui": "1.1.7", + "@riophae/vue-treeselect": "^0.4.0", + "ans-ui": "1.1.9", "axios": "^0.16.2", "bootstrap": "3.3.7", "canvg": "1.5.1", @@ -26,6 +27,7 @@ "js-cookie": "^2.2.1", "jsplumb": "^2.8.6", "lodash": "^4.17.11", + "normalize.css": "^8.0.1", "vue": "^2.5.17", "vue-router": "2.7.0", "vuex": "^3.0.0", diff --git a/dolphinscheduler-ui/pom.xml b/dolphinscheduler-ui/pom.xml index 3fd9aa6650..78869ffbc4 100644 --- a/dolphinscheduler-ui/pom.xml +++ b/dolphinscheduler-ui/pom.xml @@ -89,6 +89,61 @@ + + rpmbuild + + + + com.github.eirslett + frontend-maven-plugin + ${frontend-maven-plugin.version} + + + install node and npm + + install-node-and-npm + + + ${node.version} + ${npm.version} + + + + npm install node-sass --unsafe-perm + + npm + + generate-resources + + install node-sass --unsafe-perm + + + + npm install + + npm + + generate-resources + + install + + + + npm run build:release + + npm + + + run build:release + + + + + + + + + nginx diff --git a/dolphinscheduler-ui/src/js/conf/home/index.js b/dolphinscheduler-ui/src/js/conf/home/index.js index 33fc63d8b0..1913088eca 100644 --- a/dolphinscheduler-ui/src/js/conf/home/index.js +++ b/dolphinscheduler-ui/src/js/conf/home/index.js @@ -31,6 +31,7 @@ import Permissions from '@/module/permissions' import 'ans-ui/lib/ans-ui.min.css' import ans from 'ans-ui/lib/ans-ui.min' import en_US from 'ans-ui/lib/locale/en' // eslint-disable-line +import'normalize.css/normalize.css' import 'sass/conf/home/index.scss' import'bootstrap/dist/css/bootstrap.min.css' diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss index 6d97856960..886ee692bf 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss @@ -130,12 +130,12 @@ } .toolbar-btn { overflow: hidden; - padding: 11px 11px 0 11px; + padding: 8px 11px 0 11px; .bar-box { width: 36px; height: 36px; float: left; - margin-bottom: 11px; + margin-bottom: 3px; border-radius: 3px; .disabled { .icos { diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue index 7d6f95d753..6f630071c1 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue @@ -25,7 +25,7 @@ :key="v" v-for="(item,v) in tasksTypeList" @mousedown="_getDagId(v)"> -

+
@@ -177,7 +177,7 @@ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], - Connector: 'Straight', + Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ @@ -326,45 +326,62 @@ * Storage interface */ _save (sourceType) { - return new Promise((resolve, reject) => { - this.spinnerLoading = true - // Storage store - Dag.saveStore().then(res => { - if (this.urlParam.id) { - /** - * Edit - * @param saveInstanceEditDAGChart => Process instance editing - * @param saveEditDAGChart => Process definition editing - */ - this[this.type === 'instance' ? 'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => { - this.$message.success(res.msg) - this.spinnerLoading = false - resolve() - }).catch(e => { - this.$message.error(e.msg || '') - this.spinnerLoading = false - reject(e) - }) - } else { - // New - this.saveDAGchart().then(res => { - this.$message.success(res.msg) - this.spinnerLoading = false - // source @/conf/home/pages/dag/_source/editAffirmModel/index.js - if (sourceType !== 'affirm') { - // Jump process definition - this.$router.push({ name: 'projects-definition-list' }) - } - resolve() - }).catch(e => { - this.$message.error(e.msg || '') - this.setName('') - this.spinnerLoading = false - reject(e) - }) - } + if(this._verifConditions()) { + return new Promise((resolve, reject) => { + this.spinnerLoading = true + // Storage store + Dag.saveStore().then(res => { + if (this.urlParam.id) { + /** + * Edit + * @param saveInstanceEditDAGChart => Process instance editing + * @param saveEditDAGChart => Process definition editing + */ + this[this.type === 'instance' ? 'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => { + this.$message.success(res.msg) + this.spinnerLoading = false + resolve() + }).catch(e => { + this.$message.error(e.msg || '') + this.spinnerLoading = false + reject(e) + }) + } else { + // New + this.saveDAGchart().then(res => { + this.$message.success(res.msg) + this.spinnerLoading = false + // source @/conf/home/pages/dag/_source/editAffirmModel/index.js + if (sourceType !== 'affirm') { + // Jump process definition + this.$router.push({ name: 'projects-definition-list' }) + } + resolve() + }).catch(e => { + this.$message.error(e.msg || '') + this.setName('') + this.spinnerLoading = false + reject(e) + }) + } + }) }) + } + }, + _verifConditions () { + let tasks = this.$store.state.dag.tasks + let bool = true + tasks.map(v=>{ + if(v.type == 'CONDITIONS' && (v.conditionResult.successNode[0] =='' || v.conditionResult.successNode[0] == null || v.conditionResult.failedNode[0] =='' || v.conditionResult.failedNode[0] == null)) { + bool = false + return false + } }) + if(!bool) { + this.$message.warning(`${i18n.$t('Successful branch flow and failed branch flow are required')}`) + return false + } + return true }, /** * Global parameter @@ -589,7 +606,7 @@ Endpoint: [ 'Dot', { radius: 1, cssClass: 'dot-style' } ], - Connector: 'Straight', + Connector: 'Bezier', PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style ConnectionOverlays: [ [ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue index 8b10d2b738..8efe5c2860 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/_source/workerGroups.vue @@ -40,8 +40,8 @@ mixins: [disabledState], props: { value: { - type: Number, - default: -1 + type: String, + default: 'default' } }, model: { diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue old mode 100755 new mode 100644 index cc1c8b6d6c..459f9a8605 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue @@ -90,7 +90,7 @@ {{$t('Worker group')}} - + @@ -333,7 +333,7 @@ // Task priority taskInstancePriority: 'MEDIUM', // worker group id - workerGroupId: -1, + workerGroup: 'default', stateList:[ { value: 'success', @@ -430,7 +430,7 @@ * return params */ _onParams (o) { - this.params = Object.assign(this.params, {}, o) + this.params = Object.assign({}, o) }, _onCacheParams (o) { @@ -455,7 +455,7 @@ retryInterval: this.retryInterval, timeout: this.timeout, taskInstancePriority: this.taskInstancePriority, - workerGroupId: this.workerGroupId, + workerGroup: this.workerGroup, status: this.status, branch: this.branch }, @@ -470,7 +470,7 @@ this.$message.warning(`${i18n.$t('Please enter name (required)')}`) return false } - if (this.successBranch !='' && this.successBranch == this.failedBranch) { + if (this.successBranch !='' && this.successBranch !=null && this.successBranch == this.failedBranch) { this.$message.warning(`${i18n.$t('Cannot select the same node for successful branch flow and failed branch flow')}`) return false } @@ -519,7 +519,7 @@ retryInterval: this.retryInterval, timeout: this.timeout, taskInstancePriority: this.taskInstancePriority, - workerGroupId: this.workerGroupId, + workerGroup: this.workerGroup, status: this.status, branch: this.branch }, @@ -611,25 +611,27 @@ this.failedBranch = o.conditionResult.failedNode[0] } // If the workergroup has been deleted, set the default workergroup - var hasMatch = false; - for (let i = 0; i < this.store.state.security.workerGroupsListAll.length; i++) { - var workerGroupId = this.store.state.security.workerGroupsListAll[i].id - if (o.workerGroupId == workerGroupId) { - hasMatch = true; - break; - } + var hasMatch = false; + for (let i = 0; i < this.store.state.security.workerGroupsListAll.length; i++) { + var workerGroup = this.store.state.security.workerGroupsListAll[i].id + if (o.workerGroup == workerGroup) { + hasMatch = true; + break; } + } - if(!hasMatch){ - this.workerGroupId = -1 - }else{ - this.workerGroupId = o.workerGroupId - } + if(!hasMatch){ + this.workerGroup = 'default' + } else { + this.workerGroup = o.workerGroup + } this.params = o.params || {} this.dependence = o.dependence || {} this.cacheDependence = o.dependence || {} + } else { + this.workerGroup = this.store.state.security.workerGroupsListAll[0].id } this.isContentBox = true }, @@ -663,7 +665,7 @@ retryInterval: this.retryInterval, timeout: this.timeout, taskInstancePriority: this.taskInstancePriority, - workerGroupId: this.workerGroupId, + workerGroup: this.workerGroup, successBranch: this.successBranch, failedBranch: this.failedBranch } @@ -694,4 +696,9 @@ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue index dbe3e1d6b1..7874b53885 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/log.vue @@ -170,7 +170,7 @@ */ _downloadLog () { downloadFile('/dolphinscheduler/log/download-log', { - taskInstId: this.stateId || this.logId + taskInstanceId: this.stateId || this.logId }) }, /** @@ -256,7 +256,7 @@ computed: { _rtParam () { return { - taskInstId: this.stateId || this.logId, + taskInstanceId: this.stateId || this.logId, skipLineNum: parseInt(`${this.loadingIndex ? this.loadingIndex + '000' : 0}`), limit: parseInt(`${this.loadingIndex ? this.loadingIndex + 1 : 1}000`) } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue index fa7ee89e98..0c3f7433a3 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue @@ -128,10 +128,6 @@ this.store.dispatch('dag/getProcessTasksList', { processDefinitionId: ids }).then(res => { resolve(['ALL'].concat(_.map(res, v => v.name))) }) - } else { - this.store.dispatch('dag/getTaskListDefIdAll', { processDefinitionIdList: ids }).then(res => { - resolve(res) - }) } }) }, diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/datax.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/datax.vue index 959610f95a..f1c9b757bd 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/datax.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/datax.vue @@ -17,90 +17,127 @@ @@ -437,4 +557,12 @@ } } } + .vue-treeselect--disabled { + .vue-treeselect__control { + background-color: #ecf3f8; + .vue-treeselect__single-value { + color: #6d859e; + } + } + } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue index 706a35f4fe..8fb2ebadfe 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/mr.vue @@ -44,19 +44,9 @@
{{$t('Main jar package')}}
- - - - + +
{{ node.raw.fullName }}
+
@@ -88,12 +78,9 @@
{{$t('Resources')}}
- - + +
{{ node.raw.fullName }}
+
@@ -115,16 +102,20 @@ import mListBox from './_source/listBox' import mResources from './_source/resources' import mLocalParams from './_source/localParams' + import Treeselect from '@riophae/vue-treeselect' + import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' export default { name: 'mr', data () { return { + valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Main jar package (List) + mainJarLists: [], mainJarList: [], // Resource(list) resourceList: [], @@ -139,7 +130,14 @@ // Program type programType: 'JAVA', // Program type(List) - programTypeList: [{ code: 'JAVA' }, { code: 'PYTHON' }] + programTypeList: [{ code: 'JAVA' }, { code: 'PYTHON' }], + normalizer(node) { + return { + label: node.name + } + }, + allNoResources: [], + noRes: [] } }, props: { @@ -147,6 +145,19 @@ }, mixins: [disabledState], methods: { + /** + * getResourceId + */ + marjarId(name) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+name + }).then(res => { + this.mainJar = res.id + }).catch(e => { + this.$message.error(e.msg || '') + }) + }, /** * return localParams */ @@ -165,6 +176,79 @@ _onCacheResourcesData (a) { this.cacheResourceList = a }, + diGuiTree(item) { // Recursive convenience tree structure + item.forEach(item => { + item.children === '' || item.children === undefined || item.children === null || item.children.length === 0?         + this.operationTree(item) : this.diGuiTree(item.children); + }) + }, + operationTree(item) { + if(item.dirctory) { + item.isDisabled =true + } + delete item.children + }, + searchTree(element, id) { + // 根据id查找节点 + if (element.id == id) { + return element; + } else if (element.children != null) { + var i; + var result = null; + for (i = 0; result == null && i < element.children.length; i++) { + result = this.searchTree(element.children[i], id); + } + return result; + } + return null; + }, + dataProcess(backResource) { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.mainJarList.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return item.id + }) + Array.prototype.diff = function(a) { + return this.filter(function(i) {return a.indexOf(i) < 0;}); + }; + let diffSet = this.resourceList.diff(resourceIdArr); + let optionsCmp = [] + if(diffSet.length>0) { + diffSet.forEach(item=>{ + backResource.forEach(item1=>{ + if(item==item1.id || item==item1.res) { + optionsCmp.push(item1) + } + }) + }) + } + let noResources = [{ + id: -1, + name: $t('Unauthorized or deleted resources'), + fullName: '/'+$t('Unauthorized or deleted resources'), + children: [] + }] + if(optionsCmp.length>0) { + this.allNoResources = optionsCmp + optionsCmp = optionsCmp.map(item=>{ + return {id: item.id,name: item.name,fullName: item.res} + }) + optionsCmp.forEach(item=>{ + item.isNew = true + }) + noResources[0].children = optionsCmp + this.mainJarList = this.mainJarList.concat(noResources) + } + } + }, /** * verification */ @@ -179,7 +263,9 @@ return false } - if (!this.$refs.refResources._verifResources()) { + // noRes + if (this.noRes.length>0) { + this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } @@ -187,14 +273,15 @@ if (!this.$refs.refLocalParams._verifProp()) { return false } - // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { - res: this.mainJar + id: this.mainJar }, - resourceList: this.resourceList, + resourceList: _.map(this.resourceList, v => { + return {id: v} + }), localParams: this.localParams, mainArgs: this.mainArgs, others: this.others, @@ -202,24 +289,7 @@ }) return true }, - /** - * Get resource data - */ - _getResourcesList () { - return new Promise((resolve, reject) => { - let isJar = (alias) => { - return alias.substring(alias.lastIndexOf('.') + 1, alias.length) !== 'jar' - } - this.mainJarList = _.map(_.cloneDeep(this.store.state.dag.resourcesListS), v => { - return { - id: v.id, - code: v.alias, - disabled: isJar(v.alias) - } - }) - resolve() - }) - } + }, watch: { /** @@ -237,12 +307,36 @@ }, computed: { cacheParams () { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.mainJarList.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return {id: item.id,name: item.name,res: item.fullName} + }) + } + let result = [] + resourceIdArr.forEach(item=>{ + this.allNoResources.forEach(item1=>{ + if(item.id==item1.id) { + // resultBool = true + result.push(item1) + } + }) + }) + this.noRes = result return { mainClass: this.mainClass, mainJar: { - res: this.mainJar + id: this.mainJar }, - resourceList: this.cacheResourceList, + resourceList: resourceIdArr, localParams: this.localParams, mainArgs: this.mainArgs, others: this.others, @@ -251,13 +345,24 @@ } }, created () { - this._getResourcesList().then(() => { + let item = this.store.state.dag.resourcesListS + let items = this.store.state.dag.resourcesListJar + this.diGuiTree(item) + this.diGuiTree(items) + this.mainJarList = item + this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' - this.mainJar = o.params.mainJar.res || '' + if(o.params.mainJar.res) { + this.marjarId(o.params.mainJar.res) + } else if(o.params.mainJar.res=='') { + this.mainJar = '' + } else { + this.mainJar = o.params.mainJar.id || '' + } this.mainArgs = o.params.mainArgs || '' this.others = o.params.others this.programType = o.params.programType || 'JAVA' @@ -265,22 +370,38 @@ // backfill resourceList let resourceList = o.params.resourceList || [] if (resourceList.length) { - this.resourceList = resourceList + _.map(resourceList, v => { + if(!v.id) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+v.res + }).then(res => { + this.resourceList.push(res.id) + this.dataProcess(backResource) + }).catch(e => { + this.resourceList.push(v.res) + this.dataProcess(backResource) + }) + } else { + this.resourceList.push(v.id) + this.dataProcess(backResource) + } + }) this.cacheResourceList = resourceList } // backfill localParams + let backResource = o.params.resourceList || [] let localParams = o.params.localParams || [] if (localParams.length) { this.localParams = localParams } } - }) }, mounted () { }, - components: { mLocalParams, mListBox, mResources } + components: { mLocalParams, mListBox, mResources, Treeselect } } @@ -306,4 +427,12 @@ } } } + .vue-treeselect--disabled { + .vue-treeselect__control { + background-color: #ecf3f8; + .vue-treeselect__single-value { + color: #6d859e; + } + } + } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue index 6f495d22a0..851f8bee8a 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/python.vue @@ -28,12 +28,15 @@
{{$t('Resources')}}
- - + +
{{ node.raw.fullName }}
+
+
@@ -56,6 +59,8 @@ import mListBox from './_source/listBox' import mResources from './_source/resources' import mLocalParams from './_source/localParams' + import Treeselect from '@riophae/vue-treeselect' + import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' @@ -65,6 +70,7 @@ name: 'python', data () { return { + valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter @@ -72,7 +78,15 @@ // resource(list) resourceList: [], // Cache ResourceList - cacheResourceList: [] + cacheResourceList: [], + resourceOptions: [], + normalizer(node) { + return { + label: node.name + } + }, + allNoResources: [], + noRes: [] } }, mixins: [disabledState], @@ -89,9 +103,9 @@ /** * return resourceList */ - _onResourcesData (a) { - this.resourceList = a - }, + // _onResourcesData (a) { + // this.resourceList = a + // }, /** * cache resourceList */ @@ -108,18 +122,22 @@ return false } - if (!this.$refs.refResources._verifResources()) { + // localParams Subcomponent verification + if (!this.$refs.refLocalParams._verifProp()) { return false } - // localParams Subcomponent verification - if (!this.$refs.refLocalParams._verifProp()) { + // noRes + if (this.noRes.length>0) { + this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) return false } // storage this.$emit('on-params', { - resourceList: this.resourceList, + resourceList: _.map(this.resourceList, v => { + return {id: v} + }), localParams: this.localParams, rawScript: editor.getValue() }) @@ -129,8 +147,6 @@ * Processing code highlighting */ _handlerEditor () { - this._destroyEditor() - // editor editor = codemirror('code-python-mirror', { mode: 'python', @@ -145,49 +161,129 @@ } } - this.changes = () => { - this._cacheParams() - } - // Monitor keyboard editor.on('keypress', this.keypress) - editor.on('changes', this.changes) - editor.setValue(this.rawScript) return editor }, - _cacheParams () { - this.$emit('on-cache-params', { - resourceList: this.cacheResourceList, - localParams: this.localParams, - rawScript: editor ? editor.getValue() : '' - }); + diGuiTree(item) { // Recursive convenience tree structure + item.forEach(item => { + item.children === '' || item.children === undefined || item.children === null || item.children.length === 0?         + this.operationTree(item) : this.diGuiTree(item.children); + }) }, - _destroyEditor () { - if (editor) { - editor.toTextArea() // Uninstall - editor.off($('.code-python-mirror'), 'keypress', this.keypress) - editor.off($('.code-python-mirror'), 'changes', this.changes) + operationTree(item) { + if(item.dirctory) { + item.isDisabled =true + } + delete item.children + }, + searchTree(element, id) { + // 根据id查找节点 + if (element.id == id) { + return element; + } else if (element.children != null) { + var i; + var result = null; + for (i = 0; result == null && i < element.children.length; i++) { + result = this.searchTree(element.children[i], id); + } + return result; + } + return null; + }, + dataProcess(backResource) { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.resourceOptions.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return item.id + }) + Array.prototype.diff = function(a) { + return this.filter(function(i) {return a.indexOf(i) < 0;}); + }; + let diffSet = this.resourceList.diff(resourceIdArr); + let optionsCmp = [] + if(diffSet.length>0) { + diffSet.forEach(item=>{ + backResource.forEach(item1=>{ + if(item==item1.id || item==item1.res) { + optionsCmp.push(item1) + } + }) + }) + } + let noResources = [{ + id: -1, + name: $t('Unauthorized or deleted resources'), + fullName: '/'+$t('Unauthorized or deleted resources'), + children: [] + }] + if(optionsCmp.length>0) { + this.allNoResources = optionsCmp + optionsCmp = optionsCmp.map(item=>{ + return {id: item.id,name: item.name,fullName: item.res} + }) + optionsCmp.forEach(item=>{ + item.isNew = true + }) + noResources[0].children = optionsCmp + this.resourceOptions = this.resourceOptions.concat(noResources) + } } } }, watch: { //Watch the cacheParams cacheParams (val) { - this._cacheParams() + this.$emit('on-cache-params', val); } }, computed: { cacheParams () { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.resourceOptions.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return {id: item.id,name: item.name,res: item.fullName} + }) + } + let result = [] + resourceIdArr.forEach(item=>{ + this.allNoResources.forEach(item1=>{ + if(item.id==item1.id) { + // resultBool = true + result.push(item1) + } + }) + }) + this.noRes = result return { - resourceList: this.cacheResourceList, + resourceList: resourceIdArr, localParams: this.localParams } } }, created () { + let item = this.store.state.dag.resourcesListS + this.diGuiTree(item) + this.resourceOptions = item let o = this.backfillItem // Non-null objects represent backfill @@ -195,9 +291,26 @@ this.rawScript = o.params.rawScript || '' // backfill resourceList + let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { - this.resourceList = resourceList + _.map(resourceList, v => { + if(!v.id) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+v.res + }).then(res => { + this.resourceList.push(res.id) + this.dataProcess(backResource) + }).catch(e => { + this.resourceList.push(v.res) + this.dataProcess(backResource) + }) + } else { + this.resourceList.push(v.id) + this.dataProcess(backResource) + } + }) this.cacheResourceList = resourceList } @@ -214,12 +327,19 @@ }, 200) }, destroyed () { - if (editor) { - editor.toTextArea() // Uninstall - editor.off($('.code-python-mirror'), 'keypress', this.keypress) - editor.off($('.code-python-mirror'), 'changes', this.changes) - } + editor.toTextArea() // Uninstall + editor.off($('.code-python-mirror'), 'keypress', this.keypress) }, - components: { mLocalParams, mListBox, mResources } + components: { mLocalParams, mListBox, mResources,Treeselect } } + \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue index a4b20f3310..7a462a1f27 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/shell.vue @@ -32,6 +32,14 @@
+
{{$t('Resources')}}
+
+ +
{{ node.raw.fullName }}
+
+
+
+
{{$t('Custom Parameters')}}
@@ -63,6 +71,8 @@ import mResources from './_source/resources' import mLocalParams from './_source/localParams' import disabledState from '@/module/mixin/disabledState' + import Treeselect from '@riophae/vue-treeselect' + import '@riophae/vue-treeselect/dist/vue-treeselect.css' import codemirror from '@/conf/home/pages/resource/pages/file/pages/_source/codemirror' let editor @@ -71,6 +81,7 @@ name: 'shell', data () { return { + valueConsistsOf: 'LEAF_PRIORITY', // script rawScript: '', // Custom parameter @@ -78,7 +89,16 @@ // resource(list) resourceList: [], // Cache ResourceList - cacheResourceList: [] + cacheResourceList: [], + // define options + options: [], + normalizer(node) { + return { + label: node.name + } + }, + allNoResources: [], + noRes: [] } }, mixins: [disabledState], @@ -143,17 +163,24 @@ return false } - if (!this.$refs.refResources._verifResources()) { - return false - } - // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } + // noRes + if (this.noRes.length>0) { + this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) + return false + } + // Process resourcelist + let dataProcessing= _.map(this.resourceList, v => { + return { + id: v + } + }) // storage this.$emit('on-params', { - resourceList: this.resourceList, + resourceList: dataProcessing, localParams: this.localParams, rawScript: editor.getValue() }) @@ -163,8 +190,6 @@ * Processing code highlighting */ _handlerEditor () { - this._destroyEditor() - // editor editor = codemirror('code-shell-mirror', { mode: 'shell', @@ -179,62 +204,158 @@ } } - this.changes = () => { - this._cacheParams() - } - // Monitor keyboard editor.on('keypress', this.keypress) - - editor.on('changes', this.changes) - editor.setValue(this.rawScript) return editor }, - _cacheParams () { - this.$emit('on-cache-params', { - resourceList: this.cacheResourceList, - localParams: this.localParams, - rawScript: editor ? editor.getValue() : '' - }); + diGuiTree(item) { // Recursive convenience tree structure + item.forEach(item => { + item.children === '' || item.children === undefined || item.children === null || item.children.length === 0?         + this.operationTree(item) : this.diGuiTree(item.children); + }) }, - _destroyEditor () { - if (editor) { - editor.toTextArea() // Uninstall - editor.off($('.code-sql-mirror'), 'keypress', this.keypress) - editor.off($('.code-sql-mirror'), 'changes', this.changes) + operationTree(item) { + if(item.dirctory) { + item.isDisabled =true + } + delete item.children + }, + searchTree(element, id) { + // 根据id查找节点 + if (element.id == id) { + return element; + } else if (element.children != null) { + var i; + var result = null; + for (i = 0; result == null && i < element.children.length; i++) { + result = this.searchTree(element.children[i], id); + } + return result; + } + return null; + }, + dataProcess(backResource) { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.options.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return item.id + }) + Array.prototype.diff = function(a) { + return this.filter(function(i) {return a.indexOf(i) < 0;}); + }; + let diffSet = this.resourceList.diff(resourceIdArr); + let optionsCmp = [] + if(diffSet.length>0) { + diffSet.forEach(item=>{ + backResource.forEach(item1=>{ + if(item==item1.id || item==item1.res) { + optionsCmp.push(item1) + } + }) + }) + } + let noResources = [{ + id: -1, + name: $t('Unauthorized or deleted resources'), + fullName: '/'+$t('Unauthorized or deleted resources'), + children: [] + }] + if(optionsCmp.length>0) { + this.allNoResources = optionsCmp + optionsCmp = optionsCmp.map(item=>{ + return {id: item.id,name: item.name,fullName: item.res} + }) + optionsCmp.forEach(item=>{ + item.isNew = true + }) + noResources[0].children = optionsCmp + this.options = this.options.concat(noResources) + } } } }, watch: { //Watch the cacheParams cacheParams (val) { - this._cacheParams() + this.$emit('on-cache-params', val); } }, computed: { cacheParams () { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.options.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return {id: item.id,name: item.name,res: item.fullName} + }) + } + let result = [] + resourceIdArr.forEach(item=>{ + this.allNoResources.forEach(item1=>{ + if(item.id==item1.id) { + // resultBool = true + result.push(item1) + } + }) + }) + this.noRes = result return { - resourceList: this.cacheResourceList, + resourceList: resourceIdArr, localParams: this.localParams } } }, created () { + let item = this.store.state.dag.resourcesListS + this.diGuiTree(item) + this.options = item let o = this.backfillItem - + // Non-null objects represent backfill if (!_.isEmpty(o)) { this.rawScript = o.params.rawScript || '' // backfill resourceList + let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { - this.resourceList = resourceList + _.map(resourceList, v => { + if(!v.id) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+v.res + }).then(res => { + this.resourceList.push(res.id) + this.dataProcess(backResource) + }).catch(e => { + this.resourceList.push(v.res) + this.dataProcess(backResource) + }) + } else { + this.resourceList.push(v.id) + this.dataProcess(backResource) + } + }) this.cacheResourceList = resourceList } - + // backfill localParams let localParams = o.params.localParams || [] if (localParams.length) { @@ -251,10 +372,9 @@ if (editor) { editor.toTextArea() // Uninstall editor.off($('.code-shell-mirror'), 'keypress', this.keypress) - editor.off($('.code-shell-mirror'), 'changes', this.changes) } }, - components: { mLocalParams, mListBox, mResources, mScriptBox } + components: { mLocalParams, mListBox, mResources, mScriptBox, Treeselect } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue index feef19856c..4cceb3b27b 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/spark.vue @@ -63,19 +63,9 @@
{{$t('Main jar package')}}
- - - - + +
{{ node.raw.fullName }}
+
@@ -177,6 +167,14 @@
+
{{$t('Resources')}}
+
+ +
{{ node.raw.fullName }}
+
+
+
+
{{$t('Custom Parameters')}}
@@ -206,17 +204,21 @@ import mLocalParams from './_source/localParams' import mListBox from './_source/listBox' import mResources from './_source/resources' + import Treeselect from '@riophae/vue-treeselect' + import '@riophae/vue-treeselect/dist/vue-treeselect.css' import disabledState from '@/module/mixin/disabledState' export default { name: 'spark', data () { return { + valueConsistsOf: 'LEAF_PRIORITY', // Main function class mainClass: '', // Master jar package mainJar: null, // Master jar package(List) + mainJarLists: [], mainJarList: [], // Deployment method deployMode: 'cluster', @@ -247,7 +249,14 @@ // Spark version sparkVersion: 'SPARK2', // Spark version(LIst) - sparkVersionList: [{ code: 'SPARK2' }, { code: 'SPARK1' }] + sparkVersionList: [{ code: 'SPARK2' }, { code: 'SPARK1' }], + normalizer(node) { + return { + label: node.name + } + }, + allNoResources: [], + noRes: [] } }, props: { @@ -255,6 +264,19 @@ }, mixins: [disabledState], methods: { + /** + * getResourceId + */ + marjarId(name) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+name + }).then(res => { + this.mainJar = res.id + }).catch(e => { + this.$message.error(e.msg || '') + }) + }, /** * return localParams */ @@ -273,6 +295,79 @@ _onCacheResourcesData (a) { this.cacheResourceList = a }, + diGuiTree(item) { // Recursive convenience tree structure + item.forEach(item => { + item.children === '' || item.children === undefined || item.children === null || item.children.length === 0?         + this.operationTree(item) : this.diGuiTree(item.children); + }) + }, + operationTree(item) { + if(item.dirctory) { + item.isDisabled =true + } + delete item.children + }, + searchTree(element, id) { + // 根据id查找节点 + if (element.id == id) { + return element; + } else if (element.children != null) { + var i; + var result = null; + for (i = 0; result == null && i < element.children.length; i++) { + result = this.searchTree(element.children[i], id); + } + return result; + } + return null; + }, + dataProcess(backResource) { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.mainJarList.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return item.id + }) + Array.prototype.diff = function(a) { + return this.filter(function(i) {return a.indexOf(i) < 0;}); + }; + let diffSet = this.resourceList.diff(resourceIdArr); + let optionsCmp = [] + if(diffSet.length>0) { + diffSet.forEach(item=>{ + backResource.forEach(item1=>{ + if(item==item1.id || item==item1.res) { + optionsCmp.push(item1) + } + }) + }) + } + let noResources = [{ + id: -1, + name: $t('Unauthorized or deleted resources'), + fullName: '/'+$t('Unauthorized or deleted resources'), + children: [] + }] + if(optionsCmp.length>0) { + this.allNoResources = optionsCmp + optionsCmp = optionsCmp.map(item=>{ + return {id: item.id,name: item.name,fullName: item.res} + }) + optionsCmp.forEach(item=>{ + item.isNew = true + }) + noResources[0].children = optionsCmp + this.mainJarList = this.mainJarList.concat(noResources) + } + } + }, /** * verification */ @@ -292,6 +387,12 @@ return false } + // noRes + if (this.noRes.length>0) { + this.$message.warning(`${i18n.$t('Please delete all non-existent resources')}`) + return false + } + if (!Number.isInteger(parseInt(this.numExecutors))) { this.$message.warning(`${i18n.$t('The number of Executors should be a positive integer')}`) return false @@ -321,24 +422,25 @@ this.$message.warning(`${i18n.$t('Core number should be positive integer')}`) return false } - - if (!this.$refs.refResources._verifResources()) { - return false - } - // localParams Subcomponent verification if (!this.$refs.refLocalParams._verifProp()) { return false } + // Process resourcelist + let dataProcessing= _.map(this.resourceList, v => { + return { + id: v + } + }) // storage this.$emit('on-params', { mainClass: this.mainClass, mainJar: { - res: this.mainJar + id: this.mainJar }, deployMode: this.deployMode, - resourceList: this.resourceList, + resourceList: dataProcessing, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, @@ -351,24 +453,6 @@ sparkVersion: this.sparkVersion }) return true - }, - /** - * get resources list - */ - _getResourcesList () { - return new Promise((resolve, reject) => { - let isJar = (alias) => { - return alias.substring(alias.lastIndexOf('.') + 1, alias.length) !== 'jar' - } - this.mainJarList = _.map(_.cloneDeep(this.store.state.dag.resourcesListS), v => { - return { - id: v.id, - code: v.alias, - disabled: isJar(v.alias) - } - }) - resolve() - }) } }, watch: { @@ -385,13 +469,37 @@ }, computed: { cacheParams () { + let isResourceId = [] + let resourceIdArr = [] + if(this.resourceList.length>0) { + this.resourceList.forEach(v=>{ + this.mainJarList.forEach(v1=>{ + if(this.searchTree(v1,v)) { + isResourceId.push(this.searchTree(v1,v)) + } + }) + }) + resourceIdArr = isResourceId.map(item=>{ + return {id: item.id,name: item.name,res: item.fullName} + }) + } + let result = [] + resourceIdArr.forEach(item=>{ + this.allNoResources.forEach(item1=>{ + if(item.id==item1.id) { + // resultBool = true + result.push(item1) + } + }) + }) + this.noRes = result return { mainClass: this.mainClass, mainJar: { - res: this.mainJar + id: this.mainJar }, deployMode: this.deployMode, - resourceList: this.cacheResourceList, + resourceList: resourceIdArr, localParams: this.localParams, driverCores: this.driverCores, driverMemory: this.driverMemory, @@ -406,13 +514,24 @@ } }, created () { - this._getResourcesList().then(() => { + let item = this.store.state.dag.resourcesListS + let items = this.store.state.dag.resourcesListJar + this.diGuiTree(item) + this.diGuiTree(items) + this.mainJarList = item + this.mainJarLists = items let o = this.backfillItem // Non-null objects represent backfill if (!_.isEmpty(o)) { this.mainClass = o.params.mainClass || '' - this.mainJar = o.params.mainJar && o.params.mainJar.res ? o.params.mainJar.res : '' + if(o.params.mainJar.res) { + this.marjarId(o.params.mainJar.res) + } else if(o.params.mainJar.res=='') { + this.mainJar = '' + } else { + this.mainJar = o.params.mainJar.id || '' + } this.deployMode = o.params.deployMode || '' this.driverCores = o.params.driverCores || 1 this.driverMemory = o.params.driverMemory || '512M' @@ -425,9 +544,26 @@ this.sparkVersion = o.params.sparkVersion || 'SPARK2' // backfill resourceList + let backResource = o.params.resourceList || [] let resourceList = o.params.resourceList || [] if (resourceList.length) { - this.resourceList = resourceList + _.map(resourceList, v => { + if(!v.id) { + this.store.dispatch('dag/getResourceId',{ + type: 'FILE', + fullName: '/'+v.res + }).then(res => { + this.resourceList.push(res.id) + this.dataProcess(backResource) + }).catch(e => { + this.resourceList.push(v.res) + this.dataProcess(backResource) + }) + } else { + this.resourceList.push(v.id) + this.dataProcess(backResource) + } + }) this.cacheResourceList = resourceList } @@ -437,12 +573,11 @@ this.localParams = localParams } } - }) }, mounted () { }, - components: { mLocalParams, mListBox, mResources } + components: { mLocalParams, mListBox, mResources, Treeselect } } @@ -471,4 +606,12 @@ } } } + .vue-treeselect--disabled { + .vue-treeselect__control { + background-color: #ecf3f8; + .vue-treeselect__single-value { + color: #6d859e; + } + } + } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue index 843c1eaf1f..9e7f3305e5 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sql.vue @@ -37,7 +37,7 @@
- {{$t('Table')}} + {{$t('TableMode')}} {{$t('Attachment')}}
diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js index 6ac87b3372..88a258c6fe 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/jumpAffirm/index.js @@ -100,7 +100,7 @@ Affirm.isPop = (fn) => { Vue.$modal.destroy() }) }, - close () { + close () { fn() Vue.$modal.destroy() } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js index 598c94209e..c77127d49a 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js @@ -68,7 +68,7 @@ JSP.prototype.init = function ({ dag, instance, options }) { // Register jsplumb connection type and configuration this.JspInstance.registerConnectionType('basic', { anchor: 'Continuous', - connector: 'Straight' // Line type + connector: 'Bezier' // Line type }) // Initial configuration @@ -236,7 +236,7 @@ JSP.prototype.initNode = function (el) { filter: '.ep', anchor: 'Continuous', connectorStyle: { - stroke: '#555', + stroke: '#2d8cf0', strokeWidth: 2, outlineStroke: 'transparent', outlineWidth: 4 @@ -297,6 +297,7 @@ JSP.prototype.tasksContextmenu = function (event) { if (isOne) { // start run $('#startRunning').on('click', () => { + let name = store.state.dag.name let id = router.history.current.params.id store.dispatch('dag/getStartCheck', { processDefinitionId: id }).then(res => { let modal = Vue.$modal.dialog({ @@ -317,7 +318,8 @@ JSP.prototype.tasksContextmenu = function (event) { }, props: { item: { - id: id + id: id, + name: name }, startNodeList: $name, sourceType: 'contextmenu' @@ -330,7 +332,7 @@ JSP.prototype.tasksContextmenu = function (event) { }) }) } - if (!isTwo) { + if (!isTwo) { // edit node $(`#editNodes`).click(ev => { findComponentDownward(this.dag.$root, 'dag-chart')._createNodes({ @@ -378,7 +380,7 @@ JSP.prototype.tasksClick = function (e) { $('.w').removeClass('jtk-tasks-active') $(e.currentTarget).addClass('jtk-tasks-active') if ($connect) { - setSvgColor($connect, '#555') + setSvgColor($connect, '#2d8cf0') this.selectedElement.connect = null } this.selectedElement.id = $(e.currentTarget).attr('id') @@ -437,19 +439,19 @@ JSP.prototype.handleEventPointer = function (is) { isClick: is, isAttachment: false }) - wDom.removeClass('jtk-ep') - if (!is) { - wDom.removeClass('jtk-tasks-active') - this.selectedElement = {} - _.map($('#canvas svg'), v => { - if ($(v).attr('class')) { - _.map($(v).find('path'), v1 => { - $(v1).attr('fill', '#555') - $(v1).attr('stroke', '#555') - }) - } - }) - } + // wDom.removeClass('jtk-ep') + // if (!is) { + // wDom.removeClass('jtk-tasks-active') + // this.selectedElement = {} + // _.map($('#canvas svg'), v => { + // if ($(v).attr('class')) { + // _.map($(v).find('path'), v1 => { + // $(v1).attr('fill', '#555') + // $(v1).attr('stroke', '#555') + // }) + // } + // }) + // } } /** @@ -722,7 +724,7 @@ JSP.prototype.handleEvent = function () { } else { $(`#${sourceId}`).attr('data-nodenumber',Number($(`#${sourceId}`).attr('data-nodenumber'))+1) } - + // Storage node dependency information saveTargetarr(sourceId, targetId) @@ -764,7 +766,7 @@ JSP.prototype.jspBackfill = function ({ connects, locations, largeJson }) { source: sourceId, target: targetId, type: 'basic', - paintStyle: { strokeWidth: 2, stroke: '#555' } + paintStyle: { strokeWidth: 2, stroke: '#2d8cf0' } }) }) }) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js index 4b485fec0b..17e7faf477 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js @@ -100,7 +100,7 @@ const setSvgColor = (e, color) => { // Traverse clear all colors $('.jtk-connector').each((i, o) => { _.map($(o)[0].childNodes, v => { - $(v).attr('fill', '#555').attr('stroke', '#555').attr('stroke-width', 2) + $(v).attr('fill', '#2d8cf0').attr('stroke', '#2d8cf0').attr('stroke-width', 2) }) }) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue index 892cead413..7927976059 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue @@ -24,7 +24,7 @@
  • {{$t('Complement range')}}:{{startupParam.commandParam.complementStartDate}}-{{startupParam.commandParam.complementEndDate}}-
  • {{$t('Failure Strategy')}}:{{startupParam.failureStrategy === 'END' ? $t('End') : $t('Continue')}}
  • {{$t('Process priority')}}:{{startupParam.processInstancePriority}}
  • -
  • {{$t('Worker group')}}:{{_rtWorkerGroupName(startupParam.workerGroupId)}}
  • +
  • {{$t('Worker group')}}:{{_rtWorkerGroupName(startupParam.workerGroup)}}
  • {{$t('Notification strategy')}}:{{_rtWarningType(startupParam.warningType)}}
  • {{$t('Notification group')}}:{{_rtNotifyGroupName(startupParam.warningGroupId)}}
  • {{$t('Recipient')}}:{{startupParam.receivers || '-'}}
  • diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue index 08672eb32a..feaba86e36 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/_source/selectTenant.vue @@ -40,8 +40,8 @@ mixins: [disabledState], props: { value: { - type: Number, - default: -1 + type: String, + default: 'default' } }, model: { @@ -55,7 +55,7 @@ } }) if(!result) { - this.value = -1 + this.value = 'default' } }, methods: { diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue index bfe971c8df..b1d7a7b1e2 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/definitionDetails.vue @@ -41,7 +41,7 @@ props: {}, methods: { ...mapMutations('dag', ['resetParams', 'setIsDetails']), - ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList', 'getProcessDetails']), + ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList', 'getProcessDetails','getResourcesListJar']), ...mapActions('security', ['getTenantList','getWorkerGroupsAll']), /** * init @@ -60,6 +60,8 @@ this.getProjectList(), // get resource this.getResourcesList(), + // get jar + this.getResourcesListJar(), // get worker group list this.getWorkerGroupsAll(), this.getTenantList() diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue index 3fd4eeda28..eedf741b6e 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/index.vue @@ -40,7 +40,7 @@ props: {}, methods: { ...mapMutations('dag', ['resetParams']), - ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList']), + ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList','getResourcesListJar','getResourcesListJar']), ...mapActions('security', ['getTenantList','getWorkerGroupsAll']), /** * init @@ -55,8 +55,12 @@ this.getProcessList(), // get project this.getProjectList(), + // get jar + this.getResourcesListJar(), // get resource this.getResourcesList(), + // get jar + this.getResourcesListJar(), // get worker group list this.getWorkerGroupsAll(), this.getTenantList() diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue index 22acfba408..daa30d7c44 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/instanceDetails.vue @@ -43,7 +43,7 @@ props: {}, methods: { ...mapMutations('dag', ['setIsDetails', 'resetParams']), - ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList', 'getInstancedetail']), + ...mapActions('dag', ['getProcessList','getProjectList', 'getResourcesList', 'getInstancedetail','getResourcesListJar']), ...mapActions('security', ['getTenantList','getWorkerGroupsAll']), /** * init @@ -62,6 +62,8 @@ this.getProjectList(), // get resources this.getResourcesList(), + // get jar + this.getResourcesListJar(), // get worker group list this.getWorkerGroupsAll(), this.getTenantList() diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue index 001535b8fb..db99d00a0c 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/createDataSource.vue @@ -128,6 +128,15 @@ + + + + + \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue index b082f883fb..f7639bb959 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/rename.vue @@ -47,9 +47,9 @@ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subFileFolder/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subFileFolder/index.vue new file mode 100755 index 0000000000..9f903a127b --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subFileFolder/index.vue @@ -0,0 +1,144 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue new file mode 100755 index 0000000000..f5e801a205 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue @@ -0,0 +1,251 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/rename.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/rename.vue new file mode 100755 index 0000000000..6f7dacae89 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/rename.vue @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/index.vue new file mode 100755 index 0000000000..12be6b0bc8 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/index.vue @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/createUdfFolder/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/createUdfFolder/index.vue new file mode 100755 index 0000000000..c707ce8c90 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/createUdfFolder/index.vue @@ -0,0 +1,128 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue index 01d8d22650..1408c552db 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/createUdf.vue @@ -15,7 +15,7 @@ * limitations under the License. */