diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 510e9b0a34..0103282ae3 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -7,10 +7,7 @@ assignees: '' --- -*For better global communication, please give priority to using English description, thx! * - -*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describe an issue.* - +**For better global communication, Please describe it in English. If you feel the description in English is not clear, then you can append description in Chinese(just for Mandarin(CN)), thx! ** **Describe the bug** A clear and concise description of what the bug is. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md index 4c8ec568dd..6fdb06ea6c 100644 --- a/.github/ISSUE_TEMPLATE/question.md +++ b/.github/ISSUE_TEMPLATE/question.md @@ -7,10 +7,7 @@ assignees: '' --- -*For better global communication, please give priority to using English description, thx! * - -*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describe an issue.* - +**For better global communication, Please describe it in English. If you feel the description in English is not clear, then you can append description in Chinese(just for Mandarin(CN)), thx! ** **Describe the question** A clear and concise description of what the question is. diff --git a/.github/ISSUE_TEMPLATE/test.md b/.github/ISSUE_TEMPLATE/test.md deleted file mode 100644 index 3f8715351e..0000000000 --- a/.github/ISSUE_TEMPLATE/test.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -name: Test -about: Test to enhance the robustness of this project -title: "[Test][Module Name] Test title" -labels: test -assignees: '' - ---- - -*For better global communication, please give priority to using English description, thx! 
* - -*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describe an issue.* - -**Describe the question** -A clear and concise description of what the test part is. - -**What are the current deficiencies and the benefits of changing or adding this test** -- A clear and concise description of the current deficiencies, the benefits of changing or adding this test, and the scope involved. - -**Which version of DolphinScheduler:** - -[1.1.0-preview] - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions you've considered. \ No newline at end of file diff --git a/.github/workflows/ci_frontend.yml b/.github/workflows/ci_frontend.yml index b8e11252c8..1feaabaeb3 100644 --- a/.github/workflows/ci_frontend.yml +++ b/.github/workflows/ci_frontend.yml @@ -52,7 +52,6 @@ jobs: npm install npm run lint npm run build - License-check: runs-on: ubuntu-latest steps: @@ -68,4 +67,4 @@ jobs: with: java-version: 1.8 - name: Check - run: mvn -B apache-rat:check + run: mvn -B apache-rat:check \ No newline at end of file diff --git a/.gitignore b/.gitignore index 7a99e2e4b0..7cf1d4d7db 100644 --- a/.gitignore +++ b/.gitignore @@ -4,25 +4,27 @@ .zip .gz .DS_Store +.idea .idea/ -dist/ -all-dependencies.txt -self-modules.txt -third-party-dependencies.txt -**/target/ +.idea/* +.target +.target/ +**/**/target/** +target/* +*/target +*/target/* .settings .nbproject .classpath .project -**/*.iml +*.iml *.ipr *.iws *.tgz .*.swp -.factorypath .vim .tmp -**/node_modules +node_modules npm-debug.log .vscode logs/* @@ -39,10 +41,110 @@ dolphinscheduler-alert/logs/ dolphinscheduler-alert/src/main/resources/alert.properties_bak dolphinscheduler-alert/src/main/resources/logback.xml dolphinscheduler-server/src/main/resources/logback.xml -dolphinscheduler-ui/dist/ +dolphinscheduler-ui/dist dolphinscheduler-ui/node -dolphinscheduler-dao/src/main/resources/dao/data_source.properties 
+dolphinscheduler-ui/dist/css/common.16ac5d9.css +dolphinscheduler-ui/dist/css/home/index.b444b91.css +dolphinscheduler-ui/dist/css/login/index.5866c64.css +dolphinscheduler-ui/dist/js/0.ac94e5d.js +dolphinscheduler-ui/dist/js/0.ac94e5d.js.map +dolphinscheduler-ui/dist/js/1.0b043a3.js +dolphinscheduler-ui/dist/js/1.0b043a3.js.map +dolphinscheduler-ui/dist/js/10.1bce3dc.js +dolphinscheduler-ui/dist/js/10.1bce3dc.js.map +dolphinscheduler-ui/dist/js/11.79f04d8.js +dolphinscheduler-ui/dist/js/11.79f04d8.js.map +dolphinscheduler-ui/dist/js/12.420daa5.js +dolphinscheduler-ui/dist/js/12.420daa5.js.map +dolphinscheduler-ui/dist/js/13.e5bae1c.js +dolphinscheduler-ui/dist/js/13.e5bae1c.js.map +dolphinscheduler-ui/dist/js/14.f2a0dca.js +dolphinscheduler-ui/dist/js/14.f2a0dca.js.map +dolphinscheduler-ui/dist/js/15.45373e8.js +dolphinscheduler-ui/dist/js/15.45373e8.js.map +dolphinscheduler-ui/dist/js/16.fecb0fc.js +dolphinscheduler-ui/dist/js/16.fecb0fc.js.map +dolphinscheduler-ui/dist/js/17.84be279.js +dolphinscheduler-ui/dist/js/17.84be279.js.map +dolphinscheduler-ui/dist/js/18.307ea70.js +dolphinscheduler-ui/dist/js/18.307ea70.js.map +dolphinscheduler-ui/dist/js/19.144db9c.js +dolphinscheduler-ui/dist/js/19.144db9c.js.map +dolphinscheduler-ui/dist/js/2.8b4ef29.js +dolphinscheduler-ui/dist/js/2.8b4ef29.js.map +dolphinscheduler-ui/dist/js/20.4c527e9.js +dolphinscheduler-ui/dist/js/20.4c527e9.js.map +dolphinscheduler-ui/dist/js/21.831b2a2.js +dolphinscheduler-ui/dist/js/21.831b2a2.js.map +dolphinscheduler-ui/dist/js/22.2b4bb2a.js +dolphinscheduler-ui/dist/js/22.2b4bb2a.js.map +dolphinscheduler-ui/dist/js/23.81467ef.js +dolphinscheduler-ui/dist/js/23.81467ef.js.map +dolphinscheduler-ui/dist/js/24.54a00e4.js +dolphinscheduler-ui/dist/js/24.54a00e4.js.map +dolphinscheduler-ui/dist/js/25.8d7bd36.js +dolphinscheduler-ui/dist/js/25.8d7bd36.js.map +dolphinscheduler-ui/dist/js/26.2ec5e78.js +dolphinscheduler-ui/dist/js/26.2ec5e78.js.map +dolphinscheduler-ui/dist/js/27.3ab48c2.js 
+dolphinscheduler-ui/dist/js/27.3ab48c2.js.map +dolphinscheduler-ui/dist/js/28.363088a.js +dolphinscheduler-ui/dist/js/28.363088a.js.map +dolphinscheduler-ui/dist/js/29.6c5853a.js +dolphinscheduler-ui/dist/js/29.6c5853a.js.map +dolphinscheduler-ui/dist/js/3.a0edb5b.js +dolphinscheduler-ui/dist/js/3.a0edb5b.js.map +dolphinscheduler-ui/dist/js/30.940fdd3.js +dolphinscheduler-ui/dist/js/30.940fdd3.js.map +dolphinscheduler-ui/dist/js/31.168a460.js +dolphinscheduler-ui/dist/js/31.168a460.js.map +dolphinscheduler-ui/dist/js/32.8df6594.js +dolphinscheduler-ui/dist/js/32.8df6594.js.map +dolphinscheduler-ui/dist/js/33.4480bbe.js +dolphinscheduler-ui/dist/js/33.4480bbe.js.map +dolphinscheduler-ui/dist/js/34.b407fe1.js +dolphinscheduler-ui/dist/js/34.b407fe1.js.map +dolphinscheduler-ui/dist/js/35.f340b0a.js +dolphinscheduler-ui/dist/js/35.f340b0a.js.map +dolphinscheduler-ui/dist/js/36.8880c2d.js +dolphinscheduler-ui/dist/js/36.8880c2d.js.map +dolphinscheduler-ui/dist/js/37.ea2a25d.js +dolphinscheduler-ui/dist/js/37.ea2a25d.js.map +dolphinscheduler-ui/dist/js/38.98a59ee.js +dolphinscheduler-ui/dist/js/38.98a59ee.js.map +dolphinscheduler-ui/dist/js/39.a5e958a.js +dolphinscheduler-ui/dist/js/39.a5e958a.js.map +dolphinscheduler-ui/dist/js/4.4ca44db.js +dolphinscheduler-ui/dist/js/4.4ca44db.js.map +dolphinscheduler-ui/dist/js/40.e187b1e.js +dolphinscheduler-ui/dist/js/40.e187b1e.js.map +dolphinscheduler-ui/dist/js/41.0e89182.js +dolphinscheduler-ui/dist/js/41.0e89182.js.map +dolphinscheduler-ui/dist/js/42.341047c.js +dolphinscheduler-ui/dist/js/42.341047c.js.map +dolphinscheduler-ui/dist/js/43.27b8228.js +dolphinscheduler-ui/dist/js/43.27b8228.js.map +dolphinscheduler-ui/dist/js/44.e8869bc.js +dolphinscheduler-ui/dist/js/44.e8869bc.js.map +dolphinscheduler-ui/dist/js/45.8d54901.js +dolphinscheduler-ui/dist/js/45.8d54901.js.map +dolphinscheduler-ui/dist/js/5.e1ed7f3.js +dolphinscheduler-ui/dist/js/5.e1ed7f3.js.map +dolphinscheduler-ui/dist/js/6.241ba07.js 
+dolphinscheduler-ui/dist/js/6.241ba07.js.map +dolphinscheduler-ui/dist/js/7.ab2e297.js +dolphinscheduler-ui/dist/js/7.ab2e297.js.map +dolphinscheduler-ui/dist/js/8.83ff814.js +dolphinscheduler-ui/dist/js/8.83ff814.js.map +dolphinscheduler-ui/dist/js/9.39cb29f.js +dolphinscheduler-ui/dist/js/9.39cb29f.js.map +dolphinscheduler-ui/dist/js/common.733e342.js +dolphinscheduler-ui/dist/js/common.733e342.js.map +dolphinscheduler-ui/dist/js/home/index.78a5d12.js +dolphinscheduler-ui/dist/js/home/index.78a5d12.js.map +dolphinscheduler-ui/dist/js/login/index.291b8e3.js +dolphinscheduler-ui/dist/js/login/index.291b8e3.js.map +dolphinscheduler-ui/dist/lib/external/ +dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue +/dolphinscheduler-dao/src/main/resources/dao/data_source.properties -.mvn/wrapper/*.jar - -!/zookeeper_data/ diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar new file mode 100644 index 0000000000..2cc7d4a55c Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml deleted file mode 100644 index 6e50a1b649..0000000000 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml +++ /dev/null @@ -1,467 +0,0 @@ - - - - spring.datasource.initialSize - 5 - - Init connection number - - - int - - - - - spring.datasource.minIdle - 5 - - Min connection number - - - int - - - - - spring.datasource.maxActive - 50 - - Max connection number - - - int - - - - - spring.datasource.maxWait - 60000 - - Max wait time for get a connection in milliseconds. - If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. - If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. 
- - - int - - - - - spring.datasource.timeBetweenEvictionRunsMillis - 60000 - - Milliseconds for check to close free connections - - - int - - - - - spring.datasource.timeBetweenConnectErrorMillis - 60000 - - The Destroy thread detects the connection interval and closes the physical connection in milliseconds - if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. - - - int - - - - - spring.datasource.minEvictableIdleTimeMillis - 300000 - - The longest time a connection remains idle without being evicted, in milliseconds - - - int - - - - - spring.datasource.validationQuery - SELECT 1 - - The SQL used to check whether the connection is valid requires a query statement. - If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. - - - - - spring.datasource.validationQueryTimeout - 3 - - int - - - Check whether the connection is valid for timeout, in seconds - - - - - spring.datasource.testWhileIdle - true - - boolean - - - When applying for a connection, - if it is detected that the connection is idle longer than time Between Eviction Runs Millis, - validation Query is performed to check whether the connection is valid - - - - - spring.datasource.testOnBorrow - true - - boolean - - - Execute validation to check if the connection is valid when applying for a connection - - - - - spring.datasource.testOnReturn - false - - boolean - - - Execute validation to check if the connection is valid when the connection is returned - - - - - spring.datasource.defaultAutoCommit - true - - boolean - - - - - - - spring.datasource.keepAlive - false - - boolean - - - - - - - - spring.datasource.poolPreparedStatements - true - - boolean - - - Open PSCache, specify count PSCache for every connection - - - - - spring.datasource.maxPoolPreparedStatementPerConnectionSize - 20 - - int - - - - - - spring.datasource.spring.datasource.filters - stat,wall,log4j - - - - - spring.datasource.connectionProperties - 
druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 - - - - - - mybatis-plus.mapper-locations - classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml - - - - - mybatis-plus.typeEnumsPackage - org.apache.dolphinscheduler.*.enums - - - - - mybatis-plus.typeAliasesPackage - org.apache.dolphinscheduler.dao.entity - - Entity scan, where multiple packages are separated by a comma or semicolon - - - - - mybatis-plus.global-config.db-config.id-type - AUTO - - value-list - - - AUTO - - - - INPUT - - - - ID_WORKER - - - - UUID - - - - 1 - - - Primary key type AUTO:" database ID AUTO ", - INPUT:" user INPUT ID", - ID_WORKER:" global unique ID (numeric type unique ID)", - UUID:" global unique ID UUID"; - - - - - mybatis-plus.global-config.db-config.field-strategy - NOT_NULL - - value-list - - - IGNORED - - - - NOT_NULL - - - - NOT_EMPTY - - - - 1 - - - Field policy IGNORED:" ignore judgment ", - NOT_NULL:" not NULL judgment "), - NOT_EMPTY:" not NULL judgment" - - - - - mybatis-plus.global-config.db-config.column-underline - true - - boolean - - - - - - mybatis-plus.global-config.db-config.logic-delete-value - 1 - - int - - - - - - mybatis-plus.global-config.db-config.logic-not-delete-value - 0 - - int - - - - - - mybatis-plus.global-config.db-config.banner - true - - boolean - - - - - - - mybatis-plus.configuration.map-underscore-to-camel-case - true - - boolean - - - - - - mybatis-plus.configuration.cache-enabled - false - - boolean - - - - - - mybatis-plus.configuration.call-setters-on-nulls - true - - boolean - - - - - - mybatis-plus.configuration.jdbc-type-for-null - null - - - - - master.exec.threads - 100 - - int - - - - - - master.exec.task.num - 20 - - int - - - - - - master.heartbeat.interval - 10 - - int - - - - - - master.task.commit.retryTimes - 5 - - int - - - - - - master.task.commit.interval - 1000 - - int - - - - - - master.max.cpuload.avg - 100 - - int - - - - - - master.reserved.memory - 0.1 - - float - - - - - - worker.exec.threads - 100 - - int - - - 
- - - worker.heartbeat.interval - 10 - - int - - - - - - worker.fetch.task.num - 3 - - int - - - - - - worker.max.cpuload.avg - 100 - - int - - - - - - worker.reserved.memory - 0.1 - - float - - - - - - \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json similarity index 95% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json rename to ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json index 769245b366..184f021ac3 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/alerts.json @@ -65,7 +65,7 @@ "enabled": true, "source": { "type": "SCRIPT", - "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", "parameters": [ { @@ -98,7 +98,7 @@ "enabled": true, "source": { "type": "SCRIPT", - "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", "parameters": [ { @@ -131,7 +131,7 @@ "enabled": true, "source": { "type": "SCRIPT", - "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "path": "DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py", "parameters": [ { diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml similarity index 95% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml index 5b82230148..32abcc791d 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-alert.xml @@ -90,13 +90,6 @@ - - xls.file.path - /tmp/xls - - - - 
enterprise.wechat.enable false diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml similarity index 82% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml index ea4cb82afd..766c0f477d 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-application-api.xml @@ -34,6 +34,12 @@ + + server.servlet.context-path + /dolphinscheduler/ + + + spring.servlet.multipart.max-file-size 1024 @@ -68,4 +74,14 @@ UTF-8 + + spring.messages.basename + i18n/messages + + + + security.authentication.type + PASSWORD + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-common.xml diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml new file mode 100644 index 0000000000..02d8de0482 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-datasource.xml @@ -0,0 +1,206 @@ + + + + spring.datasource.initialSize + 5 + + Init connection number + + + int + + + + + spring.datasource.minIdle + 5 + + Min connection number + + + int + + + + + spring.datasource.maxActive + 50 + + Max connection number + + + int + + + + + spring.datasource.maxWait + 60000 + + Max wait time for get a connection in milliseconds. 
+ If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. + If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. + + + int + + + + + spring.datasource.timeBetweenEvictionRunsMillis + 60000 + + Milliseconds for check to close free connections + + + int + + + + + spring.datasource.timeBetweenConnectErrorMillis + 60000 + + The Destroy thread detects the connection interval and closes the physical connection in milliseconds + if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. + + + int + + + + + spring.datasource.minEvictableIdleTimeMillis + 300000 + + The longest time a connection remains idle without being evicted, in milliseconds + + + int + + + + + spring.datasource.validationQuery + SELECT 1 + + The SQL used to check whether the connection is valid requires a query statement. + If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. + + + + + spring.datasource.validationQueryTimeout + 3 + + int + + + Check whether the connection is valid for timeout, in seconds + + + + + spring.datasource.testWhileIdle + true + + boolean + + + When applying for a connection, + if it is detected that the connection is idle longer than time Between Eviction Runs Millis, + validation Query is performed to check whether the connection is valid + + + + + spring.datasource.testOnBorrow + true + + boolean + + + Execute validation to check if the connection is valid when applying for a connection + + + + + spring.datasource.testOnReturn + false + + boolean + + + Execute validation to check if the connection is valid when the connection is returned + + + + + spring.datasource.defaultAutoCommit + true + + boolean + + + + + + + spring.datasource.keepAlive + false + + boolean + + + + + + + + spring.datasource.poolPreparedStatements + true + + boolean + + + Open PSCache, specify count PSCache for every connection + + + + + 
spring.datasource.maxPoolPreparedStatementPerConnectionSize + 20 + + int + + + + + + spring.datasource.spring.datasource.filters + stat,wall,log4j + + + + + spring.datasource.connectionProperties + druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-env.xml diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml new file mode 100644 index 0000000000..c8eec047fc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-master.xml @@ -0,0 +1,88 @@ + + + + master.exec.threads + 100 + + int + + master execute thread num + + + + master.exec.task.num + 20 + + int + + master execute task number in parallel + + + + master.heartbeat.interval + 10 + + int + + master heartbeat interval + + + + master.task.commit.retryTimes + 5 + + int + + master commit task retry times + + + + master.task.commit.interval + 1000 + + int + + master commit task interval + + + + master.max.cpuload.avg + 100 + + int + + only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 + + + + master.reserved.memory + 0.3 + only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. 
+ + + + + master.listen.port + 5678 + + int + + master listen port + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml similarity index 91% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml index 82b59d8827..7a0c68b051 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-quartz.xml @@ -106,26 +106,21 @@ - org.quartz.jobStore.dataSource - myDs - - - - org.quartz.dataSource.myDs.connectionProvider.class - org.apache.dolphinscheduler.server.quartz.DruidConnectionProvider + org.quartz.jobStore.acquireTriggersWithinLock + true + + boolean + - org.quartz.dataSource.myDs.maxConnections - 10 - - int - + org.quartz.jobStore.dataSource + myDs - org.quartz.dataSource.myDs.validationQuery - select 1 + org.quartz.dataSource.myDs.connectionProvider.class + org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml new file mode 100644 index 0000000000..1ae7a1a765 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml @@ -0,0 +1,67 @@ + + + + worker.exec.threads + 100 + + int + + worker execute thread num + + + + worker.heartbeat.interval + 10 + + int + + worker heartbeat interval + + + + worker.max.cpuload.avg + 100 + + int + + only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 + + + + worker.reserved.memory + 0.3 + only larger than reserved memory, worker server can work. 
default value : physical memory * 1/10, unit is G. + + + + + worker.listen.port + 1234 + + int + + worker listen port + + + + worker.groups + default + default worker group + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml new file mode 100644 index 0000000000..e89962d900 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-zookeeper.xml @@ -0,0 +1,76 @@ + + + + zookeeper.dolphinscheduler.root + /dolphinscheduler + + dolphinscheduler root directory + + + + + zookeeper.session.timeout + 300 + + int + + + + + + + zookeeper.connection.timeout + 300 + + int + + + + + + + zookeeper.retry.base.sleep + 100 + + int + + + + + + + zookeeper.retry.max.sleep + 30000 + + int + + + + + + + zookeeper.retry.maxtime + 5 + + int + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml similarity index 98% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml rename to ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml index 0d2bbe3163..074306d5cb 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/metainfo.xml @@ -22,7 +22,7 @@ DOLPHIN Dolphin Scheduler 分布式易扩展的可视化DAG工作流任务调度系统 - 1.2.1 + 1.3.3 DOLPHIN_MASTER @@ -103,7 +103,7 @@ any - apache-dolphinscheduler-incubating-1.2.1* + apache-dolphinscheduler-incubating* @@ -134,4 +134,4 @@ - + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py rename to 
ambari_plugin/common-services/DOLPHIN/1.3.3/package/alerts/alert_dolphin_scheduler_status.py diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py similarity index 92% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py index 62255a3432..e78c38d272 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_alert_service.py @@ -26,7 +26,8 @@ class DolphinAlertService(Script): import params env.set_params(params) self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + Execute(('chmod', '-R', '777', params.dolphin_home)) + Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) def configure(self, env): import params diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py similarity index 93% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py index bdc18fb602..5a28924a9a 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_api_service.py @@ -26,7 +26,8 @@ class DolphinApiService(Script): import params env.set_params(params) self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + Execute(('chmod', '-R', '777', params.dolphin_home)) + Execute(('chown', 
'-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) def configure(self, env): import params diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py similarity index 78% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py index 235605894f..1661d76c75 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_env.py @@ -42,31 +42,12 @@ def dolphin_env(): create_parents=True ) - - Directory(params.dolphin_alert_map['xls.file.path'], - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) Directory(params.dolphin_common_map['data.basedir.path'], mode=0777, owner=params.dolphin_user, group=params.dolphin_group, create_parents=True ) - Directory(params.dolphin_common_map['data.download.basedir.path'], - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) - Directory(params.dolphin_common_map['process.exec.basepath'], - mode=0777, - owner=params.dolphin_user, - group=params.dolphin_group, - create_parents=True - ) File(format(params.dolphin_env_path), @@ -79,11 +60,25 @@ def dolphin_env(): File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), mode=0755, - content=Template("dolphin-daemon.j2"), + content=Template("dolphin-daemon.sh.j2"), owner=params.dolphin_user, group=params.dolphin_group ) + File(format(params.dolphin_conf_dir + "/master.properties"), + mode=0755, + content=Template("master.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/worker.properties"), + mode=0755, + content=Template("worker.properties.j2"), + owner=params.dolphin_user, + 
group=params.dolphin_group + ) + File(format(params.dolphin_conf_dir + "/alert.properties"), mode=0755, @@ -92,9 +87,9 @@ def dolphin_env(): group=params.dolphin_group ) - File(format(params.dolphin_conf_dir + "/application.properties"), + File(format(params.dolphin_conf_dir + "/datasource.properties"), mode=0755, - content=Template("application.properties.j2"), + content=Template("datasource.properties.j2"), owner=params.dolphin_user, group=params.dolphin_group ) @@ -119,3 +114,10 @@ def dolphin_env(): owner=params.dolphin_user, group=params.dolphin_group ) + + File(format(params.dolphin_conf_dir + "/zookeeper.properties"), + mode=0755, + content=Template("zookeeper.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py similarity index 92% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py index f1c19bd66f..fb47e132e1 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_logger_service.py @@ -26,8 +26,8 @@ class DolphinLoggerService(Script): import params env.set_params(params) self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) - + Execute(('chmod', '-R', '777', params.dolphin_home)) + Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) def configure(self, env): import params params.pika_slave = True diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py 
similarity index 92% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py index 6ee7ecfcf3..8d64935d26 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_master_service.py @@ -27,7 +27,8 @@ class DolphinMasterService(Script): import params env.set_params(params) self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + Execute(('chmod', '-R', '777', params.dolphin_home)) + Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) def configure(self, env): import params diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py similarity index 92% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py index 2d145ee730..1f542c06c2 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/dolphin_worker_service.py @@ -26,7 +26,8 @@ class DolphinWorkerService(Script): import params env.set_params(params) self.install_packages(env) - Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + Execute(('chmod', '-R', '777', params.dolphin_home)) + Execute(('chown', '-R', params.dolphin_user + ":" + params.dolphin_group, params.dolphin_home)) def configure(self, env): import params diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py 
b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py similarity index 68% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py index 049b2cf3ae..5a9994f559 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/params.py @@ -54,11 +54,8 @@ dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] # database config dolphin_database_config = {} dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] -dolphin_database_config['dolphin_database_host'] = dolphin_env_map['dolphin.database.host'] -dolphin_database_config['dolphin_database_port'] = dolphin_env_map['dolphin.database.port'] dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] - if 'mysql' == dolphin_database_config['dolphin_database_type']: dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' @@ -72,6 +69,10 @@ else: + ':' + dolphin_env_map['dolphin.database.port'] \ + '/dolphinscheduler' + + + + # application-alert.properties dolphin_alert_map = {} wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' @@ -79,27 +80,22 @@ wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId& wechat_team_send_msg = '{\"toparty\":\"{toParty}\",\"agentid\":\"{agentId}\",\"msgtype\":\"text\",\"text\":{\"content\":\"{msg}\"},\"safe\":\"0\"}' wechat_user_send_msg = '{\"touser\":\"{toUser}\",\"agentid\":\"{agentId}\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"{msg}\"}}' -dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url 
-dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url -dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg -dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg -dolphin_alert_map.update(config['configurations']['dolphin-alert']) +dolphin_alert_config_map = config['configurations']['dolphin-alert'] + +if dolphin_alert_config_map['enterprise.wechat.enable']: + dolphin_alert_map['enterprise.wechat.push.ur'] = wechat_push_url + dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url + dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg + dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg + +dolphin_alert_map.update(dolphin_alert_config_map) + + # application-api.properties dolphin_app_api_map = {} -dolphin_app_api_map['logging.config'] = 'classpath:apiserver_logback.xml' -dolphin_app_api_map['spring.messages.basename'] = 'i18n/messages' -dolphin_app_api_map['server.servlet.context-path'] = '/dolphinscheduler/' dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) -# application-dao.properties -dolphin_application_map = {} -dolphin_application_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' -dolphin_application_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] -dolphin_application_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] -dolphin_application_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] -dolphin_application_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] -dolphin_application_map.update(config['configurations']['dolphin-application']) # common.properties dolphin_common_map = {} @@ -118,33 +114,42 @@ else: dolphin_common_map_tmp = config['configurations']['dolphin-common'] data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] 
-process_exec_basepath = data_basedir_path + '/exec' -data_download_basedir_path = data_basedir_path + '/download' -dolphin_common_map['process.exec.basepath'] = process_exec_basepath -dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path +dolphin_common_map.update(config['configurations']['dolphin-common']) -zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) -if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: - clientPort = config['configurations']['zoo.cfg']['clientPort'] - zookeeperPort = ":" + clientPort + "," - dolphin_common_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort +# datasource.properties +dolphin_datasource_map = {} +dolphin_datasource_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' +dolphin_datasource_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] +dolphin_datasource_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] +dolphin_datasource_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] +dolphin_datasource_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] +dolphin_datasource_map.update(config['configurations']['dolphin-datasource']) -dolphin_common_map.update(config['configurations']['dolphin-common']) +# master.properties +dolphin_master_map = config['configurations']['dolphin-master'] # quartz.properties dolphin_quartz_map = {} dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass'] -dolphin_quartz_map['org.quartz.dataSource.myDs.driver'] = dolphin_database_config['dolphin_database_driver'] -dolphin_quartz_map['org.quartz.dataSource.myDs.URL'] = dolphin_database_config['dolphin_database_url'] -dolphin_quartz_map['org.quartz.dataSource.myDs.user'] = 
dolphin_database_config['dolphin_database_username'] -dolphin_quartz_map['org.quartz.dataSource.myDs.password'] = dolphin_database_config['dolphin_database_password'] dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) -# if 'ganglia_server_host' in config['clusterHostInfo'] and \ -# len(config['clusterHostInfo']['ganglia_server_host'])>0: -# ganglia_installed = True -# ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0] -# ganglia_report_interval = 60 -# else: -# ganglia_installed = False +# worker.properties +dolphin_worker_map = config['configurations']['dolphin-worker'] + +# zookeeper.properties +dolphin_zookeeper_map={} +zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) +if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: + clientPort = config['configurations']['zoo.cfg']['clientPort'] + zookeeperPort = ":" + clientPort + "," + dolphin_zookeeper_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort +dolphin_zookeeper_map.update(config['configurations']['dolphin-zookeeper']) +if 'spring.servlet.multipart.max-file-size' in dolphin_app_api_map: + file_size = dolphin_app_api_map['spring.servlet.multipart.max-file-size'] + dolphin_app_api_map['spring.servlet.multipart.max-file-size'] = file_size + "MB" +if 'spring.servlet.multipart.max-request-size' in dolphin_app_api_map: + request_size = dolphin_app_api_map['spring.servlet.multipart.max-request-size'] + dolphin_app_api_map['spring.servlet.multipart.max-request-size'] = request_size + "MB" + + diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/service_check.py diff --git 
a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/scripts/status_params.py diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2 similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/alert.properties.j2 diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2 similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/application-api.properties.j2 diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2 similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/common.properties.j2 diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 new file mode 100644 index 0000000000..40aed83543 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/datasource.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# 
contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_datasource_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 similarity index 83% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 index 1dc4bac0ab..c5cc11fb62 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/dolphin-daemon.sh.j2 @@ -48,22 +48,19 @@ pid={{dolphin_pidfile_dir}}/$command.pid cd $DOLPHINSCHEDULER_HOME if [ "$command" = "api-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/apiserver_logback.xml -Dspring.profiles.active=api" + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-api.xml -Dspring.profiles.active=api" CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer elif [ "$command" = "master-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/master_logback.xml -Ddruid.mysql.usePingMethod=false" + 
LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-master.xml -Ddruid.mysql.usePingMethod=false" CLASS=org.apache.dolphinscheduler.server.master.MasterServer elif [ "$command" = "worker-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/worker_logback.xml -Ddruid.mysql.usePingMethod=false" + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-worker.xml -Ddruid.mysql.usePingMethod=false" CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer elif [ "$command" = "alert-server" ]; then - LOG_FILE="-Dlogback.configurationFile={{dolphin_conf_dir}}/alert_logback.xml" + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/logback-alert.xml" CLASS=org.apache.dolphinscheduler.alert.AlertServer elif [ "$command" = "logger-server" ]; then - CLASS=org.apache.dolphinscheduler.server.rpc.LoggerServer -elif [ "$command" = "combined-server" ]; then - LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/combined_logback.xml -Dspring.profiles.active=api -Dserver.is-combined-server=true" - CLASS=org.apache.dolphinscheduler.api.CombinedApplicationServer + CLASS=org.apache.dolphinscheduler.server.log.LoggerServer else echo "Error: No command named \`$command' was found." exit 1 diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 new file mode 100644 index 0000000000..d9b85e14cf --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/master.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_master_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2 similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 rename to ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/quartz.properties.j2 diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 new file mode 100644 index 0000000000..a008b74084 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/worker.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +{% for key, value in dolphin_worker_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 new file mode 100644 index 0000000000..9eb14eaef3 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/package/templates/zookeeper.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +{% for key, value in dolphin_zookeeper_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json old mode 100755 new mode 100644 similarity index 100% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json rename to ambari_plugin/common-services/DOLPHIN/1.3.3/quicklinks/quicklinks.json diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json similarity index 86% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json rename to ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json index 23e46076aa..953e2323f8 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json +++ b/ambari_plugin/common-services/DOLPHIN/1.3.3/themes/theme.json @@ -151,18 +151,40 @@ "subsection-name": "env-row1-col2" }, { - "config": "dolphin-common/res.upload.startup.type", + "config": "dolphin-common/resource.storage.type", "subsection-name": "dynamic-row1-col1" }, + { + "config": "dolphin-common/resource.upload.path", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/resource.storage.type" + ], + "if": "${dolphin-common/resource.storage.type} === HDFS || ${dolphin-common/resource.storage.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, { "config": "dolphin-common/hdfs.root.user", "subsection-name": "dynamic-row1-col1", "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "if": "${dolphin-common/resource.storage.type} === HDFS", "then": { "property_value_attributes": 
{ "visible": true @@ -182,9 +204,9 @@ "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "if": "${dolphin-common/resource.storage.type} === HDFS", "then": { "property_value_attributes": { "visible": true @@ -204,9 +226,9 @@ "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "if": "${dolphin-common/resource.storage.type} === HDFS", "then": { "property_value_attributes": { "visible": true @@ -226,9 +248,9 @@ "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === S3", + "if": "${dolphin-common/resource.storage.type} === S3", "then": { "property_value_attributes": { "visible": true @@ -248,9 +270,9 @@ "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === S3", + "if": "${dolphin-common/resource.storage.type} === S3", "then": { "property_value_attributes": { "visible": true @@ -270,9 +292,9 @@ "depends-on": [ { "configs":[ - "dolphin-common/res.upload.startup.type" + "dolphin-common/resource.storage.type" ], - "if": "${dolphin-common/res.upload.startup.type} === S3", + "if": "${dolphin-common/resource.storage.type} === S3", "then": { "property_value_attributes": { "visible": true @@ -356,6 +378,28 @@ } ] }, + { + "config": "dolphin-common/kerberos.expire.time", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + 
"property_value_attributes": { + "visible": false + } + } + } + ] + }, { "config": "dolphin-alert/enterprise.wechat.enable", "subsection-name": "dynamic-row1-col3" @@ -505,11 +549,17 @@ } }, { - "config": "dolphin-common/res.upload.startup.type", + "config": "dolphin-common/resource.storage.type", "widget": { "type": "combo" } }, + { + "config": "dolphin-common/resource.upload.path", + "widget": { + "type": "text-field" + } + }, { "config": "dolphin-common/hdfs.root.user", "widget": { @@ -570,6 +620,12 @@ "type": "text-field" } }, + { + "config": "dolphin-common/kerberos.expire.time", + "widget": { + "type": "text-field" + } + }, { "config": "dolphin-alert/enterprise.wechat.enable", "widget": { diff --git a/docker/build/startup.sh b/docker/build/startup.sh index ea4fbc0cd1..0511788d48 100644 --- a/docker/build/startup.sh +++ b/docker/build/startup.sh @@ -38,13 +38,13 @@ initDatabase() { echo "connect ${DATABASE_TYPE} service" if [ ${DATABASE_TYPE} = "mysql" ]; then v=$(mysql -h${DATABASE_HOST} -P${DATABASE_PORT} -u${DATABASE_USERNAME} --password=${DATABASE_PASSWORD} -D ${DATABASE_DATABASE} -e "select 1" 2>&1) - if [ "$(echo '${v}' | grep 'ERROR' | wc -l)" -eq 1 ]; then + if [ "$(echo ${v} | grep 'ERROR' | wc -l)" -eq 1 ]; then echo "Error: Can't connect to database...${v}" exit 1 fi else v=$(sudo -u postgres PGPASSWORD=${DATABASE_PASSWORD} psql -h ${DATABASE_HOST} -p ${DATABASE_PORT} -U ${DATABASE_USERNAME} -d ${DATABASE_DATABASE} -tAc "select 1") - if [ "$(echo '${v}' | grep 'FATAL' | wc -l)" -eq 1 ]; then + if [ "$(echo ${v} | grep 'FATAL' | wc -l)" -eq 1 ]; then echo "Error: Can't connect to database...${v}" exit 1 fi diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 b/docker/kubernetes/dolphinscheduler/requirements.yaml similarity index 76% rename from ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 rename to docker/kubernetes/dolphinscheduler/requirements.yaml index 
7bb9f8aff3..e219975995 100644 --- a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 +++ b/docker/kubernetes/dolphinscheduler/requirements.yaml @@ -14,7 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -{% for key, value in dolphin_application_map.iteritems() -%} - {{key}}={{value}} -{% endfor %} \ No newline at end of file +dependencies: +- name: postgresql + version: 8.x.x + repository: https://charts.bitnami.com/bitnami + condition: postgresql.enabled +- name: zookeeper + version: 5.x.x + repository: https://charts.bitnami.com/bitnami + condition: redis.enabled \ No newline at end of file diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml index 1f9c46e5f8..930b92f855 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-dingtalk/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml index fcc1469dde..f4524b2046 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml index ce2e0c0478..5223730e3e 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-http/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin 
org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml index 6b3277fe61..8c35b3cbf6 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-script/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml index 2e729ba09d..123cebc5c5 100644 --- a/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml +++ b/dolphinscheduler-alert-plugin/dolphinscheduler-alert-wechat/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler-alert-plugin org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert-plugin/pom.xml b/dolphinscheduler-alert-plugin/pom.xml index 707f0ceafb..ede5051049 100644 --- a/dolphinscheduler-alert-plugin/pom.xml +++ b/dolphinscheduler-alert-plugin/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-alert/pom.xml b/dolphinscheduler-alert/pom.xml index ebea0e8716..c7a8118a13 100644 --- a/dolphinscheduler-alert/pom.xml +++ b/dolphinscheduler-alert/pom.xml @@ -21,7 +21,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT dolphinscheduler-alert ${project.artifactId} diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java index ea78f6b12a..5ed25cc004 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java +++ 
b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManagerTest.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.alert.utils.PropertyUtils; import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -32,6 +33,7 @@ import com.google.common.collect.ImmutableList; /** * AlertPluginManager Tester. */ +@Ignore public class AlertPluginManagerTest { private static final Logger logger = LoggerFactory.getLogger(AlertPluginManagerTest.class); diff --git a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java index b9d3d1a0ae..5c792db451 100644 --- a/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java +++ b/dolphinscheduler-alert/src/test/java/org/apache/dolphinscheduler/alert/plugin/DolphinPluginLoaderTest.java @@ -20,6 +20,7 @@ package org.apache.dolphinscheduler.alert.plugin; import org.junit.After; import org.junit.Assert; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import com.google.common.collect.ImmutableList; @@ -27,6 +28,7 @@ import com.google.common.collect.ImmutableList; /** * DolphinPluginLoader Tester. */ +@Ignore public class DolphinPluginLoaderTest { @Before diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index cd6e04fc26..3317598ac9 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -16,242 +16,256 @@ ~ limitations under the License. 
--> - - 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler - 1.3.2-SNAPSHOT - - dolphinscheduler-api - ${project.artifactId} - jar + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.3.4-SNAPSHOT + + dolphinscheduler-api + ${project.artifactId} + jar - - - org.apache.dolphinscheduler - dolphinscheduler-alert - + + + org.apache.dolphinscheduler + dolphinscheduler-alert + - - org.apache.dolphinscheduler - dolphinscheduler-dao - + + org.apache.dolphinscheduler + dolphinscheduler-dao + - - - org.springframework.boot - spring-boot-starter-web - - - org.springframework.boot - spring-boot-starter-tomcat - - - log4j-to-slf4j - org.apache.logging.log4j - - - + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + log4j-to-slf4j + org.apache.logging.log4j + + + - - - org.springframework.boot - spring-boot-starter-jetty - - - org.eclipse.jetty.websocket - javax-websocket-server-impl - - - org.eclipse.jetty.websocket - websocket-server - - - + + + org.springframework.boot + spring-boot-starter-jetty + + + org.eclipse.jetty.websocket + javax-websocket-server-impl + + + org.eclipse.jetty.websocket + websocket-server + + + - - org.springframework.boot - spring-boot-starter-aop - - - org.springframework.boot - spring-boot-starter - - - + + org.springframework.boot + spring-boot-starter-aop + + + org.springframework.boot + spring-boot-starter + + + - - org.springframework - spring-context - + + org.springframework + spring-context + - - commons-collections - commons-collections - + + commons-collections + commons-collections + - - org.quartz-scheduler - quartz - - - c3p0 - c3p0 - - - + + org.quartz-scheduler + quartz + + + com.mchange + c3p0 + + + com.mchange + mchange-commons-java + + + com.zaxxer + HikariCP-java6 + + + - - org.quartz-scheduler - quartz-jobs - + + org.quartz-scheduler + quartz-jobs + - - io.springfox - springfox-swagger2 - + + io.springfox + springfox-swagger2 + - - io.springfox 
- springfox-swagger-ui - + + io.springfox + springfox-swagger-ui + - - org.apache.dolphinscheduler - dolphinscheduler-service - + + io.swagger + swagger-models + - - com.github.xiaoymin - swagger-bootstrap-ui - + + org.apache.dolphinscheduler + dolphinscheduler-service + - - org.apache.curator - curator-framework - + + com.github.xiaoymin + swagger-bootstrap-ui + - - org.apache.curator - curator-recipes - - - org.apache.zookeeper - zookeeper - - - + + org.apache.curator + curator-framework + - - - org.apache.hadoop - hadoop-common - - - javax.servlet - servlet-api - - - org.apache.curator - curator-client - - - + + org.apache.curator + curator-recipes + + + org.apache.zookeeper + zookeeper + + + - - org.apache.hadoop - hadoop-client - - - org.slf4j - slf4j-log4j12 - - - + + + org.apache.hadoop + hadoop-common + + + javax.servlet + servlet-api + + + org.apache.curator + curator-client + + + - - org.apache.hadoop - hadoop-hdfs - - - servlet-api - javax.servlet - - - + + org.apache.hadoop + hadoop-client + + + org.slf4j + slf4j-log4j12 + + + - - org.apache.hadoop - hadoop-yarn-common - - - servlet-api - javax.servlet - - - + + org.apache.hadoop + hadoop-hdfs + + + servlet-api + javax.servlet + + + - - org.apache.hadoop - hadoop-aws - + + org.apache.hadoop + hadoop-yarn-common + + + servlet-api + javax.servlet + + + - - org.mortbay.jetty - jsp-2.1 - - - org.mortbay.jetty - servlet-api-2.5 - - - + + org.apache.hadoop + hadoop-aws + - - - org.springframework.boot - spring-boot-starter-test - test - - - org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - + + org.mortbay.jetty + jsp-2.1 + + + org.mortbay.jetty + servlet-api-2.5 + + + - - junit - junit - test - + + + org.springframework.boot + spring-boot-starter-test + test + + + org.ow2.asm + asm + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-autoconfigure + + + - - org.powermock - powermock-module-junit4 
- test - + + junit + junit + test + - - org.powermock - powermock-api-mockito2 - test - - - org.mockito - mockito-core - - - + + org.powermock + powermock-module-junit4 + test + - + + org.powermock + powermock-api-mockito2 + test + + + org.mockito + mockito-core + + + + + diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java index 3698370fbd..28a42929ca 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/AppConfiguration.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.configuration; import org.apache.dolphinscheduler.api.interceptor.LoginHandlerInterceptor; @@ -33,85 +34,78 @@ import java.util.Locale; @Configuration public class AppConfiguration implements WebMvcConfigurer { - public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; - public static final String LOGIN_PATH_PATTERN = "/login"; - public static final String REGISTER_PATH_PATTERN = "/users/register"; - public static final String PATH_PATTERN = "/**"; - public static final String LOCALE_LANGUAGE_COOKIE = "language"; - public static final int COOKIE_MAX_AGE = 3600; - - - @Bean - public LoginHandlerInterceptor loginInterceptor() { - return new LoginHandlerInterceptor(); - } - - - /** - * Cookie - * @return local resolver - */ - @Bean(name = "localeResolver") - public LocaleResolver localeResolver() { - CookieLocaleResolver localeResolver = new CookieLocaleResolver(); - localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); - /** set default locale **/ - localeResolver.setDefaultLocale(Locale.US); - /** set cookie max age **/ - 
localeResolver.setCookieMaxAge(COOKIE_MAX_AGE); - return localeResolver; - } - - @Bean - public LocaleChangeInterceptor localeChangeInterceptor() { - LocaleChangeInterceptor lci = new LocaleChangeInterceptor(); - /** **/ - lci.setParamName("language"); - - return lci; - } - - - @Override - public void addInterceptors(InterceptorRegistry registry) { - //i18n - registry.addInterceptor(localeChangeInterceptor()); - - registry.addInterceptor(loginInterceptor()).addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN).excludePathPatterns(LOGIN_PATH_PATTERN, REGISTER_PATH_PATTERN, "/swagger-resources/**", "/webjars/**", "/v2/**", "/doc.html", "*.html", "/ui/**"); - } - - - @Override - public void addResourceHandlers(ResourceHandlerRegistry registry) { - registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/"); - registry.addResourceHandler("doc.html").addResourceLocations("classpath:/META-INF/resources/"); - registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/"); - registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/"); - } - - @Override - public void addViewControllers(ViewControllerRegistry registry) { - registry.addViewController("/ui/").setViewName("forward:/ui/index.html"); - registry.addViewController("/").setViewName("forward:/ui/index.html"); - } - - @Override - public void addCorsMappings(CorsRegistry registry) { - registry.addMapping(PATH_PATTERN).allowedOrigins("*").allowedMethods("*"); - } - - - /** - * Turn off suffix-based content negotiation - * - * @param configurer configurer - */ - @Override - public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) { - configurer.favorPathExtension(false); - } - - - - + public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*"; + public static final String LOGIN_PATH_PATTERN = "/login"; + public static final String REGISTER_PATH_PATTERN = "/users/register"; + public static final String 
PATH_PATTERN = "/**"; + public static final String LOCALE_LANGUAGE_COOKIE = "language"; + public static final int COOKIE_MAX_AGE = 3600; + + @Bean + public LoginHandlerInterceptor loginInterceptor() { + return new LoginHandlerInterceptor(); + } + + /** + * Cookie + * @return local resolver + */ + @Bean(name = "localeResolver") + public LocaleResolver localeResolver() { + CookieLocaleResolver localeResolver = new CookieLocaleResolver(); + localeResolver.setCookieName(LOCALE_LANGUAGE_COOKIE); + // set default locale + localeResolver.setDefaultLocale(Locale.US); + // set cookie max age + localeResolver.setCookieMaxAge(COOKIE_MAX_AGE); + return localeResolver; + } + + @Bean + public LocaleChangeInterceptor localeChangeInterceptor() { + LocaleChangeInterceptor lci = new LocaleChangeInterceptor(); + lci.setParamName("language"); + return lci; + } + + @Override + public void addInterceptors(InterceptorRegistry registry) { + // i18n + registry.addInterceptor(localeChangeInterceptor()); + registry.addInterceptor(loginInterceptor()) + .addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN) + .excludePathPatterns(LOGIN_PATH_PATTERN, REGISTER_PATH_PATTERN, + "/swagger-resources/**", "/webjars/**", "/v2/**", + "/doc.html", "/swagger-ui.html", "*.html", "/ui/**"); + } + + @Override + public void addResourceHandlers(ResourceHandlerRegistry registry) { + registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/"); + registry.addResourceHandler("doc.html").addResourceLocations("classpath:/META-INF/resources/"); + registry.addResourceHandler("swagger-ui.html").addResourceLocations("classpath:/META-INF/resources/"); + registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/"); + registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/"); + } + + @Override + public void addViewControllers(ViewControllerRegistry registry) { + registry.addViewController("/ui/").setViewName("forward:/ui/index.html"); + 
registry.addViewController("/").setViewName("forward:/ui/index.html"); + } + + @Override + public void addCorsMappings(CorsRegistry registry) { + registry.addMapping(PATH_PATTERN).allowedOrigins("*").allowedMethods("*"); + } + + /** + * Turn off suffix-based content negotiation + * + * @param configurer configurer + */ + @Override + public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) { + configurer.favorPathExtension(false); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java index 2457177cdf..17faad04bc 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AccessTokenController.java @@ -86,7 +86,7 @@ public class AccessTokenController extends BaseController { logger.info("login user {}, create token , userId : {} , token expire time : {} , token : {}", loginUser.getUserName(), userId, expireTime, token); - Map result = accessTokenService.createToken(userId, expireTime, token); + Map result = accessTokenService.createToken(loginUser, userId, expireTime, token); return returnDataList(result); } @@ -106,7 +106,7 @@ public class AccessTokenController extends BaseController { @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime) { logger.info("login user {}, generate token , userId : {} , token expire time : {}", loginUser, userId, expireTime); - Map result = accessTokenService.generateToken(userId, expireTime); + Map result = accessTokenService.generateToken(loginUser, userId, expireTime); return returnDataList(result); } @@ -185,7 +185,7 @@ public class AccessTokenController extends BaseController { logger.info("login user {}, update token , userId : {} , token expire 
time : {} , token : {}", loginUser.getUserName(), userId, expireTime, token); - Map result = accessTokenService.updateToken(id, userId, expireTime, token); + Map result = accessTokenService.updateToken(loginUser, id, userId, expireTime, token); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java index b3a3ee55e9..cf8dc8088e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -131,8 +131,8 @@ public class AlertGroupController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(LIST_PAGING_ALERT_GROUP_ERROR) public Result listPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", loginUser.getUserName(), pageNo, searchVal, pageSize); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index 48cb53c5b2..1c13c1374c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -142,7 +142,7 @@ public class ProcessDefinitionController extends BaseController { @ApiOperation(value = "copyProcessDefinition", notes 
= "COPY_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), - @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") + @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, dataType = "Int", example = "10") }) @PostMapping(value = "/copy") @ResponseStatus(HttpStatus.OK) @@ -173,7 +173,7 @@ public class ProcessDefinitionController extends BaseController { @ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"), - @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, type = "Integer") + @ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, dataType = "Int", example = "10") }) @PostMapping(value = "/move") @ResponseStatus(HttpStatus.OK) diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index dac97bca9d..1bf5003946 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -228,21 +228,20 @@ public class ProjectController extends BaseController { } /** - * query user created project + * query authorized and user created project * * @param loginUser login user - * @return projects which the user create + * @return projects which the user create and authorized */ - @ApiOperation(value = "queryProjectCreatedByUser", notes = "QUERY_USER_CREATED_PROJECT_NOTES") - - 
@GetMapping(value = "/login-user-created-project") + @ApiOperation(value = "queryProjectCreatedAndAuthorizedByUser", notes = "QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES") + @GetMapping(value = "/created-and-authorized-project") @ResponseStatus(HttpStatus.OK) - @ApiException(QUERY_USER_CREATED_PROJECT_ERROR) - public Result queryProjectCreatedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { - logger.info("login user {}, query authorized project by user id: {}.", + @ApiException(QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR) + public Result queryProjectCreatedAndAuthorizedByUser(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser) { + logger.info("login user {}, query authorized and user created project by user id: {}.", StringUtils.replaceNRTtoUnderline(loginUser.getUserName()), StringUtils.replaceNRTtoUnderline(String.valueOf(loginUser.getId()))); - Map result = projectService.queryProjectCreatedByUser(loginUser); + Map result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index a0ec666ed7..52fd023c35 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -14,23 +14,48 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_FILE_RESOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZE_RESOURCE_TREE; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_RESOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_RESOURCE_FILE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_BY_TYPE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_PAGING; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_FILE_IS_EMPTY; +import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_NOT_EXIST; +import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_RESOURCE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_UDF_FUNCTION_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_UDF_FUNCTION_NAME_ERROR; +import static 
org.apache.dolphinscheduler.api.enums.Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.VIEW_UDF_FUNCTION_ERROR; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.ResourcesService; import org.apache.dolphinscheduler.api.service.UdfFuncService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiImplicitParam; -import io.swagger.annotations.ApiImplicitParams; -import io.swagger.annotations.ApiOperation; -import org.apache.commons.lang.StringUtils; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -38,13 +63,21 @@ import org.springframework.core.io.Resource; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.ResponseStatus; +import 
org.springframework.web.bind.annotation.RestController; import org.springframework.web.multipart.MultipartFile; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; -import static org.apache.dolphinscheduler.api.enums.Status.*; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; /** * resources controller @@ -56,37 +89,28 @@ public class ResourcesController extends BaseController { private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); - @Autowired private ResourcesService resourceService; @Autowired private UdfFuncService udfFuncService; /** - * create directory * - * @param loginUser login user - * @param alias alias - * @param description description - * @param type type - * @return create result code - */ - - /** * @param loginUser login user * @param type type * @param alias alias * @param description description * @param pid parent id * @param currentDir current directory - * @return + * @return create result code */ @ApiOperation(value = "createDirctory", notes = "CREATE_RESOURCE_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") + @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"), + @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String") }) @PostMapping(value = "/directory/create") @ApiException(CREATE_RESOURCE_ERROR) @@ -103,12 +127,13 @@ public 
class ResourcesController extends BaseController { /** * create resource - * - * @param loginUser login user - * @param alias alias - * @param description description - * @param type type - * @param file file + * @param loginUser + * @param type + * @param alias + * @param description + * @param file + * @param pid + * @param currentDir * @return create result code */ @ApiOperation(value = "createResource", notes = "CREATE_RESOURCE_NOTES") @@ -116,7 +141,9 @@ public class ResourcesController extends BaseController { @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), - @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile"), + @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"), + @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String") }) @PostMapping(value = "/create") @ApiException(CREATE_RESOURCE_ERROR) @@ -135,11 +162,12 @@ public class ResourcesController extends BaseController { /** * update resource * - * @param loginUser login user - * @param alias alias - * @param resourceId resource id - * @param type resource type + * @param loginUser login user + * @param alias alias + * @param resourceId resource id + * @param type resource type * @param description description + * @param file resource file * @return update result code */ @ApiOperation(value = "updateResource", notes = "UPDATE_RESOURCE_NOTES") @@ -147,7 +175,8 @@ public class ResourcesController extends BaseController { @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = 
"100"), @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), @ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String") + @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), + @ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile") }) @PostMapping(value = "/update") @ApiException(UPDATE_RESOURCE_ERROR) @@ -155,17 +184,18 @@ public class ResourcesController extends BaseController { @RequestParam(value = "id") int resourceId, @RequestParam(value = "type") ResourceType type, @RequestParam(value = "name") String alias, - @RequestParam(value = "description", required = false) String description) { - logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", - loginUser.getUserName(), type, alias, description); - return resourceService.updateResource(loginUser, resourceId, alias, description, type); + @RequestParam(value = "description", required = false) String description, + @RequestParam(value = "file" ,required = false) MultipartFile file) { + logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}, file: {}", + loginUser.getUserName(), type, alias, description, file); + return resourceService.updateResource(loginUser, resourceId, alias, description, type, file); } /** * query resources list * * @param loginUser login user - * @param type resource type + * @param type resource type * @return resource list */ @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES") @@ -187,16 +217,16 @@ public class ResourcesController extends BaseController { * query resources list paging * * @param loginUser login user - * @param type resource type + * @param type resource type * @param searchVal search value - * @param pageNo page number - * @param 
pageSize page size + * @param pageNo page number + * @param pageSize page size * @return resource list page */ @ApiOperation(value = "queryResourceListPaging", notes = "QUERY_RESOURCE_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "int"), + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "int", example = "10"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"), @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20") @@ -227,7 +257,7 @@ public class ResourcesController extends BaseController { /** * delete resource * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id * @return delete result code */ @@ -251,8 +281,8 @@ public class ResourcesController extends BaseController { * verify resource by alias and type * * @param loginUser login user - * @param fullName resource full name - * @param type resource type + * @param fullName resource full name + * @param type resource type * @return true if the resource name not exists, otherwise return false */ @ApiOperation(value = "verifyResourceName", notes = "VERIFY_RESOURCE_NAME_NOTES") @@ -277,10 +307,10 @@ public class ResourcesController extends BaseController { * query resources jar list * * @param loginUser login user - * @param type resource type + * @param type resource type * @return resource list */ - @ApiOperation(value = "queryResourceJarList", notes = "QUERY_RESOURCE_LIST_NOTES") + @ApiOperation(value = "queryResourceByProgramType", notes = "QUERY_RESOURCE_LIST_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType") }) @@ -288,10 
+318,14 @@ public class ResourcesController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_RESOURCES_LIST_ERROR) public Result queryResourceJarList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam(value = "type") ResourceType type + @RequestParam(value = "type") ResourceType type, + @RequestParam(value = "programType",required = false) ProgramType programType ) { - logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); - Map result = resourceService.queryResourceJarList(loginUser, type); + String programTypeName = programType == null ? "" : programType.name(); + String userName = loginUser.getUserName(); + userName = userName.replaceAll("[\n|\r|\t]", "_"); + logger.info("query resource list, login user:{}, resource type:{}, program type:{}", userName,programTypeName); + Map result = resourceService.queryResourceByProgramType(loginUser, type,programType); return returnDataList(result); } @@ -299,14 +333,16 @@ public class ResourcesController extends BaseController { * query resource by full name and type * * @param loginUser login user - * @param fullName resource full name - * @param type resource type + * @param fullName resource full name + * @param type resource type + * @param id resource id * @return true if the resource name not exists, otherwise return false */ @ApiOperation(value = "queryResource", notes = "QUERY_BY_RESOURCE_NAME") @ApiImplicitParams({ @ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"), - @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String") + @ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = false, dataType = "Int", example = "10") }) @GetMapping(value = "/queryResource") 
@ResponseStatus(HttpStatus.OK) @@ -325,10 +361,10 @@ public class ResourcesController extends BaseController { /** * view resource file online * - * @param loginUser login user - * @param resourceId resource id + * @param loginUser login user + * @param resourceId resource id * @param skipLineNum skip line number - * @param limit limit + * @param limit limit * @return resource content */ @ApiOperation(value = "viewResource", notes = "VIEW_RESOURCE_BY_ID_NOTES") @@ -352,13 +388,14 @@ public class ResourcesController extends BaseController { /** * create resource file online - * - * @param loginUser login user - * @param type resource type - * @param fileName file name - * @param fileSuffix file suffix - * @param description description - * @param content content + * @param loginUser + * @param type + * @param fileName + * @param fileSuffix + * @param description + * @param content + * @param pid + * @param currentDir * @return create result code */ @ApiOperation(value = "onlineCreateResource", notes = "ONLINE_CREATE_RESOURCE_NOTES") @@ -367,7 +404,9 @@ public class ResourcesController extends BaseController { @ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType = "String"), @ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"), - @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String") + @ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String"), + @ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"), + @ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String") }) @PostMapping(value = "/online-create") @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR) @@ -392,9 +431,9 @@ public class ResourcesController extends BaseController { /** * edit 
resource file online * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id - * @param content content + * @param content content * @return update result code */ @ApiOperation(value = "updateResourceContent", notes = "UPDATE_RESOURCE_NOTES") @@ -420,7 +459,7 @@ public class ResourcesController extends BaseController { /** * download resource file * - * @param loginUser login user + * @param loginUser login user * @param resourceId resource id * @return resource content */ @@ -449,14 +488,14 @@ public class ResourcesController extends BaseController { /** * create udf function * - * @param loginUser login user - * @param type udf type - * @param funcName function name - * @param argTypes argument types - * @param database database + * @param loginUser login user + * @param type udf type + * @param funcName function name + * @param argTypes argument types + * @param database database * @param description description - * @param className class name - * @param resourceId resource id + * @param className class name + * @param resourceId resource id * @return create result code */ @ApiOperation(value = "createUdfFunc", notes = "CREATE_UDF_FUNCTION_NOTES") @@ -490,12 +529,12 @@ public class ResourcesController extends BaseController { * view udf function * * @param loginUser login user - * @param id resource id + * @param id resource id * @return udf function detail */ @ApiOperation(value = "viewUIUdfFunction", notes = "VIEW_UDF_FUNCTION_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") + @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @GetMapping(value = "/udf-func/update-ui") @@ -512,26 +551,27 @@ public class ResourcesController extends BaseController { /** * update udf function * - * @param loginUser login user - * @param type resource type - * @param funcName function 
name - * @param argTypes argument types - * @param database data base + * @param loginUser login user + * @param type resource type + * @param funcName function name + * @param argTypes argument types + * @param database data base * @param description description - * @param resourceId resource id - * @param className class name - * @param udfFuncId udf function id + * @param resourceId resource id + * @param className class name + * @param udfFuncId udf function id * @return update result code */ @ApiOperation(value = "updateUdfFunc", notes = "UPDATE_UDF_FUNCTION_NOTES") @ApiImplicitParams({ + @ApiImplicitParam(name = "id", value = "UDF_ID", required = true, dataType = "Int"), @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"), @ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"), - @ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType = "String"), + @ApiImplicitParam(name = "className", value = "CLASS_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"), @ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"), @ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"), - @ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") + @ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100") }) @PostMapping(value = "/udf-func/update") @@ -556,8 +596,8 @@ public class ResourcesController extends BaseController { * * @param loginUser login user * @param searchVal search value - * @param pageNo page number - * @param pageSize page size + * @param pageNo page number + * @param pageSize page size * @return udf function list page */ @ApiOperation(value = "queryUdfFuncListPaging", notes = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES") @@ -569,7 +609,7 
@@ public class ResourcesController extends BaseController { @GetMapping(value = "/udf-func/list-paging") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR) - public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + public Result queryUdfFuncListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam("pageSize") Integer pageSize @@ -586,23 +626,25 @@ public class ResourcesController extends BaseController { } /** - * query resource list by type + * query udf func list by type * * @param loginUser login user - * @param type resource type + * @param type resource type * @return resource list */ - @ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES") + @ApiOperation(value = "queryUdfFuncList", notes = "QUERY_UDF_FUNC_LIST_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType") }) @GetMapping(value = "/udf-func/list") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR) - public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + public Result queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("type") UdfType type) { - logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type); - Map result = udfFuncService.queryResourceList(loginUser, type.ordinal()); + String userName = loginUser.getUserName(); + userName = userName.replaceAll("[\n|\r|\t]", "_"); + logger.info("query udf func list, user:{}, type:{}", userName, type); + Map result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal()); return returnDataList(result); } @@ -610,7 +652,7 @@ public class ResourcesController extends 
BaseController { * verify udf function name can use or not * * @param loginUser login user - * @param name name + * @param name name * @return true if the name can user, otherwise return false */ @ApiOperation(value = "verifyUdfFuncName", notes = "VERIFY_UDF_FUNCTION_NAME_NOTES") @@ -655,7 +697,7 @@ public class ResourcesController extends BaseController { * authorized file resource list * * @param loginUser login user - * @param userId user id + * @param userId user id * @return authorized result */ @ApiOperation(value = "authorizedFile", notes = "AUTHORIZED_FILE_NOTES") @@ -677,7 +719,7 @@ public class ResourcesController extends BaseController { * unauthorized file resource list * * @param loginUser login user - * @param userId user id + * @param userId user id * @return unauthorized result code */ @ApiOperation(value = "authorizeResourceTree", notes = "AUTHORIZE_RESOURCE_TREE_NOTES") @@ -699,7 +741,7 @@ public class ResourcesController extends BaseController { * unauthorized udf function * * @param loginUser login user - * @param userId user id + * @param userId user id * @return unauthorized result code */ @ApiOperation(value = "unauthUDFFunc", notes = "UNAUTHORIZED_UDF_FUNC_NOTES") @@ -722,7 +764,7 @@ public class ResourcesController extends BaseController { * authorized udf function * * @param loginUser login user - * @param userId user id + * @param userId user id * @return authorized result code */ @ApiOperation(value = "authUDFFunc", notes = "AUTHORIZED_UDF_FUNC_NOTES") diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java index f55492a69e..d2322bba48 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java @@ -14,8 +14,18 @@ 
* See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.CREATE_SCHEDULE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.OFFLINE_SCHEDULE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.PREVIEW_SCHEDULE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.PUBLISH_SCHEDULE_ONLINE_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_SCHEDULE_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_SCHEDULE_ERROR; +import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.SchedulerService; @@ -26,19 +36,28 @@ import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.*; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.io.IOException; -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import 
org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.*; -import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; /** * schedule controller @@ -61,17 +80,15 @@ public class SchedulerController extends BaseController { /** * create schedule * - * @param loginUser login user - * @param projectName project name - * @param processDefinitionId process definition id - * @param schedule scheduler - * @param warningType warning type - * @param warningGroupId warning group id - * @param failureStrategy failure strategy + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process definition id + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy * @param processInstancePriority process instance priority - * @param receivers receivers - * @param receiversCc receivers cc - * @param workerGroup worker group + * @param workerGroup worker group * @return create result code */ @ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES") @@ -81,8 +98,6 @@ public class SchedulerController extends BaseController { @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = 
"FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @@ -96,16 +111,14 @@ public class SchedulerController extends BaseController { @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, - @RequestParam(value = "receivers", required = false) String receivers, - @RequestParam(value = "receiversCc", required = false) String receiversCc, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, - @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) throws IOException { - logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + - "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}", + @RequestParam(value = "processInstancePriority", required = false, defaultValue = DEFAULT_PROCESS_INSTANCE_PRIORITY) Priority processInstancePriority) { + logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + + "failure policy: {},processInstancePriority : {}, workGroupId:{}", loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, - failureStrategy, receivers, receiversCc, 
processInstancePriority, workerGroup); + failureStrategy, processInstancePriority, workerGroup); Map result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup); + warningType, warningGroupId, failureStrategy, processInstancePriority, workerGroup); return returnDataList(result); } @@ -113,17 +126,15 @@ public class SchedulerController extends BaseController { /** * updateProcessInstance schedule * - * @param loginUser login user - * @param projectName project name - * @param id scheduler id - * @param schedule scheduler - * @param warningType warning type - * @param warningGroupId warning group id - * @param failureStrategy failure strategy - * @param receivers receivers - * @param workerGroup worker group + * @param loginUser login user + * @param projectName project name + * @param id scheduler id + * @param schedule scheduler + * @param warningType warning type + * @param warningGroupId warning group id + * @param failureStrategy failure strategy + * @param workerGroup worker group * @param processInstancePriority process instance priority - * @param receiversCc receivers cc * @return update result code */ @ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES") @@ -133,8 +144,6 @@ public class SchedulerController extends BaseController { @ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"), @ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"), - @ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"), - @ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"), @ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"), 
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"), }) @@ -147,26 +156,24 @@ public class SchedulerController extends BaseController { @RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, @RequestParam(value = "warningGroupId", required = false) int warningGroupId, @RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, - @RequestParam(value = "receivers", required = false) String receivers, - @RequestParam(value = "receiversCc", required = false) String receiversCc, @RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup, - @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException { - logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + - "failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}", + @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { + logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + + "failure policy: {},processInstancePriority : {},workerGroupId:{}", loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy, - receivers, receiversCc, processInstancePriority, workerGroup); + processInstancePriority, workerGroup); Map result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, - warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroup); + warningType, warningGroupId, failureStrategy, null, processInstancePriority, workerGroup); return returnDataList(result); } /** * publish schedule setScheduleState * - * @param 
loginUser login user + * @param loginUser login user * @param projectName project name - * @param id scheduler id + * @param id scheduler id * @return publish result code */ @ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES") @@ -176,7 +183,7 @@ public class SchedulerController extends BaseController { @PostMapping("/online") @ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR) public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("id") Integer id) { logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}", loginUser.getUserName(), projectName, id); @@ -187,9 +194,9 @@ public class SchedulerController extends BaseController { /** * offline schedule * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param id schedule id + * @param id schedule id * @return operation result code */ @ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES") @@ -199,7 +206,7 @@ public class SchedulerController extends BaseController { @PostMapping("/offline") @ApiException(OFFLINE_SCHEDULE_ERROR) public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, - @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName, + @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam("id") Integer id) { logger.info("login user {}, schedule offline, project name: {}, process definition id: {}", loginUser.getUserName(), projectName, id); @@ -211,12 +218,12 @@ public class SchedulerController extends BaseController { /** * query schedule list paging * - * @param loginUser login user 
- * @param projectName project name + * @param loginUser login user + * @param projectName project name * @param processDefinitionId process definition id - * @param pageNo page number - * @param pageSize page size - * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @param searchVal search value * @return schedule list page */ @ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES") @@ -245,9 +252,9 @@ public class SchedulerController extends BaseController { /** * delete schedule by id * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param scheduleId scheule id + * @param scheduleId scheule id * @return delete result code */ @ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES") @@ -270,7 +277,7 @@ public class SchedulerController extends BaseController { /** * query schedule list * - * @param loginUser login user + * @param loginUser login user * @param projectName project name * @return schedule list */ @@ -288,9 +295,9 @@ public class SchedulerController extends BaseController { /** * preview schedule * - * @param loginUser login user + * @param loginUser login user * @param projectName project name - * @param schedule schedule expression + * @param schedule schedule expression * @return the next five fire time */ @ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES") diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index c0ad88f481..56e7ef2087 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -14,8 +14,10 @@ * See the License 
for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR; import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TaskInstanceService; @@ -23,18 +25,29 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; -import io.swagger.annotations.*; + +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; -import org.springframework.web.bind.annotation.*; -import springfox.documentation.annotations.ApiIgnore; - -import java.util.Map; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; -import static org.apache.dolphinscheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import springfox.documentation.annotations.ApiIgnore; /** * task instance controller @@ -69,6 +82,7 @@ public class TaskInstanceController extends 
BaseController { @ApiOperation(value = "queryTaskListPaging", notes = "QUERY_TASK_INSTANCE_LIST_PAGING_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = false, dataType = "Int", example = "100"), + @ApiImplicitParam(name = "processInstanceName", value = "PROCESS_INSTANCE_NAME", required = false, type = "String"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type = "String"), @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type = "String"), @@ -85,6 +99,7 @@ public class TaskInstanceController extends BaseController { public Result queryTaskListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, + @RequestParam(value = "processInstanceName", required = false) String processInstanceName, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "taskName", required = false) String taskName, @RequestParam(value = "executorName", required = false) String executorName, @@ -95,11 +110,20 @@ public class TaskInstanceController extends BaseController { @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { - logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}", - projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime); + logger.info("query task instance list, projectName:{}, processInstanceId:{}, processInstanceName:{}, search value:{}, taskName:{}, executorName: {}, stateType:{}, host:{}, start:{}, end:{}", + 
StringUtils.replaceNRTtoUnderline(projectName), + processInstanceId, + StringUtils.replaceNRTtoUnderline(processInstanceName), + StringUtils.replaceNRTtoUnderline(searchVal), + StringUtils.replaceNRTtoUnderline(taskName), + StringUtils.replaceNRTtoUnderline(executorName), + stateType, + StringUtils.replaceNRTtoUnderline(host), + StringUtils.replaceNRTtoUnderline(startTime), + StringUtils.replaceNRTtoUnderline(endTime)); searchVal = ParameterUtils.handleEscapes(searchVal); Map result = taskInstanceService.queryTaskListPaging( - loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + loginUser, projectName, processInstanceId, processInstanceName, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); return returnDataListPaging(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java index 2676a774e7..9b9790fe23 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TenantController.java @@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.api.exceptions.ApiException; import org.apache.dolphinscheduler.api.service.TenantService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.entity.User; @@ -72,7 +73,6 @@ public class TenantController extends BaseController { * * @param loginUser login user * @param tenantCode tenant code - * @param tenantName tenant name * @param queueId queue id * @param description description * @return 
create result code @@ -80,7 +80,6 @@ public class TenantController extends BaseController { @ApiOperation(value = "createTenant", notes = "CREATE_TENANT_NOTES") @ApiImplicitParams({ @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "description", value = "TENANT_DESC", dataType = "String") @@ -90,12 +89,13 @@ public class TenantController extends BaseController { @ApiException(CREATE_TENANT_ERROR) public Result createTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, @RequestParam(value = "queueId") int queueId, @RequestParam(value = "description", required = false) String description) throws Exception { - logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId, description); - Map result = tenantService.createTenant(loginUser, tenantCode, tenantName, queueId, description); + String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); + String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); + String descReplace = StringUtils.replaceNRTtoUnderline(description); + logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace); + Map result = tenantService.createTenant(loginUser, tenantCode, queueId, description); return returnDataList(result); } @@ -119,8 +119,8 @@ public class TenantController extends BaseController { @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_TENANT_LIST_PAGING_ERROR) public Result 
queryTenantlistPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, - @RequestParam("pageNo") Integer pageNo, @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam("pageNo") Integer pageNo, @RequestParam("pageSize") Integer pageSize) { logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", loginUser.getUserName(), pageNo, searchVal, pageSize); @@ -157,7 +157,6 @@ public class TenantController extends BaseController { * @param loginUser login user * @param id tennat id * @param tenantCode tennat code - * @param tenantName tennat name * @param queueId queue id * @param description description * @return update result code @@ -166,7 +165,6 @@ public class TenantController extends BaseController { @ApiImplicitParams({ @ApiImplicitParam(name = "ID", value = "TENANT_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "tenantCode", value = "TENANT_CODE", required = true, dataType = "String"), - @ApiImplicitParam(name = "tenantName", value = "TENANT_NAME", required = true, dataType = "String"), @ApiImplicitParam(name = "queueId", value = "QUEUE_ID", required = true, dataType = "Int", example = "100"), @ApiImplicitParam(name = "description", value = "TENANT_DESC", type = "String") @@ -177,12 +175,13 @@ public class TenantController extends BaseController { public Result updateTenant(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "id") int id, @RequestParam(value = "tenantCode") String tenantCode, - @RequestParam(value = "tenantName") String tenantName, @RequestParam(value = "queueId") int queueId, @RequestParam(value = "description", required = false) String description) throws Exception { - logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, description: {}", - loginUser.getUserName(), tenantCode, tenantName, queueId, description); - Map result = 
tenantService.updateTenant(loginUser, id, tenantCode, tenantName, queueId, description); + String userReplace = StringUtils.replaceNRTtoUnderline(loginUser.getUserName()); + String tenantCodeReplace = StringUtils.replaceNRTtoUnderline(tenantCode); + String descReplace = StringUtils.replaceNRTtoUnderline(description); + logger.info("login user {}, create tenant, tenantCode: {}, queueId: {}, desc: {}", userReplace, tenantCodeReplace, queueId, descReplace); + Map result = tenantService.updateTenant(loginUser, id, tenantCode, queueId, description); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index 8d6f9fc820..b63e201e60 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -72,9 +72,9 @@ public class UsersController extends BaseController { @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"), @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"), @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "String"), + @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "String"), + @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "String"), @ApiImplicitParam(name = "state", value = "STATE", dataType = "Int", example = "1") }) @PostMapping(value = "/create") @@ -105,8 +105,8 @@ 
public class UsersController extends BaseController { */ @ApiOperation(value = "queryUserList", notes = "QUERY_USER_LIST_NOTES") @ApiImplicitParams({ - @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", type = "String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "10"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String") }) @GetMapping(value = "/list-paging") @@ -114,8 +114,8 @@ public class UsersController extends BaseController { @ApiException(QUERY_USER_LIST_PAGING_ERROR) public Result queryUserList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("pageNo") Integer pageNo, - @RequestParam(value = "searchVal", required = false) String searchVal, - @RequestParam("pageSize") Integer pageSize) { + @RequestParam("pageSize") Integer pageSize, + @RequestParam(value = "searchVal", required = false) String searchVal) { logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}", loginUser.getUserName(), pageNo, searchVal, pageSize); Map result = checkPageParams(pageNo, pageSize); @@ -147,9 +147,9 @@ public class UsersController extends BaseController { @ApiImplicitParam(name = "userName", value = "USER_NAME", type = "String"), @ApiImplicitParam(name = "userPassword", value = "USER_PASSWORD", type = "String"), @ApiImplicitParam(name = "tenantId", value = "TENANT_ID", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "email", value = "EMAIL", dataType = "Int", example = "100"), - @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "Int", example = "100"), + @ApiImplicitParam(name = "queue", value = "QUEUE", dataType = "String"), + 
@ApiImplicitParam(name = "email", value = "EMAIL", dataType = "String"), + @ApiImplicitParam(name = "phone", value = "PHONE", dataType = "String"), @ApiImplicitParam(name = "state", value = "STATE", dataType = "Int", example = "1") }) @PostMapping(value = "/update") @@ -166,7 +166,7 @@ public class UsersController extends BaseController { @RequestParam(value = "state", required = false) int state) throws Exception { logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, user queue: {}, state: {}", loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone, queue, state); - Map result = usersService.updateUser(id, userName, userPassword, email, tenantId, phone, queue, state); + Map result = usersService.updateUser(loginUser, id, userName, userPassword, email, tenantId, phone, queue, state); return returnDataList(result); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java index 895d2cd8da..8468a65cdb 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java @@ -16,11 +16,14 @@ */ package org.apache.dolphinscheduler.api.controller; +import io.swagger.annotations.ApiParam; + import org.apache.dolphinscheduler.api.service.WorkFlowLineageService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import io.swagger.annotations.ApiParam; +import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; @@ -34,6 +37,7 @@ import java.util.Map; import java.util.Set; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_WORKFLOW_LINEAGE_ERROR; +import static org.apache.dolphinscheduler.common.Constants.SESSION_USER; @RestController @RequestMapping("lineages/{projectId}") @@ -45,7 +49,9 @@ public class WorkFlowLineageController extends BaseController { @GetMapping(value="/list-name") @ResponseStatus(HttpStatus.OK) - public Result> queryWorkFlowLineageByName(@ApiIgnore @RequestParam(value = "searchVal", required = false) String searchVal, @ApiParam(name = "projectId", value = "PROJECT_ID", required = true) @PathVariable int projectId) { + public Result> queryWorkFlowLineageByName(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, + @ApiIgnore @RequestParam(value = "searchVal", required = false) String searchVal) { try { searchVal = ParameterUtils.handleEscapes(searchVal); Map result = workFlowLineageService.queryWorkFlowLineageByName(searchVal,projectId); @@ -58,7 +64,9 @@ public class WorkFlowLineageController extends BaseController { @GetMapping(value="/list-ids") @ResponseStatus(HttpStatus.OK) - public Result> queryWorkFlowLineageByIds(@ApiIgnore @RequestParam(value = "ids", required = false) String ids,@ApiParam(name = "projectId", value = "PROJECT_ID", required = true) @PathVariable int projectId) { + public Result> queryWorkFlowLineageByIds(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser, + @ApiParam(name = "projectId", value = "PROJECT_ID", required = true, example = "1") @PathVariable int projectId, + @ApiIgnore @RequestParam(value = "ids", required = false) String ids) { try { ids = ParameterUtils.handleEscapes(ids); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/CommandStateCount.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/CommandStateCount.java index 3c3c31bfce..d116e46578 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/CommandStateCount.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/CommandStateCount.java @@ -57,4 +57,32 @@ public class CommandStateCount { public void setCommandState(CommandType commandState) { this.commandState = commandState; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CommandStateCount that = (CommandStateCount) o; + + if (errorCount != that.errorCount) { + return false; + } + if (normalCount != that.normalCount) { + return false; + } + return commandState == that.commandState; + } + + @Override + public int hashCode() { + int result = errorCount; + result = 31 * result + normalCount; + result = 31 * result + (commandState != null ? 
commandState.hashCode() : 0); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java index 35aaaf34dd..6f42ba9f56 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java @@ -19,8 +19,10 @@ package org.apache.dolphinscheduler.api.dto; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; -import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; /** * task count dto @@ -43,77 +45,16 @@ public class TaskCountDto { } private void countTaskDtos(List taskInstanceStateCounts) { - int submittedSuccess = 0; - int runningExecution = 0; - int delayExecution = 0; - int readyPause = 0; - int pause = 0; - int readyStop = 0; - int stop = 0; - int failure = 0; - int success = 0; - int needFaultTolerance = 0; - int kill = 0; - int waittingThread = 0; + Map statusCountMap = taskInstanceStateCounts.stream() + .collect(Collectors.toMap(ExecuteStatusCount::getExecutionStatus, ExecuteStatusCount::getCount, Integer::sum)); - for (ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts) { - ExecutionStatus status = taskInstanceStateCount.getExecutionStatus(); - totalCount += taskInstanceStateCount.getCount(); - switch (status) { - case SUBMITTED_SUCCESS: - submittedSuccess += taskInstanceStateCount.getCount(); - break; - case RUNNING_EXECUTION: - runningExecution += taskInstanceStateCount.getCount(); - break; - case DELAY_EXECUTION: - delayExecution += taskInstanceStateCount.getCount(); - break; - case READY_PAUSE: - readyPause += taskInstanceStateCount.getCount(); - break; - case PAUSE: - pause += 
taskInstanceStateCount.getCount(); - break; - case READY_STOP: - readyStop += taskInstanceStateCount.getCount(); - break; - case STOP: - stop += taskInstanceStateCount.getCount(); - break; - case FAILURE: - failure += taskInstanceStateCount.getCount(); - break; - case SUCCESS: - success += taskInstanceStateCount.getCount(); - break; - case NEED_FAULT_TOLERANCE: - needFaultTolerance += taskInstanceStateCount.getCount(); - break; - case KILL: - kill += taskInstanceStateCount.getCount(); - break; - case WAITTING_THREAD: - waittingThread += taskInstanceStateCount.getCount(); - break; + taskCountDtos = Arrays.stream(ExecutionStatus.values()) + .map(status -> new TaskStateCount(status, statusCountMap.getOrDefault(status, 0))) + .collect(Collectors.toList()); - default: - break; - } - } - this.taskCountDtos = new ArrayList<>(); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submittedSuccess)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXECUTION, runningExecution)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.DELAY_EXECUTION, delayExecution)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, readyPause)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, readyStop)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.STOP, stop)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.FAILURE, failure)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUCCESS, success)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, needFaultTolerance)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.KILL, kill)); - this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waittingThread)); + totalCount = taskCountDtos.stream() + .mapToInt(TaskStateCount::getCount) + .sum(); } public List getTaskCountDtos() 
{ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskStateCount.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskStateCount.java index a2fe348e40..460ea7da4b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskStateCount.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskStateCount.java @@ -47,4 +47,28 @@ public class TaskStateCount { public void setTaskStateType(ExecutionStatus taskStateType) { this.taskStateType = taskStateType; } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TaskStateCount that = (TaskStateCount) o; + + if (count != that.count) { + return false; + } + return taskStateType == that.taskStateType; + } + + @Override + public int hashCode() { + int result = count; + result = 31 * result + (taskStateType != null ? taskStateType.hashCode() : 0); + return result; + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index ca85d60829..e05c0973e9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -36,7 +36,7 @@ public enum Status { USER_NAME_NULL(10004, "user name is null", "用户名不能为空"), HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"), TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"), - TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"), + TENANT_CODE_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"), USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"), ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"), ALERT_GROUP_EXIST(10012, "alarm group 
already exists", "告警组名称已存在"), @@ -133,7 +133,7 @@ public enum Status { QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"), DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"), CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"), - VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"), + VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流定义名称已存在"), UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"), RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"), QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"), @@ -193,13 +193,13 @@ public enum Status { BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"), BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"), QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"), - DELETE_PROCESS_DEFINITION_BY_ID_FAIL(10162, "delete process definition by id fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"), - CHECK_TENANT_CODE_ERROR(10163, "Please enter the English tenant code", "请输入英文租户编码"), - - CREATE_ALERT_PLUGIN_INSTANCE_ERROR(10164, "create alert plugin instance error", "创建告警组和告警组插件实例错误"), + QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_ERROR(10162, "query authorized and user created project error error", "查询授权的和用户创建的项目错误"), + DELETE_PROCESS_DEFINITION_BY_ID_FAIL(10163,"delete process definition by id fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"), + CHECK_TENANT_CODE_ERROR(10164, "Please enter the English tenant code", "请输入英文租户编码"), UPDATE_ALERT_PLUGIN_INSTANCE_ERROR(10165, "update alert plugin instance 
error", "更新告警组和告警组插件实例错误"), DELETE_ALERT_PLUGIN_INSTANCE_ERROR(10166, "delete alert plugin instance error", "删除告警组和告警组插件实例错误"), GET_ALERT_PLUGIN_INSTANCE_ERROR(10167, "get alert plugin instance error", "获取告警组和告警组插件实例错误"), + CREATE_ALERT_PLUGIN_INSTANCE_ERROR(10168, "create alert plugin instance error", "创建告警组和告警组插件实例错误"), UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/AuthenticationType.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/AuthenticationType.java index 5610a2285a..ecf1d4f8ea 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/AuthenticationType.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/AuthenticationType.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.security; import com.baomidou.mybatisplus.annotation.EnumValue; @@ -24,6 +25,7 @@ import com.baomidou.mybatisplus.annotation.EnumValue; public enum AuthenticationType { PASSWORD(0, "verify via user name and password"), + LDAP(1, "verify via LDAP server"), ; AuthenticationType(int code, String desc) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/Authenticator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/Authenticator.java index 8de1c7f407..99769b840b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/Authenticator.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/Authenticator.java @@ -14,13 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.security; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.dao.entity.User; -import javax.servlet.http.HttpServletRequest; + import java.util.Map; +import javax.servlet.http.HttpServletRequest; + public interface Authenticator { /** * Verifying legality via username and password diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java index 823c9bdfba..cb0d0646a1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java @@ -14,9 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.security; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.api.security.impl.ldap.LdapAuthenticator; +import org.apache.dolphinscheduler.api.security.impl.pwd.PasswordAuthenticator; +import org.apache.dolphinscheduler.common.utils.StringUtils; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -58,6 +62,9 @@ public class SecurityConfig { case PASSWORD: authenticator = new PasswordAuthenticator(); break; + case LDAP: + authenticator = new LdapAuthenticator(); + break; default: throw new IllegalStateException("Unexpected value: " + authenticationType); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java similarity index 80% rename from 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticator.java rename to dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java index 69930fc9d7..837e1f1f79 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticator.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java @@ -14,9 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.security; + +package org.apache.dolphinscheduler.api.security.impl; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.security.Authenticator; import org.apache.dolphinscheduler.api.service.SessionService; import org.apache.dolphinscheduler.api.service.UsersService; import org.apache.dolphinscheduler.api.utils.Result; @@ -24,26 +26,38 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Collections; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import javax.servlet.http.HttpServletRequest; -import java.util.Collections; -import java.util.Map; -public class PasswordAuthenticator implements Authenticator { - private static final Logger logger = LoggerFactory.getLogger(PasswordAuthenticator.class); +public abstract class AbstractAuthenticator implements Authenticator { + private static final Logger logger = LoggerFactory.getLogger(AbstractAuthenticator.class); @Autowired private UsersService userService; @Autowired private SessionService sessionService; + /** + * 
user login and return user in db + * + * @param userId user identity field + * @param password user login password + * @param extra extra user login field + * @return user object in databse + */ + public abstract User login(String userId, String password, String extra); + @Override - public Result> authenticate(String username, String password, String extra) { + public Result> authenticate(String userId, String password, String extra) { Result> result = new Result<>(); - // verify username and password - User user = userService.queryUser(username, password); + User user = login(userId, password, extra); if (user == null) { result.setCode(Status.USER_NAME_PASSWD_ERROR.getCode()); result.setMsg(Status.USER_NAME_PASSWD_ERROR.getMsg()); @@ -64,7 +78,7 @@ public class PasswordAuthenticator implements Authenticator { result.setMsg(Status.LOGIN_SESSION_FAILED.getMsg()); return result; } - logger.info("sessionId : {}" , sessionId); + logger.info("sessionId : {}", sessionId); result.setData(Collections.singletonMap(Constants.SESSION_ID, sessionId)); result.setCode(Status.SUCCESS.getCode()); result.setMsg(Status.LOGIN_SUCCESS.getMsg()); @@ -81,4 +95,5 @@ public class PasswordAuthenticator implements Authenticator { //get user object from session return userService.queryUser(session.getUserId()); } + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticator.java new file mode 100644 index 0000000000..1604b79b6a --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticator.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.security.impl.ldap; + +import org.apache.dolphinscheduler.api.security.impl.AbstractAuthenticator; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.dao.entity.User; + +import org.springframework.beans.factory.annotation.Autowired; + +public class LdapAuthenticator extends AbstractAuthenticator { + @Autowired + private UsersService usersService; + @Autowired + LdapService ldapService; + + @Override + public User login(String userId, String password, String extra) { + User user = null; + String ldapEmail = ldapService.ldapLogin(userId, password); + if (ldapEmail != null) { + //check if user exist + user = usersService.getUserByUserName(userId); + if (user == null) { + user = usersService.createUser(ldapService.getUserType(userId), userId, ldapEmail); + } + } + return user; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java new file mode 100644 index 0000000000..9f4fd1f0ae --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.security.impl.ldap; + +import org.apache.dolphinscheduler.common.enums.UserType; + +import java.util.Properties; + +import javax.naming.Context; +import javax.naming.NamingEnumeration; +import javax.naming.NamingException; +import javax.naming.directory.Attribute; +import javax.naming.directory.InitialDirContext; +import javax.naming.directory.SearchControls; +import javax.naming.directory.SearchResult; +import javax.naming.ldap.InitialLdapContext; +import javax.naming.ldap.LdapContext; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; +import org.springframework.stereotype.Component; + +@Component +@Configuration +public class LdapService { + private static final Logger logger = LoggerFactory.getLogger(LdapService.class); + + @Value("${security.authentication.ldap.user.admin:null}") + private String adminUserId; + + @Value("${ldap.urls:null}") + private String ldapUrls; + + @Value("${ldap.base.dn:null}") + private String ldapBaseDn; + + @Value("${ldap.username:null}") + private String ldapSecurityPrincipal; + + @Value("${ldap.password:null}") + private String ldapPrincipalPassword; + + 
@Value("${ldap.user.identity.attribute:null}") + private String ldapUserIdentifyingAttribute; + + @Value("${ldap.user.email.attribute:null}") + private String ldapEmailAttribute; + + /*** + * get user type by configured admin userId + * @param userId login userId + * @return user type + */ + public UserType getUserType(String userId) { + return adminUserId.equalsIgnoreCase(userId) ? UserType.ADMIN_USER : UserType.GENERAL_USER; + } + + /** + * login by userId and return user email + * + * @param userId user identity id + * @param userPwd user login password + * @return user email + */ + public String ldapLogin(String userId, String userPwd) { + Properties searchEnv = getManagerLdapEnv(); + try { + //Connect to the LDAP server and Authenticate with a service user of whom we know the DN and credentials + LdapContext ctx = new InitialLdapContext(searchEnv, null); + SearchControls sc = new SearchControls(); + sc.setReturningAttributes(new String[]{ldapEmailAttribute}); + sc.setSearchScope(SearchControls.SUBTREE_SCOPE); + String searchFilter = String.format("(%s=%s)", ldapUserIdentifyingAttribute, userId); + //Search for the user you want to authenticate, search him with some attribute + NamingEnumeration results = ctx.search(ldapBaseDn, searchFilter, sc); + if (results.hasMore()) { + // get the users DN (distinguishedName) from the result + SearchResult result = results.next(); + NamingEnumeration attrs = result.getAttributes().getAll(); + while (attrs.hasMore()) { + //Open another connection to the LDAP server with the found DN and the password + searchEnv.put(Context.SECURITY_PRINCIPAL, result.getNameInNamespace()); + searchEnv.put(Context.SECURITY_CREDENTIALS, userPwd); + try { + new InitialDirContext(searchEnv); + } catch (Exception e) { + logger.warn("invalid ldap credentials or ldap search error", e); + return null; + } + Attribute attr = (Attribute) attrs.next(); + if (attr.getID().equals(ldapEmailAttribute)) { + return (String) attr.get(); + } + } + } + } catch 
(NamingException e) { + logger.error("ldap search error", e); + return null; + } + return null; + } + + /*** + * get ldap env fot ldap server search + * @return Properties + */ + Properties getManagerLdapEnv() { + Properties env = new Properties(); + env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); + env.put(Context.SECURITY_AUTHENTICATION, "simple"); + env.put(Context.SECURITY_PRINCIPAL, ldapSecurityPrincipal); + env.put(Context.SECURITY_CREDENTIALS, ldapPrincipalPassword); + env.put(Context.PROVIDER_URL, ldapUrls); + return env; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticator.java new file mode 100644 index 0000000000..a45bb84604 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticator.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.security.impl.pwd; + +import org.apache.dolphinscheduler.api.security.impl.AbstractAuthenticator; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.dao.entity.User; + +import org.springframework.beans.factory.annotation.Autowired; + +public class PasswordAuthenticator extends AbstractAuthenticator { + @Autowired + private UsersService userService; + + @Override + public User login(String userId, String password, String extra) { + return userService.queryUser(userId, password); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java index 98eef47090..b1c320566f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/AccessTokenService.java @@ -44,7 +44,8 @@ public interface AccessTokenService { * @param token token string * @return create result code */ - Map createToken(int userId, String expireTime, String token); + Map createToken(User loginUser, int userId, String expireTime, String token); + /** * generate token @@ -53,7 +54,7 @@ public interface AccessTokenService { * @param expireTime token expire time * @return token string */ - Map generateToken(int userId, String expireTime); + Map generateToken(User loginUser, int userId, String expireTime); /** * delete access token @@ -73,5 +74,5 @@ public interface AccessTokenService { * @param token token string * @return update result code */ - Map updateToken(int id, int userId, String expireTime, String token); + Map updateToken(User loginUser, int id, int userId, String expireTime, String token); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java index 4b094ea494..e9b25250a8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseService.java @@ -95,6 +95,25 @@ public class BaseService { } + /** + * check + * + * @param result result + * @param bool bool + * @param userNoOperationPerm status + * @return check result + */ + protected boolean check(Map result, boolean bool, Status userNoOperationPerm) { + //only admin can operate + if (bool) { + result.put(Constants.STATUS, userNoOperationPerm); + result.put(Constants.MSG, userNoOperationPerm.getMsg()); + return true; + } + return false; + } + + /** * get cookie info by name * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index 74c2f6908f..1fa0494d13 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -499,7 +499,13 @@ public class DataSourceService extends BaseService { String address = buildAddress(type, host, port, connectType); Map parameterMap = new LinkedHashMap(6); - String jdbcUrl = address + "/" + database; + String jdbcUrl; + if (DbType.SQLSERVER == type) { + jdbcUrl = address + ";databaseName=" + database; + } else { + jdbcUrl = address + "/" + database; + } + if (Constants.ORACLE.equals(type.name())) { parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType); } @@ -533,19 +539,17 @@ public class DataSourceService extends BaseService { (type == DbType.HIVE || type == DbType.SPARK)) { parameterMap.put(Constants.PRINCIPAL, principal); } - if (other != null && !"".equals(other)) { - Map map = 
JSONUtils.toMap(other); - if (map.size() > 0) { - StringBuilder otherSb = new StringBuilder(); - for (Map.Entry entry : map.entrySet()) { - otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator)); - } - if (!Constants.DB2.equals(type.name())) { - otherSb.deleteCharAt(otherSb.length() - 1); - } - parameterMap.put(Constants.OTHER, otherSb); - } + Map map = JSONUtils.toMap(other); + if (map != null) { + StringBuilder otherSb = new StringBuilder(); + for (Map.Entry entry: map.entrySet()) { + otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator)); + } + if (!Constants.DB2.equals(type.name())) { + otherSb.deleteCharAt(otherSb.length() - 1); + } + parameterMap.put(Constants.OTHER, otherSb); } if (logger.isDebugEnabled()) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index e5e1513460..aa58aaaaab 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -14,39 +14,62 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; +import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; import org.apache.dolphinscheduler.api.enums.ExecuteType; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.RunMode; +import org.apache.dolphinscheduler.common.enums.TaskDependType; +import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import 
org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import java.text.ParseException; -import java.util.*; - -import static org.apache.dolphinscheduler.common.Constants.*; - /** * executor service */ @Service -public class ExecutorService extends BaseService{ +public class ExecutorService extends BaseService { private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class); @@ -73,20 +96,20 @@ public class ExecutorService extends BaseService{ /** * execute process instance * - * @param loginUser login user - * @param projectName project name - * @param processDefinitionId process Definition Id - * @param cronTime cron time - * @param commandType command type - * @param failureStrategy failuer strategy - * @param startNodeList start nodelist - * @param taskDependType node dependency type - * @param warningType warning type - * @param warningGroupId notify group id + * @param loginUser login user + * @param projectName project name + * @param processDefinitionId process Definition Id + * @param cronTime cron time + * @param commandType command type + * @param failureStrategy failuer strategy + * @param startNodeList start nodelist + * @param taskDependType node dependency type + * @param warningType warning type + * @param warningGroupId notify group id * @param 
processInstancePriority process instance priority * @param workerGroup worker group name * @param runMode run mode - * @param timeout timeout + * @param timeout timeout * @return execute process instance code * @throws ParseException Parse Exception */ @@ -99,23 +122,23 @@ public class ExecutorService extends BaseService{ Map result = new HashMap<>(); // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { - putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); + putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); return result; } Project project = projectMapper.queryByName(projectName); Map checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); - if (checkResultAndAuth != null){ + if (checkResultAndAuth != null) { return checkResultAndAuth; } // check process define release state ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); result = checkProcessDefinitionValid(processDefinition, processDefinitionId); - if(result.get(Constants.STATUS) != Status.SUCCESS){ + if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; } - if (!checkTenantSuitable(processDefinition)){ + if (!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); @@ -127,14 +150,14 @@ public class ExecutorService extends BaseService{ return result; } - /** * create command */ int create = this.createCommand(commandType, processDefinitionId, taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), - warningGroupId, runMode,processInstancePriority, workerGroup); - if(create > 0 ){ + warningGroupId, runMode, processInstancePriority, workerGroup); + + if (create > 0) { processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); putMsg(result, Status.SUCCESS); @@ 
-146,6 +169,7 @@ public class ExecutorService extends BaseService{ /** * check whether master exists + * * @param result result * @return master exists return true , otherwise return false */ @@ -161,7 +185,6 @@ public class ExecutorService extends BaseService{ return true; } - /** * check whether the process definition can be executed * @@ -169,22 +192,20 @@ public class ExecutorService extends BaseService{ * @param processDefineId process definition id * @return check result code */ - public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){ + public Map checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) { Map result = new HashMap<>(); if (processDefinition == null) { // check process definition exists - putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId); + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); } else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { // check process definition online - putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE,processDefineId); - }else{ + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId); + } else { result.put(Constants.STATUS, Status.SUCCESS); } return result; } - - /** * do action to process instance:pause, stop, repeat, recover from pause, recover from stop * @@ -208,7 +229,6 @@ public class ExecutorService extends BaseService{ return result; } - ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (processInstance == null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); @@ -216,7 +236,7 @@ public class ExecutorService extends BaseService{ } ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); - if(executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE){ + if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { result = 
checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; @@ -228,7 +248,7 @@ public class ExecutorService extends BaseService{ if (status != Status.SUCCESS) { return checkResult; } - if (!checkTenantSuitable(processDefinition)){ + if (!checkTenantSuitable(processDefinition)) { logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); @@ -269,6 +289,7 @@ public class ExecutorService extends BaseService{ /** * check tenant suitable + * * @param processDefinition process definition * @return true if tenant suitable, otherwise return false */ @@ -309,7 +330,7 @@ public class ExecutorService extends BaseService{ } break; case RECOVER_SUSPENDED_PROCESS: - if (executionStatus.typeIsPause()|| executionStatus.typeIsCancel()) { + if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { checkResult = true; } break; @@ -317,7 +338,7 @@ public class ExecutorService extends BaseService{ break; } if (!checkResult) { - putMsg(result,Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); + putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); } else { putMsg(result, Status.SUCCESS); } @@ -325,7 +346,7 @@ public class ExecutorService extends BaseService{ } /** - * prepare to update process instance command type and status + * prepare to update process instance command type and status * * @param processInstance process instance * @param commandType command type @@ -364,11 +385,11 @@ public class ExecutorService extends BaseService{ command.setCommandType(commandType); command.setProcessDefinitionId(processDefinitionId); command.setCommandParam(String.format("{\"%s\":%d}", - 
CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId)); + CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId)); command.setExecutorId(loginUser.getId()); - if(!processService.verifyIsNeedCreateCommand(command)){ - putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId); + if (!processService.verifyIsNeedCreateCommand(command)) { + putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); return result; } @@ -385,28 +406,29 @@ public class ExecutorService extends BaseService{ /** * check if sub processes are offline before starting process definition + * * @param processDefineId process definition id * @return check result code */ public Map startCheckByProcessDefinedId(int processDefineId) { Map result = new HashMap<>(); - if (processDefineId == 0){ + if (processDefineId == 0) { logger.error("process definition id is null"); - putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id"); + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); } List ids = new ArrayList<>(); processService.recurseFindSubProcessId(processDefineId, ids); Integer[] idArray = ids.toArray(new Integer[ids.size()]); - if (!ids.isEmpty()){ + if (!ids.isEmpty()) { List processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (processDefinitionList != null){ - for (ProcessDefinition processDefinition : processDefinitionList){ + if (processDefinitionList != null) { + for (ProcessDefinition processDefinition : processDefinitionList) { /** * if there is no online process, exit directly */ - if (processDefinition.getReleaseState() != ReleaseState.ONLINE){ - putMsg(result,Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); + if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { + putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), 
processDefinition.getName()); return result; @@ -420,6 +442,7 @@ public class ExecutorService extends BaseService{ /** * create command + * * @param commandType commandType * @param processDefineId processDefineId * @param nodeDep nodeDep @@ -433,37 +456,36 @@ public class ExecutorService extends BaseService{ * @param processInstancePriority processInstancePriority * @param workerGroup workerGroup * @return command id - * @throws ParseException */ private int createCommand(CommandType commandType, int processDefineId, TaskDependType nodeDep, FailureStrategy failureStrategy, String startNodeList, String schedule, WarningType warningType, int executorId, int warningGroupId, - RunMode runMode,Priority processInstancePriority, String workerGroup) throws ParseException { + RunMode runMode, Priority processInstancePriority, String workerGroup) throws ParseException { /** * instantiate command schedule instance */ Command command = new Command(); - Map cmdParam = new HashMap<>(); - if(commandType == null){ + Map cmdParam = new HashMap<>(); + if (commandType == null) { command.setCommandType(CommandType.START_PROCESS); - }else{ + } else { command.setCommandType(commandType); } command.setProcessDefinitionId(processDefineId); - if(nodeDep != null){ + if (nodeDep != null) { command.setTaskDependType(nodeDep); } - if(failureStrategy != null){ + if (failureStrategy != null) { command.setFailureStrategy(failureStrategy); } - if(StringUtils.isNotEmpty(startNodeList)){ - cmdParam.put(CMDPARAM_START_NODE_NAMES, startNodeList); + if (StringUtils.isNotEmpty(startNodeList)) { + cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList); } - if(warningType != null){ + if (warningType != null) { command.setWarningType(warningType); } command.setCommandParam(JSONUtils.toJsonString(cmdParam)); @@ -474,32 +496,32 @@ public class ExecutorService extends BaseService{ Date start = null; Date end = null; - if(StringUtils.isNotEmpty(schedule)){ + if (StringUtils.isNotEmpty(schedule)) { String[] 
interval = schedule.split(","); - if(interval.length == 2){ + if (interval.length == 2) { start = DateUtils.getScheduleDate(interval[0]); end = DateUtils.getScheduleDate(interval[1]); } } // determine whether to complement - if(commandType == CommandType.COMPLEMENT_DATA){ + if (commandType == CommandType.COMPLEMENT_DATA) { runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; - if(null != start && null != end && !start.after(end)){ - if(runMode == RunMode.RUN_MODE_SERIAL){ + if (null != start && null != end && !start.after(end)) { + if (runMode == RunMode.RUN_MODE_SERIAL) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); - }else if (runMode == RunMode.RUN_MODE_PARALLEL){ + } else if (runMode == RunMode.RUN_MODE_PARALLEL) { List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); List listDate = new LinkedList<>(); - if(!CollectionUtils.isEmpty(schedules)){ + if (!CollectionUtils.isEmpty(schedules)) { for (Schedule item : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab())); } } - if(!CollectionUtils.isEmpty(listDate)){ + if (!CollectionUtils.isEmpty(listDate)) { // loop by schedule date for (Date date : listDate) { cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); @@ -508,10 +530,10 @@ public class ExecutorService extends BaseService{ processService.createCommand(command); } return listDate.size(); - }else{ + } else { // loop by day int runCunt = 0; - while(!start.after(end)) { + while (!start.after(end)) { runCunt += 1; cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); @@ -522,11 +544,11 @@ public class ExecutorService 
extends BaseService{ return runCunt; } } - }else{ + } else { logger.error("there is not valid schedule date for the process definition: id:{},date:{}", processDefineId, schedule); } - }else{ + } else { command.setCommandParam(JSONUtils.toJsonString(cmdParam)); return processService.createCommand(command); } @@ -536,11 +558,6 @@ public class ExecutorService extends BaseService{ /** * check result and auth - * - * @param loginUser - * @param projectName - * @param project - * @return */ private Map checkResultAndAuth(User loginUser, String projectName, Project project) { // check project auth diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 24fa87170c..4c98266607 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -304,10 +304,10 @@ public class ProcessInstanceService extends BaseService { private void addDependResultForTaskList(List taskInstanceList) throws IOException { for (TaskInstance taskInstance : taskInstanceList) { if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) { - Result logResult = loggerService.queryLog( + Result logResult = loggerService.queryLog( taskInstance.getId(), 0, 4098); if (logResult.getCode() == Status.SUCCESS.ordinal()) { - String log = (String) logResult.getData(); + String log = logResult.getData(); Map resultMap = parseLogForDependentResult(log); taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java index ca0e1fc0ec..4753725d31 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java @@ -120,4 +120,11 @@ public interface ProjectService { */ Map queryAllProjectList(); + /** + * query authorized and user create project list by user id + * @param loginUser + * @return + */ + Map queryProjectCreatedAndAuthorizedByUser(User loginUser); + } \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index bd7598979d..dffc55edfe 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; @@ -87,7 +88,7 @@ public class ResourcesService extends BaseService { * @param currentDir current directory * @return create directory result */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public Result createDirectory(User loginUser, String name, String description, @@ -101,8 +102,11 @@ public class ResourcesService extends BaseService { putMsg(result, Status.HDFS_NOT_STARTUP); return result; } - String fullName = "/".equals(currentDir) ? 
String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); - + String fullName = currentDir.equals("/") ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); + result = verifyResourceName(fullName,type,loginUser); + if (!result.getCode().equals(Status.SUCCESS.getCode())) { + return result; + } if (pid != -1) { Resource parentResource = resourcesMapper.selectById(pid); @@ -165,7 +169,7 @@ public class ResourcesService extends BaseService { * @param currentDir current directory * @return create result code */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public Result createResource(User loginUser, String name, String desc, @@ -230,7 +234,7 @@ public class ResourcesService extends BaseService { } // check resoure name exists - String fullName = "/".equals(currentDir) ? String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); + String fullName = currentDir.equals("/") ? 
String.format("%s%s",currentDir,name):String.format("%s/%s",currentDir,name); if (checkResourceExists(fullName, 0, type.ordinal())) { logger.error("resource {} has exist, can't recreate", name); putMsg(result, Status.RESOURCE_EXIST); @@ -288,14 +292,16 @@ public class ResourcesService extends BaseService { * @param name name * @param desc description * @param type resource type + * @param file resource file * @return update result code */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public Result updateResource(User loginUser, int resourceId, String name, String desc, - ResourceType type) { + ResourceType type, + MultipartFile file) { Result result = new Result(); // if resource upload startup @@ -315,7 +321,7 @@ public class ResourcesService extends BaseService { return result; } - if (name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) { + if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) { putMsg(result, Status.SUCCESS); return result; } @@ -331,6 +337,42 @@ public class ResourcesService extends BaseService { return result; } + if (file != null) { + + // file is empty + if (file.isEmpty()) { + logger.error("file is empty: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); + return result; + } + + // file suffix + String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); + String nameSuffix = FileUtils.suffix(name); + + // determine file suffix + if (!(StringUtils.isNotEmpty(fileSuffix) && fileSuffix.equalsIgnoreCase(nameSuffix))) { + /** + * rename file suffix and original suffix must be consistent + */ + logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); + return result; + } + + //If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar + if 
(Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(FileUtils.suffix(originFullName))) { + logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); + putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); + return result; + } + if (file.getSize() > Constants.MAX_FILE_SIZE) { + logger.error("file size is too large: {}", file.getOriginalFilename()); + putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); + return result; + } + } + // query tenant by user id String tenantCode = getTenantCode(resource.getUserId(),result); if (StringUtils.isEmpty(tenantCode)){ @@ -380,31 +422,61 @@ public class ResourcesService extends BaseService { } // updateResource data - List childrenResource = listAllChildren(resource,false); Date now = new Date(); resource.setAlias(name); resource.setFullName(fullName); resource.setDescription(desc); resource.setUpdateTime(now); + if (file != null) { + resource.setFileName(file.getOriginalFilename()); + resource.setSize(file.getSize()); + } try { resourcesMapper.updateById(resource); - if (resource.isDirectory() && CollectionUtils.isNotEmpty(childrenResource)) { - String matcherFullName = Matcher.quoteReplacement(fullName); - List childResourceList = new ArrayList<>(); - List resourceList = resourcesMapper.listResourceByIds(childrenResource.toArray(new Integer[childrenResource.size()])); - childResourceList = resourceList.stream().map(t -> { - t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName)); - t.setUpdateTime(now); - return t; - }).collect(Collectors.toList()); - resourcesMapper.batchUpdateResource(childResourceList); + if (resource.isDirectory()) { + List childrenResource = listAllChildren(resource,false); + if (CollectionUtils.isNotEmpty(childrenResource)) { + String matcherFullName = Matcher.quoteReplacement(fullName); + List childResourceList = new ArrayList<>(); + Integer[] childResIdArray = childrenResource.toArray(new Integer[childrenResource.size()]); + List resourceList = 
resourcesMapper.listResourceByIds(childResIdArray); + childResourceList = resourceList.stream().map(t -> { + t.setFullName(t.getFullName().replaceFirst(originFullName, matcherFullName)); + t.setUpdateTime(now); + return t; + }).collect(Collectors.toList()); + resourcesMapper.batchUpdateResource(childResourceList); + + if (ResourceType.UDF.equals(resource.getType())) { + List udfFuncs = udfFunctionMapper.listUdfByResourceId(childResIdArray); + if (CollectionUtils.isNotEmpty(udfFuncs)) { + udfFuncs = udfFuncs.stream().map(t -> { + t.setResourceName(t.getResourceName().replaceFirst(originFullName, matcherFullName)); + t.setUpdateTime(now); + return t; + }).collect(Collectors.toList()); + udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); + } + } + } + } else if (ResourceType.UDF.equals(resource.getType())) { + List udfFuncs = udfFunctionMapper.listUdfByResourceId(new Integer[]{resourceId}); + if (CollectionUtils.isNotEmpty(udfFuncs)) { + udfFuncs = udfFuncs.stream().map(t -> { + t.setResourceName(fullName); + t.setUpdateTime(now); + return t; + }).collect(Collectors.toList()); + udfFunctionMapper.batchUpdateUdfFunc(udfFuncs); + } + } putMsg(result, Status.SUCCESS); Map dataMap = new BeanMap(resource); - Map resultMap = new HashMap<>(); + Map resultMap = new HashMap<>(5); for (Map.Entry entry: dataMap.entrySet()) { if (!Constants.CLASS.equalsIgnoreCase(entry.getKey().toString())) { resultMap.put(entry.getKey().toString(), entry.getValue()); @@ -415,11 +487,31 @@ public class ResourcesService extends BaseService { logger.error(Status.UPDATE_RESOURCE_ERROR.getMsg(), e); throw new ServiceException(Status.UPDATE_RESOURCE_ERROR); } + // if name unchanged, return directly without moving on HDFS - if (originResourceName.equals(name)) { + if (originResourceName.equals(name) && file == null) { + return result; + } + + if (file != null) { + // fail upload + if (!upload(loginUser, fullName, file, type)) { + logger.error("upload resource: {} file: {} failed.", name, 
file.getOriginalFilename()); + putMsg(result, Status.HDFS_OPERATION_ERROR); + throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); + } + if (!fullName.equals(originFullName)) { + try { + HadoopUtils.getInstance().delete(originHdfsFileName,false); + } catch (IOException e) { + logger.error(e.getMessage(),e); + throw new RuntimeException(String.format("delete resource: %s failed.", originFullName)); + } + } return result; } + // get the path of dest file in hdfs String destHdfsFileName = HadoopUtils.getHdfsFileName(resource.getType(),tenantCode,fullName); @@ -449,7 +541,7 @@ public class ResourcesService extends BaseService { */ public Map queryResourceListPaging(User loginUser, int direcotryId, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { - HashMap result = new HashMap<>(); + HashMap result = new HashMap<>(5); Page page = new Page(pageNo, pageSize); int userId = loginUser.getId(); if (isAdmin(loginUser)) { @@ -550,7 +642,7 @@ public class ResourcesService extends BaseService { */ public Map queryResourceList(User loginUser, ResourceType type) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); int userId = loginUser.getId(); if(isAdmin(loginUser)){ @@ -565,21 +657,33 @@ public class ResourcesService extends BaseService { } /** - * query resource list + * query resource list by program type * * @param loginUser login user * @param type resource type * @return resource list */ - public Map queryResourceJarList(User loginUser, ResourceType type) { + public Map queryResourceByProgramType(User loginUser, ResourceType type, ProgramType programType) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); + String suffix = ".jar"; int userId = loginUser.getId(); if(isAdmin(loginUser)){ userId = 0; } + if (programType != null) { + switch (programType) { + case JAVA: + break; + case SCALA: + break; + case PYTHON: + suffix = ".py"; + break; + } + } List 
allResourceList = resourcesMapper.queryResourceListAuthored(userId, type.ordinal(),0); - List resources = new ResourceFilter(".jar",new ArrayList<>(allResourceList)).filter(); + List resources = new ResourceFilter(suffix,new ArrayList<>(allResourceList)).filter(); Visitor resourceTreeVisitor = new ResourceTreeVisitor(resources); result.put(Constants.DATA_LIST, resourceTreeVisitor.visit().getChildren()); putMsg(result,Status.SUCCESS); @@ -829,7 +933,7 @@ public class ResourcesService extends BaseService { * @param content content * @return create result code */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content,int pid,String currentDirectory) { Result result = new Result(); // if resource upload startup @@ -852,12 +956,25 @@ public class ResourcesService extends BaseService { } String name = fileName.trim() + "." + nameSuffix; - String fullName = "/".equals(currentDirectory) ? String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name); + String fullName = currentDirectory.equals("/") ? 
String.format("%s%s",currentDirectory,name):String.format("%s/%s",currentDirectory,name); result = verifyResourceName(fullName,type,loginUser); if (!result.getCode().equals(Status.SUCCESS.getCode())) { return result; } + if (pid != -1) { + Resource parentResource = resourcesMapper.selectById(pid); + + if (parentResource == null) { + putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); + return result; + } + + if (!hasPerm(loginUser, parentResource.getUserId())) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + } // save data Date now = new Date(); @@ -891,7 +1008,7 @@ public class ResourcesService extends BaseService { * @param content content * @return update result cod */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public Result updateResourceContent(int resourceId, String content) { Result result = new Result(); @@ -1096,7 +1213,7 @@ public class ResourcesService extends BaseService { * @return unauthorized result code */ public Map unauthorizedUDFFunction(User loginUser, Integer userId) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); //only admin can operate if (checkAdmin(loginUser, result)) { return result; @@ -1148,7 +1265,7 @@ public class ResourcesService extends BaseService { * @return authorized result */ public Map authorizedFile(User loginUser, Integer userId) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); if (checkAdmin(loginUser, result)){ return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index 09ad58c2d2..55880ad63c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -14,18 +14,22 @@ * See the 
License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.service; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.dto.ScheduleParam; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.FailureStrategy; +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ReleaseState; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; @@ -34,12 +38,18 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; 
+import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,8 +57,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.text.ParseException; -import java.util.*; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * scheduler service @@ -86,12 +96,10 @@ public class SchedulerService extends BaseService { * @param projectName project name * @param processDefineId process definition id * @param schedule scheduler - * @param warningType warning type + * @param warningType warning type * @param warningGroupId warning group id * @param failureStrategy failure strategy * @param processInstancePriority process instance priority - * @param receivers receivers - * @param receiversCc receivers cc * @param workerGroup worker group * @return create result code */ @@ -102,12 +110,10 @@ public class SchedulerService extends BaseService { WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, - String receivers, - String receiversCc, Priority processInstancePriority, String workerGroup) { - Map result = new HashMap(5); + Map result = new HashMap(); Project project = projectMapper.queryByName(projectName); @@ -132,9 +138,9 @@ public class SchedulerService extends BaseService { scheduleObj.setProcessDefinitionName(processDefinition.getName()); ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { + if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); - putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); + 
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } scheduleObj.setStartTime(scheduleParam.getStartTime()); @@ -172,7 +178,6 @@ public class SchedulerService extends BaseService { return result; } - /** * updateProcessInstance schedule * @@ -185,8 +190,6 @@ public class SchedulerService extends BaseService { * @param failureStrategy failure strategy * @param workerGroup worker group * @param processInstancePriority process instance priority - * @param receiversCc receiver cc - * @param receivers receivers * @param scheduleStatus schedule status * @return update result code */ @@ -198,8 +201,6 @@ public class SchedulerService extends BaseService { WarningType warningType, int warningGroupId, FailureStrategy failureStrategy, - String receivers, - String receiversCc, ReleaseState scheduleStatus, Priority processInstancePriority, String workerGroup) { @@ -239,9 +240,9 @@ public class SchedulerService extends BaseService { // updateProcessInstance param if (StringUtils.isNotEmpty(scheduleExpression)) { ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); - if (DateUtils.differSec(scheduleParam.getStartTime(),scheduleParam.getEndTime()) == 0) { + if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { logger.warn("The start time must not be the same as the end"); - putMsg(result,Status.SCHEDULE_START_TIME_END_TIME_SAME); + putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } schedule.setStartTime(scheduleParam.getStartTime()); @@ -289,7 +290,7 @@ public class SchedulerService extends BaseService { * @param loginUser login user * @param projectName project name * @param id scheduler id - * @param scheduleStatus schedule status + * @param scheduleStatus schedule status * @return publish result code */ @Transactional(rollbackFor = RuntimeException.class) @@ -315,7 +316,7 @@ public class SchedulerService extends BaseService { return result; } // check 
schedule release state - if(scheduleObj.getReleaseState() == scheduleStatus){ + if (scheduleObj.getReleaseState() == scheduleStatus) { logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); @@ -327,9 +328,9 @@ public class SchedulerService extends BaseService { return result; } - if(scheduleStatus == ReleaseState.ONLINE){ + if (scheduleStatus == ReleaseState.ONLINE) { // check process definition release state - if(processDefinition.getReleaseState() != ReleaseState.ONLINE){ + if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition id: {} , name : {}", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); @@ -339,15 +340,15 @@ public class SchedulerService extends BaseService { List subProcessDefineIds = new ArrayList<>(); processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); - if (subProcessDefineIds.size() > 0){ + if (subProcessDefineIds.size() > 0) { List subProcessDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); - if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){ - for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){ + if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0) { + for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) { /** * if there is no online process, exit directly */ - if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE){ + if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { logger.info("not release process definition 
id: {} , name : {}", subProcessDefinition.getId(), subProcessDefinition.getName()); putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); @@ -361,7 +362,6 @@ public class SchedulerService extends BaseService { // check master server exists List masterServers = monitorService.getServerListFromZK(true); - if (masterServers.size() == 0) { putMsg(result, Status.MASTER_NOT_EXISTS); return result; @@ -398,8 +398,6 @@ public class SchedulerService extends BaseService { return result; } - - /** * query schedule * @@ -407,7 +405,7 @@ public class SchedulerService extends BaseService { * @param projectName project name * @param processDefineId process definition id * @param pageNo page number - * @param pageSize page size + * @param pageSize page size * @param searchVal search value * @return schedule list page */ @@ -433,9 +431,8 @@ public class SchedulerService extends BaseService { page, processDefineId, searchVal ); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); - pageInfo.setTotalCount((int)scheduleIPage.getTotal()); + pageInfo.setTotalCount((int) scheduleIPage.getTotal()); pageInfo.setLists(scheduleIPage.getRecords()); result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); @@ -499,8 +496,8 @@ public class SchedulerService extends BaseService { String jobName = QuartzExecutors.buildJobName(scheduleId); String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); - if(!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)){ - logger.warn("set offline failure:projectId:{},scheduleId:{}",projectId,scheduleId); + if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) { + logger.warn("set offline failure:projectId:{},scheduleId:{}", projectId, scheduleId); throw new ServiceException("set offline failure"); } @@ -550,19 +547,18 @@ public class SchedulerService extends BaseService { } // Determine if the login user is the owner of the schedule - if (loginUser.getId() != schedule.getUserId() && 
- loginUser.getUserType() != UserType.ADMIN_USER) { + if (loginUser.getId() != schedule.getUserId() + && loginUser.getUserType() != UserType.ADMIN_USER) { putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } // check schedule is already online - if(schedule.getReleaseState() == ReleaseState.ONLINE){ - putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); + if (schedule.getReleaseState() == ReleaseState.ONLINE) { + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId()); return result; } - int delete = scheduleMapper.deleteById(scheduleId); if (delete > 0) { @@ -581,7 +577,7 @@ public class SchedulerService extends BaseService { * @param schedule schedule expression * @return the next five fire time */ - public Map previewSchedule(User loginUser, String projectName, String schedule) { + public Map previewSchedule(User loginUser, String projectName, String schedule) { Map result = new HashMap<>(); CronExpression cronExpression; ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); @@ -592,11 +588,11 @@ public class SchedulerService extends BaseService { try { cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab()); } catch (ParseException e) { - logger.error(e.getMessage(),e); - putMsg(result,Status.PARSE_TO_CRON_EXPRESSION_ERROR); + logger.error(e.getMessage(), e); + putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); return result; } - List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime,cronExpression,Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); + List selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT); result.put(Constants.DATA_LIST, selfFireDateList.stream().map(t -> DateUtils.dateToString(t))); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index 695b76b2bc..012af8fd38 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.api.service; +package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; @@ -69,7 +69,6 @@ public class TaskInstanceService extends BaseService { @Autowired UsersService usersService; - /** * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging * @@ -87,7 +86,7 @@ public class TaskInstanceService extends BaseService { * @return task list page */ public Map queryTaskListPaging(User loginUser, String projectName, - Integer processInstanceId, String taskName, String executorName, String startDate, + Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate, String endDate, String searchVal, ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) { Map result = new HashMap<>(); @@ -124,7 +123,7 @@ public class TaskInstanceService extends BaseService { int executorId = usersService.getUserIdByName(executorName); IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( - page, project.getId(), processInstanceId, searchVal, taskName, executorId, statusArray, host, start, end + page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end ); Set exclusionSet = new HashSet<>(); exclusionSet.add(Constants.CLASS); diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java index 8e83e22a3d..8ab84f9928 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TenantService.java @@ -32,7 +32,6 @@ public interface TenantService { * * @param loginUser login user * @param tenantCode tenant code - * @param tenantName tenant name * @param queueId queue id * @param desc description * @return create result code @@ -40,7 +39,6 @@ public interface TenantService { */ Map createTenant(User loginUser, String tenantCode, - String tenantName, int queueId, String desc) throws Exception; @@ -61,13 +59,12 @@ public interface TenantService { * @param loginUser login user * @param id tennat id * @param tenantCode tennat code - * @param tenantName tennat name * @param queueId queue id * @param desc description * @return update result code * @throws Exception exception */ - Map updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId, + Map updateTenant(User loginUser, int id, String tenantCode, int queueId, String desc) throws Exception; /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java index 04f641f279..cd962fdc70 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UdfFuncService.java @@ -148,7 +148,7 @@ public class UdfFuncService extends BaseService{ */ public Map queryUdfFuncDetail(int id) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); UdfFunc udfFunc = udfFuncMapper.selectById(id); if (udfFunc 
== null) { putMsg(result, Status.RESOURCE_NOT_EXIST); @@ -244,7 +244,7 @@ public class UdfFuncService extends BaseService{ * @return udf function list page */ public Map queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); PageInfo pageInfo = new PageInfo(pageNo, pageSize); @@ -276,15 +276,19 @@ public class UdfFuncService extends BaseService{ } /** - * query data resource by type + * query udf list * * @param loginUser login user - * @param type resource type - * @return resource list + * @param type udf type + * @return udf func list */ - public Map queryResourceList(User loginUser, Integer type) { - Map result = new HashMap<>(); - List udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type); + public Map queryUdfFuncList(User loginUser, Integer type) { + Map result = new HashMap<>(5); + int userId = loginUser.getId(); + if (isAdmin(loginUser)) { + userId = 0; + } + List udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type); result.put(Constants.DATA_LIST, udfFuncList); putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index 92137be589..9ad2ab2903 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -31,21 +31,49 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.ResourceType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.*; -import org.apache.dolphinscheduler.dao.mapper.*; +import 
org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.EncryptionUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.dao.entity.AlertGroup; +import org.apache.dolphinscheduler.dao.entity.DatasourceUser; +import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.ResourcesUser; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.UDFUser; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; +import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils; +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import java.io.IOException; -import 
java.text.MessageFormat; -import java.util.*; -import java.util.stream.Collectors; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; /** * user service @@ -174,6 +202,37 @@ public class UsersService extends BaseService { return user; } + /*** + * create User for ldap login + */ + @Transactional(rollbackFor = Exception.class) + public User createUser(UserType userType, String userId, String email) { + User user = new User(); + Date now = new Date(); + + user.setUserName(userId); + user.setEmail(email); + // create general users, administrator users are currently built-in + user.setUserType(userType); + user.setCreateTime(now); + user.setUpdateTime(now); + user.setQueue(""); + + // save user + userMapper.insert(user); + return user; + } + + /** + * get user by user name + * + * @param userName user name + * @return exist user or null + */ + public User getUserByUserName(String userName) { + return userMapper.queryByUserNameAccurately(userName); + } + /** * query user by id * @@ -208,7 +267,6 @@ public class UsersService extends BaseService { /** * get user id by user name - * * @param name user name * @return if name empty 0, user not exists -1, user exist user id */ @@ -269,7 +327,7 @@ public class UsersService extends BaseService { * @return update result code * @throws Exception exception */ - public Map updateUser(int userId, + public Map updateUser(User loginUser, int userId, String userName, String userPassword, String email, @@ -280,13 +338,14 @@ public class UsersService extends BaseService { Map result = new HashMap<>(); result.put(Constants.STATUS, false); + if (check(result, !hasPerm(loginUser, userId), Status.USER_NO_OPERATION_PERM)) { + return result; + } User user = userMapper.selectById(userId); - if (user == null) { putMsg(result, Status.USER_NOT_EXIST, userId); return result; } - if (StringUtils.isNotEmpty(userName)) { if (!CheckUtils.checkUserName(userName)) { @@ -587,7 +646,7 
@@ public class UsersService extends BaseService { */ @Transactional(rollbackFor = RuntimeException.class) public Map grantUDFFunction(User loginUser, int userId, String udfIds) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); //only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { @@ -627,14 +686,14 @@ public class UsersService extends BaseService { /** * grant datasource * - * @param loginUser login user - * @param userId user id + * @param loginUser login user + * @param userId user id * @param datasourceIds data source id array * @return grant result code */ @Transactional(rollbackFor = RuntimeException.class) public Map grantDataSource(User loginUser, int userId, String datasourceIds) { - Map result = new HashMap<>(); + Map result = new HashMap<>(5); result.put(Constants.STATUS, false); //only admin can operate @@ -687,6 +746,18 @@ public class UsersService extends BaseService { user = loginUser; } else { user = userMapper.queryDetailsById(loginUser.getId()); + + List alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); + + StringBuilder sb = new StringBuilder(); + + if (alertGroups != null && alertGroups.size() > 0) { + for (int i = 0; i < alertGroups.size() - 1; i++) { + sb.append(alertGroups.get(i).getGroupName() + ","); + } + sb.append(alertGroups.get(alertGroups.size() - 1)); + user.setAlertGroup(sb.toString()); + } } result.put(Constants.DATA_LIST, user); @@ -815,24 +886,6 @@ public class UsersService extends BaseService { return result; } - /** - * check - * - * @param result result - * @param bool bool - * @param userNoOperationPerm status - * @return check result - */ - private boolean check(Map result, boolean bool, Status userNoOperationPerm) { - //only admin can operate - if (bool) { - result.put(Constants.STATUS, userNoOperationPerm); - result.put(Constants.MSG, userNoOperationPerm.getMsg()); - return true; - } - return false; - } - /** * @param tenantId tenant id * @return 
true if tenant exists, otherwise return false @@ -842,10 +895,6 @@ public class UsersService extends BaseService { } /** - * @param userName - * @param password - * @param email - * @param phone * @return if check failed return the field, otherwise return null */ private String checkUserParams(String userName, String password, String email, String phone) { @@ -872,8 +921,8 @@ public class UsersService extends BaseService { * copy resource files * * @param resourceComponent resource component - * @param srcBasePath src base path - * @param dstBasePath dst base path + * @param srcBasePath src base path + * @param dstBasePath dst base path * @throws IOException io exception */ private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException { @@ -908,10 +957,10 @@ public class UsersService extends BaseService { /** * register user, default state is 0, default tenant_id is 1, no phone, no queue * - * @param userName user name - * @param userPassword user password + * @param userName user name + * @param userPassword user password * @param repeatPassword repeat password - * @param email email + * @param email email * @return register result code * @throws Exception exception */ @@ -941,7 +990,7 @@ public class UsersService extends BaseService { * activate user, only system admin have permission, change user state code 0 to 1 * * @param loginUser login user - * @param userName user name + * @param userName user name * @return create result code */ public Map activateUser(User loginUser, String userName) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java index 95257e8c8a..3f21679b34 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java @@ -14,8 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; +import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; @@ -26,11 +29,17 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import java.util.*; -import java.util.stream.Collectors; /** * work group service @@ -38,14 +47,11 @@ import java.util.stream.Collectors; @Service public class WorkerGroupService extends BaseService { - - @Autowired - ProcessInstanceMapper processInstanceMapper; - + private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException"; @Autowired protected ZookeeperCachedOperator zookeeperCachedOperator; - - + @Autowired + ProcessInstanceMapper processInstanceMapper; /** * query worker group paging @@ -56,7 +62,7 @@ public class WorkerGroupService extends BaseService { * @param pageSize page size * @return worker group list page */ - public Map queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) { + public Map queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) { // list from index Integer fromIndex = (pageNo - 1) * pageSize; 
@@ -72,20 +78,20 @@ public class WorkerGroupService extends BaseService { List resultDataList = new ArrayList<>(); - if (CollectionUtils.isNotEmpty(workerGroups)){ + if (CollectionUtils.isNotEmpty(workerGroups)) { List searchValDataList = new ArrayList<>(); - if (StringUtils.isNotEmpty(searchVal)){ - for (WorkerGroup workerGroup : workerGroups){ - if (workerGroup.getName().contains(searchVal)){ + if (StringUtils.isNotEmpty(searchVal)) { + for (WorkerGroup workerGroup : workerGroups) { + if (workerGroup.getName().contains(searchVal)) { searchValDataList.add(workerGroup); } } - }else { + } else { searchValDataList = workerGroups; } - if (searchValDataList.size() < pageSize){ + if (searchValDataList.size() < pageSize) { toIndex = (pageNo - 1) * pageSize + searchValDataList.size(); } resultDataList = searchValDataList.subList(fromIndex, toIndex); @@ -100,14 +106,12 @@ public class WorkerGroupService extends BaseService { return result; } - - /** * query all worker group * * @return all worker group list */ - public Map queryAllGroup() { + public Map queryAllGroup() { Map result = new HashMap<>(); List workerGroups = getWorkerGroups(false); @@ -120,30 +124,46 @@ public class WorkerGroupService extends BaseService { return result; } - /** - * get worker groups + * get worker groups * * @param isPaging whether paging * @return WorkerGroup list */ private List getWorkerGroups(boolean isPaging) { - String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot()+"/nodes" +"/worker"; - List workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath); + + String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; + List workerGroups = new ArrayList<>(); + List workerGroupList; + try { + workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath); + } catch (Exception e) { + if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) { + if (isPaging) { + return workerGroups; + } else { 
+ //ignore noNodeException return Default + WorkerGroup wg = new WorkerGroup(); + wg.setName(DEFAULT_WORKER_GROUP); + workerGroups.add(wg); + return workerGroups; + } + } else { + throw e; + } + } // available workerGroup list List availableWorkerGroupList = new ArrayList<>(); - List workerGroups = new ArrayList<>(); - - for (String workerGroup : workerGroupList){ - String workerGroupPath= workerPath + "/" + workerGroup; + for (String workerGroup : workerGroupList) { + String workerGroupPath = workerPath + "/" + workerGroup; List childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath); - if (CollectionUtils.isNotEmpty(childrenNodes)){ + if (CollectionUtils.isNotEmpty(childrenNodes)) { availableWorkerGroupList.add(workerGroup); WorkerGroup wg = new WorkerGroup(); wg.setName(workerGroup); - if (isPaging){ + if (isPaging) { wg.setIpList(childrenNodes); String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0)); wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6])); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java index 54151d902f..da85621041 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java @@ -86,9 +86,14 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param token token string * @return create result code */ - public Map createToken(int userId, String expireTime, String token) { + public Map createToken(User loginUser, int userId, String expireTime, String token) { Map result = new HashMap<>(5); + if (!hasPerm(loginUser,userId)){ + putMsg(result, Status.USER_NO_OPERATION_PERM); + return 
result; + } + if (userId <= 0) { throw new IllegalArgumentException("User id should not less than or equals to 0."); } @@ -118,8 +123,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param expireTime token expire time * @return token string */ - public Map generateToken(int userId, String expireTime) { + public Map generateToken(User loginUser, int userId, String expireTime) { Map result = new HashMap<>(5); + if (!hasPerm(loginUser,userId)){ + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } String token = EncryptionUtils.getMd5(userId + expireTime + System.currentTimeMillis()); result.put(Constants.DATA_LIST, token); putMsg(result, Status.SUCCESS); @@ -144,8 +153,8 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe return result; } - if (loginUser.getId() != accessToken.getUserId() && - loginUser.getUserType() != UserType.ADMIN_USER) { + + if (!hasPerm(loginUser,accessToken.getUserId())){ putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -164,9 +173,12 @@ public class AccessTokenServiceImpl extends BaseService implements AccessTokenSe * @param token token string * @return update result code */ - public Map updateToken(int id, int userId, String expireTime, String token) { + public Map updateToken(User loginUser, int id, int userId, String expireTime, String token) { Map result = new HashMap<>(5); - + if (!hasPerm(loginUser,userId)){ + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } AccessToken accessToken = accessTokenMapper.selectById(id); if (accessToken == null) { logger.error("access token not exist, access token id {}", id); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java index 21313b96d3..c9560e1c50 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -16,7 +16,6 @@ */ package org.apache.dolphinscheduler.api.service.impl; - import org.apache.dolphinscheduler.api.dto.CommandStateCount; import org.apache.dolphinscheduler.api.dto.DefineUserDto; import org.apache.dolphinscheduler.api.dto.TaskCountDto; @@ -45,14 +44,14 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import java.text.MessageFormat; import java.util.ArrayList; +import java.util.Arrays; import java.util.Date; -import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -62,8 +61,6 @@ import org.springframework.stereotype.Service; @Service public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService { - private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class); - @Autowired private ProjectMapper projectMapper; @@ -88,10 +85,6 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis @Autowired private ProcessService processService; - private static final String COMMAND_STATE = "commandState"; - - private static final String ERROR_COMMAND_STATE = "errorCommandState"; - /** * statistical task instance status data * @@ -137,16 +130,17 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis return result; } - Date start; - Date end; - try { + Date start = null; + Date end = null; + if (StringUtils.isNotEmpty(startDate) && StringUtils.isNotEmpty(endDate)) { start = DateUtils.getScheduleDate(startDate); end = 
DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(), e); - putErrorRequestParamsMsg(result); - return result; + if (Objects.isNull(start) || Objects.isNull(end)) { + putErrorRequestParamsMsg(result); + return result; + } } + Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); List processInstanceStateCounts = instanceStateCounter.apply(start, end, projectIdArray); @@ -204,79 +198,39 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis * statistics based on task status execution, failure, completion, wait, total */ Date start = null; + if (StringUtils.isNotEmpty(startDate)) { + start = DateUtils.getScheduleDate(startDate); + if (Objects.isNull(start)) { + putErrorRequestParamsMsg(result); + return result; + } + } Date end = null; - - if (startDate != null && endDate != null) { - try { - start = DateUtils.getScheduleDate(startDate); - end = DateUtils.getScheduleDate(endDate); - } catch (Exception e) { - logger.error(e.getMessage(), e); + if (StringUtils.isNotEmpty(endDate)) { + end = DateUtils.getScheduleDate(endDate); + if (Objects.isNull(end)) { putErrorRequestParamsMsg(result); return result; } } - Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId); - // count command state - List commandStateCounts = - commandMapper.countCommandState( - loginUser.getId(), - start, - end, - projectIdArray); + // count normal command state + Map normalCountCommandCounts = commandMapper.countCommandState(loginUser.getId(), start, end, projectIdArray) + .stream() + .collect(Collectors.toMap(CommandCount::getCommandType, CommandCount::getCount)); // count error command state - List errorCommandStateCounts = - errorCommandMapper.countCommandState( - start, end, projectIdArray); - - // enumMap - Map> dataMap = new EnumMap<>(CommandType.class); - - Map commonCommand = new HashMap<>(); - commonCommand.put(COMMAND_STATE, 0); - commonCommand.put(ERROR_COMMAND_STATE, 0); - - - // init 
data map - /** - * START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS, - START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD; - */ - dataMap.put(CommandType.START_PROCESS, commonCommand); - dataMap.put(CommandType.START_CURRENT_TASK_PROCESS, commonCommand); - dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, commonCommand); - dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, commonCommand); - dataMap.put(CommandType.START_FAILURE_TASK_PROCESS, commonCommand); - dataMap.put(CommandType.COMPLEMENT_DATA, commonCommand); - dataMap.put(CommandType.SCHEDULER, commonCommand); - dataMap.put(CommandType.REPEAT_RUNNING, commonCommand); - dataMap.put(CommandType.PAUSE, commonCommand); - dataMap.put(CommandType.STOP, commonCommand); - dataMap.put(CommandType.RECOVER_WAITTING_THREAD, commonCommand); - - // put command state - for (CommandCount executeStatusCount : commandStateCounts) { - Map commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType())); - commandStateCountsMap.put(COMMAND_STATE, executeStatusCount.getCount()); - dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap); - } + Map errorCommandCounts = errorCommandMapper.countCommandState(start, end, projectIdArray) + .stream() + .collect(Collectors.toMap(CommandCount::getCommandType, CommandCount::getCount)); - // put error command state - for (CommandCount errorExecutionStatus : errorCommandStateCounts) { - Map errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType())); - errorCommandStateCountsMap.put(ERROR_COMMAND_STATE, errorExecutionStatus.getCount()); - dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap); - } - - List list = new ArrayList<>(); - for (Map.Entry> next : dataMap.entrySet()) { - CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get(ERROR_COMMAND_STATE), - 
next.getValue().get(COMMAND_STATE), next.getKey()); - list.add(commandStateCount); - } + List list = Arrays.stream(CommandType.values()) + .map(commandType -> new CommandStateCount( + errorCommandCounts.getOrDefault(commandType, 0), + normalCountCommandCounts.getOrDefault(commandType, 0), + commandType) + ).collect(Collectors.toList()); result.put(Constants.DATA_LIST, list); putMsg(result, Status.SUCCESS); @@ -311,59 +265,10 @@ public class DataAnalysisServiceImpl extends BaseService implements DataAnalysis return result; } - // TODO tasksQueueList and tasksKillList is never updated. - List tasksQueueList = new ArrayList<>(); - List tasksKillList = new ArrayList<>(); - + //TODO need to add detail data info Map dataMap = new HashMap<>(); - if (loginUser.getUserType() == UserType.ADMIN_USER) { - dataMap.put("taskQueue", tasksQueueList.size()); - dataMap.put("taskKill", tasksKillList.size()); - - result.put(Constants.DATA_LIST, dataMap); - putMsg(result, Status.SUCCESS); - return result; - } - - int[] tasksQueueIds = new int[tasksQueueList.size()]; - int[] tasksKillIds = new int[tasksKillList.size()]; - - int i = 0; - for (String taskQueueStr : tasksQueueList) { - if (StringUtils.isNotEmpty(taskQueueStr)) { - String[] splits = taskQueueStr.split("_"); - if (splits.length >= 4) { - tasksQueueIds[i++] = Integer.parseInt(splits[3]); - } - } - } - - i = 0; - for (String taskKillStr : tasksKillList) { - if (StringUtils.isNotEmpty(taskKillStr)) { - String[] splits = taskKillStr.split("-"); - if (splits.length == 2) { - tasksKillIds[i++] = Integer.parseInt(splits[1]); - } - } - } - Integer taskQueueCount = 0; - Integer taskKillCount = 0; - - Integer[] projectIds = getProjectIdsArrays(loginUser, projectId); - if (tasksQueueIds.length != 0) { - taskQueueCount = taskInstanceMapper.countTask( - projectIds, - tasksQueueIds); - } - - if (tasksKillIds.length != 0) { - taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds); - } - - dataMap.put("taskQueue", 
taskQueueCount); - dataMap.put("taskKill", taskKillCount); - + dataMap.put("taskQueue", 0); + dataMap.put("taskKill", 0); result.put(Constants.DATA_LIST, dataMap); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java index c71f2980f5..1574e7f0e7 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -130,7 +130,6 @@ public class LoggerServiceImpl implements LoggerService { logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath())); } - /** * get host * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index 9046f51881..c06919b352 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -17,7 +17,7 @@ package org.apache.dolphinscheduler.api.service.impl; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; import org.apache.dolphinscheduler.api.dto.ProcessMeta; import org.apache.dolphinscheduler.api.dto.treeview.Instance; @@ -49,6 +49,7 @@ import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.process.Property; import 
org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; @@ -77,7 +78,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; @@ -85,7 +85,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; @@ -159,6 +158,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param connects connects for nodes * @return create result code */ + @Override public Map createProcessDefinition(User loginUser, String projectName, String name, @@ -222,7 +222,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements // return processDefinition object with ID result.put(Constants.DATA_LIST, processDefineMapper.selectById(processDefine.getId())); putMsg(result, Status.SUCCESS); - result.put("processDefinitionId", processDefine.getId()); + result.put(PROCESSDEFINITIONID, processDefine.getId()); return result; } @@ -233,16 +233,35 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @return resource ids */ private String getResourceIds(ProcessData processData) { - return Optional.ofNullable(processData.getTasks()) - .orElse(Collections.emptyList()) - .stream() - .map(taskNode -> TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams())) - .filter(Objects::nonNull) - .flatMap(parameters -> parameters.getResourceFilesList().stream()) - .map(ResourceInfo::getId) - .distinct() - .map(Objects::toString) - 
.collect(Collectors.joining(",")); + List tasks = processData.getTasks(); + Set resourceIds = new HashSet<>(); + StringBuilder sb = new StringBuilder(); + if (CollectionUtils.isEmpty(tasks)) { + return sb.toString(); + } + for (TaskNode taskNode : tasks) { + String taskParameter = taskNode.getParams(); + AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter); + if (params == null) { + continue; + } + if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) { + Set tempSet = params.getResourceFilesList(). + stream() + .filter(t -> t.getId() != 0) + .map(ResourceInfo::getId) + .collect(Collectors.toSet()); + resourceIds.addAll(tempSet); + } + } + + for (int i : resourceIds) { + if (sb.length() > 0) { + sb.append(","); + } + sb.append(i); + } + return sb.toString(); } /** @@ -252,6 +271,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param projectName project name * @return definition list */ + @Override public Map queryProcessDefinitionList(User loginUser, String projectName) { HashMap result = new HashMap<>(); @@ -281,6 +301,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param userId user id * @return process definition page */ + @Override public Map queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) { Map result = new HashMap<>(); @@ -313,6 +334,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param processId process definition id * @return process definition detail */ + @Override public Map queryProcessDefinitionById(User loginUser, String projectName, Integer processId) { Map result = new HashMap<>(); @@ -347,10 +369,16 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param connects connects for nodes * @return update result code */ - public Map updateProcessDefinition(User loginUser, String projectName, 
int id, String name, - String processDefinitionJson, String desc, - String locations, String connects) { - Map result = new HashMap<>(); + @Override + public Map updateProcessDefinition(User loginUser, + String projectName, + int id, + String name, + String processDefinitionJson, + String desc, + String locations, + String connects) { + Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); @@ -365,16 +393,24 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return checkProcessJson; } ProcessDefinition processDefine = processService.findProcessDefineById(id); + // check process definition exists if (processDefine == null) { - // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); return result; - } else if (processDefine.getReleaseState() == ReleaseState.ONLINE) { + } + if (processDefine.getReleaseState() == ReleaseState.ONLINE) { // online can not permit edit putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefine.getName()); return result; - } else { - putMsg(result, Status.SUCCESS); + } + + if (!name.equals(processDefine.getName())) { + // check whether the new process define name exist + ProcessDefinition definition = processDefineMapper.verifyByDefineName(project.getId(), name); + if (definition != null) { + putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name); + return result; + } } Date now = new Date(); @@ -423,6 +459,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param name name * @return true if process definition name not exists, otherwise false */ + @Override public Map verifyProcessDefinitionName(User loginUser, String projectName, String name) { Map result = new HashMap<>(); @@ -433,11 +470,11 @@ public class ProcessDefinitionServiceImpl extends BaseService implements if (resultEnum != Status.SUCCESS) { 
return checkResult; } - ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); + ProcessDefinition processDefinition = processDefineMapper.verifyByDefineName(project.getId(), name); if (processDefinition == null) { putMsg(result, Status.SUCCESS); } else { - putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); + putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name); } return result; } @@ -450,6 +487,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param processDefinitionId process definition id * @return delete result code */ + @Override @Transactional(rollbackFor = RuntimeException.class) public Map deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) { @@ -481,9 +519,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements return result; } // check process instances is already running - List processInstances = processInstanceService.queryByProcessDefineIdAndStatus(processDefinitionId, Constants.NOT_TERMINATED_STATES); + List processInstances = processInstanceService.queryByProcessDefineIdAndStatus(processDefinitionId, Constants.NOT_TERMINATED_STATES); if (CollectionUtils.isNotEmpty(processInstances)) { - putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_ID_FAIL,processInstances.size()); + putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_ID_FAIL, processInstances.size()); return result; } @@ -522,6 +560,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param releaseState release state * @return release result code */ + @Override @Transactional(rollbackFor = RuntimeException.class) public Map releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { HashMap result = new HashMap<>(); @@ -589,6 +628,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements /** * batch export process definition by ids */ + @Override public void 
batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) { if (StringUtils.isEmpty(processDefinitionIds)) { @@ -665,6 +705,18 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } } + /** + * get export process metadata string + * + * @param processDefinitionId process definition id + * @param processDefinition process definition + * @return export process metadata string + */ + public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) { + //create workflow json file + return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition)); + } + /** * get export process metadata string * @@ -747,6 +799,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param currentProjectName current project name * @return import process */ + @Override @Transactional(rollbackFor = RuntimeException.class) public Map importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) { Map result = new HashMap<>(); @@ -835,14 +888,19 @@ public class ProcessDefinitionServiceImpl extends BaseService implements String processDefinitionName, String importProcessParam) { Map createProcessResult = null; - createProcessResult = createProcessDefinition(loginUser - , currentProjectName, - processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp(), - importProcessParam, - processMeta.getProcessDefinitionDescription(), - processMeta.getProcessDefinitionLocations(), - processMeta.getProcessDefinitionConnects()); - putMsg(result, Status.SUCCESS); + try { + createProcessResult = createProcessDefinition(loginUser + , currentProjectName, + processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp(), + importProcessParam, + processMeta.getProcessDefinitionDescription(), + processMeta.getProcessDefinitionLocations(), + processMeta.getProcessDefinitionConnects()); + 
putMsg(result, Status.SUCCESS); + } catch (Exception e) { + logger.error("import process meta json data: {}", e.getMessage(), e); + putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); + } return createProcessResult; } @@ -913,7 +971,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } //recursive sub-process parameter correction map key for old process id value for new process id - Map subProcessIdMap = new HashMap<>(); + Map subProcessIdMap = new HashMap<>(20); List subProcessList = StreamUtils.asStream(jsonArray.elements()) .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText())) @@ -1074,6 +1132,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param processDefinitionJson process definition json * @return check result code */ + @Override public Map checkProcessNodeList(ProcessData processData, String processDefinitionJson) { Map result = new HashMap<>(); @@ -1125,6 +1184,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param defineId define id * @return task node list */ + @Override public Map getTaskNodeListByDefinitionId(Integer defineId) { Map result = new HashMap<>(); @@ -1161,6 +1221,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param defineIdList define id list * @return task node list */ + @Override public Map getTaskNodeListByDefinitionIdList(String defineIdList) { Map result = new HashMap<>(); @@ -1198,6 +1259,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @param projectId project id * @return process definitions in the project */ + @Override public Map queryProcessDefinitionAllByProjectId(Integer projectId) { HashMap result = new HashMap<>(); @@ -1217,6 +1279,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements * @return tree view json data * @throws Exception exception */ + @Override public Map viewTree(Integer processId, 
Integer limit) throws Exception { Map result = new HashMap<>(); @@ -1301,7 +1364,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements String taskJson = taskInstance.getTaskJson(); taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); subProcessId = Integer.parseInt(JSONUtils.parseObject( - taskNode.getParams()).path(CMDPARAM_SUB_PROCESS_DEFINE_ID).asText()); + taskNode.getParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_ID).asText()); } treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString() , taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); @@ -1323,7 +1386,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } runningNodeMap.remove(nodeName); } - if (waitingRunningNodeMap.size() == 0) { + if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { break; } else { runningNodeMap.putAll(waitingRunningNodeMap); @@ -1429,6 +1492,41 @@ public class ProcessDefinitionServiceImpl extends BaseService implements } } + /** + * copy process definition + * + * @param loginUser login user + * @param projectName project name + * @param processId process definition id + * @return copy result code + */ + public Map copyProcessDefinition(User loginUser, String projectName, Integer processId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultStatus = (Status) checkResult.get(Constants.STATUS); + if (resultStatus != Status.SUCCESS) { + return checkResult; + } + + ProcessDefinition processDefinition = processDefineMapper.selectById(processId); + if (processDefinition == null) { + putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); + return result; + } 
else { + return createProcessDefinition( + loginUser, + projectName, + processDefinition.getName() + "_copy_" + System.currentTimeMillis(), + processDefinition.getProcessDefinitionJson(), + processDefinition.getDescription(), + processDefinition.getLocations(), + processDefinition.getConnects()); + } + } + /** * batch copy process definition * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java index 395da6027f..fd47342c12 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -375,6 +375,28 @@ public class ProjectServiceImpl extends BaseService implements ProjectService { return result; } + /** + * query authorized and user create project list by user + * + * @param loginUser login user + * @return + */ + public Map queryProjectCreatedAndAuthorizedByUser(User loginUser) { + Map result = new HashMap<>(); + + List projects = null; + if (loginUser.getUserType() == UserType.ADMIN_USER) { + projects = projectMapper.selectList(null); + } else { + projects = projectMapper.queryProjectCreatedAndAuthorizedByUserId(loginUser.getId()); + } + + result.put(Constants.DATA_LIST, projects); + putMsg(result, Status.SUCCESS); + + return result; + } + /** * check whether have read permission * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index 52f0d79ead..ecb9bae98c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -76,7 +76,6 @@ public class TenantServiceImpl extends BaseService implements TenantService { * * @param loginUser login user * @param tenantCode tenant code - * @param tenantName tenant name * @param queueId queue id * @param desc description * @return create result code @@ -85,7 +84,6 @@ public class TenantServiceImpl extends BaseService implements TenantService { @Transactional(rollbackFor = Exception.class) public Map createTenant(User loginUser, String tenantCode, - String tenantName, int queueId, String desc) throws Exception { @@ -113,7 +111,6 @@ public class TenantServiceImpl extends BaseService implements TenantService { return result; } tenant.setTenantCode(tenantCode); - tenant.setTenantName(tenantName); tenant.setQueueId(queueId); tenant.setDescription(desc); tenant.setCreateTime(now); @@ -166,13 +163,12 @@ public class TenantServiceImpl extends BaseService implements TenantService { * @param loginUser login user * @param id tennat id * @param tenantCode tennat code - * @param tenantName tennat name * @param queueId queue id * @param desc description * @return update result code * @throws Exception exception */ - public Map updateTenant(User loginUser, int id, String tenantCode, String tenantName, int queueId, + public Map updateTenant(User loginUser, int id, String tenantCode, int queueId, String desc) throws Exception { Map result = new HashMap<>(5); @@ -215,10 +211,6 @@ public class TenantServiceImpl extends BaseService implements TenantService { tenant.setTenantCode(tenantCode); } - if (StringUtils.isNotEmpty(tenantName)) { - tenant.setTenantName(tenantName); - } - if (queueId != 0) { tenant.setQueueId(queueId); } @@ -291,6 +283,26 @@ public class TenantServiceImpl extends BaseService implements TenantService { return processInstanceMapper.queryByTenantIdAndStatus(tenant.getId(), Constants.NOT_TERMINATED_STATES); } + /** + * query tenant list + 
* + * @param tenantCode tenant code + * @return tenant list + */ + public Map queryTenantList(String tenantCode) { + + Map result = new HashMap<>(5); + + List resourceList = tenantMapper.queryByTenantCode(tenantCode); + if (CollectionUtils.isNotEmpty(resourceList)) { + result.put(Constants.DATA_LIST, resourceList); + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.TENANT_NOT_EXIST); + } + return result; + } + /** * query tenant list * @@ -317,7 +329,7 @@ public class TenantServiceImpl extends BaseService implements TenantService { public Result verifyTenantCode(String tenantCode) { Result result = new Result(); if (checkTenantExists(tenantCode)) { - putMsg(result, Status.TENANT_NAME_EXIST, tenantCode); + putMsg(result, Status.TENANT_CODE_EXIST, tenantCode); } else { putMsg(result, Status.SUCCESS); } diff --git a/dolphinscheduler-api/src/main/resources/application-api.properties b/dolphinscheduler-api/src/main/resources/application-api.properties index 5b8bb5d495..e2cabfac67 100644 --- a/dolphinscheduler-api/src/main/resources/application-api.properties +++ b/dolphinscheduler-api/src/main/resources/application-api.properties @@ -30,8 +30,12 @@ spring.jackson.time-zone=GMT+8 spring.servlet.multipart.max-file-size=1024MB spring.servlet.multipart.max-request-size=1024MB +# enable response compression +server.compression.enabled=true +server.compression.mime-types=text/html,text/xml,text/plain,text/css,text/javascript,application/javascript,application/json,application/xml + #post content -server.jetty.max-http-post-size=5000000 +server.jetty.max-http-form-post-size=5000000 spring.messages.encoding=UTF-8 @@ -41,6 +45,18 @@ spring.messages.basename=i18n/messages # Authentication types (supported types: PASSWORD) security.authentication.type=PASSWORD - +#============================================================================ +# LDAP Config +# mock ldap server from https://www.forumsys.com/tutorials/integration-how-to/ldap/online-ldap-test-server/ 
+#============================================================================ +# admin userId +#security.authentication.ldap.user.admin=read-only-admin +# ldap server config +#ldap.urls=ldap://ldap.forumsys.com:389/ +#ldap.base.dn=dc=example,dc=com +#ldap.username=cn=read-only-admin,dc=example,dc=com +#ldap.password=password +#ldap.user.identity.attribute=uid +#ldap.user.email.attribute=mail diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index c426dedd15..4bfe5b4624 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -122,7 +122,6 @@ VERIFY_QUEUE_NOTES=verify queue TENANT_TAG=tenant related operation CREATE_TENANT_NOTES=create tenant TENANT_CODE=tenant code -TENANT_NAME=tenant name QUEUE_NAME=queue name PASSWORD=password DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} @@ -259,6 +258,7 @@ QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids QUERY_USER_CREATED_PROJECT_NOTES= query user created project +QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES= query authorized and user created project COPY_PROCESS_DEFINITION_NOTES= copy process definition notes MOVE_PROCESS_DEFINITION_NOTES= move process definition notes TARGET_PROJECT_ID= target project id diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 976237aa26..1846ee7308 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -122,7 +122,6 @@ VERIFY_QUEUE_NOTES=verify queue TENANT_TAG=tenant related operation 
CREATE_TENANT_NOTES=create tenant TENANT_CODE=tenant code -TENANT_NAME=tenant name QUEUE_NAME=queue name PASSWORD=password DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} @@ -259,6 +258,7 @@ QUERY_ALERT_GROUP_LIST_PAGING_NOTES=query alert group list paging EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=export process definition by id BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES= batch export process definition by ids QUERY_USER_CREATED_PROJECT_NOTES= query user created project +QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES= query authorized and user created project COPY_PROCESS_DEFINITION_NOTES= copy process definition notes MOVE_PROCESS_DEFINITION_NOTES= move process definition notes TARGET_PROJECT_ID= target project id diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index 23395fe428..24e51ad522 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -15,253 +15,248 @@ # limitations under the License. 
# -QUERY_SCHEDULE_LIST_NOTES=\u67E5\u8BE2\u5B9A\u65F6\u5217\u8868 -PROCESS_INSTANCE_EXECUTOR_TAG=\u6D41\u7A0B\u5B9E\u4F8B\u6267\u884C\u76F8\u5173\u64CD\u4F5C -RUN_PROCESS_INSTANCE_NOTES=\u8FD0\u884C\u6D41\u7A0B\u5B9E\u4F8B -START_NODE_LIST=\u5F00\u59CB\u8282\u70B9\u5217\u8868(\u8282\u70B9name) -TASK_DEPEND_TYPE=\u4EFB\u52A1\u4F9D\u8D56\u7C7B\u578B -COMMAND_TYPE=\u6307\u4EE4\u7C7B\u578B -RUN_MODE=\u8FD0\u884C\u6A21\u5F0F -TIMEOUT=\u8D85\u65F6\u65F6\u95F4 -EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=\u6267\u884C\u6D41\u7A0B\u5B9E\u4F8B\u7684\u5404\u79CD\u64CD\u4F5C(\u6682\u505C\u3001\u505C\u6B62\u3001\u91CD\u8DD1\u3001\u6062\u590D\u7B49) -EXECUTE_TYPE=\u6267\u884C\u7C7B\u578B -START_CHECK_PROCESS_DEFINITION_NOTES=\u68C0\u67E5\u6D41\u7A0B\u5B9A\u4E49 -DESC=\u5907\u6CE8(\u63CF\u8FF0) -GROUP_NAME=\u7EC4\u540D\u79F0 -GROUP_TYPE=\u7EC4\u7C7B\u578B -QUERY_ALERT_GROUP_LIST_NOTES=\u544A\u8B66\u7EC4\u5217\u8868\ +QUERY_SCHEDULE_LIST_NOTES=查询定时列表 +PROCESS_INSTANCE_EXECUTOR_TAG=流程实例执行相关操作 +RUN_PROCESS_INSTANCE_NOTES=运行流程实例 +START_NODE_LIST=开始节点列表(节点name) +TASK_DEPEND_TYPE=任务依赖类型 +COMMAND_TYPE=指令类型 +RUN_MODE=运行模式 +TIMEOUT=超时时间 +EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=执行流程实例的各种操作(暂停、停止、重跑、恢复等) +EXECUTE_TYPE=执行类型 +START_CHECK_PROCESS_DEFINITION_NOTES=检查流程定义 +DESC=备注(描述) +GROUP_NAME=组名称 +GROUP_TYPE=组类型 +QUERY_ALERT_GROUP_LIST_NOTES=告警组列表\ -UPDATE_ALERT_GROUP_NOTES=\u7F16\u8F91(\u66F4\u65B0)\u544A\u8B66\u7EC4 -DELETE_ALERT_GROUP_BY_ID_NOTES=\u5220\u9664\u544A\u8B66\u7EC4\u901A\u8FC7ID -VERIFY_ALERT_GROUP_NAME_NOTES=\u68C0\u67E5\u544A\u8B66\u7EC4\u662F\u5426\u5B58\u5728 -GRANT_ALERT_GROUP_NOTES=\u6388\u6743\u544A\u8B66\u7EC4 -USER_IDS=\u7528\u6237ID\u5217\u8868 -ALERT_GROUP_TAG=\u544A\u8B66\u7EC4\u76F8\u5173\u64CD\u4F5C -ALERT_PLUGIN_INSTANCE_TAG=\u544A\u8B66\u7EC4\u548C\u544A\u8B66\u63D2\u4EF6\u76F8\u5173\u64CD\u4F5C -UPDATE_ALERT_PLUGIN_INSTANCE_NOTES=\u66F4\u65B0\u544A\u8B66\u7EC4\u63D2\u4EF6\u5B9E\u4F8B 
-CREATE_ALERT_PLUGIN_INSTANCE_NOTES=\u521B\u5EFA\u544A\u8B66\u7EC4\u63D2\u4EF6\u5B9E\u4F8B -DELETE_ALERT_PLUGIN_INSTANCE_NOTES=\u5220\u9664\u544A\u8B66\u7EC4\u63D2\u4EF6\u5B9E\u4F8B -GET_ALERT_PLUGIN_INSTANCE_NOTES=\u83B7\u53D6\u544A\u8B66\u7EC4\u63D2\u4EF6\u5B9E\u4F8B -WORKER_GROUP_TAG=Worker\u5206\u7EC4\u7BA1\u7406 -SAVE_WORKER_GROUP_NOTES=\u521B\u5EFAWorker\u5206\u7EC4\ +UPDATE_ALERT_GROUP_NOTES=编辑(更新)告警组 +DELETE_ALERT_GROUP_BY_ID_NOTES=删除告警组通过ID +VERIFY_ALERT_GROUP_NAME_NOTES=检查告警组是否存在 +GRANT_ALERT_GROUP_NOTES=授权告警组 +USER_IDS=用户ID列表 +ALERT_GROUP_TAG=告警组相关操作 +WORKER_GROUP_TAG=Worker分组管理 +SAVE_WORKER_GROUP_NOTES=创建Worker分组\ -WORKER_GROUP_NAME=Worker\u5206\u7EC4\u540D\u79F0 -WORKER_IP_LIST=Worker ip\u5217\u8868\uFF0C\u6CE8\u610F\uFF1A\u591A\u4E2AIP\u5730\u5740\u4EE5\u9017\u53F7\u5206\u5272\ +WORKER_GROUP_NAME=Worker分组名称 +WORKER_IP_LIST=Worker ip列表,注意:多个IP地址以逗号分割\ -QUERY_WORKER_GROUP_PAGING_NOTES=Worker\u5206\u7EC4\u7BA1\u7406 -QUERY_WORKER_GROUP_LIST_NOTES=\u67E5\u8BE2worker group\u5206\u7EC4 -DELETE_WORKER_GROUP_BY_ID_NOTES=\u5220\u9664worker group\u901A\u8FC7ID -DATA_ANALYSIS_TAG=\u4EFB\u52A1\u72B6\u6001\u5206\u6790\u76F8\u5173\u64CD\u4F5C -COUNT_TASK_STATE_NOTES=\u4EFB\u52A1\u72B6\u6001\u7EDF\u8BA1 -COUNT_PROCESS_INSTANCE_NOTES=\u7EDF\u8BA1\u6D41\u7A0B\u5B9E\u4F8B\u72B6\u6001 -COUNT_PROCESS_DEFINITION_BY_USER_NOTES=\u7EDF\u8BA1\u7528\u6237\u521B\u5EFA\u7684\u6D41\u7A0B\u5B9A\u4E49 -COUNT_COMMAND_STATE_NOTES=\u7EDF\u8BA1\u547D\u4EE4\u72B6\u6001 -COUNT_QUEUE_STATE_NOTES=\u7EDF\u8BA1\u961F\u5217\u91CC\u4EFB\u52A1\u72B6\u6001 -ACCESS_TOKEN_TAG=access token\u76F8\u5173\u64CD\u4F5C\uFF0C\u9700\u8981\u5148\u767B\u5F55 -MONITOR_TAG=\u76D1\u63A7\u76F8\u5173\u64CD\u4F5C -MASTER_LIST_NOTES=master\u670D\u52A1\u5217\u8868 -WORKER_LIST_NOTES=worker\u670D\u52A1\u5217\u8868 -QUERY_DATABASE_STATE_NOTES=\u67E5\u8BE2\u6570\u636E\u5E93\u72B6\u6001 -QUERY_ZOOKEEPER_STATE_NOTES=\u67E5\u8BE2Zookeeper\u72B6\u6001 -TASK_STATE=\u4EFB\u52A1\u5B9E\u4F8B\u72B6\u6001 
-SOURCE_TABLE=\u6E90\u8868 -DEST_TABLE=\u76EE\u6807\u8868 -TASK_DATE=\u4EFB\u52A1\u65F6\u95F4 -QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u5386\u53F2\u4EFB\u52A1\u8BB0\u5F55\u5217\u8868 -DATA_SOURCE_TAG=\u6570\u636E\u6E90\u76F8\u5173\u64CD\u4F5C -CREATE_DATA_SOURCE_NOTES=\u521B\u5EFA\u6570\u636E\u6E90 -DATA_SOURCE_NAME=\u6570\u636E\u6E90\u540D\u79F0 -DATA_SOURCE_NOTE=\u6570\u636E\u6E90\u63CF\u8FF0 -DB_TYPE=\u6570\u636E\u6E90\u7C7B\u578B -DATA_SOURCE_HOST=IP\u4E3B\u673A\u540D -DATA_SOURCE_PORT=\u6570\u636E\u6E90\u7AEF\u53E3 -DATABASE_NAME=\u6570\u636E\u5E93\u540D -QUEUE_TAG=\u961F\u5217\u76F8\u5173\u64CD\u4F5C -QUERY_QUEUE_LIST_NOTES=\u67E5\u8BE2\u961F\u5217\u5217\u8868 -QUERY_QUEUE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u961F\u5217\u5217\u8868 -CREATE_QUEUE_NOTES=\u521B\u5EFA\u961F\u5217 -YARN_QUEUE_NAME=hadoop yarn\u961F\u5217\u540D -QUEUE_ID=\u961F\u5217ID -TENANT_DESC=\u79DF\u6237\u63CF\u8FF0 -QUERY_TENANT_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u79DF\u6237\u5217\u8868 -QUERY_TENANT_LIST_NOTES=\u67E5\u8BE2\u79DF\u6237\u5217\u8868 -UPDATE_TENANT_NOTES=\u66F4\u65B0\u79DF\u6237 -DELETE_TENANT_NOTES=\u5220\u9664\u79DF\u6237 -RESOURCES_TAG=\u8D44\u6E90\u4E2D\u5FC3\u76F8\u5173\u64CD\u4F5C -CREATE_RESOURCE_NOTES=\u521B\u5EFA\u8D44\u6E90 -RESOURCE_TYPE=\u8D44\u6E90\u6587\u4EF6\u7C7B\u578B -RESOURCE_NAME=\u8D44\u6E90\u6587\u4EF6\u540D\u79F0 -RESOURCE_DESC=\u8D44\u6E90\u6587\u4EF6\u63CF\u8FF0 -RESOURCE_FILE=\u8D44\u6E90\u6587\u4EF6 -RESOURCE_ID=\u8D44\u6E90ID -QUERY_RESOURCE_LIST_NOTES=\u67E5\u8BE2\u8D44\u6E90\u5217\u8868 -DELETE_RESOURCE_BY_ID_NOTES=\u5220\u9664\u8D44\u6E90\u901A\u8FC7ID -VIEW_RESOURCE_BY_ID_NOTES=\u6D4F\u89C8\u8D44\u6E90\u901A\u901A\u8FC7ID -ONLINE_CREATE_RESOURCE_NOTES=\u5728\u7EBF\u521B\u5EFA\u8D44\u6E90 -SUFFIX=\u8D44\u6E90\u6587\u4EF6\u540E\u7F00 -CONTENT=\u8D44\u6E90\u6587\u4EF6\u5185\u5BB9 -UPDATE_RESOURCE_NOTES=\u5728\u7EBF\u66F4\u65B0\u8D44\u6E90\u6587\u4EF6 
-DOWNLOAD_RESOURCE_NOTES=\u4E0B\u8F7D\u8D44\u6E90\u6587\u4EF6 -CREATE_UDF_FUNCTION_NOTES=\u521B\u5EFAUDF\u51FD\u6570 -UDF_TYPE=UDF\u7C7B\u578B -FUNC_NAME=\u51FD\u6570\u540D\u79F0 -CLASS_NAME=\u5305\u540D\u7C7B\u540D -ARG_TYPES=\u53C2\u6570 -UDF_DESC=udf\u63CF\u8FF0\uFF0C\u4F7F\u7528\u8BF4\u660E -VIEW_UDF_FUNCTION_NOTES=\u67E5\u770Budf\u51FD\u6570 -UPDATE_UDF_FUNCTION_NOTES=\u66F4\u65B0udf\u51FD\u6570 -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2udf\u51FD\u6570\u5217\u8868 -VERIFY_UDF_FUNCTION_NAME_NOTES=\u9A8C\u8BC1udf\u51FD\u6570\u540D -DELETE_UDF_FUNCTION_NOTES=\u5220\u9664UDF\u51FD\u6570 -AUTHORIZED_FILE_NOTES=\u6388\u6743\u6587\u4EF6 -UNAUTHORIZED_FILE_NOTES=\u53D6\u6D88\u6388\u6743\u6587\u4EF6 -AUTHORIZED_UDF_FUNC_NOTES=\u6388\u6743udf\u51FD\u6570 -UNAUTHORIZED_UDF_FUNC_NOTES=\u53D6\u6D88udf\u51FD\u6570\u6388\u6743 -VERIFY_QUEUE_NOTES=\u9A8C\u8BC1\u961F\u5217 -TENANT_TAG=\u79DF\u6237\u76F8\u5173\u64CD\u4F5C -CREATE_TENANT_NOTES=\u521B\u5EFA\u79DF\u6237 -TENANT_CODE=\u79DF\u6237\u7F16\u7801 -TENANT_NAME=\u79DF\u6237\u540D\u79F0 -QUEUE_NAME=\u961F\u5217\u540D -PASSWORD=\u5BC6\u7801 -DATA_SOURCE_OTHER=jdbc\u8FDE\u63A5\u53C2\u6570\uFF0C\u683C\u5F0F\u4E3A:{"key1":"value1",...} -PROJECT_TAG=\u9879\u76EE\u76F8\u5173\u64CD\u4F5C -CREATE_PROJECT_NOTES=\u521B\u5EFA\u9879\u76EE -PROJECT_DESC=\u9879\u76EE\u63CF\u8FF0 -UPDATE_PROJECT_NOTES=\u66F4\u65B0\u9879\u76EE -PROJECT_ID=\u9879\u76EEID -QUERY_PROJECT_BY_ID_NOTES=\u901A\u8FC7\u9879\u76EEID\u67E5\u8BE2\u9879\u76EE\u4FE1\u606F -QUERY_PROJECT_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u9879\u76EE\u5217\u8868 -QUERY_ALL_PROJECT_LIST_NOTES=\u67E5\u8BE2\u6240\u6709\u9879\u76EE -DELETE_PROJECT_BY_ID_NOTES=\u5220\u9664\u9879\u76EE\u901A\u8FC7ID -QUERY_UNAUTHORIZED_PROJECT_NOTES=\u67E5\u8BE2\u672A\u6388\u6743\u7684\u9879\u76EE -QUERY_AUTHORIZED_PROJECT_NOTES=\u67E5\u8BE2\u6388\u6743\u9879\u76EE -TASK_RECORD_TAG=\u4EFB\u52A1\u8BB0\u5F55\u76F8\u5173\u64CD\u4F5C 
-QUERY_TASK_RECORD_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u4EFB\u52A1\u8BB0\u5F55\u5217\u8868 -CREATE_TOKEN_NOTES=\u521B\u5EFAtoken\uFF0C\u6CE8\u610F\u9700\u8981\u5148\u767B\u5F55 -QUERY_ACCESS_TOKEN_LIST_NOTES=\u5206\u9875\u67E5\u8BE2access token\u5217\u8868 -SCHEDULE=\u5B9A\u65F6 -WARNING_TYPE=\u53D1\u9001\u7B56\u7565 -WARNING_GROUP_ID=\u53D1\u9001\u7EC4ID -FAILURE_STRATEGY=\u5931\u8D25\u7B56\u7565 -RECEIVERS=\u6536\u4EF6\u4EBA -RECEIVERS_CC=\u6536\u4EF6\u4EBA(\u6284\u9001) -WORKER_GROUP_ID=Worker Server\u5206\u7EC4ID -PROCESS_INSTANCE_PRIORITY=\u6D41\u7A0B\u5B9E\u4F8B\u4F18\u5148\u7EA7 -UPDATE_SCHEDULE_NOTES=\u66F4\u65B0\u5B9A\u65F6 -SCHEDULE_ID=\u5B9A\u65F6ID -ONLINE_SCHEDULE_NOTES=\u5B9A\u65F6\u4E0A\u7EBF -OFFLINE_SCHEDULE_NOTES=\u5B9A\u65F6\u4E0B\u7EBF -QUERY_SCHEDULE_NOTES=\u67E5\u8BE2\u5B9A\u65F6 -QUERY_SCHEDULE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u5B9A\u65F6 -LOGIN_TAG=\u7528\u6237\u767B\u5F55\u76F8\u5173\u64CD\u4F5C -USER_NAME=\u7528\u6237\u540D -PROJECT_NAME=\u9879\u76EE\u540D\u79F0 -CREATE_PROCESS_DEFINITION_NOTES=\u521B\u5EFA\u6D41\u7A0B\u5B9A\u4E49 -PROCESS_DEFINITION_NAME=\u6D41\u7A0B\u5B9A\u4E49\u540D\u79F0 -PROCESS_DEFINITION_JSON=\u6D41\u7A0B\u5B9A\u4E49\u8BE6\u7EC6\u4FE1\u606F(json\u683C\u5F0F) -PROCESS_DEFINITION_LOCATIONS=\u6D41\u7A0B\u5B9A\u4E49\u8282\u70B9\u5750\u6807\u4F4D\u7F6E\u4FE1\u606F(json\u683C\u5F0F) -PROCESS_INSTANCE_LOCATIONS=\u6D41\u7A0B\u5B9E\u4F8B\u8282\u70B9\u5750\u6807\u4F4D\u7F6E\u4FE1\u606F(json\u683C\u5F0F) -PROCESS_DEFINITION_CONNECTS=\u6D41\u7A0B\u5B9A\u4E49\u8282\u70B9\u56FE\u6807\u8FDE\u63A5\u4FE1\u606F(json\u683C\u5F0F) -PROCESS_INSTANCE_CONNECTS=\u6D41\u7A0B\u5B9E\u4F8B\u8282\u70B9\u56FE\u6807\u8FDE\u63A5\u4FE1\u606F(json\u683C\u5F0F) -PROCESS_DEFINITION_DESC=\u6D41\u7A0B\u5B9A\u4E49\u63CF\u8FF0\u4FE1\u606F -PROCESS_DEFINITION_TAG=\u6D41\u7A0B\u5B9A\u4E49\u76F8\u5173\u64CD\u4F5C -SIGNOUT_NOTES=\u9000\u51FA\u767B\u5F55 -USER_PASSWORD=\u7528\u6237\u5BC6\u7801 
-UPDATE_PROCESS_INSTANCE_NOTES=\u66F4\u65B0\u6D41\u7A0B\u5B9E\u4F8B -QUERY_PROCESS_INSTANCE_LIST_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9E\u4F8B\u5217\u8868 -VERIFY_PROCESS_DEFINITION_NAME_NOTES=\u9A8C\u8BC1\u6D41\u7A0B\u5B9A\u4E49\u540D\u5B57 -LOGIN_NOTES=\u7528\u6237\u767B\u5F55 -UPDATE_PROCESS_DEFINITION_NOTES=\u66F4\u65B0\u6D41\u7A0B\u5B9A\u4E49 -PROCESS_DEFINITION_ID=\u6D41\u7A0B\u5B9A\u4E49ID -RELEASE_PROCESS_DEFINITION_NOTES=\u53D1\u5E03\u6D41\u7A0B\u5B9A\u4E49 -QUERY_PROCESS_DEFINITION_BY_ID_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u901A\u8FC7\u6D41\u7A0B\u5B9A\u4E49ID -QUERY_PROCESS_DEFINITION_LIST_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\u8868 -QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u5217\u8868 -QUERY_ALL_DEFINITION_LIST_NOTES=\u67E5\u8BE2\u6240\u6709\u6D41\u7A0B\u5B9A\u4E49 -PAGE_NO=\u9875\u7801\u53F7 -PROCESS_INSTANCE_ID=\u6D41\u7A0B\u5B9E\u4F8BID -PROCESS_INSTANCE_IDS=\u6D41\u7A0B\u5B9E\u4F8BID\u96C6\u5408 -PROCESS_INSTANCE_JSON=\u6D41\u7A0B\u5B9E\u4F8B\u4FE1\u606F(json\u683C\u5F0F) -SCHEDULE_TIME=\u5B9A\u65F6\u65F6\u95F4 -SYNC_DEFINE=\u66F4\u65B0\u6D41\u7A0B\u5B9E\u4F8B\u7684\u4FE1\u606F\u662F\u5426\u540C\u6B65\u5230\u6D41\u7A0B\u5B9A\u4E49 -RECOVERY_PROCESS_INSTANCE_FLAG=\u662F\u5426\u6062\u590D\u6D41\u7A0B\u5B9E\u4F8B -SEARCH_VAL=\u641C\u7D22\u503C -USER_ID=\u7528\u6237ID -PAGE_SIZE=\u9875\u5927\u5C0F -LIMIT=\u663E\u793A\u591A\u5C11\u6761 -VIEW_TREE_NOTES=\u6811\u72B6\u56FE -GET_NODE_LIST_BY_DEFINITION_ID_NOTES=\u83B7\u5F97\u4EFB\u52A1\u8282\u70B9\u5217\u8868\u901A\u8FC7\u6D41\u7A0B\u5B9A\u4E49ID -PROCESS_DEFINITION_ID_LIST=\u6D41\u7A0B\u5B9A\u4E49id\u5217\u8868 -QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9A\u4E49\u901A\u8FC7\u9879\u76EEID -BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=\u6279\u91CF\u5220\u9664\u6D41\u7A0B\u5B9A\u4E49\u901A\u8FC7\u6D41\u7A0B\u5B9A\u4E49ID\u96C6\u5408 
-DELETE_PROCESS_DEFINITION_BY_ID_NOTES=\u5220\u9664\u6D41\u7A0B\u5B9A\u4E49\u901A\u8FC7\u6D41\u7A0B\u5B9A\u4E49ID -QUERY_PROCESS_INSTANCE_BY_ID_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9E\u4F8B\u901A\u8FC7\u6D41\u7A0B\u5B9E\u4F8BID -DELETE_PROCESS_INSTANCE_BY_ID_NOTES=\u5220\u9664\u6D41\u7A0B\u5B9E\u4F8B\u901A\u8FC7\u6D41\u7A0B\u5B9E\u4F8BID -TASK_ID=\u4EFB\u52A1\u5B9E\u4F8BID -SKIP_LINE_NUM=\u5FFD\u7565\u884C\u6570 -QUERY_TASK_INSTANCE_LOG_NOTES=\u67E5\u8BE2\u4EFB\u52A1\u5B9E\u4F8B\u65E5\u5FD7 -DOWNLOAD_TASK_INSTANCE_LOG_NOTES=\u4E0B\u8F7D\u4EFB\u52A1\u5B9E\u4F8B\u65E5\u5FD7 -USERS_TAG=\u7528\u6237\u76F8\u5173\u64CD\u4F5C -SCHEDULER_TAG=\u5B9A\u65F6\u76F8\u5173\u64CD\u4F5C -CREATE_SCHEDULE_NOTES=\u521B\u5EFA\u5B9A\u65F6 -CREATE_USER_NOTES=\u521B\u5EFA\u7528\u6237 -TENANT_ID=\u79DF\u6237ID -QUEUE=\u4F7F\u7528\u7684\u961F\u5217 -EMAIL=\u90AE\u7BB1 -PHONE=\u624B\u673A\u53F7 -QUERY_USER_LIST_NOTES=\u67E5\u8BE2\u7528\u6237\u5217\u8868 -UPDATE_USER_NOTES=\u66F4\u65B0\u7528\u6237 -DELETE_USER_BY_ID_NOTES=\u5220\u9664\u7528\u6237\u901A\u8FC7ID -GRANT_PROJECT_NOTES=\u6388\u6743\u9879\u76EE -PROJECT_IDS=\u9879\u76EEIDS(\u5B57\u7B26\u4E32\u683C\u5F0F\uFF0C\u591A\u4E2A\u9879\u76EE\u4EE5","\u5206\u5272) -GRANT_RESOURCE_NOTES=\u6388\u6743\u8D44\u6E90\u6587\u4EF6 -RESOURCE_IDS=\u8D44\u6E90ID\u5217\u8868(\u5B57\u7B26\u4E32\u683C\u5F0F\uFF0C\u591A\u4E2A\u8D44\u6E90ID\u4EE5","\u5206\u5272) -GET_USER_INFO_NOTES=\u83B7\u53D6\u7528\u6237\u4FE1\u606F -LIST_USER_NOTES=\u7528\u6237\u5217\u8868 -VERIFY_USER_NAME_NOTES=\u9A8C\u8BC1\u7528\u6237\u540D -UNAUTHORIZED_USER_NOTES=\u53D6\u6D88\u6388\u6743 -ALERT_GROUP_ID=\u62A5\u8B66\u7EC4ID -AUTHORIZED_USER_NOTES=\u6388\u6743\u7528\u6237 -GRANT_UDF_FUNC_NOTES=\u6388\u6743udf\u51FD\u6570 -UDF_IDS=udf\u51FD\u6570id\u5217\u8868(\u5B57\u7B26\u4E32\u683C\u5F0F\uFF0C\u591A\u4E2Audf\u51FD\u6570ID\u4EE5","\u5206\u5272) -GRANT_DATASOURCE_NOTES=\u6388\u6743\u6570\u636E\u6E90 
-DATASOURCE_IDS=\u6570\u636E\u6E90ID\u5217\u8868(\u5B57\u7B26\u4E32\u683C\u5F0F\uFF0C\u591A\u4E2A\u6570\u636E\u6E90ID\u4EE5","\u5206\u5272) -QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=\u67E5\u8BE2\u5B50\u6D41\u7A0B\u5B9E\u4F8B\u901A\u8FC7\u4EFB\u52A1\u5B9E\u4F8BID -QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=\u67E5\u8BE2\u7236\u6D41\u7A0B\u5B9E\u4F8B\u4FE1\u606F\u901A\u8FC7\u5B50\u6D41\u7A0B\u5B9E\u4F8BID -QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5B9E\u4F8B\u5168\u5C40\u53D8\u91CF\u548C\u5C40\u90E8\u53D8\u91CF -VIEW_GANTT_NOTES=\u6D4F\u89C8Gantt\u56FE -SUB_PROCESS_INSTANCE_ID=\u5B50\u6D41\u7A0B\u662F\u54A7ID -TASK_NAME=\u4EFB\u52A1\u5B9E\u4F8B\u540D -TASK_INSTANCE_TAG=\u4EFB\u52A1\u5B9E\u4F8B\u76F8\u5173\u64CD\u4F5C -LOGGER_TAG=\u65E5\u5FD7\u76F8\u5173\u64CD\u4F5C -PROCESS_INSTANCE_TAG=\u6D41\u7A0B\u5B9E\u4F8B\u76F8\u5173\u64CD\u4F5C -EXECUTION_STATUS=\u5DE5\u4F5C\u6D41\u548C\u4EFB\u52A1\u8282\u70B9\u7684\u8FD0\u884C\u72B6\u6001 -HOST=\u8FD0\u884C\u4EFB\u52A1\u7684\u4E3B\u673AIP\u5730\u5740 -START_DATE=\u5F00\u59CB\u65F6\u95F4 -END_DATE=\u7ED3\u675F\u65F6\u95F4 -QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=\u901A\u8FC7\u6D41\u7A0B\u5B9E\u4F8BID\u67E5\u8BE2\u4EFB\u52A1\u5217\u8868 -UPDATE_DATA_SOURCE_NOTES=\u66F4\u65B0\u6570\u636E\u6E90 -DATA_SOURCE_ID=\u6570\u636E\u6E90ID -QUERY_DATA_SOURCE_NOTES=\u67E5\u8BE2\u6570\u636E\u6E90\u901A\u8FC7ID -QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=\u67E5\u8BE2\u6570\u636E\u6E90\u5217\u8868\u901A\u8FC7\u6570\u636E\u6E90\u7C7B\u578B -QUERY_DATA_SOURCE_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u6570\u636E\u6E90\u5217\u8868 -CONNECT_DATA_SOURCE_NOTES=\u8FDE\u63A5\u6570\u636E\u6E90 -CONNECT_DATA_SOURCE_TEST_NOTES=\u8FDE\u63A5\u6570\u636E\u6E90\u6D4B\u8BD5 -DELETE_DATA_SOURCE_NOTES=\u5220\u9664\u6570\u636E\u6E90 -VERIFY_DATA_SOURCE_NOTES=\u9A8C\u8BC1\u6570\u636E\u6E90 -UNAUTHORIZED_DATA_SOURCE_NOTES=\u672A\u6388\u6743\u7684\u6570\u636E\u6E90 
-AUTHORIZED_DATA_SOURCE_NOTES=\u6388\u6743\u7684\u6570\u636E\u6E90 -DELETE_SCHEDULER_BY_ID_NOTES=\u6839\u636E\u5B9A\u65F6id\u5220\u9664\u5B9A\u65F6\u6570\u636E -QUERY_ALERT_GROUP_LIST_PAGING_NOTES=\u5206\u9875\u67E5\u8BE2\u544A\u8B66\u7EC4\u5217\u8868 -EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=\u901A\u8FC7\u5DE5\u4F5C\u6D41ID\u5BFC\u51FA\u5DE5\u4F5C\u6D41\u5B9A\u4E49 -BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=\u6279\u91CF\u5BFC\u51FA\u5DE5\u4F5C\u6D41\u5B9A\u4E49 -QUERY_USER_CREATED_PROJECT_NOTES= \u67E5\u8BE2\u7528\u6237\u521B\u5EFA\u7684\u9879\u76EE -COPY_PROCESS_DEFINITION_NOTES= \u590D\u5236\u5DE5\u4F5C\u6D41\u5B9A\u4E49 -MOVE_PROCESS_DEFINITION_NOTES= \u79FB\u52A8\u5DE5\u4F5C\u6D41\u5B9A\u4E49 -TARGET_PROJECT_ID= \u76EE\u6807\u9879\u76EEID -IS_COPY = \u662F\u5426\u590D\u5236 -DELETE_PROCESS_DEFINITION_VERSION_NOTES=\u5220\u9664\u6D41\u7A0B\u5386\u53F2\u7248\u672C -QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=\u67E5\u8BE2\u6D41\u7A0B\u5386\u53F2\u7248\u672C\u4FE1\u606F -SWITCH_PROCESS_DEFINITION_VERSION_NOTES=\u5207\u6362\u6D41\u7A0B\u7248\u672C -VERSION=\u7248\u672C\u53F7 +QUERY_WORKER_GROUP_PAGING_NOTES=Worker分组管理 +QUERY_WORKER_GROUP_LIST_NOTES=查询worker group分组 +DELETE_WORKER_GROUP_BY_ID_NOTES=删除worker group通过ID +DATA_ANALYSIS_TAG=任务状态分析相关操作 +COUNT_TASK_STATE_NOTES=任务状态统计 +COUNT_PROCESS_INSTANCE_NOTES=统计流程实例状态 +COUNT_PROCESS_DEFINITION_BY_USER_NOTES=统计用户创建的流程定义 +COUNT_COMMAND_STATE_NOTES=统计命令状态 +COUNT_QUEUE_STATE_NOTES=统计队列里任务状态 +ACCESS_TOKEN_TAG=access token相关操作,需要先登录 +MONITOR_TAG=监控相关操作 +MASTER_LIST_NOTES=master服务列表 +WORKER_LIST_NOTES=worker服务列表 +QUERY_DATABASE_STATE_NOTES=查询数据库状态 +QUERY_ZOOKEEPER_STATE_NOTES=查询Zookeeper状态 +TASK_STATE=任务实例状态 +SOURCE_TABLE=源表 +DEST_TABLE=目标表 +TASK_DATE=任务时间 +QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=分页查询历史任务记录列表 +DATA_SOURCE_TAG=数据源相关操作 +CREATE_DATA_SOURCE_NOTES=创建数据源 +DATA_SOURCE_NAME=数据源名称 +DATA_SOURCE_NOTE=数据源描述 +DB_TYPE=数据源类型 +DATA_SOURCE_HOST=IP主机名 +DATA_SOURCE_PORT=数据源端口 +DATABASE_NAME=数据库名 +QUEUE_TAG=队列相关操作 
+QUERY_QUEUE_LIST_NOTES=查询队列列表 +QUERY_QUEUE_LIST_PAGING_NOTES=分页查询队列列表 +CREATE_QUEUE_NOTES=创建队列 +YARN_QUEUE_NAME=hadoop yarn队列名 +QUEUE_ID=队列ID +TENANT_DESC=租户描述 +QUERY_TENANT_LIST_PAGING_NOTES=分页查询租户列表 +QUERY_TENANT_LIST_NOTES=查询租户列表 +UPDATE_TENANT_NOTES=更新租户 +DELETE_TENANT_NOTES=删除租户 +RESOURCES_TAG=资源中心相关操作 +CREATE_RESOURCE_NOTES=创建资源 +RESOURCE_TYPE=资源文件类型 +RESOURCE_NAME=资源文件名称 +RESOURCE_DESC=资源文件描述 +RESOURCE_FILE=资源文件 +RESOURCE_ID=资源ID +QUERY_RESOURCE_LIST_NOTES=查询资源列表 +DELETE_RESOURCE_BY_ID_NOTES=删除资源通过ID +VIEW_RESOURCE_BY_ID_NOTES=浏览资源通通过ID +ONLINE_CREATE_RESOURCE_NOTES=在线创建资源 +SUFFIX=资源文件后缀 +CONTENT=资源文件内容 +UPDATE_RESOURCE_NOTES=在线更新资源文件 +DOWNLOAD_RESOURCE_NOTES=下载资源文件 +CREATE_UDF_FUNCTION_NOTES=创建UDF函数 +UDF_TYPE=UDF类型 +FUNC_NAME=函数名称 +CLASS_NAME=包名类名 +ARG_TYPES=参数 +UDF_DESC=udf描述,使用说明 +VIEW_UDF_FUNCTION_NOTES=查看udf函数 +UPDATE_UDF_FUNCTION_NOTES=更新udf函数 +QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表 +VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名 +DELETE_UDF_FUNCTION_NOTES=删除UDF函数 +AUTHORIZED_FILE_NOTES=授权文件 +UNAUTHORIZED_FILE_NOTES=取消授权文件 +AUTHORIZED_UDF_FUNC_NOTES=授权udf函数 +UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权 +VERIFY_QUEUE_NOTES=验证队列 +TENANT_TAG=租户相关操作 +CREATE_TENANT_NOTES=创建租户 +TENANT_CODE=租户编码 +QUEUE_NAME=队列名 +PASSWORD=密码 +DATA_SOURCE_OTHER=jdbc连接参数,格式为:{"key1":"value1",...} +PROJECT_TAG=项目相关操作 +CREATE_PROJECT_NOTES=创建项目 +PROJECT_DESC=项目描述 +UPDATE_PROJECT_NOTES=更新项目 +PROJECT_ID=项目ID +QUERY_PROJECT_BY_ID_NOTES=通过项目ID查询项目信息 +QUERY_PROJECT_LIST_PAGING_NOTES=分页查询项目列表 +QUERY_ALL_PROJECT_LIST_NOTES=查询所有项目 +DELETE_PROJECT_BY_ID_NOTES=删除项目通过ID +QUERY_UNAUTHORIZED_PROJECT_NOTES=查询未授权的项目 +QUERY_AUTHORIZED_PROJECT_NOTES=查询授权项目 +TASK_RECORD_TAG=任务记录相关操作 +QUERY_TASK_RECORD_LIST_PAGING_NOTES=分页查询任务记录列表 +CREATE_TOKEN_NOTES=创建token,注意需要先登录 +QUERY_ACCESS_TOKEN_LIST_NOTES=分页查询access token列表 +SCHEDULE=定时 +WARNING_TYPE=发送策略 +WARNING_GROUP_ID=发送组ID +FAILURE_STRATEGY=失败策略 +RECEIVERS=收件人 +RECEIVERS_CC=收件人(抄送) +WORKER_GROUP_ID=Worker Server分组ID +PROCESS_INSTANCE_PRIORITY=流程实例优先级 
+UPDATE_SCHEDULE_NOTES=更新定时 +SCHEDULE_ID=定时ID +ONLINE_SCHEDULE_NOTES=定时上线 +OFFLINE_SCHEDULE_NOTES=定时下线 +QUERY_SCHEDULE_NOTES=查询定时 +QUERY_SCHEDULE_LIST_PAGING_NOTES=分页查询定时 +LOGIN_TAG=用户登录相关操作 +USER_NAME=用户名 +PROJECT_NAME=项目名称 +CREATE_PROCESS_DEFINITION_NOTES=创建流程定义 +PROCESS_DEFINITION_NAME=流程定义名称 +PROCESS_DEFINITION_JSON=流程定义详细信息(json格式) +PROCESS_DEFINITION_LOCATIONS=流程定义节点坐标位置信息(json格式) +PROCESS_INSTANCE_LOCATIONS=流程实例节点坐标位置信息(json格式) +PROCESS_DEFINITION_CONNECTS=流程定义节点图标连接信息(json格式) +PROCESS_INSTANCE_CONNECTS=流程实例节点图标连接信息(json格式) +PROCESS_DEFINITION_DESC=流程定义描述信息 +PROCESS_DEFINITION_TAG=流程定义相关操作 +SIGNOUT_NOTES=退出登录 +USER_PASSWORD=用户密码 +UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 +QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 +VERIFY_PROCESS_DEFINITION_NAME_NOTES=验证流程定义名字 +LOGIN_NOTES=用户登录 +UPDATE_PROCESS_DEFINITION_NOTES=更新流程定义 +PROCESS_DEFINITION_ID=流程定义ID +RELEASE_PROCESS_DEFINITION_NOTES=发布流程定义 +QUERY_PROCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID +QUERY_PROCESS_DEFINITION_LIST_NOTES=查询流程定义列表 +QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 +QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义 +PAGE_NO=页码号 +PROCESS_INSTANCE_ID=流程实例ID +PROCESS_INSTANCE_IDS=流程实例ID集合 +PROCESS_INSTANCE_JSON=流程实例信息(json格式) +SCHEDULE_TIME=定时时间 +SYNC_DEFINE=更新流程实例的信息是否同步到流程定义 +RECOVERY_PROCESS_INSTANCE_FLAG=是否恢复流程实例 +SEARCH_VAL=搜索值 +USER_ID=用户ID +PAGE_SIZE=页大小 +LIMIT=显示多少条 +VIEW_TREE_NOTES=树状图 +GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID +PROCESS_DEFINITION_ID_LIST=流程定义id列表 +QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID +BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=批量删除流程定义通过流程定义ID集合 +DELETE_PROCESS_DEFINITION_BY_ID_NOTES=删除流程定义通过流程定义ID +QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID +DELETE_PROCESS_INSTANCE_BY_ID_NOTES=删除流程实例通过流程实例ID +TASK_ID=任务实例ID +SKIP_LINE_NUM=忽略行数 +QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志 +DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志 +USERS_TAG=用户相关操作 +SCHEDULER_TAG=定时相关操作 +CREATE_SCHEDULE_NOTES=创建定时 +CREATE_USER_NOTES=创建用户 +TENANT_ID=租户ID +QUEUE=使用的队列 
+EMAIL=邮箱 +PHONE=手机号 +QUERY_USER_LIST_NOTES=查询用户列表 +UPDATE_USER_NOTES=更新用户 +DELETE_USER_BY_ID_NOTES=删除用户通过ID +GRANT_PROJECT_NOTES=授权项目 +PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) +GRANT_RESOURCE_NOTES=授权资源文件 +RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) +GET_USER_INFO_NOTES=获取用户信息 +LIST_USER_NOTES=用户列表 +VERIFY_USER_NAME_NOTES=验证用户名 +UNAUTHORIZED_USER_NOTES=取消授权 +ALERT_GROUP_ID=报警组ID +AUTHORIZED_USER_NOTES=授权用户 +GRANT_UDF_FUNC_NOTES=授权udf函数 +UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) +GRANT_DATASOURCE_NOTES=授权数据源 +DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) +QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=查询子流程实例通过任务实例ID +QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=查询父流程实例信息通过子流程实例ID +QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=查询流程实例全局变量和局部变量 +VIEW_GANTT_NOTES=浏览Gantt图 +SUB_PROCESS_INSTANCE_ID=子流程是咧ID +TASK_NAME=任务实例名 +TASK_INSTANCE_TAG=任务实例相关操作 +LOGGER_TAG=日志相关操作 +PROCESS_INSTANCE_TAG=流程实例相关操作 +EXECUTION_STATUS=工作流和任务节点的运行状态 +HOST=运行任务的主机IP地址 +START_DATE=开始时间 +END_DATE=结束时间 +QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=通过流程实例ID查询任务列表 +UPDATE_DATA_SOURCE_NOTES=更新数据源 +DATA_SOURCE_ID=数据源ID +QUERY_DATA_SOURCE_NOTES=查询数据源通过ID +QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=查询数据源列表通过数据源类型 +QUERY_DATA_SOURCE_LIST_PAGING_NOTES=分页查询数据源列表 +CONNECT_DATA_SOURCE_NOTES=连接数据源 +CONNECT_DATA_SOURCE_TEST_NOTES=连接数据源测试 +DELETE_DATA_SOURCE_NOTES=删除数据源 +VERIFY_DATA_SOURCE_NOTES=验证数据源 +UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源 +AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源 +DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据 +QUERY_ALERT_GROUP_LIST_PAGING_NOTES=分页查询告警组列表 +EXPORT_PROCESS_DEFINITION_BY_ID_NOTES=通过工作流ID导出工作流定义 +BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES=批量导出工作流定义 +QUERY_USER_CREATED_PROJECT_NOTES= 查询用户创建的项目 +QUERY_AUTHORIZED_AND_USER_CREATED_PROJECT_NOTES= 查询授权和用户创建的项目 +COPY_PROCESS_DEFINITION_NOTES= 复制工作流定义 +MOVE_PROCESS_DEFINITION_NOTES= 移动工作流定义 +TARGET_PROJECT_ID= 目标项目ID +IS_COPY = 是否复制 +DELETE_PROCESS_DEFINITION_VERSION_NOTES=删除流程历史版本 
+QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=查询流程历史版本信息 +SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本 +VERSION=版本号 diff --git a/dolphinscheduler-api/src/main/resources/logback-api.xml b/dolphinscheduler-api/src/main/resources/logback-api.xml index 2df90d8392..e5cb37afac 100644 --- a/dolphinscheduler-api/src/main/resources/logback-api.xml +++ b/dolphinscheduler-api/src/main/resources/logback-api.xml @@ -55,7 +55,6 @@ - diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java index f2a54a1a88..692d28b56e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java @@ -117,14 +117,14 @@ public class ProcessDefinitionControllerTest { public void testVerifyProcessDefinitionName() throws Exception { Map result = new HashMap<>(); - putMsg(result, Status.PROCESS_INSTANCE_EXIST); + putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR); String projectName = "test"; String name = "dag_test"; Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result); Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name); - Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST.getCode(), response.getCode().intValue()); + Assert.assertEquals(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), response.getCode().intValue()); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java index 368981c0d3..b97c7c192f 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TaskInstanceControllerTest.java @@ -14,52 +14,60 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.controller; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.TaskInstanceService; +import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; + +import java.util.HashMap; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.http.MediaType; -import org.springframework.test.web.servlet.MvcResult; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; /** * task instance controller test */ -public class TaskInstanceControllerTest extends AbstractControllerTest{ - private static Logger logger = 
LoggerFactory.getLogger(TaskInstanceControllerTest.class); +@RunWith(MockitoJUnitRunner.Silent.class) +public class TaskInstanceControllerTest { + + @InjectMocks + private TaskInstanceController taskInstanceController; + + @Mock + private TaskInstanceService taskInstanceService; @Test - public void testQueryTaskListPaging() throws Exception { + public void testQueryTaskListPaging() { - MultiValueMap paramsMap = new LinkedMultiValueMap<>(); - //paramsMap.add("processInstanceId","1380"); - paramsMap.add("searchVal",""); - paramsMap.add("taskName",""); - //paramsMap.add("stateType",""); - paramsMap.add("startDate","2019-02-26 19:48:00"); - paramsMap.add("endDate","2019-02-26 19:48:22"); - paramsMap.add("pageNo","1"); - paramsMap.add("pageSize","20"); + Map result = new HashMap<>(); + Integer pageNo = 1; + Integer pageSize = 20; + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + result.put(Constants.DATA_LIST, pageInfo); + result.put(Constants.STATUS, Status.SUCCESS); - MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/task-instance/list-paging","cxc_1113") - .header(SESSION_ID, sessionId) - .params(paramsMap)) - .andExpect(status().isOk()) - .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) - .andReturn(); + when(taskInstanceService.queryTaskListPaging(any(), eq(""), eq(1), eq(""), eq(""), eq(""),any(), any(), + eq(""), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(result); + Result taskResult = taskInstanceController.queryTaskListPaging(null, "", 1, "", "", + "", "", ExecutionStatus.SUCCESS,"192.168.xx.xx", "2020-01-01 00:00:00", "2020-01-02 00:00:00",pageNo, pageSize); + Assert.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()), taskResult.getCode()); - Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); - logger.info(mvcResult.getResponse().getContentAsString()); } + } diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java index 7cf622ab9f..010b7ba36c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/TenantControllerTest.java @@ -43,7 +43,6 @@ public class TenantControllerTest extends AbstractControllerTest{ public void testCreateTenant() throws Exception { MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("tenantCode","tenantCode"); - paramsMap.add("tenantName","tenantName"); paramsMap.add("queueId","1"); paramsMap.add("description","tenant description"); @@ -84,7 +83,6 @@ public class TenantControllerTest extends AbstractControllerTest{ MultiValueMap paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("id","9"); paramsMap.add("tenantCode","cxc_te"); - paramsMap.add("tenantName","tenant_update_2"); paramsMap.add("queueId","1"); paramsMap.add("description","tenant description"); @@ -96,7 +94,7 @@ public class TenantControllerTest extends AbstractControllerTest{ .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + Assert.assertEquals(Status.TENANT_NOT_EXIST.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } @@ -133,7 +131,7 @@ public class TenantControllerTest extends AbstractControllerTest{ .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.TENANT_NAME_EXIST.getCode(), result.getCode().intValue()); + Assert.assertEquals(Status.TENANT_CODE_EXIST.getCode(), result.getCode().intValue()); 
logger.info(mvcResult.getResponse().getContentAsString()); } @@ -165,7 +163,7 @@ public class TenantControllerTest extends AbstractControllerTest{ .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); - Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); + Assert.assertEquals(Status.TENANT_NOT_EXIST.getCode(),result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); } } diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/JsonSerializerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java similarity index 51% rename from dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/JsonSerializerTest.java rename to dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java index cb92db7f25..a96cec9158 100644 --- a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/JsonSerializerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigLDAPTest.java @@ -15,44 +15,31 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.remote; +package org.apache.dolphinscheduler.api.security; +import org.apache.dolphinscheduler.api.ApiApplicationServer; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.junit.Assert; import org.junit.Test; - -public class JsonSerializerTest { +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +@TestPropertySource(properties = { + "security.authentication.type=LDAP", +}) +public class SecurityConfigLDAPTest { + + @Autowired + private SecurityConfig securityConfig; @Test - public void testSerialize(){ - TestObj testObj = new TestObj(); - testObj.setAge(12); - byte[] serializeByte = JsonSerializer.serialize(testObj); - - // - TestObj deserialize = JsonSerializer.deserialize(serializeByte, TestObj.class); - - Assert.assertEquals(testObj.getAge(), deserialize.getAge()); - } - - static class TestObj { - - private int age; - - public int getAge() { - return age; - } - - public void setAge(int age) { - this.age = age; - } - - @Override - public String toString() { - return "TestObj{" + - "age=" + age + - '}'; - } + public void testAuthenticator() { + Authenticator authenticator = securityConfig.authenticator(); + Assert.assertNotNull(authenticator); } } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigPasswordTest.java similarity index 97% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigTest.java rename to 
dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigPasswordTest.java index 98e6829ac6..cf1023e786 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/SecurityConfigPasswordTest.java @@ -14,9 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.security; import org.apache.dolphinscheduler.api.ApiApplicationServer; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -30,7 +32,7 @@ import org.springframework.test.context.junit4.SpringRunner; @TestPropertySource(properties = { "security.authentication.type=PASSWORD", }) -public class SecurityConfigTest { +public class SecurityConfigPasswordTest { @Autowired private SecurityConfig securityConfig; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java new file mode 100644 index 0000000000..00612597b7 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapAuthenticatorTest.java @@ -0,0 +1,142 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.security.impl.ldap; + +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.SessionService; +import org.apache.dolphinscheduler.api.service.UsersService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.Flag; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.Session; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Date; +import java.util.UUID; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +@TestPropertySource( + properties = { + "security.authentication.type=LDAP", + "security.authentication.ldap.user.admin=read-only-admin", + 
"ldap.urls=ldap://ldap.forumsys.com:389/", + "ldap.base.dn=dc=example,dc=com", + "ldap.username=cn=read-only-admin,dc=example,dc=com", + "ldap.password=password", + "ldap.user.identity.attribute=uid", + "ldap.user.email.attribute=mail", + }) +public class LdapAuthenticatorTest { + private static Logger logger = LoggerFactory.getLogger(LdapAuthenticatorTest.class); + @Autowired + protected AutowireCapableBeanFactory beanFactory; + @MockBean + private LdapService ldapService; + @MockBean + private SessionService sessionService; + @MockBean + private UsersService usersService; + + private LdapAuthenticator ldapAuthenticator; + + //test param + private User mockUser; + private Session mockSession; + + private String ldapUid = "test"; + private String ldapUserPwd = "password"; + private String ldapEmail = "test@example.com"; + private String ip = "127.0.0.1"; + private UserType userType = UserType.GENERAL_USER; + + @Before + public void setUp() { + ldapAuthenticator = new LdapAuthenticator(); + beanFactory.autowireBean(ldapAuthenticator); + + mockUser = new User(); + mockUser.setId(1); + mockUser.setUserName(ldapUid); + mockUser.setEmail(ldapEmail); + mockUser.setUserType(userType); + mockUser.setState(Flag.YES.getCode()); + + mockSession = new Session(); + mockSession.setId(UUID.randomUUID().toString()); + mockSession.setIp(ip); + mockSession.setUserId(1); + mockSession.setLastLoginTime(new Date()); + + } + + @Test + public void testAuthenticate() { + when(usersService.createUser(userType, ldapUid, ldapEmail)).thenReturn(mockUser); + when(usersService.getUserByUserName(ldapUid)).thenReturn(mockUser); + when(sessionService.createSession(mockUser, ip)).thenReturn(mockSession.getId()); + + when(ldapService.ldapLogin(ldapUid, ldapUserPwd)).thenReturn(ldapEmail); + + Result result = ldapAuthenticator.authenticate(ldapUid, ldapUserPwd, ip); + Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode()); + logger.info(result.toString()); + + 
when(sessionService.createSession(mockUser, ip)).thenReturn(null); + result = ldapAuthenticator.authenticate(ldapUid, ldapUserPwd, ip); + Assert.assertEquals(Status.LOGIN_SESSION_FAILED.getCode(), (int) result.getCode()); + + when(sessionService.createSession(mockUser, ip)).thenReturn(mockSession.getId()); + when(usersService.getUserByUserName(ldapUid)).thenReturn(null); + result = ldapAuthenticator.authenticate(ldapUid, ldapUserPwd, ip); + Assert.assertEquals(Status.USER_NAME_PASSWD_ERROR.getCode(), (int) result.getCode()); + } + + @Test + public void testGetAuthUser() { + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + when(usersService.queryUser(mockUser.getId())).thenReturn(mockUser); + when(sessionService.getSession(request)).thenReturn(mockSession); + + User user = ldapAuthenticator.getAuthUser(request); + Assert.assertNotNull(user); + + when(sessionService.getSession(request)).thenReturn(null); + user = ldapAuthenticator.getAuthUser(request); + Assert.assertNull(user); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java new file mode 100644 index 0000000000..8cd435f954 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapServiceTest.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.security.impl.ldap; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.common.enums.UserType; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.config.AutowireCapableBeanFactory; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(classes = ApiApplicationServer.class) +@TestPropertySource( + properties = { + "security.authentication.type=LDAP", + "security.authentication.ldap.user.admin=read-only-admin", + "ldap.urls=ldap://ldap.forumsys.com:389/", + "ldap.base.dn=dc=example,dc=com", + "ldap.username=cn=read-only-admin,dc=example,dc=com", + "ldap.password=password", + "ldap.user.identity.attribute=uid", + "ldap.user.email.attribute=mail", + }) +public class LdapServiceTest { + @Autowired + protected AutowireCapableBeanFactory beanFactory; + + private LdapService ldapService; + + @Before + public void setUp() { + ldapService = new LdapService(); + beanFactory.autowireBean(ldapService); + } + + @Test + public void getUserType() { + UserType userType = ldapService.getUserType("read-only-admin"); + Assert.assertEquals(UserType.ADMIN_USER, userType); + } + + @Test + public void ldapLogin() { + String email = 
ldapService.ldapLogin("tesla", "password"); + Assert.assertEquals("tesla@ldap.forumsys.com", email); + + String email2 = ldapService.ldapLogin("tesla", "error password"); + Assert.assertNull(email2); + } + + @Test + public void ldapLoginError() { + String email = ldapService.ldapLogin("tesla", "password"); + Assert.assertEquals("tesla@ldap.forumsys.com", email); + + String email2 = ldapService.ldapLogin("tesla", "error password"); + Assert.assertNull(email2); + } +} \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticatorTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java similarity index 91% rename from dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticatorTest.java rename to dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java index dca70f8ed3..f3c90ff743 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/PasswordAuthenticatorTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/security/impl/pwd/PasswordAuthenticatorTest.java @@ -14,7 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.api.security; + +package org.apache.dolphinscheduler.api.security.impl.pwd; + +import static org.mockito.Mockito.when; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; @@ -23,12 +26,17 @@ import org.apache.dolphinscheduler.api.service.UsersService; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.dao.entity.Session; import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Date; +import java.util.UUID; + +import javax.servlet.http.HttpServletRequest; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; -import static org.mockito.Mockito.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -36,9 +44,6 @@ import org.springframework.beans.factory.config.AutowireCapableBeanFactory; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.test.context.junit4.SpringRunner; -import javax.servlet.http.HttpServletRequest; -import java.util.Date; -import java.util.UUID; @RunWith(SpringRunner.class) @SpringBootTest(classes = ApiApplicationServer.class) @@ -58,7 +63,7 @@ public class PasswordAuthenticatorTest { private Session mockSession; @Before - public void setUp() throws Exception { + public void setUp() { authenticator = new PasswordAuthenticator(); beanFactory.autowireBean(authenticator); @@ -76,6 +81,13 @@ public class PasswordAuthenticatorTest { mockSession.setLastLoginTime(new Date()); } + @Test + public void testLogin() { + when(usersService.queryUser("test", "test")).thenReturn(mockUser); + User login = authenticator.login("test", "test", "127.0.0.1"); + Assert.assertNotNull(login); + } + @Test public void testAuthenticate() { when(usersService.queryUser("test", 
"test")).thenReturn(mockUser); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java index f5543487ea..e10d7185f9 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AccessTokenServiceTest.java @@ -81,7 +81,7 @@ public class AccessTokenServiceTest { public void testCreateToken() { when(accessTokenMapper.insert(any(AccessToken.class))).thenReturn(2); - Map result = accessTokenService.createToken(1, getDate(), "AccessTokenServiceTest"); + Map result = accessTokenService.createToken(getLoginUser(), 1, getDate(), "AccessTokenServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } @@ -89,7 +89,7 @@ public class AccessTokenServiceTest { @Test public void testGenerateToken() { - Map result = accessTokenService.generateToken(Integer.MAX_VALUE, getDate()); + Map result = accessTokenService.generateToken(getLoginUser(), Integer.MAX_VALUE,getDate()); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); String token = (String) result.get(Constants.DATA_LIST); @@ -121,16 +121,24 @@ public class AccessTokenServiceTest { public void testUpdateToken() { when(accessTokenMapper.selectById(1)).thenReturn(getEntity()); - Map result = accessTokenService.updateToken(1, Integer.MAX_VALUE, getDate(), "token"); + Map result = accessTokenService.updateToken(getLoginUser(), 1,Integer.MAX_VALUE,getDate(),"token"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); // not exist - result = accessTokenService.updateToken(2, Integer.MAX_VALUE, getDate(), "token"); + result = accessTokenService.updateToken(getLoginUser(), 
2,Integer.MAX_VALUE,getDate(),"token"); logger.info(result.toString()); Assert.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST, result.get(Constants.STATUS)); } + + private User getLoginUser(){ + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.ADMIN_USER); + return loginUser; + } + /** * create entity */ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index 3d8ae91287..e4f4a4e18c 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -16,12 +16,19 @@ */ package org.apache.dolphinscheduler.api.service; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; + +import org.apache.dolphinscheduler.api.dto.CommandStateCount; +import org.apache.dolphinscheduler.api.dto.TaskStateCount; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl; import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.dao.entity.CommandCount; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; @@ -36,6 +43,7 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.ArrayList; +import 
java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -123,6 +131,74 @@ public class DataAnalysisServiceTest { result = dataAnalysisService.countTaskStateByProject(user, 1, startDate, endDate); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + // when date in illegal format then return error message + String startDate2 = "illegalDateString"; + String endDate2 = "illegalDateString"; + result = dataAnalysisService.countTaskStateByProject(user, 0, startDate2, endDate2); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); + + // when one of date in illegal format then return error message + String startDate3 = "2020-08-28 14:13:40"; + String endDate3 = "illegalDateString"; + result = dataAnalysisService.countTaskStateByProject(user, 0, startDate3, endDate3); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); + + // when one of date in illegal format then return error message + String startDate4 = "illegalDateString"; + String endDate4 = "2020-08-28 14:13:40"; + result = dataAnalysisService.countTaskStateByProject(user, 0, startDate4, endDate4); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); + + // when counting general user's task status then return user's task status count + user.setUserType(UserType.GENERAL_USER); + Mockito.when(processService.getProjectIdListHavePerm(anyInt())) + .thenReturn(Collections.singletonList(123)); + ExecuteStatusCount executeStatusCount = new ExecuteStatusCount(); + executeStatusCount.setExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); + executeStatusCount.setCount(10); + Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(any(), any(), any())) + .thenReturn(Collections.singletonList(executeStatusCount)); + result = dataAnalysisService.countTaskStateByProject(user, 0, startDate, null); + 
assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList() + .hasSameSizeAs(ExecutionStatus.values()); + assertThat(result.get(Constants.DATA_LIST)).extracting("totalCount").first().isEqualTo(10); + TaskStateCount taskStateCount = new TaskStateCount(ExecutionStatus.RUNNING_EXECUTION, 10); + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList().containsOnlyOnce(taskStateCount); + + // when general user doesn't have any task then return all count are 0 + user.setUserType(UserType.GENERAL_USER); + Mockito.when(processService.getProjectIdListHavePerm(anyInt())) + .thenReturn(new ArrayList<>()); + Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(any(), any(), any())) + .thenReturn(Collections.emptyList()); + result = dataAnalysisService.countTaskStateByProject(user, 0, null, null); + assertThat(result.get(Constants.DATA_LIST)).extracting("totalCount").first().isEqualTo(0); + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList() + .hasSameSizeAs(ExecutionStatus.values()); + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList() + .extracting("count").allMatch(count -> count.equals(0)); + + // when general user doesn't have any task then return all count are 0 + user.setUserType(UserType.GENERAL_USER); + Mockito.when(processService.getProjectIdListHavePerm(anyInt())) + .thenReturn(new ArrayList<>()); + Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(any(), any(), any())) + .thenReturn(Collections.emptyList()); + result = dataAnalysisService.countTaskStateByProject(user, 0, null, null); + assertThat(result.get(Constants.DATA_LIST)).extracting("totalCount").first().isEqualTo(0); + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList() + .hasSameSizeAs(ExecutionStatus.values()); + assertThat(result.get(Constants.DATA_LIST)).extracting("taskCountDtos").first().asList() + 
.extracting("count").allMatch(count -> count.equals(0)); + + // when instanceStateCounter return null, then return nothing + user.setUserType(UserType.GENERAL_USER); + Mockito.when(processService.getProjectIdListHavePerm(anyInt())) + .thenReturn(new ArrayList<>()); + Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(any(), any(), any())) + .thenReturn(null); + result = dataAnalysisService.countTaskStateByProject(user, 0, null, null); + assertThat(result).isEmpty(); } @Test @@ -169,6 +245,67 @@ public class DataAnalysisServiceTest { result = dataAnalysisService.countCommandState(user, 1, startDate, endDate); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); + // when project check fail then return nothing + Map result1 = dataAnalysisService.countCommandState(user, 2, null, null); + Assert.assertTrue(result1.isEmpty()); + + // when all date in illegal format then return error message + String startDate2 = "illegalDateString"; + String endDate2 = "illegalDateString"; + Map result2 = dataAnalysisService.countCommandState(user, 0, startDate2, endDate2); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result2.get(Constants.STATUS)); + + // when one of date in illegal format then return error message + String startDate3 = "2020-08-22 09:23:10"; + String endDate3 = "illegalDateString"; + Map result3 = dataAnalysisService.countCommandState(user, 0, startDate3, endDate3); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result3.get(Constants.STATUS)); + + // when one of date in illegal format then return error message + String startDate4 = "illegalDateString"; + String endDate4 = "2020-08-22 09:23:10"; + Map result4 = dataAnalysisService.countCommandState(user, 0, startDate4, endDate4); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result4.get(Constants.STATUS)); + + // when no command found then return all count are 0 + Mockito.when(commandMapper.countCommandState(anyInt(), any(), any(), 
any())).thenReturn(Collections.emptyList()); + Mockito.when(errorCommandMapper.countCommandState(any(), any(), any())).thenReturn(Collections.emptyList()); + Map result5 = dataAnalysisService.countCommandState(user, 0, startDate, null); + assertThat(result5).containsEntry(Constants.STATUS, Status.SUCCESS); + assertThat(result5.get(Constants.DATA_LIST)).asList().extracting("errorCount").allMatch(count -> count.equals(0)); + assertThat(result5.get(Constants.DATA_LIST)).asList().extracting("normalCount").allMatch(count -> count.equals(0)); + + // when command found then return combination result + CommandCount normalCommandCount = new CommandCount(); + normalCommandCount.setCommandType(CommandType.START_PROCESS); + normalCommandCount.setCount(10); + CommandCount errorCommandCount = new CommandCount(); + errorCommandCount.setCommandType(CommandType.START_PROCESS); + errorCommandCount.setCount(5); + Mockito.when(commandMapper.countCommandState(anyInt(), any(), any(), any())).thenReturn(Collections.singletonList(normalCommandCount)); + Mockito.when(errorCommandMapper.countCommandState(any(), any(), any())).thenReturn(Collections.singletonList(errorCommandCount)); + + Map result6 = dataAnalysisService.countCommandState(user, 0, null, null); + + assertThat(result6).containsEntry(Constants.STATUS, Status.SUCCESS); + CommandStateCount commandStateCount = new CommandStateCount(); + commandStateCount.setCommandState(CommandType.START_PROCESS); + commandStateCount.setNormalCount(10); + commandStateCount.setErrorCount(5); + assertThat(result6.get(Constants.DATA_LIST)).asList().containsOnlyOnce(commandStateCount); + } + + @Test + public void testCountQueueState() { + // when project check fail then return nothing + Map result1 = dataAnalysisService.countQueueState(user, 2); + Assert.assertTrue(result1.isEmpty()); + + // when project check success when return all count are 0 + Map result2 = dataAnalysisService.countQueueState(user, 1); + 
assertThat(result2.get(Constants.DATA_LIST)).extracting("taskQueue", "taskKill") + .isNotEmpty() + .allMatch(count -> count.equals(0)); } /** diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index f35ff9509c..e197e56e1f 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -18,6 +18,8 @@ package org.apache.dolphinscheduler.api.service; import static org.assertj.core.api.Assertions.assertThat; +import static org.powermock.api.mockito.PowerMockito.mock; +import static org.powermock.api.mockito.PowerMockito.when; import org.apache.dolphinscheduler.api.dto.ProcessMeta; import org.apache.dolphinscheduler.api.enums.Status; @@ -66,6 +68,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -83,33 +88,6 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.class) public class ProcessDefinitionServiceTest { - @InjectMocks - private ProcessDefinitionServiceImpl processDefinitionService; - - @Mock - private ProcessDefinitionMapper processDefineMapper; - - @Mock - private ProjectMapper projectMapper; - - @Mock - private ProjectServiceImpl projectService; - - @Mock - private ScheduleMapper scheduleMapper; - - @Mock - private ProcessService processService; - - @Mock - private ProcessInstanceService processInstanceService; - - @Mock - private TaskInstanceMapper taskInstanceMapper; - - @Mock - private ProcessDefinitionVersionService processDefinitionVersionService; - private 
static final String SHELL_JSON = "{\n" + " \"globalParams\": [\n" + " \n" @@ -150,7 +128,6 @@ public class ProcessDefinitionServiceTest { + " \"tenantId\": 1,\n" + " \"timeout\": 0\n" + "}"; - private static final String CYCLE_SHELL_JSON = "{\n" + " \"globalParams\": [\n" + " \n" @@ -253,6 +230,24 @@ public class ProcessDefinitionServiceTest { + " \"tenantId\": 1,\n" + " \"timeout\": 0\n" + "}"; + @InjectMocks + private ProcessDefinitionServiceImpl processDefinitionService; + @Mock + private ProcessDefinitionMapper processDefineMapper; + @Mock + private ProjectMapper projectMapper; + @Mock + private ProjectServiceImpl projectService; + @Mock + private ScheduleMapper scheduleMapper; + @Mock + private ProcessService processService; + @Mock + private ProcessInstanceService processInstanceService; + @Mock + private TaskInstanceMapper taskInstanceMapper; + @Mock + private ProcessDefinitionVersionService processDefinitionVersionService; @Test public void testQueryProcessDefinitionList() { @@ -618,16 +613,16 @@ public class ProcessDefinitionServiceTest { //project check auth success, process not exist putMsg(result, Status.SUCCESS, projectName); - Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(null); + Mockito.when(processDefineMapper.verifyByDefineName(project.getId(), "test_pdf")).thenReturn(null); Map processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS)); //process exist - Mockito.when(processDefineMapper.queryByDefineName(project.getId(), "test_pdf")).thenReturn(getProcessDefinition()); + Mockito.when(processDefineMapper.verifyByDefineName(project.getId(), "test_pdf")).thenReturn(getProcessDefinition()); Map processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser, "project_test1", "test_pdf"); - Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, 
processExistRes.get(Constants.STATUS)); + Assert.assertEquals(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, processExistRes.get(Constants.STATUS)); } @Test @@ -751,6 +746,70 @@ public class ProcessDefinitionServiceTest { Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(taskInstance); Map taskNotNuLLRes = processDefinitionService.viewTree(46, 10); Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); + + } + + @Test + public void testSubProcessViewTree() throws Exception { + + ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setProcessDefinitionJson(SHELL_JSON); + List processInstanceList = new ArrayList<>(); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setName("test_instance"); + processInstance.setState(ExecutionStatus.RUNNING_EXECUTION); + processInstance.setHost("192.168.xx.xx"); + processInstance.setStartTime(new Date()); + processInstance.setEndTime(new Date()); + processInstanceList.add(processInstance); + + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setStartTime(new Date()); + taskInstance.setEndTime(new Date()); + taskInstance.setTaskType("SUB_PROCESS"); + taskInstance.setId(1); + taskInstance.setName("test_task_instance"); + taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); + taskInstance.setHost("192.168.xx.xx"); + taskInstance.setTaskJson("{\n" + + " \"conditionResult\": {\n" + + " \"failedNode\": [\n" + + " \"\"\n" + + " ],\n" + + " \"successNode\": [\n" + + " \"\"\n" + + " ]\n" + + " },\n" + + " \"delayTime\": \"0\",\n" + + " \"dependence\": {},\n" + + " \"description\": \"\",\n" + + " \"id\": \"1\",\n" + + " \"maxRetryTimes\": \"0\",\n" + + " \"name\": \"test_task_instance\",\n" + + " \"params\": {\n" + + " \"processDefinitionId\": \"222\",\n" + + " \"resourceList\": []\n" + + " },\n" + + " \"preTasks\": [],\n" + + " \"retryInterval\": \"1\",\n" + + " 
\"runFlag\": \"NORMAL\",\n" + + " \"taskInstancePriority\": \"MEDIUM\",\n" + + " \"timeout\": {\n" + + " \"enable\": false,\n" + + " \"interval\": null,\n" + + " \"strategy\": \"\"\n" + + " },\n" + + " \"type\": \"SUB_PROCESS\",\n" + + " \"workerGroup\": \"default\"\n" + + "}"); + //task instance exist + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Mockito.when(processInstanceService.queryByProcessDefineId(46, 10)).thenReturn(processInstanceList); + Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(taskInstance); + Map taskNotNuLLRes = processDefinitionService.viewTree(46, 10); + Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); + } @Test @@ -973,7 +1032,7 @@ public class ProcessDefinitionServiceTest { } @Test - public void testBatchExportProcessDefinitionByIds() { + public void testBatchExportProcessDefinitionByIds() throws IOException { processDefinitionService.batchExportProcessDefinitionByIds( null, null, null, null); @@ -991,6 +1050,28 @@ public class ProcessDefinitionServiceTest { processDefinitionService.batchExportProcessDefinitionByIds( loginUser, projectName, "1", null); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(1); + processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":" + + "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{}" + + ",\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\"" + + ",\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}" + + ",\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\"" + + ",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\"" + + ",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}"); + Map 
checkResult = new HashMap<>(); + checkResult.put(Constants.STATUS, Status.SUCCESS); + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); + Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkResult); + Mockito.when(processDefineMapper.queryByDefineId(1)).thenReturn(processDefinition); + HttpServletResponse response = mock(HttpServletResponse.class); + + ServletOutputStream outputStream = mock(ServletOutputStream.class); + when(response.getOutputStream()).thenReturn(outputStream); + processDefinitionService.batchExportProcessDefinitionByIds( + loginUser, projectName, "1", response); + } @Test @@ -1035,15 +1116,15 @@ public class ProcessDefinitionServiceTest { taskNode5.setType("SHELL"); ShellParameters shellParameters5 = new ShellParameters(); ResourceInfo resourceInfo5A = new ResourceInfo(); - resourceInfo5A.setId(0); + resourceInfo5A.setId(1); ResourceInfo resourceInfo5B = new ResourceInfo(); - resourceInfo5B.setId(1); + resourceInfo5B.setId(2); shellParameters5.setResourceList(Arrays.asList(resourceInfo5A, resourceInfo5B)); taskNode5.setParams(JSONUtils.toJsonString(shellParameters5)); input5.setTasks(Collections.singletonList(taskNode5)); String output5 = (String) testMethod.invoke(processDefinitionService, input5); assertThat(output5.split(",")).hasSize(2) - .containsExactlyInAnyOrder("0", "1"); + .containsExactlyInAnyOrder("1", "2"); // when resource id list is 0 1 1 2, then return 0,1,2 ProcessData input6 = new ProcessData(); @@ -1051,7 +1132,7 @@ public class ProcessDefinitionServiceTest { taskNode6.setType("SHELL"); ShellParameters shellParameters6 = new ShellParameters(); ResourceInfo resourceInfo6A = new ResourceInfo(); - resourceInfo6A.setId(0); + resourceInfo6A.setId(3); ResourceInfo resourceInfo6B = new ResourceInfo(); resourceInfo6B.setId(1); ResourceInfo resourceInfo6C = new ResourceInfo(); @@ -1065,7 +1146,7 @@ public class ProcessDefinitionServiceTest { String output6 = (String) 
testMethod.invoke(processDefinitionService, input6); assertThat(output6.split(",")).hasSize(3) - .containsExactlyInAnyOrder("0", "1", "2"); + .containsExactlyInAnyOrder("3", "1", "2"); } /** @@ -1182,4 +1263,35 @@ public class ProcessDefinitionServiceTest { result.put(Constants.MSG, status.getMsg()); } } + + @Test + public void testExportProcessMetaData() { + Integer processDefinitionId = 111; + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(processDefinitionId); + processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":" + + "{\"failedNode\":[\"\"],\"successNode\":" + + "[\"\"]},\"delayTime\":\"0\",\"dependence\":{}," + + "\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\"," + + "\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}," + + "\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\"," + + "\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\"," + + "\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}"); + Assert.assertNotNull(processDefinitionService.exportProcessMetaData(processDefinitionId, processDefinition)); + } + + @Test + public void testImportProcessSchedule() { + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.ADMIN_USER); + Integer processDefinitionId = 111; + String processDefinitionName = "testProcessDefinition"; + String projectName = "project_test1"; + Map result = new HashMap<>(); + putMsg(result, Status.PROJECT_NOT_FOUNT); + ProcessMeta processMeta = new ProcessMeta(); + Assert.assertEquals(0, processDefinitionService.importProcessSchedule(loginUser, projectName, processMeta, processDefinitionName, processDefinitionId)); + } + } diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index 5511f69aeb..de23d7570e 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -211,8 +211,6 @@ public class ProcessInstanceServiceTest { //project auth success ProcessInstance processInstance = getProcessInstance(); - processInstance.setReceivers("xxx@qq.com"); - processInstance.setReceiversCc("xxx@qq.com"); processInstance.setProcessDefinitionId(46); putMsg(result, Status.SUCCESS, projectName); Project project = getProject(projectName); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java index 85b23b3fcb..cc39925aa5 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProjectServiceTest.java @@ -295,6 +295,28 @@ public class ProjectServiceTest { } + @Test + public void testQueryProjectCreatedAndAuthorizedByUser() { + + Map result = null; + User loginUser = getLoginUser(); + + // not admin user + Mockito.when(projectMapper.queryProjectCreatedAndAuthorizedByUserId(1)).thenReturn(getList()); + result = projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); + List notAdminUserResult = (List) result.get(Constants.DATA_LIST); + Assert.assertTrue(CollectionUtils.isNotEmpty(notAdminUserResult)); + + //admin user + loginUser.setUserType(UserType.ADMIN_USER); + Mockito.when(projectMapper.selectList(null)).thenReturn(getList()); + result = 
projectService.queryProjectCreatedAndAuthorizedByUser(loginUser); + List projects = (List) result.get(Constants.DATA_LIST); + + Assert.assertTrue(CollectionUtils.isNotEmpty(projects)); + + } + @Test public void testQueryAllProjectList() { @@ -340,13 +362,11 @@ public class ProjectServiceTest { * create admin user */ private User getLoginUser() { - User loginUser = new User(); loginUser.setUserType(UserType.GENERAL_USER); loginUser.setUserName(userName); loginUser.setId(1); return loginUser; - } /** diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java index 407f6b587f..d430d3a755 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ResourcesServiceTest.java @@ -14,12 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.exceptions.ServiceException; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; @@ -33,7 +31,18 @@ import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -49,14 +58,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.mock.web.MockMultipartFile; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(PowerMockRunner.class) @PowerMockIgnore({"sun.security.*", "javax.net.*"}) -@PrepareForTest({HadoopUtils.class,PropertyUtils.class, FileUtils.class,org.apache.dolphinscheduler.api.utils.FileUtils.class}) +@PrepareForTest({HadoopUtils.class, PropertyUtils.class, FileUtils.class, 
org.apache.dolphinscheduler.api.utils.FileUtils.class}) public class ResourcesServiceTest { private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class); @@ -94,100 +101,103 @@ public class ResourcesServiceTest { } @Test - public void testCreateResource(){ + public void testCreateResource() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); //HDFS_NOT_STARTUP - Result result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,null,-1,"/"); + Result result = resourcesService.createResource(user, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); + Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_FILE_IS_EMPTY - MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf",new String().getBytes()); + MockMultipartFile mockMultipartFile = new MockMultipartFile("test.pdf", "".getBytes()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); - result = resourcesService.createResource(user,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); + result = resourcesService.createResource(user, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, mockMultipartFile, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_FILE_IS_EMPTY.getMsg(), result.getMsg()); //RESOURCE_SUFFIX_FORBID_CHANGE - mockMultipartFile = new MockMultipartFile("test.pdf","test.pdf","pdf",new String("test").getBytes()); + mockMultipartFile = new MockMultipartFile("test.pdf", "test.pdf", "pdf", "test".getBytes()); PowerMockito.when(FileUtils.suffix("test.pdf")).thenReturn("pdf"); 
PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.createResource(user,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE,mockMultipartFile,-1,"/"); + result = resourcesService.createResource(user, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, mockMultipartFile, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_SUFFIX_FORBID_CHANGE.getMsg(), result.getMsg()); //UDF_RESOURCE_SUFFIX_NOT_JAR - mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf","ResourcesServiceTest.pdf","pdf",new String("test").getBytes()); + mockMultipartFile = new MockMultipartFile("ResourcesServiceTest.pdf", "ResourcesServiceTest.pdf", "pdf", "test".getBytes()); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.pdf")).thenReturn("pdf"); - result = resourcesService.createResource(user,"ResourcesServiceTest.pdf","ResourcesServiceTest",ResourceType.UDF,mockMultipartFile,-1,"/"); + result = resourcesService.createResource(user, "ResourcesServiceTest.pdf", "ResourcesServiceTest", ResourceType.UDF, mockMultipartFile, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg(), result.getMsg()); } @Test - public void testCreateDirecotry(){ + public void testCreateDirecotry() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); //HDFS_NOT_STARTUP - Result result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); + Result result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); + 
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //PARENT_RESOURCE_NOT_EXIST + user.setId(1); + user.setTenantId(1); + Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); + Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(Mockito.anyInt())).thenReturn(null); - result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,1,"/"); + result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, 1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.PARENT_RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); + Assert.assertEquals(Status.PARENT_RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); //RESOURCE_EXIST PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.queryResourceList("/directoryTest", 0, 0)).thenReturn(getResourceList()); - result = resourcesService.createDirectory(user,"directoryTest","directory test",ResourceType.FILE,-1,"/"); + result = resourcesService.createDirectory(user, "directoryTest", "directory test", ResourceType.FILE, -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); } @Test - public void testUpdateResource(){ + public void testUpdateResource() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); User user = new User(); //HDFS_NOT_STARTUP - Result result = resourcesService.updateResource(user,1,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE); + Result result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null); logger.info(result.toString()); - 
Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); + Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_NOT_EXIST Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); - result = resourcesService.updateResource(user,0,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE); + result = resourcesService.updateResource(user, 0, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); //USER_NO_OPERATION_PERM - result = resourcesService.updateResource(user,1,"ResourcesServiceTest","ResourcesServiceTest",ResourceType.FILE); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest", "ResourcesServiceTest", ResourceType.FILE, null); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(),result.getMsg()); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getMsg(), result.getMsg()); //RESOURCE_NOT_EXIST user.setId(1); Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(),Mockito.anyString())).thenReturn("test1"); + PowerMockito.when(HadoopUtils.getHdfsFileName(Mockito.any(), Mockito.any(), Mockito.anyString())).thenReturn("test1"); try { Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(false); } catch (IOException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } - result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF); - Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); + result = 
resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null); + Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); //SUCCESS user.setId(1); @@ -196,57 +206,56 @@ public class ResourcesServiceTest { try { Mockito.when(HadoopUtils.getInstance().exists(Mockito.any())).thenReturn(true); } catch (IOException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } - result = resourcesService.updateResource(user,1,"ResourcesServiceTest.jar","ResourcesServiceTest",ResourceType.FILE); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest.jar", "ResourcesServiceTest", ResourceType.FILE, null); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); + Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); //RESOURCE_EXIST Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest1.jar", 0, 0)).thenReturn(getResourceList()); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.FILE); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.FILE, null); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); //USER_NOT_EXIST Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(null); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); //TENANT_NOT_EXIST Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); 
Mockito.when(tenantMapper.queryById(Mockito.anyInt())).thenReturn(null); - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest",ResourceType.UDF); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest", ResourceType.UDF, null); logger.info(result.toString()); - Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); //SUCCESS Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); PowerMockito.when(HadoopUtils.getHdfsResourceFileName(Mockito.any(), Mockito.any())).thenReturn("test"); try { - PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(),Mockito.anyString(),true,true)).thenReturn(true); + PowerMockito.when(HadoopUtils.getInstance().copy(Mockito.anyString(), Mockito.anyString(), true, true)).thenReturn(true); } catch (Exception e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } - result = resourcesService.updateResource(user,1,"ResourcesServiceTest1.jar","ResourcesServiceTest1.jar",ResourceType.UDF); + result = resourcesService.updateResource(user, 1, "ResourcesServiceTest1.jar", "ResourcesServiceTest1.jar", ResourceType.UDF, null); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); + Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test - public void testQueryResourceListPaging(){ + public void testQueryResourceListPaging() { User loginUser = new User(); loginUser.setUserType(UserType.ADMIN_USER); - IPage resourcePage = new Page<>(1,10); + IPage resourcePage = new Page<>(1, 10); resourcePage.setTotal(1); resourcePage.setRecords(getResourceList()); Mockito.when(resourcesMapper.queryResourcePaging(Mockito.any(Page.class), - Mockito.eq(0),Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"))).thenReturn(resourcePage); - Map result = 
resourcesService.queryResourceListPaging(loginUser,-1,ResourceType.FILE,"test",1,10); + Mockito.eq(0), Mockito.eq(-1), Mockito.eq(0), Mockito.eq("test"))).thenReturn(resourcePage); + Map result = resourcesService.queryResourceListPaging(loginUser, -1, ResourceType.FILE, "test", 1, 10); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); @@ -255,11 +264,11 @@ public class ResourcesServiceTest { } @Test - public void testQueryResourceList(){ + public void testQueryResourceList() { User loginUser = new User(); loginUser.setId(0); loginUser.setUserType(UserType.ADMIN_USER); - Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0,0)).thenReturn(getResourceList()); + Mockito.when(resourcesMapper.queryResourceListAuthored(0, 0, 0)).thenReturn(getResourceList()); Map result = resourcesService.queryResourceList(loginUser, ResourceType.FILE); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -268,7 +277,7 @@ public class ResourcesServiceTest { } @Test - public void testDelete(){ + public void testDelete() { User loginUser = new User(); loginUser.setId(0); @@ -277,19 +286,19 @@ public class ResourcesServiceTest { try { // HDFS_NOT_STARTUP - Result result = resourcesService.delete(loginUser,1); + Result result = resourcesService.delete(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_NOT_EXIST PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); - result = resourcesService.delete(loginUser,2); + result = resourcesService.delete(loginUser, 2); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); // USER_NO_OPERATION_PERM - result = resourcesService.delete(loginUser,2); + result = 
resourcesService.delete(loginUser, 2); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); @@ -297,30 +306,30 @@ public class ResourcesServiceTest { loginUser.setUserType(UserType.ADMIN_USER); loginUser.setTenantId(2); Mockito.when(userMapper.selectById(Mockito.anyInt())).thenReturn(loginUser); - result = resourcesService.delete(loginUser,1); + result = resourcesService.delete(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); //SUCCESS loginUser.setTenantId(1); Mockito.when(hadoopUtils.delete(Mockito.anyString(), Mockito.anyBoolean())).thenReturn(true); - result = resourcesService.delete(loginUser,1); + result = resourcesService.delete(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } catch (Exception e) { - logger.error("delete error",e); + logger.error("delete error", e); Assert.assertTrue(false); } } @Test - public void testVerifyResourceName(){ + public void testVerifyResourceName() { User user = new User(); user.setId(1); Mockito.when(resourcesMapper.queryResourceList("/ResourcesServiceTest.jar", 0, 0)).thenReturn(getResourceList()); - Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); + Result result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_EXIST.getMsg(), result.getMsg()); @@ -330,94 +339,89 @@ public class ResourcesServiceTest { try { Mockito.when(hadoopUtils.exists(unExistFullName)).thenReturn(false); } catch (IOException e) { - logger.error("hadoop error",e); + logger.error("hadoop error", e); } - result = resourcesService.verifyResourceName("/test.jar",ResourceType.FILE,user); + result = resourcesService.verifyResourceName("/test.jar", ResourceType.FILE, user); logger.info(result.toString()); 
Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); - //RESOURCE_FILE_EXIST user.setTenantId(1); try { Mockito.when(hadoopUtils.exists("test")).thenReturn(true); } catch (IOException e) { - logger.error("hadoop error",e); + logger.error("hadoop error", e); } PowerMockito.when(HadoopUtils.getHdfsResourceFileName("123", "test1")).thenReturn("test"); - result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar",ResourceType.FILE,user); + result = resourcesService.verifyResourceName("/ResourcesServiceTest.jar", ResourceType.FILE, user); logger.info(result.toString()); - Assert.assertTrue(Status.RESOURCE_EXIST.getCode()==result.getCode()); + Assert.assertTrue(Status.RESOURCE_EXIST.getCode() == result.getCode()); //SUCCESS - result = resourcesService.verifyResourceName("test2",ResourceType.FILE,user); + result = resourcesService.verifyResourceName("test2", ResourceType.FILE, user); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test - public void testReadResource(){ + public void testReadResource() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); //HDFS_NOT_STARTUP - Result result = resourcesService.readResource(1,1,10); + Result result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); + Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_NOT_EXIST Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); - result = resourcesService.readResource(2,1,10); + result = resourcesService.readResource(2, 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); //RESOURCE_SUFFIX_NOT_SUPPORT_VIEW 
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); - result = resourcesService.readResource(1,1,10); + result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg()); //USER_NOT_EXIST PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.readResource(1,1,10); + result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertTrue(Status.USER_NOT_EXIST.getCode()==result.getCode()); - + Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); //TENANT_NOT_EXIST Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); - result = resourcesService.readResource(1,1,10); + result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.TENANT_NOT_EXIST.getMsg(), result.getMsg()); //RESOURCE_FILE_NOT_EXIST Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); try { Mockito.when(hadoopUtils.exists(Mockito.anyString())).thenReturn(false); } catch (IOException e) { - logger.error("hadoop error",e); + logger.error("hadoop error", e); } - result = resourcesService.readResource(1,1,10); + result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertTrue(Status.RESOURCE_FILE_NOT_EXIST.getCode()==result.getCode()); + Assert.assertTrue(Status.RESOURCE_FILE_NOT_EXIST.getCode() == result.getCode()); //SUCCESS try { Mockito.when(hadoopUtils.exists(null)).thenReturn(true); - Mockito.when(hadoopUtils.catFile(null,1,10)).thenReturn(getContent()); + 
Mockito.when(hadoopUtils.catFile(null, 1, 10)).thenReturn(getContent()); } catch (IOException e) { - logger.error("hadoop error",e); + logger.error("hadoop error", e); } - result = resourcesService.readResource(1,1,10); + result = resourcesService.readResource(1, 1, 10); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @@ -429,23 +433,23 @@ public class ResourcesServiceTest { PowerMockito.when(HadoopUtils.getHdfsUdfDir("udfDir")).thenReturn("udfDir"); User user = getUser(); //HDFS_NOT_STARTUP - Result result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); + Result result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(),result.getMsg()); + Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_SUFFIX_NOT_SUPPORT_VIEW PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); - result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); + result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg()); //RuntimeException try { PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); - result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content",-1,"/"); - }catch (RuntimeException ex){ + result = 
resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/"); + } catch (RuntimeException ex) { logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), ex.getMessage()); } @@ -453,50 +457,48 @@ public class ResourcesServiceTest { //SUCCESS Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - result = resourcesService.onlineCreateResource(user,ResourceType.FILE,"test","jar","desc","content",-1,"/"); + result = resourcesService.onlineCreateResource(user, ResourceType.FILE, "test", "jar", "desc", "content", -1, "/"); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS.getMsg(),result.getMsg()); - + Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test - public void testUpdateResourceContent(){ + public void testUpdateResourceContent() { User loginUser = new User(); loginUser.setId(0); PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); // HDFS_NOT_STARTUP - Result result = resourcesService.updateResourceContent(1,"content"); + Result result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); Assert.assertEquals(Status.HDFS_NOT_STARTUP.getMsg(), result.getMsg()); //RESOURCE_NOT_EXIST PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(resourcesMapper.selectById(1)).thenReturn(getResource()); - result = resourcesService.updateResourceContent(2,"content"); + result = resourcesService.updateResourceContent(2, "content"); logger.info(result.toString()); Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getMsg(), result.getMsg()); //RESOURCE_SUFFIX_NOT_SUPPORT_VIEW PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); 
PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("class"); - result = resourcesService.updateResourceContent(1,"content"); + result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); - Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(),result.getMsg()); + Assert.assertEquals(Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW.getMsg(), result.getMsg()); //USER_NOT_EXIST PowerMockito.when(FileUtils.getResourceViewSuffixs()).thenReturn("jar"); PowerMockito.when(FileUtils.suffix("ResourcesServiceTest.jar")).thenReturn("jar"); - result = resourcesService.updateResourceContent(1,"content"); + result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); Assert.assertTrue(Status.USER_NOT_EXIST.getCode() == result.getCode()); - //TENANT_NOT_EXIST Mockito.when(userMapper.selectById(1)).thenReturn(getUser()); - result = resourcesService.updateResourceContent(1,"content"); + result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); Assert.assertTrue(Status.TENANT_NOT_EXIST.getCode() == result.getCode()); @@ -504,13 +506,13 @@ public class ResourcesServiceTest { Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); Mockito.when(FileUtils.getUploadFilename(Mockito.anyString(), Mockito.anyString())).thenReturn("test"); PowerMockito.when(FileUtils.writeContent2File(Mockito.anyString(), Mockito.anyString())).thenReturn(true); - result = resourcesService.updateResourceContent(1,"content"); + result = resourcesService.updateResourceContent(1, "content"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg()); } @Test - public void testDownloadResource(){ + public void testDownloadResource() { PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(true); Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant()); @@ -526,102 +528,117 @@ public class ResourcesServiceTest { resource = 
resourcesService.downloadResource(1); Assert.assertNotNull(resource); } catch (Exception e) { - logger.error("DownloadResource error",e); + logger.error("DownloadResource error", e); Assert.assertTrue(false); } } @Test - public void testUnauthorizedFile(){ + public void testUnauthorizedFile() { User user = getUser(); //USER_NO_OPERATION_PERM - Map result = resourcesService.unauthorizedFile(user,1); + Map result = resourcesService.unauthorizedFile(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //SUCCESS user.setUserType(UserType.ADMIN_USER); Mockito.when(resourcesMapper.queryResourceExceptUserId(1)).thenReturn(getResourceList()); - result = resourcesService.unauthorizedFile(user,1); + result = resourcesService.unauthorizedFile(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List resources = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(resources)); } @Test - public void testUnauthorizedUDFFunction(){ + public void testUnauthorizedUDFFunction() { User user = getUser(); //USER_NO_OPERATION_PERM - Map result = resourcesService.unauthorizedUDFFunction(user,1); + Map result = resourcesService.unauthorizedUDFFunction(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //SUCCESS user.setUserType(UserType.ADMIN_USER); Mockito.when(udfFunctionMapper.queryUdfFuncExceptUserId(1)).thenReturn(getUdfFuncList()); - result = resourcesService.unauthorizedUDFFunction(user,1); + result = resourcesService.unauthorizedUDFFunction(user, 1); logger.info(result.toString()); - 
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List udfFuncs = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs)); } - @Test - public void testAuthorizedUDFFunction(){ + public void testAuthorizedUDFFunction() { User user = getUser(); //USER_NO_OPERATION_PERM - Map result = resourcesService.authorizedUDFFunction(user,1); + Map result = resourcesService.authorizedUDFFunction(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //SUCCESS user.setUserType(UserType.ADMIN_USER); Mockito.when(udfFunctionMapper.queryAuthedUdfFunc(1)).thenReturn(getUdfFuncList()); - result = resourcesService.authorizedUDFFunction(user,1); + result = resourcesService.authorizedUDFFunction(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List udfFuncs = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(udfFuncs)); } @Test - public void testAuthorizedFile(){ + public void testAuthorizedFile() { User user = getUser(); //USER_NO_OPERATION_PERM - Map result = resourcesService.authorizedFile(user,1); + Map result = resourcesService.authorizedFile(user, 1); logger.info(result.toString()); - Assert.assertEquals(Status.USER_NO_OPERATION_PERM,result.get(Constants.STATUS)); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //SUCCESS user.setUserType(UserType.ADMIN_USER); Mockito.when(resourcesMapper.queryAuthorizedResourceList(1)).thenReturn(getResourceList()); - result = resourcesService.authorizedFile(user,1); + result = resourcesService.authorizedFile(user, 1); logger.info(result.toString()); - 
Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); + Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); List resources = (List) result.get(Constants.DATA_LIST); Assert.assertTrue(CollectionUtils.isNotEmpty(resources)); } + @Test + public void testCatFile() { + + PowerMockito.when(PropertyUtils.getResUploadStartupState()).thenReturn(false); + + //SUCCESS + try { + Mockito.when(hadoopUtils.exists(null)).thenReturn(true); + Mockito.when(hadoopUtils.catFile(null, 1, 10)).thenReturn(getContent()); + + List list = hadoopUtils.catFile(null, 1, 10); + Assert.assertNotNull(list); + + } catch (IOException e) { + logger.error("hadoop error", e); + } + } - private List getResourceList(){ + private List getResourceList() { - List resources = new ArrayList<>(); + List resources = new ArrayList<>(); resources.add(getResource()); return resources; } - private Tenant getTenant() { Tenant tenant = new Tenant(); tenant.setTenantCode("123"); return tenant; } - private Resource getResource(){ + private Resource getResource() { Resource resource = new Resource(); resource.setPid(-1); @@ -633,7 +650,7 @@ public class ResourcesServiceTest { return resource; } - private Resource getUdfResource(){ + private Resource getUdfResource() { Resource resource = new Resource(); resource.setUserId(1); @@ -644,27 +661,28 @@ public class ResourcesServiceTest { return resource; } - private UdfFunc getUdfFunc(){ + private UdfFunc getUdfFunc() { UdfFunc udfFunc = new UdfFunc(); udfFunc.setId(1); return udfFunc; } - private List getUdfFuncList(){ + private List getUdfFuncList() { List udfFuncs = new ArrayList<>(); udfFuncs.add(getUdfFunc()); return udfFuncs; } - private User getUser(){ + private User getUser() { User user = new User(); user.setId(1); user.setTenantId(1); return user; } - private List getContent(){ + + private List getContent() { List contentList = new ArrayList<>(); contentList.add("test"); return contentList; diff --git 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index 16547b3fd7..199b34cc1b 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.api.service; import static org.mockito.ArgumentMatchers.any; @@ -88,7 +89,7 @@ public class TaskInstanceServiceTest { //project auth fail when(projectMapper.queryByName(projectName)).thenReturn(null); when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); - Map proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", + Map proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", "", "test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); @@ -107,43 +108,43 @@ public class TaskInstanceServiceTest { when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); - when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), eq(""), eq(0), Mockito.any(), eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); 
when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser); when(processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId())).thenReturn(processInstance); - Map successRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map successRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); //executor name empty - when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), eq(""), eq(0), Mockito.any(), eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); - Map executorEmptyRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map executorEmptyRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.SUCCESS, executorEmptyRes.get(Constants.STATUS)); //executor null when(usersService.queryUser(loginUser.getId())).thenReturn(null); when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(-1); - Map executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.SUCCESS, executorNullRes.get(Constants.STATUS)); //start/end date null - when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + 
when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), eq(""), eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn); - Map executorNullDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map executorNullDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "", null, null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.SUCCESS, executorNullDateRes.get(Constants.STATUS)); //start date error format - when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), eq(""), eq(0), Mockito.any(), eq("192.168.xx.xx"), any(), any())).thenReturn(pageReturn); - Map executorErrorStartDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map executorErrorStartDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "", "error date", null, "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorStartDateRes.get(Constants.STATUS)); - Map executorErrorEndDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + Map executorErrorEndDateRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", "", "", null, "error date", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, executorErrorEndDateRes.get(Constants.STATUS)); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java index 5dcf59cf74..86b2079f26 100644 --- 
a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TenantServiceTest.java @@ -73,8 +73,6 @@ public class TenantServiceTest { private static final String tenantCode = "TenantServiceTest"; - private static final String tenantName = "TenantServiceTest"; - @Test public void testCreateTenant() { @@ -83,17 +81,17 @@ public class TenantServiceTest { try { //check tenantCode Map result = - tenantService.createTenant(getLoginUser(), "%!1111", tenantName, 1, "TenantServiceTest"); + tenantService.createTenant(getLoginUser(), "%!1111", 1, "TenantServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.VERIFY_TENANT_CODE_ERROR, result.get(Constants.STATUS)); //check exist - result = tenantService.createTenant(loginUser, tenantCode, tenantName, 1, "TenantServiceTest"); + result = tenantService.createTenant(loginUser, tenantCode, 1, "TenantServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); // success - result = tenantService.createTenant(loginUser, "test", "test", 1, "TenantServiceTest"); + result = tenantService.createTenant(loginUser, "test", 1, "TenantServiceTest"); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); @@ -126,11 +124,11 @@ public class TenantServiceTest { try { // id not exist Map result = - tenantService.updateTenant(getLoginUser(), 912222, tenantCode, tenantName, 1, "desc"); + tenantService.updateTenant(getLoginUser(), 912222, tenantCode, 1, "desc"); logger.info(result.toString()); // success Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS)); - result = tenantService.updateTenant(getLoginUser(), 1, tenantCode, "TenantServiceTest001", 1, "desc"); + result = tenantService.updateTenant(getLoginUser(), 1, tenantCode, 1, "desc"); logger.info(result.toString()); 
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { @@ -236,7 +234,6 @@ public class TenantServiceTest { Tenant tenant = new Tenant(); tenant.setId(id); tenant.setTenantCode(tenantCode); - tenant.setTenantName(tenantName); return tenant; } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java index 9ec24bbb50..884e9b6b36 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UdfFuncServiceTest.java @@ -149,9 +149,11 @@ public class UdfFuncServiceTest { } @Test - public void testQueryResourceList(){ - Mockito.when(udfFuncMapper.getUdfFuncByType(1, 1)).thenReturn(getList()); - Map result = udfFuncService.queryResourceList(getLoginUser(),1); + public void testQueryUdfFuncList(){ + User user = getLoginUser(); + user.setUserType(UserType.GENERAL_USER); + Mockito.when(udfFuncMapper.getUdfFuncByType(user.getId(), UdfType.HIVE.ordinal())).thenReturn(getList()); + Map result = udfFuncService.queryUdfFuncList(user,UdfType.HIVE.ordinal()); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS,result.get(Constants.STATUS)); List udfFuncList = (List) result.get(Constants.DATA_LIST); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index c4d3d6e126..ca6c7216b9 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -14,10 +14,13 @@ * See the License for the specific language governing permissions and * limitations under 
the License. */ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; @@ -29,7 +32,19 @@ import org.apache.dolphinscheduler.common.utils.EncryptionUtils; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper; +import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceMapper; +import org.apache.dolphinscheduler.dao.mapper.ResourceUserMapper; +import org.apache.dolphinscheduler.dao.mapper.TenantMapper; +import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -42,13 +57,8 @@ import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.when; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; @RunWith(MockitoJUnitRunner.class) public class UsersServiceTest { @@ -73,30 +83,34 @@ 
public class UsersServiceTest { @Mock private ResourceMapper resourceMapper; - private String queueName ="UsersServiceTestQueue"; - + private String queueName = "UsersServiceTestQueue"; @Before - public void before(){ - - + public void before() { } + @After - public void after(){ + public void after() { } - @Test - public void testCreateUser(){ + public void testCreateUserForLdap() { + String userName = "user1"; + String email = "user1@ldap.com"; + User user = usersService.createUser(UserType.ADMIN_USER, userName, email); + Assert.assertNotNull(user); + } + @Test + public void testCreateUser() { User user = new User(); user.setUserType(UserType.ADMIN_USER); String userName = "userTest0001~"; String userPassword = "userTest"; String email = "123@qq.com"; int tenantId = Integer.MAX_VALUE; - String phone= "13456432345"; + String phone = "13456432345"; int state = 1; try { //userName error @@ -119,7 +133,7 @@ public class UsersServiceTest { Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS)); email = "122222@qq.com"; - phone ="2233"; + phone = "2233"; //phone error result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state); logger.info(result.toString()); @@ -137,20 +151,19 @@ public class UsersServiceTest { Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error(Status.CREATE_USER_ERROR.getMsg(),e); + logger.error(Status.CREATE_USER_ERROR.getMsg(), e); Assert.assertTrue(false); } } @Test - public void testQueryUser(){ - + public void testQueryUser() { String userName = "userTest0001"; String userPassword = "userTest0001"; - when(userMapper.queryUserByNamePassword(userName,EncryptionUtils.getMd5(userPassword))).thenReturn(getGeneralUser()); - User queryUser = usersService.queryUser(userName, userPassword); + when(userMapper.queryUserByNamePassword(userName, EncryptionUtils.getMd5(userPassword))).thenReturn(getGeneralUser()); + User 
queryUser = usersService.queryUser(userName, userPassword); logger.info(queryUser.toString()); - Assert.assertTrue(queryUser!=null); + Assert.assertTrue(queryUser != null); } @Test @@ -176,11 +189,8 @@ public class UsersServiceTest { } - @Test - public void testQueryUserList(){ - - + public void testQueryUserList() { User user = new User(); //no operate @@ -190,93 +200,88 @@ public class UsersServiceTest { //success user.setUserType(UserType.ADMIN_USER); - when(userMapper.selectList(null )).thenReturn(getUserList()); + when(userMapper.selectList(null)).thenReturn(getUserList()); result = usersService.queryUserList(user); List userList = (List) result.get(Constants.DATA_LIST); - Assert.assertTrue(userList.size()>0); + Assert.assertTrue(userList.size() > 0); } @Test - public void testQueryUserListPage(){ - - + public void testQueryUserListPage() { User user = new User(); - IPage page = new Page<>(1,10); + IPage page = new Page<>(1, 10); page.setRecords(getUserList()); when(userMapper.queryUserPaging(any(Page.class), eq("userTest"))).thenReturn(page); //no operate - Map result = usersService.queryUserList(user,"userTest",1,10); + Map result = usersService.queryUserList(user, "userTest", 1, 10); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); //success user.setUserType(UserType.ADMIN_USER); - result = usersService.queryUserList(user,"userTest",1,10); + result = usersService.queryUserList(user, "userTest", 1, 10); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); - Assert.assertTrue(pageInfo.getLists().size()>0); + PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); + Assert.assertTrue(pageInfo.getLists().size() > 0); } @Test - public void testUpdateUser(){ - + public void testUpdateUser() { String userName = "userTest0001"; String userPassword = "userTest0001"; try { //user not exist - Map result = 
usersService.updateUser(0,userName,userPassword,"3443@qq.com",1,"13457864543","queue", 1); + Map result = usersService.updateUser(getLoginUser(), 0,userName,userPassword,"3443@qq.com",1,"13457864543","queue", 1); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); logger.info(result.toString()); //success when(userMapper.selectById(1)).thenReturn(getUser()); - result = usersService.updateUser(1,userName,userPassword,"32222s@qq.com",1,"13457864543","queue", 1); + result = usersService.updateUser(getLoginUser(), 1,userName,userPassword,"32222s@qq.com",1,"13457864543","queue", 1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error("update user error",e); + logger.error("update user error", e); Assert.assertTrue(false); } } @Test - public void testDeleteUserById(){ - + public void testDeleteUserById() { User loginUser = new User(); try { when(userMapper.queryTenantCodeByUserId(1)).thenReturn(getUser()); when(userMapper.selectById(1)).thenReturn(getUser()); //no operate - Map result = usersService.deleteUserById(loginUser,3); + Map result = usersService.deleteUserById(loginUser, 3); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); // user not exist loginUser.setUserType(UserType.ADMIN_USER); - result = usersService.deleteUserById(loginUser,3); + result = usersService.deleteUserById(loginUser, 3); logger.info(result.toString()); Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS)); //success - result = usersService.deleteUserById(loginUser,1); + result = usersService.deleteUserById(loginUser, 1); logger.info(result.toString()); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); } catch (Exception e) { - logger.error("delete user error",e); - Assert.assertTrue(false); + logger.error("delete user error", e); + Assert.assertTrue(false); } } @Test - public void 
testGrantProject(){ - + public void testGrantProject() { when(userMapper.selectById(1)).thenReturn(getUser()); User loginUser = new User(); - String projectIds= "100000,120000"; + String projectIds = "100000,120000"; Map result = usersService.grantProject(loginUser, 1, projectIds); logger.info(result.toString()); Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS)); @@ -292,8 +297,7 @@ public class UsersServiceTest { } @Test - public void testGrantResources(){ - + public void testGrantResources() { String resourceIds = "100000,120000"; when(userMapper.selectById(1)).thenReturn(getUser()); User loginUser = new User(); @@ -317,8 +321,7 @@ public class UsersServiceTest { @Test - public void testGrantUDFFunction(){ - + public void testGrantUDFFunction() { String udfIds = "100000,120000"; when(userMapper.selectById(1)).thenReturn(getUser()); User loginUser = new User(); @@ -337,8 +340,7 @@ public class UsersServiceTest { } @Test - public void testGrantDataSource(){ - + public void testGrantDataSource() { String datasourceIds = "100000,120000"; when(userMapper.selectById(1)).thenReturn(getUser()); User loginUser = new User(); @@ -357,10 +359,15 @@ public class UsersServiceTest { } + private User getLoginUser(){ + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.ADMIN_USER); + return loginUser; + } @Test - public void getUserInfo(){ - + public void getUserInfo() { User loginUser = new User(); loginUser.setUserName("admin"); loginUser.setUserType(UserType.ADMIN_USER); @@ -370,7 +377,7 @@ public class UsersServiceTest { Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); User tempUser = (User) result.get(Constants.DATA_LIST); //check userName - Assert.assertEquals("admin",tempUser.getUserName()); + Assert.assertEquals("admin", tempUser.getUserName()); //get general user loginUser.setUserType(null); @@ -381,13 +388,12 @@ public class UsersServiceTest { Assert.assertEquals(Status.SUCCESS, 
result.get(Constants.STATUS)); tempUser = (User) result.get(Constants.DATA_LIST); //check userName - Assert.assertEquals("userTest0001",tempUser.getUserName()); + Assert.assertEquals("userTest0001", tempUser.getUserName()); } @Test - public void testQueryAllGeneralUsers(){ - + public void testQueryAllGeneralUsers() { User loginUser = new User(); //no operate Map result = usersService.queryAllGeneralUsers(loginUser); @@ -404,8 +410,7 @@ public class UsersServiceTest { } @Test - public void testVerifyUserName(){ - + public void testVerifyUserName() { //not exist user Result result = usersService.verifyUserName("admin89899"); logger.info(result.toString()); @@ -418,11 +423,10 @@ public class UsersServiceTest { } @Test - public void testUnauthorizedUser(){ - + public void testUnauthorizedUser() { User loginUser = new User(); - when(userMapper.selectList(null )).thenReturn(getUserList()); - when( userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList()); + when(userMapper.selectList(null)).thenReturn(getUserList()); + when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList()); //no operate Map result = usersService.unauthorizedUser(loginUser, 2); logger.info(result.toString()); @@ -436,8 +440,7 @@ public class UsersServiceTest { @Test - public void testAuthorizedUser(){ - + public void testAuthorizedUser() { User loginUser = new User(); when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList()); //no operate @@ -565,10 +568,8 @@ public class UsersServiceTest { /** * get disabled user - * @return */ private User getDisabledUser() { - User user = new User(); user.setUserType(UserType.GENERAL_USER); user.setUserName("userTest0001"); @@ -580,10 +581,8 @@ public class UsersServiceTest { /** * get user - * @return */ - private User getGeneralUser(){ - + private User getGeneralUser() { User user = new User(); user.setUserType(UserType.GENERAL_USER); user.setUserName("userTest0001"); @@ -591,8 +590,7 @@ public class UsersServiceTest { 
return user; } - - private List getUserList(){ + private List getUserList() { List userList = new ArrayList<>(); userList.add(getGeneralUser()); return userList; @@ -601,8 +599,7 @@ public class UsersServiceTest { /** * get user */ - private User getUser(){ - + private User getUser() { User user = new User(); user.setUserType(UserType.ADMIN_USER); user.setUserName("userTest0001"); @@ -613,9 +610,10 @@ public class UsersServiceTest { /** * get tenant + * * @return tenant */ - private Tenant getTenant(){ + private Tenant getTenant() { Tenant tenant = new Tenant(); tenant.setId(1); return tenant; @@ -623,10 +621,10 @@ public class UsersServiceTest { /** * get resource + * * @return resource */ - private Resource getResource(){ - + private Resource getResource() { Resource resource = new Resource(); resource.setPid(-1); resource.setUserId(1); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java index d3c60cc499..4a1d874c8a 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/WorkerGroupServiceTest.java @@ -14,20 +14,24 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.service; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; -import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -35,16 +39,10 @@ import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.internal.matchers.Any; import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Set; - @RunWith(MockitoJUnitRunner.class) public class WorkerGroupServiceTest { @@ -59,14 +57,13 @@ public class WorkerGroupServiceTest { @Mock private ZookeeperCachedOperator zookeeperCachedOperator; - @Before - public void init(){ + public void init() { ZookeeperConfig zookeeperConfig = new ZookeeperConfig(); zookeeperConfig.setDsRoot("/dolphinscheduler_qzw"); Mockito.when(zookeeperCachedOperator.getZookeeperConfig()).thenReturn(zookeeperConfig); - String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot()+"/nodes" +"/worker"; + String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + 
Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; List workerGroupStrList = new ArrayList<>(); workerGroupStrList.add("default"); @@ -83,19 +80,18 @@ public class WorkerGroupServiceTest { } /** - * query worker group paging + * query worker group paging */ @Test - public void testQueryAllGroupPaging(){ + public void testQueryAllGroupPaging() { User user = new User(); // general user add user.setUserType(UserType.ADMIN_USER); Map result = workerGroupService.queryAllGroupPaging(user, 1, 10, null); PageInfo pageInfo = (PageInfo) result.get(Constants.DATA_LIST); - Assert.assertEquals(pageInfo.getLists().size(),1); + Assert.assertEquals(pageInfo.getLists().size(), 1); } - @Test public void testQueryAllGroup() throws Exception { Map result = workerGroupService.queryAllGroup(); @@ -103,16 +99,24 @@ public class WorkerGroupServiceTest { Assert.assertEquals(workerGroups.size(), 1); } - /** * get processInstances - * @return */ - private List getProcessInstanceList(){ + private List getProcessInstanceList() { List processInstances = new ArrayList<>(); processInstances.add(new ProcessInstance()); return processInstances; } + @Test + public void testQueryAllGroupWithNoNodeException() { + String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS; + Mockito.when(zookeeperCachedOperator.getChildrenKeys(workerPath)).thenThrow(new RuntimeException("KeeperException$NoNodeException")); + Map result = workerGroupService.queryAllGroup(); + Set workerGroups = (Set) result.get(Constants.DATA_LIST); + Assert.assertEquals(1, workerGroups.size()); + Assert.assertEquals("default", workerGroups.toArray()[0]); + } + } diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index 504dddc553..8aeca37e93 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -21,7 +21,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT dolphinscheduler-common 
dolphinscheduler-common diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 5c0ae1d638..b285bd4df5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -221,6 +221,11 @@ public final class Constants { */ public static final String COLON = ":"; + /** + * SPACE " " + */ + public static final String SPACE = " "; + /** * SINGLE_SLASH / */ @@ -231,6 +236,15 @@ public final class Constants { */ public static final String DOUBLE_SLASH = "//"; + /** + * SINGLE_QUOTES "'" + */ + public static final String SINGLE_QUOTES = "'"; + /** + * DOUBLE_QUOTES "\"" + */ + public static final String DOUBLE_QUOTES = "\""; + /** * SEMICOLON ; */ @@ -432,21 +446,21 @@ public final class Constants { /** * command parameter keys */ - public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; + public static final String CMD_PARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId"; - public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; + public static final String CMD_PARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList"; - public static final String CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId"; + public static final String CMD_PARAM_RECOVERY_WAITING_THREAD = "WaitingThreadInstanceId"; - public static final String CMDPARAM_SUB_PROCESS = "processInstanceId"; + public static final String CMD_PARAM_SUB_PROCESS = "processInstanceId"; - public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0"; + public static final String CMD_PARAM_EMPTY_SUB_PROCESS = "0"; - public static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId"; + public static final String CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID = 
"parentProcessInstanceId"; - public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; + public static final String CMD_PARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId"; - public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList"; + public static final String CMD_PARAM_START_NODE_NAMES = "StartNodeNameList"; /** * complement data start date @@ -527,10 +541,9 @@ public final class Constants { public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10; + /** - * hadoop params constant - */ - /** + * hadoop params * jar */ public static final String JAR = "jar"; @@ -595,12 +608,6 @@ public final class Constants { public static final String SPARK_QUEUE = "--queue"; - /** - * --queue --qu - */ - public static final String FLINK_QUEUE = "--qu"; - - /** * exit code success */ @@ -829,6 +836,7 @@ public final class Constants { public static final String FLINK_RUN_MODE = "-m"; public static final String FLINK_YARN_SLOT = "-ys"; public static final String FLINK_APP_NAME = "-ynm"; + public static final String FLINK_QUEUE = "-yqu"; public static final String FLINK_TASK_MANAGE = "-yn"; public static final String FLINK_JOB_MANAGE_MEM = "-yjm"; @@ -837,15 +845,15 @@ public final class Constants { public static final String FLINK_MAIN_CLASS = "-c"; - public static final int[] NOT_TERMINATED_STATES = new int[]{ - ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), - ExecutionStatus.RUNNING_EXECUTION.ordinal(), - ExecutionStatus.DELAY_EXECUTION.ordinal(), - ExecutionStatus.READY_PAUSE.ordinal(), - ExecutionStatus.READY_STOP.ordinal(), - ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(), - ExecutionStatus.WAITTING_THREAD.ordinal(), - ExecutionStatus.WAITTING_DEPEND.ordinal() + public static final int[] NOT_TERMINATED_STATES = new int[] { + ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), + ExecutionStatus.RUNNING_EXECUTION.ordinal(), + ExecutionStatus.DELAY_EXECUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), + 
ExecutionStatus.READY_STOP.ordinal(), + ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(), + ExecutionStatus.WAITTING_THREAD.ordinal(), + ExecutionStatus.WAITTING_DEPEND.ordinal() }; @@ -1016,4 +1024,14 @@ public final class Constants { * Network IP gets priority, default inner outer */ public static final String NETWORK_PRIORITY_STRATEGY = "dolphin.scheduler.network.priority.strategy"; + + /** + * exec shell scripts + */ + public static final String SH = "sh"; + + /** + * pstree, get pud and sub pid + */ + public static final String PSTREE = "pstree"; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java similarity index 74% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java rename to dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java index 13820b4bab..9cec2766f1 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/Event.java @@ -14,20 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.enums; - -public enum QueryType { - FORM, - SQL; +package org.apache.dolphinscheduler.common.enums; - public static QueryType getEnum(int value){ - for (QueryType e:QueryType.values()) { - if(e.ordinal() == value) { - return e; - } - } - //For values out of enum scope - return null; - } +public enum Event { + ACK, + RESULT; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopQueryType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopQueryType.java new file mode 100644 index 0000000000..4d279f5d8e --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/SqoopQueryType.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.enums; + +public enum SqoopQueryType { + + FORM(0, "SQOOP_QUERY_FORM"), + SQL(1, "SQOOP_QUERY_SQL"); + + private final Integer code; + + private final String desc; + + SqoopQueryType(Integer code, String desc) { + this.code = code; + this.desc = desc; + } + + public Integer getCode() { + return code; + } + + public String getDesc() { + return desc; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java index a7fc0839eb..287f7267bc 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java @@ -42,6 +42,4 @@ public class ResourceInfo { public void setRes(String res) { this.res = res; } - - } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskParams.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskParams.java deleted file mode 100644 index abea2d95b0..0000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/TaskParams.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.dolphinscheduler.common.task; - -import java.util.Map; - -public class TaskParams { - - private String rawScript; - private Map[] localParams; - - public void setRawScript(String rawScript) { - this.rawScript = rawScript; - } - - public void setLocalParams(Map[] localParams) { - this.localParams = localParams; - } - - public String getRawScript() { - return rawScript; - } - - public void setLocalParamValue(String prop, Object value) { - if (localParams == null || value == null) { - return; - } - for (int i = 0; i < localParams.length; i++) { - if (localParams[i].get("prop").equals(prop)) { - localParams[i].put("value", (String)value); - } - } - } - - public void setLocalParamValue(Map propToValue) { - if (localParams == null || propToValue == null) { - return; - } - for (int i = 0; i < localParams.length; i++) { - String prop = localParams[i].get("prop"); - if (propToValue.containsKey(prop)) { - localParams[i].put("value",(String)propToValue.get(prop)); - } - } - } - - public String getLocalParamValue(String prop) { - if (localParams == null) { - return null; - } - for (int i = 0; i < localParams.length; i++) { - String tmpProp = localParams[i].get("prop"); - if (tmpProp.equals(prop)) { - return localParams[i].get("value"); - } - } - return null; - } - - public Map[] getLocalParams() { - return localParams; - } -} \ No newline at end of file diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java index 7f0f2c8079..094620aee5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java @@ -21,6 +21,7 @@ import org.apache.dolphinscheduler.common.model.DependentTaskModel; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import java.util.ArrayList; import java.util.List; public class ConditionsParameters extends AbstractParameters { @@ -43,7 +44,7 @@ public class ConditionsParameters extends AbstractParameters { @Override public List getResourceFilesList() { - return null; + return new ArrayList<>(); } public List getDependTaskList() { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java index f54e107995..c1f5f1d81c 100755 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/datax/DataxParameters.java @@ -14,15 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.task.datax; -import java.util.ArrayList; -import java.util.List; +package org.apache.dolphinscheduler.common.task.datax; -import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.StringUtils; + +import java.util.ArrayList; +import java.util.List; /** * DataX parameter @@ -89,6 +90,16 @@ public class DataxParameters extends AbstractParameters { */ private int jobSpeedRecord; + /** + * Xms memory + */ + private int xms; + + /** + * Xmx memory + */ + private int xmx; + public int getCustomConfig() { return customConfig; } @@ -185,6 +196,22 @@ public class DataxParameters extends AbstractParameters { this.jobSpeedRecord = jobSpeedRecord; } + public int getXms() { + return xms; + } + + public void setXms(int xms) { + this.xms = xms; + } + + public int getXmx() { + return xmx; + } + + public void setXmx(int xmx) { + this.xmx = xmx; + } + @Override public boolean checkParameters() { if (customConfig == Flag.NO.ordinal()) { @@ -204,19 +231,21 @@ public class DataxParameters extends AbstractParameters { @Override public String toString() { - return "DataxParameters{" + - "customConfig=" + customConfig + - ", json='" + json + '\'' + - ", dsType='" + dsType + '\'' + - ", dataSource=" + dataSource + - ", dtType='" + dtType + '\'' + - ", dataTarget=" + dataTarget + - ", sql='" + sql + '\'' + - ", targetTable='" + targetTable + '\'' + - ", preStatements=" + preStatements + - ", postStatements=" + postStatements + - ", jobSpeedByte=" + jobSpeedByte + - ", jobSpeedRecord=" + jobSpeedRecord + - '}'; + return "DataxParameters{" + + "customConfig=" + customConfig + + ", json='" + json + '\'' + + ", dsType='" + dsType + '\'' + + ", dataSource=" + dataSource + + ", dtType='" + dtType + '\'' + + ", dataTarget=" + dataTarget 
+ + ", sql='" + sql + '\'' + + ", targetTable='" + targetTable + '\'' + + ", preStatements=" + preStatements + + ", postStatements=" + postStatements + + ", jobSpeedByte=" + jobSpeedByte + + ", jobSpeedRecord=" + jobSpeedRecord + + ", xms=" + xms + + ", xmx=" + xmx + + '}'; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java index 231dd33146..1b1f0a6c5d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/flink/FlinkParameters.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.common.task.flink; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import java.util.ArrayList; import java.util.List; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java index 4e58201bf3..32a2a6b05d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/spark/SparkParameters.java @@ -216,7 +216,7 @@ public class SparkParameters extends AbstractParameters { @Override public boolean checkParameters() { - return mainJar != null && programType != null && sparkVersion != null; + return mainJar != null && programType != null; } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java index e3e33566e9..0dcfbddaf4 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -30,13 +30,14 @@ import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; -import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.nio.charset.UnsupportedCharsetException; import java.util.Optional; @@ -157,13 +158,12 @@ public class FileUtils { } /** - * create directory and user + * create directory if absent * * @param execLocalPath execute local path - * @param userName user name * @throws IOException errors */ - public static void createWorkDirAndUserIfAbsent(String execLocalPath, String userName) throws IOException { + public static void createWorkDirIfAbsent(String execLocalPath) throws IOException { //if work dir exists, first delete File execLocalPathFile = new File(execLocalPath); @@ -176,27 +176,6 @@ public class FileUtils { String mkdirLog = "create dir success " + execLocalPath; LoggerUtils.logInfo(Optional.ofNullable(logger), mkdirLog); LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), mkdirLog); - - //if not exists this user,then create - OSUtils.taskLoggerThreadLocal.set(taskLoggerThreadLocal.get()); - try { - if (!OSUtils.getUserList().contains(userName)) { - boolean isSuccessCreateUser = OSUtils.createUser(userName); - - String infoLog; - if (isSuccessCreateUser) { - infoLog = String.format("create user name success %s", userName); - } else { - infoLog = String.format("create user name 
fail %s", userName); - } - LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog); - LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog); - } - } catch (Throwable e) { - LoggerUtils.logError(Optional.ofNullable(logger), e); - LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), e); - } - OSUtils.taskLoggerThreadLocal.remove(); } /** @@ -217,7 +196,7 @@ public class FileUtils { return false; } bufferedReader = new BufferedReader(new StringReader(content)); - bufferedWriter = new BufferedWriter(new FileWriter(distFile)); + bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(distFile), StandardCharsets.UTF_8)); char[] buf = new char[1024]; int len; while ((len = bufferedReader.read(buf)) != -1) { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java index fe9ac932c1..8fa8d72236 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java @@ -39,6 +39,7 @@ import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.security.PrivilegedExceptionAction; import java.util.Collections; @@ -263,7 +264,7 @@ public class HadoopUtils implements Closeable { } try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) { - BufferedReader br = new BufferedReader(new InputStreamReader(in)); + BufferedReader br = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); Stream stream = br.lines().skip(skipLineNums).limit(limit); return stream.collect(Collectors.toList()); } @@ -648,27 +649,25 @@ public class HadoopUtils implements Closeable { String yarnUrl = 
"http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info"; - String state = null; try { + /** - * send http get request to rm1 + * send http get request to rm */ - state = getRMState(String.format(yarnUrl, rmIdArr[0])); - if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[0]; - } else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) { - state = getRMState(String.format(yarnUrl, rmIdArr[1])); + for (String rmId : rmIdArr) { + String state = getRMState(String.format(yarnUrl, rmId)); if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[1]; + return rmId; } - } else { - return null; } + } catch (Exception e) { - state = getRMState(String.format(yarnUrl, rmIdArr[1])); - if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { - return rmIdArr[0]; + for (int i = 1; i < rmIdArr.length; i++) { + String state = getRMState(String.format(yarnUrl, rmIdArr[i])); + if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) { + return rmIdArr[i]; + } } } return null; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 56ef74d6ee..fc11a2add2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.common.utils; +import static java.nio.charset.StandardCharsets.UTF_8; + import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT; import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; @@ -126,6 +128,22 @@ public class JSONUtils { return null; } + /** + * deserialize + * + * @param src byte array + * @param 
clazz class + * @param deserialize type + * @return deserialize type + */ + public static T parseObject(byte[] src, Class clazz) { + if (src == null) { + return null; + } + String json = new String(src, UTF_8); + return parseObject(json, clazz); + } + /** * json to list * @@ -253,6 +271,27 @@ public class JSONUtils { } } + /** + * serialize to json byte + * + * @param obj object + * @param object type + * @return byte array + */ + public static byte[] toJsonByteArray(T obj) { + if (obj == null) { + return null; + } + String json = ""; + try { + json = toJsonString(obj); + } catch (Exception e) { + logger.error("json serialize exception.", e); + } + + return json.getBytes(UTF_8); + } + public static ObjectNode parseObject(String text) { try { return (ObjectNode) objectMapper.readTree(text); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java index 39ec04afcf..034217ef82 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java @@ -32,6 +32,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,6 +45,10 @@ public class ParameterUtils { private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class); + private static final String DATE_PARSE_PATTERN = "\\$\\[([^\\]]+)]"; + + private static final String DATE_START_PATTERN = "^[0-9]"; + private ParameterUtils() { throw new UnsupportedOperationException("Construct ParameterUtils"); } @@ -51,33 +57,29 @@ public class ParameterUtils { * convert parameters place holders * * @param parameterString parameter - * @param 
parameterMap parameter map + * @param parameterMap parameter map * @return convert parameters place holders */ public static String convertParameterPlaceholders(String parameterString, Map parameterMap) { - if (StringUtils.isEmpty(parameterString) || parameterMap == null) { + if (StringUtils.isEmpty(parameterString)) { return parameterString; } - - //Get current time, schedule execute time - String cronTimeStr = parameterMap.get(Constants.PARAMETER_DATETIME); - - Date cronTime = null; - - if (StringUtils.isNotEmpty(cronTimeStr)) { + Date cronTime; + if (parameterMap != null && !parameterMap.isEmpty()) { + // replace variable ${} form,refers to the replacement of system variables and custom variables + parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); + } + if (parameterMap != null && null != parameterMap.get(Constants.PARAMETER_DATETIME)) { + //Get current time, schedule execute time + String cronTimeStr = parameterMap.get(Constants.PARAMETER_DATETIME); cronTime = DateUtils.parse(cronTimeStr, Constants.PARAMETER_FORMAT_TIME); } else { cronTime = new Date(); } - - // replace variable ${} form,refers to the replacement of system variables and custom variables - parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); - // replace time $[...] form, eg. 
$[yyyyMMdd] if (cronTime != null) { - parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true); + return dateTemplateParse(parameterString, cronTime); } - return parameterString; } @@ -86,7 +88,7 @@ public class ParameterUtils { * convert parameters place holders * * @param parameterString parameter - * @param parameterMap parameter map + * @param parameterMap parameter map * @return convert parameters place holders */ public static String convertParameterPlaceholders2(String parameterString, Map parameterMap) { @@ -105,12 +107,13 @@ public class ParameterUtils { } // replace variable ${} form,refers to the replacement of system variables and custom variables - parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); + if (!parameterMap.isEmpty()) { + parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true); + } // replace time $[...] form, eg. $[yyyyMMdd] if (cronTime != null) { - parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true); - + return dateTemplateParse(parameterString, cronTime); } return parameterString; } @@ -118,10 +121,10 @@ public class ParameterUtils { /** * set in parameter * - * @param index index - * @param stmt preparedstatement + * @param index index + * @param stmt preparedstatement * @param dataType data type - * @param value value + * @param value value * @throws Exception errors */ public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value) throws Exception { @@ -149,10 +152,10 @@ public class ParameterUtils { /** * curing user define parameters * - * @param globalParamMap global param map + * @param globalParamMap global param map * @param globalParamList global param list - * @param commandType command type - * @param scheduleTime schedule time + * @param commandType command type + * @param scheduleTime schedule time * @return curing user define 
parameters */ public static String curingGlobalParams(Map globalParamMap, List globalParamList, @@ -169,7 +172,7 @@ public class ParameterUtils { Map allParamMap = new HashMap<>(); //If it is a complement, a complement time needs to be passed in, according to the task type Map timeParams = BusinessTimeUtils - .getBusinessTime(commandType, scheduleTime); + .getBusinessTime(commandType, scheduleTime); if (timeParams != null) { allParamMap.putAll(timeParams); @@ -248,4 +251,30 @@ public class ParameterUtils { } return map; } + + private static String dateTemplateParse(String templateStr, Date date) { + if (templateStr == null) { + return null; + } + Pattern pattern = Pattern.compile(DATE_PARSE_PATTERN); + + StringBuffer newValue = new StringBuffer(templateStr.length()); + + Matcher matcher = pattern.matcher(templateStr); + + while (matcher.find()) { + String key = matcher.group(1); + if (Pattern.matches(DATE_START_PATTERN, key)) { + continue; + } + String value = TimePlaceholderUtils.getPlaceHolderTime(key, date); + assert value != null; + matcher.appendReplacement(newValue, value); + } + + matcher.appendTail(newValue); + + return newValue.toString(); + } + } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java index 4f4f12766b..256f19905c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/StringUtils.java @@ -45,6 +45,14 @@ public class StringUtils { } public static String replaceNRTtoUnderline(String src) { - return src.replaceAll("[\n|\r|\t]", "_"); + if (isBlank(src)) { + return src; + } else { + return src.replaceAll("[\n|\r|\t]", "_"); + } + } + + public static String trim(String str) { + return str == null ? 
null : str.trim(); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/VarPoolUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/VarPoolUtils.java index 837e96f55f..89a8605a99 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/VarPoolUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/VarPoolUtils.java @@ -18,42 +18,19 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.task.TaskParams; import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.Map; public class VarPoolUtils { - /** - * getTaskNodeLocalParam - * @param taskNode taskNode - * @param prop prop - * @return localParamForProp - */ - public static Object getTaskNodeLocalParam(TaskNode taskNode, String prop) { - String taskParamsJson = taskNode.getParams(); - TaskParams taskParams = JSONUtils.parseObject(taskParamsJson, TaskParams.class); - if (taskParams == null) { - return null; - } - return taskParams.getLocalParamValue(prop); - } - - /** - * setTaskNodeLocalParams - * @param taskNode taskNode - * @param prop LocalParamName - * @param value LocalParamValue - */ - public static void setTaskNodeLocalParams(TaskNode taskNode, String prop, Object value) { - String taskParamsJson = taskNode.getParams(); - TaskParams taskParams = JSONUtils.parseObject(taskParamsJson, TaskParams.class); - if (taskParams == null) { - return; - } - taskParams.setLocalParamValue(prop, value); - taskNode.setParams(JSONUtils.toJsonString(taskParams)); - } + + private static final String LOCALPARAMS = "localParams"; + + private static final String PROP = "prop"; + + private static final String VALUE = "value"; /** * setTaskNodeLocalParams @@ -62,11 +39,20 @@ public class VarPoolUtils { */ public static void 
setTaskNodeLocalParams(TaskNode taskNode, Map propToValue) { String taskParamsJson = taskNode.getParams(); - TaskParams taskParams = JSONUtils.parseObject(taskParamsJson, TaskParams.class); - if (taskParams == null) { - return; + Map taskParams = JSONUtils.parseObject(taskParamsJson, HashMap.class); + + Object localParamsObject = taskParams.get(LOCALPARAMS); + if (null != localParamsObject && null != propToValue && propToValue.size() > 0) { + ArrayList localParams = (ArrayList)localParamsObject; + for (int i = 0; i < localParams.size(); i++) { + Map map = (Map)localParams.get(i); + String prop = map.get(PROP); + if (StringUtils.isNotEmpty(prop) && propToValue.containsKey(prop)) { + map.put(VALUE,(String)propToValue.get(prop)); + } + } + taskParams.put(LOCALPARAMS,localParams); } - taskParams.setLocalParamValue(propToValue); taskNode.setParams(JSONUtils.toJsonString(taskParams)); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java index 35cb018399..0094390c7c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java @@ -14,17 +14,36 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.common.utils.placeholder; +import static org.apache.dolphinscheduler.common.Constants.ADD_CHAR; +import static org.apache.dolphinscheduler.common.Constants.ADD_STRING; +import static org.apache.dolphinscheduler.common.Constants.DIVISION_CHAR; +import static org.apache.dolphinscheduler.common.Constants.DIVISION_STRING; +import static org.apache.dolphinscheduler.common.Constants.LEFT_BRACE_CHAR; +import static org.apache.dolphinscheduler.common.Constants.LEFT_BRACE_STRING; +import static org.apache.dolphinscheduler.common.Constants.MULTIPLY_CHAR; +import static org.apache.dolphinscheduler.common.Constants.MULTIPLY_STRING; +import static org.apache.dolphinscheduler.common.Constants.N; +import static org.apache.dolphinscheduler.common.Constants.P; +import static org.apache.dolphinscheduler.common.Constants.RIGHT_BRACE_CHAR; +import static org.apache.dolphinscheduler.common.Constants.SUBTRACT_CHAR; +import static org.apache.dolphinscheduler.common.Constants.SUBTRACT_STRING; + import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.dolphinscheduler.common.utils.StringUtils; -import java.util.*; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Stack; -import static org.apache.dolphinscheduler.common.Constants.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * time place holder utils @@ -46,8 +65,8 @@ public class TimePlaceholderUtils { * Replaces all placeholders of format {@code ${name}} with the value returned * from the supplied {@link PropertyPlaceholderHelper.PlaceholderResolver}. 
* - * @param value the value containing the placeholders to be replaced - * @param date custom date + * @param value the value containing the placeholders to be replaced + * @param date custom date * @param ignoreUnresolvablePlaceholders ignore unresolvable placeholders * @return the supplied value with placeholders replaced inline */ @@ -59,11 +78,11 @@ public class TimePlaceholderUtils { return helper.replacePlaceholders(value, new TimePlaceholderResolver(value, date)); } - /** * Creates a new {@code PropertyPlaceholderHelper} that uses the supplied prefix and suffix. + * * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should - * be ignored ({@code true}) or cause an exception ({@code false}) + * be ignored ({@code true}) or cause an exception ({@code false}) */ private static PropertyPlaceholderHelper getPropertyPlaceholderHelper(boolean ignoreUnresolvablePlaceholders) { return new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, null, ignoreUnresolvablePlaceholders); @@ -89,7 +108,7 @@ public class TimePlaceholderUtils { * Change the sign in the expression to P (positive) N (negative) * * @param expression - * @return eg. "-3+-6*(+8)-(-5) -> S3+S6*(P8)-(S5)" + * @return eg. 
"-3+-6*(+8)-(-5) -> S3+S6*(P8)-(S5)" */ private static String convert(String expression) { char[] arr = expression.toCharArray(); @@ -262,7 +281,7 @@ public class TimePlaceholderUtils { * Placeholder replacement resolver */ private static class TimePlaceholderResolver implements - PropertyPlaceholderHelper.PlaceholderResolver { + PropertyPlaceholderHelper.PlaceholderResolver { private final String value; @@ -278,12 +297,28 @@ public class TimePlaceholderUtils { try { return calculateTime(placeholderName, date); } catch (Exception ex) { - logger.error("resolve placeholder '{}' in [ {} ]" , placeholderName, value, ex); + logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); return null; } } } + /** + * return the formatted date according to the corresponding date format + * + * @param expression date expression + * @param date date + * @return reformat date + */ + public static String getPlaceHolderTime(String expression, Date date) { + if (StringUtils.isBlank(expression)) { + return null; + } + if (null == date) { + return null; + } + return calculateTime(expression, date); + } /** * calculate time @@ -320,9 +355,10 @@ public class TimePlaceholderUtils { /** * calculate time expresstion + * * @param expression expresstion - * @param date date - * @return map with date, date format + * @param date date + * @return map with date, date format */ public static Map.Entry calcTimeExpression(String expression, Date date) { Map.Entry resultEntry; @@ -346,8 +382,9 @@ public class TimePlaceholderUtils { /** * get first day of month + * * @param expression expresstion - * @param date date + * @param date date * @return first day of month */ public static Map.Entry calcMonthBegin(String expression, Date date) { @@ -369,8 +406,9 @@ public class TimePlaceholderUtils { /** * get last day of month + * * @param expression expresstion - * @param date date + * @param date date * @return last day of month */ public static Map.Entry calcMonthEnd(String 
expression, Date date) { @@ -392,8 +430,9 @@ public class TimePlaceholderUtils { /** * get first day of week + * * @param expression expresstion - * @param date date + * @param date date * @return monday */ public static Map.Entry calcWeekStart(String expression, Date date) { @@ -414,8 +453,9 @@ public class TimePlaceholderUtils { /** * get last day of week + * * @param expression expresstion - * @param date date + * @param date date * @return last day of week */ public static Map.Entry calcWeekEnd(String expression, Date date) { @@ -437,8 +477,9 @@ public class TimePlaceholderUtils { /** * calc months expression + * * @param expression expresstion - * @param date date + * @param date date * @return calc months */ public static Map.Entry calcMonths(String expression, Date date) { @@ -461,7 +502,7 @@ public class TimePlaceholderUtils { * calculate time expression * * @param expression expresstion - * @param date date + * @param date date * @return calculate time expression with date,format */ public static Map.Entry calcMinutes(String expression, Date date) { @@ -471,7 +512,7 @@ public class TimePlaceholderUtils { if (Character.isDigit(expression.charAt(index + 1))) { String addMinuteExpr = expression.substring(index + 1); Date targetDate = org.apache.commons.lang.time.DateUtils - .addMinutes(date, calcMinutes(addMinuteExpr)); + .addMinutes(date, calcMinutes(addMinuteExpr)); String dateFormat = expression.substring(0, index); return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); @@ -482,7 +523,7 @@ public class TimePlaceholderUtils { if (Character.isDigit(expression.charAt(index + 1))) { String addMinuteExpr = expression.substring(index + 1); Date targetDate = org.apache.commons.lang.time.DateUtils - .addMinutes(date, 0 - calcMinutes(addMinuteExpr)); + .addMinutes(date, 0 - calcMinutes(addMinuteExpr)); String dateFormat = expression.substring(0, index); return new AbstractMap.SimpleImmutableEntry<>(targetDate, dateFormat); @@ -512,7 +553,7 @@ public 
class TimePlaceholderUtils { } else { calcExpression = String.format("60*24*(%s)%s", minuteExpression.substring(0, index), - minuteExpression.substring(index)); + minuteExpression.substring(index)); } return calculate(calcExpression); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java deleted file mode 100644 index 9d51eaec1c..0000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32.java +++ /dev/null @@ -1,1107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.common.utils.process; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - -/** - * This class is used to create operating system processes. - * - *

Each {@code ProcessBuilderForWindows} instance manages a collection - * of process attributes. The {@link #start()} method creates a new - * {@link Process} instance with those attributes. The {@link - * #start()} method can be invoked repeatedly from the same instance - * to create new subprocesses with identical or related attributes. - * - *

Each process builder manages these process attributes: - * - *

    - * - *
  • a command, a list of strings which signifies the - * external program file to be invoked and its arguments, if any. - * Which string lists represent a valid operating system command is - * system-dependent. For example, it is common for each conceptual - * argument to be an element in this list, but there are operating - * systems where programs are expected to tokenize command line - * strings themselves - on such a system a Java implementation might - * require commands to contain exactly two elements. - * - *
  • an environment, which is a system-dependent mapping from - * variables to values. The initial value is a copy of - * the environment of the current process (see {@link System#getenv()}). - * - *
  • a working directory. The default value is the current - * working directory of the current process, usually the directory - * named by the system property {@code user.dir}. - * - *
  • a source of standard input. - * By default, the subprocess reads input from a pipe. Java code - * can access this pipe via the output stream returned by - * {@link Process#getOutputStream()}. However, standard input may - * be redirected to another source using - * {@link #redirectInput(ProcessBuilderForWin32.Redirect) redirectInput}. - * In this case, {@link Process#getOutputStream()} will return a - * null output stream, for which: - * - *
      - *
    • the {@link OutputStream#write(int) write} methods always - * throw {@code IOException} - *
    • the {@link OutputStream#close() close} method does nothing - *
    - * - *
  • a destination for standard output - * and standard error. By default, the subprocess writes standard - * output and standard error to pipes. Java code can access these pipes - * via the input streams returned by {@link Process#getInputStream()} and - * {@link Process#getErrorStream()}. However, standard output and - * standard error may be redirected to other destinations using - * {@link #redirectOutput(ProcessBuilderForWin32.Redirect) redirectOutput} and - * {@link #redirectError(ProcessBuilderForWin32.Redirect) redirectError}. - * In this case, {@link Process#getInputStream()} and/or - * {@link Process#getErrorStream()} will return a null input - * stream, for which: - * - *
      - *
    • the {@link InputStream#read() read} methods always return - * {@code -1} - *
    • the {@link InputStream#available() available} method always returns - * {@code 0} - *
    • the {@link InputStream#close() close} method does nothing - *
    - * - *
  • a redirectErrorStream property. Initially, this property - * is {@code false}, meaning that the standard output and error - * output of a subprocess are sent to two separate streams, which can - * be accessed using the {@link Process#getInputStream()} and {@link - * Process#getErrorStream()} methods. - * - *

    If the value is set to {@code true}, then: - * - *

      - *
    • standard error is merged with the standard output and always sent - * to the same destination (this makes it easier to correlate error - * messages with the corresponding output) - *
    • the common destination of standard error and standard output can be - * redirected using - * {@link #redirectOutput(ProcessBuilderForWin32.Redirect) redirectOutput} - *
    • any redirection set by the - * {@link #redirectError(ProcessBuilderForWin32.Redirect) redirectError} - * method is ignored when creating a subprocess - *
    • the stream returned from {@link Process#getErrorStream()} will - * always be a null input stream - *
    - * - *
- * - *

Modifying a process builder's attributes will affect processes - * subsequently started by that object's {@link #start()} method, but - * will never affect previously started processes or the Java process - * itself. - * - *

Most error checking is performed by the {@link #start()} method. - * It is possible to modify the state of an object so that {@link - * #start()} will fail. For example, setting the command attribute to - * an empty list will not throw an exception unless {@link #start()} - * is invoked. - * - *

Note that this class is not synchronized. - * If multiple threads access a {@code ProcessBuilderForWindows} instance - * concurrently, and at least one of the threads modifies one of the - * attributes structurally, it must be synchronized externally. - * - *

Starting a new process which uses the default working directory - * and environment is easy: - * - *

 {@code
- * Process p = new ProcessBuilderForWindows("myCommand", "myArg").start();
- * }
- * - *

Here is an example that starts a process with a modified working - * directory and environment, and redirects standard output and error - * to be appended to a log file: - * - *

 {@code
- * ProcessBuilderForWindows pb =
- *   new ProcessBuilderForWindows("myCommand", "myArg1", "myArg2");
- * Map env = pb.environment();
- * env.put("VAR1", "myValue");
- * env.remove("OTHERVAR");
- * env.put("VAR2", env.get("VAR1") + "suffix");
- * pb.directory(new File("myDir"));
- * File log = new File("log");
- * pb.redirectErrorStream(true);
- * pb.redirectOutput(Redirect.appendTo(log));
- * Process p = pb.start();
- * assert pb.redirectInput() == Redirect.PIPE;
- * assert pb.redirectOutput().file() == log;
- * assert p.getInputStream().read() == -1;
- * }
- * - *

To start a process with an explicit set of environment - * variables, first call {@link Map#clear() Map.clear()} - * before adding environment variables. - * - * @author Martin Buchholz - * @since 1.5 - */ - -public class ProcessBuilderForWin32 { - - private String username; - private String password; - private List command; - private File directory; - private Map environment; - private boolean redirectErrorStream; - private ProcessBuilderForWin32.Redirect[] redirects; - - /** - * Constructs a process builder with the specified operating - * system program and arguments. This constructor does not - * make a copy of the {@code command} list. Subsequent - * updates to the list will be reflected in the state of the - * process builder. It is not checked whether - * {@code command} corresponds to a valid operating system - * command. - * - * @param command the list containing the program and its arguments - * @throws NullPointerException if the argument is null - */ - public ProcessBuilderForWin32(List command) { - if (command == null) { - throw new NullPointerException(); - } - this.command = command; - } - - /** - * Constructs a process builder with the specified operating - * system program and arguments. This is a convenience - * constructor that sets the process builder's command to a string - * list containing the same strings as the {@code command} - * array, in the same order. It is not checked whether - * {@code command} corresponds to a valid operating system - * command. - * - * @param command a string array containing the program and its arguments - */ - public ProcessBuilderForWin32(String... 
command) { - this.command = new ArrayList<>(command.length); - for (String arg : command) { - this.command.add(arg); - } - } - - /** - * set username and password for process - * - * @param username username - * @param password password - * @return this process builder - */ - public ProcessBuilderForWin32 user(String username, String password) { - this.username = username; - this.password = password; - return this; - } - - /** - * Sets this process builder's operating system program and - * arguments. This method does not make a copy of the - * {@code command} list. Subsequent updates to the list will - * be reflected in the state of the process builder. It is not - * checked whether {@code command} corresponds to a valid - * operating system command. - * - * @param command the list containing the program and its arguments - * @return this process builder - * - * @throws NullPointerException if the argument is null - */ - public ProcessBuilderForWin32 command(List command) { - if (command == null) { - throw new NullPointerException(); - } - this.command = command; - return this; - } - - /** - * Sets this process builder's operating system program and - * arguments. This is a convenience method that sets the command - * to a string list containing the same strings as the - * {@code command} array, in the same order. It is not - * checked whether {@code command} corresponds to a valid - * operating system command. - * - * @param command a string array containing the program and its arguments - * @return this process builder - */ - public ProcessBuilderForWin32 command(String... command) { - this.command = new ArrayList<>(command.length); - for (String arg : command) { - this.command.add(arg); - } - return this; - } - - /** - * Returns this process builder's operating system program and - * arguments. The returned list is not a copy. Subsequent - * updates to the list will be reflected in the state of this - * process builder. 
- * - * @return this process builder's program and its arguments - */ - public List command() { - return command; - } - - /** - * Returns a string map view of this process builder's environment. - * - * Whenever a process builder is created, the environment is - * initialized to a copy of the current process environment (see - * {@link System#getenv()}). Subprocesses subsequently started by - * this object's {@link #start()} method will use this map as - * their environment. - * - *

The returned object may be modified using ordinary {@link - * Map Map} operations. These modifications will be - * visible to subprocesses started via the {@link #start()} - * method. Two {@code ProcessBuilderForWindows} instances always - * contain independent process environments, so changes to the - * returned map will never be reflected in any other - * {@code ProcessBuilderForWindows} instance or the values returned by - * {@link System#getenv System.getenv}. - * - *

If the system does not support environment variables, an - * empty map is returned. - * - *

The returned map does not permit null keys or values. - * Attempting to insert or query the presence of a null key or - * value will throw a {@link NullPointerException}. - * Attempting to query the presence of a key or value which is not - * of type {@link String} will throw a {@link ClassCastException}. - * - *

The behavior of the returned map is system-dependent. A - * system may not allow modifications to environment variables or - * may forbid certain variable names or values. For this reason, - * attempts to modify the map may fail with - * {@link UnsupportedOperationException} or - * {@link IllegalArgumentException} - * if the modification is not permitted by the operating system. - * - *

Since the external format of environment variable names and - * values is system-dependent, there may not be a one-to-one - * mapping between them and Java's Unicode strings. Nevertheless, - * the map is implemented in such a way that environment variables - * which are not modified by Java code will have an unmodified - * native representation in the subprocess. - * - *

The returned map and its collection views may not obey the - * general contract of the {@link Object#equals} and - * {@link Object#hashCode} methods. - * - *

The returned map is typically case-sensitive on all platforms. - * - *

If a security manager exists, its - * {@link SecurityManager#checkPermission checkPermission} method - * is called with a - * {@link RuntimePermission}{@code ("getenv.*")} permission. - * This may result in a {@link SecurityException} being thrown. - * - *

When passing information to a Java subprocess, - * system properties - * are generally preferred over environment variables. - * - * @return this process builder's environment - * - * @throws SecurityException - * if a security manager exists and its - * {@link SecurityManager#checkPermission checkPermission} - * method doesn't allow access to the process environment - * - * @see Runtime#exec(String[],String[], File) - * @see System#getenv() - */ - public Map environment() { - SecurityManager security = System.getSecurityManager(); - if (security != null) { - security.checkPermission(new RuntimePermission("getenv.*")); - } - - if (environment == null) { - environment = ProcessEnvironmentForWin32.environment(); - } - - assert environment != null; - - return environment; - } - - // Only for use by Runtime.exec(...envp...) - ProcessBuilderForWin32 environment(String[] envp) { - assert environment == null; - if (envp != null) { - environment = ProcessEnvironmentForWin32.emptyEnvironment(envp.length); - assert environment != null; - - for (String envstring : envp) { - // Before 1.5, we blindly passed invalid envstrings - // to the child process. - // We would like to throw an exception, but do not, - // for compatibility with old broken code. - - // Silently discard any trailing junk. - if (envstring.indexOf((int) '\u0000') != -1) { - envstring = envstring.replaceFirst("\u0000.*", ""); - } - - int eqlsign = - envstring.indexOf('=', ProcessEnvironmentForWin32.MIN_NAME_LENGTH); - // Silently ignore envstrings lacking the required `='. - if (eqlsign != -1) { - environment.put(envstring.substring(0,eqlsign), - envstring.substring(eqlsign+1)); - } - } - } - return this; - } - - /** - * Returns this process builder's working directory. - * - * Subprocesses subsequently started by this object's {@link - * #start()} method will use this as their working directory. 
- * The returned value may be {@code null} -- this means to use - * the working directory of the current Java process, usually the - * directory named by the system property {@code user.dir}, - * as the working directory of the child process. - * - * @return this process builder's working directory - */ - public File directory() { - return directory; - } - - /** - * Sets this process builder's working directory. - * - * Subprocesses subsequently started by this object's {@link - * #start()} method will use this as their working directory. - * The argument may be {@code null} -- this means to use the - * working directory of the current Java process, usually the - * directory named by the system property {@code user.dir}, - * as the working directory of the child process. - * - * @param directory the new working directory - * @return this process builder - */ - public ProcessBuilderForWin32 directory(File directory) { - this.directory = directory; - return this; - } - - // ---------------- I/O Redirection ---------------- - - /** - * Implements a null input stream. - */ - static class NullInputStream extends InputStream { - static final ProcessBuilderForWin32.NullInputStream INSTANCE = new ProcessBuilderForWin32.NullInputStream(); - private NullInputStream() {} - @Override - public int read() { return -1; } - @Override - public int available() { return 0; } - } - - /** - * Implements a null output stream. - */ - static class NullOutputStream extends OutputStream { - static final ProcessBuilderForWin32.NullOutputStream INSTANCE = new ProcessBuilderForWin32.NullOutputStream(); - private NullOutputStream() {} - @Override - public void write(int b) throws IOException { - throw new IOException("Stream closed"); - } - } - - /** - * Represents a source of subprocess input or a destination of - * subprocess output. - * - * Each {@code Redirect} instance is one of the following: - * - *

    - *
  • the special value {@link #PIPE Redirect.PIPE} - *
  • the special value {@link #INHERIT Redirect.INHERIT} - *
  • a redirection to read from a file, created by an invocation of - * {@link ProcessBuilderForWin32.Redirect#from Redirect.from(File)} - *
  • a redirection to write to a file, created by an invocation of - * {@link ProcessBuilderForWin32.Redirect#to Redirect.to(File)} - *
  • a redirection to append to a file, created by an invocation of - * {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)} - *
- * - *

Each of the above categories has an associated unique - * {@link ProcessBuilderForWin32.Redirect.Type Type}. - * - * @since 1.7 - */ - public abstract static class Redirect { - /** - * The type of a {@link ProcessBuilderForWin32.Redirect}. - */ - public enum Type { - /** - * The type of {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE}. - */ - PIPE, - - /** - * The type of {@link ProcessBuilderForWin32.Redirect#INHERIT Redirect.INHERIT}. - */ - INHERIT, - - /** - * The type of redirects returned from - * {@link ProcessBuilderForWin32.Redirect#from Redirect.from(File)}. - */ - READ, - - /** - * The type of redirects returned from - * {@link ProcessBuilderForWin32.Redirect#to Redirect.to(File)}. - */ - WRITE, - - /** - * The type of redirects returned from - * {@link ProcessBuilderForWin32.Redirect#appendTo Redirect.appendTo(File)}. - */ - APPEND - } - - /** - * Returns the type of this {@code Redirect}. - * @return the type of this {@code Redirect} - */ - public abstract ProcessBuilderForWin32.Redirect.Type type(); - - /** - * Indicates that subprocess I/O will be connected to the - * current Java process over a pipe. - * - * This is the default handling of subprocess standard I/O. - * - *

It will always be true that - *

 {@code
-         * Redirect.PIPE.file() == null &&
-         * Redirect.PIPE.type() == Redirect.Type.PIPE
-         * }
- */ - public static final ProcessBuilderForWin32.Redirect PIPE = new ProcessBuilderForWin32.Redirect() { - @Override - public Type type() { return Type.PIPE; } - @Override - public String toString() { return type().toString(); }}; - - /** - * Indicates that subprocess I/O source or destination will be the - * same as those of the current process. This is the normal - * behavior of most operating system command interpreters (shells). - * - *

It will always be true that - *

 {@code
-         * Redirect.INHERIT.file() == null &&
-         * Redirect.INHERIT.type() == Redirect.Type.INHERIT
-         * }
- */ - public static final ProcessBuilderForWin32.Redirect INHERIT = new ProcessBuilderForWin32.Redirect() { - @Override - public Type type() { return Type.INHERIT; } - @Override - public String toString() { return type().toString(); }}; - - /** - * Returns the {@link File} source or destination associated - * with this redirect, or {@code null} if there is no such file. - * - * @return the file associated with this redirect, - * or {@code null} if there is no such file - */ - public File file() { return null; } - - /** - * When redirected to a destination file, indicates if the output - * is to be written to the end of the file. - */ - boolean append() { - throw new UnsupportedOperationException(); - } - - /** - * Returns a redirect to read from the specified file. - * - *

It will always be true that - *

 {@code
-         * Redirect.from(file).file() == file &&
-         * Redirect.from(file).type() == Redirect.Type.READ
-         * }
- * - * @param file The {@code File} for the {@code Redirect}. - * @throws NullPointerException if the specified file is null - * @return a redirect to read from the specified file - */ - public static ProcessBuilderForWin32.Redirect from(final File file) { - if (file == null) { - throw new NullPointerException(); - } - return new ProcessBuilderForWin32.Redirect() { - @Override - public Type type() { return Type.READ; } - @Override - public File file() { return file; } - @Override - public String toString() { - return "redirect to read from file \"" + file + "\""; - } - }; - } - - /** - * Returns a redirect to write to the specified file. - * If the specified file exists when the subprocess is started, - * its previous contents will be discarded. - * - *

It will always be true that - *

 {@code
-         * Redirect.to(file).file() == file &&
-         * Redirect.to(file).type() == Redirect.Type.WRITE
-         * }
- * - * @param file The {@code File} for the {@code Redirect}. - * @throws NullPointerException if the specified file is null - * @return a redirect to write to the specified file - */ - public static ProcessBuilderForWin32.Redirect to(final File file) { - if (file == null) { - throw new NullPointerException(); - } - return new ProcessBuilderForWin32.Redirect() { - @Override - public Type type() { return Type.WRITE; } - @Override - public File file() { return file; } - @Override - public String toString() { - return "redirect to write to file \"" + file + "\""; - } - @Override - boolean append() { return false; } - }; - } - - /** - * Returns a redirect to append to the specified file. - * Each write operation first advances the position to the - * end of the file and then writes the requested data. - * Whether the advancement of the position and the writing - * of the data are done in a single atomic operation is - * system-dependent and therefore unspecified. - * - *

It will always be true that - *

 {@code
-         * Redirect.appendTo(file).file() == file &&
-         * Redirect.appendTo(file).type() == Redirect.Type.APPEND
-         * }
- * - * @param file The {@code File} for the {@code Redirect}. - * @throws NullPointerException if the specified file is null - * @return a redirect to append to the specified file - */ - public static ProcessBuilderForWin32.Redirect appendTo(final File file) { - if (file == null) { - throw new NullPointerException(); - } - return new ProcessBuilderForWin32.Redirect() { - @Override - public Type type() { return Type.APPEND; } - @Override - public File file() { return file; } - @Override - public String toString() { - return "redirect to append to file \"" + file + "\""; - } - @Override - boolean append() { return true; } - }; - } - - /** - * Compares the specified object with this {@code Redirect} for - * equality. Returns {@code true} if and only if the two - * objects are identical or both objects are {@code Redirect} - * instances of the same type associated with non-null equal - * {@code File} instances. - */ - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (! (obj instanceof ProcessBuilderForWin32.Redirect)) { - return false; - } - ProcessBuilderForWin32.Redirect r = (ProcessBuilderForWin32.Redirect) obj; - if (r.type() != this.type()) { - return false; - } - assert this.file() != null; - return this.file().equals(r.file()); - } - - /** - * Returns a hash code value for this {@code Redirect}. - * @return a hash code value for this {@code Redirect} - */ - @Override - public int hashCode() { - File file = file(); - if (file == null) { - return super.hashCode(); - } else { - return file.hashCode(); - } - } - - /** - * No public constructors. Clients must use predefined - * static {@code Redirect} instances or factory methods. - */ - private Redirect() {} - } - - private ProcessBuilderForWin32.Redirect[] redirects() { - if (redirects == null) { - redirects = new Redirect[] { - Redirect.PIPE, Redirect.PIPE, Redirect.PIPE - }; - } - return redirects; - } - - /** - * Sets this process builder's standard input source. 
- * - * Subprocesses subsequently started by this object's {@link #start()} - * method obtain their standard input from this source. - * - *

If the source is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE} - * (the initial value), then the standard input of a - * subprocess can be written to using the output stream - * returned by {@link Process#getOutputStream()}. - * If the source is set to any other value, then - * {@link Process#getOutputStream()} will return a - * null output stream. - * - * @param source the new standard input source - * @return this process builder - * @throws IllegalArgumentException - * if the redirect does not correspond to a valid source - * of data, that is, has type - * {@link ProcessBuilderForWin32.Redirect.Type#WRITE WRITE} or - * {@link ProcessBuilderForWin32.Redirect.Type#APPEND APPEND} - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectInput(ProcessBuilderForWin32.Redirect source) { - if (source.type() == ProcessBuilderForWin32.Redirect.Type.WRITE || - source.type() == ProcessBuilderForWin32.Redirect.Type.APPEND) { - throw new IllegalArgumentException( - "Redirect invalid for reading: " + source); - } - redirects()[0] = source; - return this; - } - - /** - * Sets this process builder's standard output destination. - * - * Subprocesses subsequently started by this object's {@link #start()} - * method send their standard output to this destination. - * - *

If the destination is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE} - * (the initial value), then the standard output of a subprocess - * can be read using the input stream returned by {@link - * Process#getInputStream()}. - * If the destination is set to any other value, then - * {@link Process#getInputStream()} will return a - * null input stream. - * - * @param destination the new standard output destination - * @return this process builder - * @throws IllegalArgumentException - * if the redirect does not correspond to a valid - * destination of data, that is, has type - * {@link ProcessBuilderForWin32.Redirect.Type#READ READ} - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectOutput(ProcessBuilderForWin32.Redirect destination) { - if (destination.type() == ProcessBuilderForWin32.Redirect.Type.READ) { - throw new IllegalArgumentException( - "Redirect invalid for writing: " + destination); - } - redirects()[1] = destination; - return this; - } - - /** - * Sets this process builder's standard error destination. - * - * Subprocesses subsequently started by this object's {@link #start()} - * method send their standard error to this destination. - * - *

If the destination is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE} - * (the initial value), then the error output of a subprocess - * can be read using the input stream returned by {@link - * Process#getErrorStream()}. - * If the destination is set to any other value, then - * {@link Process#getErrorStream()} will return a - * null input stream. - * - *

If the {@link #redirectErrorStream redirectErrorStream} - * attribute has been set {@code true}, then the redirection set - * by this method has no effect. - * - * @param destination the new standard error destination - * @return this process builder - * @throws IllegalArgumentException - * if the redirect does not correspond to a valid - * destination of data, that is, has type - * {@link ProcessBuilderForWin32.Redirect.Type#READ READ} - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectError(ProcessBuilderForWin32.Redirect destination) { - if (destination.type() == ProcessBuilderForWin32.Redirect.Type.READ) { - throw new IllegalArgumentException( - "Redirect invalid for writing: " + destination); - } - redirects()[2] = destination; - return this; - } - - /** - * Sets this process builder's standard input source to a file. - * - *

This is a convenience method. An invocation of the form - * {@code redirectInput(file)} - * behaves in exactly the same way as the invocation - * {@link #redirectInput(ProcessBuilderForWin32.Redirect) redirectInput} - * {@code (Redirect.from(file))}. - * - * @param file the new standard input source - * @return this process builder - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectInput(File file) { - return redirectInput(ProcessBuilderForWin32.Redirect.from(file)); - } - - /** - * Sets this process builder's standard output destination to a file. - * - *

This is a convenience method. An invocation of the form - * {@code redirectOutput(file)} - * behaves in exactly the same way as the invocation - * {@link #redirectOutput(ProcessBuilderForWin32.Redirect) redirectOutput} - * {@code (Redirect.to(file))}. - * - * @param file the new standard output destination - * @return this process builder - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectOutput(File file) { - return redirectOutput(ProcessBuilderForWin32.Redirect.to(file)); - } - - /** - * Sets this process builder's standard error destination to a file. - * - *

This is a convenience method. An invocation of the form - * {@code redirectError(file)} - * behaves in exactly the same way as the invocation - * {@link #redirectError(ProcessBuilderForWin32.Redirect) redirectError} - * {@code (Redirect.to(file))}. - * - * @param file the new standard error destination - * @return this process builder - * @since 1.7 - */ - public ProcessBuilderForWin32 redirectError(File file) { - return redirectError(ProcessBuilderForWin32.Redirect.to(file)); - } - - /** - * Returns this process builder's standard input source. - * - * Subprocesses subsequently started by this object's {@link #start()} - * method obtain their standard input from this source. - * The initial value is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE}. - * - * @return this process builder's standard input source - * @since 1.7 - */ - public ProcessBuilderForWin32.Redirect redirectInput() { - return (redirects == null) ? ProcessBuilderForWin32.Redirect.PIPE : redirects[0]; - } - - /** - * Returns this process builder's standard output destination. - * - * Subprocesses subsequently started by this object's {@link #start()} - * method redirect their standard output to this destination. - * The initial value is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE}. - * - * @return this process builder's standard output destination - * @since 1.7 - */ - public ProcessBuilderForWin32.Redirect redirectOutput() { - return (redirects == null) ? ProcessBuilderForWin32.Redirect.PIPE : redirects[1]; - } - - /** - * Returns this process builder's standard error destination. - * - * Subprocesses subsequently started by this object's {@link #start()} - * method redirect their standard error to this destination. - * The initial value is {@link ProcessBuilderForWin32.Redirect#PIPE Redirect.PIPE}. - * - * @return this process builder's standard error destination - * @since 1.7 - */ - public ProcessBuilderForWin32.Redirect redirectError() { - return (redirects == null) ? 
ProcessBuilderForWin32.Redirect.PIPE : redirects[2]; - } - - /** - * Sets the source and destination for subprocess standard I/O - * to be the same as those of the current Java process. - * - *

This is a convenience method. An invocation of the form - *

 {@code
-     * pb.inheritIO()
-     * }
- * behaves in exactly the same way as the invocation - *
 {@code
-     * pb.redirectInput(Redirect.INHERIT)
-     *   .redirectOutput(Redirect.INHERIT)
-     *   .redirectError(Redirect.INHERIT)
-     * }
- * - * This gives behavior equivalent to most operating system - * command interpreters, or the standard C library function - * {@code system()}. - * - * @return this process builder - * @since 1.7 - */ - public ProcessBuilderForWin32 inheritIO() { - Arrays.fill(redirects(), ProcessBuilderForWin32.Redirect.INHERIT); - return this; - } - - /** - * Tells whether this process builder merges standard error and - * standard output. - * - *

If this property is {@code true}, then any error output - * generated by subprocesses subsequently started by this object's - * {@link #start()} method will be merged with the standard - * output, so that both can be read using the - * {@link Process#getInputStream()} method. This makes it easier - * to correlate error messages with the corresponding output. - * The initial value is {@code false}. - * - * @return this process builder's {@code redirectErrorStream} property - */ - public boolean redirectErrorStream() { - return redirectErrorStream; - } - - /** - * Sets this process builder's {@code redirectErrorStream} property. - * - *

If this property is {@code true}, then any error output - * generated by subprocesses subsequently started by this object's - * {@link #start()} method will be merged with the standard - * output, so that both can be read using the - * {@link Process#getInputStream()} method. This makes it easier - * to correlate error messages with the corresponding output. - * The initial value is {@code false}. - * - * @param redirectErrorStream the new property value - * @return this process builder - */ - public ProcessBuilderForWin32 redirectErrorStream(boolean redirectErrorStream) { - this.redirectErrorStream = redirectErrorStream; - return this; - } - - /** - * Starts a new process using the attributes of this process builder. - * - *

The new process will - * invoke the command and arguments given by {@link #command()}, - * in a working directory as given by {@link #directory()}, - * with a process environment as given by {@link #environment()}. - * - *

This method checks that the command is a valid operating - * system command. Which commands are valid is system-dependent, - * but at the very least the command must be a non-empty list of - * non-null strings. - * - *

A minimal set of system dependent environment variables may - * be required to start a process on some operating systems. - * As a result, the subprocess may inherit additional environment variable - * settings beyond those in the process builder's {@link #environment()}. - * - *

If there is a security manager, its - * {@link SecurityManager#checkExec checkExec} - * method is called with the first component of this object's - * {@code command} array as its argument. This may result in - * a {@link SecurityException} being thrown. - * - *

Starting an operating system process is highly system-dependent. - * Among the many things that can go wrong are: - *

    - *
  • The operating system program file was not found. - *
  • Access to the program file was denied. - *
  • The working directory does not exist. - *
- * - *

In such cases an exception will be thrown. The exact nature - * of the exception is system-dependent, but it will always be a - * subclass of {@link IOException}. - * - *

Subsequent modifications to this process builder will not - * affect the returned {@link Process}. - * - * @return a new {@link Process} object for managing the subprocess - * - * @throws NullPointerException - * if an element of the command list is null - * - * @throws IndexOutOfBoundsException - * if the command is an empty list (has size {@code 0}) - * - * @throws SecurityException - * if a security manager exists and - *

    - * - *
  • its - * {@link SecurityManager#checkExec checkExec} - * method doesn't allow creation of the subprocess, or - * - *
  • the standard input to the subprocess was - * {@linkplain #redirectInput redirected from a file} - * and the security manager's - * {@link SecurityManager#checkRead checkRead} method - * denies read access to the file, or - * - *
  • the standard output or standard error of the - * subprocess was - * {@linkplain #redirectOutput redirected to a file} - * and the security manager's - * {@link SecurityManager#checkWrite checkWrite} method - * denies write access to the file - * - *
- * - * @throws IOException if an I/O error occurs - * - * @see Runtime#exec(String[], String[], File) - */ - public Process start() throws IOException { - // Must convert to array first -- a malicious user-supplied - // list might try to circumvent the security check. - String[] cmdarray = command.toArray(new String[command.size()]); - cmdarray = cmdarray.clone(); - - for (String arg : cmdarray) { - if (arg == null) { - throw new NullPointerException(); - } - } - // Throws IndexOutOfBoundsException if command is empty - String prog = cmdarray[0]; - - SecurityManager security = System.getSecurityManager(); - if (security != null) { - security.checkExec(prog); - } - - String dir = directory == null ? null : directory.toString(); - - for (int i = 1; i < cmdarray.length; i++) { - if (cmdarray[i].indexOf('\u0000') >= 0) { - throw new IOException("invalid null character in command"); - } - } - - try { - return ProcessImplForWin32.start( - username, - password, - cmdarray, - environment, - dir, - redirects, - redirectErrorStream); - } catch (IOException | IllegalArgumentException e) { - String exceptionInfo = ": " + e.getMessage(); - Throwable cause = e; - if ((e instanceof IOException) && security != null) { - // Can not disclose the fail reason for read-protected files. - try { - security.checkRead(prog); - } catch (SecurityException se) { - exceptionInfo = ""; - cause = se; - } - } - // It's much easier for us to create a high-quality error - // message than the low-level C code which found the problem. - throw new IOException( - "Cannot run program \"" + prog + "\"" - + (dir == null ? 
"" : " (in directory \"" + dir + "\")") - + exceptionInfo, - cause); - } - } - -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java deleted file mode 100644 index 85d9a2120c..0000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.common.utils.process; - -import com.sun.jna.platform.win32.Kernel32Util; - -import java.util.*; - -final class ProcessEnvironmentForWin32 extends HashMap { - - private static final long serialVersionUID = -8017839552603542824L; - - private static String validateName(String name) { - // An initial `=' indicates a magic Windows variable name -- OK - if (name.indexOf('=', 1) != -1 || - name.indexOf('\u0000') != -1) { - throw new IllegalArgumentException - ("Invalid environment variable name: \"" + name + "\""); - } - return name; - } - - private static String validateValue(String value) { - if (value.indexOf('\u0000') != -1) { - throw new IllegalArgumentException - ("Invalid environment variable value: \"" + value + "\""); - } - return value; - } - - private static String nonNullString(Object o) { - if (o == null) { - throw new NullPointerException(); - } - return (String) o; - } - - @Override - public String put(String key, String value) { - return super.put(validateName(key), validateValue(value)); - } - @Override - public String get(Object key) { - return super.get(nonNullString(key)); - } - @Override - public boolean containsKey(Object key) { - return super.containsKey(nonNullString(key)); - } - @Override - public boolean containsValue(Object value) { - return super.containsValue(nonNullString(value)); - } - @Override - public String remove(Object key) { - return super.remove(nonNullString(key)); - } - - private static class CheckedEntry implements Entry { - private final Entry e; - public CheckedEntry(Entry e) {this.e = e;} - @Override - public String getKey() { return e.getKey();} - @Override - public String getValue() { return e.getValue();} - @Override - public String setValue(String value) { - return e.setValue(validateValue(value)); - } - @Override - public String toString() { return getKey() + "=" + getValue();} - @Override - public boolean equals(Object o) {return e.equals(o);} - @Override - public int hashCode() 
{return e.hashCode();} - } - - private static class CheckedEntrySet extends AbstractSet> { - private final Set> s; - public CheckedEntrySet(Set> s) {this.s = s;} - @Override - public int size() {return s.size();} - @Override - public boolean isEmpty() {return s.isEmpty();} - @Override - public void clear() { s.clear();} - @Override - public Iterator> iterator() { - return new Iterator>() { - Iterator> i = s.iterator(); - @Override - public boolean hasNext() { return i.hasNext();} - @Override - public Entry next() { - return new CheckedEntry(i.next()); - } - @Override - public void remove() { i.remove();} - }; - } - private static Entry checkedEntry(Object o) { - @SuppressWarnings("unchecked") - Entry e = (Entry) o; - nonNullString(e.getKey()); - nonNullString(e.getValue()); - return e; - } - @Override - public boolean contains(Object o) {return s.contains(checkedEntry(o));} - @Override - public boolean remove(Object o) {return s.remove(checkedEntry(o));} - } - - private static class CheckedValues extends AbstractCollection { - private final Collection c; - public CheckedValues(Collection c) {this.c = c;} - @Override - public int size() {return c.size();} - @Override - public boolean isEmpty() {return c.isEmpty();} - @Override - public void clear() { c.clear();} - @Override - public Iterator iterator() {return c.iterator();} - @Override - public boolean contains(Object o) {return c.contains(nonNullString(o));} - @Override - public boolean remove(Object o) {return c.remove(nonNullString(o));} - } - - private static class CheckedKeySet extends AbstractSet { - private final Set s; - public CheckedKeySet(Set s) {this.s = s;} - @Override - public int size() {return s.size();} - @Override - public boolean isEmpty() {return s.isEmpty();} - @Override - public void clear() { s.clear();} - @Override - public Iterator iterator() {return s.iterator();} - @Override - public boolean contains(Object o) {return s.contains(nonNullString(o));} - @Override - public boolean 
remove(Object o) {return s.remove(nonNullString(o));} - } - @Override - public Set keySet() { - return new CheckedKeySet(super.keySet()); - } - @Override - public Collection values() { - return new CheckedValues(super.values()); - } - @Override - public Set> entrySet() { - return new CheckedEntrySet(super.entrySet()); - } - - private static final class NameComparator implements Comparator { - @Override - public int compare(String s1, String s2) { - // We can't use String.compareToIgnoreCase since it - // canonicalizes to lower case, while Windows - // canonicalizes to upper case! For example, "_" should - // sort *after* "Z", not before. - int n1 = s1.length(); - int n2 = s2.length(); - int min = Math.min(n1, n2); - for (int i = 0; i < min; i++) { - char c1 = s1.charAt(i); - char c2 = s2.charAt(i); - if (c1 != c2) { - c1 = Character.toUpperCase(c1); - c2 = Character.toUpperCase(c2); - if (c1 != c2) - // No overflow because of numeric promotion - { - return c1 - c2; - } - } - } - return n1 - n2; - } - } - - private static final class EntryComparator implements Comparator> { - @Override - public int compare(Entry e1, - Entry e2) { - return nameComparator.compare(e1.getKey(), e2.getKey()); - } - } - - // Allow `=' as first char in name, e.g. 
=C:=C:\DIR - static final int MIN_NAME_LENGTH = 1; - - private static final NameComparator nameComparator; - private static final EntryComparator entryComparator; - private static final ProcessEnvironmentForWin32 theEnvironment; - private static final Map theUnmodifiableEnvironment; - private static final Map theCaseInsensitiveEnvironment; - - static { - nameComparator = new NameComparator(); - entryComparator = new EntryComparator(); - theEnvironment = new ProcessEnvironmentForWin32(); - theUnmodifiableEnvironment = Collections.unmodifiableMap(theEnvironment); - - theEnvironment.putAll(environmentBlock()); - - theCaseInsensitiveEnvironment = new TreeMap<>(nameComparator); - theCaseInsensitiveEnvironment.putAll(theEnvironment); - } - - private ProcessEnvironmentForWin32() { - super(); - } - - private ProcessEnvironmentForWin32(int capacity) { - super(capacity); - } - - // Only for use by System.getenv(String) - static String getenv(String name) { - // The original implementation used a native call to _wgetenv, - // but it turns out that _wgetenv is only consistent with - // GetEnvironmentStringsW (for non-ASCII) if `wmain' is used - // instead of `main', even in a process created using - // CREATE_UNICODE_ENVIRONMENT. Instead we perform the - // case-insensitive comparison ourselves. At least this - // guarantees that System.getenv().get(String) will be - // consistent with System.getenv(String). 
- return theCaseInsensitiveEnvironment.get(name); - } - - // Only for use by System.getenv() - static Map getenv() { - return theUnmodifiableEnvironment; - } - - // Only for use by ProcessBuilder.environment() - @SuppressWarnings("unchecked") - static Map environment() { - return (Map) theEnvironment.clone(); - } - - // Only for use by ProcessBuilder.environment(String[] envp) - static Map emptyEnvironment(int capacity) { - return new ProcessEnvironmentForWin32(capacity); - } - - private static Map environmentBlock() { - return Kernel32Util.getEnvironmentVariables(); - } - - // Only for use by ProcessImpl.start() - String toEnvironmentBlock() { - // Sort Unicode-case-insensitively by name - List> list = new ArrayList<>(entrySet()); - Collections.sort(list, entryComparator); - - StringBuilder sb = new StringBuilder(size()*30); - int cmp = -1; - - // Some versions of MSVCRT.DLL require SystemRoot to be set. - // So, we make sure that it is always set, even if not provided - // by the caller. 
- final String SYSTEMROOT = "SystemRoot"; - - for (Entry e : list) { - String key = e.getKey(); - String value = e.getValue(); - if (cmp < 0 && (cmp = nameComparator.compare(key, SYSTEMROOT)) > 0) { - // Not set, so add it here - addToEnvIfSet(sb, SYSTEMROOT); - } - addToEnv(sb, key, value); - } - if (cmp < 0) { - // Got to end of list and still not found - addToEnvIfSet(sb, SYSTEMROOT); - } - if (sb.length() == 0) { - // Environment was empty and SystemRoot not set in parent - sb.append('\u0000'); - } - // Block is double NUL terminated - sb.append('\u0000'); - return sb.toString(); - } - - // add the environment variable to the child, if it exists in parent - private static void addToEnvIfSet(StringBuilder sb, String name) { - String s = getenv(name); - if (s != null) { - addToEnv(sb, name, s); - } - } - - private static void addToEnv(StringBuilder sb, String name, String val) { - sb.append(name).append('=').append(val).append('\u0000'); - } - - static String toEnvironmentBlock(Map map) { - return map == null ? null : ((ProcessEnvironmentForWin32)map).toEnvironmentBlock(); - } -} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java deleted file mode 100644 index 1efde52e62..0000000000 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32.java +++ /dev/null @@ -1,818 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.common.utils.process; - -import com.sun.jna.Pointer; -import com.sun.jna.platform.win32.*; -import com.sun.jna.ptr.IntByReference; -import java.lang.reflect.Field; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import sun.security.action.GetPropertyAction; - -import java.io.*; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.Locale; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static com.sun.jna.platform.win32.WinBase.INVALID_HANDLE_VALUE; -import static com.sun.jna.platform.win32.WinBase.STILL_ACTIVE; -import static java.util.Objects.requireNonNull; - -public class ProcessImplForWin32 extends Process { - - private static final Field FD_HANDLE; - - static { - if (!OSUtils.isWindows()) { - throw new RuntimeException("ProcessImplForWin32 can be only initialized in " + - "Windows environment, but current OS is " + OSUtils.getOSName()); - } - - try { - FD_HANDLE = requireNonNull(FileDescriptor.class.getDeclaredField("handle")); - FD_HANDLE.setAccessible(true); - } catch (NoSuchFieldException e) { - throw new RuntimeException(e); - } - } - - private static final int PIPE_SIZE = 4096 + 24; - - private static final int HANDLE_STORAGE_SIZE = 6; - - private static final int OFFSET_READ = 0; - - private static final int OFFSET_WRITE = 1; - - private static final WinNT.HANDLE JAVA_INVALID_HANDLE_VALUE = new WinNT.HANDLE(Pointer.createConstant(-1)); - - private 
static void setHandle(FileDescriptor obj, long handle) { - try { - FD_HANDLE.set(obj, handle); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - } - - private static long getHandle(FileDescriptor obj) { - try { - return (Long) FD_HANDLE.get(obj); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - } - - /** - * Open a file for writing. If {@code append} is {@code true} then the file - * is opened for atomic append directly and a FileOutputStream constructed - * with the resulting handle. This is because a FileOutputStream created - * to append to a file does not open the file in a manner that guarantees - * that writes by the child process will be atomic. - */ - private static FileOutputStream newFileOutputStream(File f, boolean append) - throws IOException - { - if (append) { - String path = f.getPath(); - SecurityManager sm = System.getSecurityManager(); - if (sm != null) { - sm.checkWrite(path); - } - long handle = openForAtomicAppend(path); - final FileDescriptor fd = new FileDescriptor(); - setHandle(fd, handle); - return AccessController.doPrivileged( - new PrivilegedAction() { - @Override - public FileOutputStream run() { - return new FileOutputStream(fd); - } - } - ); - } else { - return new FileOutputStream(f); - } - } - - // System-dependent portion of ProcessBuilderForWindows.start() - static Process start(String username, - String password, - String[] cmdarray, - java.util.Map environment, - String dir, - ProcessBuilderForWin32.Redirect[] redirects, - boolean redirectErrorStream) - throws IOException - { - String envblock = ProcessEnvironmentForWin32.toEnvironmentBlock(environment); - - FileInputStream f0 = null; - FileOutputStream f1 = null; - FileOutputStream f2 = null; - - try { - long[] stdHandles; - if (redirects == null) { - stdHandles = new long[] { -1L, -1L, -1L }; - } else { - stdHandles = new long[3]; - - if (redirects[0] == ProcessBuilderForWin32.Redirect.PIPE) { - stdHandles[0] = -1L; - } 
else if (redirects[0] == ProcessBuilderForWin32.Redirect.INHERIT) { - stdHandles[0] = getHandle(FileDescriptor.in); - } else { - f0 = new FileInputStream(redirects[0].file()); - stdHandles[0] = getHandle(f0.getFD()); - } - - if (redirects[1] == ProcessBuilderForWin32.Redirect.PIPE) { - stdHandles[1] = -1L; - } else if (redirects[1] == ProcessBuilderForWin32.Redirect.INHERIT) { - stdHandles[1] = getHandle(FileDescriptor.out); - } else { - f1 = newFileOutputStream(redirects[1].file(), - redirects[1].append()); - stdHandles[1] = getHandle(f1.getFD()); - } - - if (redirects[2] == ProcessBuilderForWin32.Redirect.PIPE) { - stdHandles[2] = -1L; - } else if (redirects[2] == ProcessBuilderForWin32.Redirect.INHERIT) { - stdHandles[2] = getHandle(FileDescriptor.err); - } else { - f2 = newFileOutputStream(redirects[2].file(), - redirects[2].append()); - stdHandles[2] = getHandle(f2.getFD()); - } - } - - return new ProcessImplForWin32(username, password, cmdarray, envblock, dir, stdHandles, redirectErrorStream); - } finally { - // In theory, close() can throw IOException - // (although it is rather unlikely to happen here) - try { if (f0 != null) { - f0.close(); - } - } - finally { - try { if (f1 != null) { - f1.close(); - } - } - finally { if (f2 != null) { - f2.close(); - } - } - } - } - - } - - private static class LazyPattern { - // Escape-support version: - // "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)" - private static final Pattern PATTERN = - Pattern.compile("[^\\s\"]+|\"[^\"]*\""); - } - - /* Parses the command string parameter into the executable name and - * program arguments. - * - * The command string is broken into tokens. The token separator is a space - * or quota character. The space inside quotation is not a token separator. - * There are no escape sequences. 
- */ - private static String[] getTokensFromCommand(String command) { - ArrayList matchList = new ArrayList<>(8); - Matcher regexMatcher = ProcessImplForWin32.LazyPattern.PATTERN.matcher(command); - while (regexMatcher.find()) { - matchList.add(regexMatcher.group()); - } - return matchList.toArray(new String[matchList.size()]); - } - - private static final int VERIFICATION_CMD_BAT = 0; - private static final int VERIFICATION_WIN32 = 1; - private static final int VERIFICATION_WIN32_SAFE = 2; // inside quotes not allowed - private static final int VERIFICATION_LEGACY = 3; - // See Command shell overview for documentation of special characters. - // https://docs.microsoft.com/en-us/previous-versions/windows/it-pro/windows-xp/bb490954(v=technet.10) - private static final char[][] ESCAPE_VERIFICATION = { - // We guarantee the only command file execution for implicit [cmd.exe] run. - // http://technet.microsoft.com/en-us/library/bb490954.aspx - {' ', '\t', '<', '>', '&', '|', '^'}, - {' ', '\t', '<', '>'}, - {' ', '\t', '<', '>'}, - {' ', '\t'} - }; - - private static String createCommandLine(int verificationType, - final String executablePath, - final String[] cmd) - { - StringBuilder cmdbuf = new StringBuilder(80); - - cmdbuf.append(executablePath); - - for (int i = 1; i < cmd.length; ++i) { - cmdbuf.append(' '); - String s = cmd[i]; - if (needsEscaping(verificationType, s)) { - cmdbuf.append('"'); - - if (verificationType == VERIFICATION_WIN32_SAFE) { - // Insert the argument, adding '\' to quote any interior quotes - int length = s.length(); - for (int j = 0; j < length; j++) { - char c = s.charAt(j); - if (c == DOUBLEQUOTE) { - int count = countLeadingBackslash(verificationType, s, j); - while (count-- > 0) { - cmdbuf.append(BACKSLASH); // double the number of backslashes - } - cmdbuf.append(BACKSLASH); // backslash to quote the quote - } - cmdbuf.append(c); - } - } else { - cmdbuf.append(s); - } - // The code protects the [java.exe] and console command line - // 
parser, that interprets the [\"] combination as an escape - // sequence for the ["] char. - // http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - // - // If the argument is an FS path, doubling of the tail [\] - // char is not a problem for non-console applications. - // - // The [\"] sequence is not an escape sequence for the [cmd.exe] - // command line parser. The case of the [""] tail escape - // sequence could not be realized due to the argument validation - // procedure. - int count = countLeadingBackslash(verificationType, s, s.length()); - while (count-- > 0) { - cmdbuf.append(BACKSLASH); // double the number of backslashes - } - cmdbuf.append('"'); - } else { - cmdbuf.append(s); - } - } - return cmdbuf.toString(); - } - - /** - * Return the argument without quotes (1st and last) if present, else the arg. - * @param str a string - * @return the string without 1st and last quotes - */ - private static String unQuote(String str) { - int len = str.length(); - return (len >= 2 && str.charAt(0) == DOUBLEQUOTE && str.charAt(len - 1) == DOUBLEQUOTE) - ? str.substring(1, len - 1) - : str; - } - - private static boolean needsEscaping(int verificationType, String arg) { - // Switch off MS heuristic for internal ["]. - // Please, use the explicit [cmd.exe] call - // if you need the internal ["]. - // Example: "cmd.exe", "/C", "Extended_MS_Syntax" - - // For [.exe] or [.com] file the unpaired/internal ["] - // in the argument is not a problem. 
- String unquotedArg = unQuote(arg); - boolean argIsQuoted = !arg.equals(unquotedArg); - boolean embeddedQuote = unquotedArg.indexOf(DOUBLEQUOTE) >= 0; - - switch (verificationType) { - case VERIFICATION_CMD_BAT: - if (embeddedQuote) { - throw new IllegalArgumentException("Argument has embedded quote, " + - "use the explicit CMD.EXE call."); - } - break; // break determine whether to quote - case VERIFICATION_WIN32_SAFE: - if (argIsQuoted && embeddedQuote) { - throw new IllegalArgumentException("Malformed argument has embedded quote: " - + unquotedArg); - } - break; - default: - break; - } - - if (!argIsQuoted) { - char[] testEscape = ESCAPE_VERIFICATION[verificationType]; - for (int i = 0; i < testEscape.length; ++i) { - if (arg.indexOf(testEscape[i]) >= 0) { - return true; - } - } - } - return false; - } - - private static String getExecutablePath(String path) - throws IOException - { - String name = unQuote(path); - if (name.indexOf(DOUBLEQUOTE) >= 0) { - throw new IllegalArgumentException("Executable name has embedded quote, " + - "split the arguments: " + name); - } - // Win32 CreateProcess requires path to be normalized - File fileToRun = new File(name); - - // From the [CreateProcess] function documentation: - // - // "If the file name does not contain an extension, .exe is appended. - // Therefore, if the file name extension is .com, this parameter - // must include the .com extension. If the file name ends in - // a period (.) with no extension, or if the file name contains a path, - // .exe is not appended." - // - // "If the file name !does not contain a directory path!, - // the system searches for the executable file in the following - // sequence:..." - // - // In practice ANY non-existent path is extended by [.exe] extension - // in the [CreateProcess] function with the only exception: - // the path ends by (.) 
- - return fileToRun.getPath(); - } - - /** - * An executable is any program that is an EXE or does not have an extension - * and the Windows createProcess will be looking for .exe. - * The comparison is case insensitive based on the name. - * @param executablePath the executable file - * @return true if the path ends in .exe or does not have an extension. - */ - private boolean isExe(String executablePath) { - File file = new File(executablePath); - String upName = file.getName().toUpperCase(Locale.ROOT); - return (upName.endsWith(".EXE") || upName.indexOf('.') < 0); - } - - // Old version that can be bypassed - private boolean isShellFile(String executablePath) { - String upPath = executablePath.toUpperCase(); - return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT")); - } - - private String quoteString(String arg) { - StringBuilder argbuf = new StringBuilder(arg.length() + 2); - return argbuf.append('"').append(arg).append('"').toString(); - } - - // Count backslashes before start index of string. 
- // .bat files don't include backslashes as part of the quote - private static int countLeadingBackslash(int verificationType, - CharSequence input, int start) { - if (verificationType == VERIFICATION_CMD_BAT) { - return 0; - } - int j; - for (j = start - 1; j >= 0 && input.charAt(j) == BACKSLASH; j--) { - // just scanning backwards - } - return (start - 1) - j; // number of BACKSLASHES - } - - private static final char DOUBLEQUOTE = '\"'; - private static final char BACKSLASH = '\\'; - - private WinNT.HANDLE handle; - private OutputStream stdinStream; - private InputStream stdoutStream; - private InputStream stderrStream; - - private ProcessImplForWin32( - String username, - String password, - String[] cmd, - final String envblock, - final String path, - final long[] stdHandles, - final boolean redirectErrorStream) - throws IOException - { - String cmdstr; - final SecurityManager security = System.getSecurityManager(); - GetPropertyAction action = new GetPropertyAction("jdk.lang.Process.allowAmbiguousCommands", - (security == null) ? "true" : "false"); - final boolean allowAmbiguousCommands = !"false".equalsIgnoreCase(action.run()); - if (allowAmbiguousCommands && security == null) { - // Legacy mode. - - // Normalize path if possible. - String executablePath = new File(cmd[0]).getPath(); - - // No worry about internal, unpaired ["], and redirection/piping. - if (needsEscaping(VERIFICATION_LEGACY, executablePath) ) { - executablePath = quoteString(executablePath); - } - - cmdstr = createCommandLine( - //legacy mode doesn't worry about extended verification - VERIFICATION_LEGACY, - executablePath, - cmd); - } else { - String executablePath; - try { - executablePath = getExecutablePath(cmd[0]); - } catch (IllegalArgumentException e) { - // Workaround for the calls like - // Runtime.getRuntime().exec("\"C:\\Program Files\\foo\" bar") - - // No chance to avoid CMD/BAT injection, except to do the work - // right from the beginning. 
Otherwise we have too many corner - // cases from - // Runtime.getRuntime().exec(String[] cmd [, ...]) - // calls with internal ["] and escape sequences. - - // Restore original command line. - StringBuilder join = new StringBuilder(); - // terminal space in command line is ok - for (String s : cmd) { - join.append(s).append(' '); - } - - // Parse the command line again. - cmd = getTokensFromCommand(join.toString()); - executablePath = getExecutablePath(cmd[0]); - - // Check new executable name once more - if (security != null) { - security.checkExec(executablePath); - } - } - - // Quotation protects from interpretation of the [path] argument as - // start of longer path with spaces. Quotation has no influence to - // [.exe] extension heuristic. - boolean isShell = allowAmbiguousCommands ? isShellFile(executablePath) - : !isExe(executablePath); - cmdstr = createCommandLine( - // We need the extended verification procedures - isShell ? VERIFICATION_CMD_BAT - : (allowAmbiguousCommands ? VERIFICATION_WIN32 : VERIFICATION_WIN32_SAFE), - quoteString(executablePath), - cmd); - } - - handle = create(username, password, cmdstr, envblock, path, stdHandles, redirectErrorStream); - - AccessController.doPrivileged( - new PrivilegedAction() { - @Override - public Void run() { - if (stdHandles[0] == -1L) { - stdinStream = ProcessBuilderForWin32.NullOutputStream.INSTANCE; - } else { - FileDescriptor stdinFd = new FileDescriptor(); - setHandle(stdinFd, stdHandles[0]); - stdinStream = new BufferedOutputStream( - new FileOutputStream(stdinFd)); - } - - if (stdHandles[1] == -1L) { - stdoutStream = ProcessBuilderForWin32.NullInputStream.INSTANCE; - } else { - FileDescriptor stdoutFd = new FileDescriptor(); - setHandle(stdoutFd, stdHandles[1]); - stdoutStream = new BufferedInputStream( - new FileInputStream(stdoutFd)); - } - - if (stdHandles[2] == -1L) { - stderrStream = ProcessBuilderForWin32.NullInputStream.INSTANCE; - } else { - FileDescriptor stderrFd = new FileDescriptor(); - 
setHandle(stderrFd, stdHandles[2]); - stderrStream = new FileInputStream(stderrFd); - } - - return null; }}); - } - - @Override - public OutputStream getOutputStream() { - return stdinStream; - } - - @Override - public InputStream getInputStream() { - return stdoutStream; - } - - @Override - public InputStream getErrorStream() { - return stderrStream; - } - - @Override - protected void finalize() { - closeHandle(handle); - } - - @Override - public int exitValue() { - int exitCode = getExitCodeProcess(handle); - if (exitCode == STILL_ACTIVE) { - throw new IllegalThreadStateException("process has not exited"); - } - return exitCode; - } - - @Override - public int waitFor() throws InterruptedException { - waitForInterruptibly(handle); - if (Thread.interrupted()) { - throw new InterruptedException(); - } - return exitValue(); - } - - @Override - public boolean waitFor(long timeout, TimeUnit unit) - throws InterruptedException - { - if (getExitCodeProcess(handle) != STILL_ACTIVE) { - return true; - } - if (timeout <= 0) { - return false; - } - - long remainingNanos = unit.toNanos(timeout); - long deadline = System.nanoTime() + remainingNanos ; - - do { - // Round up to next millisecond - long msTimeout = TimeUnit.NANOSECONDS.toMillis(remainingNanos + 999_999L); - waitForTimeoutInterruptibly(handle, msTimeout); - if (Thread.interrupted()) { - throw new InterruptedException(); - } - if (getExitCodeProcess(handle) != STILL_ACTIVE) { - return true; - } - remainingNanos = deadline - System.nanoTime(); - } while (remainingNanos > 0); - - return (getExitCodeProcess(handle) != STILL_ACTIVE); - } - - @Override - public void destroy() { terminateProcess(handle); } - - @Override - public Process destroyForcibly() { - destroy(); - return this; - } - @Override - public boolean isAlive() { - return isProcessAlive(handle); - } - - private static boolean initHolder(WinNT.HANDLEByReference pjhandles, - WinNT.HANDLEByReference[] pipe, - int offset, - WinNT.HANDLEByReference phStd) { - if 
(!pjhandles.getValue().equals(JAVA_INVALID_HANDLE_VALUE)) { - phStd.setValue(pjhandles.getValue()); - pjhandles.setValue(JAVA_INVALID_HANDLE_VALUE); - } else { - if (!Kernel32.INSTANCE.CreatePipe(pipe[0], pipe[1], null, PIPE_SIZE)) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } else { - WinNT.HANDLE thisProcessEnd = offset == OFFSET_READ ? pipe[1].getValue() : pipe[0].getValue(); - phStd.setValue(pipe[offset].getValue()); - pjhandles.setValue(thisProcessEnd); - } - } - Kernel32.INSTANCE.SetHandleInformation(phStd.getValue(), WinBase.HANDLE_FLAG_INHERIT, WinBase.HANDLE_FLAG_INHERIT); - return true; - } - - private static void releaseHolder(boolean complete, WinNT.HANDLEByReference[] pipe, int offset) { - closeHandle(pipe[offset].getValue()); - if (complete) { - closeHandle(pipe[offset == OFFSET_READ ? OFFSET_WRITE : OFFSET_READ].getValue()); - } - } - - private static void prepareIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) { - for(int i = 0; i < HANDLE_STORAGE_SIZE; ++i) { - WinNT.HANDLE hstd = stdIOE[i]; - if (!WinBase.INVALID_HANDLE_VALUE.equals(hstd)) { - inherit[i] = Boolean.TRUE; - Kernel32.INSTANCE.SetHandleInformation(hstd, WinBase.HANDLE_FLAG_INHERIT, 0); - } - } - } - - private static void restoreIOEHandleState(WinNT.HANDLE[] stdIOE, Boolean[] inherit) { - for (int i = HANDLE_STORAGE_SIZE - 1; i >= 0; --i) { - if (!WinBase.INVALID_HANDLE_VALUE.equals(stdIOE[i])) { - Kernel32.INSTANCE.SetHandleInformation(stdIOE[i], WinBase.HANDLE_FLAG_INHERIT, Boolean.TRUE.equals(inherit[i]) ? 
WinBase.HANDLE_FLAG_INHERIT : 0); - } - } - } - - private static WinNT.HANDLE processCreate(String username, - String password, - String cmd, - final String envblock, - final String path, - final WinNT.HANDLEByReference[] stdHandles, - final boolean redirectErrorStream) { - WinNT.HANDLE ret = new WinNT.HANDLE(Pointer.createConstant(0)); - - WinNT.HANDLE[] stdIOE = new WinNT.HANDLE[] { - WinBase.INVALID_HANDLE_VALUE, WinBase.INVALID_HANDLE_VALUE, WinBase.INVALID_HANDLE_VALUE, - stdHandles[0].getValue(), stdHandles[1].getValue(), stdHandles[2].getValue() - }; - stdIOE[0] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_INPUT_HANDLE); - stdIOE[1] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_OUTPUT_HANDLE); - stdIOE[2] = Kernel32.INSTANCE.GetStdHandle(Wincon.STD_ERROR_HANDLE); - - Boolean[] inherit = new Boolean[] { - Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, - Boolean.FALSE, Boolean.FALSE, Boolean.FALSE - }; - - prepareIOEHandleState(stdIOE, inherit); - - // input - WinNT.HANDLEByReference hStdInput = new WinNT.HANDLEByReference(); - WinNT.HANDLEByReference[] pipeIn = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; - - // output - WinNT.HANDLEByReference hStdOutput = new WinNT.HANDLEByReference(); - WinNT.HANDLEByReference[] pipeOut = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; - - // error - WinNT.HANDLEByReference hStdError = new WinNT.HANDLEByReference(); - WinNT.HANDLEByReference[] pipeError = new WinNT.HANDLEByReference[] { - new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE), new WinNT.HANDLEByReference(WinBase.INVALID_HANDLE_VALUE) }; - - boolean success; - if (initHolder(stdHandles[0], pipeIn, OFFSET_READ, hStdInput)) { - if (initHolder(stdHandles[1], pipeOut, OFFSET_WRITE, hStdOutput)) { - WinBase.STARTUPINFO si = new WinBase.STARTUPINFO(); - 
si.hStdInput = hStdInput.getValue(); - si.hStdOutput = hStdOutput.getValue(); - - if (redirectErrorStream) { - si.hStdError = si.hStdOutput; - stdHandles[2].setValue(JAVA_INVALID_HANDLE_VALUE); - success = true; - } else { - success = initHolder(stdHandles[2], pipeError, OFFSET_WRITE, hStdError); - si.hStdError = hStdError.getValue(); - } - - if (success) { - WTypes.LPSTR lpEnvironment = envblock == null ? new WTypes.LPSTR() : new WTypes.LPSTR(envblock); - WinBase.PROCESS_INFORMATION pi = new WinBase.PROCESS_INFORMATION(); - si.dwFlags = WinBase.STARTF_USESTDHANDLES; - if (!Advapi32.INSTANCE.CreateProcessWithLogonW( - username - , null - , password - , Advapi32.LOGON_WITH_PROFILE - , null - , cmd - , WinBase.CREATE_NO_WINDOW - , lpEnvironment.getPointer() - , path - , si - , pi)) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } else { - closeHandle(pi.hThread); - ret = pi.hProcess; - } - } - releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeError, OFFSET_WRITE); - releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeOut, OFFSET_WRITE); - } - releaseHolder(ret.getPointer().equals(Pointer.createConstant(0)), pipeIn, OFFSET_READ); - } - restoreIOEHandleState(stdIOE, inherit); - return ret; - } - - private static synchronized WinNT.HANDLE create(String username, - String password, - String cmd, - final String envblock, - final String path, - final long[] stdHandles, - final boolean redirectErrorStream) { - WinNT.HANDLE ret = new WinNT.HANDLE(Pointer.createConstant(0)); - WinNT.HANDLEByReference[] handles = new WinNT.HANDLEByReference[stdHandles.length]; - for (int i = 0; i < stdHandles.length; i++) { - handles[i] = new WinNT.HANDLEByReference(new WinNT.HANDLE(Pointer.createConstant(stdHandles[i]))); - } - - if (cmd != null && username != null && password != null) { - ret = processCreate(username, password, cmd, envblock, path, handles, redirectErrorStream); - } - - for (int i = 0; i < stdHandles.length; i++) { - 
stdHandles[i] = handles[i].getPointer().getLong(0); - } - - return ret; - } - - private static int getExitCodeProcess(WinNT.HANDLE handle) { - IntByReference exitStatus = new IntByReference(); - if (!Kernel32.INSTANCE.GetExitCodeProcess(handle, exitStatus)) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } - return exitStatus.getValue(); - } - - private static void terminateProcess(WinNT.HANDLE handle) { - Kernel32.INSTANCE.TerminateProcess(handle, 1); - } - - private static boolean isProcessAlive(WinNT.HANDLE handle) { - IntByReference exitStatus = new IntByReference(); - Kernel32.INSTANCE.GetExitCodeProcess(handle, exitStatus); - return exitStatus.getValue() == STILL_ACTIVE; - } - - private static void closeHandle(WinNT.HANDLE handle) { - if (!handle.equals(INVALID_HANDLE_VALUE)) { - Kernel32Util.closeHandle(handle); - } - } - - /** - * Opens a file for atomic append. The file is created if it doesn't - * already exist. - * - * @param path the file to open or create - * @return the native HANDLE - */ - private static long openForAtomicAppend(String path) throws IOException { - int access = WinNT.GENERIC_READ | WinNT.GENERIC_WRITE; - int sharing = WinNT.FILE_SHARE_READ | WinNT.FILE_SHARE_WRITE; - int disposition = WinNT.OPEN_ALWAYS; - int flagsAndAttributes = WinNT.FILE_ATTRIBUTE_NORMAL; - if (path == null || path.isEmpty()) { - return -1; - } else { - WinNT.HANDLE handle = Kernel32.INSTANCE.CreateFile(path, access, sharing, null, disposition, flagsAndAttributes, null); - if (handle == WinBase.INVALID_HANDLE_VALUE) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } - return handle.getPointer().getLong(0); - } - } - - private static void waitForInterruptibly(WinNT.HANDLE handle) { - int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, WinBase.INFINITE); - if (result == WinBase.WAIT_FAILED) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } - } - - private static void 
waitForTimeoutInterruptibly(WinNT.HANDLE handle, long timeout) { - int result = Kernel32.INSTANCE.WaitForMultipleObjects(1, new WinNT.HANDLE[]{handle}, false, (int) timeout); - if (result == WinBase.WAIT_FAILED) { - throw new Win32Exception(Kernel32.INSTANCE.GetLastError()); - } - } - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataxParametersTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataxParametersTest.java new file mode 100644 index 0000000000..d6e2f69882 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataxParametersTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.task; + +import org.apache.dolphinscheduler.common.task.datax.DataxParameters; + +import org.junit.Assert; +import org.junit.Test; + +public class DataxParametersTest { + + /** + * jvm parameters + */ + public static final String JVM_EVN = " --jvm=\"-Xms%sG -Xmx%sG\" "; + + @Test + public void testLoadJvmEnv() { + + DataxParameters dataxParameters = new DataxParameters(); + dataxParameters.setXms(0); + dataxParameters.setXmx(-100); + + String actual = loadJvmEnvTest(dataxParameters); + + String except = " --jvm=\"-Xms1G -Xmx1G\" "; + Assert.assertEquals(except,actual); + + dataxParameters.setXms(13); + dataxParameters.setXmx(14); + actual = loadJvmEnvTest(dataxParameters); + except = " --jvm=\"-Xms13G -Xmx14G\" "; + Assert.assertEquals(except,actual); + + } + + @Test + public void testToString() { + + DataxParameters dataxParameters = new DataxParameters(); + dataxParameters.setCustomConfig(0); + dataxParameters.setXms(0); + dataxParameters.setXmx(-100); + dataxParameters.setDataSource(1); + dataxParameters.setDataTarget(1); + dataxParameters.setDsType("MYSQL"); + dataxParameters.setDtType("MYSQL"); + dataxParameters.setJobSpeedByte(1); + dataxParameters.setJobSpeedRecord(1); + dataxParameters.setJson("json"); + + String expected = "DataxParameters" + + "{" + + "customConfig=0, " + + "json='json', " + + "dsType='MYSQL', " + + "dataSource=1, " + + "dtType='MYSQL', " + + "dataTarget=1, " + + "sql='null', " + + "targetTable='null', " + + "preStatements=null, " + + "postStatements=null, " + + "jobSpeedByte=1, " + + "jobSpeedRecord=1, " + + "xms=0, " + + "xmx=-100" + + "}"; + + Assert.assertEquals(expected,dataxParameters.toString()); + } + + public String loadJvmEnvTest(DataxParameters dataXParameters) { + int xms = dataXParameters.getXms() < 1 ? 1 : dataXParameters.getXms(); + int xmx = dataXParameters.getXmx() < 1 ? 
1 : dataXParameters.getXmx(); + return String.format(JVM_EVN, xms, xmx); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadUtilsTest.java deleted file mode 100644 index 2c76f40c0b..0000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadUtilsTest.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.common.threadutils; - -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Calendar; -import java.util.concurrent.*; - -import static org.junit.Assert.*; - - -public class ThreadUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(ThreadUtilsTest.class); - /** - * create a naming thread - */ - @Test - public void testNewDaemonFixedThreadExecutor() { - // create core size and max size are all 3 - ExecutorService testExec = ThreadUtils.newDaemonFixedThreadExecutor("test-exec-thread",10); - - for (int i = 0; i < 19; i++) { - final int index = i; - testExec.submit(() -> { - System.out.println("do some work index " + index); - }); - } - assertFalse(testExec.isShutdown()); - testExec.shutdownNow(); - assertTrue(testExec.isShutdown()); - - } - - /** - * test schedulerThreadExecutor as for print time in scheduler - * default check thread is 1 - */ - @Test - public void testNewDaemonScheduleThreadExecutor() { - - ScheduledExecutorService scheduleService = ThreadUtils.newDaemonThreadScheduledExecutor("scheduler-thread", 1); - Calendar start = Calendar.getInstance(); - Calendar globalTimer = Calendar.getInstance(); - globalTimer.set(2019, Calendar.DECEMBER, 1, 0, 0, 0); - // current - Calendar end = Calendar.getInstance(); - end.set(2019, Calendar.DECEMBER, 1, 0, 0, 3); - Runnable schedulerTask = new Runnable() { - @Override - public void run() { - start.set(2019, Calendar.DECEMBER, 1, 0, 0, 0); - int index = 0; - // send heart beat work - while (start.getTime().getTime() <= end.getTime().getTime()) { - System.out.println("worker here"); - System.out.println(index ++); - start.add(Calendar.SECOND, 1); - globalTimer.add(Calendar.SECOND, 1); - } - System.out.println("time is " 
+ System.currentTimeMillis()); - } - }; - scheduleService.scheduleAtFixedRate(schedulerTask, 2, 10, TimeUnit.SECONDS); - assertFalse(scheduleService.isShutdown()); - try { - Thread.sleep(60000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - scheduleService.shutdownNow(); - assertTrue(scheduleService.isShutdown()); - } - - /** - * test stopper is working normal - */ - @Test - public void testStopper() { - assertTrue(Stopper.isRunning()); - Stopper.stop(); - assertTrue(Stopper.isStopped()); - } - - /** - * test threadPoolExecutors with 3 workers and current each 5 tasks - * @throws InterruptedException - */ - @Test - public void testThreadInfo() throws InterruptedException { - ThreadPoolExecutors workers = ThreadPoolExecutors.getInstance("worker", 3); - for (int i = 0; i < 5; ++i ) { - int index = i; - workers.execute(() -> { - for (int j = 0; j < 10; ++j) { - try { - Thread.sleep(1000); - System.out.printf("worker %d is doing the task", index); - System.out.println(); - workers.printStatus(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - }); - workers.submit(() -> { - for (int j = 0; j < 10; ++j) { - try { - Thread.sleep(1000); - System.out.printf("worker_2 %d is doing the task", index); - System.out.println(); - workers.printStatus(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - }); - } - Thread.sleep(50001); - workers.shutdown(); - } - - /** - * test a single daemon thread pool - */ - @Test - public void testNewDaemonSingleThreadExecutor() { - ExecutorService threadTest = ThreadUtils.newDaemonSingleThreadExecutor("thread_test"); - threadTest.execute(() -> { - for (int i = 0; i < 100; ++i) { - System.out.println("daemon working "); - } - - }); - assertFalse(threadTest.isShutdown()); - threadTest.shutdownNow(); - assertTrue(threadTest.isShutdown()); - } - - @Test - public void testNewDaemonCachedThreadPool() { - - ThreadPoolExecutor threadPoolExecutor = 
ThreadUtils.newDaemonCachedThreadPool("threadTest-"); - Thread thread1 = threadPoolExecutor.getThreadFactory().newThread(() -> { - for (int i = 0; i < 10; ++i) { - System.out.println("this task is with index " + i ); - } - }); - assertTrue(thread1.getName().startsWith("threadTest-")); - assertFalse(threadPoolExecutor.isShutdown()); - threadPoolExecutor.shutdown(); - assertTrue(threadPoolExecutor.isShutdown()); - } - - @Test - public void testNewDaemonCachedThreadPoolWithThreadNumber() { - ThreadPoolExecutor threadPoolExecutor = ThreadUtils.newDaemonCachedThreadPool("threadTest--", 3, 10); - for (int i = 0; i < 10; ++ i) { - threadPoolExecutor.getThreadFactory().newThread(() -> { - assertEquals(3, threadPoolExecutor.getActiveCount()); - System.out.println("this task is first work to do"); - }); - } - assertFalse(threadPoolExecutor.isShutdown()); - threadPoolExecutor.shutdown(); - assertTrue(threadPoolExecutor.isShutdown()); - } - - - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java index b2a255b2e2..a4a39ae252 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/FileUtilsTest.java @@ -14,16 +14,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.common.utils; +import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; + import org.apache.dolphinscheduler.common.Constants; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import static org.apache.dolphinscheduler.common.Constants.YYYYMMDDHHMMSS; @RunWith(PowerMockRunner.class) @PrepareForTest(DateUtils.class) @@ -60,9 +67,9 @@ public class FileUtilsTest { } @Test - public void testCreateWorkDirAndUserIfAbsent() { + public void testCreateWorkDirIfAbsent() { try { - FileUtils.createWorkDirAndUserIfAbsent("/tmp/createWorkDirAndUserIfAbsent", "test123"); + FileUtils.createWorkDirIfAbsent("/tmp/createWorkDirAndUserIfAbsent"); Assert.assertTrue(true); } catch (Exception e) { Assert.assertTrue(false); @@ -81,4 +88,15 @@ public class FileUtilsTest { } } + @Test + public void testWriteContent2File() throws FileNotFoundException { + // file exists, fmt is invalid + String filePath = "test/testFile.txt"; + String content = "正正正faffdasfasdfas"; + FileUtils.writeContent2File(content, filePath); + + String fileContent = FileUtils.readFile2Str(new FileInputStream(new File(filePath))); + Assert.assertEquals(content, fileContent); + } + } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java index e273496f56..af12d5a625 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java @@ -14,13 +14,13 @@ * See the License for 
the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.common.utils; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.JsonNodeFactory; -import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.dolphinscheduler.common.enums.DataType; +import org.apache.dolphinscheduler.common.enums.Direct; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.process.Property; import java.util.ArrayList; import java.util.HashMap; @@ -28,13 +28,15 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.apache.dolphinscheduler.common.enums.DataType; -import org.apache.dolphinscheduler.common.enums.Direct; -import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.process.Property; import org.junit.Assert; import org.junit.Test; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.fasterxml.jackson.databind.node.ObjectNode; + public class JSONUtilsTest { @Test @@ -108,9 +110,8 @@ public class JSONUtilsTest { Assert.assertEquals(Direct.IN, direct); } - @Test - public void String2MapTest() { + public void string2MapTest() { String str = list2String(); List maps = JSONUtils.toList(str, @@ -145,6 +146,18 @@ public class JSONUtilsTest { Assert.assertNull(JSONUtils.parseObject("foo", String.class)); } + @Test + public void testJsonByteArray() { + String str = "foo"; + byte[] serializeByte = JSONUtils.toJsonByteArray(str); + String deserialize = JSONUtils.parseObject(serializeByte, String.class); + Assert.assertEquals(str, deserialize); + str = 
null; + serializeByte = JSONUtils.toJsonByteArray(str); + deserialize = JSONUtils.parseObject(serializeByte, String.class); + Assert.assertNull(deserialize); + } + @Test public void testToList() { Assert.assertEquals(new ArrayList(), diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java index 8627226b8b..796a5faa45 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/ParameterUtilsTest.java @@ -14,25 +14,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.common.utils; -import org.apache.commons.lang.time.DateUtils; +import static org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils.replacePlaceholders; + +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; + +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.*; - -import static org.apache.dolphinscheduler.common.Constants.PARAMETER_FORMAT_TIME; -import static org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils.replacePlaceholders; - - public class ParameterUtilsTest { public static final Logger logger = 
LoggerFactory.getLogger(ParameterUtilsTest.class); @@ -40,13 +45,13 @@ public class ParameterUtilsTest { * Test convertParameterPlaceholders */ @Test - public void testConvertParameterPlaceholders() throws Exception { + public void testConvertParameterPlaceholders() throws ParseException { // parameterString,parameterMap is null Assert.assertNull(ParameterUtils.convertParameterPlaceholders(null, null)); // parameterString is null,parameterMap is not null - Map parameterMap = new HashMap(); - parameterMap.put("testParameter","testParameter"); + Map parameterMap = new HashMap(); + parameterMap.put("testParameter", "testParameter"); Assert.assertNull(ParameterUtils.convertParameterPlaceholders(null, parameterMap)); // parameterString、parameterMap is not null @@ -54,60 +59,72 @@ public class ParameterUtilsTest { Assert.assertEquals(parameterString, ParameterUtils.convertParameterPlaceholders(parameterString, parameterMap)); //replace variable ${} form - parameterMap.put("testParameter2","${testParameter}"); - Assert.assertEquals(parameterString,PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true)); + parameterMap.put("testParameter2", "${testParameter}"); + Assert.assertEquals(parameterString, PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true)); // replace time $[...] form, eg. $[yyyyMMdd] Date cronTime = new Date(); Assert.assertEquals(parameterString, replacePlaceholders(parameterString, cronTime, true)); // replace time $[...] form, eg. 
$[yyyyMMdd] - Date cronTimeStr = DateUtils.parseDate("20191220145900", new String[]{PARAMETER_FORMAT_TIME}); + Date cronTimeStr = DateUtils.stringToDate("2019-02-02 00:00:00"); Assert.assertEquals(parameterString, replacePlaceholders(parameterString, cronTimeStr, true)); } + @Test + public void testConvertParameterPlaceholders2() { + String parameterString = + "${user} is userName, '$[1]' '$[add_months(yyyyMMdd,12*2)]' '$[add_months(yyyyMMdd,-12*2)]' '$[add_months(yyyyMMdd,3)]' '$[add_months(yyyyMMdd,-4)]' " + + "'$[yyyyMMdd+7*2]' '$[yyyyMMdd-7*2]' '$[yyyyMMdd+3]' '$[0]' '$[yyyyMMdd-3]' '$[HHmmss+2/24]' '$[HHmmss-1/24]' '$[HHmmss+3/24/60]' '$[HHmmss-2/24/60]' '$[3]'"; + Map parameterMap = new HashMap<>(); + parameterMap.put("user", "Kris"); + parameterMap.put(Constants.PARAMETER_DATETIME, "20201201123000"); + parameterString = ParameterUtils.convertParameterPlaceholders(parameterString, parameterMap); + Assert.assertEquals("Kris is userName, '$[1]' '20221201' '20181201' '20210301' '20200801' '20201215' '20201117' '20201204' '$[0]' '20201128' '143000' '113000' '123300' '122800' '$[3]'", + parameterString); + } + /** * Test curingGlobalParams */ @Test - public void testCuringGlobalParams() throws Exception { + public void testCuringGlobalParams() { //define globalMap Map globalParamMap = new HashMap<>(); - globalParamMap.put("globalParams1","Params1"); + globalParamMap.put("globalParams1", "Params1"); //define globalParamList List globalParamList = new ArrayList<>(); //define scheduleTime - Date scheduleTime = DateUtils.parseDate("20191220145900", new String[]{PARAMETER_FORMAT_TIME}); + Date scheduleTime = DateUtils.stringToDate("2019-12-20 00:00:00"); //test globalParamList is null String result = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime); Assert.assertNull(result); - Assert.assertNull(ParameterUtils.curingGlobalParams(null,null,CommandType.START_CURRENT_TASK_PROCESS,null)); - 
Assert.assertNull(ParameterUtils.curingGlobalParams(globalParamMap,null,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime)); + Assert.assertNull(ParameterUtils.curingGlobalParams(null, null, CommandType.START_CURRENT_TASK_PROCESS, null)); + Assert.assertNull(ParameterUtils.curingGlobalParams(globalParamMap, null, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime)); //test globalParamList is not null - Property property=new Property("testGlobalParam", Direct.IN, DataType.VARCHAR,"testGlobalParam"); + Property property = new Property("testGlobalParam", Direct.IN, DataType.VARCHAR, "testGlobalParam"); globalParamList.add(property); - String result2 = ParameterUtils.curingGlobalParams(null,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,scheduleTime); + String result2 = ParameterUtils.curingGlobalParams(null, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime); Assert.assertEquals(result2, JSONUtils.toJsonString(globalParamList)); - String result3 = ParameterUtils.curingGlobalParams(globalParamMap,globalParamList,CommandType.START_CURRENT_TASK_PROCESS,null); + String result3 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, null); Assert.assertEquals(result3, JSONUtils.toJsonString(globalParamList)); String result4 = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, CommandType.START_CURRENT_TASK_PROCESS, scheduleTime); Assert.assertEquals(result4, JSONUtils.toJsonString(globalParamList)); //test var $ startsWith - globalParamMap.put("bizDate","${system.biz.date}"); - globalParamMap.put("b1zCurdate","${system.biz.curdate}"); - + globalParamMap.put("bizDate", "${system.biz.date}"); + globalParamMap.put("b1zCurdate", "${system.biz.curdate}"); - Property property2=new Property("testParamList1", Direct.IN, DataType.VARCHAR,"testParamList"); - Property property3=new Property("testParamList2", Direct.IN, DataType.VARCHAR,"{testParamList1}"); - Property property4=new 
Property("testParamList3", Direct.IN, DataType.VARCHAR,"${b1zCurdate}"); + Property property2 = new Property("testParamList1", Direct.IN, DataType.VARCHAR, "testParamList"); + Property property3 = new Property("testParamList2", Direct.IN, DataType.VARCHAR, "{testParamList1}"); + Property property4 = new Property("testParamList3", Direct.IN, DataType.VARCHAR, "${b1zCurdate}"); globalParamList.add(property2); globalParamList.add(property3); @@ -123,9 +140,9 @@ public class ParameterUtilsTest { @Test public void testHandleEscapes() throws Exception { Assert.assertNull(ParameterUtils.handleEscapes(null)); - Assert.assertEquals("",ParameterUtils.handleEscapes("")); - Assert.assertEquals("test Parameter",ParameterUtils.handleEscapes("test Parameter")); - Assert.assertEquals("////%test////%Parameter",ParameterUtils.handleEscapes("%test%Parameter")); + Assert.assertEquals("", ParameterUtils.handleEscapes("")); + Assert.assertEquals("test Parameter", ParameterUtils.handleEscapes("test Parameter")); + Assert.assertEquals("////%test////%Parameter", ParameterUtils.handleEscapes("%test%Parameter")); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java index eca22def30..f67e89e7e2 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/StringUtilsTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.common.utils; import org.junit.Assert; @@ -61,4 +62,19 @@ public class StringUtilsTest { b = StringUtils.isNotBlank("test"); Assert.assertTrue(b); } + + @Test + public void testreplaceNRTtoUnderline() { + String result1 = StringUtils.replaceNRTtoUnderline("abc\n"); + Assert.assertEquals("abc_", result1); + + String result2 = StringUtils.replaceNRTtoUnderline("abc\r"); + Assert.assertEquals("abc_", result2); + + String result3 = StringUtils.replaceNRTtoUnderline("abc\t"); + Assert.assertEquals("abc_", result3); + + String result4 = StringUtils.replaceNRTtoUnderline(null); + Assert.assertNull(result4); + } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/VarPoolUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/VarPoolUtilsTest.java index e47203c225..6713b221bc 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/VarPoolUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/VarPoolUtilsTest.java @@ -19,6 +19,8 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.model.TaskNode; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import org.junit.Assert; @@ -29,28 +31,7 @@ import org.slf4j.LoggerFactory; public class VarPoolUtilsTest { private static final Logger logger = LoggerFactory.getLogger(VarPoolUtilsTest.class); - - @Test - public void testSetTaskNodeLocalParams() { - String taskJson = "{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," - + "\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-75298\",\"maxRetryTimes\":0,\"name\":\"a1\"," - + "\"params\":\"{\\\"rawScript\\\":\\\"print(\\\\\\\"this is python task \\\\\\\",${p0})\\\"," - + 
"\\\"localParams\\\":[{\\\"prop\\\":\\\"p1\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"1\\\"}]," - + "\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\"," - + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\"," - + "\"type\":\"PYTHON\",\"workerGroup\":\"default\"}"; - TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); - - VarPoolUtils.setTaskNodeLocalParams(taskNode, "p1", "test1"); - Assert.assertEquals(VarPoolUtils.getTaskNodeLocalParam(taskNode, "p1"), "test1"); - - ConcurrentHashMap propToValue = new ConcurrentHashMap(); - propToValue.put("p1", "test2"); - - VarPoolUtils.setTaskNodeLocalParams(taskNode, propToValue); - Assert.assertEquals(VarPoolUtils.getTaskNodeLocalParam(taskNode, "p1"), "test2"); - } - + @Test public void testConvertVarPoolToMap() throws Exception { String varPool = "p1,66$VarPool$p2,69$VarPool$"; @@ -70,4 +51,40 @@ public class VarPoolUtilsTest { + "print(\"${{setValue({},{})}}\".format(\"p2\",4));"); logger.info(rawScript); } + + @Test + public void testSetTaskNodeLocalParams() throws Exception { + String taskJson = "{\"id\":\"tasks-66199\",\"name\":\"file-shell\",\"desc\":null,\"type\":\"SHELL\"," + + "\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"" + + "params\":{\"rawScript\":\"sh n-1/n-1-1/run.sh\",\"" + + "localParams\":[{\"prop\":\"k1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"v1\"},{\"prop\":\"k2\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"v2\"}," + + "{\"prop\":\"k3\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"v3\"}],\"" + + "resourceList\":[{\"id\":\"dolphinschedule-code\",\"res\":\"n-1/n-1-1/dolphinscheduler-api-server.log\"}," + + "{\"id\":\"mr-code\",\"res\":\"n-1/n-1-1/hadoop-mapreduce-examples-2.7.4.jar\"}," + + 
"{\"id\":\"run\",\"res\":\"n-1/n-1-1/run.sh\"}]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"" + + "dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"" + + "workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"delayTime\":0}"; + String changeTaskJson = "{\"id\":\"tasks-66199\",\"name\":\"file-shell\",\"desc\":null,\"type\":\"SHELL\"," + + "\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"" + + "params\":{\"rawScript\":\"sh n-1/n-1-1/run.sh\",\"" + + "localParams\":[{\"prop\":\"k1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"k1-value-change\"}," + + "{\"prop\":\"k2\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"k2-value-change\"}," + + "{\"prop\":\"k3\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"v3\"}],\"" + + "resourceList\":[{\"id\":\"dolphinschedule-code\",\"res\":\"n-1/n-1-1/dolphinscheduler-api-server.log\"}," + + "{\"id\":\"mr-code\",\"res\":\"n-1/n-1-1/hadoop-mapreduce-examples-2.7.4.jar\"}," + + "{\"id\":\"run\",\"res\":\"n-1/n-1-1/run.sh\"}]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"" + + "dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"" + + "workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"delayTime\":0}"; + Map propToValue = new HashMap(); + propToValue.put("k1","k1-value-change"); + propToValue.put("k2","k2-value-change"); + + TaskNode taskNode = JSONUtils.parseObject(taskJson,TaskNode.class); + + VarPoolUtils.setTaskNodeLocalParams(taskNode,propToValue); + + Assert.assertEquals(changeTaskJson,JSONUtils.toJsonString(taskNode)); + + } + } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java index d204dfd4de..68f206d28f 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtilsTest.java @@ -14,55 +14,62 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.common.utils.placeholder; import org.apache.dolphinscheduler.common.utils.DateUtils; + +import java.util.Date; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import java.util.Date; - public class TimePlaceholderUtilsTest { - Date date = null; + private Date date; @Before - public void init(){ - date = DateUtils.parse("20170101010101","yyyyMMddHHmmss"); + public void init() { + date = DateUtils.parse("20170101010101", "yyyyMMddHHmmss"); } -// @Test -// public void replacePlaceholdersT() { -// Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", TimePlaceholderUtils.replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", -// date, true)); -// -// Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", TimePlaceholderUtils.replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," -// + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," -// + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," -// + "$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," -// + "$[timestamp(week_end(yyyyMMdd000000, -1))]," -// + "$[timestamp(yyyyMMddHHmmss)]", -// date, true)); -// } -// -// -// -// @Test -// public void calcMinutesT() { -// Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", TimePlaceholderUtils.calcMinutes("yyyy", 
date).toString()); -// Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+7*1", date).toString()); -// Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-7*1", date).toString()); -// Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+1", date).toString()); -// Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-1", date).toString()); -// Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH+1/24", date).toString()); -// Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH-1/24", date).toString()); -// } -// -// @Test -// public void calcMonthsT() { -// Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); -// Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); -// } + @Test + public void replacePlaceholdersT() { + Assert.assertEquals("2017test12017:***2016-12-31,20170102,20170130,20161227,20161231", TimePlaceholderUtils + .replacePlaceholders("$[yyyy]test1$[yyyy:***]$[yyyy-MM-dd-1],$[month_begin(yyyyMMdd, 1)],$[month_end(yyyyMMdd, -1)],$[week_begin(yyyyMMdd, 1)],$[week_end(yyyyMMdd, -1)]", + date, true)); + + Assert.assertEquals("1483200061,1483290061,1485709261,1482771661,1483113600,1483203661", TimePlaceholderUtils.replacePlaceholders("$[timestamp(yyyyMMdd00mmss)]," + + "$[timestamp(month_begin(yyyyMMddHHmmss, 1))]," + + "$[timestamp(month_end(yyyyMMddHHmmss, -1))]," + + "$[timestamp(week_begin(yyyyMMddHHmmss, 1))]," + + "$[timestamp(week_end(yyyyMMdd000000, -1))]," + + "$[timestamp(yyyyMMddHHmmss)]", + date, true)); + } + + @Test + public void calcMinutesT() { + 
Assert.assertEquals("Sun Jan 01 01:01:01 CST 2017=yyyy", TimePlaceholderUtils.calcMinutes("yyyy", date).toString()); + Assert.assertEquals("Sun Jan 08 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+7*1", date).toString()); + Assert.assertEquals("Sun Dec 25 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-7*1", date).toString()); + Assert.assertEquals("Mon Jan 02 01:01:01 CST 2017=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd+1", date).toString()); + Assert.assertEquals("Sat Dec 31 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMinutes("yyyyMMdd-1", date).toString()); + Assert.assertEquals("Sun Jan 01 02:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH+1/24", date).toString()); + Assert.assertEquals("Sun Jan 01 00:01:01 CST 2017=yyyyMMddHH", TimePlaceholderUtils.calcMinutes("yyyyMMddHH-1/24", date).toString()); + } + + @Test + public void calcMonthsT() { + Assert.assertEquals("Mon Jan 01 01:01:01 CST 2018=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,12*1)", date).toString()); + Assert.assertEquals("Fri Jan 01 01:01:01 CST 2016=yyyyMMdd", TimePlaceholderUtils.calcMonths("add_months(yyyyMMdd,-12*1)", date).toString()); + } + + @Test + public void testGetPlaceHolderTime() { + + Assert.assertEquals("20170101", TimePlaceholderUtils.getPlaceHolderTime("yyyyMMdd", date)); + } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32Test.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32Test.java deleted file mode 100644 index ce04346743..0000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessBuilderForWin32Test.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.common.utils.process; - -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -@RunWith(PowerMockRunner.class) -@PrepareForTest(OSUtils.class) -public class ProcessBuilderForWin32Test { - - private static final Logger logger = LoggerFactory.getLogger(ProcessBuilderForWin32Test.class); - - @Before - public void before() { - PowerMockito.mockStatic(OSUtils.class); - PowerMockito.when(OSUtils.isWindows()).thenReturn(true); - } - - @Test - public void testCreateProcessBuilderForWin32() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - Assert.assertNotNull(builder); - - builder = new ProcessBuilderForWin32("net"); - Assert.assertNotNull(builder); - - builder = new 
ProcessBuilderForWin32(Collections.singletonList("net")); - Assert.assertNotNull(builder); - - builder = new ProcessBuilderForWin32((List) null); - Assert.assertNotNull(builder); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testBuildUser() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.user("test", StringUtils.EMPTY); - Assert.assertNotNull(builder); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testBuildCommand() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.command(Collections.singletonList("net")); - Assert.assertNotEquals(0, builder.command().size()); - - builder = new ProcessBuilderForWin32(); - builder.command("net"); - Assert.assertNotEquals(0, builder.command().size()); - - builder = new ProcessBuilderForWin32(); - builder.command((List) null); - Assert.assertNotEquals(0, builder.command().size()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testEnvironment() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - Assert.assertNotNull(builder.environment()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.environment(new String[]{ "a=123" }); - Assert.assertNotEquals(0, builder.environment().size()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testDirectory() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.directory(new File("/tmp")); - Assert.assertNotNull(builder.directory()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testStream() { - try { - InputStream in = ProcessBuilderForWin32.NullInputStream.INSTANCE; - Assert.assertNotNull(in); - 
Assert.assertEquals(-1, in.read()); - Assert.assertEquals(0, in.available()); - - OutputStream out = ProcessBuilderForWin32.NullOutputStream.INSTANCE; - Assert.assertNotNull(out); - out.write(new byte[] {1}); - } catch (Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testRedirect() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - - builder.redirectInput(new File("/tmp")); - Assert.assertNotNull(builder.redirectInput()); - Assert.assertNotNull(builder.redirectInput().file()); - - builder.redirectOutput(new File("/tmp")); - Assert.assertNotNull(builder.redirectOutput()); - Assert.assertNotNull(builder.redirectOutput().file()); - - builder.redirectError(new File("/tmp")); - Assert.assertNotNull(builder.redirectError()); - Assert.assertNotNull(builder.redirectError().file()); - - builder.redirectInput(builder.redirectOutput()); - builder.redirectOutput(builder.redirectInput()); - builder.redirectError(builder.redirectInput()); - - Assert.assertNotNull(ProcessBuilderForWin32.Redirect.PIPE.type()); - Assert.assertNotNull(ProcessBuilderForWin32.Redirect.PIPE.toString()); - Assert.assertNotNull(ProcessBuilderForWin32.Redirect.INHERIT.type()); - Assert.assertNotNull(ProcessBuilderForWin32.Redirect.INHERIT.toString()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testRedirectErrorStream() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.redirectErrorStream(true); - Assert.assertTrue(builder.redirectErrorStream()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void runCmdViaUser() { - try { - ProcessBuilderForWin32 builder = new ProcessBuilderForWin32(); - builder.user("test123", StringUtils.EMPTY); - - List commands = new ArrayList<>(); - commands.add("cmd.exe"); - commands.add("/c"); - commands.add("net user"); - builder.command(commands); - - Process process = builder.start(); - 
BufferedReader inReader = new BufferedReader(new InputStreamReader(process.getInputStream(), Charset.forName("GBK"))); - String line; - StringBuilder sb = new StringBuilder(); - while ((line = inReader.readLine()) != null) { - sb.append(line); - } - logger.info("net user: {}", sb.toString()); - Assert.assertNotEquals(StringUtils.EMPTY, sb.toString()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32Test.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32Test.java deleted file mode 100644 index 00c54c0164..0000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessEnvironmentForWin32Test.java +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.common.utils.process; - -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Collection; -import java.util.Map; -import java.util.Set; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({OSUtils.class, ProcessEnvironmentForWin32.class}) -public class ProcessEnvironmentForWin32Test { - - private static final Logger logger = LoggerFactory.getLogger(ProcessBuilderForWin32Test.class); - - @Before - public void before() { - try { - PowerMockito.mockStatic(OSUtils.class); - PowerMockito.when(OSUtils.isWindows()).thenReturn(true); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testPutAndGet() { - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - processEnvironmentForWin32.put("a", "123"); - Assert.assertEquals("123", processEnvironmentForWin32.get("a")); - Assert.assertTrue(processEnvironmentForWin32.containsKey("a")); - Assert.assertTrue(processEnvironmentForWin32.containsValue("123")); - Assert.assertEquals("123", processEnvironmentForWin32.remove("a")); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - processEnvironmentForWin32.put("b=", "123"); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - 
processEnvironmentForWin32.put("b", "\u0000"); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - processEnvironmentForWin32.get(null); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testEntrySet() { - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - processEnvironmentForWin32.clear(); - processEnvironmentForWin32.put("a", "123"); - Assert.assertEquals(0, processEnvironmentForWin32.entrySet().size()); - Assert.assertTrue(processEnvironmentForWin32.entrySet().isEmpty()); - for (Map.Entry entry : processEnvironmentForWin32.entrySet()) { - Assert.assertNotNull(entry); - Assert.assertNotNull(entry.getKey()); - Assert.assertNotNull(entry.getValue()); - Assert.assertNotNull(entry.setValue("123")); - } - - processEnvironmentForWin32.clear(); - Set keys = processEnvironmentForWin32.keySet(); - Assert.assertEquals(0, keys.size()); - Assert.assertTrue(keys.isEmpty()); - - processEnvironmentForWin32.clear(); - Collection values = processEnvironmentForWin32.values(); - Assert.assertEquals(0, keys.size()); - Assert.assertTrue(keys.isEmpty()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testToEnvironmentBlock() { - try { - ProcessEnvironmentForWin32 processEnvironmentForWin32 = (ProcessEnvironmentForWin32) ProcessEnvironmentForWin32.emptyEnvironment(0); - Assert.assertNotNull(processEnvironmentForWin32.toEnvironmentBlock()); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - -} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32Test.java 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32Test.java deleted file mode 100644 index 3f8bcbfb66..0000000000 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/process/ProcessImplForWin32Test.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.common.utils.process; - -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import sun.security.action.GetPropertyAction; - -@RunWith(PowerMockRunner.class) -@PrepareForTest({OSUtils.class, GetPropertyAction.class}) -public class ProcessImplForWin32Test { - - private static final Logger logger = LoggerFactory.getLogger(ProcessBuilderForWin32Test.class); - - @Before - public void before() { - PowerMockito.mockStatic(OSUtils.class); - PowerMockito.mockStatic(GetPropertyAction.class); - PowerMockito.when(OSUtils.isWindows()).thenReturn(true); - } - - @Test - public void testStart() { - try { - Process process = ProcessImplForWin32.start( - "test123", StringUtils.EMPTY, new String[]{"net"}, - null, null, null, false); - Assert.assertNotNull(process); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - - try { - Process process = ProcessImplForWin32.start( - "test123", StringUtils.EMPTY, new String[]{"net"}, - null, null, new ProcessBuilderForWin32.Redirect[]{ - ProcessBuilderForWin32.Redirect.PIPE, - ProcessBuilderForWin32.Redirect.PIPE, - ProcessBuilderForWin32.Redirect.PIPE - }, false); - Assert.assertNotNull(process); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - -} diff --git a/dolphinscheduler-dao/pom.xml b/dolphinscheduler-dao/pom.xml index 2ebe234fc6..6bdaf416d9 100644 --- a/dolphinscheduler-dao/pom.xml +++ b/dolphinscheduler-dao/pom.xml @@ -16,139 +16,140 @@ ~ limitations under the License. 
--> - - 4.0.0 - - org.apache.dolphinscheduler - dolphinscheduler - 1.3.2-SNAPSHOT - - dolphinscheduler-dao - ${project.artifactId} + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler + 1.3.4-SNAPSHOT + + dolphinscheduler-dao + ${project.artifactId} - - UTF-8 - - - - junit - junit - test - - - com.baomidou - mybatis-plus - ${mybatis-plus.version} - - - com.baomidou - mybatis-plus-boot-starter - ${mybatis-plus.version} - - - org.apache.logging.log4j - log4j-to-slf4j - - - - - org.postgresql - postgresql - + + UTF-8 + + + + junit + junit + test + + + com.baomidou + mybatis-plus + ${mybatis-plus.version} + + + com.baomidou + mybatis-plus-boot-starter + ${mybatis-plus.version} + + + org.apache.logging.log4j + log4j-to-slf4j + + + + + org.postgresql + postgresql + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.ow2.asm + asm + + + org.springframework.boot + spring-boot + + + org.springframework.boot + spring-boot-autoconfigure + + + log4j-api + org.apache.logging.log4j + + + org.springframework.boot + spring-boot-starter-tomcat + + + org.apache.logging.log4j + log4j-to-slf4j + + + - - org.springframework.boot - spring-boot-starter-test - test - - - org.ow2.asm - asm - - - org.springframework.boot - spring-boot - - - org.springframework.boot - spring-boot-autoconfigure - - - log4j-api - org.apache.logging.log4j - - - org.springframework.boot - spring-boot-starter-tomcat - - - org.apache.logging.log4j - log4j-to-slf4j - - - + + mysql + mysql-connector-java + + + com.h2database + h2 + + + com.alibaba + druid + - - mysql - mysql-connector-java - - - com.h2database - h2 - - - com.alibaba - druid - + + ch.qos.logback + logback-classic + + + com.fasterxml.jackson.core + jackson-annotations + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.httpcomponents + httpclient + + + commons-httpclient + commons-httpclient + - - ch.qos.logback - logback-classic - - - com.fasterxml.jackson.core - jackson-annotations - - - 
com.fasterxml.jackson.core - jackson-databind - - - org.apache.httpcomponents - httpclient - - - commons-httpclient - commons-httpclient - - - - com.cronutils - cron-utils - + + com.cronutils + cron-utils + commons-configuration commons-configuration - - org.apache.dolphinscheduler - dolphinscheduler-common - - - protobuf-java - com.google.protobuf - - - + + org.apache.dolphinscheduler + dolphinscheduler-common + + + protobuf-java + com.google.protobuf + + + org.springframework spring-test test - - org.yaml - snakeyaml - - - \ No newline at end of file + + org.yaml + snakeyaml + + + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java index 7d52dc93f3..cba0151828 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Command.java @@ -131,6 +131,7 @@ public class Command { WarningType warningType, int warningGroupId, Date scheduleTime, + String workerGroup, Priority processInstancePriority) { this.commandType = commandType; this.executorId = executorId; @@ -143,6 +144,7 @@ public class Command { this.failureStrategy = failureStrategy; this.startTime = new Date(); this.updateTime = new Date(); + this.workerGroup = workerGroup; this.processInstancePriority = processInstancePriority; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java index e3a3f11386..f1d43a353b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java @@ -228,18 +228,6 @@ public class ProcessInstance { * varPool string */ private String 
varPool; - - /** - * receivers for api - */ - @TableField(exist = false) - private String receivers; - - /** - * receivers cc for api - */ - @TableField(exist = false) - private String receiversCc; public ProcessInstance() { @@ -268,7 +256,7 @@ public class ProcessInstance { public void setVarPool(String varPool) { this.varPool = varPool; } - + public ProcessDefinition getProcessDefinition() { return processDefinition; } @@ -591,22 +579,6 @@ public class ProcessInstance { this.tenantId = tenantId; } - public String getReceivers() { - return receivers; - } - - public void setReceivers(String receivers) { - this.receivers = receivers; - } - - public String getReceiversCc() { - return receiversCc; - } - - public void setReceiversCc(String receiversCc) { - this.receiversCc = receiversCc; - } - @Override public String toString() { return "ProcessInstance{" @@ -679,12 +651,6 @@ public class ProcessInstance { + timeout + ", tenantId=" + tenantId - + ", receivers='" - + receivers - + '\'' - + ", receiversCc='" - + receiversCc - + '\'' + '}'; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java index 1b2eea403d..87674fe995 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/Tenant.java @@ -42,11 +42,6 @@ public class Tenant { */ private String tenantCode; - /** - * tenant name - */ - private String tenantName; - /** * description */ @@ -97,14 +92,6 @@ public class Tenant { this.tenantCode = tenantCode; } - public String getTenantName() { - return tenantName; - } - - public void setTenantName(String tenantName) { - this.tenantName = tenantName; - } - public int getQueueId() { return queueId; } @@ -150,7 +137,6 @@ public class Tenant { return "Tenant{" + "id=" + id + ", tenantCode='" + tenantCode + '\'' + - ", 
tenantName='" + tenantName + '\'' + ", queueId=" + queueId + ", queueName='" + queueName + '\'' + ", queue='" + queue + '\'' + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java index 89da171caf..2110fe2021 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/User.java @@ -79,12 +79,6 @@ public class User { @TableField(exist = false) private String tenantCode; - /** - * tenant name - */ - @TableField(exist = false) - private String tenantName; - /** * queue name */ @@ -203,14 +197,6 @@ public class User { this.alertGroup = alertGroup; } - public String getTenantName() { - return tenantName; - } - - public void setTenantName(String tenantName) { - this.tenantName = tenantName; - } - public String getTenantCode() { return tenantCode; } @@ -272,7 +258,6 @@ public class User { ", tenantId=" + tenantId + ", state=" + state + ", tenantCode='" + tenantCode + '\'' + - ", tenantName='" + tenantName + '\'' + ", queueName='" + queueName + '\'' + ", alertGroup='" + alertGroup + '\'' + ", queue='" + queue + '\'' + diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java index 86e3172f23..621ec43c27 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.java @@ -34,6 +34,15 @@ import com.baomidou.mybatisplus.core.metadata.IPage; */ public interface ProcessDefinitionMapper extends BaseMapper { + /** + * verify process definition by name + * + * @param projectId projectId + * @param name name + * @return 
process definition + */ + ProcessDefinition verifyByDefineName(@Param("projectId") int projectId, + @Param("processDefinitionName") String name); /** * query process definition by name diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java index 8048fda812..a9ebbf000c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -75,6 +75,7 @@ public interface ProcessInstanceMapper extends BaseMapper { * @param projectId projectId * @param processDefinitionId processDefinitionId * @param searchVal searchVal + * @param executorId executorId * @param statusArray statusArray * @param host host * @param startTime startTime diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java index 424cb8e869..b24fd2ede4 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.java @@ -74,4 +74,11 @@ public interface ProjectMapper extends BaseMapper { */ List queryProjectExceptUserId(@Param("userId") int userId); + /** + * query project list by userId + * @param userId + * @return + */ + List queryProjectCreatedAndAuthorizedByUserId(@Param("userId") int userId); + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java index ac23b25c9c..b0e9ca7338 100644 --- 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java @@ -14,25 +14,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.dao.mapper; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.TaskInstance; + import org.apache.ibatis.annotations.Param; import java.util.Date; import java.util.List; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; + /** * task instance mapper interface */ public interface TaskInstanceMapper extends BaseMapper { - List queryTaskByProcessIdAndState(@Param("processInstanceId") Integer processInstanceId, @Param("state") Integer state); @@ -61,6 +63,7 @@ public interface TaskInstanceMapper extends BaseMapper { IPage queryTaskInstanceListPaging(IPage page, @Param("projectId") int projectId, @Param("processInstanceId") Integer processInstanceId, + @Param("processInstanceName") String processInstanceName, @Param("searchVal") String searchVal, @Param("taskName") String taskName, @Param("executorId") int executorId, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java index a2ce6b29b8..b7351f4b49 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.java @@ -100,5 +100,12 @@ 
public interface UdfFuncMapper extends BaseMapper { */ List listAuthorizedUdfByResourceId(@Param("userId") int userId,@Param("resourceIds") int[] resourceIds); + /** + * batch update udf func + * @param udfFuncList list of udf functions to update + * @return the number of records updated + */ + int batchUpdateUdfFunc(@Param("udfFuncList") List udfFuncList); + } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.java index 3abba3b7d3..fb744137e0 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.java @@ -24,9 +24,9 @@ import java.util.Set; public interface WorkFlowLineageMapper { - public List queryByName(@Param("searchVal") String searchVal, @Param("projectId") int projectId); + List queryByName(@Param("searchVal") String searchVal, @Param("projectId") int projectId); - public List queryByIds(@Param("ids") Set ids, @Param("projectId") int projectId); + List queryByIds(@Param("ids") Set ids, @Param("projectId") int projectId); - public List querySourceTarget(@Param("id") int id); + List querySourceTarget(@Param("id") int id); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java index 8d1d862640..b2daae28cb 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/DolphinSchedulerManager.java @@ -117,6 +117,8 @@ public class DolphinSchedulerManager { upgradeDao.upgradeDolphinScheduler(schemaDir); if ("1.3.0".equals(schemaVersion)) { upgradeDao.upgradeDolphinSchedulerWorkerGroup();
} else if ("1.3.2".equals(schemaVersion)) { + upgradeDao.upgradeDolphinSchedulerResourceList(); } version = schemaVersion; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java new file mode 100644 index 0000000000..c9ff149306 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ResourceDao.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.upgrade; + +import org.apache.dolphinscheduler.common.utils.ConnectionUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.util.HashMap; +import java.util.Map; + +/** + * resource dao + */ +public class ResourceDao { + public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class); + + /** + * list all resources + * + * @param conn connection + * @return map that key is full_name and value is id + */ + Map listAllResources(Connection conn) { + Map resourceMap = new HashMap<>(); + + String sql = String.format("SELECT id,full_name FROM t_ds_resources"); + ResultSet rs = null; + PreparedStatement pstmt = null; + try { + pstmt = conn.prepareStatement(sql); + rs = pstmt.executeQuery(); + + while (rs.next()) { + Integer id = rs.getInt(1); + String fullName = rs.getString(2); + resourceMap.put(fullName, id); + } + + } catch (Exception e) { + logger.error(e.getMessage(), e); + throw new RuntimeException("sql: " + sql, e); + } finally { + ConnectionUtils.releaseResource(rs, pstmt, conn); + } + + return resourceMap; + } + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java index 29c625337d..f0ffc52f68 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java @@ -16,10 +16,10 @@ */ package org.apache.dolphinscheduler.dao.upgrade; -import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.dolphinscheduler.common.enums.DbType; +import 
org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.AbstractBaseDao; import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory; @@ -34,7 +34,9 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.text.MessageFormat; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; public abstract class UpgradeDao extends AbstractBaseDao { @@ -88,7 +90,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * init schema */ - public void initSchema(){ + public void initSchema() { DbType dbType = getDbType(); String initSqlPath = ""; if (dbType != null) { @@ -111,6 +113,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * init scheam + * * @param initSqlPath initSqlPath */ public void initSchema(String initSqlPath) { @@ -126,6 +129,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * run DML + * * @param initSqlPath initSqlPath */ private void runInitDML(String initSqlPath) { @@ -148,20 +152,20 @@ public abstract class UpgradeDao extends AbstractBaseDao { try { conn.rollback(); } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { try { if (null != conn) { conn.rollback(); } } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(conn); @@ -171,6 +175,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * run DDL + * * @param initSqlPath initSqlPath */ 
private void runInitDDL(String initSqlPath) { @@ -189,12 +194,12 @@ public abstract class UpgradeDao extends AbstractBaseDao { } catch (IOException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(conn); @@ -204,6 +209,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * determines whether a table exists + * * @param tableName tableName * @return if table exist return true,else return false */ @@ -211,20 +217,22 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * determines whether a field exists in the specified table - * @param tableName tableName + * + * @param tableName tableName * @param columnName columnName - * @return if column name exist return true,else return false + * @return if column name exist return true,else return false */ - public abstract boolean isExistsColumn(String tableName,String columnName); + public abstract boolean isExistsColumn(String tableName, String columnName); /** * get current version + * * @param versionName versionName * @return version */ public String getCurrentVersion(String versionName) { - String sql = String.format("select version from %s",versionName); + String sql = String.format("select version from %s", versionName); Connection conn = null; ResultSet rs = null; PreparedStatement pstmt = null; @@ -241,7 +249,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { return version; } catch (SQLException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } finally { ConnectionUtils.releaseResource(rs, pstmt, conn); @@ -251,6 +259,7 @@ public abstract class 
UpgradeDao extends AbstractBaseDao { /** * upgrade DolphinScheduler + * * @param schemaDir schema dir */ public void upgradeDolphinScheduler(String schemaDir) { @@ -268,52 +277,121 @@ public abstract class UpgradeDao extends AbstractBaseDao { public void upgradeDolphinSchedulerWorkerGroup() { updateProcessDefinitionJsonWorkerGroup(); } + + /** + * upgrade DolphinScheduler resource list + * ds-1.3.2 modify the resource list for process definition json + */ + public void upgradeDolphinSchedulerResourceList() { + updateProcessDefinitionJsonResourceList(); + } + /** * updateProcessDefinitionJsonWorkerGroup */ - protected void updateProcessDefinitionJsonWorkerGroup(){ + protected void updateProcessDefinitionJsonWorkerGroup() { WorkerGroupDao workerGroupDao = new WorkerGroupDao(); ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao(); - Map replaceProcessDefinitionMap = new HashMap<>(); + Map replaceProcessDefinitionMap = new HashMap<>(); try { Map oldWorkerGroupMap = workerGroupDao.queryAllOldWorkerGroup(dataSource.getConnection()); - Map processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection()); + Map processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection()); - for (Map.Entry entry : processDefinitionJsonMap.entrySet()){ + for (Map.Entry entry : processDefinitionJsonMap.entrySet()) { ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue()); ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString()); - for (int i = 0 ;i < tasks.size() ; i++){ + for (int i = 0; i < tasks.size(); i++) { ObjectNode task = (ObjectNode) tasks.path(i); ObjectNode workerGroupNode = (ObjectNode) task.path("workerGroupId"); Integer workerGroupId = -1; - if(workerGroupNode != null && workerGroupNode.canConvertToInt()){ + if (workerGroupNode != null && workerGroupNode.canConvertToInt()) { workerGroupId = workerGroupNode.asInt(-1); } if (workerGroupId == -1) { 
task.put("workerGroup", "default"); - }else { + } else { task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId)); } } jsonObject.remove("task"); - jsonObject.put("tasks",tasks); + jsonObject.put("tasks", tasks); - replaceProcessDefinitionMap.put(entry.getKey(),jsonObject.toString()); + replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString()); } - if (replaceProcessDefinitionMap.size() > 0){ - processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(),replaceProcessDefinitionMap); + if (replaceProcessDefinitionMap.size() > 0) { + processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap); } - }catch (Exception e){ - logger.error("update process definition json workergroup error",e); + } catch (Exception e) { + logger.error("update process definition json workergroup error", e); + } + } + + /** + * updateProcessDefinitionJsonResourceList + */ + protected void updateProcessDefinitionJsonResourceList() { + ResourceDao resourceDao = new ResourceDao(); + ProcessDefinitionDao processDefinitionDao = new ProcessDefinitionDao(); + Map replaceProcessDefinitionMap = new HashMap<>(); + try { + Map resourcesMap = resourceDao.listAllResources(dataSource.getConnection()); + Map processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection()); + + for (Map.Entry entry : processDefinitionJsonMap.entrySet()) { + ObjectNode jsonObject = JSONUtils.parseObject(entry.getValue()); + ArrayNode tasks = JSONUtils.parseArray(jsonObject.get("tasks").toString()); + + for (int i = 0; i < tasks.size(); i++) { + ObjectNode task = (ObjectNode) tasks.get(i); + ObjectNode param = (ObjectNode) task.get("params"); + if (param != null) { + + List resourceList = JSONUtils.toList(param.get("resourceList").toString(), ResourceInfo.class); + ResourceInfo mainJar = JSONUtils.parseObject(param.get("mainJar").toString(), ResourceInfo.class); + if (mainJar != null && mainJar.getId() == 0) { + 
String fullName = mainJar.getRes().startsWith("/") ? mainJar.getRes() : String.format("/%s", mainJar.getRes()); + if (resourcesMap.containsKey(fullName)) { + mainJar.setId(resourcesMap.get(fullName)); + param.put("mainJar", JSONUtils.parseObject(JSONUtils.toJsonString(mainJar))); + } + } + + if (CollectionUtils.isNotEmpty(resourceList)) { + List newResourceList = resourceList.stream().map(resInfo -> { + String fullName = resInfo.getRes().startsWith("/") ? resInfo.getRes() : String.format("/%s", resInfo.getRes()); + if (resInfo.getId() == 0 && resourcesMap.containsKey(fullName)) { + resInfo.setId(resourcesMap.get(fullName)); + } + return resInfo; + }).collect(Collectors.toList()); + param.put("resourceList", JSONUtils.parseObject(JSONUtils.toJsonString(newResourceList))); + } + } + task.put("params", param); + + } + + jsonObject.remove("tasks"); + + jsonObject.put("tasks", tasks); + + replaceProcessDefinitionMap.put(entry.getKey(), jsonObject.toString()); + } + if (replaceProcessDefinitionMap.size() > 0) { + processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(), replaceProcessDefinitionMap); + } + } catch (Exception e) { + logger.error("update process definition json resource list error", e); } } /** * upgradeDolphinScheduler DML + * * @param schemaDir schemaDir */ private void upgradeDolphinSchedulerDML(String schemaDir) { @@ -321,8 +399,8 @@ public abstract class UpgradeDao extends AbstractBaseDao { if (StringUtils.isEmpty(rootDir)) { throw new RuntimeException("Environment variable user.dir not found"); } - String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); - logger.info("sqlSQLFilePath"+sqlFilePath); + String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_dml.sql", rootDir, schemaDir, getDbType().name().toLowerCase()); + logger.info("sqlSQLFilePath" + sqlFilePath); Connection conn = null; PreparedStatement pstmt = 
null; try { @@ -334,13 +412,13 @@ public abstract class UpgradeDao extends AbstractBaseDao { scriptRunner.runScript(sqlReader); if (isExistsTable(T_VERSION_NAME)) { // Change version in the version table to the new version - String upgradeSQL = String.format("update %s set version = ?",T_VERSION_NAME); + String upgradeSQL = String.format("update %s set version = ?", T_VERSION_NAME); pstmt = conn.prepareStatement(upgradeSQL); pstmt.setString(1, schemaVersion); pstmt.executeUpdate(); - }else if (isExistsTable(T_NEW_VERSION_NAME)) { + } else if (isExistsTable(T_NEW_VERSION_NAME)) { // Change version in the version table to the new version - String upgradeSQL = String.format("update %s set version = ?",T_NEW_VERSION_NAME); + String upgradeSQL = String.format("update %s set version = ?", T_NEW_VERSION_NAME); pstmt = conn.prepareStatement(upgradeSQL); pstmt.setString(1, schemaVersion); pstmt.executeUpdate(); @@ -350,38 +428,38 @@ public abstract class UpgradeDao extends AbstractBaseDao { try { conn.rollback(); } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); throw new RuntimeException("sql file not found ", e); } catch (IOException e) { try { conn.rollback(); } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (SQLException e) { try { if (null != conn) { conn.rollback(); } } catch (SQLException e1) { - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { try { if (null != conn) { conn.rollback(); } } catch (SQLException e1) 
{ - logger.error(e1.getMessage(),e1); + logger.error(e1.getMessage(), e1); } - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(pstmt, conn); } @@ -390,13 +468,14 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * upgradeDolphinScheduler DDL + * * @param schemaDir schemaDir */ private void upgradeDolphinSchedulerDDL(String schemaDir) { if (StringUtils.isEmpty(rootDir)) { throw new RuntimeException("Environment variable user.dir not found"); } - String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql",rootDir,schemaDir,getDbType().name().toLowerCase()); + String sqlFilePath = MessageFormat.format("{0}/sql/upgrade/{1}/{2}/dolphinscheduler_ddl.sql", rootDir, schemaDir, getDbType().name().toLowerCase()); Connection conn = null; PreparedStatement pstmt = null; try { @@ -411,20 +490,20 @@ public abstract class UpgradeDao extends AbstractBaseDao { } catch (FileNotFoundException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); throw new RuntimeException("sql file not found ", e); } catch (IOException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (SQLException e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } catch (Exception e) { - logger.error(e.getMessage(),e); - throw new RuntimeException(e.getMessage(),e); + logger.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); } finally { ConnectionUtils.releaseResource(pstmt, conn); } @@ -434,15 +513,16 @@ public abstract class UpgradeDao extends AbstractBaseDao { /** * update version + * * @param version version */ public 
void updateVersion(String version) { // Change version in the version table to the new version String versionName = T_VERSION_NAME; - if(!SchemaUtils.isAGreatVersion("1.2.0" , version)){ + if (!SchemaUtils.isAGreatVersion("1.2.0", version)) { versionName = "t_ds_version"; } - String upgradeSQL = String.format("update %s set version = ?",versionName); + String upgradeSQL = String.format("update %s set version = ?", versionName); PreparedStatement pstmt = null; Connection conn = null; try { @@ -452,7 +532,7 @@ public abstract class UpgradeDao extends AbstractBaseDao { pstmt.executeUpdate(); } catch (SQLException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); throw new RuntimeException("sql: " + upgradeSQL, e); } finally { ConnectionUtils.releaseResource(pstmt, conn); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java index d3b829cb4f..6ee1c19e35 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java @@ -22,11 +22,13 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.TaskInstance; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,6 +47,7 @@ public class DagHelper { /** * generate flow node relation list by 
task node list; * Edges that are not in the task Node List will not be added to the result + * * @param taskNodeList taskNodeList * @return task node relation list */ @@ -66,10 +69,11 @@ public class DagHelper { /** * generate task nodes needed by dag - * @param taskNodeList taskNodeList - * @param startNodeNameList startNodeNameList + * + * @param taskNodeList taskNodeList + * @param startNodeNameList startNodeNameList * @param recoveryNodeNameList recoveryNodeNameList - * @param taskDependType taskDependType + * @param taskDependType taskDependType * @return task node list */ public static List generateFlowNodeListByStartNode(List taskNodeList, List startNodeNameList, @@ -77,8 +81,8 @@ public class DagHelper { List destFlowNodeList = new ArrayList<>(); List startNodeList = startNodeNameList; - if(taskDependType != TaskDependType.TASK_POST - && CollectionUtils.isEmpty(startNodeList)){ + if (taskDependType != TaskDependType.TASK_POST + && CollectionUtils.isEmpty(startNodeList)) { logger.error("start node list is empty! cannot continue run the process "); return destFlowNodeList; } @@ -126,7 +130,8 @@ public class DagHelper { /** * find all the nodes that depended on the start node - * @param startNode startNode + * + * @param startNode startNode * @param taskNodeList taskNodeList * @return task node list */ @@ -150,9 +155,10 @@ public class DagHelper { /** * find all nodes that start nodes depend on. 
- * @param startNode startNode + * + * @param startNode startNode * @param recoveryNodeNameList recoveryNodeNameList - * @param taskNodeList taskNodeList + * @param taskNodeList taskNodeList * @return task node list */ private static List getFlowNodeListPre(TaskNode startNode, List recoveryNodeNameList, List taskNodeList, List visitedNodeNameList) { @@ -165,9 +171,6 @@ public class DagHelper { resultList.add(startNode); } if (CollectionUtils.isEmpty(depList)) { - if (null != startNode) { - visitedNodeNameList.add(startNode.getName()); - } return resultList; } for (String depNodeName : depList) { @@ -187,10 +190,11 @@ public class DagHelper { /** * generate dag by start nodes and recovery nodes + * * @param processDefinitionJson processDefinitionJson - * @param startNodeNameList startNodeNameList - * @param recoveryNodeNameList recoveryNodeNameList - * @param depNodeType depNodeType + * @param startNodeNameList startNodeNameList + * @param recoveryNodeNameList recoveryNodeNameList + * @param depNodeType depNodeType * @return process dag * @throws Exception if error throws Exception */ @@ -217,10 +221,11 @@ public class DagHelper { /** * parse the forbidden task nodes in process definition. 
+ * * @param processDefinitionJson processDefinitionJson * @return task node map */ - public static Map getForbiddenTaskNodeMaps(String processDefinitionJson){ + public static Map getForbiddenTaskNodeMaps(String processDefinitionJson) { Map forbidTaskNodeMap = new ConcurrentHashMap<>(); ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); @@ -228,8 +233,8 @@ public class DagHelper { if (null != processData) { taskNodeList = processData.getTasks(); } - for(TaskNode node : taskNodeList){ - if(node.isForbidden()){ + for (TaskNode node : taskNodeList) { + if (node.isForbidden()) { forbidTaskNodeMap.putIfAbsent(node.getName(), node); } } @@ -239,8 +244,9 @@ public class DagHelper { /** * find node by node name + * * @param nodeDetails nodeDetails - * @param nodeName nodeName + * @param nodeName nodeName * @return task node */ public static TaskNode findNodeByName(List nodeDetails, String nodeName) { @@ -252,71 +258,89 @@ public class DagHelper { return null; } - /** - * get start vertex in one dag - * it would find the post node if the start vertex is forbidden running - * @param parentNodeName previous node - * @param dag dag + * the task can be submit when all the depends nodes are forbidden or complete + * + * @param taskNode taskNode + * @param dag dag * @param completeTaskList completeTaskList - * @return start Vertex list + * @return can submit */ - public static Collection getStartVertex(String parentNodeName, DAG dag, - Map completeTaskList){ - - if(completeTaskList == null){ - completeTaskList = new HashMap<>(); + public static boolean allDependsForbiddenOrEnd(TaskNode taskNode, + DAG dag, + Map skipTaskNodeList, + Map completeTaskList) { + List dependList = taskNode.getDepList(); + if (dependList == null) { + return true; } - Collection startVertexs = null; - if(StringUtils.isNotEmpty(parentNodeName)){ - startVertexs = dag.getSubsequentNodes(parentNodeName); - }else{ - startVertexs = dag.getBeginNode(); + for (String 
dependNodeName : dependList) { + TaskNode dependNode = dag.getNode(dependNodeName); + if (dependNode == null || completeTaskList.containsKey(dependNodeName) + || dependNode.isForbidden() + || skipTaskNodeList.containsKey(dependNodeName)) { + continue; + } else { + return false; + } } + return true; + } - List tmpStartVertexs = new ArrayList<>(); - if(startVertexs!= null){ - tmpStartVertexs.addAll(startVertexs); + /** + * parse the successor nodes of previous node. + * this function parse the condition node to find the right branch. + * also check all the depends nodes forbidden or complete + * + * @param preNodeName + * @return successor nodes + */ + public static Set parsePostNodes(String preNodeName, + Map skipTaskNodeList, + DAG dag, + Map completeTaskList) { + Set postNodeList = new HashSet<>(); + Collection startVertexes = new ArrayList<>(); + if (preNodeName == null) { + startVertexes = dag.getBeginNode(); + } else if (dag.getNode(preNodeName).isConditionsTask()) { + List conditionTaskList = parseConditionTask(preNodeName, skipTaskNodeList, dag, completeTaskList); + startVertexes.addAll(conditionTaskList); + } else { + startVertexes = dag.getSubsequentNodes(preNodeName); } - - for(String start : startVertexs){ - TaskNode startNode = dag.getNode(start); - if(!startNode.isForbidden() && !completeTaskList.containsKey(start)){ - // the start can be submit if not forbidden and not in complete tasks + for (String subsequent : startVertexes) { + TaskNode taskNode = dag.getNode(subsequent); + if (isTaskNodeNeedSkip(taskNode, skipTaskNodeList)) { + setTaskNodeSkip(subsequent, dag, completeTaskList, skipTaskNodeList); continue; } - // then submit the post nodes - Collection postNodes = getStartVertex(start, dag, completeTaskList); - for(String post : postNodes){ - TaskNode postNode = dag.getNode(post); - if(taskNodeCanSubmit(postNode, dag, completeTaskList)){ - tmpStartVertexs.add(post); - } + if (!DagHelper.allDependsForbiddenOrEnd(taskNode, dag, skipTaskNodeList, 
completeTaskList)) { + continue; } - tmpStartVertexs.remove(start); + if (taskNode.isForbidden() || completeTaskList.containsKey(subsequent)) { + postNodeList.addAll(parsePostNodes(subsequent, skipTaskNodeList, dag, completeTaskList)); + continue; + } + postNodeList.add(subsequent); } - return tmpStartVertexs; + return postNodeList; } /** - * the task can be submit when all the depends nodes are forbidden or complete - * @param taskNode taskNode - * @param dag dag - * @param completeTaskList completeTaskList - * @return can submit + * if all of the task dependence are skipped, skip it too. + * + * @param taskNode + * @return */ - public static boolean taskNodeCanSubmit(TaskNode taskNode, - DAG dag, - Map completeTaskList) { - - List dependList = taskNode.getDepList(); - if(dependList == null){ - return true; + private static boolean isTaskNodeNeedSkip(TaskNode taskNode, + Map skipTaskNodeList + ) { + if (CollectionUtils.isEmpty(taskNode.getDepList())) { + return false; } - - for(String dependNodeName : dependList){ - TaskNode dependNode = dag.getNode(dependNodeName); - if(!dependNode.isForbidden() && !completeTaskList.containsKey(dependNodeName)){ + for (String depNode : taskNode.getDepList()) { + if (!skipTaskNodeList.containsKey(depNode)) { return false; } } @@ -324,6 +348,70 @@ public class DagHelper { } + /** + * parse condition task find the branch process + * set skip flag for another one. 
+ * + * @param nodeName + * @return + */ + public static List parseConditionTask(String nodeName, + Map skipTaskNodeList, + DAG dag, + Map completeTaskList) { + List conditionTaskList = new ArrayList<>(); + TaskNode taskNode = dag.getNode(nodeName); + if (!taskNode.isConditionsTask()) { + return conditionTaskList; + } + if (!completeTaskList.containsKey(nodeName)) { + return conditionTaskList; + } + TaskInstance taskInstance = completeTaskList.get(nodeName); + ConditionsParameters conditionsParameters = + JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); + List skipNodeList = new ArrayList<>(); + if (taskInstance.getState().typeIsSuccess()) { + conditionTaskList = conditionsParameters.getSuccessNode(); + skipNodeList = conditionsParameters.getFailedNode(); + } else if (taskInstance.getState().typeIsFailure()) { + conditionTaskList = conditionsParameters.getFailedNode(); + skipNodeList = conditionsParameters.getSuccessNode(); + } else { + conditionTaskList.add(nodeName); + } + for (String failedNode : skipNodeList) { + setTaskNodeSkip(failedNode, dag, completeTaskList, skipTaskNodeList); + } + return conditionTaskList; + } + + /** + * set task node and the post nodes skip flag + * + * @param skipNodeName + * @param dag + * @param completeTaskList + * @param skipTaskNodeList + */ + private static void setTaskNodeSkip(String skipNodeName, + DAG dag, + Map completeTaskList, + Map skipTaskNodeList) { + if (!dag.containsNode(skipNodeName)) { + return; + } + skipTaskNodeList.putIfAbsent(skipNodeName, dag.getNode(skipNodeName)); + Collection postNodeList = dag.getSubsequentNodes(skipNodeName); + for (String post : postNodeList) { + TaskNode postNode = dag.getNode(post); + if (isTaskNodeNeedSkip(postNode, skipTaskNodeList)) { + setTaskNodeSkip(post, dag, completeTaskList, skipTaskNodeList); + } + } + } + + /*** * build dag graph * @param processDag processDag @@ -331,19 +419,19 @@ public class DagHelper { */ public static DAG 
buildDagGraph(ProcessDag processDag) { - DAG dag = new DAG<>(); + DAG dag = new DAG<>(); //add vertex - if (CollectionUtils.isNotEmpty(processDag.getNodes())){ - for (TaskNode node : processDag.getNodes()){ - dag.addNode(node.getName(),node); + if (CollectionUtils.isNotEmpty(processDag.getNodes())) { + for (TaskNode node : processDag.getNodes()) { + dag.addNode(node.getName(), node); } } //add edge - if (CollectionUtils.isNotEmpty(processDag.getEdges())){ - for (TaskNodeRelation edge : processDag.getEdges()){ - dag.addEdge(edge.getStartNode(),edge.getEndNode()); + if (CollectionUtils.isNotEmpty(processDag.getEdges())) { + for (TaskNodeRelation edge : processDag.getEdges()) { + dag.addEdge(edge.getStartNode(), edge.getEndNode()); } } return dag; @@ -351,6 +439,7 @@ public class DagHelper { /** * get process dag + * * @param taskNodeList task node list * @return Process dag */ @@ -378,21 +467,22 @@ public class DagHelper { /** * is there have conditions after the parent node + * * @param parentNodeName * @return */ public static boolean haveConditionsAfterNode(String parentNodeName, DAG dag - ){ + ) { boolean result = false; Set subsequentNodes = dag.getSubsequentNodes(parentNodeName); - if(CollectionUtils.isEmpty(subsequentNodes)){ + if (CollectionUtils.isEmpty(subsequentNodes)) { return result; } - for(String nodeName : subsequentNodes){ + for (String nodeName : subsequentNodes) { TaskNode taskNode = dag.getNode(nodeName); List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); - if(preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()){ + if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) { return true; } } @@ -401,19 +491,20 @@ public class DagHelper { /** * is there have conditions after the parent node + * * @param parentNodeName * @return */ public static boolean haveConditionsAfterNode(String parentNodeName, List taskNodes - ){ + ) { boolean result = false; - if(CollectionUtils.isEmpty(taskNodes)){ + 
if (CollectionUtils.isEmpty(taskNodes)) { return result; } - for(TaskNode taskNode : taskNodes){ + for (TaskNode taskNode : taskNodes) { List preTasksList = JSONUtils.toList(taskNode.getPreTasks(), String.class); - if(preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()){ + if (preTasksList.contains(parentNodeName) && taskNode.isConditionsTask()) { return true; } } diff --git a/dolphinscheduler-dao/src/main/resources/datasource.properties b/dolphinscheduler-dao/src/main/resources/datasource.properties index d55e36addc..fa07c6a6d3 100644 --- a/dolphinscheduler-dao/src/main/resources/datasource.properties +++ b/dolphinscheduler-dao/src/main/resources/datasource.properties @@ -27,6 +27,13 @@ spring.datasource.url=jdbc:postgresql://127.0.0.1:5432/dolphinscheduler spring.datasource.username=test spring.datasource.password=test +# mysql +#spring.datasource.driver-class-name=com.mysql.jdbc.Driver +#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 +#spring.datasource.username=xxxx +#spring.datasource.password=xxxx + + # connection configuration #spring.datasource.initialSize=5 # min connection number diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml index 29c8dfa5a3..02fc9526b3 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml @@ -19,7 +19,8 @@ - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml index d68d4597f4..dd6796998e 100644 --- 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml @@ -18,8 +18,14 @@ + + id + , group_name, description, create_user_id,create_time, update_time + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml index 703b685157..9be5c7c784 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml @@ -18,9 +18,15 @@ + + id + , title, content, alert_status, log, + alertgroup_id, create_time, update_time + - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml index 1fc0f6b77b..49abb4d8f6 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AlertPluginInstanceMapper.xml @@ -18,10 +18,12 @@ - select * from t_ds_alert_plugin_instance - where 1=1 + where 1 = 1 - select command.* from t_ds_command command - join t_ds_process_definition definition on command.process_definition_id = definition.id + select cmd.id, cmd.command_type, cmd.process_definition_id, cmd.command_param, cmd.task_depend_type, cmd.failure_strategy, + cmd.warning_type, cmd.warning_group_id, cmd.schedule_time, cmd.start_time, cmd.executor_id, cmd.dependence, cmd.update_time, + cmd.process_instance_priority, cmd.worker_group + from t_ds_command cmd + join t_ds_process_definition definition on 
cmd.process_definition_id = definition.id where definition.release_state = 1 AND definition.flag = 1 - order by command.update_time asc + order by cmd.update_time asc limit 1 - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml index 15536ae652..b4606db00a 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml @@ -18,30 +18,34 @@ + + id, name, note, type, user_id, connection_params, create_time, update_time + - - - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml index a43cbeca91..2364ebe602 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml @@ -21,10 +21,9 @@ delete from t_ds_relation_datasource_user where user_id = #{userId} - delete from t_ds_relation_datasource_user where datasource_id = #{datasourceId} - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml index 1dc9bbf636..8359a98a91 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml @@ -18,15 +18,68 @@ + 
+ id + , name, version, release_state, project_id, user_id, process_definition_json, description, + global_params, flag, locations, connects, warning_group_id, create_time, timeout, + tenant_id, update_time, modify_by, resource_ids + + @@ -110,4 +183,4 @@ set version = #{version} where id = #{processDefinitionId} - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml index b2d0b85982..ff20e23246 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionVersionMapper.xml @@ -18,24 +18,33 @@ + + id + , process_definition_id, version, process_definition_json, description, global_params,locations,connects, + warning_group_id, create_time, timeout, resource_ids + - + select + from t_ds_process_definition_version where process_definition_id = #{processDefinitionId} order by version desc - + select + from t_ds_process_definition_version where process_definition_id = #{processDefinitionId} - and version = #{version} + and version = #{version} @@ -44,4 +53,5 @@ where process_definition_id = #{processDefinitionId} and version = #{version} - \ No newline at end of file + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml index d217665eab..249fb8669f 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml @@ -18,20 +18,24 @@ + + id, 
parent_process_instance_id, parent_task_instance_id, process_instance_id + delete from t_ds_relation_process_instance where parent_process_instance_id=#{parentProcessId} - @@ -40,4 +44,4 @@ from t_ds_relation_process_instance where parent_process_instance_id = #{parentInstanceId} - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml index 83864b5163..f66163541b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml @@ -18,13 +18,22 @@ + + id, name, process_definition_id, state, recovery, start_time, end_time, run_times,host, + command_type, command_param, task_depend_type, max_try_times, failure_strategy, warning_type, + warning_group_id, schedule_time, command_start_time, global_params, process_instance_json, flag, + update_time, is_sub_process, executor_id, locations, connects, history_cmd, dependence_schedule_times, + process_instance_priority, worker_group, timeout, tenant_id, var_pool + - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml index 5ab0756250..64c5b6cfc6 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml @@ -18,30 +18,52 @@ + + id, name, description, user_id, flag, create_time, update_time + + + ${alias}.id, ${alias}.name, ${alias}.description, ${alias}.user_id, ${alias}.flag, ${alias}.create_time, ${alias}.update_time + - - \ No 
newline at end of file + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml index 006cf080eb..de74d6480c 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml @@ -18,6 +18,9 @@ + + id, user_id, project_id, perm, create_time, update_time + delete from t_ds_relation_project_user where 1=1 @@ -27,10 +30,11 @@ - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml index 423b0dd04d..564dd0354f 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml @@ -18,19 +18,24 @@ + + id, queue_name, queue, create_time, update_time + - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml index 6b1c9b7e34..6ad350835f 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml @@ -18,8 +18,17 @@ + + id, alias, file_name, description, user_id, type, size, create_time, update_time, + pid, full_name, is_directory + + + ${alias}.id, ${alias}.alias, ${alias}.file_name, ${alias}.description, ${alias}.user_id, ${alias}.type, ${alias}.size, ${alias}.create_time, ${alias}.update_time, + ${alias}.pid, 
${alias}.full_name, ${alias}.is_directory + - + delete from t_ds_resources where id in @@ -117,14 +134,15 @@ - + update t_ds_resources full_name=#{resource.fullName}, @@ -137,7 +155,8 @@ - select p_f.name as process_definition_name, p.name as project_name,u.user_name,s.* + select p_f.name as process_definition_name, p.name as project_name,u.user_name, + + + from t_ds_schedules s join t_ds_process_definition p_f on s.process_definition_id = p_f.id join t_ds_project as p on p_f.project_id = p.id @@ -39,9 +50,10 @@ where p.name = #{projectName} - + select + from t_ds_schedules where process_definition_id =#{processDefinitionId} and release_state = 1 - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml index 4fa7f309dc..0aa91b7a12 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml @@ -18,15 +18,20 @@ + + id, user_id, ip, last_login_time + - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml index 143761bf8c..71bd251a4a 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml @@ -18,6 +18,18 @@ + + id, name, task_type, process_definition_id, process_instance_id, task_json, state, submit_time, + start_time, end_time, host, execute_path, log_path, alert_flag, retry_times, pid, app_link, + flag, retry_interval, max_retry_times, task_instance_priority, worker_group, 
executor_id, + first_submit_time, delay_time, var_pool + + + ${alias}.id, ${alias}.name, ${alias}.task_type, ${alias}.process_definition_id, ${alias}.process_instance_id, ${alias}.task_json, ${alias}.state, ${alias}.submit_time, + ${alias}.start_time, ${alias}.end_time, ${alias}.host, ${alias}.execute_path, ${alias}.log_path, ${alias}.alert_flag, ${alias}.retry_times, ${alias}.pid, ${alias}.app_link, + ${alias}.flag, ${alias}.retry_interval, ${alias}.max_retry_times, ${alias}.task_instance_priority, ${alias}.worker_group, ${alias}.executor_id, + ${alias}.first_submit_time, ${alias}.delay_time, ${alias}.var_pool + update t_ds_task_instance set state = #{destStatus} @@ -35,14 +47,16 @@ and flag = 1 diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml index fc9219ce86..db3a282846 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml @@ -18,24 +18,40 @@ + + id, tenant_code, description, queue_id, create_time, update_time + + + ${alias}.id, ${alias}.tenant_code, ${alias}.description, ${alias}.queue_id, ${alias}.create_time, ${alias}.update_time + - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml index e38d1637d6..445810dce1 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml @@ -18,97 +18,143 @@ + + + ${alias}.id, ${alias}.user_id, ${alias}.func_name, ${alias}.class_name, ${alias}.type, ${alias}.arg_types, + 
${alias}.database, ${alias}.description, ${alias}.resource_id, ${alias}.resource_name, ${alias}.create_time, ${alias}.update_time + + - \ No newline at end of file + + + update t_ds_udfs + + resource_name=#{udf.resourceName}, + update_time=#{udf.updateTime} + + + id=#{udf.id} + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml index f219ad3efe..f4263eb54b 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml @@ -18,33 +18,50 @@ + + id, user_name, user_password, user_type, email, phone, tenant_id, create_time, update_time, queue, state + + + ${alias}.id, ${alias}.user_name, ${alias}.user_password, ${alias}.user_type, ${alias}.email, ${alias}.phone, ${alias}.tenant_id, + ${alias}.create_time, ${alias}.update_time, ${alias}.queue, ${alias}.state + update t_ds_user set queue = #{newQueue} where queue = #{oldQueue} - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml index 823ea0f774..0772be8a77 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/WorkFlowLineageMapper.xml @@ -100,4 +100,4 @@ where source_id = #{id}::text; - \ No newline at end of file + diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java index 636a9ca958..2046735221 100644 
--- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapperTest.java @@ -132,6 +132,43 @@ public class ProcessDefinitionMapperTest { Assert.assertNotEquals(dataSources.size(), 0); } + /** + * test verifyByDefineName + */ + @Test + public void testVerifyByDefineName() { + Project project = new Project(); + project.setName("ut project"); + project.setUserId(4); + projectMapper.insert(project); + Queue queue = new Queue(); + queue.setQueue("queue"); + queue.setQueueName("queue name"); + queueMapper.insert(queue); + Tenant tenant = new Tenant(); + tenant.setTenantCode("tenant"); + tenant.setQueueId(queue.getId()); + tenant.setDescription("t"); + tenantMapper.insert(tenant); + User user = new User(); + user.setUserName("hello"); + user.setUserPassword("pwd"); + user.setUserType(UserType.GENERAL_USER); + user.setTenantId(tenant.getId()); + userMapper.insert(user); + //insertOne + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(project.getId()); + processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinition.setTenantId(tenant.getId()); + processDefinition.setUserId(user.getId()); + processDefinitionMapper.insert(processDefinition); + ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(10, "xxx"); + Assert.assertEquals(definition, null); + } + /** * test query by definition name */ @@ -174,6 +211,64 @@ public class ProcessDefinitionMapperTest { Assert.assertNotEquals(processDefinition1, null); } + /** + * test queryDefinitionListByTenant + */ + @Test + public void testQueryDefinitionListByTenant() { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(888); + 
processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinition.setTenantId(999); + processDefinition.setUserId(1234); + processDefinitionMapper.insert(processDefinition); + List definitions = processDefinitionMapper.queryDefinitionListByTenant(999); + Assert.assertNotEquals(definitions.size(), 0); + } + + /** + * test queryByDefineId + */ + @Test + public void testQueryByDefineId() { + Project project = new Project(); + project.setName("ut project"); + project.setUserId(4); + projectMapper.insert(project); + + Queue queue = new Queue(); + queue.setQueue("queue"); + queue.setQueueName("queue name"); + queueMapper.insert(queue); + + Tenant tenant = new Tenant(); + tenant.setTenantCode("tenant"); + tenant.setQueueId(queue.getId()); + tenant.setDescription("t"); + tenantMapper.insert(tenant); + + User user = new User(); + user.setUserName("hello"); + user.setUserPassword("pwd"); + user.setUserType(UserType.GENERAL_USER); + user.setTenantId(tenant.getId()); + userMapper.insert(user); + + //insertOne + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setName("def 1"); + processDefinition.setProjectId(project.getId()); + processDefinition.setUpdateTime(new Date()); + processDefinition.setCreateTime(new Date()); + processDefinition.setTenantId(tenant.getId()); + processDefinition.setUserId(user.getId()); + processDefinitionMapper.insert(processDefinition); + ProcessDefinition definition = processDefinitionMapper.queryByDefineId(333); + Assert.assertEquals(definition, null); + } + /** * test page */ @@ -271,4 +366,4 @@ public class ProcessDefinitionMapperTest { ProcessDefinition processDefinition1 = processDefinitionMapper.selectById(processDefinition.getId()); Assert.assertEquals(expectedVersion, processDefinition1.getVersion()); } -} \ No newline at end of file +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java 
b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java index 76741a7db9..d36a26ffd3 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java @@ -308,7 +308,6 @@ public class ResourceMapperTest { Tenant tenant = new Tenant(); - tenant.setTenantName("ut tenant "); tenant.setTenantCode("ut tenant code for resource"); int tenantInsertStatus = tenantMapper.insert(tenant); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java index 017527137b..a225f7654c 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -286,6 +286,7 @@ public class TaskInstanceMapperTest { task.getProcessInstanceId(), "", "", + "", 0, new int[0], "", diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java index 493e85b39c..a1860709bc 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TenantMapperTest.java @@ -137,12 +137,12 @@ public class TenantMapperTest { Tenant tenant = insertOne(); tenant.setTenantCode("ut code"); - tenant.setTenantName("ut name"); tenant.setQueueId(queue.getId()); tenantMapper.updateById(tenant); Page page = new Page(1,3); - IPage tenantIPage = tenantMapper.queryTenantPaging(page, tenant.getTenantName()); + //tenant.getTenantCode() used instead of 
tenant.getTenantName() + IPage tenantIPage = tenantMapper.queryTenantPaging(page, tenant.getTenantCode()); Assert.assertNotEquals(tenantIPage.getTotal(), 0); } diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java index 47d8d89b40..ca672e0281 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapperTest.java @@ -19,11 +19,13 @@ package org.apache.dolphinscheduler.dao.mapper; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.dao.entity.UDFUser; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -33,6 +35,7 @@ import org.springframework.test.annotation.Rollback; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.transaction.annotation.Transactional; +import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; @@ -56,9 +59,10 @@ public class UdfFuncMapperTest { /** * insert one udf + * * @return UdfFunc */ - private UdfFunc insertOne(){ + private UdfFunc insertOne() { UdfFunc udfFunc = new UdfFunc(); udfFunc.setUserId(1); udfFunc.setFuncName("dolphin_udf_func"); @@ -74,9 +78,10 @@ public class UdfFuncMapperTest { /** * insert one udf + * * @return */ - private UdfFunc insertOne(User user){ + private UdfFunc insertOne(User user) { UdfFunc udfFunc = new UdfFunc(); udfFunc.setUserId(user.getId()); udfFunc.setFuncName("dolphin_udf_func"); @@ 
-92,9 +97,10 @@ public class UdfFuncMapperTest { /** * insert one user + * * @return User */ - private User insertOneUser(){ + private User insertOneUser() { User user = new User(); user.setUserName("user1"); user.setUserPassword("1"); @@ -109,9 +115,10 @@ public class UdfFuncMapperTest { /** * insert one user + * * @return User */ - private User insertOneUser(String userName){ + private User insertOneUser(String userName) { User user = new User(); user.setUserName(userName); user.setUserPassword("1"); @@ -126,11 +133,12 @@ public class UdfFuncMapperTest { /** * insert UDFUser - * @param user user + * + * @param user user * @param udfFunc udf func * @return UDFUser */ - private UDFUser insertOneUDFUser(User user,UdfFunc udfFunc){ + private UDFUser insertOneUDFUser(User user, UdfFunc udfFunc) { UDFUser udfUser = new UDFUser(); udfUser.setUdfId(udfFunc.getId()); udfUser.setUserId(user.getId()); @@ -142,9 +150,10 @@ public class UdfFuncMapperTest { /** * create general user + * * @return User */ - private User createGeneralUser(String userName){ + private User createGeneralUser(String userName) { User user = new User(); user.setUserName(userName); user.setUserPassword("1"); @@ -161,7 +170,7 @@ public class UdfFuncMapperTest { * test update */ @Test - public void testUpdate(){ + public void testUpdate() { //insertOne UdfFunc udfFunc = insertOne(); udfFunc.setResourceName("dolphin_resource_update"); @@ -178,7 +187,7 @@ public class UdfFuncMapperTest { * test delete */ @Test - public void testDelete(){ + public void testDelete() { //insertOne UdfFunc udfFunc = insertOne(); //delete @@ -190,7 +199,7 @@ public class UdfFuncMapperTest { * test query */ @Test - public void testQuery(){ + public void testQuery() { //insertOne UdfFunc udfFunc = insertOne(); //query @@ -207,9 +216,9 @@ public class UdfFuncMapperTest { UdfFunc udfFunc = insertOne(); //insertOne UdfFunc udfFunc1 = insertOne(); - int[] idArray = new int[]{udfFunc.getId(),udfFunc1.getId()}; + int[] idArray = new 
int[]{udfFunc.getId(), udfFunc1.getId()}; //queryUdfByIdStr - List udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray,""); + List udfFuncList = udfFuncMapper.queryUdfByIdStr(idArray, ""); Assert.assertNotEquals(udfFuncList.size(), 0); } @@ -223,8 +232,8 @@ public class UdfFuncMapperTest { //insertOne UdfFunc udfFunc = insertOne(user); //queryUdfFuncPaging - Page page = new Page(1,3); - IPage udfFuncIPage = udfFuncMapper.queryUdfFuncPaging(page,user.getId(),""); + Page page = new Page(1, 3); + IPage udfFuncIPage = udfFuncMapper.queryUdfFuncPaging(page, user.getId(), ""); Assert.assertNotEquals(udfFuncIPage.getTotal(), 0); } @@ -279,7 +288,7 @@ public class UdfFuncMapperTest { } @Test - public void testListAuthorizedUdfFunc(){ + public void testListAuthorizedUdfFunc() { //create general user User generalUser1 = createGeneralUser("user1"); User generalUser2 = createGeneralUser("user2"); @@ -289,18 +298,30 @@ public class UdfFuncMapperTest { UdfFunc unauthorizdUdfFunc = insertOne(generalUser2); //udf function ids - Integer[] udfFuncIds = new Integer[]{udfFunc.getId(),unauthorizdUdfFunc.getId()}; + Integer[] udfFuncIds = new Integer[]{udfFunc.getId(), unauthorizdUdfFunc.getId()}; List authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds); - Assert.assertEquals(generalUser1.getId(),udfFunc.getUserId()); - Assert.assertNotEquals(generalUser1.getId(),unauthorizdUdfFunc.getUserId()); + Assert.assertEquals(generalUser1.getId(), udfFunc.getUserId()); + Assert.assertNotEquals(generalUser1.getId(), unauthorizdUdfFunc.getUserId()); Assert.assertFalse(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds))); //authorize object unauthorizdUdfFunc to generalUser1 - insertOneUDFUser(generalUser1,unauthorizdUdfFunc); + insertOneUDFUser(generalUser1, unauthorizdUdfFunc); authorizedUdfFunc = udfFuncMapper.listAuthorizedUdfFunc(generalUser1.getId(), udfFuncIds); 
Assert.assertTrue(authorizedUdfFunc.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(udfFuncIds))); } + + @Test + public void batchUpdateUdfFuncTest() { + //create general user + User generalUser1 = createGeneralUser("user1"); + UdfFunc udfFunc = insertOne(generalUser1); + udfFunc.setResourceName("/updateTest"); + List udfFuncList = new ArrayList<>(); + udfFuncList.add(udfFunc); + Assert.assertTrue(udfFuncMapper.batchUpdateUdfFunc(udfFuncList) > 0); + + } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java index d037c441b4..7f7ceb480a 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/UserMapperTest.java @@ -161,7 +161,6 @@ public class UserMapperTest { private Tenant insertOneTenant() { Tenant tenant = new Tenant(); tenant.setTenantCode("dolphin"); - tenant.setTenantName("dolphin test"); tenant.setDescription("dolphin user use"); tenant.setQueue("1"); tenant.setCreateTime(new Date()); @@ -178,7 +177,6 @@ public class UserMapperTest { private Tenant insertOneTenant(Queue queue) { Tenant tenant = new Tenant(); tenant.setTenantCode("dolphin"); - tenant.setTenantName("dolphin test"); tenant.setDescription("dolphin user use"); tenant.setQueueId(queue.getId()); tenant.setQueue(queue.getQueue()); diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java index 14dfe0b750..88e71c4891 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java @@ -18,7 +18,9 @@ 
package org.apache.dolphinscheduler.dao.utils; import com.fasterxml.jackson.core.JsonProcessingException; + import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskDependType; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; @@ -27,6 +29,7 @@ import org.apache.dolphinscheduler.common.process.ProcessDag; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.TaskInstance; + import org.junit.Assert; import org.junit.Test; @@ -34,6 +37,8 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; + /** * dag helper test @@ -41,15 +46,17 @@ import java.util.Map; public class DagHelperTest { /** * test task node can submit + * * @throws JsonProcessingException if error throws JsonProcessingException */ @Test public void testTaskNodeCanSubmit() throws JsonProcessingException { - //1->2->3->5 - //4->3 + //1->2->3->5->7 + //4->3->6 DAG dag = generateDag(); TaskNode taskNode3 = dag.getNode("3"); - Map completeTaskList = new HashMap<>(); + Map completeTaskList = new HashMap<>(); + Map skipNodeList = new HashMap<>(); completeTaskList.putIfAbsent("1", new TaskInstance()); Boolean canSubmit = false; @@ -58,27 +65,206 @@ public class DagHelperTest { node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode nodex = dag.getNode("4"); nodex.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); - canSubmit = DagHelper.taskNodeCanSubmit(taskNode3, dag, completeTaskList); + canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, true); // 2forbidden, 3 cannot be submit completeTaskList.putIfAbsent("2", new TaskInstance()); TaskNode nodey = dag.getNode("4"); 
nodey.setRunFlag(""); - canSubmit = DagHelper.taskNodeCanSubmit(taskNode3, dag, completeTaskList); + canSubmit = DagHelper.allDependsForbiddenOrEnd(taskNode3, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, false); // 2/3 forbidden submit 5 TaskNode node3 = dag.getNode("3"); node3.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); + TaskNode node8 = dag.getNode("8"); + node8.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); TaskNode node5 = dag.getNode("5"); - canSubmit = DagHelper.taskNodeCanSubmit(node5, dag, completeTaskList); + canSubmit = DagHelper.allDependsForbiddenOrEnd(node5, dag, skipNodeList, completeTaskList); Assert.assertEquals(canSubmit, true); - } + } /** - * 1->2->3->5 - * 4->3 + * test parse post node list + */ + @Test + public void testParsePostNodeList() throws JsonProcessingException { + DAG dag = generateDag(); + Map completeTaskList = new HashMap<>(); + Map skipNodeList = new HashMap<>(); + + Set postNodes = null; + //complete : null + // expect post: 1/4 + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("1")); + Assert.assertTrue(postNodes.contains("4")); + + //complete : 1 + // expect post: 2/4 + completeTaskList.put("1", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("2")); + Assert.assertTrue(postNodes.contains("4")); + + // complete : 1/2 + // expect post: 4 + completeTaskList.put("2", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("4")); + Assert.assertTrue(postNodes.contains("8")); + + // complete : 1/2/4 + // expect post: 3 + completeTaskList.put("4", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, 
skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("3")); + Assert.assertTrue(postNodes.contains("8")); + + // complete : 1/2/4/3 + // expect post: 8/6 + completeTaskList.put("3", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("8")); + Assert.assertTrue(postNodes.contains("6")); + + // complete : 1/2/4/3/8 + // expect post: 6/5 + completeTaskList.put("8", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("5")); + Assert.assertTrue(postNodes.contains("6")); + // complete : 1/2/4/3/5/6/8 + // expect post: 7 + completeTaskList.put("6", new TaskInstance()); + completeTaskList.put("5", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(1, postNodes.size()); + Assert.assertTrue(postNodes.contains("7")); + } + + /** + * test forbidden post node + * + * @throws JsonProcessingException + */ + @Test + public void testForbiddenPostNode() throws JsonProcessingException { + DAG dag = generateDag(); + Map completeTaskList = new HashMap<>(); + Map skipNodeList = new HashMap<>(); + Set postNodes = null; + // dag: 1-2-3-5-7 4-3-6 2-8-5-7 + // forbid:2 complete:1 post:4/8 + completeTaskList.put("1", new TaskInstance()); + TaskNode node2 = dag.getNode("2"); + node2.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("4")); + Assert.assertTrue(postNodes.contains("8")); + + //forbid:2/4 complete:1 post:3/8 + TaskNode node4 = dag.getNode("4"); + node4.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); + 
postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(2, postNodes.size()); + Assert.assertTrue(postNodes.contains("3")); + Assert.assertTrue(postNodes.contains("8")); + + //forbid:2/4/5 complete:1/8 post:3 + completeTaskList.put("8", new TaskInstance()); + TaskNode node5 = dag.getNode("5"); + node5.setRunFlag(Constants.FLOWNODE_RUN_FLAG_FORBIDDEN); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(1, postNodes.size()); + Assert.assertTrue(postNodes.contains("3")); + } + + /** + * test condition post node + * + * @throws JsonProcessingException + */ + @Test + public void testConditionPostNode() throws JsonProcessingException { + DAG dag = generateDag(); + Map completeTaskList = new HashMap<>(); + Map skipNodeList = new HashMap<>(); + Set postNodes = null; + // dag: 1-2-3-5-7 4-3-6 2-8-5-7 + // 3-if + completeTaskList.put("1", new TaskInstance()); + completeTaskList.put("2", new TaskInstance()); + completeTaskList.put("4", new TaskInstance()); + TaskNode node3 = dag.getNode("3"); + node3.setType("CONDITIONS"); + node3.setConditionResult("{\n" + + + " \"successNode\": [5\n" + + + " ],\n" + + + " \"failedNode\": [6\n" + + + " ]\n" + + + " }"); + completeTaskList.remove("3"); + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setState(ExecutionStatus.SUCCESS); + //complete 1/2/3/4 expect:8 + completeTaskList.put("3", taskInstance); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(1, postNodes.size()); + Assert.assertTrue(postNodes.contains("8")); + + //2.complete 1/2/3/4/8 expect:5 skip:6 + completeTaskList.put("8", new TaskInstance()); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertTrue(postNodes.contains("5")); + Assert.assertEquals(1, skipNodeList.size()); + Assert.assertTrue(skipNodeList.containsKey("6")); + + // 3.complete 1/2/3/4/5/8 expect 
post:7 skip:6 + skipNodeList.clear(); + TaskInstance taskInstance1 = new TaskInstance(); + taskInstance.setState(ExecutionStatus.SUCCESS); + completeTaskList.put("5", taskInstance1); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(1, postNodes.size()); + Assert.assertTrue(postNodes.contains("7")); + Assert.assertEquals(1, skipNodeList.size()); + Assert.assertTrue(skipNodeList.containsKey("6")); + + // dag: 1-2-3-5-7 4-3-6 + // 3-if , complete:1/2/3/4 + // 1.failure:3 expect post:6 skip:5/7 + skipNodeList.clear(); + completeTaskList.remove("3"); + taskInstance = new TaskInstance(); + taskInstance.setState(ExecutionStatus.FAILURE); + completeTaskList.put("3", taskInstance); + postNodes = DagHelper.parsePostNodes(null, skipNodeList, dag, completeTaskList); + Assert.assertEquals(1, postNodes.size()); + Assert.assertTrue(postNodes.contains("6")); + Assert.assertEquals(2, skipNodeList.size()); + Assert.assertTrue(skipNodeList.containsKey("5")); + Assert.assertTrue(skipNodeList.containsKey("7")); + } + + /** + * 1->2->3->5->7 + * 4->3->6 + * 2->8->5->7 + * * @return dag * @throws JsonProcessingException if error throws JsonProcessingException */ @@ -87,11 +273,13 @@ public class DagHelperTest { TaskNode node1 = new TaskNode(); node1.setId("1"); node1.setName("1"); + node1.setType("SHELL"); taskNodeList.add(node1); TaskNode node2 = new TaskNode(); node2.setId("2"); node2.setName("2"); + node2.setType("SHELL"); List dep2 = new ArrayList<>(); dep2.add("1"); node2.setDepList(dep2); @@ -101,11 +289,13 @@ public class DagHelperTest { TaskNode node4 = new TaskNode(); node4.setId("4"); node4.setName("4"); + node4.setType("SHELL"); taskNodeList.add(node4); TaskNode node3 = new TaskNode(); node3.setId("3"); node3.setName("3"); + node3.setType("SHELL"); List dep3 = new ArrayList<>(); dep3.add("2"); dep3.add("4"); @@ -115,29 +305,61 @@ public class DagHelperTest { TaskNode node5 = new TaskNode(); node5.setId("5"); 
node5.setName("5"); + node5.setType("SHELL"); List dep5 = new ArrayList<>(); dep5.add("3"); + dep5.add("8"); node5.setDepList(dep5); taskNodeList.add(node5); + TaskNode node6 = new TaskNode(); + node6.setId("6"); + node6.setName("6"); + node6.setType("SHELL"); + List dep6 = new ArrayList<>(); + dep6.add("3"); + node6.setDepList(dep6); + taskNodeList.add(node6); + + TaskNode node7 = new TaskNode(); + node7.setId("7"); + node7.setName("7"); + node7.setType("SHELL"); + List dep7 = new ArrayList<>(); + dep7.add("5"); + node7.setDepList(dep7); + taskNodeList.add(node7); + + TaskNode node8 = new TaskNode(); + node8.setId("8"); + node8.setName("8"); + node8.setType("SHELL"); + List dep8 = new ArrayList<>(); + dep8.add("2"); + node8.setDepList(dep8); + taskNodeList.add(node8); + List startNodes = new ArrayList<>(); - List recoveryNodes = new ArrayList<>(); + List recoveryNodes = new ArrayList<>(); List destTaskNodeList = DagHelper.generateFlowNodeListByStartNode(taskNodeList, startNodes, recoveryNodes, TaskDependType.TASK_POST); - List taskNodeRelations =DagHelper.generateRelationListByFlowNodes(destTaskNodeList); + List taskNodeRelations = DagHelper.generateRelationListByFlowNodes(destTaskNodeList); ProcessDag processDag = new ProcessDag(); processDag.setEdges(taskNodeRelations); processDag.setNodes(destTaskNodeList); - return DagHelper.buildDagGraph(processDag); } @Test public void testBuildDagGraph() { - String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + 
+ + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + + + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + + + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; ProcessData processData = JSONUtils.parseObject(shellJson, ProcessData.class); diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index e5f3d54a75..1ec74fd409 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-dist/release-docs/LICENSE b/dolphinscheduler-dist/release-docs/LICENSE index 6fa1ea9279..42a7fad61a 100644 --- a/dolphinscheduler-dist/release-docs/LICENSE +++ b/dolphinscheduler-dist/release-docs/LICENSE @@ -230,7 +230,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0 commons-beanutils 1.7.0 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.7.0, Apache 2.0 commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0 - commons-codec 1.6: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.6, Apache 2.0 + commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0 commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0 commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0 commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0 @@ -287,15 +287,15 @@ The text of each license is also included at licenses/LICENSE-[project].txt. httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0 httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0 httpmime 4.5.7: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.7, Apache 2.0 - jackson-annotations 2.9.8: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.9.8, Apache 2.0 - jackson-core 2.9.8: https://github.com/FasterXML/jackson-core, Apache 2.0 + jackson-annotations 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.9.10, Apache 2.0 + jackson-core 2.9.10: https://github.com/FasterXML/jackson-core, Apache 2.0 jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0 - jackson-databind 2.9.8: https://github.com/FasterXML/jackson-databind, Apache 2.0 - jackson-datatype-jdk8 2.9.8: 
https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.8, Apache 2.0 - jackson-datatype-jsr310 2.9.8: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.8, Apache 2.0 + jackson-databind 2.9.10: https://github.com/FasterXML/jackson-databind, Apache 2.0 + jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0 + jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0 jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1 jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0 - jackson-module-parameter-names 2.9.8: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.8, Apache 2.0 + jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0 jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1 javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0 javax.jdo-3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0 @@ -344,25 +344,25 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0 parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0 poi 3.17: https://mvnrepository.com/artifact/org.apache.poi/poi/3.17, Apache 2.0 - quartz 2.2.3: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.2.3, Apache 2.0 - quartz-jobs 2.2.3: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.2.3, Apache 2.0 + quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0 + quartz-jobs 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0 snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0 snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0 snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0 - spring-aop 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.5.RELEASE, Apache 2.0 - spring-beans 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.5.RELEASE, Apache 2.0 - spring-boot 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.3.RELEASE, Apache 2.0 - spring-boot-autoconfigure 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-aop 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-jdbc 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-jetty 2.1.3.RELEASE: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-json 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-logging 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.3.RELEASE, Apache 2.0 - spring-boot-starter-web 2.1.3.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.3.RELEASE, Apache 2.0 - spring-context 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.5.RELEASE, Apache 2.0 - spring-core 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core, Apache 2.0 - spring-expression 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression, Apache 2.0 + spring-aop 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.18.RELEASE, Apache 2.0 + spring-beans 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.18.RELEASE, Apache 2.0 + spring-boot 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.17.RELEASE, Apache 2.0 + spring-boot-autoconfigure 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-aop 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-jdbc 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-jetty 2.1.17.RELEASE: 
https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-json 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-logging 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.17.RELEASE, Apache 2.0 + spring-boot-starter-web 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.17.RELEASE, Apache 2.0 + spring-context 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.18.RELEASE, Apache 2.0 + spring-core 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core/5.1.18.RELEASE, Apache 2.0 + spring-expression 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression/5.1.18.RELEASE, Apache 2.0 springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0 springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0 springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0 @@ -370,13 +370,13 @@ The text of each license is also included at licenses/LICENSE-[project].txt. 
springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0 springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0 springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0 - spring-jcl 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.5.RELEASE, Apache 2.0 - spring-jdbc 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.5.RELEASE, Apache 2.0 + spring-jcl 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.18.RELEASE, Apache 2.0 + spring-jdbc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.18.RELEASE, Apache 2.0 spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0 spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0 - spring-tx 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.5.RELEASE, Apache 2.0 - spring-web 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.5.RELEASE, Apache 2.0 - spring-webmvc 5.1.5.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.5.RELEASE, Apache 2.0 + spring-tx 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.18.RELEASE, Apache 2.0 + spring-web 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.18.RELEASE, Apache 2.0 + spring-webmvc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.18.RELEASE, Apache 2.0 swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0 swagger-bootstrap-ui 
1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0 swagger-models 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.20, Apache 2.0 @@ -387,7 +387,6 @@ The text of each license is also included at licenses/LICENSE-[project].txt. zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0 presto-jdbc 0.238.1 https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1 - ======================================================================== BSD licenses ======================================================================== diff --git a/dolphinscheduler-dist/release-docs/NOTICE b/dolphinscheduler-dist/release-docs/NOTICE index 61076b9780..6a6741b796 100644 --- a/dolphinscheduler-dist/release-docs/NOTICE +++ b/dolphinscheduler-dist/release-docs/NOTICE @@ -384,8 +384,8 @@ This product contains the Maven wrapper scripts from 'Maven Wrapper', that provi Spring Framework NOTICE ======================================================================== -Spring Framework 5.1.5.RELEASE -Copyright (c) 2002-2019 Pivotal, Inc. +Spring Framework 5.1.18.RELEASE +Copyright (c) 2002-2020 Pivotal, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). You may not use this product except in compliance with diff --git a/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-element-ui b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-element-ui index d4462f3dcf..4b275d033b 100644 --- a/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-element-ui +++ b/dolphinscheduler-dist/release-docs/licenses/ui-licenses/LICENSE-element-ui @@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file +SOFTWARE. \ No newline at end of file diff --git a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml index 9b61c5e094..34f2cd2a9a 100644 --- a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml +++ b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml @@ -112,8 +112,8 @@ - - + + ${basedir}/../dolphinscheduler-service/src/main/resources @@ -226,4 +226,4 @@ - \ No newline at end of file + diff --git a/dolphinscheduler-microbench/pom.xml b/dolphinscheduler-microbench/pom.xml index 6b11b2e2d6..606ecd3c38 100644 --- a/dolphinscheduler-microbench/pom.xml +++ b/dolphinscheduler-microbench/pom.xml @@ -21,7 +21,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 diff --git a/dolphinscheduler-remote/pom.xml b/dolphinscheduler-remote/pom.xml index 4d398f3069..3ac7b914a5 100644 --- a/dolphinscheduler-remote/pom.xml +++ b/dolphinscheduler-remote/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 @@ -35,6 +35,10 @@ + + org.apache.dolphinscheduler + dolphinscheduler-common + io.netty netty-all @@ -48,10 +52,6 @@ junit test - - com.fasterxml.jackson.core - jackson-databind - diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java index 38f00fb4fd..c1aea90393 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java +++ 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java @@ -17,17 +17,6 @@ package org.apache.dolphinscheduler.remote; -import io.netty.bootstrap.Bootstrap; -import io.netty.channel.Channel; -import io.netty.channel.ChannelFuture; -import io.netty.channel.ChannelFutureListener; -import io.netty.channel.ChannelInitializer; -import io.netty.channel.ChannelOption; -import io.netty.channel.EventLoopGroup; -import io.netty.channel.epoll.EpollEventLoopGroup; -import io.netty.channel.nio.NioEventLoopGroup; -import io.netty.channel.socket.SocketChannel; - import org.apache.dolphinscheduler.remote.codec.NettyDecoder; import org.apache.dolphinscheduler.remote.codec.NettyEncoder; import org.apache.dolphinscheduler.remote.command.Command; @@ -41,19 +30,40 @@ import org.apache.dolphinscheduler.remote.future.ReleaseSemaphore; import org.apache.dolphinscheduler.remote.future.ResponseFuture; import org.apache.dolphinscheduler.remote.handler.NettyClientHandler; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.CallerThreadExecutePolicy; +import org.apache.dolphinscheduler.remote.utils.Constants; +import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; import org.apache.dolphinscheduler.remote.utils.NettyUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.net.InetSocketAddress; -import java.util.concurrent.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.Semaphore; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; import 
java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.epoll.EpollEventLoopGroup; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.handler.timeout.IdleStateHandler; + /** * remoting netty client */ @@ -162,11 +172,10 @@ public class NettyRemotingClient { .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, clientConfig.getConnectTimeoutMillis()) .handler(new ChannelInitializer() { @Override - public void initChannel(SocketChannel ch) throws Exception { - ch.pipeline().addLast( - new NettyDecoder(), - clientHandler, - encoder); + public void initChannel(SocketChannel ch) { + ch.pipeline() + .addLast("client-idle-handler", new IdleStateHandler(Constants.NETTY_CLIENT_HEART_BEAT_TIME, 0, 0, TimeUnit.MILLISECONDS)) + .addLast(new NettyDecoder(), clientHandler, encoder); } }); this.responseFutureExecutor.scheduleAtFixedRate(new Runnable() { diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java index ad5c95bb38..867cf4dc56 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java @@ -29,6 +29,7 @@ import org.apache.dolphinscheduler.remote.utils.NettyUtils; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; +import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -39,11 +40,11 @@ import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; -import io.netty.channel.ChannelPipeline; import io.netty.channel.EventLoopGroup; import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; +import io.netty.handler.timeout.IdleStateHandler; /** * remoting netty server @@ -183,10 +184,11 @@ public class NettyRemotingServer { * @param ch socket channel */ private void initNettyChannel(SocketChannel ch) { - ChannelPipeline pipeline = ch.pipeline(); - pipeline.addLast("encoder", encoder); - pipeline.addLast("decoder", new NettyDecoder()); - pipeline.addLast("handler", serverHandler); + ch.pipeline() + .addLast("encoder", encoder) + .addLast("decoder", new NettyDecoder()) + .addLast("server-idle-handle", new IdleStateHandler(0, 0, Constants.NETTY_SERVER_HEART_BEAT_TIME, TimeUnit.MILLISECONDS)) + .addLast("handler", serverHandler); } /** diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java index 179ae1bef8..343e8c63dd 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java @@ -22,6 +22,7 @@ import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.ReplayingDecoder; import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandContext; import org.apache.dolphinscheduler.remote.command.CommandHeader; import 
org.apache.dolphinscheduler.remote.command.CommandType; import org.slf4j.Logger; @@ -54,16 +55,34 @@ public class NettyDecoder extends ReplayingDecoder { switch (state()){ case MAGIC: checkMagic(in.readByte()); + checkpoint(State.VERSION); + // fallthru + case VERSION: + checkVersion(in.readByte()); checkpoint(State.COMMAND); + // fallthru case COMMAND: commandHeader.setType(in.readByte()); checkpoint(State.OPAQUE); + // fallthru case OPAQUE: commandHeader.setOpaque(in.readLong()); + checkpoint(State.CONTEXT_LENGTH); + // fallthru + case CONTEXT_LENGTH: + commandHeader.setContextLength(in.readInt()); + checkpoint(State.CONTEXT); + // fallthru + case CONTEXT: + byte[] context = new byte[commandHeader.getContextLength()]; + in.readBytes(context); + commandHeader.setContext(context); checkpoint(State.BODY_LENGTH); + // fallthru case BODY_LENGTH: commandHeader.setBodyLength(in.readInt()); checkpoint(State.BODY); + // fallthru case BODY: byte[] body = new byte[commandHeader.getBodyLength()]; in.readBytes(body); @@ -71,6 +90,7 @@ public class NettyDecoder extends ReplayingDecoder { Command packet = new Command(); packet.setType(commandType(commandHeader.getType())); packet.setOpaque(commandHeader.getOpaque()); + packet.setContext(CommandContext.valueOf(commandHeader.getContext())); packet.setBody(body); out.add(packet); // @@ -105,10 +125,23 @@ public class NettyDecoder extends ReplayingDecoder { } } + /** + * check version + * @param version + */ + private void checkVersion(byte version) { + if (version != Command.VERSION) { + throw new IllegalArgumentException("illegal protocol [version]" + version); + } + } + enum State{ MAGIC, + VERSION, COMMAND, OPAQUE, + CONTEXT_LENGTH, + CONTEXT, BODY_LENGTH, BODY; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java index 4e9836a26f..785ee5aaf2 100644 --- 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java @@ -42,11 +42,18 @@ public class NettyEncoder extends MessageToByteEncoder { throw new Exception("encode msg is null"); } out.writeByte(Command.MAGIC); + out.writeByte(Command.VERSION); out.writeByte(msg.getType().ordinal()); out.writeLong(msg.getOpaque()); + writeContext(msg, out); out.writeInt(msg.getBody().length); out.writeBytes(msg.getBody()); } + private void writeContext(Command msg, ByteBuf out) { + byte[] headerBytes = msg.getContext().toBytes(); + out.writeInt(headerBytes.length); + out.writeBytes(headerBytes); + } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java index ed46e1ff51..9baa321a9e 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java @@ -28,6 +28,7 @@ public class Command implements Serializable { private static final AtomicLong REQUEST_ID = new AtomicLong(1); public static final byte MAGIC = (byte) 0xbabe; + public static final byte VERSION = 0; public Command(){ this.opaque = REQUEST_ID.getAndIncrement(); @@ -47,6 +48,11 @@ public class Command implements Serializable { */ private long opaque; + /** + * request context + */ + private CommandContext context = new CommandContext(); + /** * data body */ @@ -76,6 +82,14 @@ public class Command implements Serializable { this.body = body; } + public CommandContext getContext() { + return context; + } + + public void setContext(CommandContext context) { + this.context = context; + } + @Override public int hashCode() { final int prime = 31; diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandContext.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandContext.java new file mode 100644 index 0000000000..c9febee6fc --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandContext.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.io.Serializable; +import java.util.LinkedHashMap; +import java.util.Map; + +/** + * command context + */ +public class CommandContext implements Serializable { + + private Map items = new LinkedHashMap<>(); + + public Map getItems() { + return items; + } + + public void setItems(Map items) { + this.items = items; + } + + public void put(String key, String value) { + items.put(key, value); + } + + public String get(String key) { + return items.get(key); + } + + public byte[] toBytes() { + return JSONUtils.toJsonByteArray(this); + } + + public static CommandContext valueOf(byte[] src) { + return JSONUtils.parseObject(src, CommandContext.class); + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java index 78948a5c0c..9e83a426f9 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java @@ -33,6 +33,16 @@ public class CommandHeader implements Serializable { */ private long opaque; + /** + * context length + */ + private int contextLength; + + /** + * context + */ + private byte[] context; + /** * body length */ @@ -61,4 +71,20 @@ public class CommandHeader implements Serializable { public void setOpaque(long opaque) { this.opaque = opaque; } + + public int getContextLength() { + return contextLength; + } + + public void setContextLength(int contextLength) { + this.contextLength = contextLength; + } + + public byte[] getContext() { + return context; + } + + public void setContext(byte[] context) { + this.context = context; + } } diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java index 753216995e..6c7377db17 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java @@ -1 +1,125 @@ -/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.dolphinscheduler.remote.command; public enum CommandType { /** * remove task log request, */ REMOVE_TAK_LOG_REQUEST, /** * remove task log response */ REMOVE_TAK_LOG_RESPONSE, /** * roll view log request */ ROLL_VIEW_LOG_REQUEST, /** * roll view log response */ ROLL_VIEW_LOG_RESPONSE, /** * view whole log request */ VIEW_WHOLE_LOG_REQUEST, /** * view whole log response */ VIEW_WHOLE_LOG_RESPONSE, /** * get log bytes request */ GET_LOG_BYTES_REQUEST, /** * get log bytes response */ GET_LOG_BYTES_RESPONSE, WORKER_REQUEST, MASTER_RESPONSE, /** * execute task request */ TASK_EXECUTE_REQUEST, /** * execute task ack */ TASK_EXECUTE_ACK, /** * execute task response */ TASK_EXECUTE_RESPONSE, /** * kill task */ TASK_KILL_REQUEST, /** * kill task response */ TASK_KILL_RESPONSE, /** * ping */ PING, /** * pong */ PONG, /** * alert send request */ ALERT_SEND_REQUEST, /** * alert send response */ ALERT_SEND_RESPONSE; } \ No newline at end of file +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command; + +public enum CommandType { + + /** + * remove task log request, + */ + REMOVE_TAK_LOG_REQUEST, + + /** + * remove task log response + */ + REMOVE_TAK_LOG_RESPONSE, + + /** + * roll view log request + */ + ROLL_VIEW_LOG_REQUEST, + + /** + * roll view log response + */ + ROLL_VIEW_LOG_RESPONSE, + + /** + * view whole log request + */ + VIEW_WHOLE_LOG_REQUEST, + + /** + * view whole log response + */ + VIEW_WHOLE_LOG_RESPONSE, + + /** + * get log bytes request + */ + GET_LOG_BYTES_REQUEST, + + /** + * get log bytes response + */ + GET_LOG_BYTES_RESPONSE, + + + WORKER_REQUEST, + MASTER_RESPONSE, + + /** + * execute task request + */ + TASK_EXECUTE_REQUEST, + + /** + * execute task ack + */ + TASK_EXECUTE_ACK, + + /** + * execute task response + */ + TASK_EXECUTE_RESPONSE, + + /** + * db task ack + */ + DB_TASK_ACK, + + /** + * db task response + */ + DB_TASK_RESPONSE, + + /** + * kill task + */ + TASK_KILL_REQUEST, + + /** + * kill task response + */ + TASK_KILL_RESPONSE, + + /** + * HEART_BEAT + */ + HEART_BEAT, + + /** + * ping + */ + PING, + + /** + * pong + */ + PONG, + + /** + * alert send request + */ + ALERT_SEND_REQUEST, + + /** + * alert send response + */ + ALERT_SEND_RESPONSE; +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java new file mode 100644 index 0000000000..4797104450 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskAckCommand.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.io.Serializable; + +/** + * db task ack request command + */ +public class DBTaskAckCommand implements Serializable { + + private int taskInstanceId; + private int status; + + public DBTaskAckCommand() { + super(); + } + + public DBTaskAckCommand(int status, int taskInstanceId) { + this.status = status; + this.taskInstanceId = taskInstanceId; + } + + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + /** + * package response command + * + * @return command + */ + public Command convert2Command() { + Command command = new Command(); + command.setType(CommandType.DB_TASK_ACK); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + return command; + } + + @Override + public String toString() { + return "DBTaskAckCommand{" + "taskInstanceId=" + taskInstanceId + ", status=" + status + '}'; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java new file mode 100644 index 0000000000..9bd86cbdf4 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/DBTaskResponseCommand.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; + +import java.io.Serializable; + +/** + * db task final result response command + */ +public class DBTaskResponseCommand implements Serializable { + + private int taskInstanceId; + private int status; + + public DBTaskResponseCommand() { + super(); + } + + public DBTaskResponseCommand(int status, int taskInstanceId) { + this.status = status; + this.taskInstanceId = taskInstanceId; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + public int getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + /** + * package response command + * + * @return command + */ + public Command convert2Command() { + Command command = new Command(); + command.setType(CommandType.DB_TASK_RESPONSE); + byte[] body = JSONUtils.toJsonByteArray(this); + command.setBody(body); + return command; + } + + @Override + public String toString() { + return "DBTaskResponseCommand{" + + "taskInstanceId=" + taskInstanceId + + ", status=" + status + + '}'; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java index 135c14975b..2fc70f1fbc 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteAckCommand.java @@ -14,14 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.remote.command; -import com.fasterxml.jackson.annotation.JsonFormat; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; + /** * execute task request command */ @@ -35,7 +37,7 @@ public class TaskExecuteAckCommand implements Serializable { /** * startTime */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone = "GMT+8") private Date startTime; /** @@ -111,23 +113,23 @@ public class TaskExecuteAckCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_ACK); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } @Override public String toString() { - return "TaskExecuteAckCommand{" + - "taskInstanceId=" + taskInstanceId + - ", startTime=" + startTime + - ", host='" + host + '\'' + - ", status=" + status + - ", logPath='" + logPath + '\'' + - ", executePath='" + executePath + '\'' + - '}'; + return "TaskExecuteAckCommand{" + + "taskInstanceId=" + taskInstanceId + + ", startTime=" + startTime + + ", host='" + host + '\'' + + ", status=" + status + + ", logPath='" + logPath + '\'' + + ", executePath='" + executePath + '\'' + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java index 4ae28e3ca5..5b2e33922c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java +++ 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteRequestCommand.java @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.remote.command; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; @@ -50,18 +51,18 @@ public class TaskExecuteRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } @Override public String toString() { - return "TaskExecuteRequestCommand{" + - "taskExecutionContext='" + taskExecutionContext + '\'' + - '}'; + return "TaskExecuteRequestCommand{" + + "taskExecutionContext='" + taskExecutionContext + '\'' + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java index 7f6ee668a8..de5b82c729 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskExecuteResponseCommand.java @@ -14,20 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.remote.command; -import com.fasterxml.jackson.annotation.JsonFormat; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; import java.util.Date; +import com.fasterxml.jackson.annotation.JsonFormat; + /** * execute task response command */ public class TaskExecuteResponseCommand implements Serializable { - public TaskExecuteResponseCommand() { } @@ -49,7 +50,7 @@ public class TaskExecuteResponseCommand implements Serializable { /** * end time */ - @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") private Date endTime; @@ -120,22 +121,22 @@ public class TaskExecuteResponseCommand implements Serializable { * package response command * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_EXECUTE_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } @Override public String toString() { - return "TaskExecuteResponseCommand{" + - "taskInstanceId=" + taskInstanceId + - ", status=" + status + - ", endTime=" + endTime + - ", processId=" + processId + - ", appIds='" + appIds + '\'' + - '}'; + return "TaskExecuteResponseCommand{" + + "taskInstanceId=" + taskInstanceId + + ", status=" + status + + ", endTime=" + endTime + + ", processId=" + processId + + ", appIds='" + appIds + '\'' + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java index 4c0830b7cf..155b31785e 100644 --- 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillRequestCommand.java @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.remote.command; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; @@ -30,7 +31,6 @@ public class TaskKillRequestCommand implements Serializable { */ private int taskInstanceId; - public int getTaskInstanceId() { return taskInstanceId; } @@ -44,18 +44,18 @@ public class TaskKillRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_KILL_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } @Override public String toString() { - return "TaskKillRequestCommand{" + - "taskInstanceId=" + taskInstanceId + - '}'; + return "TaskKillRequestCommand{" + + "taskInstanceId=" + taskInstanceId + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java index 4b48c1ef4f..f77221d2e5 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/TaskKillResponseCommand.java @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.remote.command; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import java.io.Serializable; import java.util.List; @@ -52,7 +53,6 @@ public class TaskKillResponseCommand implements Serializable { */ protected List appIds; - public int getTaskInstanceId() { return taskInstanceId; } @@ -98,22 +98,22 @@ public class TaskKillResponseCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.TASK_KILL_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } @Override public String toString() { - return "TaskKillResponseCommand{" + - "taskInstanceId=" + taskInstanceId + - ", host='" + host + '\'' + - ", status=" + status + - ", processId=" + processId + - ", appIds=" + appIds + - '}'; + return "TaskKillResponseCommand{" + + "taskInstanceId=" + taskInstanceId + + ", host='" + host + '\'' + + ", status=" + status + + ", processId=" + processId + + ", appIds=" + appIds + + '}'; } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java index e4b21e2f89..ef71e07cde 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import 
org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -53,10 +53,10 @@ public class GetLogBytesRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.GET_LOG_BYTES_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java index 349ec03855..e8e3eb2a10 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -54,10 +54,10 @@ public class GetLogBytesResponseCommand implements Serializable { * @param opaque request unique identification * @return command */ - public Command convert2Command(long opaque){ + public Command convert2Command(long opaque) { Command command = new Command(opaque); command.setType(CommandType.GET_LOG_BYTES_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogRequestCommand.java index a91cb2add0..c5960d69f2 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogRequestCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -53,10 +53,10 @@ public class RemoveTaskLogRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.REMOVE_TAK_LOG_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogResponseCommand.java index 39e8672127..6883ece815 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RemoveTaskLogResponseCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import 
org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -53,10 +53,10 @@ public class RemoveTaskLogResponseCommand implements Serializable { * * @return command */ - public Command convert2Command(long opaque){ + public Command convert2Command(long opaque) { Command command = new Command(opaque); command.setType(CommandType.REMOVE_TAK_LOG_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java index 00129c7e78..4afee09e6d 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -81,10 +81,10 @@ public class RollViewLogRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.ROLL_VIEW_LOG_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java index a4f4f86c9b..0e9e44a87b 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -54,10 +54,10 @@ public class RollViewLogResponseCommand implements Serializable { * @param opaque request unique identification * @return command */ - public Command convert2Command(long opaque){ + public Command convert2Command(long opaque) { Command command = new Command(opaque); command.setType(CommandType.ROLL_VIEW_LOG_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java index 1d51653eac..e8094690dd 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import 
org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -53,10 +53,10 @@ public class ViewLogRequestCommand implements Serializable { * * @return command */ - public Command convert2Command(){ + public Command convert2Command() { Command command = new Command(); command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java index 6940104f71..33e263087c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.remote.command.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; @@ -54,10 +54,10 @@ public class ViewLogResponseCommand implements Serializable { * @param opaque request unique identification * @return command */ - public Command convert2Command(long opaque){ + public Command convert2Command(long opaque) { Command command = new Command(opaque); command.setType(CommandType.VIEW_WHOLE_LOG_RESPONSE); - byte[] body = JsonSerializer.serialize(this); + byte[] body = JSONUtils.toJsonByteArray(this); command.setBody(body); return command; } diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java index 48d78d9ad6..a988acfe17 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java @@ -14,9 +14,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.remote.handler; -import io.netty.channel.*; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; @@ -25,16 +25,24 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.ChannelUtils; import org.apache.dolphinscheduler.remote.utils.Constants; import org.apache.dolphinscheduler.remote.utils.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.channel.Channel; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.timeout.IdleStateEvent; + /** - * netty client request handler + * netty client request handler */ @ChannelHandler.Sharable public class NettyClientHandler extends ChannelInboundHandlerAdapter { @@ -42,12 +50,14 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { private final Logger 
logger = LoggerFactory.getLogger(NettyClientHandler.class); /** - * netty client + * netty client */ private final NettyRemotingClient nettyRemotingClient; + private static byte[] heartBeatData = "heart_beat".getBytes(); + /** - * callback thread executor + * callback thread executor */ private final ExecutorService callbackExecutor; @@ -57,19 +67,19 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { private final ConcurrentHashMap> processors; /** - * default executor + * default executor */ private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS); - public NettyClientHandler(NettyRemotingClient nettyRemotingClient, ExecutorService callbackExecutor){ + public NettyClientHandler(NettyRemotingClient nettyRemotingClient, ExecutorService callbackExecutor) { this.nettyRemotingClient = nettyRemotingClient; this.callbackExecutor = callbackExecutor; this.processors = new ConcurrentHashMap(); } /** - * When the current channel is not active, - * the current channel has reached the end of its life cycle + * When the current channel is not active, + * the current channel has reached the end of its life cycle * * @param ctx channel handler context * @throws Exception @@ -81,7 +91,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { } /** - * The current channel reads data from the remote + * The current channel reads data from the remote * * @param ctx channel handler context * @param msg message @@ -89,55 +99,55 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { */ @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - processReceived(ctx.channel(), (Command)msg); + processReceived(ctx.channel(), (Command) msg); } /** * register processor * * @param commandType command type - * @param processor processor + * @param processor processor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor 
processor) { this.registerProcessor(commandType, processor, null); } /** - * register processor + * register processor * * @param commandType command type - * @param processor processor - * @param executor thread executor + * @param processor processor + * @param executor thread executor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { ExecutorService executorRef = executor; - if(executorRef == null){ + if (executorRef == null) { executorRef = defaultExecutor; } this.processors.putIfAbsent(commandType, new Pair<>(processor, executorRef)); } /** - * process received logic + * process received logic * * @param command command */ private void processReceived(final Channel channel, final Command command) { ResponseFuture future = ResponseFuture.getFuture(command.getOpaque()); - if(future != null){ + if (future != null) { future.setResponseCommand(command); future.release(); - if(future.getInvokeCallback() != null){ + if (future.getInvokeCallback() != null) { this.callbackExecutor.submit(new Runnable() { @Override public void run() { future.executeInvokeCallback(); } }); - } else{ + } else { future.putResponse(command); } - } else{ + } else { processByCommandType(channel, command); } } @@ -163,9 +173,10 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { } /** - * caught exception - * @param ctx channel handler context - * @param cause cause + * caught exception + * + * @param ctx channel handler context + * @param cause cause * @throws Exception */ @Override @@ -175,4 +186,18 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { ctx.channel().close(); } + @Override + public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { + if (evt instanceof IdleStateEvent) { + Command heartBeat = new Command(); + heartBeat.setType(CommandType.HEART_BEAT); + heartBeat.setBody(heartBeatData); + ctx.writeAndFlush(heartBeat) + 
.addListener(ChannelFutureListener.CLOSE_ON_FAILURE); + + } else { + super.userEventTriggered(ctx, evt); + } + } + } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java index da2a6ff8bf..09e41e9b54 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java @@ -17,22 +17,30 @@ package org.apache.dolphinscheduler.remote.handler; -import io.netty.channel.*; import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.ChannelUtils; import org.apache.dolphinscheduler.remote.utils.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.channel.Channel; +import io.netty.channel.ChannelConfig; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.timeout.IdleStateEvent; + + /** - * netty server request handler + * netty server request handler */ @ChannelHandler.Sharable public class NettyServerHandler extends ChannelInboundHandlerAdapter { @@ -40,22 +48,23 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { private final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class); /** - * netty 
remote server + * netty remote server */ private final NettyRemotingServer nettyRemotingServer; /** - * server processors queue + * server processors queue */ private final ConcurrentHashMap> processors = new ConcurrentHashMap(); - public NettyServerHandler(NettyRemotingServer nettyRemotingServer){ + public NettyServerHandler(NettyRemotingServer nettyRemotingServer) { this.nettyRemotingServer = nettyRemotingServer; } /** - * When the current channel is not active, - * the current channel has reached the end of its life cycle + * When the current channel is not active, + * the current channel has reached the end of its life cycle + * * @param ctx channel handler context * @throws Exception */ @@ -73,38 +82,39 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { */ @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { - processReceived(ctx.channel(), (Command)msg); + processReceived(ctx.channel(), (Command) msg); } /** * register processor * * @param commandType command type - * @param processor processor + * @param processor processor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { this.registerProcessor(commandType, processor, null); } /** - * register processor + * register processor * * @param commandType command type - * @param processor processor - * @param executor thread executor + * @param processor processor + * @param executor thread executor */ public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { ExecutorService executorRef = executor; - if(executorRef == null){ + if (executorRef == null) { executorRef = nettyRemotingServer.getDefaultExecutor(); } this.processors.putIfAbsent(commandType, new Pair<>(processor, executorRef)); } /** - * process received logic + * process received logic + * * @param channel channel - * @param msg message + * @param msg message */ 
private void processReceived(final Channel channel, final Command msg) { final CommandType commandType = msg.getType(); @@ -132,22 +142,22 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { } /** - * caught exception + * caught exception * - * @param ctx channel handler context + * @param ctx channel handler context * @param cause cause * @throws Exception */ @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { - logger.error("exceptionCaught : {}",cause.getMessage(), cause); + logger.error("exceptionCaught : {}", cause.getMessage(), cause); ctx.channel().close(); } /** - * channel write changed + * channel write changed * - * @param ctx channel handler context + * @param ctx channel handler context * @throws Exception */ @Override @@ -158,16 +168,25 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { if (!ch.isWritable()) { if (logger.isWarnEnabled()) { logger.warn("{} is not writable, over high water level : {}", - ch, config.getWriteBufferHighWaterMark()); + ch, config.getWriteBufferHighWaterMark()); } config.setAutoRead(false); } else { if (logger.isWarnEnabled()) { logger.warn("{} is writable, to low water : {}", - ch, config.getWriteBufferLowWaterMark()); + ch, config.getWriteBufferLowWaterMark()); } config.setAutoRead(true); } } + + @Override + public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { + if (evt instanceof IdleStateEvent) { + ctx.channel().close(); + } else { + super.userEventTriggered(ctx, evt); + } + } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java index 91d4ac245e..866ebb6c2b 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java +++ 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java @@ -20,7 +20,6 @@ package org.apache.dolphinscheduler.remote.utils; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; - /** * constant */ @@ -30,6 +29,10 @@ public class Constants { public static final String SLASH = "/"; + public static final int NETTY_SERVER_HEART_BEAT_TIME = 1000 * 60 * 3 + 1000; + + public static final int NETTY_CLIENT_HEART_BEAT_TIME = 1000 * 60; + /** * charset */ diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java index 0c05232dd1..e183289f73 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java @@ -14,34 +14,37 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.remote.utils; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.nio.charset.StandardCharsets; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.nio.charset.StandardCharsets; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; /** - * json serialize or deserialize + * json serialize or deserialize */ public class JsonSerializer { - private static final ObjectMapper objectMapper = new ObjectMapper(); - private static final Logger logger = LoggerFactory.getLogger(JsonSerializer.class); + private static final ObjectMapper objectMapper = new ObjectMapper(); + private static final Logger logger = LoggerFactory.getLogger(JsonSerializer.class); + + private JsonSerializer() { - private JsonSerializer(){ + } - } - /** - * serialize to byte - * - * @param obj object - * @param object type - * @return byte array - */ - public static byte[] serialize(T obj) { + /** + * serialize to byte + * + * @param obj object + * @param object type + * @return byte array + */ + public static byte[] serialize(T obj) { String json = ""; try { json = objectMapper.writeValueAsString(obj); @@ -49,35 +52,36 @@ public class JsonSerializer { logger.error("serializeToString exception!", e); } - return json.getBytes(Constants.UTF8); - } + return json.getBytes(Constants.UTF8); + } - /** - * serialize to string - * @param obj object - * @param object type - * @return string - */ - public static String serializeToString(T obj) { - String json = ""; - try { - json = objectMapper.writeValueAsString(obj); - } catch (JsonProcessingException e) { - logger.error("serializeToString exception!", e); - } + /** + * serialize to string + * + * @param obj object + * @param object type + * @return string + */ + public static String serializeToString(T obj) { + 
String json = ""; + try { + json = objectMapper.writeValueAsString(obj); + } catch (JsonProcessingException e) { + logger.error("serializeToString exception!", e); + } - return json; - } + return json; + } - /** - * deserialize - * - * @param src byte array - * @param clazz class - * @param deserialize type - * @return deserialize type - */ - public static T deserialize(byte[] src, Class clazz) { + /** + * deserialize + * + * @param src byte array + * @param clazz class + * @param deserialize type + * @return deserialize type + */ + public static T deserialize(byte[] src, Class clazz) { String json = new String(src, StandardCharsets.UTF_8); try { @@ -87,6 +91,6 @@ public class JsonSerializer { return null; } - } + } } diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index 18283a5bc7..9655b290c5 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -22,7 +22,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT dolphinscheduler-server dolphinscheduler-server diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java index 1589c365c2..a758028233 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.server.entity; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.io.Serializable; import java.util.Date; 
@@ -38,7 +38,6 @@ public class TaskExecutionContext implements Serializable { */ private int taskInstanceId; - /** * task name */ @@ -475,7 +474,7 @@ public class TaskExecutionContext implements Serializable { public Command toCommand() { TaskExecuteRequestCommand requestCommand = new TaskExecuteRequestCommand(); - requestCommand.setTaskExecutionContext(JsonSerializer.serializeToString(this)); + requestCommand.setTaskExecutionContext(JSONUtils.toJsonString(this)); return requestCommand.convert2Command(); } @@ -506,22 +505,22 @@ public class TaskExecutionContext implements Serializable { @Override public String toString() { return "TaskExecutionContext{" - + "taskInstanceId=" + taskInstanceId - + ", taskName='" + taskName + '\'' - + ", currentExecutionStatus=" + currentExecutionStatus - + ", firstSubmitTime=" + firstSubmitTime - + ", startTime=" + startTime - + ", taskType='" + taskType + '\'' - + ", host='" + host + '\'' - + ", executePath='" + executePath + '\'' - + ", logPath='" + logPath + '\'' - + ", taskJson='" + taskJson + '\'' - + ", processId=" + processId - + ", appIds='" + appIds + '\'' - + ", processInstanceId=" + processInstanceId - + ", scheduleTime=" + scheduleTime - + ", globalParams='" + globalParams + '\'' - + ", executorId=" + executorId + + "taskInstanceId=" + taskInstanceId + + ", taskName='" + taskName + '\'' + + ", currentExecutionStatus=" + currentExecutionStatus + + ", firstSubmitTime=" + firstSubmitTime + + ", startTime=" + startTime + + ", taskType='" + taskType + '\'' + + ", host='" + host + '\'' + + ", executePath='" + executePath + '\'' + + ", logPath='" + logPath + '\'' + + ", taskJson='" + taskJson + '\'' + + ", processId=" + processId + + ", appIds='" + appIds + '\'' + + ", processInstanceId=" + processInstanceId + + ", scheduleTime=" + scheduleTime + + ", globalParams='" + globalParams + '\'' + + ", executorId=" + executorId + ", cmdTypeIfComplement=" + cmdTypeIfComplement + ", tenantCode='" + tenantCode + '\'' + ", queue='" + queue 
+ '\'' diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java index 458afa63b3..e9a85f456f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java @@ -14,19 +14,30 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.log; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.utils.IOUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; -import org.apache.dolphinscheduler.remote.command.log.*; +import org.apache.dolphinscheduler.remote.command.log.GetLogBytesRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.GetLogBytesResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.RemoveTaskLogResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.RollViewLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.RollViewLogResponseCommand; +import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand; +import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.*; +import java.io.BufferedReader; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; 
+import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Collections; @@ -38,6 +49,11 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.channel.Channel; + /** * logger request process logic */ @@ -47,7 +63,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { private final ThreadPoolExecutor executor; - public LoggerRequestProcessor(){ + public LoggerRequestProcessor() { this.executor = new ThreadPoolExecutor(4, 4, 10, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100)); } @@ -59,35 +75,35 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { * reuqest task log command type */ final CommandType commandType = command.getType(); - switch (commandType){ + switch (commandType) { case GET_LOG_BYTES_REQUEST: - GetLogBytesRequestCommand getLogRequest = JsonSerializer.deserialize( + GetLogBytesRequestCommand getLogRequest = JSONUtils.parseObject( command.getBody(), GetLogBytesRequestCommand.class); byte[] bytes = getFileContentBytes(getLogRequest.getPath()); GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes); channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque())); break; case VIEW_WHOLE_LOG_REQUEST: - ViewLogRequestCommand viewLogRequest = JsonSerializer.deserialize( + ViewLogRequestCommand viewLogRequest = JSONUtils.parseObject( command.getBody(), ViewLogRequestCommand.class); String msg = readWholeFileContent(viewLogRequest.getPath()); ViewLogResponseCommand viewLogResponse = new ViewLogResponseCommand(msg); channel.writeAndFlush(viewLogResponse.convert2Command(command.getOpaque())); break; case ROLL_VIEW_LOG_REQUEST: - RollViewLogRequestCommand rollViewLogRequest = JsonSerializer.deserialize( + RollViewLogRequestCommand rollViewLogRequest 
= JSONUtils.parseObject( command.getBody(), RollViewLogRequestCommand.class); List lines = readPartFileContent(rollViewLogRequest.getPath(), rollViewLogRequest.getSkipLineNum(), rollViewLogRequest.getLimit()); StringBuilder builder = new StringBuilder(); - for (String line : lines){ + for (String line : lines) { builder.append(line + "\r\n"); } RollViewLogResponseCommand rollViewLogRequestResponse = new RollViewLogResponseCommand(builder.toString()); channel.writeAndFlush(rollViewLogRequestResponse.convert2Command(command.getOpaque())); break; case REMOVE_TAK_LOG_REQUEST: - RemoveTaskLogRequestCommand removeTaskLogRequest = JsonSerializer.deserialize( + RemoveTaskLogRequestCommand removeTaskLogRequest = JSONUtils.parseObject( command.getBody(), RemoveTaskLogRequestCommand.class); String taskLogPath = removeTaskLogRequest.getPath(); @@ -95,10 +111,10 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { File taskLogFile = new File(taskLogPath); Boolean status = true; try { - if (taskLogFile.exists()){ + if (taskLogFile.exists()) { status = taskLogFile.delete(); } - }catch (Exception e){ + } catch (Exception e) { status = false; } @@ -110,7 +126,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { } } - public ExecutorService getExecutor(){ + public ExecutorService getExecutor() { return this.executor; } @@ -121,7 +137,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { * @return byte array of file * @throws Exception exception */ - private byte[] getFileContentBytes(String filePath){ + private byte[] getFileContentBytes(String filePath) { InputStream in = null; ByteArrayOutputStream bos = null; try { @@ -133,9 +149,9 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { bos.write(buf, 0, len); } return bos.toByteArray(); - }catch (IOException e){ + } catch (IOException e) { logger.error("get file bytes error",e); - }finally { + } finally { IOUtils.closeQuietly(bos); 
IOUtils.closeQuietly(in); } @@ -152,11 +168,16 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { */ private List readPartFileContent(String filePath, int skipLine, - int limit){ - try (Stream stream = Files.lines(Paths.get(filePath))) { - return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); - } catch (IOException e) { - logger.error("read file error",e); + int limit) { + File file = new File(filePath); + if (file.exists() && file.isFile()) { + try (Stream stream = Files.lines(Paths.get(filePath))) { + return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); + } catch (IOException e) { + logger.error("read file error",e); + } + } else { + logger.info("file path: {} not exists", filePath); } return Collections.emptyList(); } @@ -167,19 +188,19 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { * @param filePath file path * @return whole file content */ - private String readWholeFileContent(String filePath){ + private String readWholeFileContent(String filePath) { BufferedReader br = null; String line; StringBuilder sb = new StringBuilder(); try { br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath))); - while ((line = br.readLine()) != null){ + while ((line = br.readLine()) != null) { sb.append(line + "\r\n"); } return sb.toString(); - }catch (IOException e){ + } catch (IOException e) { logger.error("read file error",e); - }finally { + } finally { IOUtils.closeQuietly(br); } return ""; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java new file mode 100755 index 0000000000..4d7cd8a0b2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/TaskLogAppender.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.log; + +import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER; + +import org.slf4j.Marker; + +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.FileAppender; + +/** + * Task log appender + */ +public class TaskLogAppender extends FileAppender { + @Override + protected void append(ILoggingEvent event) { + Marker marker = event.getMarker(); + if (marker != null) { + if (marker.equals(FINALIZE_SESSION_MARKER)) { + stop(); + } + } + super.subAppend(event); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java index 21995c3867..23255084e0 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.server.master.consumer; import org.apache.dolphinscheduler.common.Constants; import 
org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.SqoopJobType; import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.enums.SqoopJobType; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.model.TaskNode; @@ -33,32 +33,53 @@ import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; +import org.apache.dolphinscheduler.common.utils.EnumUtils; +import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder; -import org.apache.dolphinscheduler.server.entity.*; +import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.ProcedureTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; +import 
org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher; import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext; import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType; import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import javax.annotation.PostConstruct; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import javax.annotation.PostConstruct; -import java.util.*; -import java.util.stream.Collectors; -import java.util.stream.Stream; - /** * TaskUpdateQueue consumer */ @Component -public class TaskPriorityQueueConsumer extends Thread{ +public class TaskPriorityQueueConsumer extends Thread { /** * logger of TaskUpdateQueueConsumer @@ -69,7 +90,7 @@ public class TaskPriorityQueueConsumer extends Thread{ * taskUpdateQueue */ @Autowired - private TaskPriorityQueue taskPriorityQueue; + private TaskPriorityQueue taskPriorityQueue; /** * processService @@ -91,83 +112,91 @@ public class TaskPriorityQueueConsumer extends Thread{ private MasterConfig masterConfig; @PostConstruct - public void init(){ + public void init() { super.setName("TaskUpdateQueueConsumerThread"); super.start(); } @Override public void run() { - List failedDispatchTasks = new ArrayList<>(); - while 
(Stopper.isRunning()){ + List failedDispatchTasks = new ArrayList<>(); + while (Stopper.isRunning()) { try { int fetchTaskNum = masterConfig.getMasterDispatchTaskNumber(); failedDispatchTasks.clear(); - for(int i = 0; i < fetchTaskNum; i++){ - if(taskPriorityQueue.size() <= 0){ + for (int i = 0; i < fetchTaskNum; i++) { + if (taskPriorityQueue.size() <= 0) { Thread.sleep(Constants.SLEEP_TIME_MILLIS); continue; } // if not task , blocking here - String taskPriorityInfo = taskPriorityQueue.take(); - TaskPriority taskPriority = TaskPriority.of(taskPriorityInfo); - boolean dispatchResult = dispatch(taskPriority.getTaskId()); - if(!dispatchResult){ - failedDispatchTasks.add(taskPriorityInfo); + TaskPriority taskPriority = taskPriorityQueue.take(); + boolean dispatchResult = dispatch(taskPriority); + if (!dispatchResult) { + failedDispatchTasks.add(taskPriority); } } - for(String dispatchFailedTask : failedDispatchTasks){ - taskPriorityQueue.put(dispatchFailedTask); + if (!failedDispatchTasks.isEmpty()) { + for (TaskPriority dispatchFailedTask : failedDispatchTasks) { + taskPriorityQueue.put(dispatchFailedTask); + } + // If there are tasks in a cycle that cannot find the worker group, + // sleep for 1 second + if (taskPriorityQueue.size() <= failedDispatchTasks.size()) { + TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS); + } } - }catch (Exception e){ - logger.error("dispatcher task error",e); + + } catch (Exception e) { + logger.error("dispatcher task error", e); } } } - /** * dispatch task * - * @param taskInstanceId taskInstanceId + * @param taskPriority taskPriority * @return result */ - private boolean dispatch(int taskInstanceId){ + protected boolean dispatch(TaskPriority taskPriority) { boolean result = false; try { + int taskInstanceId = taskPriority.getTaskId(); TaskExecutionContext context = getTaskExecutionContext(taskInstanceId); ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER, 
context.getWorkerGroup()); - if (taskInstanceIsFinalState(taskInstanceId)){ + if (taskInstanceIsFinalState(taskInstanceId)) { // when task finish, ignore this task, there is no need to dispatch anymore return true; - }else{ + } else { result = dispatcher.dispatch(executionContext); } } catch (ExecuteException e) { - logger.error("dispatch error",e); + logger.error("dispatch error", e); } return result; } - /** * taskInstance is final state * success,failure,kill,stop,pause,threadwaiting is final state + * * @param taskInstanceId taskInstanceId * @return taskInstance is final state */ - public Boolean taskInstanceIsFinalState(int taskInstanceId){ + public Boolean taskInstanceIsFinalState(int taskInstanceId) { TaskInstance taskInstance = processService.findTaskInstanceById(taskInstanceId); return taskInstance.getState().typeIsFinished(); } /** * get TaskExecutionContext + * * @param taskInstanceId taskInstanceId * @return TaskExecutionContext */ - protected TaskExecutionContext getTaskExecutionContext(int taskInstanceId){ + protected TaskExecutionContext getTaskExecutionContext(int taskInstanceId) { TaskInstance taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstanceId); // task type @@ -181,12 +210,12 @@ public class TaskPriorityQueueConsumer extends Thread{ // verify tenant is null if (verifyTenantIsNull(tenant, taskInstance)) { - processService.changeTaskState(ExecutionStatus.FAILURE, - taskInstance.getStartTime(), - taskInstance.getHost(), - null, - null, - taskInstance.getId()); + processService.changeTaskState(taskInstance, ExecutionStatus.FAILURE, + taskInstance.getStartTime(), + taskInstance.getHost(), + null, + null, + taskInstance.getId()); return null; } // set queue for process instance, user-specified queue takes precedence over tenant queue @@ -196,50 +225,46 @@ public class TaskPriorityQueueConsumer extends Thread{ taskInstance.setExecutePath(getExecLocalPath(taskInstance)); taskInstance.setResources(getResourceFullNames(taskNode)); - 
SQLTaskExecutionContext sqlTaskExecutionContext = new SQLTaskExecutionContext(); DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); ProcedureTaskExecutionContext procedureTaskExecutionContext = new ProcedureTaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); - // SQL task - if (taskType == TaskType.SQL){ + if (taskType == TaskType.SQL) { setSQLTaskRelation(sqlTaskExecutionContext, taskNode); - } // DATAX task - if (taskType == TaskType.DATAX){ + if (taskType == TaskType.DATAX) { setDataxTaskRelation(dataxTaskExecutionContext, taskNode); } - // procedure task - if (taskType == TaskType.PROCEDURE){ + if (taskType == TaskType.PROCEDURE) { setProcedureTaskRelation(procedureTaskExecutionContext, taskNode); } - if (taskType == TaskType.SQOOP){ - setSqoopTaskRelation(sqoopTaskExecutionContext,taskNode); + if (taskType == TaskType.SQOOP) { + setSqoopTaskRelation(sqoopTaskExecutionContext, taskNode); } - return TaskExecutionContextBuilder.get() - .buildTaskInstanceRelatedInfo(taskInstance) - .buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance()) - .buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine()) - .buildSQLTaskRelatedInfo(sqlTaskExecutionContext) - .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) - .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) - .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) - .create(); + .buildTaskInstanceRelatedInfo(taskInstance) + .buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance()) + .buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine()) + .buildSQLTaskRelatedInfo(sqlTaskExecutionContext) + .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) + .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) + .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) + .create(); } /** * set procedure task relation + * * @param procedureTaskExecutionContext 
procedureTaskExecutionContext - * @param taskNode taskNode + * @param taskNode taskNode */ private void setProcedureTaskRelation(ProcedureTaskExecutionContext procedureTaskExecutionContext, TaskNode taskNode) { ProcedureParameters procedureParameters = JSONUtils.parseObject(taskNode.getParams(), ProcedureParameters.class); @@ -250,34 +275,34 @@ public class TaskPriorityQueueConsumer extends Thread{ /** * set datax task relation + * * @param dataxTaskExecutionContext dataxTaskExecutionContext - * @param taskNode taskNode + * @param taskNode taskNode */ - private void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) { + protected void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) { DataxParameters dataxParameters = JSONUtils.parseObject(taskNode.getParams(), DataxParameters.class); - DataSource dataSource = processService.findDataSourceById(dataxParameters.getDataSource()); - DataSource dataTarget = processService.findDataSourceById(dataxParameters.getDataTarget()); - + DataSource dbSource = processService.findDataSourceById(dataxParameters.getDataSource()); + DataSource dbTarget = processService.findDataSourceById(dataxParameters.getDataTarget()); - if (dataSource != null){ + if (dbSource != null) { dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource()); - dataxTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); - dataxTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); + dataxTaskExecutionContext.setSourcetype(dbSource.getType().getCode()); + dataxTaskExecutionContext.setSourceConnectionParams(dbSource.getConnectionParams()); } - if (dataTarget != null){ + if (dbTarget != null) { dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget()); - dataxTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); - 
dataxTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); + dataxTaskExecutionContext.setTargetType(dbTarget.getType().getCode()); + dataxTaskExecutionContext.setTargetConnectionParams(dbTarget.getConnectionParams()); } } - /** * set sqoop task relation + * * @param sqoopTaskExecutionContext sqoopTaskExecutionContext - * @param taskNode taskNode + * @param taskNode taskNode */ private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) { SqoopParameters sqoopParameters = JSONUtils.parseObject(taskNode.getParams(), SqoopParameters.class); @@ -290,13 +315,13 @@ public class TaskPriorityQueueConsumer extends Thread{ DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); - if (dataSource != null){ + if (dataSource != null) { sqoopTaskExecutionContext.setDataSourceId(dataSource.getId()); sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode()); sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); } - if (dataTarget != null){ + if (dataTarget != null) { sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId()); sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode()); sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams()); @@ -306,8 +331,9 @@ public class TaskPriorityQueueConsumer extends Thread{ /** * set SQL task relation + * * @param sqlTaskExecutionContext sqlTaskExecutionContext - * @param taskNode taskNode + * @param taskNode taskNode */ private void setSQLTaskRelation(SQLTaskExecutionContext sqlTaskExecutionContext, TaskNode taskNode) { SqlParameters sqlParameters = JSONUtils.parseObject(taskNode.getParams(), SqlParameters.class); @@ -317,20 +343,20 @@ public class TaskPriorityQueueConsumer extends Thread{ // whether udf type boolean udfTypeFlag = 
EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType()) - && StringUtils.isNotEmpty(sqlParameters.getUdfs()); + && StringUtils.isNotEmpty(sqlParameters.getUdfs()); - if (udfTypeFlag){ + if (udfTypeFlag) { String[] udfFunIds = sqlParameters.getUdfs().split(","); int[] udfFunIdsArray = new int[udfFunIds.length]; - for(int i = 0 ; i < udfFunIds.length;i++){ - udfFunIdsArray[i]=Integer.parseInt(udfFunIds[i]); + for (int i = 0; i < udfFunIds.length; i++) { + udfFunIdsArray[i] = Integer.parseInt(udfFunIds[i]); } - List udfFuncList = processService.queryUdfFunListByids(udfFunIdsArray); - Map udfFuncMap = new HashMap<>(); - for(UdfFunc udfFunc : udfFuncList) { + List udfFuncList = processService.queryUdfFunListByIds(udfFunIdsArray); + Map udfFuncMap = new HashMap<>(); + for (UdfFunc udfFunc : udfFuncList) { String tenantCode = processService.queryTenantCodeByResName(udfFunc.getResourceName(), ResourceType.UDF); - udfFuncMap.put(udfFunc,tenantCode); + udfFuncMap.put(udfFunc, tenantCode); } sqlTaskExecutionContext.setUdfFuncTenantCodeMap(udfFuncMap); @@ -342,25 +368,25 @@ public class TaskPriorityQueueConsumer extends Thread{ * * @return execute local path */ - private String getExecLocalPath(TaskInstance taskInstance){ + private String getExecLocalPath(TaskInstance taskInstance) { return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(), - taskInstance.getProcessDefine().getId(), - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); + taskInstance.getProcessDefine().getId(), + taskInstance.getProcessInstance().getId(), + taskInstance.getId()); } - /** - * whehter tenant is null - * @param tenant tenant + * whehter tenant is null + * + * @param tenant tenant * @param taskInstance taskInstance * @return result */ - private boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { - if(tenant == null){ + protected boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { + if (tenant == null) { 
logger.error("tenant not exists,process instance id : {},task instance id : {}", - taskInstance.getProcessInstance().getId(), - taskInstance.getId()); + taskInstance.getProcessInstance().getId(), + taskInstance.getId()); return true; } return false; @@ -369,8 +395,8 @@ public class TaskPriorityQueueConsumer extends Thread{ /** * get resource map key is full name and value is tenantCode */ - private Map getResourceFullNames(TaskNode taskNode) { - Map resourceMap = new HashMap<>(); + protected Map getResourceFullNames(TaskNode taskNode) { + Map resourcesMap = new HashMap<>(); AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams()); if (baseParam != null) { @@ -382,7 +408,7 @@ public class TaskPriorityQueueConsumer extends Thread{ if (CollectionUtils.isNotEmpty(oldVersionResources)) { oldVersionResources.forEach( - (t)->resourceMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE)) + (t) -> resourcesMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE)) ); } @@ -395,12 +421,12 @@ public class TaskPriorityQueueConsumer extends Thread{ List resources = processService.listResourceByIds(resourceIds); resources.forEach( - (t)->resourceMap.put(t.getFullName(),processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE)) + (t) -> resourcesMap.put(t.getFullName(), processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE)) ); } } } - return resourceMap; + return resourcesMap; } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java index ee9b86babb..18a7d4691d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java @@ -17,8 +17,9 @@ package org.apache.dolphinscheduler.server.master.dispatch.executor; -import com.github.rholder.retry.RetryException; -import org.apache.dolphinscheduler.common.utils.RetryerUtils; +import org.apache.commons.collections.CollectionUtils; + +import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; @@ -31,14 +32,15 @@ import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProcessor; import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor; import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import javax.annotation.PostConstruct; + import java.util.*; -import java.util.concurrent.ExecutionException; /** * netty executor manager @@ -86,11 +88,17 @@ public class NettyExecutorManager extends AbstractExecutorManager{ */ @Override public Boolean execute(ExecutionContext context) throws ExecuteException { - LinkedList allNodes = new LinkedList<>(); - Set nodes = getAllNodes(context); - if (nodes != null) { - allNodes.addAll(nodes); - } + + /** + * all nodes + */ + Set allNodes = getAllNodes(context); + + /** + * fail nodes + */ + Set failNodeSet = new HashSet<>(); + /** * build command accord executeContext */ @@ -99,27 +107,31 @@ public class NettyExecutorManager extends AbstractExecutorManager{ /** * execute task host */ - String startHostAddress = context.getHost().getAddress(); - // remove start host address and add it to head - 
allNodes.remove(startHostAddress); - allNodes.addFirst(startHostAddress); - + Host host = context.getHost(); boolean success = false; - for (String address : allNodes) { + while (!success) { try { - Host host = Host.of(address); - doExecute(host, command); + doExecute(host,command); success = true; context.setHost(host); - break; } catch (ExecuteException ex) { - logger.error("retry execute command : {} host : {}", command, address); + logger.error(String.format("execute command : %s error", command), ex); + try { + failNodeSet.add(host.getAddress()); + Set tmpAllIps = new HashSet<>(allNodes); + Collection remained = CollectionUtils.subtract(tmpAllIps, failNodeSet); + if (remained != null && remained.size() > 0) { + host = Host.of(remained.iterator().next()); + logger.error("retry execute command : {} host : {}", command, host); + } else { + throw new ExecuteException("fail after try all nodes"); + } + } catch (Throwable t) { + throw new ExecuteException("fail after try all nodes"); + } } } - if (!success) { - throw new ExecuteException("fail after try all nodes"); - } - + return success; } @@ -136,13 +148,24 @@ public class NettyExecutorManager extends AbstractExecutorManager{ * @throws ExecuteException if error throws ExecuteException */ private void doExecute(final Host host, final Command command) throws ExecuteException { - try { - RetryerUtils.retryCall(() -> { + /** + * retry count,default retry 3 + */ + int retryCount = 3; + boolean success = false; + do { + try { nettyRemotingClient.send(host, command); - return Boolean.TRUE; - }); - } catch (ExecutionException | RetryException e) { - throw new ExecuteException(String.format("send command : %s to %s error", command, host), e); + success = true; + } catch (Exception ex) { + logger.error(String.format("send command : %s to %s error", command, host), ex); + retryCount--; + ThreadUtils.sleep(100); + } + } while (retryCount >= 0 && !success); + + if (!success) { + throw new ExecuteException(String.format("send 
command : %s to %s error", command, host)); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java index 0f038dd6ee..51d068ad08 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java @@ -17,28 +17,24 @@ package org.apache.dolphinscheduler.server.master.processor; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.ChannelUtils; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static 
org.apache.dolphinscheduler.common.Constants.*; +import io.netty.channel.Channel; /** * task ack processor @@ -57,16 +53,9 @@ public class TaskAckProcessor implements NettyRequestProcessor { */ private final TaskInstanceCacheManager taskInstanceCacheManager; - - /** - * processService - */ - private ProcessService processService; - public TaskAckProcessor(){ this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); - this.processService = SpringApplicationContext.getBean(ProcessService.class); } /** @@ -77,7 +66,7 @@ public class TaskAckProcessor implements NettyRequestProcessor { @Override public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_EXECUTE_ACK == command.getType(), String.format("invalid command type : %s", command.getType())); - TaskExecuteAckCommand taskAckCommand = JsonSerializer.deserialize(command.getBody(), TaskExecuteAckCommand.class); + TaskExecuteAckCommand taskAckCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteAckCommand.class); logger.info("taskAckCommand : {}", taskAckCommand); taskInstanceCacheManager.cacheTaskInstance(taskAckCommand); @@ -92,19 +81,10 @@ public class TaskAckProcessor implements NettyRequestProcessor { workerAddress, taskAckCommand.getExecutePath(), taskAckCommand.getLogPath(), - taskAckCommand.getTaskInstanceId()); + taskAckCommand.getTaskInstanceId(), + channel); taskResponseService.addResponse(taskResponseEvent); - - while (Stopper.isRunning()){ - TaskInstance taskInstance = processService.findTaskInstanceById(taskAckCommand.getTaskInstanceId()); - - if (taskInstance != null && ackStatus.typeIsRunning()){ - break; - } - ThreadUtils.sleep(SLEEP_TIME_MILLIS); - } - } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java index 2e51998cbd..afd0577d87 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java @@ -17,16 +17,18 @@ package org.apache.dolphinscheduler.server.master.processor; -import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import io.netty.channel.Channel; + /** * task response processor */ @@ -45,9 +47,8 @@ public class TaskKillResponseProcessor implements NettyRequestProcessor { public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_KILL_RESPONSE == command.getType(), String.format("invalid command type : %s", command.getType())); - TaskKillResponseCommand responseCommand = JsonSerializer.deserialize(command.getBody(), TaskKillResponseCommand.class); + TaskKillResponseCommand responseCommand = JSONUtils.parseObject(command.getBody(), TaskKillResponseCommand.class); logger.info("received task kill response command : {}", responseCommand); } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java index 
2633ccd634..080fdd540d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessor.java @@ -17,27 +17,23 @@ package org.apache.dolphinscheduler.server.master.processor; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.apache.dolphinscheduler.server.master.cache.TaskInstanceCacheManager; import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.apache.dolphinscheduler.common.Constants.*; +import io.netty.channel.Channel; /** * task response processor @@ -56,15 +52,9 @@ public class TaskResponseProcessor implements NettyRequestProcessor { */ private final TaskInstanceCacheManager taskInstanceCacheManager; - /** - * processService - */ - private ProcessService processService; 
- public TaskResponseProcessor(){ this.taskResponseService = SpringApplicationContext.getBean(TaskResponseService.class); this.taskInstanceCacheManager = SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class); - this.processService = SpringApplicationContext.getBean(ProcessService.class); } /** @@ -78,31 +68,20 @@ public class TaskResponseProcessor implements NettyRequestProcessor { public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_EXECUTE_RESPONSE == command.getType(), String.format("invalid command type : %s", command.getType())); - TaskExecuteResponseCommand responseCommand = JsonSerializer.deserialize(command.getBody(), TaskExecuteResponseCommand.class); + TaskExecuteResponseCommand responseCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteResponseCommand.class); logger.info("received command : {}", responseCommand); taskInstanceCacheManager.cacheTaskInstance(responseCommand); - ExecutionStatus responseStatus = ExecutionStatus.of(responseCommand.getStatus()); - // TaskResponseEvent TaskResponseEvent taskResponseEvent = TaskResponseEvent.newResult(ExecutionStatus.of(responseCommand.getStatus()), responseCommand.getEndTime(), responseCommand.getProcessId(), responseCommand.getAppIds(), responseCommand.getTaskInstanceId(), - responseCommand.getVarPool()); - + responseCommand.getVarPool(), + channel); taskResponseService.addResponse(taskResponseEvent); - - while (Stopper.isRunning()){ - TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); - - if (taskInstance != null && responseStatus.typeIsFinished()){ - break; - } - ThreadUtils.sleep(SLEEP_TIME_MILLIS); - } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java index ba07be50f3..0ca558a560 
100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseEvent.java @@ -18,10 +18,14 @@ package org.apache.dolphinscheduler.server.master.processor.queue; import com.fasterxml.jackson.annotation.JsonFormat; + +import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import java.util.Date; +import io.netty.channel.Channel; + /** * task event */ @@ -83,8 +87,19 @@ public class TaskResponseEvent { * varPool */ private String varPool; + + /** + * channel + */ + private Channel channel; - public static TaskResponseEvent newAck(ExecutionStatus state, Date startTime, String workerAddress, String executePath, String logPath, int taskInstanceId) { + public static TaskResponseEvent newAck(ExecutionStatus state, + Date startTime, + String workerAddress, + String executePath, + String logPath, + int taskInstanceId, + Channel channel) { TaskResponseEvent event = new TaskResponseEvent(); event.setState(state); event.setStartTime(startTime); @@ -93,10 +108,17 @@ public class TaskResponseEvent { event.setLogPath(logPath); event.setTaskInstanceId(taskInstanceId); event.setEvent(Event.ACK); + event.setChannel(channel); return event; } - public static TaskResponseEvent newResult(ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstanceId, String varPool) { + public static TaskResponseEvent newResult(ExecutionStatus state, + Date endTime, + int processId, + String appIds, + int taskInstanceId, + String varPool, + Channel channel) { TaskResponseEvent event = new TaskResponseEvent(); event.setState(state); event.setEndTime(endTime); @@ -105,6 +127,7 @@ public class TaskResponseEvent { event.setTaskInstanceId(taskInstanceId); event.setEvent(Event.RESULT); event.setVarPool(varPool); + event.setChannel(channel); 
return event; } @@ -196,8 +219,11 @@ public class TaskResponseEvent { this.event = event; } - public enum Event{ - ACK, - RESULT; + public Channel getChannel() { + return channel; + } + + public void setChannel(Channel channel) { + this.channel = channel; } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java index 6434db70e5..1b5eddbd6f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java @@ -17,20 +17,29 @@ package org.apache.dolphinscheduler.server.master.processor.queue; +import org.apache.dolphinscheduler.common.enums.Event; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand; +import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; import org.apache.dolphinscheduler.service.process.ProcessService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import 
io.netty.channel.Channel; + /** * task manager */ @@ -59,21 +68,20 @@ public class TaskResponseService { */ private Thread taskResponseWorker; - @PostConstruct - public void start(){ + public void start() { this.taskResponseWorker = new TaskResponseWorker(); this.taskResponseWorker.setName("TaskResponseWorker"); this.taskResponseWorker.start(); } @PreDestroy - public void stop(){ + public void stop() { this.taskResponseWorker.interrupt(); - if(!eventQueue.isEmpty()){ + if (!eventQueue.isEmpty()) { List remainEvents = new ArrayList<>(eventQueue.size()); eventQueue.drainTo(remainEvents); - for(TaskResponseEvent event : remainEvents){ + for (TaskResponseEvent event : remainEvents) { this.persist(event); } } @@ -84,16 +92,15 @@ public class TaskResponseService { * * @param taskResponseEvent taskResponseEvent */ - public void addResponse(TaskResponseEvent taskResponseEvent){ + public void addResponse(TaskResponseEvent taskResponseEvent) { try { eventQueue.put(taskResponseEvent); } catch (InterruptedException e) { - logger.error("put task : {} error :{}", taskResponseEvent,e); + logger.error("put task : {} error :{}", taskResponseEvent, e); Thread.currentThread().interrupt(); } } - /** * task worker thread */ @@ -102,16 +109,16 @@ public class TaskResponseService { @Override public void run() { - while (Stopper.isRunning()){ + while (Stopper.isRunning()) { try { // if not task , blocking here TaskResponseEvent taskResponseEvent = eventQueue.take(); persist(taskResponseEvent); - } catch (InterruptedException e){ + } catch (InterruptedException e) { Thread.currentThread().interrupt(); break; - } catch (Exception e){ - logger.error("persist task error",e); + } catch (Exception e) { + logger.error("persist task error", e); } } logger.info("TaskResponseWorker stopped"); @@ -120,27 +127,55 @@ public class TaskResponseService { /** * persist taskResponseEvent + * * @param taskResponseEvent taskResponseEvent */ - private void persist(TaskResponseEvent taskResponseEvent){ - 
TaskResponseEvent.Event event = taskResponseEvent.getEvent(); + private void persist(TaskResponseEvent taskResponseEvent) { + Event event = taskResponseEvent.getEvent(); + Channel channel = taskResponseEvent.getChannel(); - switch (event){ + switch (event) { case ACK: - processService.changeTaskState(taskResponseEvent.getState(), - taskResponseEvent.getStartTime(), - taskResponseEvent.getWorkerAddress(), - taskResponseEvent.getExecutePath(), - taskResponseEvent.getLogPath(), - taskResponseEvent.getTaskInstanceId()); + try { + TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); + if (taskInstance != null) { + ExecutionStatus status = taskInstance.getState().typeIsFinished() ? taskInstance.getState() : taskResponseEvent.getState(); + processService.changeTaskState(taskInstance, status, + taskResponseEvent.getStartTime(), + taskResponseEvent.getWorkerAddress(), + taskResponseEvent.getExecutePath(), + taskResponseEvent.getLogPath(), + taskResponseEvent.getTaskInstanceId()); + } + // if taskInstance is null (maybe deleted) . retry will be meaningless . 
so ack success + DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskAckCommand.convert2Command()); + } catch (Exception e) { + logger.error("worker ack master error", e); + DBTaskAckCommand taskAckCommand = new DBTaskAckCommand(ExecutionStatus.FAILURE.getCode(), -1); + channel.writeAndFlush(taskAckCommand.convert2Command()); + } break; case RESULT: - processService.changeTaskState(taskResponseEvent.getState(), - taskResponseEvent.getEndTime(), - taskResponseEvent.getProcessId(), - taskResponseEvent.getAppIds(), - taskResponseEvent.getTaskInstanceId(), - taskResponseEvent.getVarPool()); + try { + TaskInstance taskInstance = processService.findTaskInstanceById(taskResponseEvent.getTaskInstanceId()); + if (taskInstance != null) { + processService.changeTaskState(taskInstance, taskResponseEvent.getState(), + taskResponseEvent.getEndTime(), + taskResponseEvent.getProcessId(), + taskResponseEvent.getAppIds(), + taskResponseEvent.getTaskInstanceId(), + taskResponseEvent.getVarPool() + ); + } + // if taskInstance is null (maybe deleted) . retry will be meaningless . 
so response success + DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.SUCCESS.getCode(), taskResponseEvent.getTaskInstanceId()); + channel.writeAndFlush(taskResponseCommand.convert2Command()); + } catch (Exception e) { + logger.error("worker response master error", e); + DBTaskResponseCommand taskResponseCommand = new DBTaskResponseCommand(ExecutionStatus.FAILURE.getCode(), -1); + channel.writeAndFlush(taskResponseCommand.convert2Command()); + } break; default: throw new IllegalArgumentException("invalid event type : " + event); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java index 021f10d444..11598d9ace 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java @@ -39,7 +39,6 @@ import java.util.concurrent.ConcurrentHashMap; public class ConditionsTaskExecThread extends MasterBaseTaskExecThread { - /** * dependent parameters */ @@ -134,7 +133,6 @@ public class ConditionsTaskExecThread extends MasterBaseTaskExecThread { this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(), DependentParameters.class); } - /** * depend result for depend item * @param item @@ -158,5 +156,4 @@ public class ConditionsTaskExecThread extends MasterBaseTaskExecThread { return dependResult; } - } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java index 319afedd7b..5b56911fd7 100644 --- 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java @@ -149,6 +149,10 @@ public class DependentTaskExecThread extends MasterBaseTaskExecThread { logger.error("process instance not exists , master task exec thread exit"); return true; } + if (checkTaskTimeout()) { + this.checkTimeoutFlag = !alertTimeout(); + handleTimeoutFailed(); + } if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){ cancelTaskInstance(); break; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java index ea3ad19950..be666aed70 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java @@ -17,19 +17,23 @@ package org.apache.dolphinscheduler.server.master.runner; -import static org.apache.dolphinscheduler.common.Constants.UNDERLINE; - import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.model.TaskNode; +import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import 
org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; +import java.util.Date; import java.util.concurrent.Callable; import org.slf4j.Logger; @@ -81,6 +85,16 @@ public class MasterBaseTaskExecThread implements Callable { */ private TaskPriorityQueue taskUpdateQueue; + /** + * whether need check task time out. + */ + protected boolean checkTimeoutFlag = false; + + /** + * task timeout parameters + */ + protected TaskTimeoutParameter taskTimeoutParameter; + /** * constructor of MasterBaseTaskExecThread * @@ -93,6 +107,27 @@ public class MasterBaseTaskExecThread implements Callable { this.taskInstance = taskInstance; this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); this.taskUpdateQueue = SpringApplicationContext.getBean(TaskPriorityQueueImpl.class); + initTaskParams(); + } + + /** + * init task ordinary parameters + */ + private void initTaskParams() { + initTimeoutParams(); + } + + /** + * init task timeout parameters + */ + private void initTimeoutParams() { + String taskJson = taskInstance.getTaskJson(); + TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class); + taskTimeoutParameter = taskNode.getTaskTimeoutParameter(); + + if (taskTimeoutParameter.getEnable()) { + checkTimeoutFlag = true; + } } /** @@ -124,37 +159,36 @@ public class MasterBaseTaskExecThread implements Callable { boolean submitDB = false; boolean submitTask = false; TaskInstance task = null; - while (retryTimes <= commitRetryTimes){ + while (retryTimes <= commitRetryTimes) { try { - if(!submitDB){ + if (!submitDB) { // submit task to db task = processService.submitTask(taskInstance); - if(task != null && task.getId() != 0){ + if (task != null && task.getId() != 0) { submitDB = true; 
} } - if(submitDB && !submitTask){ + if (submitDB && !submitTask) { // dispatch task submitTask = dispatchTask(task); } - if(submitDB && submitTask){ + if (submitDB && submitTask) { return task; } - if(!submitDB){ + if (!submitDB) { logger.error("task commit to db failed , taskId {} has already retry {} times, please check the database", taskInstance.getId(), retryTimes); - }else if(!submitTask){ + } else if (!submitTask) { logger.error("task commit failed , taskId {} has already retry {} times, please check", taskInstance.getId(), retryTimes); } Thread.sleep(commitRetryInterval); } catch (Exception e) { - logger.error("task commit to mysql and dispatcht task failed",e); + logger.error("task commit to mysql and dispatcht task failed", e); } retryTimes += 1; } return task; } - /** * dispatcht task * @@ -162,13 +196,14 @@ public class MasterBaseTaskExecThread implements Callable { * @return whether submit task success */ public Boolean dispatchTask(TaskInstance taskInstance) { - try{ - if(taskInstance.isConditionsTask() + + try { + if (taskInstance.isConditionsTask() || taskInstance.isDependTask() - || taskInstance.isSubProcess()){ + || taskInstance.isSubProcess()) { return true; } - if(taskInstance.getState().typeIsFinished()){ + if (taskInstance.getState().typeIsFinished()) { logger.info(String.format("submit task , but task [%s] state [%s] is already finished. 
", taskInstance.getName(), taskInstance.getState().toString())); return true; } @@ -181,48 +216,40 @@ public class MasterBaseTaskExecThread implements Callable { logger.info("task ready to submit: {}", taskInstance); /** - * taskPriorityInfo + * taskPriority */ - String taskPriorityInfo = buildTaskPriorityInfo(processInstance.getProcessInstancePriority().getCode(), + TaskPriority taskPriority = buildTaskPriority(processInstance.getProcessInstancePriority().getCode(), processInstance.getId(), taskInstance.getProcessInstancePriority().getCode(), taskInstance.getId(), org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP); - taskUpdateQueue.put(taskPriorityInfo); - logger.info(String.format("master submit success, task : %s", taskInstance.getName()) ); + taskUpdateQueue.put(taskPriority); + logger.info(String.format("master submit success, task : %s", taskInstance.getName())); return true; - }catch (Exception e){ + } catch (Exception e) { logger.error("submit task Exception: ", e); logger.error("task error : %s", JSONUtils.toJsonString(taskInstance)); return false; } } - /** - * buildTaskPriorityInfo + * buildTaskPriority * * @param processInstancePriority processInstancePriority * @param processInstanceId processInstanceId * @param taskInstancePriority taskInstancePriority * @param taskInstanceId taskInstanceId * @param workerGroup workerGroup - * @return TaskPriorityInfo - */ - private String buildTaskPriorityInfo(int processInstancePriority, - int processInstanceId, - int taskInstancePriority, - int taskInstanceId, - String workerGroup) { - return processInstancePriority + - UNDERLINE + - processInstanceId + - UNDERLINE + - taskInstancePriority + - UNDERLINE + - taskInstanceId + - UNDERLINE + - workerGroup; + * @return TaskPriority + */ + private TaskPriority buildTaskPriority(int processInstancePriority, + int processInstanceId, + int taskInstancePriority, + int taskInstanceId, + String workerGroup) { + return new TaskPriority(processInstancePriority, 
processInstanceId, + taskInstancePriority, taskInstanceId, workerGroup); } /** @@ -246,4 +273,53 @@ public class MasterBaseTaskExecThread implements Callable { return submitWaitComplete(); } + /** + * alert time out + */ + protected boolean alertTimeout() { + if (TaskTimeoutStrategy.FAILED == this.taskTimeoutParameter.getStrategy()) { + return true; + } + logger.warn("process id:{} process name:{} task id: {},name:{} execution time out", + processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName()); + // send warn mail + ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); + alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(), processInstance.getId(), processInstance.getName(), + taskInstance.getId(), taskInstance.getName()); + return true; + } + + /** + * handle time out for time out strategy warn&&failed + */ + protected void handleTimeoutFailed() { + if (TaskTimeoutStrategy.WARN == this.taskTimeoutParameter.getStrategy()) { + return; + } + logger.info("process id:{} name:{} task id:{} name:{} cancel because of timeout.", + processInstance.getId(), processInstance.getName(), taskInstance.getId(), taskInstance.getName()); + this.cancel = true; + } + + /** + * check task remain time valid + */ + protected boolean checkTaskTimeout() { + if (!checkTimeoutFlag || taskInstance.getStartTime() == null) { + return false; + } + long remainTime = getRemainTime(taskTimeoutParameter.getInterval() * 60L); + return remainTime <= 0; + } + + /** + * get remain time + * + * @return remain time + */ + protected long getRemainTime(long timeoutSeconds) { + Date startTime = taskInstance.getStartTime(); + long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000; + return timeoutSeconds - usedTime; + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java index 3c28e16651..e68f9d5937 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java @@ -14,12 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.master.runner; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_RECOVERY_START_NODE_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_START_NODE_NAMES; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP; import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT; @@ -35,7 +36,6 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; -import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; @@ -55,6 +55,7 @@ import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.AlertManager; import 
org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; +import org.apache.dolphinscheduler.service.queue.PeerTaskInstancePriorityQueue; import org.apache.commons.io.FileUtils; @@ -68,6 +69,7 @@ import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; @@ -86,22 +88,18 @@ public class MasterExecThread implements Runnable { * logger of MasterExecThread */ private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class); - - /** - * process instance - */ - private ProcessInstance processInstance; - /** - * runing TaskNode + * runing TaskNode */ - private final Map> activeTaskNode = new ConcurrentHashMap<>(); - + private final Map> activeTaskNode = new ConcurrentHashMap<>(); /** * task exec service */ private final ExecutorService taskExecService; - + /** + * process instance + */ + private ProcessInstance processInstance; /** * submit failure nodes */ @@ -115,7 +113,7 @@ public class MasterExecThread implements Runnable { /** * error task list */ - private Map errorTaskList = new ConcurrentHashMap<>(); + private Map errorTaskList = new ConcurrentHashMap<>(); /** * complete task list @@ -123,9 +121,9 @@ public class MasterExecThread implements Runnable { private Map completeTaskList = new ConcurrentHashMap<>(); /** - * ready to submit task list + * ready to submit task queue */ - private Map readyToSubmitTaskList = new ConcurrentHashMap<>(); + private PeerTaskInstancePriorityQueue readyToSubmitTaskQueue = new PeerTaskInstancePriorityQueue(); /** * depend failed task map @@ -158,7 +156,7 @@ public class MasterExecThread implements Runnable { private DAG dag; /** - * process service + * process service */ private ProcessService processService; @@ -171,11 +169,18 @@ public class MasterExecThread 
implements Runnable { * */ private NettyRemotingClient nettyRemotingClient; + /** + * submit post node + * + * @param parentNodeName parent node name + */ + private Map propToValue = new ConcurrentHashMap(); /** * constructor of MasterExecThread - * @param processInstance processInstance - * @param processService processService + * + * @param processInstance processInstance + * @param processService processService * @param nettyRemotingClient nettyRemotingClient */ public MasterExecThread(ProcessInstance processInstance @@ -194,39 +199,36 @@ public class MasterExecThread implements Runnable { this.alertManager = alertManager; } - - - @Override public void run() { // process instance is null - if (processInstance == null){ + if (processInstance == null) { logger.info("process instance is not exists"); return; } // check to see if it's done - if (processInstance.getState().typeIsFinished()){ - logger.info("process instance is done : {}",processInstance.getId()); + if (processInstance.getState().typeIsFinished()) { + logger.info("process instance is done : {}", processInstance.getId()); return; } try { - if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()){ + if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()) { // sub process complement data executeComplementProcess(); - }else{ + } else { // execute flow executeProcess(); } - }catch (Exception e){ + } catch (Exception e) { logger.error("master exec thread exception", e); logger.error("process execute failed, process id:{}", processInstance.getId()); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); - }finally { + } finally { taskExecService.shutdown(); // post handle postHandle(); @@ -235,6 +237,7 @@ public class MasterExecThread implements Runnable { /** * execute process + * * @throws Exception exception */ private void executeProcess() throws 
Exception { @@ -245,6 +248,7 @@ public class MasterExecThread implements Runnable { /** * execute complement process + * * @throws Exception exception */ private void executeComplementProcess() throws Exception { @@ -259,7 +263,7 @@ public class MasterExecThread implements Runnable { int processDefinitionId = processInstance.getProcessDefinitionId(); List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); List listDate = Lists.newLinkedList(); - if(!CollectionUtils.isEmpty(schedules)){ + if (!CollectionUtils.isEmpty(schedules)) { for (Schedule schedule : schedules) { listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab())); } @@ -267,26 +271,26 @@ public class MasterExecThread implements Runnable { // get first fire date Iterator iterator = null; Date scheduleDate = null; - if(!CollectionUtils.isEmpty(listDate)) { + if (!CollectionUtils.isEmpty(listDate)) { iterator = listDate.iterator(); scheduleDate = iterator.next(); processInstance.setScheduleTime(scheduleDate); processService.updateProcessInstance(processInstance); - }else{ + } else { scheduleDate = processInstance.getScheduleTime(); - if(scheduleDate == null){ + if (scheduleDate == null) { scheduleDate = startDate; } } - while(Stopper.isRunning()){ + while (Stopper.isRunning()) { logger.info("process {} start to complement {} data", processInstance.getId(), DateUtils.dateToString(scheduleDate)); // prepare dag and other info prepareProcess(); - if(dag == null){ + if (dag == null) { logger.error("process {} dag is null, please check out parameters", processInstance.getId()); processInstance.setState(ExecutionStatus.SUCCESS); @@ -299,23 +303,23 @@ public class MasterExecThread implements Runnable { endProcess(); // process instance failure ,no more complements - if(!processInstance.getState().typeIsSuccess()){ + if (!processInstance.getState().typeIsSuccess()) { logger.info("process {} state {}, complement not completely!", 
processInstance.getId(), processInstance.getState()); break; } // current process instance success ,next execute - if(null == iterator){ + if (null == iterator) { // loop by day scheduleDate = DateUtils.getSomeDay(scheduleDate, 1); - if(scheduleDate.after(endDate)){ + if (scheduleDate.after(endDate)) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; } - }else{ + } else { // loop by schedule date - if(!iterator.hasNext()){ + if (!iterator.hasNext()) { // all success logger.info("process {} complement completely!", processInstance.getId()); break; @@ -325,8 +329,8 @@ public class MasterExecThread implements Runnable { // flow end // execute next process instance complement data processInstance.setScheduleTime(scheduleDate); - if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){ - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { + cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } @@ -342,37 +346,37 @@ public class MasterExecThread implements Runnable { } } - /** * prepare process parameter + * * @throws Exception exception */ private void prepareProcess() throws Exception { - // init task queue - initTaskQueue(); // gen process dag buildFlowDag(); + + // init task queue + initTaskQueue(); logger.info("prepare process :{} end", processInstance.getId()); } - /** * process end handle */ private void endProcess() { processInstance.setEndTime(new Date()); processService.updateProcessInstance(processInstance); - if(processInstance.getState().typeIsWaitingThread()){ + if (processInstance.getState().typeIsWaitingThread()) { processService.createRecoveryWaitingThreadCommand(null, processInstance); } List taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); 
alertManager.sendAlertProcessInstance(processInstance, taskInstances); } - /** - * generate process dag + * generate process dag + * * @throws Exception exception */ private void buildFlowDag() throws Exception { @@ -384,7 +388,7 @@ public class MasterExecThread implements Runnable { List startNodeNameList = parseStartNodeName(processInstance.getCommandParam()); ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(), startNodeNameList, recoveryNameList, processInstance.getTaskDependType()); - if(processDag == null){ + if (processDag == null) { logger.error("processDag is null"); return; } @@ -395,7 +399,7 @@ public class MasterExecThread implements Runnable { /** * init task queue */ - private void initTaskQueue(){ + private void initTaskQueue() { taskFailedSubmit = false; activeTaskNode.clear(); @@ -403,11 +407,14 @@ public class MasterExecThread implements Runnable { completeTaskList.clear(); errorTaskList.clear(); List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); - for(TaskInstance task : taskInstanceList){ - if(task.isTaskComplete()){ + for (TaskInstance task : taskInstanceList) { + if (task.isTaskComplete()) { completeTaskList.put(task.getName(), task); } - if(task.getState().typeIsFailure() && !task.taskCanRetry()){ + if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { + continue; + } + if (task.getState().typeIsFailure() && !task.taskCanRetry()) { errorTaskList.put(task.getName(), task); } } @@ -436,18 +443,19 @@ public class MasterExecThread implements Runnable { /** * submit task to execute + * * @param taskInstance task instance * @return TaskInstance */ private TaskInstance submitTaskExec(TaskInstance taskInstance) { MasterBaseTaskExecThread abstractExecThread = null; - if(taskInstance.isSubProcess()){ + if (taskInstance.isSubProcess()) { abstractExecThread = new SubProcessTaskExecThread(taskInstance); - }else if(taskInstance.isDependTask()){ + } 
else if (taskInstance.isDependTask()) { abstractExecThread = new DependentTaskExecThread(taskInstance); - }else if(taskInstance.isConditionsTask()){ + } else if (taskInstance.isConditionsTask()) { abstractExecThread = new ConditionsTaskExecThread(taskInstance); - }else { + } else { abstractExecThread = new MasterTaskExecThread(taskInstance); } Future future = taskExecService.submit(abstractExecThread); @@ -458,13 +466,14 @@ public class MasterExecThread implements Runnable { /** * find task instance in db. * in case submit more than one same name task in the same time. + * * @param taskName task name * @return TaskInstance */ - private TaskInstance findTaskIfExists(String taskName){ + private TaskInstance findTaskIfExists(String taskName) { List taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId()); - for(TaskInstance taskInstance : taskInstanceList){ - if(taskInstance.getName().equals(taskName)){ + for (TaskInstance taskInstance : taskInstanceList) { + if (taskInstance.getName().equals(taskName)) { return taskInstance; } } @@ -473,15 +482,16 @@ public class MasterExecThread implements Runnable { /** * encapsulation task - * @param processInstance process instance - * @param nodeName node name + * + * @param processInstance process instance + * @param nodeName node name * @return TaskInstance */ private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, TaskNode taskNode) { TaskInstance taskInstance = findTaskIfExists(nodeName); - if(taskInstance == null){ + if (taskInstance == null) { taskInstance = new TaskInstance(); // task name taskInstance.setName(nodeName); @@ -498,6 +508,9 @@ public class MasterExecThread implements Runnable { // task instance whether alert taskInstance.setAlertFlag(Flag.NO); + // task instance start time + taskInstance.setStartTime(null); + // task instance flag taskInstance.setFlag(Flag.YES); @@ -511,9 +524,9 @@ public class MasterExecThread implements Runnable { 
taskInstance.setRetryInterval(taskNode.getRetryInterval()); // task instance priority - if(taskNode.getTaskInstancePriority() == null){ + if (taskNode.getTaskInstancePriority() == null) { taskInstance.setTaskInstancePriority(Priority.MEDIUM); - }else{ + } else { taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority()); } @@ -522,7 +535,7 @@ public class MasterExecThread implements Runnable { String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? processWorkerGroup : taskNode.getWorkerGroup(); if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) { taskInstance.setWorkerGroup(processWorkerGroup); - }else { + } else { taskInstance.setWorkerGroup(taskWorkerGroup); } @@ -532,134 +545,10 @@ public class MasterExecThread implements Runnable { return taskInstance; } - - - /** - * if all of the task dependence are skip, skip it too. - * @param taskNode - * @return - */ - private boolean isTaskNodeNeedSkip(TaskNode taskNode){ - if(CollectionUtils.isEmpty(taskNode.getDepList())){ - return false; - } - for(String depNode : taskNode.getDepList()){ - if(!skipTaskNodeList.containsKey(depNode)){ - return false; - } - } - return true; - } - - /** - * set task node skip if dependence all skip - * @param taskNodesSkipList - */ - private void setTaskNodeSkip(List taskNodesSkipList){ - for(String skipNode : taskNodesSkipList){ - skipTaskNodeList.putIfAbsent(skipNode, dag.getNode(skipNode)); - Collection postNodeList = DagHelper.getStartVertex(skipNode, dag, completeTaskList); - List postSkipList = new ArrayList<>(); - for(String post : postNodeList){ - TaskNode postNode = dag.getNode(post); - if(isTaskNodeNeedSkip(postNode)){ - postSkipList.add(post); - } - } - setTaskNodeSkip(postSkipList); - } - } - - - /** - * parse condition task find the branch process - * set skip flag for another one. 
- * @param nodeName - * @return - */ - private List parseConditionTask(String nodeName){ - List conditionTaskList = new ArrayList<>(); - TaskNode taskNode = dag.getNode(nodeName); - if(!taskNode.isConditionsTask()){ - return conditionTaskList; - } - ConditionsParameters conditionsParameters = - JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); - - TaskInstance taskInstance = completeTaskList.get(nodeName); - if(taskInstance == null){ - logger.error("task instance {} cannot find, please check it!", nodeName); - return conditionTaskList; - } - - if(taskInstance.getState().typeIsSuccess()){ - conditionTaskList = conditionsParameters.getSuccessNode(); - setTaskNodeSkip(conditionsParameters.getFailedNode()); - }else if(taskInstance.getState().typeIsFailure()){ - conditionTaskList = conditionsParameters.getFailedNode(); - setTaskNodeSkip(conditionsParameters.getSuccessNode()); - }else{ - conditionTaskList.add(nodeName); - } - return conditionTaskList; - } - - /** - * parse post node list of previous node - * if condition node: return process according to the settings - * if post node completed, return post nodes of the completed node - * @param previousNodeName - * @return - */ - private List parsePostNodeList(String previousNodeName){ - List postNodeList = new ArrayList<>(); - - TaskNode taskNode = dag.getNode(previousNodeName); - if(taskNode != null && taskNode.isConditionsTask()){ - return parseConditionTask(previousNodeName); - } - Collection postNodeCollection = DagHelper.getStartVertex(previousNodeName, dag, completeTaskList); - List postSkipList = new ArrayList<>(); - // delete success node, parse the past nodes - // if conditions node, - // 1. parse the branch process according the conditions setting - // 2. 
set skip flag on anther branch process - for(String postNode : postNodeCollection){ - if(completeTaskList.containsKey(postNode)){ - TaskInstance postTaskInstance = completeTaskList.get(postNode); - if(dag.getNode(postNode).isConditionsTask()){ - List conditionTaskNodeList = parseConditionTask(postNode); - for(String conditions : conditionTaskNodeList){ - postNodeList.addAll(parsePostNodeList(conditions)); - } - }else if(postTaskInstance.getState().typeIsSuccess()){ - postNodeList.addAll(parsePostNodeList(postNode)); - }else{ - postNodeList.add(postNode); - } - - }else if(isTaskNodeNeedSkip(dag.getNode(postNode))){ - postSkipList.add(postNode); - setTaskNodeSkip(postSkipList); - postSkipList.clear(); - }else{ - postNodeList.add(postNode); - } - } - return postNodeList; - } - - /** - * submit post node - * @param parentNodeName parent node name - */ - private Map propToValue = new ConcurrentHashMap(); - private void submitPostNode(String parentNodeName){ - - List submitTaskNodeList = parsePostNodeList(parentNodeName); - + private void submitPostNode(String parentNodeName) { + Set submitTaskNodeList = DagHelper.parsePostNodes(parentNodeName, skipTaskNodeList, dag, completeTaskList); List taskInstances = new ArrayList<>(); - for(String taskNode : submitTaskNodeList){ + for (String taskNode : submitTaskNodeList) { try { VarPoolUtils.convertVarPoolToMap(propToValue, processInstance.getVarPool()); } catch (ParseException e) { @@ -669,23 +558,23 @@ public class MasterExecThread implements Runnable { TaskNode taskNodeObject = dag.getNode(taskNode); VarPoolUtils.setTaskNodeLocalParams(taskNodeObject, propToValue); taskInstances.add(createTaskInstance(processInstance, taskNode, - taskNodeObject)); + taskNodeObject)); } // if previous node success , post node submit - for(TaskInstance task : taskInstances){ + for (TaskInstance task : taskInstances) { - if(readyToSubmitTaskList.containsKey(task.getName())){ + if (readyToSubmitTaskQueue.contains(task)) { continue; } - 
if(completeTaskList.containsKey(task.getName())){ + if (completeTaskList.containsKey(task.getName())) { logger.info("task {} has already run success", task.getName()); continue; } - if(task.getState().typeIsPause() || task.getState().typeIsCancel()){ + if (task.getState().typeIsPause() || task.getState().typeIsCancel()) { logger.info("task {} stopped, the state is {}", task.getName(), task.getState()); - }else{ + } else { addTaskToStandByList(task); } } @@ -693,56 +582,73 @@ public class MasterExecThread implements Runnable { /** * determine whether the dependencies of the task node are complete + * * @return DependResult */ private DependResult isTaskDepsComplete(String taskName) { Collection startNodes = dag.getBeginNode(); // if vertex,returns true directly - if(startNodes.contains(taskName)){ + if (startNodes.contains(taskName)) { return DependResult.SUCCESS; } - TaskNode taskNode = dag.getNode(taskName); List depNameList = taskNode.getDepList(); - for(String depsNode : depNameList ){ - if(!dag.containsNode(depsNode) + for (String depsNode : depNameList) { + if (!dag.containsNode(depsNode) || forbiddenTaskList.containsKey(depsNode) - || skipTaskNodeList.containsKey(depsNode)){ + || skipTaskNodeList.containsKey(depsNode)) { continue; } // dependencies must be fully completed - if(!completeTaskList.containsKey(depsNode)){ + if (!completeTaskList.containsKey(depsNode)) { return DependResult.WAITING; } ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); - // conditions task would not return failed. 
- if(depTaskState.typeIsFailure() - && !DagHelper.haveConditionsAfterNode(depsNode, dag ) - && !dag.getNode(depsNode).isConditionsTask()){ - return DependResult.FAILED; - } - - if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){ + if (depTaskState.typeIsPause() || depTaskState.typeIsCancel()) { return DependResult.WAITING; } + // ignore task state if current task is condition + if (taskNode.isConditionsTask()) { + continue; + } + if (!dependTaskSuccess(depsNode, taskName)) { + return DependResult.FAILED; + } } - logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray())); - return DependResult.SUCCESS; } + /** + * depend node is completed, but here need check the condition task branch is the next node + */ + private boolean dependTaskSuccess(String dependNodeName, String nextNodeName) { + if (dag.getNode(dependNodeName).isConditionsTask()) { + //condition task need check the branch to run + List nextTaskList = DagHelper.parseConditionTask(dependNodeName, skipTaskNodeList, dag, completeTaskList); + if (!nextTaskList.contains(nextNodeName)) { + return false; + } + } else { + ExecutionStatus depTaskState = completeTaskList.get(dependNodeName).getState(); + if (depTaskState.typeIsFailure()) { + return false; + } + } + return true; + } /** * query task instance by complete state + * * @param state state * @return task instance list */ - private List getCompleteTaskByState(ExecutionStatus state){ + private List getCompleteTaskByState(ExecutionStatus state) { List resultList = new ArrayList<>(); - for (Map.Entry entry: completeTaskList.entrySet()) { - if(entry.getValue().getState() == state){ + for (Map.Entry entry : completeTaskList.entrySet()) { + if (entry.getValue().getState() == state) { resultList.add(entry.getValue()); } } @@ -750,18 +656,19 @@ public class MasterExecThread implements Runnable { } /** - * where there are ongoing tasks + * where there are ongoing tasks + * * @param state state * 
@return ExecutionStatus */ - private ExecutionStatus runningState(ExecutionStatus state){ + private ExecutionStatus runningState(ExecutionStatus state) { if (state == ExecutionStatus.READY_STOP || state == ExecutionStatus.READY_PAUSE || state == ExecutionStatus.WAITTING_THREAD || state == ExecutionStatus.DELAY_EXECUTION) { // if the running task is not completed, the state remains unchanged return state; - }else{ + } else { return ExecutionStatus.RUNNING_EXECUTION; } } @@ -771,12 +678,12 @@ public class MasterExecThread implements Runnable { * * @return Boolean whether has failed task */ - private boolean hasFailedTask(){ + private boolean hasFailedTask() { - if(this.taskFailedSubmit){ + if (this.taskFailedSubmit) { return true; } - if(this.errorTaskList.size() > 0){ + if (this.errorTaskList.size() > 0) { return true; } return this.dependFailedTask.size() > 0; @@ -787,13 +694,13 @@ public class MasterExecThread implements Runnable { * * @return Boolean whether process instance failed */ - private boolean processFailed(){ - if(hasFailedTask()) { - if(processInstance.getFailureStrategy() == FailureStrategy.END){ + private boolean processFailed() { + if (hasFailedTask()) { + if (processInstance.getFailureStrategy() == FailureStrategy.END) { return true; } if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) { - return readyToSubmitTaskList.size() == 0 || activeTaskNode.size() == 0; + return readyToSubmitTaskQueue.size() == 0 || activeTaskNode.size() == 0; } } return false; @@ -801,9 +708,10 @@ public class MasterExecThread implements Runnable { /** * whether task for waiting thread + * * @return Boolean whether has waiting thread task */ - private boolean hasWaitingThreadTask(){ + private boolean hasWaitingThreadTask() { List waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD); return CollectionUtils.isNotEmpty(waitingList); } @@ -813,74 +721,75 @@ public class MasterExecThread implements Runnable { * 1,failed retry task in the 
preparation queue , returns to failure directly * 2,exists pause task,complement not completed, pending submission of tasks, return to suspension * 3,success + * * @return ExecutionStatus */ - private ExecutionStatus processReadyPause(){ - if(hasRetryTaskInStandBy()){ + private ExecutionStatus processReadyPause() { + if (hasRetryTaskInStandBy()) { return ExecutionStatus.FAILURE; } List pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE); - if(CollectionUtils.isNotEmpty(pauseList) + if (CollectionUtils.isNotEmpty(pauseList) || !isComplementEnd() - || readyToSubmitTaskList.size() > 0){ + || readyToSubmitTaskQueue.size() > 0) { return ExecutionStatus.PAUSE; - }else{ + } else { return ExecutionStatus.SUCCESS; } } - /** * generate the latest process instance status by the tasks state + * * @return process instance execution status */ - private ExecutionStatus getProcessInstanceState(){ + private ExecutionStatus getProcessInstanceState() { ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ExecutionStatus state = instance.getState(); - if(activeTaskNode.size() > 0 || hasRetryTaskInStandBy()){ + if (activeTaskNode.size() > 0 || hasRetryTaskInStandBy()) { // active task and retry task exists return runningState(state); } // process failure - if(processFailed()){ + if (processFailed()) { return ExecutionStatus.FAILURE; } // waiting thread - if(hasWaitingThreadTask()){ + if (hasWaitingThreadTask()) { return ExecutionStatus.WAITTING_THREAD; } // pause - if(state == ExecutionStatus.READY_PAUSE){ + if (state == ExecutionStatus.READY_PAUSE) { return processReadyPause(); } // stop - if(state == ExecutionStatus.READY_STOP){ + if (state == ExecutionStatus.READY_STOP) { List stopList = getCompleteTaskByState(ExecutionStatus.STOP); List killList = getCompleteTaskByState(ExecutionStatus.KILL); - if(CollectionUtils.isNotEmpty(stopList) + if (CollectionUtils.isNotEmpty(stopList) || CollectionUtils.isNotEmpty(killList) - || 
!isComplementEnd()){ + || !isComplementEnd()) { return ExecutionStatus.STOP; - }else{ + } else { return ExecutionStatus.SUCCESS; } } // success - if(state == ExecutionStatus.RUNNING_EXECUTION){ + if (state == ExecutionStatus.RUNNING_EXECUTION) { List killTasks = getCompleteTaskByState(ExecutionStatus.KILL); - if(readyToSubmitTaskList.size() > 0){ + if (readyToSubmitTaskQueue.size() > 0) { //tasks currently pending submission, no retries, indicating that depend is waiting to complete return ExecutionStatus.RUNNING_EXECUTION; - }else if(CollectionUtils.isNotEmpty(killTasks)){ + } else if (CollectionUtils.isNotEmpty(killTasks)) { // tasks maybe killed manually return ExecutionStatus.FAILURE; - }else{ + } else { // if the waiting queue is empty and the status is in progress, then success return ExecutionStatus.SUCCESS; } @@ -889,12 +798,30 @@ public class MasterExecThread implements Runnable { return state; } + /** + * whether standby task list have retry tasks + */ + private boolean retryTaskExists() { + + boolean result = false; + + for (Iterator iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { + TaskInstance task = iter.next(); + if (task.getState().typeIsFailure()) { + result = true; + break; + } + } + return result; + } + /** * whether complement end + * * @return Boolean whether is complement end */ private boolean isComplementEnd() { - if(!processInstance.isComplementData()){ + if (!processInstance.isComplementData()) { return true; } @@ -903,7 +830,7 @@ public class MasterExecThread implements Runnable { Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); return processInstance.getScheduleTime().equals(endTime); } catch (Exception e) { - logger.error("complement end failed ",e); + logger.error("complement end failed ", e); return false; } } @@ -914,7 +841,7 @@ public class MasterExecThread implements Runnable { */ private void updateProcessInstanceState() { ExecutionStatus state = getProcessInstanceState(); - 
if(processInstance.getState() != state){ + if (processInstance.getState() != state) { logger.info( "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}", processInstance.getId(), processInstance.getName(), @@ -931,38 +858,50 @@ public class MasterExecThread implements Runnable { /** * get task dependency result + * * @param taskInstance task instance * @return DependResult */ - private DependResult getDependResultForTask(TaskInstance taskInstance){ + private DependResult getDependResultForTask(TaskInstance taskInstance) { return isTaskDepsComplete(taskInstance.getName()); } /** * add task to standby list + * * @param taskInstance task instance */ - private void addTaskToStandByList(TaskInstance taskInstance){ + private void addTaskToStandByList(TaskInstance taskInstance) { logger.info("add task to stand by list: {}", taskInstance.getName()); - readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance); + try { + readyToSubmitTaskQueue.put(taskInstance); + } catch (Exception e) { + logger.error("add task instance to readyToSubmitTaskQueue error"); + } } /** * remove task from stand by list + * * @param taskInstance task instance */ - private void removeTaskFromStandbyList(TaskInstance taskInstance){ + private void removeTaskFromStandbyList(TaskInstance taskInstance) { logger.info("remove task from stand by list: {}", taskInstance.getName()); - readyToSubmitTaskList.remove(taskInstance.getName()); + try { + readyToSubmitTaskQueue.remove(taskInstance); + } catch (Exception e) { + logger.error("remove task instance from readyToSubmitTaskQueue error"); + } } /** * has retry task in standby + * * @return Boolean whether has retry task in standby */ - private boolean hasRetryTaskInStandBy(){ - for (Map.Entry entry: readyToSubmitTaskList.entrySet()) { - if(entry.getValue().getState().typeIsFailure()){ + private boolean hasRetryTaskInStandBy() { + for (Iterator iter = readyToSubmitTaskQueue.iterator(); iter.hasNext(); ) { + if 
(iter.next().getState().typeIsFailure()) { return true; } } @@ -972,44 +911,44 @@ public class MasterExecThread implements Runnable { /** * submit and watch the tasks, until the work flow stop */ - private void runProcess(){ + private void runProcess() { // submit start node submitPostNode(null); boolean sendTimeWarning = false; - while(!processInstance.isProcessInstanceStop()){ + while (!processInstance.isProcessInstanceStop() && Stopper.isRunning()) { // send warning email if process time out. - if(!sendTimeWarning && checkProcessTimeOut(processInstance) ){ + if (!sendTimeWarning && checkProcessTimeOut(processInstance)) { alertManager.sendProcessTimeoutAlert(processInstance, processService.findProcessDefineById(processInstance.getProcessDefinitionId())); sendTimeWarning = true; } - for(Map.Entry> entry: activeTaskNode.entrySet()) { + for (Map.Entry> entry : activeTaskNode.entrySet()) { Future future = entry.getValue(); - TaskInstance task = entry.getKey().getTaskInstance(); + TaskInstance task = entry.getKey().getTaskInstance(); - if(!future.isDone()){ + if (!future.isDone()) { continue; } // node monitor thread complete task = this.processService.findTaskInstanceById(task.getId()); - if(task == null){ + if (task == null) { this.taskFailedSubmit = true; activeTaskNode.remove(entry.getKey()); continue; } // node monitor thread complete - if(task.getState().typeIsFinished()){ + if (task.getState().typeIsFinished()) { activeTaskNode.remove(entry.getKey()); } logger.info("task :{}, id:{} complete, state is {} ", task.getName(), task.getId(), task.getState()); // node success , post node submit - if(task.getState() == ExecutionStatus.SUCCESS){ + if (task.getState() == ExecutionStatus.SUCCESS) { processInstance.setVarPool(task.getVarPool()); processService.updateProcessInstance(processInstance); completeTaskList.put(task.getName(), task); @@ -1017,20 +956,20 @@ public class MasterExecThread implements Runnable { continue; } // node fails, retry first, and then execute 
the failure process - if(task.getState().typeIsFailure()){ - if(task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){ + if (task.getState().typeIsFailure()) { + if (task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) { this.recoverToleranceFaultTaskList.add(task); } - if(task.taskCanRetry()){ + if (task.taskCanRetry()) { addTaskToStandByList(task); - }else{ + } else { completeTaskList.put(task.getName(), task); - if( task.isConditionsTask() - || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { + if (task.isConditionsTask() + || DagHelper.haveConditionsAfterNode(task.getName(), dag)) { submitPostNode(task.getName()); - }else{ + } else { errorTaskList.put(task.getName(), task); - if(processInstance.getFailureStrategy() == FailureStrategy.END){ + if (processInstance.getFailureStrategy() == FailureStrategy.END) { killTheOtherTasks(); } } @@ -1041,30 +980,30 @@ public class MasterExecThread implements Runnable { completeTaskList.put(task.getName(), task); } // send alert - if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){ + if (CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)) { alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList); this.recoverToleranceFaultTaskList.clear(); } // updateProcessInstance completed task status // failure priority is higher than pause // if a task fails, other suspended tasks need to be reset kill - if(errorTaskList.size() > 0){ - for(Map.Entry entry: completeTaskList.entrySet()) { + if (errorTaskList.size() > 0) { + for (Map.Entry entry : completeTaskList.entrySet()) { TaskInstance completeTask = entry.getValue(); - if(completeTask.getState()== ExecutionStatus.PAUSE){ + if (completeTask.getState() == ExecutionStatus.PAUSE) { completeTask.setState(ExecutionStatus.KILL); completeTaskList.put(entry.getKey(), completeTask); processService.updateTaskInstance(completeTask); } } } - if(canSubmitTaskToQueue()){ + if (canSubmitTaskToQueue()) { 
submitStandByTask(); } try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); } catch (InterruptedException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); Thread.currentThread().interrupt(); } updateProcessInstanceState(); @@ -1075,29 +1014,30 @@ public class MasterExecThread implements Runnable { /** * whether check process time out + * * @param processInstance task instance * @return true if time out of process instance > running time of process instance */ private boolean checkProcessTimeOut(ProcessInstance processInstance) { - if(processInstance.getTimeout() == 0 ){ + if (processInstance.getTimeout() == 0) { return false; } Date now = new Date(); - long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); + long runningTime = DateUtils.diffMin(now, processInstance.getStartTime()); return runningTime > processInstance.getTimeout(); } /** * whether can submit task to queue + * * @return boolean */ private boolean canSubmitTaskToQueue() { return OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory()); } - /** * close the on going tasks */ @@ -1111,7 +1051,7 @@ public class MasterExecThread implements Runnable { TaskInstance taskInstance = taskExecThread.getTaskInstance(); taskInstance = processService.findTaskInstanceById(taskInstance.getId()); - if(taskInstance != null && taskInstance.getState().typeIsFinished()){ + if (taskInstance != null && taskInstance.getState().typeIsFinished()) { continue; } @@ -1127,16 +1067,19 @@ public class MasterExecThread implements Runnable { /** * whether the retry interval is timed out + * * @param taskInstance task instance * @return Boolean */ - private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){ - if(taskInstance.getState() != ExecutionStatus.FAILURE){ + private boolean retryTaskIntervalOverTime(TaskInstance taskInstance) { + if (taskInstance.getState() != ExecutionStatus.FAILURE) { return true; } - if(taskInstance.getId() == 0 
|| - taskInstance.getMaxRetryTimes() ==0 || - taskInstance.getRetryInterval() == 0 ){ + if (taskInstance.getId() == 0 + || + taskInstance.getMaxRetryTimes() == 0 + || + taskInstance.getRetryInterval() == 0) { return true; } Date now = new Date(); @@ -1148,62 +1091,69 @@ public class MasterExecThread implements Runnable { /** * handling the list of tasks to be submitted */ - private void submitStandByTask(){ - for(Map.Entry entry: readyToSubmitTaskList.entrySet()) { - TaskInstance task = entry.getValue(); - DependResult dependResult = getDependResultForTask(task); - if(DependResult.SUCCESS == dependResult){ - if(retryTaskIntervalOverTime(task)){ - submitTaskExec(task); + private void submitStandByTask() { + try { + int length = readyToSubmitTaskQueue.size(); + for (int i = 0; i < length; i++) { + TaskInstance task = readyToSubmitTaskQueue.peek(); + DependResult dependResult = getDependResultForTask(task); + if (DependResult.SUCCESS == dependResult) { + if (retryTaskIntervalOverTime(task)) { + submitTaskExec(task); + removeTaskFromStandbyList(task); + } + } else if (DependResult.FAILED == dependResult) { + // if the dependency fails, the current node is not submitted and the state changes to failure. + dependFailedTask.put(task.getName(), task); removeTaskFromStandbyList(task); + logger.info("task {},id:{} depend result : {}", task.getName(), task.getId(), dependResult); } - }else if(DependResult.FAILED == dependResult){ - // if the dependency fails, the current node is not submitted and the state changes to failure. 
- dependFailedTask.put(entry.getKey(), task); - removeTaskFromStandbyList(task); - logger.info("task {},id:{} depend result : {}",task.getName(), task.getId(), dependResult); } + } catch (Exception e) { + logger.error("submit standby task error", e); } } /** * get recovery task instance + * * @param taskId task id * @return recovery task instance */ - private TaskInstance getRecoveryTaskInstance(String taskId){ - if(!StringUtils.isNotEmpty(taskId)){ + private TaskInstance getRecoveryTaskInstance(String taskId) { + if (!StringUtils.isNotEmpty(taskId)) { return null; } try { Integer intId = Integer.valueOf(taskId); TaskInstance task = processService.findTaskInstanceById(intId); - if(task == null){ - logger.error("start node id cannot be found: {}", taskId); - }else { + if (task == null) { + logger.error("start node id cannot be found: {}", taskId); + } else { return task; } - }catch (Exception e){ - logger.error("get recovery task instance failed ",e); + } catch (Exception e) { + logger.error("get recovery task instance failed ", e); } return null; } /** * get start task instance list + * * @param cmdParam command param * @return task instance list */ - private List getStartTaskInstanceList(String cmdParam){ + private List getStartTaskInstanceList(String cmdParam) { List instanceList = new ArrayList<>(); Map paramMap = JSONUtils.toMap(cmdParam); - if(paramMap != null && paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)){ - String[] idList = paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); - for(String nodeId : idList){ + if (paramMap != null && paramMap.containsKey(CMD_PARAM_RECOVERY_START_NODE_STRING)) { + String[] idList = paramMap.get(CMD_PARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA); + for (String nodeId : idList) { TaskInstance task = getRecoveryTaskInstance(nodeId); - if(task != null){ + if (task != null) { instanceList.add(task); } } @@ -1213,17 +1163,18 @@ public class MasterExecThread implements Runnable { /** * 
parse "StartNodeNameList" from cmd param + * * @param cmdParam command param * @return start node name list */ - private List parseStartNodeName(String cmdParam){ + private List parseStartNodeName(String cmdParam) { List startNodeNameList = new ArrayList<>(); Map paramMap = JSONUtils.toMap(cmdParam); - if(paramMap == null){ + if (paramMap == null) { return startNodeNameList; } - if(paramMap.containsKey(CMDPARAM_START_NODE_NAMES)){ - startNodeNameList = Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA)); + if (paramMap.containsKey(CMD_PARAM_START_NODE_NAMES)) { + startNodeNameList = Arrays.asList(paramMap.get(CMD_PARAM_START_NODE_NAMES).split(Constants.COMMA)); } return startNodeNameList; } @@ -1231,11 +1182,12 @@ public class MasterExecThread implements Runnable { /** * generate start node name list from parsing command param; * if "StartNodeIdList" exists in command param, return StartNodeIdList + * * @return recovery node name list */ - private List getRecoveryNodeNameList(){ + private List getRecoveryNodeNameList() { List recoveryNodeNameList = new ArrayList<>(); - if(CollectionUtils.isNotEmpty(recoverNodeIdList)) { + if (CollectionUtils.isNotEmpty(recoverNodeIdList)) { for (TaskInstance task : recoverNodeIdList) { recoveryNodeNameList.add(task.getName()); } @@ -1245,17 +1197,18 @@ public class MasterExecThread implements Runnable { /** * generate flow dag + * * @param processDefinitionJson process definition json * @param startNodeNameList start node name list * @param recoveryNodeNameList recovery node name list * @param depNodeType depend node type * @return ProcessDag process dag - * @throws Exception exception + * @throws Exception exception */ public ProcessDag generateFlowDag(String processDefinitionJson, List startNodeNameList, List recoveryNodeNameList, - TaskDependType depNodeType)throws Exception{ + TaskDependType depNodeType) throws Exception { return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, 
recoveryNodeNameList, depNodeType); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java index 30dd0f9f12..b0e0528c3e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerService.java @@ -124,52 +124,57 @@ public class MasterSchedulerService extends Thread { public void run() { logger.info("master scheduler started"); while (Stopper.isRunning()){ - InterProcessMutex mutex = null; try { boolean runCheckFlag = OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory()); - if(!runCheckFlag) { + if (!runCheckFlag) { Thread.sleep(Constants.SLEEP_TIME_MILLIS); continue; } if (zkMasterClient.getZkClient().getState() == CuratorFrameworkState.STARTED) { + scheduleProcess(); + } + } catch (Exception e) { + logger.error("master scheduler thread error", e); + } + } + } - mutex = zkMasterClient.blockAcquireMutex(); - - int activeCount = masterExecService.getActiveCount(); - // make sure to scan and delete command table in one transaction - Command command = processService.findOneCommand(); - if (command != null) { - logger.info("find one command: id: {}, type: {}", command.getId(),command.getCommandType()); - - try{ - - ProcessInstance processInstance = processService.handleCommand(logger, - getLocalAddress(), - this.masterConfig.getMasterExecThreads() - activeCount, command); - if (processInstance != null) { - logger.info("start master exec thread , split DAG ..."); - masterExecService.execute( - new MasterExecThread( - processInstance - , processService - , nettyRemotingClient - , alertManager - , masterConfig)); - } - }catch (Exception e){ - logger.error("scan 
command error ", e); - processService.moveToErrorCommand(command, e.toString()); - } - } else{ - //indicate that no command ,sleep for 1s - Thread.sleep(Constants.SLEEP_TIME_MILLIS); + private void scheduleProcess() throws Exception { + InterProcessMutex mutex = null; + try { + mutex = zkMasterClient.blockAcquireMutex(); + + int activeCount = masterExecService.getActiveCount(); + // make sure to scan and delete command table in one transaction + Command command = processService.findOneCommand(); + if (command != null) { + logger.info("find one command: id: {}, type: {}", command.getId(),command.getCommandType()); + + try { + + ProcessInstance processInstance = processService.handleCommand(logger, + getLocalAddress(), + this.masterConfig.getMasterExecThreads() - activeCount, command); + if (processInstance != null) { + logger.info("start master exec thread , split DAG ..."); + masterExecService.execute( + new MasterExecThread( + processInstance + , processService + , nettyRemotingClient + , alertManager + , masterConfig)); } + } catch (Exception e) { + logger.error("scan command error ", e); + processService.moveToErrorCommand(command, e.toString()); } - } catch (Exception e){ - logger.error("master scheduler thread error",e); - } finally{ - zkMasterClient.releaseMutex(mutex); + } else { + //indicate that no command ,sleep for 1s + Thread.sleep(Constants.SLEEP_TIME_MILLIS); } + } finally { + zkMasterClient.releaseMutex(mutex); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java index b55ecbb762..a87d28f826 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java @@ -122,16 +122,7 @@ public 
class MasterTaskExecThread extends MasterBaseTaskExecThread { // query new state taskInstance = processService.findTaskInstanceById(taskInstance.getId()); logger.info("wait task: process id: {}, task id:{}, task name:{} complete", - this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName()); - // task time out - boolean checkTimeout = false; - TaskTimeoutParameter taskTimeoutParameter = getTaskTimeoutParameter(); - if (taskTimeoutParameter.getEnable()) { - TaskTimeoutStrategy strategy = taskTimeoutParameter.getStrategy(); - if (strategy == TaskTimeoutStrategy.WARN || strategy == TaskTimeoutStrategy.WARNFAILED) { - checkTimeout = true; - } - } + this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName()); while (Stopper.isRunning()) { try { @@ -152,17 +143,8 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { taskInstanceCacheManager.removeByTaskInstanceId(taskInstance.getId()); break; } - if (checkTimeout) { - long remainTime = DateUtils.getRemainTime(taskInstance.getStartTime(), taskTimeoutParameter.getInterval() * 60L); - if (remainTime < 0) { - logger.warn("task id: {} execution time out", taskInstance.getId()); - // process define - ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); - // send warn mail - alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(), processInstance.getId(), processInstance.getName(), - taskInstance.getId(), taskInstance.getName()); - checkTimeout = false; - } + if (checkTaskTimeout()) { + this.checkTimeoutFlag = !alertTimeout(); } // updateProcessInstance task instance taskInstance = processService.findTaskInstanceById(taskInstance.getId()); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java index 2b1508dc44..74b1c2f271 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java @@ -130,19 +130,20 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { while (Stopper.isRunning()) { // waiting for subflow process instance establishment if (subProcessInstance == null) { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - if(!setTaskInstanceState()){ continue; } } subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId()); + if (checkTaskTimeout()) { + this.checkTimeoutFlag = !alertTimeout(); + handleTimeoutFailed(); + } updateParentProcessState(); if (subProcessInstance.getState().typeIsFinished()){ break; } - if(this.processInstance.getState() == ExecutionStatus.READY_PAUSE){ // parent process "ready to pause" , child process "pause" pauseSubProcess(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java index f1d351de15..c881b89a4c 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; @@ -87,7 +88,7 @@ public class AlertManager { * get process 
instance content * * @param processInstance process instance - * @param taskInstances task instance list + * @param taskInstances task instance list * @return process instance format content */ public String getContentProcessInstance(ProcessInstance processInstance, @@ -139,7 +140,7 @@ public class AlertManager { /** * getting worker fault tolerant content * - * @param processInstance process instance + * @param processInstance process instance * @param toleranceTaskList tolerance task list * @return worker tolerance content */ @@ -162,7 +163,7 @@ public class AlertManager { /** * send worker alert fault tolerance * - * @param processInstance process instance + * @param processInstance process instance * @param toleranceTaskList tolerance task list */ public void sendAlertWorkerToleranceFault(ProcessInstance processInstance, List toleranceTaskList) { @@ -186,11 +187,13 @@ public class AlertManager { * send process instance alert * * @param processInstance process instance - * @param taskInstances task instance list + * @param taskInstances task instance list */ public void sendAlertProcessInstance(ProcessInstance processInstance, List taskInstances) { - + if (Flag.YES == processInstance.getIsSubProcess()) { + return; + } boolean sendWarnning = false; WarningType warningType = processInstance.getWarningType(); switch (warningType) { @@ -230,7 +233,7 @@ public class AlertManager { /** * send process timeout alert * - * @param processInstance process instance + * @param processInstance process instance * @param processDefinition process definition */ public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java new file mode 100644 index 0000000000..d71eb54f3c --- /dev/null +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ArgsUtils.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.utils; + +public class ArgsUtils { + + private ArgsUtils() throws IllegalStateException { + throw new IllegalStateException("Utility class"); + } + + public static String escape(String arg) { + return arg.replace(" ", "\\ ").replace("\"", "\\\"").replace("'", "\\'"); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java index 71c7d959e1..7f76baaa52 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java @@ -104,9 +104,7 @@ public class DependentExecute { ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(), dateInterval); if(processInstance == null){ - logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}", - 
dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() ); - return DependResult.FAILED; + return DependResult.WAITING; } // need to check workflow for updates, so get all task and check the task state if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){ diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java index eaaafc9568..2431eedd16 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java @@ -14,8 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.utils; +package org.apache.dolphinscheduler.server.utils; import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.Constants; @@ -26,7 +26,6 @@ import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; import java.util.ArrayList; import java.util.List; - /** * flink args utils */ @@ -47,6 +46,7 @@ public class FlinkArgsUtils { if (StringUtils.isNotEmpty(tmpDeployMode)) { deployMode = tmpDeployMode; } + String others = param.getOthers(); if (!LOCAL_DEPLOY_MODE.equals(deployMode)) { args.add(Constants.FLINK_RUN_MODE); //-m @@ -61,10 +61,10 @@ public class FlinkArgsUtils { String appName = param.getAppName(); if (StringUtils.isNotEmpty(appName)) { //-ynm args.add(Constants.FLINK_APP_NAME); - args.add(appName); + args.add(ArgsUtils.escape(appName)); } - // judgy flink version,from flink1.10,the parameter -yn removed + // judge flink version,from flink1.10,the parameter -yn removed String flinkVersion = param.getFlinkVersion(); if (FLINK_VERSION_BEFORE_1_10.equals(flinkVersion)) { int taskManager = param.getTaskManager(); 
@@ -85,10 +85,23 @@ public class FlinkArgsUtils { args.add(taskManagerMemory); } + if (StringUtils.isEmpty(others) || !others.contains(Constants.FLINK_QUEUE)) { + String queue = param.getQueue(); + if (StringUtils.isNotEmpty(queue)) { // -yqu + args.add(Constants.FLINK_QUEUE); + args.add(queue); + } + } + args.add(Constants.FLINK_DETACH); //-d } + // -p -s -yqu -yat -sae -yD -D + if (StringUtils.isNotEmpty(others)) { + args.add(others); + } + ProgramType programType = param.getProgramType(); String mainClass = param.getMainClass(); if (programType != null && programType != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) { @@ -106,21 +119,6 @@ public class FlinkArgsUtils { args.add(mainArgs); } - // --files --conf --libjar ... - String others = param.getOthers(); - String queue = param.getQueue(); - if (StringUtils.isNotEmpty(others)) { - - if (!others.contains(Constants.FLINK_QUEUE) && StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) { - args.add(Constants.FLINK_QUEUE); - args.add(param.getQueue()); - } - args.add(others); - } else if (StringUtils.isNotEmpty(queue) && !deployMode.equals(LOCAL_DEPLOY_MODE)) { - args.add(Constants.FLINK_QUEUE); - args.add(param.getQueue()); - } - return args; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/LogUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/LogUtils.java index bb8ddc85de..1dc4287f82 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/LogUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/LogUtils.java @@ -25,8 +25,6 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.Optional; -import javax.transaction.NotSupportedException; - import org.slf4j.LoggerFactory; import ch.qos.logback.classic.sift.SiftingAppender; @@ -35,8 +33,8 @@ import ch.qos.logback.core.spi.AppenderAttachable; public class 
LogUtils { - private LogUtils() throws NotSupportedException { - throw new NotSupportedException(); + private LogUtils() throws IllegalStateException { + throw new IllegalStateException("Utility class"); } /** diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java index cf49285b9f..6c54c790fb 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java @@ -18,8 +18,11 @@ package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; @@ -27,9 +30,6 @@ import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.service.log.LogClientService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import java.io.File; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -37,369 +37,416 @@ import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** - * mainly used to get the start command line of a process. + * mainly used to get the start command line of a process. */ public class ProcessUtils { - /** - * logger. 
- */ - private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); - - /** - * Initialization regularization, solve the problem of pre-compilation performance, - * avoid the thread safety problem of multi-thread operation. - */ - private static final Pattern MACPATTERN = Pattern.compile("-[+|-]-\\s(\\d+)"); - - private static final Pattern WINDOWSATTERN = Pattern.compile("(\\d+)"); - - /** - * build command line characters. - * @param commandList command list - * @return command - */ - public static String buildCommandStr(List commandList) { - String cmdstr; - String[] cmd = commandList.toArray(new String[commandList.size()]); - SecurityManager security = System.getSecurityManager(); - boolean allowAmbiguousCommands = false; - if (security == null) { - allowAmbiguousCommands = true; - String value = System.getProperty("jdk.lang.Process.allowAmbiguousCommands"); - if (value != null) { - allowAmbiguousCommands = !"false".equalsIgnoreCase(value); - } - } - if (allowAmbiguousCommands) { - - String executablePath = new File(cmd[0]).getPath(); - - if (needsEscaping(VERIFICATION_LEGACY, executablePath)) { - executablePath = quoteString(executablePath); - } - - cmdstr = createCommandLine( - VERIFICATION_LEGACY, executablePath, cmd); - } else { - String executablePath; - try { - executablePath = getExecutablePath(cmd[0]); - } catch (IllegalArgumentException e) { - StringBuilder join = new StringBuilder(); - for (String s : cmd) { - join.append(s).append(' '); + /** + * logger + */ + private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); + + /** + * Initialization regularization, solve the problem of pre-compilation performance, + * avoid the thread safety problem of multi-thread operation + */ + private static final Pattern MACPATTERN = Pattern.compile("-[+|-]-\\s(\\d+)"); + + /** + * Expression of PID recognition in Windows scene + */ + private static final Pattern WINDOWSATTERN = Pattern.compile("(\\d+)"); + + private 
static final String LOCAL_PROCESS_EXEC = "jdk.lang.Process.allowAmbiguousCommands"; + + /** + * build command line characters. + * + * @param commandList command list + * @return command + */ + public static String buildCommandStr(List commandList) { + String cmdstr; + String[] cmd = commandList.toArray(new String[0]); + SecurityManager security = System.getSecurityManager(); + boolean allowAmbiguousCommands = isAllowAmbiguousCommands(security); + if (allowAmbiguousCommands) { + + String executablePath = new File(cmd[0]).getPath(); + + if (needsEscaping(VERIFICATION_LEGACY, executablePath)) { + executablePath = quoteString(executablePath); + } + + cmdstr = createCommandLine( + VERIFICATION_LEGACY, executablePath, cmd); + } else { + String executablePath; + try { + executablePath = getExecutablePath(cmd[0]); + } catch (IllegalArgumentException e) { + + StringBuilder join = new StringBuilder(); + for (String s : cmd) { + join.append(s).append(' '); + } + + cmd = getTokensFromCommand(join.toString()); + executablePath = getExecutablePath(cmd[0]); + + // Check new executable name once more + if (security != null) { + security.checkExec(executablePath); + } + } + + cmdstr = createCommandLine( + + isShellFile(executablePath) ? 
VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd); } + return cmdstr; + } - cmd = getTokensFromCommand(join.toString()); - executablePath = getExecutablePath(cmd[0]); - - // Check new executable name once more - if (security != null) { - security.checkExec(executablePath); + /** + * check is allow ambiguous commands + * + * @param security security manager + * @return allow ambiguous command flag + */ + private static boolean isAllowAmbiguousCommands(SecurityManager security) { + boolean allowAmbiguousCommands = false; + if (security == null) { + allowAmbiguousCommands = true; + String value = System.getProperty(LOCAL_PROCESS_EXEC); + if (value != null) { + allowAmbiguousCommands = !Constants.STRING_FALSE.equalsIgnoreCase(value); + } } - } - - cmdstr = createCommandLine( + return allowAmbiguousCommands; + } - isShellFile(executablePath) ? VERIFICATION_CMD_BAT : VERIFICATION_WIN32, quoteString(executablePath), cmd); + /** + * get executable path. + * + * @param path path + * @return executable path + */ + private static String getExecutablePath(String path) { + boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments"); + + File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path); + return fileToRun.getPath(); } - return cmdstr; - } - - /** - * get executable path. - * - * @param path path - * @return executable path - */ - private static String getExecutablePath(String path) { - boolean pathIsQuoted = isQuoted(true, path, "Executable name has embedded quote, split the arguments"); - - File fileToRun = new File(pathIsQuoted ? path.substring(1, path.length() - 1) : path); - return fileToRun.getPath(); - } - - /** - * whether is shell file. 
- * - * @param executablePath executable path - * @return true if endsWith .CMD or .BAT - */ - private static boolean isShellFile(String executablePath) { - String upPath = executablePath.toUpperCase(); - return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT")); - } - - /** - * quote string. - * - * @param arg argument - * @return format arg - */ - private static String quoteString(String arg) { - StringBuilder argbuf = new StringBuilder(arg.length() + 2); - return argbuf.append('"').append(arg).append('"').toString(); - } - - /** - * get tokens from command. - * - * @param command command - * @return token string array - */ - private static String[] getTokensFromCommand(String command) { - ArrayList matchList = new ArrayList<>(8); - Matcher regexMatcher = LazyPattern.PATTERN.matcher(command); - while (regexMatcher.find()) { - matchList.add(regexMatcher.group()); + + /** + * whether is shell file. + * + * @param executablePath executable path + * @return true if endsWith .CMD or .BAT + */ + private static boolean isShellFile(String executablePath) { + String upPath = executablePath.toUpperCase(); + return (upPath.endsWith(".CMD") || upPath.endsWith(".BAT")); } - return matchList.toArray(new String[matchList.size()]); - } - - /** - * Lazy Pattern. - */ - private static class LazyPattern { - // Escape-support version: - // "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)"; - private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\""); - } - - /** - * verification cmd bat. - */ - private static final int VERIFICATION_CMD_BAT = 0; - - /** - * verification win32. - */ - private static final int VERIFICATION_WIN32 = 1; - - /** - * verification legacy. - */ - private static final int VERIFICATION_LEGACY = 2; - - /** - * escape verification. - */ - private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'}, - - {' ', '\t', '<', '>'}, {' ', '\t'}}; - - /** - * create command line. 
- * @param verificationType verification type - * @param executablePath executable path - * @param cmd cmd - * @return command line - */ - private static String createCommandLine(int verificationType, final String executablePath, final String[] cmd) { - StringBuilder cmdbuf = new StringBuilder(80); - - cmdbuf.append(executablePath); - - for (int i = 1; i < cmd.length; ++i) { - cmdbuf.append(' '); - String s = cmd[i]; - if (needsEscaping(verificationType, s)) { - cmdbuf.append('"').append(s); - - if ((verificationType != VERIFICATION_CMD_BAT) && s.endsWith("\\")) { - cmdbuf.append('\\'); - } - cmdbuf.append('"'); - } else { - cmdbuf.append(s); - } + + /** + * quote string. + * + * @param arg argument + * @return format arg + */ + private static String quoteString(String arg) { + return '"' + arg + '"'; } - return cmdbuf.toString(); - } - - /** - * whether is quoted. - * @param noQuotesInside - * @param arg - * @param errorMessage - * @return boolean - */ - private static boolean isQuoted(boolean noQuotesInside, String arg, String errorMessage) { - int lastPos = arg.length() - 1; - if (lastPos >= 1 && arg.charAt(0) == '"' && arg.charAt(lastPos) == '"') { - // The argument has already been quoted. - if (noQuotesInside) { - if (arg.indexOf('"', 1) != lastPos) { - // There is ["] inside. - throw new IllegalArgumentException(errorMessage); + + /** + * get tokens from command. + * + * @param command command + * @return token string array + */ + private static String[] getTokensFromCommand(String command) { + ArrayList matchList = new ArrayList<>(8); + Matcher regexMatcher = LazyPattern.PATTERN.matcher(command); + while (regexMatcher.find()) { + matchList.add(regexMatcher.group()); } - } - return true; + return matchList.toArray(new String[0]); } - if (noQuotesInside) { - if (arg.indexOf('"') >= 0) { - // There is ["] inside. - throw new IllegalArgumentException(errorMessage); - } + + /** + * Lazy Pattern. 
+ */ + private static class LazyPattern { + /** + * Escape-support version: + * "(\")((?:\\\\\\1|.)+?)\\1|([^\\s\"]+)"; + */ + private static final Pattern PATTERN = Pattern.compile("[^\\s\"]+|\"[^\"]*\""); } - return false; - } - - /** - * whether needs escaping. - * - * @param verificationType verification type - * @param arg arg - * @return boolean - */ - private static boolean needsEscaping(int verificationType, String arg) { - - boolean argIsQuoted = isQuoted((verificationType == VERIFICATION_CMD_BAT), arg, "Argument has embedded quote, use the explicit CMD.EXE call."); - - if (!argIsQuoted) { - char[] testEscape = ESCAPE_VERIFICATION[verificationType]; - for (int i = 0; i < testEscape.length; ++i) { - if (arg.indexOf(testEscape[i]) >= 0) { - return true; + + /** + * verification cmd bat. + */ + private static final int VERIFICATION_CMD_BAT = 0; + + /** + * verification win32. + */ + private static final int VERIFICATION_WIN32 = 1; + + /** + * verification legacy. + */ + private static final int VERIFICATION_LEGACY = 2; + + /** + * escape verification. + */ + private static final char[][] ESCAPE_VERIFICATION = {{' ', '\t', '<', '>', '&', '|', '^'}, + + {' ', '\t', '<', '>'}, {' ', '\t'}}; + + /** + * create command line. + * + * @param verificationType verification type + * @param executablePath executable path + * @param cmd cmd + * @return command line + */ + private static String createCommandLine(int verificationType, final String executablePath, final String[] cmd) { + StringBuilder cmdbuf = new StringBuilder(80); + + cmdbuf.append(executablePath); + + for (int i = 1; i < cmd.length; ++i) { + cmdbuf.append(' '); + String s = cmd[i]; + if (needsEscaping(verificationType, s)) { + cmdbuf.append('"').append(s); + + if ((verificationType != VERIFICATION_CMD_BAT) && s.endsWith("\\")) { + cmdbuf.append('\\'); + } + cmdbuf.append('"'); + } else { + cmdbuf.append(s); + } } - } + return cmdbuf.toString(); } - return false; - } - - /** - * kill yarn application. 
- * - * @param appIds app id list - * @param logger logger - * @param tenantCode tenant code - * @param executePath execute path - */ - public static void cancelApplication(List appIds, Logger logger, String tenantCode, String executePath) { - if (appIds.size() > 0) { - String appid = appIds.get(appIds.size() - 1); - String commandFile = String - .format("%s/%s.kill", executePath, appid); - String cmd = "yarn application -kill " + appid; - try { - StringBuilder sb = new StringBuilder(); - sb.append("#!/bin/sh\n"); - sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); - sb.append("cd $BASEDIR\n"); - if (CommonUtils.getSystemEnvPath() != null) { - sb.append("source " + CommonUtils.getSystemEnvPath() + "\n"); - } - sb.append("\n\n"); - sb.append(cmd); - File f = new File(commandFile); - - if (!f.exists()) { - FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); + /** + * whether is quoted. + * + * @param noQuotesInside no quotes inside + * @param arg arg + * @param errorMessage error message + * @return boolean + */ + private static boolean isQuoted(boolean noQuotesInside, String arg, String errorMessage) { + int lastPos = arg.length() - 1; + if (lastPos >= 1 && arg.charAt(0) == '"' && arg.charAt(lastPos) == '"') { + // The argument has already been quoted. + if (noQuotesInside && arg.indexOf('"', 1) != lastPos) { + // There is ["] inside. + throw new IllegalArgumentException(errorMessage); + } + return true; + } + if (noQuotesInside && arg.indexOf('"') >= 0) { + // There is ["] inside. + throw new IllegalArgumentException(errorMessage); } + return false; + } - String runCmd = "sh " + commandFile; - if (StringUtils.isNotEmpty(tenantCode)) { - runCmd = "sudo -u " + tenantCode + " " + runCmd; + /** + * whether needs escaping. 
+ * + * @param verificationType verification type + * @param arg arg + * @return boolean + */ + private static boolean needsEscaping(int verificationType, String arg) { + + boolean argIsQuoted = isQuoted((verificationType == VERIFICATION_CMD_BAT), arg, "Argument has embedded quote, use the explicit CMD.EXE call."); + + if (!argIsQuoted) { + char[] testEscape = ESCAPE_VERIFICATION[verificationType]; + for (char c : testEscape) { + if (arg.indexOf(c) >= 0) { + return true; + } + } } + return false; + } - logger.info("kill cmd:{}", runCmd); + /** + * kill yarn application. + * + * @param appIds app id list + * @param logger logger + * @param tenantCode tenant code + * @param executePath execute path + */ + public static void cancelApplication(List appIds, Logger logger, String tenantCode, String executePath) { + if (CollectionUtils.isNotEmpty(appIds)) { + + for (String appId : appIds) { + try { + ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); + + if (!applicationStatus.typeIsFinished()) { + String commandFile = String + .format("%s/%s.kill", executePath, appId); + String cmd = "yarn application -kill " + appId; + execYarnKillCommand(logger, tenantCode, appId, commandFile, cmd); + } + } catch (Exception e) { + logger.error(String.format("Get yarn application app id [%s] status failed: [%s]", appId, e.getMessage())); + } + } + } + } - Runtime.getRuntime().exec(runCmd); - } catch (Exception e) { - logger.error("kill application error", e); - } + /** + * build kill command for yarn application + * + * @param logger logger + * @param tenantCode tenant code + * @param appId app id + * @param commandFile command file + * @param cmd cmd + */ + private static void execYarnKillCommand(Logger logger, String tenantCode, String appId, String commandFile, String cmd) { + try { + StringBuilder sb = new StringBuilder(); + sb.append("#!/bin/sh\n"); + sb.append("BASEDIR=$(cd `dirname $0`; pwd)\n"); + sb.append("cd $BASEDIR\n"); + if 
(CommonUtils.getSystemEnvPath() != null) { + sb.append("source ").append(CommonUtils.getSystemEnvPath()).append("\n"); + } + sb.append("\n\n"); + sb.append(cmd); + + File f = new File(commandFile); + + if (!f.exists()) { + FileUtils.writeStringToFile(new File(commandFile), sb.toString(), StandardCharsets.UTF_8); + } + + String runCmd = String.format("%s %s", Constants.SH, commandFile); + if (StringUtils.isNotEmpty(tenantCode)) { + runCmd = "sudo -u " + tenantCode + " " + runCmd; + } + + logger.info("kill cmd:{}", runCmd); + OSUtils.exeCmd(runCmd); + } catch (Exception e) { + logger.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); + } } - } - /** - * kill tasks according to different task types. - * - * @param taskExecutionContext taskExecutionContext - */ - public static void kill(TaskExecutionContext taskExecutionContext) { - try { - int processId = taskExecutionContext.getProcessId(); - if (processId == 0) { - logger.error("process kill failed, process id :{}, task id:{}", - processId, taskExecutionContext.getTaskInstanceId()); - return; - } + /** + * kill tasks according to different task types. 
+ * + * @param taskExecutionContext taskExecutionContext + */ + public static void kill(TaskExecutionContext taskExecutionContext) { + try { + int processId = taskExecutionContext.getProcessId(); + if (processId == 0) { + logger.error("process kill failed, process id :{}, task id:{}", + processId, taskExecutionContext.getTaskInstanceId()); + return; + } - String cmd = String.format("sudo kill -9 %s", getPidsStr(processId)); + String cmd = String.format("sudo kill -9 %s", getPidsStr(processId)); - logger.info("process id:{}, cmd:{}", processId, cmd); + logger.info("process id:{}, cmd:{}", processId, cmd); - OSUtils.exeCmd(cmd); + OSUtils.exeCmd(cmd); - // find log and kill yarn job - killYarnJob(taskExecutionContext); + // find log and kill yarn job + killYarnJob(taskExecutionContext); - } catch (Exception e) { - logger.error("kill task failed", e); - } - } - - /** - * get pids str. - * - * @param processId process id - * @return pids - * @throws Exception exception - */ - public static String getPidsStr(int processId) throws Exception { - StringBuilder sb = new StringBuilder(); - Matcher mat; - // pstree pid get sub pids - if (OSUtils.isMacOS()) { - String pids = OSUtils.exeCmd("pstree -sp " + processId); - mat = MACPATTERN.matcher(pids); - } else { - String pids = OSUtils.exeCmd("pstree -p " + processId); - mat = WINDOWSATTERN.matcher(pids); + } catch (Exception e) { + logger.error("kill task failed", e); + } } - while (mat.find()) { - sb.append(mat.group(1)).append(" "); - } - return sb.toString().trim(); - } - - /** - * find logs and kill yarn tasks. 
- * - * @param taskExecutionContext taskExecutionContext - */ - public static void killYarnJob(TaskExecutionContext taskExecutionContext) { - try { - Thread.sleep(Constants.SLEEP_TIME_MILLIS); - LogClientService logClient = null; - String log = null; - try { - logClient = new LogClientService(); - log = logClient.viewLog(Host.of(taskExecutionContext.getHost()).getIp(), - Constants.RPC_PORT, - taskExecutionContext.getLogPath()); - } finally { - if (logClient != null) { - logClient.close(); - } - } - if (StringUtils.isNotEmpty(log)) { - List appIds = LoggerUtils.getAppIds(log, logger); - String workerDir = taskExecutionContext.getExecutePath(); - if (StringUtils.isEmpty(workerDir)) { - logger.error("task instance work dir is empty"); - throw new RuntimeException("task instance work dir is empty"); + /** + * get pids str. + * + * @param processId process id + * @return pids pid String + * @throws Exception exception + */ + public static String getPidsStr(int processId) throws Exception { + StringBuilder sb = new StringBuilder(); + Matcher mat = null; + // pstree pid get sub pids + if (OSUtils.isMacOS()) { + String pids = OSUtils.exeCmd(String.format("%s -sp %d", Constants.PSTREE, processId)); + if (null != pids) { + mat = MACPATTERN.matcher(pids); + } + } else { + String pids = OSUtils.exeCmd(String.format("%s -p %d", Constants.PSTREE, processId)); + mat = WINDOWSATTERN.matcher(pids); } - if (appIds.size() > 0) { - cancelApplication(appIds, logger, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); + + if (null != mat) { + while (mat.find()) { + sb.append(mat.group(1)).append(" "); + } } - } - } catch (Exception e) { - logger.error("kill yarn job failure",e); + return sb.toString().trim(); + } + + /** + * find logs and kill yarn tasks. 
+ * + * @param taskExecutionContext taskExecutionContext + */ + public static void killYarnJob(TaskExecutionContext taskExecutionContext) { + try { + Thread.sleep(Constants.SLEEP_TIME_MILLIS); + LogClientService logClient = null; + String log; + try { + logClient = new LogClientService(); + log = logClient.viewLog(Host.of(taskExecutionContext.getHost()).getIp(), + Constants.RPC_PORT, + taskExecutionContext.getLogPath()); + } finally { + if (logClient != null) { + logClient.close(); + } + } + if (StringUtils.isNotEmpty(log)) { + List appIds = LoggerUtils.getAppIds(log, logger); + String workerDir = taskExecutionContext.getExecutePath(); + if (StringUtils.isEmpty(workerDir)) { + logger.error("task instance work dir is empty"); + throw new RuntimeException("task instance work dir is empty"); + } + if (CollectionUtils.isNotEmpty(appIds)) { + cancelApplication(appIds, logger, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); + } + } + + } catch (Exception e) { + logger.error("kill yarn job failure", e); + } } - } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/SparkArgsUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/SparkArgsUtils.java index 5cc7bd831a..0c68016db0 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/SparkArgsUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/SparkArgsUtils.java @@ -14,24 +14,29 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.server.utils; +package org.apache.dolphinscheduler.server.utils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ProgramType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import java.util.ArrayList; import java.util.List; - /** - * spark args utils + * spark args utils */ public class SparkArgsUtils { + private static final String SPARK_CLUSTER = "cluster"; + + private static final String SPARK_LOCAL = "local"; + + private static final String SPARK_ON_YARN = "yarn"; + /** * build args * @@ -40,15 +45,15 @@ public class SparkArgsUtils { */ public static List buildArgs(SparkParameters param) { List args = new ArrayList<>(); - String deployMode = "cluster"; + String deployMode = SPARK_CLUSTER; args.add(Constants.MASTER); - if(StringUtils.isNotEmpty(param.getDeployMode())){ + if (StringUtils.isNotEmpty(param.getDeployMode())) { deployMode = param.getDeployMode(); } - if(!"local".equals(deployMode)){ - args.add("yarn"); + if (!SPARK_LOCAL.equals(deployMode)) { + args.add(SPARK_ON_YARN); args.add(Constants.DEPLOY_MODE); } @@ -56,7 +61,7 @@ public class SparkArgsUtils { ProgramType type = param.getProgramType(); String mainClass = param.getMainClass(); - if(type != null && type != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)){ + if (type != null && type != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) { args.add(Constants.MAIN_CLASS); args.add(mainClass); } @@ -96,14 +101,14 @@ public class SparkArgsUtils { String queue = param.getQueue(); if (StringUtils.isNotEmpty(others)) { - if(!others.contains(Constants.SPARK_QUEUE) && StringUtils.isNotEmpty(queue)){ + if (!others.contains(Constants.SPARK_QUEUE) && StringUtils.isNotEmpty(queue)) { 
args.add(Constants.SPARK_QUEUE); args.add(queue); } args.add(others); - }else if (StringUtils.isNotEmpty(queue)) { + } else if (StringUtils.isNotEmpty(queue)) { args.add(Constants.SPARK_QUEUE); args.add(queue); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java index 3a8c8fe7d6..1bc79ab94e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/UDFUtils.java @@ -70,14 +70,16 @@ public class UDFUtils { */ private static void buildJarSql(List sqls, Map udfFuncTenantCodeMap) { String defaultFS = HadoopUtils.getInstance().getConfiguration().get(Constants.FS_DEFAULTFS); - + String resourceFullName; Set> entries = udfFuncTenantCodeMap.entrySet(); for (Map.Entry entry:entries){ String uploadPath = HadoopUtils.getHdfsUdfDir(entry.getValue()); if (!uploadPath.startsWith("hdfs:")) { uploadPath = defaultFS + uploadPath; } - sqls.add(String.format("add jar %s%s", uploadPath, entry.getKey().getResourceName())); + resourceFullName = entry.getKey().getResourceName(); + resourceFullName = resourceFullName.startsWith("/") ? 
resourceFullName : String.format("/%s",resourceFullName); + sqls.add(String.format("add jar %s%s", uploadPath, resourceFullName)); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java index 072e76aaeb..bcc082846b 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java @@ -23,9 +23,12 @@ import org.apache.dolphinscheduler.remote.NettyRemotingServer; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyServerConfig; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.server.worker.processor.DBTaskAckProcessor; +import org.apache.dolphinscheduler.server.worker.processor.DBTaskResponseProcessor; import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor; import org.apache.dolphinscheduler.server.worker.processor.TaskKillProcessor; import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; +import org.apache.dolphinscheduler.server.worker.runner.RetryReportTaskStatusThread; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; @@ -78,6 +81,9 @@ public class WorkerServer { */ private AlertClientService alertClientService; + @Autowired + private RetryReportTaskStatusThread retryReportTaskStatusThread; + /** * worker server startup * @@ -103,6 +109,8 @@ public class WorkerServer { this.nettyRemotingServer = new NettyRemotingServer(serverConfig); this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, new TaskExecuteProcessor()); 
this.nettyRemotingServer.registerProcessor(CommandType.TASK_KILL_REQUEST, new TaskKillProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_ACK, new DBTaskAckProcessor()); + this.nettyRemotingServer.registerProcessor(CommandType.DB_TASK_RESPONSE, new DBTaskResponseProcessor()); this.nettyRemotingServer.start(); // worker registry @@ -111,6 +119,9 @@ public class WorkerServer { //alert-server client registry alertClientService = new AlertClientService(workerConfig.getAlertListenHost(),Constants.ALERT_RPC_PORT); + // retry report task status + this.retryReportTaskStatusThread.start(); + /** * register hooks, which are called before the process exits */ diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java new file mode 100644 index 0000000000..3639b8eba3 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/ResponceCache.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.cache; + +import org.apache.dolphinscheduler.common.enums.Event; +import org.apache.dolphinscheduler.remote.command.Command; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Responce Cache : cache worker send master result + */ +public class ResponceCache { + + private static final ResponceCache instance = new ResponceCache(); + + private ResponceCache(){} + + public static ResponceCache get(){ + return instance; + } + + private Map ackCache = new ConcurrentHashMap<>(); + private Map responseCache = new ConcurrentHashMap<>(); + + + /** + * cache response + * @param taskInstanceId taskInstanceId + * @param command command + * @param event event ACK/RESULT + */ + public void cache(Integer taskInstanceId, Command command, Event event){ + switch (event){ + case ACK: + ackCache.put(taskInstanceId,command); + break; + case RESULT: + responseCache.put(taskInstanceId,command); + break; + default: + throw new IllegalArgumentException("invalid event type : " + event); + } + } + + + /** + * remove ack cache + * @param taskInstanceId taskInstanceId + */ + public void removeAckCache(Integer taskInstanceId){ + ackCache.remove(taskInstanceId); + } + + /** + * remove reponse cache + * @param taskInstanceId taskInstanceId + */ + public void removeResponseCache(Integer taskInstanceId){ + responseCache.remove(taskInstanceId); + } + + /** + * getAckCache + * @return getAckCache + */ + public Map getAckCache(){ + return ackCache; + } + + /** + * getResponseCache + * @return getResponseCache + */ + public Map getResponseCache(){ + return responseCache; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java index 7df8e01b3d..71c795b0a3 100644 --- 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java @@ -17,7 +17,6 @@ package org.apache.dolphinscheduler.server.worker.cache; - import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; /** @@ -42,7 +41,16 @@ public interface TaskExecutionContextCacheManager { /** * remove taskInstance by taskInstanceId + * * @param taskInstanceId taskInstanceId */ void removeByTaskInstanceId(Integer taskInstanceId); + + /** + * If the value for the specified key is present and non-null,then perform the update,otherwise it will return false + * + * @param taskExecutionContext taskExecutionContext + * @return status + */ + boolean updateTaskExecutionContext(TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java index 9c92fb2d64..5c3f9904b6 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java @@ -19,13 +19,14 @@ package org.apache.dolphinscheduler.server.worker.cache.impl; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; -import org.springframework.stereotype.Service; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import org.springframework.stereotype.Service; + /** - * TaskExecutionContextCache + * TaskExecutionContextCache */ @Service public 
class TaskExecutionContextCacheManagerImpl implements TaskExecutionContextCacheManager { @@ -34,7 +35,7 @@ public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContex /** * taskInstance cache */ - private Map taskExecutionContextCache = new ConcurrentHashMap<>(); + private Map taskExecutionContextCache = new ConcurrentHashMap<>(); /** * get taskInstance by taskInstance id @@ -54,15 +55,22 @@ public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContex */ @Override public void cacheTaskExecutionContext(TaskExecutionContext taskExecutionContext) { - taskExecutionContextCache.put(taskExecutionContext.getTaskInstanceId(),taskExecutionContext); + taskExecutionContextCache.put(taskExecutionContext.getTaskInstanceId(), taskExecutionContext); } /** * remove taskInstance by taskInstanceId + * * @param taskInstanceId taskInstanceId */ @Override public void removeByTaskInstanceId(Integer taskInstanceId) { taskExecutionContextCache.remove(taskInstanceId); } + + @Override + public boolean updateTaskExecutionContext(TaskExecutionContext taskExecutionContext) { + taskExecutionContextCache.computeIfPresent(taskExecutionContext.getTaskInstanceId(), (k, v) -> taskExecutionContext); + return taskExecutionContextCache.containsKey(taskExecutionContext.getTaskInstanceId()); + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java new file mode 100644 index 0000000000..551661722f --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskAckProcessor.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.remote.command.*; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * db task ack processor + */ +public class DBTaskAckProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(DBTaskAckProcessor.class); + + + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.DB_TASK_ACK == command.getType(), + String.format("invalid command type : %s", command.getType())); + + DBTaskAckCommand taskAckCommand = JSONUtils.parseObject( + command.getBody(), DBTaskAckCommand.class); + + if (taskAckCommand == null){ + return; + } + + if (taskAckCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){ + ResponceCache.get().removeAckCache(taskAckCommand.getTaskInstanceId()); + } + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java new file mode 100644 index 0000000000..e382245b63 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/DBTaskResponseProcessor.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.Preconditions; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * db task response processor + */ +public class DBTaskResponseProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(DBTaskResponseProcessor.class); + + + @Override + public void process(Channel channel, Command command) { + Preconditions.checkArgument(CommandType.DB_TASK_RESPONSE == command.getType(), + String.format("invalid command type : %s", command.getType())); + + DBTaskResponseCommand taskResponseCommand = JSONUtils.parseObject( + command.getBody(), DBTaskResponseCommand.class); + + if (taskResponseCommand == null){ + return; + } + + if (taskResponseCommand.getStatus() == ExecutionStatus.SUCCESS.getCode()){ + ResponceCache.get().removeResponseCache(taskResponseCommand.getTaskInstanceId()); + } + } + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java index b1b67affcc..6e2fdeb5d9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/NettyRemoteChannel.java @@ 
-50,6 +50,12 @@ public class NettyRemoteChannel { this.opaque = opaque; } + public NettyRemoteChannel(Channel channel) { + this.channel = channel; + this.host = ChannelUtils.toAddress(channel); + this.opaque = -1; + } + public Channel getChannel() { return channel; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java index 4aeb6feffb..eda4da6dd9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java @@ -25,6 +25,7 @@ import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; @@ -66,6 +67,8 @@ public class TaskCallbackService { public TaskCallbackService(){ final NettyClientConfig clientConfig = new NettyClientConfig(); this.nettyRemotingClient = new NettyRemotingClient(clientConfig); + this.nettyRemotingClient.registerProcessor(CommandType.DB_TASK_ACK, new DBTaskAckProcessor()); + this.nettyRemotingClient.registerProcessor(CommandType.DB_TASK_RESPONSE, new DBTaskResponseProcessor()); } /** @@ -83,20 +86,21 @@ public class TaskCallbackService { * @return callback channel */ private NettyRemoteChannel getRemoteChannel(int taskInstanceId){ + Channel newChannel; NettyRemoteChannel nettyRemoteChannel = REMOTE_CHANNELS.get(taskInstanceId); - 
if(nettyRemoteChannel == null){ - throw new IllegalArgumentException("nettyRemoteChannel is empty, should call addRemoteChannel first"); - } - if(nettyRemoteChannel.isActive()){ - return nettyRemoteChannel; - } - Channel newChannel = nettyRemotingClient.getChannel(nettyRemoteChannel.getHost()); - if(newChannel != null){ - return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId); + if(nettyRemoteChannel != null){ + if(nettyRemoteChannel.isActive()){ + return nettyRemoteChannel; + } + newChannel = nettyRemotingClient.getChannel(nettyRemoteChannel.getHost()); + if(newChannel != null){ + return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId); + } + logger.warn("original master : {} for task : {} is not reachable, random select master", + nettyRemoteChannel.getHost(), + taskInstanceId); } - logger.warn("original master : {} for task : {} is not reachable, random select master", - nettyRemoteChannel.getHost(), - taskInstanceId); + Set masterNodes = null; int ntries = 0; while (Stopper.isRunning()) { @@ -116,7 +120,7 @@ public class TaskCallbackService { for (String masterNode : masterNodes) { newChannel = nettyRemotingClient.getChannel(Host.of(masterNode)); if (newChannel != null) { - return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId); + return getRemoteChannel(newChannel,taskInstanceId); } } masterNodes = null; @@ -138,6 +142,12 @@ public class TaskCallbackService { return remoteChannel; } + private NettyRemoteChannel getRemoteChannel(Channel newChannel, int taskInstanceId){ + NettyRemoteChannel remoteChannel = new NettyRemoteChannel(newChannel); + addRemoteChannel(taskInstanceId, remoteChannel); + return remoteChannel; + } + /** * remove callback channels * @param taskInstanceId taskInstanceId diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java index adef703e1d..cdda71d491 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java @@ -17,24 +17,25 @@ package org.apache.dolphinscheduler.server.worker.processor; +import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.NetUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; -import org.apache.dolphinscheduler.common.utils.RetryerUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.LogUtils; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import 
org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread; import org.apache.dolphinscheduler.service.alert.AlertClientService; @@ -42,35 +43,32 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import java.util.Date; import java.util.Optional; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.github.rholder.retry.RetryException; - import io.netty.channel.Channel; /** - * worker request processor + * worker request processor */ public class TaskExecuteProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteProcessor.class); + private static final Logger logger = LoggerFactory.getLogger(TaskExecuteProcessor.class); /** - * thread executor service + * thread executor service */ private final ExecutorService workerExecService; /** - * worker config + * worker config */ private final WorkerConfig workerConfig; /** - * task callback service + * task callback service */ private final TaskCallbackService taskCallbackService; @@ -79,26 +77,45 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { */ private AlertClientService alertClientService; + /** + * taskExecutionContextCacheManager + */ + private TaskExecutionContextCacheManager taskExecutionContextCacheManager; + public TaskExecuteProcessor() { this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", workerConfig.getWorkerExecThreads()); + this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); + } + + /** + * Pre-cache task to avoid extreme situations when kill task. 
There is no such task in the cache + * + * @param taskExecutionContext task + */ + private void setTaskCache(TaskExecutionContext taskExecutionContext) { + TaskExecutionContext preTaskCache = new TaskExecutionContext(); + preTaskCache.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext); } public TaskExecuteProcessor(AlertClientService alertClientService) { this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", workerConfig.getWorkerExecThreads()); + this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); + this.alertClientService = alertClientService; } @Override public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_EXECUTE_REQUEST == command.getType(), - String.format("invalid command type : %s", command.getType())); + String.format("invalid command type : %s", command.getType())); - TaskExecuteRequestCommand taskRequestCommand = JsonSerializer.deserialize( - command.getBody(), TaskExecuteRequestCommand.class); + TaskExecuteRequestCommand taskRequestCommand = JSONUtils.parseObject( + command.getBody(), TaskExecuteRequestCommand.class); logger.info("received command : {}", taskRequestCommand); @@ -115,13 +132,17 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { return; } - taskExecutionContext.setHost(NetUtils.getHost() + ":" + workerConfig.getListenPort()); - + setTaskCache(taskExecutionContext); // custom logger Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX, - taskExecutionContext.getProcessDefineId(), - taskExecutionContext.getProcessInstanceId(), - taskExecutionContext.getTaskInstanceId())); 
+ taskExecutionContext.getProcessDefineId(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId())); + + taskExecutionContext.setHost(NetUtils.getHost() + ":" + workerConfig.getListenPort()); + taskExecutionContext.setStartTime(new Date()); + taskExecutionContext.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext)); + taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); // local execute path String execLocalPath = getExecLocalPath(taskExecutionContext); @@ -129,42 +150,34 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { FileUtils.taskLoggerThreadLocal.set(taskLogger); try { - FileUtils.createWorkDirAndUserIfAbsent(execLocalPath, taskExecutionContext.getTenantCode()); + FileUtils.createWorkDirIfAbsent(execLocalPath); } catch (Throwable ex) { String errorLog = String.format("create execLocalPath : %s", execLocalPath); - LoggerUtils.logError(Optional.ofNullable(logger), errorLog, ex); + LoggerUtils.logError(Optional.of(logger), errorLog, ex); LoggerUtils.logError(Optional.ofNullable(taskLogger), errorLog, ex); + taskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); } FileUtils.taskLoggerThreadLocal.remove(); taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(), - new NettyRemoteChannel(channel, command.getOpaque())); + new NettyRemoteChannel(channel, command.getOpaque())); - if (DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(), taskExecutionContext.getDelayTime() * 60L) > 0) { - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION); - taskExecutionContext.setStartTime(null); - } else { - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); - taskExecutionContext.setStartTime(new Date()); - } + this.doAck(taskExecutionContext); - // tell master the status of this task (RUNNING_EXECUTION or DELAY_EXECUTION) - final Command ackCommand = 
buildAckCommand(taskExecutionContext).convert2Command(); + // submit task + workerExecService.submit(new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger, alertClientService)); + } - try { - RetryerUtils.retryCall(() -> { - taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(),ackCommand); - return Boolean.TRUE; - }); - // submit task - workerExecService.submit(new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger, alertClientService)); - } catch (ExecutionException | RetryException e) { - logger.error(e.getMessage(), e); - } + private void doAck(TaskExecutionContext taskExecutionContext) { + // tell master that task is in executing + TaskExecuteAckCommand ackCommand = buildAckCommand(taskExecutionContext); + ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command(), Event.ACK); + taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command()); } /** * build ack command + * * @param taskExecutionContext taskExecutionContext * @return TaskExecuteAckCommand */ @@ -186,13 +199,14 @@ public class TaskExecuteProcessor implements NettyRequestProcessor { /** * get execute local path + * * @param taskExecutionContext taskExecutionContext * @return execute local path */ private String getExecLocalPath(TaskExecutionContext taskExecutionContext) { return FileUtils.getProcessExecDir(taskExecutionContext.getProjectId(), - taskExecutionContext.getProcessDefineId(), - taskExecutionContext.getProcessInstanceId(), - taskExecutionContext.getTaskInstanceId()); + taskExecutionContext.getProcessDefineId(), + taskExecutionContext.getProcessInstanceId(), + taskExecutionContext.getTaskInstanceId()); } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java index 800db211c3..45268e6d86 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java @@ -17,9 +17,9 @@ package org.apache.dolphinscheduler.server.worker.processor; -import io.netty.channel.Channel; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.Preconditions; @@ -29,7 +29,6 @@ import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.apache.dolphinscheduler.remote.utils.Host; import org.apache.dolphinscheduler.remote.utils.Pair; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; @@ -39,26 +38,29 @@ import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContext import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.log.LogClientService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.Collections; import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import io.netty.channel.Channel; + /** - * task kill processor + * task kill processor */ public class 
TaskKillProcessor implements NettyRequestProcessor { private final Logger logger = LoggerFactory.getLogger(TaskKillProcessor.class); /** - * worker config + * worker config */ private final WorkerConfig workerConfig; /** - * task callback service + * task callback service */ private final TaskCallbackService taskCallbackService; @@ -67,8 +69,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { */ private TaskExecutionContextCacheManager taskExecutionContextCacheManager; - - public TaskKillProcessor(){ + public TaskKillProcessor() { this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class); @@ -83,34 +84,35 @@ public class TaskKillProcessor implements NettyRequestProcessor { @Override public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.TASK_KILL_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType())); - TaskKillRequestCommand killCommand = JsonSerializer.deserialize(command.getBody(), TaskKillRequestCommand.class); + TaskKillRequestCommand killCommand = JSONUtils.parseObject(command.getBody(), TaskKillRequestCommand.class); logger.info("received kill command : {}", killCommand); Pair> result = doKill(killCommand); taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(), - new NettyRemoteChannel(channel, command.getOpaque())); + new NettyRemoteChannel(channel, command.getOpaque())); - TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand,result); + TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand, result); taskCallbackService.sendResult(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command()); 
taskExecutionContextCacheManager.removeByTaskInstanceId(taskKillResponseCommand.getTaskInstanceId()); } /** - * do kill + * do kill + * * @param killCommand * @return kill result */ - private Pair> doKill(TaskKillRequestCommand killCommand){ - List appIds = Collections.EMPTY_LIST; + private Pair> doKill(TaskKillRequestCommand killCommand) { + List appIds = Collections.emptyList(); try { - TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); - + int taskInstanceId = killCommand.getTaskInstanceId(); + TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); Integer processId = taskExecutionContext.getProcessId(); - - if (processId == null || processId.equals(0)){ - logger.error("process kill failed, process id :{}, task id:{}", processId, killCommand.getTaskInstanceId()); - return Pair.of(false, appIds); + if (processId.equals(0)) { + taskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); + logger.info("the task has not been executed and has been cancelled, task id:{}", taskInstanceId); + return Pair.of(true, appIds); } String cmd = String.format("sudo kill -9 %s", ProcessUtils.getPidsStr(taskExecutionContext.getProcessId())); @@ -121,9 +123,9 @@ public class TaskKillProcessor implements NettyRequestProcessor { // find log and kill yarn job appIds = killYarnJob(Host.of(taskExecutionContext.getHost()).getIp(), - taskExecutionContext.getLogPath(), - taskExecutionContext.getExecutePath(), - taskExecutionContext.getTenantCode()); + taskExecutionContext.getLogPath(), + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTenantCode()); return Pair.of(true, appIds); } catch (Exception e) { @@ -135,8 +137,8 @@ public class TaskKillProcessor implements NettyRequestProcessor { /** * build TaskKillResponseCommand * - * @param killCommand kill command - * @param result exe result + * @param killCommand kill command 
+ * @param result exe result * @return build TaskKillResponseCommand */ private TaskKillResponseCommand buildKillTaskResponseCommand(TaskKillRequestCommand killCommand, @@ -145,7 +147,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { taskKillResponseCommand.setStatus(result.getLeft() ? ExecutionStatus.SUCCESS.getCode() : ExecutionStatus.FAILURE.getCode()); taskKillResponseCommand.setAppIds(result.getRight()); TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId()); - if(taskExecutionContext != null){ + if (taskExecutionContext != null) { taskKillResponseCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId()); taskKillResponseCommand.setHost(taskExecutionContext.getHost()); taskKillResponseCommand.setProcessId(taskExecutionContext.getProcessId()); @@ -154,20 +156,20 @@ public class TaskKillProcessor implements NettyRequestProcessor { } /** - * kill yarn job + * kill yarn job * - * @param host host - * @param logPath logPath + * @param host host + * @param logPath logPath * @param executePath executePath - * @param tenantCode tenantCode + * @param tenantCode tenantCode * @return List appIds */ private List killYarnJob(String host, String logPath, String executePath, String tenantCode) { LogClientService logClient = null; try { logClient = new LogClientService(); - logger.info("view log host : {},logPath : {}", host,logPath); - String log = logClient.viewLog(host, Constants.RPC_PORT, logPath); + logger.info("view log host : {},logPath : {}", host, logPath); + String log = logClient.viewLog(host, Constants.RPC_PORT, logPath); if (StringUtils.isNotEmpty(log)) { List appIds = LoggerUtils.getAppIds(log, logger); @@ -181,9 +183,9 @@ public class TaskKillProcessor implements NettyRequestProcessor { } } } catch (Exception e) { - logger.error("kill yarn job error",e); + logger.error("kill yarn job error", e); } finally { - if(logClient != null){ + if (logClient != null) 
{ logClient.close(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java new file mode 100644 index 0000000000..ec79238d39 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/RetryReportTaskStatusThread.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.runner; + +import org.apache.dolphinscheduler.common.thread.Stopper; + +import org.apache.dolphinscheduler.common.thread.ThreadUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.server.worker.cache.ResponceCache; +import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Component; + +import java.util.Map; + +/** + * Retry Report Task Status Thread + */ +@Component +public class RetryReportTaskStatusThread implements Runnable { + + private final Logger logger = LoggerFactory.getLogger(RetryReportTaskStatusThread.class); + + /** + * every 5 minutes + */ + private static long RETRY_REPORT_TASK_STATUS_INTERVAL = 5 * 60 * 1000L; + /** + * task callback service + */ + private final TaskCallbackService taskCallbackService; + + public void start(){ + Thread thread = new Thread(this,"RetryReportTaskStatusThread"); + thread.start(); + } + + public RetryReportTaskStatusThread(){ + this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class); + } + + /** + * retry ack/response + */ + @Override + public void run() { + ResponceCache responceCache = ResponceCache.get(); + + while (Stopper.isRunning()){ + + // sleep 5 minutes + ThreadUtils.sleep(RETRY_REPORT_TASK_STATUS_INTERVAL); + + try { + if (!responceCache.getAckCache().isEmpty()){ + Map ackCache = responceCache.getAckCache(); + for (Map.Entry entry : ackCache.entrySet()){ + Integer taskInstanceId = entry.getKey(); + Command ackCommand = entry.getValue(); + taskCallbackService.sendAck(taskInstanceId,ackCommand); + } + } + + if (!responceCache.getResponseCache().isEmpty()){ + Map responseCache = responceCache.getResponseCache(); + for (Map.Entry entry : responseCache.entrySet()){ + Integer 
taskInstanceId = entry.getKey(); + Command responseCommand = entry.getValue(); + taskCallbackService.sendResult(taskInstanceId,responseCommand); + } + } + }catch (Exception e){ + logger.warn("retry report task status error", e); + } + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java index 39046e96eb..721656730d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java @@ -18,21 +18,23 @@ package org.apache.dolphinscheduler.server.worker.runner; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.Event; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; -import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.RetryerUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import 
org.apache.dolphinscheduler.server.worker.cache.ResponceCache; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService; @@ -118,6 +120,15 @@ public class TaskExecuteThread implements Runnable { TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(taskExecutionContext.getTaskInstanceId()); try { logger.info("script path : {}", taskExecutionContext.getExecutePath()); + // check if the OS user exists + if (!OSUtils.getUserList().contains(taskExecutionContext.getTenantCode())) { + String errorLog = String.format("tenantCode: %s does not exist", taskExecutionContext.getTenantCode()); + taskLogger.error(errorLog); + responseCommand.setStatus(ExecutionStatus.FAILURE.getCode()); + responseCommand.setEndTime(new Date()); + return; + } + // task node TaskNode taskNode = JSONUtils.parseObject(taskExecutionContext.getTaskJson(), TaskNode.class); @@ -172,14 +183,10 @@ public class TaskExecuteThread implements Runnable { responseCommand.setProcessId(task.getProcessId()); responseCommand.setAppIds(task.getAppIds()); } finally { - try { - taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.of(responseCommand.getStatus())); - taskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); - } catch (Exception e) { - ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); - taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); - } + taskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); + ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(),responseCommand.convert2Command(),Event.RESULT); + 
taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command()); + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java index dddd1a64b7..da5c0e6980 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java @@ -14,21 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE; +import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_KILL; import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; -import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; -import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.common.utils.process.ProcessBuilderForWin32; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager; @@ -54,9 +53,6 @@ import java.util.regex.Pattern; import org.slf4j.Logger; -import 
com.sun.jna.platform.win32.Kernel32; -import com.sun.jna.platform.win32.WinNT; - /** * abstract command executor */ @@ -68,22 +64,22 @@ public abstract class AbstractCommandExecutor { protected StringBuilder varPool = new StringBuilder(); /** - * process + * process */ private Process process; /** - * log handler + * log handler */ protected Consumer> logHandler; /** - * logger + * logger */ protected Logger logger; /** - * log list + * log list */ protected final List logBuffer; @@ -98,8 +94,8 @@ public abstract class AbstractCommandExecutor { private TaskExecutionContextCacheManager taskExecutionContextCacheManager; public AbstractCommandExecutor(Consumer> logHandler, - TaskExecutionContext taskExecutionContext , - Logger logger){ + TaskExecutionContext taskExecutionContext, + Logger logger) { this.logHandler = logHandler; this.taskExecutionContext = taskExecutionContext; this.logger = logger; @@ -117,45 +113,24 @@ public abstract class AbstractCommandExecutor { // setting up user to run commands List command = new LinkedList<>(); - if (OSUtils.isWindows()) { - //init process builder - ProcessBuilderForWin32 processBuilder = new ProcessBuilderForWin32(); - // setting up a working directory - processBuilder.directory(new File(taskExecutionContext.getExecutePath())); - // setting up a username and password - processBuilder.user(taskExecutionContext.getTenantCode(), StringUtils.EMPTY); - // merge error information to standard output stream - processBuilder.redirectErrorStream(true); - - // setting up user to run commands - command.add(commandInterpreter()); - command.add("/c"); - command.addAll(commandOptions()); - command.add(commandFile); - - // setting commands - processBuilder.command(command); - process = processBuilder.start(); - } else { - //init process builder - ProcessBuilder processBuilder = new ProcessBuilder(); - // setting up a working directory - processBuilder.directory(new File(taskExecutionContext.getExecutePath())); - // merge error information to 
standard output stream - processBuilder.redirectErrorStream(true); - - // setting up user to run commands - command.add("sudo"); - command.add("-u"); - command.add(taskExecutionContext.getTenantCode()); - command.add(commandInterpreter()); - command.addAll(commandOptions()); - command.add(commandFile); - - // setting commands - processBuilder.command(command); - process = processBuilder.start(); - } + //init process builder + ProcessBuilder processBuilder = new ProcessBuilder(); + // setting up a working directory + processBuilder.directory(new File(taskExecutionContext.getExecutePath())); + // merge error information to standard output stream + processBuilder.redirectErrorStream(true); + + // setting up user to run commands + command.add("sudo"); + command.add("-u"); + command.add(taskExecutionContext.getTenantCode()); + command.add(commandInterpreter()); + command.addAll(commandOptions()); + command.add(commandFile); + + // setting commands + processBuilder.command(command); + process = processBuilder.start(); // print command printCommand(command); @@ -168,12 +143,18 @@ public abstract class AbstractCommandExecutor { * @return CommandExecuteResult * @throws Exception if error throws Exception */ - public CommandExecuteResult run(String execCommand) throws Exception{ + public CommandExecuteResult run(String execCommand) throws Exception { CommandExecuteResult result = new CommandExecuteResult(); - + int taskInstanceId = taskExecutionContext.getTaskInstanceId(); + // If the task has been killed, then the task in the cache is null + if (null == taskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId)) { + result.setExitStatusCode(EXIT_CODE_KILL); + return result; + } if (StringUtils.isEmpty(execCommand)) { + taskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); return result; } @@ -188,14 +169,18 @@ public abstract class AbstractCommandExecutor { // parse process output parseProcessOutput(process); - Integer processId = 
getProcessId(process); result.setProcessId(processId); // cache processId taskExecutionContext.setProcessId(processId); - taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext); + boolean updateTaskExecutionContextStatus = taskExecutionContextCacheManager.updateTaskExecutionContext(taskExecutionContext); + if (Boolean.FALSE.equals(updateTaskExecutionContextStatus)) { + ProcessUtils.kill(taskExecutionContext); + result.setExitStatusCode(EXIT_CODE_KILL); + return result; + } // print process id logger.info("process start, process id is: {}", processId); @@ -206,11 +191,10 @@ public abstract class AbstractCommandExecutor { // waiting for the run to finish boolean status = process.waitFor(remainTime, TimeUnit.SECONDS); - logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{}", - taskExecutionContext.getExecutePath(), - processId - , result.getExitStatusCode()); + taskExecutionContext.getExecutePath(), + processId + , result.getExitStatusCode()); // if SHELL task exit if (status) { @@ -222,7 +206,7 @@ public abstract class AbstractCommandExecutor { result.setExitStatusCode(process.exitValue()); // if yarn task , yarn state is final state - if (process.exitValue() == 0){ + if (process.exitValue() == 0) { result.setExitStatusCode(isSuccessOfYarnState(appIds) ? 
EXIT_CODE_SUCCESS : EXIT_CODE_FAILURE); } } else { @@ -231,16 +215,16 @@ public abstract class AbstractCommandExecutor { result.setExitStatusCode(EXIT_CODE_FAILURE); } - return result; } public String getVarPool() { return varPool.toString(); } - + /** * cancel application + * * @throws Exception exception */ public void cancelApplication() throws Exception { @@ -271,6 +255,7 @@ public abstract class AbstractCommandExecutor { /** * soft kill + * * @param processId process id * @return process is alive * @throws InterruptedException interrupted exception @@ -295,6 +280,7 @@ public abstract class AbstractCommandExecutor { /** * hard kill + * * @param processId process id */ private void hardKill(int processId) { @@ -313,33 +299,45 @@ public abstract class AbstractCommandExecutor { /** * print command + * * @param commands process builder */ private void printCommand(List commands) { - String cmdStr = ProcessUtils.buildCommandStr(commands); - logger.info("task run command:\n{}", cmdStr); + String cmdStr; + + try { + cmdStr = ProcessUtils.buildCommandStr(commands); + logger.info("task run command:\n{}", cmdStr); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } } /** * clear */ private void clear() { + + List markerList = new ArrayList<>(); + markerList.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString()); + if (!logBuffer.isEmpty()) { // log handle logHandler.accept(logBuffer); - logBuffer.clear(); } + logHandler.accept(markerList); } /** * get the standard output of the process + * * @param process process */ private void parseProcessOutput(Process process) { String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskExecutionContext.getTaskAppId()); ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName); - parseProcessOutputExecutorService.submit(new Runnable(){ + parseProcessOutputExecutorService.submit(new Runnable() { @Override 
public void run() { BufferedReader inReader = null; @@ -360,7 +358,7 @@ public abstract class AbstractCommandExecutor { } } } catch (Exception e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } finally { clear(); close(inReader); @@ -380,22 +378,22 @@ public abstract class AbstractCommandExecutor { boolean result = true; try { for (String appId : appIds) { - while(Stopper.isRunning()){ + while (Stopper.isRunning()) { ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId); - logger.info("appId:{}, final state:{}",appId,applicationStatus.name()); - if (applicationStatus.equals(ExecutionStatus.FAILURE) || - applicationStatus.equals(ExecutionStatus.KILL)) { + logger.info("appId:{}, final state:{}", appId, applicationStatus.name()); + if (applicationStatus.equals(ExecutionStatus.FAILURE) + || applicationStatus.equals(ExecutionStatus.KILL)) { return false; } - if (applicationStatus.equals(ExecutionStatus.SUCCESS)){ + if (applicationStatus.equals(ExecutionStatus.SUCCESS)) { break; } Thread.sleep(Constants.SLEEP_TIME_MILLIS); } } } catch (Exception e) { - logger.error(String.format("yarn applications: %s status failed ", appIds.toString()),e); + logger.error(String.format("yarn applications: %s status failed ", appIds.toString()), e); result = false; } return result; @@ -431,14 +429,15 @@ public abstract class AbstractCommandExecutor { /** * convert file to list + * * @param filename file name * @return line list */ private List convertFile2List(String filename) { List lineList = new ArrayList(100); - File file=new File(filename); + File file = new File(filename); - if (!file.exists()){ + if (!file.exists()) { return lineList; } @@ -450,13 +449,13 @@ public abstract class AbstractCommandExecutor { lineList.add(line); } } catch (Exception e) { - logger.error(String.format("read file: %s failed : ",filename),e); + logger.error(String.format("read file: %s failed : ", filename), e); } finally { - if(br != null){ + if 
(br != null) { try { br.close(); } catch (IOException e) { - logger.error(e.getMessage(),e); + logger.error(e.getMessage(), e); } } @@ -466,6 +465,7 @@ public abstract class AbstractCommandExecutor { /** * find app id + * * @param line line * @return appid */ @@ -477,14 +477,14 @@ public abstract class AbstractCommandExecutor { return null; } - /** - * get remain time?s? + * get remain time(s) * * @return remain time */ private long getRemaintime() { - long remainTime = DateUtils.getRemainTime(taskExecutionContext.getStartTime(), taskExecutionContext.getTaskTimeout()); + long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000; + long remainTime = taskExecutionContext.getTaskTimeout() - usedTime; if (remainTime < 0) { throw new RuntimeException("task execution time out"); @@ -506,12 +506,7 @@ public abstract class AbstractCommandExecutor { Field f = process.getClass().getDeclaredField(Constants.PID); f.setAccessible(true); - if (OSUtils.isWindows()) { - WinNT.HANDLE handle = (WinNT.HANDLE) f.get(process); - processId = Kernel32.INSTANCE.GetProcessId(handle); - } else { - processId = f.getInt(process); - } + processId = f.getInt(process); } catch (Throwable e) { logger.error(e.getMessage(), e); } @@ -522,7 +517,7 @@ public abstract class AbstractCommandExecutor { /** * when log buffer siz or flush time reach condition , then flush * - * @param lastFlushTime last flush time + * @param lastFlushTime last flush time * @return last flush time */ private long flush(long lastFlushTime) { @@ -559,7 +554,10 @@ public abstract class AbstractCommandExecutor { protected List commandOptions() { return Collections.emptyList(); } + protected abstract String buildCommandFilePath(); + protected abstract String commandInterpreter(); + protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException; } \ No newline at end of file diff --git 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java index 1a66349817..de7d35f404 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java @@ -16,32 +16,27 @@ */ package org.apache.dolphinscheduler.server.worker.task; +import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER; + import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; -import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; -import org.apache.dolphinscheduler.common.task.datax.DataxParameters; -import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; -import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; -import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; -import org.apache.dolphinscheduler.common.task.python.PythonParameters; -import org.apache.dolphinscheduler.common.task.shell.ShellParameters; -import org.apache.dolphinscheduler.common.task.spark.SparkParameters; -import org.apache.dolphinscheduler.common.task.sql.SqlParameters; -import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import 
org.apache.dolphinscheduler.dao.TaskRecordDao; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; + import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; +import org.slf4j.Logger; + /** * executive task */ @@ -51,20 +46,20 @@ public abstract class AbstractTask { * varPool string */ protected String varPool; - + /** * taskExecutionContext **/ TaskExecutionContext taskExecutionContext; /** - * log record + * log record */ protected Logger logger; /** - * SHELL process pid + * SHELL process pid */ protected int processId; @@ -80,14 +75,15 @@ public abstract class AbstractTask { protected volatile boolean cancel = false; /** - * exit code + * exit code */ protected volatile int exitStatusCode = -1; /** * constructor + * * @param taskExecutionContext taskExecutionContext - * @param logger logger + * @param logger logger */ protected AbstractTask(TaskExecutionContext taskExecutionContext, Logger logger) { this.taskExecutionContext = taskExecutionContext; @@ -96,6 +92,7 @@ public abstract class AbstractTask { /** * init task + * * @throws Exception exception */ public void init() throws Exception { @@ -103,6 +100,7 @@ public abstract class AbstractTask { /** * task handle + * * @throws Exception exception */ public abstract void handle() throws Exception; @@ -110,6 +108,7 @@ public abstract class AbstractTask { /** * cancel application + * * @param status status * @throws Exception exception */ @@ -119,11 +118,16 @@ public abstract class AbstractTask { /** * log handle + * * @param logs log list */ public void logHandle(List logs) { // note that the "new line" is added here to facilitate log parsing - logger.info(" -> {}", String.join("\n\t", logs)); + if (logs.contains(FINALIZE_SESSION_MARKER.toString())) { + logger.info(FINALIZE_SESSION_MARKER, 
FINALIZE_SESSION_MARKER.toString()); + } else { + logger.info(" -> {}", String.join("\n\t", logs)); + } } public void setVarPool(String varPool) { @@ -133,10 +137,11 @@ public abstract class AbstractTask { public String getVarPool() { return varPool; } - + /** * get exit status code - * @return exit status code + * + * @return exit status code */ public int getExitStatusCode() { return exitStatusCode; @@ -164,21 +169,21 @@ public abstract class AbstractTask { /** * get task parameters + * * @return AbstractParameters */ public abstract AbstractParameters getParameters(); - /** * result processing */ - public void after(){ - if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){ + public void after() { + if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS) { // task recor flat : if true , start up qianfan if (TaskRecordDao.getTaskRecordFlag() - && TaskType.typeIsNormalTask(taskExecutionContext.getTaskType())){ - AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskExecutionContext.getTaskParams(), getCurTaskParamsClass()); + && TaskType.typeIsNormalTask(taskExecutionContext.getTaskType())) { + AbstractParameters params = TaskParametersUtils.getParameters(taskExecutionContext.getTaskType(), taskExecutionContext.getTaskParams()); // replace placeholder Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), @@ -187,81 +192,34 @@ public abstract class AbstractTask { CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), taskExecutionContext.getScheduleTime()); if (paramsMap != null && !paramsMap.isEmpty() - && paramsMap.containsKey("v_proc_date")){ + && paramsMap.containsKey("v_proc_date")) { String vProcDate = paramsMap.get("v_proc_date").getValue(); - if (!StringUtils.isEmpty(vProcDate)){ + if (!StringUtils.isEmpty(vProcDate)) { TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskExecutionContext.getTaskName(), vProcDate); - logger.info("task record status : 
{}",taskRecordState); - if (taskRecordState == TaskRecordStatus.FAILURE){ + logger.info("task record status : {}", taskRecordState); + if (taskRecordState == TaskRecordStatus.FAILURE) { setExitStatusCode(Constants.EXIT_CODE_FAILURE); } } } } - }else if (getExitStatusCode() == Constants.EXIT_CODE_KILL){ + } else if (getExitStatusCode() == Constants.EXIT_CODE_KILL) { setExitStatusCode(Constants.EXIT_CODE_KILL); - }else { + } else { setExitStatusCode(Constants.EXIT_CODE_FAILURE); } } - - - /** - * get current task parameter class - * @return Task Params Class - */ - private Class getCurTaskParamsClass(){ - Class paramsClass = null; - // get task type - TaskType taskType = TaskType.valueOf(taskExecutionContext.getTaskType()); - switch (taskType){ - case SHELL: - paramsClass = ShellParameters.class; - break; - case SQL: - paramsClass = SqlParameters.class; - break; - case PROCEDURE: - paramsClass = ProcedureParameters.class; - break; - case MR: - paramsClass = MapreduceParameters.class; - break; - case SPARK: - paramsClass = SparkParameters.class; - break; - case FLINK: - paramsClass = FlinkParameters.class; - break; - case PYTHON: - paramsClass = PythonParameters.class; - break; - case DATAX: - paramsClass = DataxParameters.class; - break; - case SQOOP: - paramsClass = SqoopParameters.class; - break; - case CONDITIONS: - paramsClass = ConditionsParameters.class; - break; - default: - logger.error("not support this task type: {}", taskType); - throw new IllegalArgumentException("not support this task type"); - } - return paramsClass; - } - /** * get exit status according to exitCode + * * @return exit status */ - public ExecutionStatus getExitStatus(){ + public ExecutionStatus getExitStatus() { ExecutionStatus status; - switch (getExitStatusCode()){ + switch (getExitStatusCode()) { case Constants.EXIT_CODE_SUCCESS: status = ExecutionStatus.SUCCESS; break; diff --git 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java index 6d701a00a6..caf487947b 100755 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java @@ -14,27 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.worker.task.datax; +package org.apache.dolphinscheduler.server.worker.task.datax; -import com.alibaba.druid.sql.ast.SQLStatement; -import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; -import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr; -import com.alibaba.druid.sql.ast.statement.*; -import com.alibaba.druid.sql.parser.SQLStatementParser; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; @@ -46,7 
+37,8 @@ import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; -import org.slf4j.Logger; + +import org.apache.commons.io.FileUtils; import java.io.File; import java.nio.charset.StandardCharsets; @@ -56,25 +48,48 @@ import java.nio.file.StandardOpenOption; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; -import java.sql.*; -import java.util.*; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.slf4j.Logger; + +import com.alibaba.druid.sql.ast.SQLStatement; +import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr; +import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr; +import com.alibaba.druid.sql.ast.statement.SQLSelect; +import com.alibaba.druid.sql.ast.statement.SQLSelectItem; +import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock; +import com.alibaba.druid.sql.ast.statement.SQLSelectStatement; +import com.alibaba.druid.sql.ast.statement.SQLUnionQuery; +import com.alibaba.druid.sql.parser.SQLStatementParser; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; /** * DataX task */ public class DataxTask extends AbstractTask { + /** + * jvm parameters + */ + public static final String JVM_EVN = " --jvm=\"-Xms%sG -Xmx%sG\" "; /** * python process(datax only supports version 2.7 by default) */ private static final String DATAX_PYTHON = "python2.7"; - /** * datax home path */ private static final String DATAX_HOME_EVN = 
"${DATAX_HOME}"; - /** * datax channel count */ @@ -97,6 +112,7 @@ public class DataxTask extends AbstractTask { /** * constructor + * * @param taskExecutionContext taskExecutionContext * @param logger logger */ @@ -104,9 +120,8 @@ public class DataxTask extends AbstractTask { super(taskExecutionContext, logger); this.taskExecutionContext = taskExecutionContext; - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, - taskExecutionContext,logger); + taskExecutionContext, logger); } /** @@ -149,9 +164,7 @@ public class DataxTask extends AbstractTask { setExitStatusCode(commandExecuteResult.getExitStatusCode()); setAppIds(commandExecuteResult.getAppIds()); setProcessId(commandExecuteResult.getProcessId()); - } - catch (Exception e) { - logger.error("datax task failure", e); + } catch (Exception e) { setExitStatusCode(Constants.EXIT_CODE_FAILURE); throw e; } @@ -189,9 +202,9 @@ public class DataxTask extends AbstractTask { return fileName; } - if (dataXParameters.getCustomConfig() == Flag.YES.ordinal()){ + if (dataXParameters.getCustomConfig() == Flag.YES.ordinal()) { json = dataXParameters.getJson().replaceAll("\\r\\n", "\n"); - }else { + } else { ObjectNode job = JSONUtils.createObjectNode(); job.putArray("content").addAll(buildDataxJobContentJson()); job.set("setting", buildDataxJobSettingJson()); @@ -248,7 +261,6 @@ public class DataxTask extends AbstractTask { readerParam.put("password", dataSourceCfg.getPassword()); readerParam.putArray("connection").addAll(readerConnArr); - ObjectNode reader = JSONUtils.createObjectNode(); reader.put("name", DataxUtils.getReaderPluginName(DbType.of(dataxTaskExecutionContext.getSourcetype()))); reader.set("parameter", readerParam); @@ -277,7 +289,6 @@ public class DataxTask extends AbstractTask { } writerParam.putArray("connection").addAll(writerConnArr); - if (CollectionUtils.isNotEmpty(dataXParameters.getPreStatements())) { ArrayNode preSqlArr = writerParam.putArray("preSql"); for (String preSql : 
dataXParameters.getPreStatements()) { @@ -368,7 +379,7 @@ public class DataxTask extends AbstractTask { * @throws Exception if error throws Exception */ private String buildShellCommandFile(String jobConfigFilePath, Map paramsMap) - throws Exception { + throws Exception { // generate scripts String fileName = String.format("%s/%s_node.%s", taskExecutionContext.getExecutePath(), @@ -387,6 +398,7 @@ public class DataxTask extends AbstractTask { sbr.append(" "); sbr.append(DATAX_HOME_EVN); sbr.append(" "); + sbr.append(loadJvmEnv(dataXParameters)); sbr.append(jobConfigFilePath); // replace placeholder @@ -409,17 +421,19 @@ public class DataxTask extends AbstractTask { return fileName; } + public String loadJvmEnv(DataxParameters dataXParameters) { + int xms = dataXParameters.getXms() < 1 ? 1 : dataXParameters.getXms(); + int xmx = dataXParameters.getXmx() < 1 ? 1 : dataXParameters.getXmx(); + return String.format(JVM_EVN, xms, xmx); + } + /** * parsing synchronized column names in SQL statements * - * @param dsType - * the database type of the data source - * @param dtType - * the database type of the data target - * @param dataSourceCfg - * the database connection parameters of the data source - * @param sql - * sql for data synchronization + * @param dsType the database type of the data source + * @param dtType the database type of the data target + * @param dataSourceCfg the database connection parameters of the data source + * @param sql sql for data synchronization * @return Keyword converted column names */ private String[] parsingSqlColumnNames(DbType dsType, DbType dtType, BaseDataSource dataSourceCfg, String sql) { @@ -438,10 +452,8 @@ public class DataxTask extends AbstractTask { /** * try grammatical parsing column * - * @param dbType - * database type - * @param sql - * sql for data synchronization + * @param dbType database type + * @param sql sql for data synchronization * @return column name array * @throws RuntimeException if error throws 
RuntimeException */ @@ -453,16 +465,16 @@ public class DataxTask extends AbstractTask { notNull(parser, String.format("database driver [%s] is not support", dbType.toString())); SQLStatement sqlStatement = parser.parseStatement(); - SQLSelectStatement sqlSelectStatement = (SQLSelectStatement)sqlStatement; + SQLSelectStatement sqlSelectStatement = (SQLSelectStatement) sqlStatement; SQLSelect sqlSelect = sqlSelectStatement.getSelect(); List selectItemList = null; if (sqlSelect.getQuery() instanceof SQLSelectQueryBlock) { - SQLSelectQueryBlock block = (SQLSelectQueryBlock)sqlSelect.getQuery(); + SQLSelectQueryBlock block = (SQLSelectQueryBlock) sqlSelect.getQuery(); selectItemList = block.getSelectList(); } else if (sqlSelect.getQuery() instanceof SQLUnionQuery) { - SQLUnionQuery unionQuery = (SQLUnionQuery)sqlSelect.getQuery(); - SQLSelectQueryBlock block = (SQLSelectQueryBlock)unionQuery.getRight(); + SQLUnionQuery unionQuery = (SQLUnionQuery) sqlSelect.getQuery(); + SQLSelectQueryBlock block = (SQLSelectQueryBlock) unionQuery.getRight(); selectItemList = block.getSelectList(); } @@ -470,7 +482,7 @@ public class DataxTask extends AbstractTask { String.format("select query type [%s] is not support", sqlSelect.getQuery().toString())); columnNames = new String[selectItemList.size()]; - for (int i = 0; i < selectItemList.size(); i++ ) { + for (int i = 0; i < selectItemList.size(); i++) { SQLSelectItem item = selectItemList.get(i); String columnName = null; @@ -479,10 +491,10 @@ public class DataxTask extends AbstractTask { columnName = item.getAlias(); } else if (item.getExpr() != null) { if (item.getExpr() instanceof SQLPropertyExpr) { - SQLPropertyExpr expr = (SQLPropertyExpr)item.getExpr(); + SQLPropertyExpr expr = (SQLPropertyExpr) item.getExpr(); columnName = expr.getName(); } else if (item.getExpr() instanceof SQLIdentifierExpr) { - SQLIdentifierExpr expr = (SQLIdentifierExpr)item.getExpr(); + SQLIdentifierExpr expr = (SQLIdentifierExpr) item.getExpr(); columnName 
= expr.getName(); } } else { @@ -497,8 +509,7 @@ public class DataxTask extends AbstractTask { columnNames[i] = columnName; } - } - catch (Exception e) { + } catch (Exception e) { logger.warn(e.getMessage(), e); return null; } @@ -509,10 +520,8 @@ public class DataxTask extends AbstractTask { /** * try to execute sql to resolve column names * - * @param baseDataSource - * the database connection parameters - * @param sql - * sql for data synchronization + * @param baseDataSource the database connection parameters + * @param sql sql for data synchronization * @return column name array */ public String[] tryExecuteSqlResolveColumnNames(BaseDataSource baseDataSource, String sql) { @@ -529,11 +538,10 @@ public class DataxTask extends AbstractTask { ResultSetMetaData md = resultSet.getMetaData(); int num = md.getColumnCount(); columnNames = new String[num]; - for (int i = 1; i <= num; i++ ) { + for (int i = 1; i <= num; i++) { columnNames[i - 1] = md.getColumnName(i); } - } - catch (SQLException e) { + } catch (SQLException e) { logger.warn(e.getMessage(), e); return null; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java index 58d5eafb2d..9de28e3e27 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java @@ -42,6 +42,7 @@ public class FlinkTask extends AbstractYarnTask { /** * flink command + * usage: flink run [OPTIONS] */ private static final String FLINK_COMMAND = "flink"; private static final String FLINK_RUN = "run"; @@ -102,6 +103,7 @@ public class FlinkTask extends AbstractYarnTask { */ @Override protected String buildCommand() { + // flink run [OPTIONS] List args = new ArrayList<>(); args.add(FLINK_COMMAND); diff 
--git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java index b805939bb5..5cbd3c151f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java @@ -14,8 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.shell; +import static java.util.Calendar.DAY_OF_MONTH; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -23,7 +25,7 @@ import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; @@ -31,7 +33,6 @@ import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; -import org.slf4j.Logger; import java.io.File; import java.nio.file.Files; @@ -40,139 +41,146 @@ import java.nio.file.StandardOpenOption; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; 
+import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Set; +import org.slf4j.Logger; + /** * shell task */ public class ShellTask extends AbstractTask { - /** - * shell parameters - */ - private ShellParameters shellParameters; - - /** - * shell command executor - */ - private ShellCommandExecutor shellCommandExecutor; - - /** - * taskExecutionContext - */ - private TaskExecutionContext taskExecutionContext; - - /** - * constructor - * @param taskExecutionContext taskExecutionContext - * @param logger logger - */ - public ShellTask(TaskExecutionContext taskExecutionContext, Logger logger) { - super(taskExecutionContext, logger); - - this.taskExecutionContext = taskExecutionContext; - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, + /** + * shell parameters + */ + private ShellParameters shellParameters; + + /** + * shell command executor + */ + private ShellCommandExecutor shellCommandExecutor; + + /** + * taskExecutionContext + */ + private TaskExecutionContext taskExecutionContext; + + /** + * constructor + * + * @param taskExecutionContext taskExecutionContext + * @param logger logger + */ + public ShellTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + + this.taskExecutionContext = taskExecutionContext; + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger); - } + } - @Override - public void init() { - logger.info("shell task params {}", taskExecutionContext.getTaskParams()); + @Override + public void init() { + logger.info("shell task params {}", taskExecutionContext.getTaskParams()); - shellParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ShellParameters.class); + shellParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ShellParameters.class); - if (!shellParameters.checkParameters()) { - throw new RuntimeException("shell task params is not valid"); - } - } 
- - @Override - public void handle() throws Exception { - try { - // construct process - CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(buildCommand()); - setExitStatusCode(commandExecuteResult.getExitStatusCode()); - setAppIds(commandExecuteResult.getAppIds()); - setProcessId(commandExecuteResult.getProcessId()); - } catch (Exception e) { - logger.error("shell task error", e); - setExitStatusCode(Constants.EXIT_CODE_FAILURE); - throw e; + if (!shellParameters.checkParameters()) { + throw new RuntimeException("shell task params is not valid"); + } } - } - - @Override - public void cancelApplication(boolean cancelApplication) throws Exception { - // cancel process - shellCommandExecutor.cancelApplication(); - } - - /** - * create command - * @return file name - * @throws Exception exception - */ - private String buildCommand() throws Exception { - // generate scripts - String fileName = String.format("%s/%s_node.%s", - taskExecutionContext.getExecutePath(), - taskExecutionContext.getTaskAppId(), OSUtils.isWindows() ? 
"bat" : "sh"); - Path path = new File(fileName).toPath(); + @Override + public void handle() throws Exception { + try { + // construct process + CommandExecuteResult commandExecuteResult = shellCommandExecutor.run(buildCommand()); + setExitStatusCode(commandExecuteResult.getExitStatusCode()); + setAppIds(commandExecuteResult.getAppIds()); + setProcessId(commandExecuteResult.getProcessId()); + } catch (Exception e) { + logger.error("shell task error", e); + setExitStatusCode(Constants.EXIT_CODE_FAILURE); + throw e; + } + } - if (Files.exists(path)) { - return fileName; + @Override + public void cancelApplication(boolean cancelApplication) throws Exception { + // cancel process + shellCommandExecutor.cancelApplication(); } - String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); /** - * combining local and global parameters + * create command + * + * @return file name + * @throws Exception exception */ - Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), - taskExecutionContext.getDefinedParams(), - shellParameters.getLocalParametersMap(), - CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), - taskExecutionContext.getScheduleTime()); - if (paramsMap != null){ - script = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); - } - // new - // replace variable TIME with $[YYYYmmddd...] 
in shell file when history run job and batch complement job - if (paramsMap != null) { - if (taskExecutionContext.getScheduleTime() != null) { - String dateTime = DateUtils.format(taskExecutionContext.getScheduleTime(), Constants.PARAMETER_FORMAT_TIME); - Property p = new Property(); - p.setValue(dateTime); - p.setProp(Constants.PARAMETER_SHECDULE_TIME); - paramsMap.put(Constants.PARAMETER_SHECDULE_TIME, p); - } - script = ParameterUtils.convertParameterPlaceholders2(script, ParamUtils.convert(paramsMap)); - } + private String buildCommand() throws Exception { + // generate scripts + String fileName = String.format("%s/%s_node.%s", + taskExecutionContext.getExecutePath(), + taskExecutionContext.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); - shellParameters.setRawScript(script); + Path path = new File(fileName).toPath(); - logger.info("raw script : {}", shellParameters.getRawScript()); - logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); + if (Files.exists(path)) { + return fileName; + } - Set perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); - FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); + String script = shellParameters.getRawScript().replaceAll("\\r\\n", "\n"); + script = parseScript(script); + shellParameters.setRawScript(script); - if (OSUtils.isWindows()) { - Files.createFile(path); - } else { - Files.createFile(path, attr); - } + logger.info("raw script : {}", shellParameters.getRawScript()); + logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); - Files.write(path, shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); + Set perms = PosixFilePermissions.fromString(Constants.RWXR_XR_X); + FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); - return fileName; - } + if (OSUtils.isWindows()) { + Files.createFile(path); + } else { + Files.createFile(path, attr); + } - @Override - public AbstractParameters getParameters() { - return 
shellParameters; - } + Files.write(path, shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); + return fileName; + } + + @Override + public AbstractParameters getParameters() { + return shellParameters; + } + + private String parseScript(String script) { + // combining local and global parameters + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), + taskExecutionContext.getDefinedParams(), + shellParameters.getLocalParametersMap(), + CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), + taskExecutionContext.getScheduleTime()); + // replace variable TIME with $[YYYYmmddd...] in shell file when history run job and batch complement job + if (taskExecutionContext.getScheduleTime() != null) { + if (paramsMap == null) { + paramsMap = new HashMap<>(); + } + Date date = taskExecutionContext.getScheduleTime(); + if (CommandType.COMPLEMENT_DATA.getCode() == taskExecutionContext.getCmdTypeIfComplement()) { + date = DateUtils.add(taskExecutionContext.getScheduleTime(), DAY_OF_MONTH, 1); + } + String dateTime = DateUtils.format(date, Constants.PARAMETER_FORMAT_TIME); + Property p = new Property(); + p.setValue(dateTime); + p.setProp(Constants.PARAMETER_DATETIME); + paramsMap.put(Constants.PARAMETER_DATETIME, p); + } + return ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java index c540d27810..4e1a4d5356 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTask.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations 
under the License. */ + package org.apache.dolphinscheduler.server.worker.task.spark; import org.apache.dolphinscheduler.common.enums.CommandType; @@ -22,133 +23,136 @@ import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.dao.entity.Resource; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.SparkArgsUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; -import org.slf4j.Logger; import java.util.ArrayList; import java.util.List; import java.util.Map; +import org.slf4j.Logger; + /** * spark task */ public class SparkTask extends AbstractYarnTask { - /** - * spark1 command - */ - private static final String SPARK1_COMMAND = "${SPARK_HOME1}/bin/spark-submit"; + /** + * spark1 command + */ + private static final String SPARK1_COMMAND = "${SPARK_HOME1}/bin/spark-submit"; + + /** + * spark2 command + */ + private static final String SPARK2_COMMAND = "${SPARK_HOME2}/bin/spark-submit"; + + /** + * spark parameters + */ + private SparkParameters sparkParameters; + + /** + * taskExecutionContext + */ + private final TaskExecutionContext sparkTaskExecutionContext; + + public SparkTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.sparkTaskExecutionContext = taskExecutionContext; + } - /** - * spark2 command - */ - private 
static final String SPARK2_COMMAND = "${SPARK_HOME2}/bin/spark-submit"; + @Override + public void init() { - /** - * spark parameters - */ - private SparkParameters sparkParameters; + logger.info("spark task params {}", sparkTaskExecutionContext.getTaskParams()); - /** - * taskExecutionContext - */ - private TaskExecutionContext taskExecutionContext; + sparkParameters = JSONUtils.parseObject(sparkTaskExecutionContext.getTaskParams(), SparkParameters.class); - public SparkTask(TaskExecutionContext taskExecutionContext, Logger logger) { - super(taskExecutionContext, logger); - this.taskExecutionContext = taskExecutionContext; - } + if (null == sparkParameters) { + logger.error("Spark params is null"); + return; + } - @Override - public void init() { + if (!sparkParameters.checkParameters()) { + throw new RuntimeException("spark task params is not valid"); + } + sparkParameters.setQueue(sparkTaskExecutionContext.getQueue()); + setMainJarName(); + } - logger.info("spark task params {}", taskExecutionContext.getTaskParams()); + /** + * create command + * + * @return command + */ + @Override + protected String buildCommand() { + List args = new ArrayList<>(); - sparkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SparkParameters.class); + //spark version + String sparkCommand = SPARK2_COMMAND; - if (!sparkParameters.checkParameters()) { - throw new RuntimeException("spark task params is not valid"); - } - sparkParameters.setQueue(taskExecutionContext.getQueue()); + if (SparkVersion.SPARK1.name().equals(sparkParameters.getSparkVersion())) { + sparkCommand = SPARK1_COMMAND; + } - setMainJarName(); + args.add(sparkCommand); - if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) { - String args = sparkParameters.getMainArgs(); + // other parameters + args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); - // replace placeholder - Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), - 
taskExecutionContext.getDefinedParams(), - sparkParameters.getLocalParametersMap(), - CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), - taskExecutionContext.getScheduleTime()); + // replace placeholder + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(sparkTaskExecutionContext.getDefinedParams()), + sparkTaskExecutionContext.getDefinedParams(), + sparkParameters.getLocalParametersMap(), + CommandType.of(sparkTaskExecutionContext.getCmdTypeIfComplement()), + sparkTaskExecutionContext.getScheduleTime()); - if (paramsMap != null ){ - args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); - } - sparkParameters.setMainArgs(args); - } - } + String command = null; - /** - * create command - * @return command - */ - @Override - protected String buildCommand() { - List args = new ArrayList<>(); + if (null != paramsMap) { + command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); + } - //spark version - String sparkCommand = SPARK2_COMMAND; + logger.info("spark task command: {}", command); - if (SparkVersion.SPARK1.name().equals(sparkParameters.getSparkVersion())) { - sparkCommand = SPARK1_COMMAND; + return command; } - args.add(sparkCommand); - - // other parameters - args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); - - String command = ParameterUtils - .convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); - - logger.info("spark task command : {}", command); - - return command; - } - - @Override - protected void setMainJarName() { - // main jar - ResourceInfo mainJar = sparkParameters.getMainJar(); - if (mainJar != null) { - int resourceId = mainJar.getId(); - String resourceName; - if (resourceId == 0) { - resourceName = mainJar.getRes(); - } else { - Resource resource = processService.getResourceById(sparkParameters.getMainJar().getId()); - if (resource == null) { - logger.error("resource id: {} not 
exist", resourceId); - throw new RuntimeException(String.format("resource id: %d not exist", resourceId)); + @Override + protected void setMainJarName() { + // main jar + ResourceInfo mainJar = sparkParameters.getMainJar(); + + if (null == mainJar) { + throw new RuntimeException("Spark task jar params is null"); } - resourceName = resource.getFullName().replaceFirst("/", ""); - } - mainJar.setRes(resourceName); - sparkParameters.setMainJar(mainJar); + + int resourceId = mainJar.getId(); + String resourceName; + if (resourceId == 0) { + resourceName = mainJar.getRes(); + } else { + Resource resource = processService.getResourceById(sparkParameters.getMainJar().getId()); + if (resource == null) { + logger.error("resource id: {} not exist", resourceId); + throw new RuntimeException(String.format("resource id: %d not exist", resourceId)); + } + resourceName = resource.getFullName().replaceFirst("/", ""); + } + mainJar.setRes(resourceName); + sparkParameters.setMainJar(mainJar); + } - } - @Override - public AbstractParameters getParameters() { - return sparkParameters; - } + @Override + public AbstractParameters getParameters() { + return sparkParameters; + } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopConstants.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopConstants.java new file mode 100644 index 0000000000..772807b4d2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopConstants.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.worker.task.sqoop; + +public final class SqoopConstants { + + private SqoopConstants() { + } + + //sqoop general param + public static final String SQOOP = "sqoop"; + public static final String SQOOP_MR_JOB_NAME = "mapred.job.name"; + public static final String SQOOP_PARALLELISM = "-m"; + public static final String FIELDS_TERMINATED_BY = "--fields-terminated-by"; + public static final String LINES_TERMINATED_BY = "--lines-terminated-by"; + public static final String FIELD_NULL_PLACEHOLDER = "--null-non-string 'NULL' --null-string 'NULL'"; + + //sqoop db + public static final String DB_CONNECT = "--connect"; + public static final String DB_USERNAME = "--username"; + public static final String DB_PWD = "--password"; + public static final String TABLE = "--table"; + public static final String COLUMNS = "--columns"; + public static final String QUERY_WHERE = "where"; + public static final String QUERY = "--query"; + public static final String QUERY_CONDITION = "AND \\$CONDITIONS"; + public static final String QUERY_WITHOUT_CONDITION = "WHERE \\$CONDITIONS"; + public static final String MAP_COLUMN_HIVE = "--map-column-hive"; + public static final String MAP_COLUMN_JAVA = "--map-column-java"; + + + //sqoop hive source + public static final String HCATALOG_DATABASE = "--hcatalog-database"; + public static final String HCATALOG_TABLE = "--hcatalog-table"; + public static final String HCATALOG_PARTITION_KEYS = "--hcatalog-partition-keys"; + public static final String HCATALOG_PARTITION_VALUES = 
"--hcatalog-partition-values"; + + //sqoop hdfs + public static final String HDFS_EXPORT_DIR = "--export-dir"; + public static final String TARGET_DIR = "--target-dir"; + public static final String COMPRESSION_CODEC = "--compression-codec"; + + //sqoop hive + public static final String HIVE_IMPORT = "--hive-import"; + public static final String HIVE_DATABASE = "--hive-database"; + public static final String HIVE_TABLE = "--hive-table"; + public static final String CREATE_HIVE_TABLE = "--create-hive-table"; + public static final String HIVE_DROP_IMPORT_DELIMS = "--hive-drop-import-delims"; + public static final String HIVE_OVERWRITE = "--hive-overwrite"; + public static final String DELETE_TARGET_DIR = "--delete-target-dir"; + public static final String HIVE_DELIMS_REPLACEMENT = "--hive-delims-replacement"; + public static final String HIVE_PARTITION_KEY = "--hive-partition-key"; + public static final String HIVE_PARTITION_VALUE = "--hive-partition-value"; + + //sqoop update model + public static final String UPDATE_KEY = "--update-key"; + public static final String UPDATE_MODE = "--update-mode"; + + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java index c66fe95592..00d94f01bf 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java @@ -14,64 +14,73 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; -import org.slf4j.Logger; import java.util.Map; -import org.apache.dolphinscheduler.common.utils.*; + +import org.slf4j.Logger; /** * sqoop task extends the shell task */ public class SqoopTask extends AbstractYarnTask { + /** + * sqoop task params + */ private SqoopParameters sqoopParameters; /** * taskExecutionContext */ - private TaskExecutionContext taskExecutionContext; + private final TaskExecutionContext sqoopTaskExecutionContext; - public SqoopTask(TaskExecutionContext taskExecutionContext, Logger logger){ - super(taskExecutionContext,logger); - this.taskExecutionContext = taskExecutionContext; + public SqoopTask(TaskExecutionContext taskExecutionContext, Logger logger) { + super(taskExecutionContext, logger); + this.sqoopTaskExecutionContext = taskExecutionContext; } @Override - public void init() throws Exception { - logger.info("sqoop task params {}", taskExecutionContext.getTaskParams()); + public void init() { + logger.info("sqoop task params {}", sqoopTaskExecutionContext.getTaskParams()); sqoopParameters = - JSONUtils.parseObject(taskExecutionContext.getTaskParams(),SqoopParameters.class); - if (!sqoopParameters.checkParameters()) { - throw new RuntimeException("sqoop task params is not valid"); + 
JSONUtils.parseObject(sqoopTaskExecutionContext.getTaskParams(), SqoopParameters.class); + //check sqoop task params + if (null == sqoopParameters) { + throw new IllegalArgumentException("Sqoop Task params is null"); } + if (!sqoopParameters.checkParameters()) { + throw new IllegalArgumentException("Sqoop Task params check fail"); + } } @Override - protected String buildCommand() throws Exception { + protected String buildCommand() { //get sqoop scripts SqoopJobGenerator generator = new SqoopJobGenerator(); - String script = generator.generateSqoopJob(sqoopParameters,taskExecutionContext); + String script = generator.generateSqoopJob(sqoopParameters, sqoopTaskExecutionContext); - Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()), - taskExecutionContext.getDefinedParams(), - sqoopParameters.getLocalParametersMap(), - CommandType.of(taskExecutionContext.getCmdTypeIfComplement()), - taskExecutionContext.getScheduleTime()); + Map paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(sqoopTaskExecutionContext.getDefinedParams()), + sqoopTaskExecutionContext.getDefinedParams(), + sqoopParameters.getLocalParametersMap(), + CommandType.of(sqoopTaskExecutionContext.getCmdTypeIfComplement()), + sqoopTaskExecutionContext.getScheduleTime()); - if(paramsMap != null){ - String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); + if (paramsMap != null) { + String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); logger.info("sqoop script: {}", resultScripts); return resultScripts; } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java index ffca73544d..e3e7c9a3ae 100644 --- 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java @@ -14,71 +14,72 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * common script generator */ public class CommonGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(CommonGenerator.class); public String generate(SqoopParameters sqoopParameters) { - StringBuilder result = new StringBuilder(); - try{ - result.append("sqoop ") - .append(sqoopParameters.getModelType()); - //set sqoop job name - result.append(" -D mapred.job.name") - .append(Constants.EQUAL_SIGN) - .append(sqoopParameters.getJobName()); + StringBuilder commonSb = new StringBuilder(); + + try { + //sqoop task model + commonSb.append(SqoopConstants.SQOOP) + .append(Constants.SPACE) + .append(sqoopParameters.getModelType()); - //set hadoop custom param + //sqoop map-reduce job name + commonSb.append(Constants.SPACE).append(Constants.D).append(Constants.SPACE) + .append(String.format("%s%s%s", SqoopConstants.SQOOP_MR_JOB_NAME, + Constants.EQUAL_SIGN, sqoopParameters.getJobName())); + + //hadoop custom param List 
hadoopCustomParams = sqoopParameters.getHadoopCustomParams(); if (CollectionUtils.isNotEmpty(hadoopCustomParams)) { for (Property hadoopCustomParam : hadoopCustomParams) { - String hadoopCustomParamStr = " -D " + hadoopCustomParam.getProp() - + Constants.EQUAL_SIGN + hadoopCustomParam.getValue(); + String hadoopCustomParamStr = String.format("%s%s%s", hadoopCustomParam.getProp(), + Constants.EQUAL_SIGN, hadoopCustomParam.getValue()); - if (StringUtils.isNotEmpty(hadoopCustomParamStr)) { - result.append(hadoopCustomParamStr); - } + commonSb.append(Constants.SPACE).append(Constants.D) + .append(Constants.SPACE).append(hadoopCustomParamStr); } } - //set sqoop advanced custom param + //sqoop custom params List sqoopAdvancedParams = sqoopParameters.getSqoopAdvancedParams(); if (CollectionUtils.isNotEmpty(sqoopAdvancedParams)) { - for (Property sqoopAdvancedParam : sqoopAdvancedParams) { - String sqoopAdvancedParamStr = " " + sqoopAdvancedParam.getProp() - + " " + sqoopAdvancedParam.getValue(); - if (StringUtils.isNotEmpty(sqoopAdvancedParamStr)) { - result.append(sqoopAdvancedParamStr); - } + commonSb.append(Constants.SPACE).append(sqoopAdvancedParam.getProp()) + .append(Constants.SPACE).append(sqoopAdvancedParam.getValue()); } } - if(sqoopParameters.getConcurrency() >0){ - result.append(" -m ") - .append(sqoopParameters.getConcurrency()); + //sqoop parallelism + if (sqoopParameters.getConcurrency() > 0) { + commonSb.append(Constants.SPACE).append(SqoopConstants.SQOOP_PARALLELISM) + .append(Constants.SPACE).append(sqoopParameters.getConcurrency()); } - }catch (Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop task general param build failed: [%s]", e.getMessage())); } - return result.toString(); + return commonSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java index 841654b699..e6a9576a25 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; @@ -26,9 +27,10 @@ public interface ISourceGenerator { /** * generate the source script - * @param sqoopParameters sqoopParameters + * + * @param sqoopParameters sqoopParameters * @param taskExecutionContext taskExecutionContext * @return source script */ - String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext); + String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java index 7bdaf49e83..eb355a7438 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; @@ -26,9 +27,10 @@ public interface ITargetGenerator { /** * generate the target script - * @param sqoopParameters sqoopParameters + * + * @param sqoopParameters sqoopParameters * @param taskExecutionContext taskExecutionContext * @return target script */ - String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext); + String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java index 6bc94d0f35..9feaffa5c6 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; import org.apache.dolphinscheduler.common.enums.SqoopJobType; @@ -46,37 +47,38 @@ public class SqoopJobGenerator { /** * common script generator */ - private CommonGenerator commonGenerator; + private final CommonGenerator commonGenerator; - public SqoopJobGenerator(){ + public SqoopJobGenerator() { commonGenerator = new CommonGenerator(); } - private void createSqoopJobGenerator(String sourceType,String targetType){ + private void createSqoopJobGenerator(String sourceType, String targetType) { sourceGenerator = createSourceGenerator(sourceType); targetGenerator = createTargetGenerator(targetType); } /** * get the final sqoop scripts - * @param sqoopParameters - * @return + * + * @param sqoopParameters sqoop params + * @return sqoop scripts */ - public String generateSqoopJob(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext){ + public String generateSqoopJob(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { String sqoopScripts = ""; if (SqoopJobType.TEMPLATE.getDescp().equals(sqoopParameters.getJobType())) { - createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType()); - if(sourceGenerator == null || targetGenerator == null){ + createSqoopJobGenerator(sqoopParameters.getSourceType(), sqoopParameters.getTargetType()); + if (sourceGenerator == null || targetGenerator == null) { throw new RuntimeException("sqoop task source type or target type is null"); } - sqoopScripts = commonGenerator.generate(sqoopParameters) - + sourceGenerator.generate(sqoopParameters,taskExecutionContext) - + targetGenerator.generate(sqoopParameters,taskExecutionContext); + sqoopScripts = String.format("%s%s%s", commonGenerator.generate(sqoopParameters), + sourceGenerator.generate(sqoopParameters, taskExecutionContext), + targetGenerator.generate(sqoopParameters, taskExecutionContext)); } else if 
(SqoopJobType.CUSTOM.getDescp().equals(sqoopParameters.getJobType())) { - sqoopScripts = sqoopParameters.getCustomShell().replaceAll("\\r\\n", "\n"); + sqoopScripts = sqoopParameters.getCustomShell().replaceAll("\\r\\n", "\n"); } return sqoopScripts; @@ -84,11 +86,12 @@ public class SqoopJobGenerator { /** * get the source generator - * @param sourceType - * @return + * + * @param sourceType sqoop source type + * @return sqoop source generator */ - private ISourceGenerator createSourceGenerator(String sourceType){ - switch (sourceType){ + private ISourceGenerator createSourceGenerator(String sourceType) { + switch (sourceType) { case MYSQL: return new MysqlSourceGenerator(); case HIVE: @@ -102,11 +105,12 @@ public class SqoopJobGenerator { /** * get the target generator - * @param targetType - * @return + * + * @param targetType sqoop target type + * @return sqoop target generator */ - private ITargetGenerator createTargetGenerator(String targetType){ - switch (targetType){ + private ITargetGenerator createTargetGenerator(String targetType) { + switch (targetType) { case MYSQL: return new MysqlTargetGenerator(); case HIVE: diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java index ded0d6db17..549d5dba63 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java @@ -14,14 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,28 +34,30 @@ import org.slf4j.LoggerFactory; */ public class HdfsSourceGenerator implements ISourceGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(HdfsSourceGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { - StringBuilder result = new StringBuilder(); - try{ + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { + + StringBuilder hdfsSourceSb = new StringBuilder(); + + try { SourceHdfsParameter sourceHdfsParameter - = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceHdfsParameter.class); - - if(sourceHdfsParameter != null){ - if(StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())){ - result.append(" --export-dir ") - .append(sourceHdfsParameter.getExportDir()); - }else{ - throw new Exception("--export-dir is null"); + = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHdfsParameter.class); + + if (null != sourceHdfsParameter) { + if (StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())) { + 
hdfsSourceSb.append(Constants.SPACE).append(SqoopConstants.HDFS_EXPORT_DIR) + .append(Constants.SPACE).append(sourceHdfsParameter.getExportDir()); + } else { + throw new IllegalArgumentException("Sqoop hdfs export dir is null"); } } - }catch (Exception e){ - logger.error("get hdfs source failed",e); + } catch (Exception e) { + logger.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage())); } - return result.toString(); + return hdfsSourceSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java index 131b616025..3229dcada7 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java @@ -14,14 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,33 +34,40 @@ import org.slf4j.LoggerFactory; */ public class HiveSourceGenerator implements ISourceGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(HiveSourceGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { - StringBuilder sb = new StringBuilder(); - try{ + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { + + StringBuilder hiveSourceSb = new StringBuilder(); + + try { SourceHiveParameter sourceHiveParameter - = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceHiveParameter.class); - if(sourceHiveParameter != null){ - if(StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())){ - sb.append(" --hcatalog-database ").append(sourceHiveParameter.getHiveDatabase()); + = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceHiveParameter.class); + + if (null != sourceHiveParameter) { + if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())) { + hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_DATABASE) 
+ .append(Constants.SPACE).append(sourceHiveParameter.getHiveDatabase()); } - if(StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())){ - sb.append(" --hcatalog-table ").append(sourceHiveParameter.getHiveTable()); + if (StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())) { + hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_TABLE) + .append(Constants.SPACE).append(sourceHiveParameter.getHiveTable()); } - if(StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())&& - StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())){ - sb.append(" --hcatalog-partition-keys ").append(sourceHiveParameter.getHivePartitionKey()) - .append(" --hcatalog-partition-values ").append(sourceHiveParameter.getHivePartitionValue()); + if (StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey()) + && StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())) { + hiveSourceSb.append(Constants.SPACE).append(SqoopConstants.HCATALOG_PARTITION_KEYS) + .append(Constants.SPACE).append(sourceHiveParameter.getHivePartitionKey()) + .append(Constants.SPACE).append(SqoopConstants.HCATALOG_PARTITION_VALUES) + .append(Constants.SPACE).append(sourceHiveParameter.getHivePartitionValue()); } } - }catch (Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop hive source params build failed: [%s]", e.getMessage())); } - return sb.toString(); + return hiveSourceSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java index 47430d1102..58e508ac6f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java @@ -14,106 +14,118 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.enums.QueryType; +import org.apache.dolphinscheduler.common.enums.SqoopQueryType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * mysql source generator */ public class MysqlSourceGenerator implements ISourceGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(MysqlSourceGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { - StringBuilder result = new 
StringBuilder(); - try { - SourceMysqlParameter sourceMysqlParameter - = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceMysqlParameter.class); + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { + + StringBuilder mysqlSourceSb = new StringBuilder(); + try { + SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class); SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); - if(sourceMysqlParameter != null){ + if (null != sourceMysqlParameter) { BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getSourcetype()), - sqoopTaskExecutionContext.getSourceConnectionParams()); - if(baseDataSource != null){ - result.append(" --connect ") - .append(baseDataSource.getJdbcUrl()) - .append(" --username ") - .append(baseDataSource.getUser()) - .append(" --password ") - .append(baseDataSource.getPassword()); - - if(sourceMysqlParameter.getSrcQueryType() == QueryType.FORM.ordinal()){ - if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())){ - result.append(" --table ").append(sourceMysqlParameter.getSrcTable()); + sqoopTaskExecutionContext.getSourceConnectionParams()); + + if (null != baseDataSource) { + + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT) + .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getJdbcUrl()).append(Constants.DOUBLE_QUOTES) + .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME) + .append(Constants.SPACE).append(baseDataSource.getUser()) + .append(Constants.SPACE).append(SqoopConstants.DB_PWD) + .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES); + + //sqoop table & sql query + if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.FORM.getCode()) { + if 
(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())) { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.TABLE) + .append(Constants.SPACE).append(sourceMysqlParameter.getSrcTable()); } - if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())){ - result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns()); + if (StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())) { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS) + .append(Constants.SPACE).append(sourceMysqlParameter.getSrcColumns()); } + } else if (sourceMysqlParameter.getSrcQueryType() == SqoopQueryType.SQL.getCode() + && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())) { - }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal() - && StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){ String srcQuery = sourceMysqlParameter.getSrcQuerySql(); - if(srcQuery.toLowerCase().contains("where")){ - srcQuery += " AND "+"$CONDITIONS"; - }else{ - srcQuery += " WHERE $CONDITIONS"; - } - result.append(" --query \'").append(srcQuery).append("\'"); + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY) + .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(srcQuery); + if (srcQuery.toLowerCase().contains(SqoopConstants.QUERY_WHERE)) { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_CONDITION).append(Constants.DOUBLE_QUOTES); + } else { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.QUERY_WITHOUT_CONDITION).append(Constants.DOUBLE_QUOTES); + } } - List mapColumnHive = sourceMysqlParameter.getMapColumnHive(); + //sqoop hive map column + List mapColumnHive = sourceMysqlParameter.getMapColumnHive(); - if(mapColumnHive != null && !mapColumnHive.isEmpty()){ + if (null != mapColumnHive && !mapColumnHive.isEmpty()) { StringBuilder columnMap = new StringBuilder(); - for(Property item:mapColumnHive){ - 
columnMap.append(item.getProp()).append("=").append(item.getValue()).append(","); + for (Property item : mapColumnHive) { + columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA); } - if(StringUtils.isNotEmpty(columnMap.toString())){ - result.append(" --map-column-hive ") - .append(columnMap.substring(0,columnMap.length() - 1)); + if (StringUtils.isNotEmpty(columnMap.toString())) { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_HIVE) + .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1)); } } - List mapColumnJava = sourceMysqlParameter.getMapColumnJava(); + //sqoop map column java + List mapColumnJava = sourceMysqlParameter.getMapColumnJava(); - if(mapColumnJava != null && !mapColumnJava.isEmpty()){ + if (null != mapColumnJava && !mapColumnJava.isEmpty()) { StringBuilder columnMap = new StringBuilder(); - for(Property item:mapColumnJava){ - columnMap.append(item.getProp()).append("=").append(item.getValue()).append(","); + for (Property item : mapColumnJava) { + columnMap.append(item.getProp()).append(Constants.EQUAL_SIGN).append(item.getValue()).append(Constants.COMMA); } - if(StringUtils.isNotEmpty(columnMap.toString())){ - result.append(" --map-column-java ") - .append(columnMap.substring(0,columnMap.length() - 1)); + if (StringUtils.isNotEmpty(columnMap.toString())) { + mysqlSourceSb.append(Constants.SPACE).append(SqoopConstants.MAP_COLUMN_JAVA) + .append(Constants.SPACE).append(columnMap.substring(0, columnMap.length() - 1)); } } } } - }catch (Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage())); } - return result.toString(); + return mysqlSourceSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java index f94d10a6d1..3ea32546ec 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java @@ -14,14 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,47 +34,53 @@ import org.slf4j.LoggerFactory; */ public class HdfsTargetGenerator implements ITargetGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(HdfsTargetGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { - StringBuilder result = new StringBuilder(); - try{ + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { + + StringBuilder hdfsTargetSb = new StringBuilder(); + + try { 
TargetHdfsParameter targetHdfsParameter = - JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetHdfsParameter.class); + JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHdfsParameter.class); - if(targetHdfsParameter != null){ + if (null != targetHdfsParameter) { - if(StringUtils.isNotEmpty(targetHdfsParameter.getTargetPath())){ - result.append(" --target-dir ").append(targetHdfsParameter.getTargetPath()); + if (StringUtils.isNotEmpty(targetHdfsParameter.getTargetPath())) { + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.TARGET_DIR) + .append(Constants.SPACE).append(targetHdfsParameter.getTargetPath()); } - if(StringUtils.isNotEmpty(targetHdfsParameter.getCompressionCodec())){ - result.append(" --compression-codec ").append(targetHdfsParameter.getCompressionCodec()); + if (StringUtils.isNotEmpty(targetHdfsParameter.getCompressionCodec())) { + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.COMPRESSION_CODEC) + .append(Constants.SPACE).append(targetHdfsParameter.getCompressionCodec()); } - if(StringUtils.isNotEmpty(targetHdfsParameter.getFileType())){ - result.append(" ").append(targetHdfsParameter.getFileType()); + if (StringUtils.isNotEmpty(targetHdfsParameter.getFileType())) { + hdfsTargetSb.append(Constants.SPACE).append(targetHdfsParameter.getFileType()); } - if(targetHdfsParameter.isDeleteTargetDir()){ - result.append(" --delete-target-dir"); + if (targetHdfsParameter.isDeleteTargetDir()) { + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.DELETE_TARGET_DIR); } - if(StringUtils.isNotEmpty(targetHdfsParameter.getFieldsTerminated())){ - result.append(" --fields-terminated-by '").append(targetHdfsParameter.getFieldsTerminated()).append("'"); + if (StringUtils.isNotEmpty(targetHdfsParameter.getFieldsTerminated())) { + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELDS_TERMINATED_BY) + 
.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetHdfsParameter.getFieldsTerminated()).append(Constants.SINGLE_QUOTES); } - if(StringUtils.isNotEmpty(targetHdfsParameter.getLinesTerminated())){ - result.append(" --lines-terminated-by '").append(targetHdfsParameter.getLinesTerminated()).append("'"); + if (StringUtils.isNotEmpty(targetHdfsParameter.getLinesTerminated())) { + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.LINES_TERMINATED_BY) + .append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetHdfsParameter.getLinesTerminated()).append(Constants.SINGLE_QUOTES); } - result.append(" --null-non-string 'NULL' --null-string 'NULL'"); + hdfsTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELD_NULL_PLACEHOLDER); } - }catch(Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop hdfs target params build failed: [%s]", e.getMessage())); } - return result.toString(); + return hdfsTargetSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java index 83c4123c45..769fc62f25 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java @@ -14,14 +14,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -30,57 +34,58 @@ import org.slf4j.LoggerFactory; */ public class HiveTargetGenerator implements ITargetGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(HiveTargetGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { - StringBuilder result = new StringBuilder(); + StringBuilder hiveTargetSb = new StringBuilder(); - try{ + try { TargetHiveParameter targetHiveParameter = - JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetHiveParameter.class); - if(targetHiveParameter != null){ - - result.append(" --hive-import "); + JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetHiveParameter.class); + if (null != targetHiveParameter) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_IMPORT); - if(StringUtils.isNotEmpty(targetHiveParameter.getHiveDatabase())&& - StringUtils.isNotEmpty(targetHiveParameter.getHiveTable())){ - result.append(" --hive-table ") - 
.append(targetHiveParameter.getHiveDatabase()) - .append(".") - .append(targetHiveParameter.getHiveTable()); + if (StringUtils.isNotEmpty(targetHiveParameter.getHiveDatabase()) + && StringUtils.isNotEmpty(targetHiveParameter.getHiveTable())) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_DATABASE) + .append(Constants.SPACE).append(targetHiveParameter.getHiveDatabase()) + .append(Constants.SPACE).append(SqoopConstants.HIVE_TABLE) + .append(Constants.SPACE).append(targetHiveParameter.getHiveTable()); } - if(targetHiveParameter.isCreateHiveTable()){ - result.append(" --create-hive-table"); + if (targetHiveParameter.isCreateHiveTable()) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.CREATE_HIVE_TABLE); } - if(targetHiveParameter.isDropDelimiter()){ - result.append(" --hive-drop-import-delims"); + if (targetHiveParameter.isDropDelimiter()) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_DROP_IMPORT_DELIMS); } - if(targetHiveParameter.isHiveOverWrite()){ - result.append(" --hive-overwrite -delete-target-dir"); + if (targetHiveParameter.isHiveOverWrite()) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_OVERWRITE) + .append(Constants.SPACE).append(SqoopConstants.DELETE_TARGET_DIR); } - if(StringUtils.isNotEmpty(targetHiveParameter.getReplaceDelimiter())){ - result.append(" --hive-delims-replacement ").append(targetHiveParameter.getReplaceDelimiter()); + if (StringUtils.isNotEmpty(targetHiveParameter.getReplaceDelimiter())) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_DELIMS_REPLACEMENT) + .append(Constants.SPACE).append(targetHiveParameter.getReplaceDelimiter()); } - if(StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionKey())&& - StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionValue())){ - result.append(" --hive-partition-key ") - .append(targetHiveParameter.getHivePartitionKey()) - .append(" --hive-partition-value ") - 
.append(targetHiveParameter.getHivePartitionValue()); + if (StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionKey()) + && StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionValue())) { + hiveTargetSb.append(Constants.SPACE).append(SqoopConstants.HIVE_PARTITION_KEY) + .append(Constants.SPACE).append(targetHiveParameter.getHivePartitionKey()) + .append(Constants.SPACE).append(SqoopConstants.HIVE_PARTITION_VALUE) + .append(Constants.SPACE).append(targetHiveParameter.getHivePartitionValue()); } } - }catch(Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop hive target params build failed: [%s]", e.getMessage())); } - return result.toString(); + return hiveTargetSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java index 406c6f661d..4384c96479 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java @@ -14,21 +14,22 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; -import org.apache.dolphinscheduler.common.utils.*; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; -import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.server.entity.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopConstants; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,59 +38,74 @@ import org.slf4j.LoggerFactory; */ public class MysqlTargetGenerator implements ITargetGenerator { - private Logger logger = LoggerFactory.getLogger(getClass()); + private static final Logger logger = LoggerFactory.getLogger(MysqlTargetGenerator.class); @Override - public String generate(SqoopParameters sqoopParameters,TaskExecutionContext taskExecutionContext) { + public String generate(SqoopParameters sqoopParameters, TaskExecutionContext taskExecutionContext) { - StringBuilder result = new StringBuilder(); - try{ + StringBuilder mysqlTargetSb = new StringBuilder(); + try { TargetMysqlParameter targetMysqlParameter = - 
JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetMysqlParameter.class); + JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class); SqoopTaskExecutionContext sqoopTaskExecutionContext = taskExecutionContext.getSqoopTaskExecutionContext(); - if(targetMysqlParameter != null && targetMysqlParameter.getTargetDatasource() != 0){ + if (null != targetMysqlParameter && targetMysqlParameter.getTargetDatasource() != 0) { // get datasource BaseDataSource baseDataSource = DataSourceFactory.getDatasource(DbType.of(sqoopTaskExecutionContext.getTargetType()), - sqoopTaskExecutionContext.getTargetConnectionParams()); - - if(baseDataSource != null){ - result.append(" --connect ") - .append(baseDataSource.getJdbcUrl()) - .append(" --username ") - .append(baseDataSource.getUser()) - .append(" --password ") - .append(baseDataSource.getPassword()) - .append(" --table ") - .append(targetMysqlParameter.getTargetTable()); - - if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetColumns())){ - result.append(" --columns ").append(targetMysqlParameter.getTargetColumns()); + sqoopTaskExecutionContext.getTargetConnectionParams()); + + if (null != baseDataSource) { + + mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.DB_CONNECT) + .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getJdbcUrl()).append(Constants.DOUBLE_QUOTES) + .append(Constants.SPACE).append(SqoopConstants.DB_USERNAME) + .append(Constants.SPACE).append(baseDataSource.getUser()) + .append(Constants.SPACE).append(SqoopConstants.DB_PWD) + .append(Constants.SPACE).append(Constants.DOUBLE_QUOTES).append(baseDataSource.getPassword()).append(Constants.DOUBLE_QUOTES) + .append(Constants.SPACE).append(SqoopConstants.TABLE) + .append(Constants.SPACE).append(targetMysqlParameter.getTargetTable()); + + if (StringUtils.isNotEmpty(targetMysqlParameter.getTargetColumns())) { + mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.COLUMNS) + 
.append(Constants.SPACE).append(targetMysqlParameter.getTargetColumns()); } - if(StringUtils.isNotEmpty(targetMysqlParameter.getFieldsTerminated())){ - result.append(" --fields-terminated-by '").append(targetMysqlParameter.getFieldsTerminated()).append("'"); + if (StringUtils.isNotEmpty(targetMysqlParameter.getFieldsTerminated())) { + mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.FIELDS_TERMINATED_BY); + if (targetMysqlParameter.getFieldsTerminated().contains("'")) { + mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getFieldsTerminated()); + + } else { + mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getFieldsTerminated()).append(Constants.SINGLE_QUOTES); + } } - if(StringUtils.isNotEmpty(targetMysqlParameter.getLinesTerminated())){ - result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'"); + if (StringUtils.isNotEmpty(targetMysqlParameter.getLinesTerminated())) { + mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.LINES_TERMINATED_BY); + if (targetMysqlParameter.getLinesTerminated().contains(Constants.SINGLE_QUOTES)) { + mysqlTargetSb.append(Constants.SPACE).append(targetMysqlParameter.getLinesTerminated()); + } else { + mysqlTargetSb.append(Constants.SPACE).append(Constants.SINGLE_QUOTES).append(targetMysqlParameter.getLinesTerminated()).append(Constants.SINGLE_QUOTES); + } } - if(targetMysqlParameter.getIsUpdate() - && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey()) - && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){ - result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey()) - .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode()); + if (targetMysqlParameter.getIsUpdate() + && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey()) + && StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())) { + 
mysqlTargetSb.append(Constants.SPACE).append(SqoopConstants.UPDATE_KEY) + .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateKey()) + .append(Constants.SPACE).append(SqoopConstants.UPDATE_MODE) + .append(Constants.SPACE).append(targetMysqlParameter.getTargetUpdateMode()); } } } - }catch (Exception e){ - logger.error(e.getMessage()); + } catch (Exception e) { + logger.error(String.format("Sqoop mysql target params build failed: [%s]", e.getMessage())); } - return result.toString(); + return mysqlTargetSb.toString(); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java index 49bfb5f9a8..f6d4d0d4bb 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java @@ -83,10 +83,10 @@ public class ZKMasterClient extends AbstractZKClient { // self tolerant if (getActiveMasterNum() == 1) { - failoverWorker(null, true); - failoverMaster(null); + removeZKNodePath(null, ZKNodeType.MASTER, true); + removeZKNodePath(null, ZKNodeType.WORKER, true); } - + registerListener(); } catch (Exception e) { logger.error("master start up exception", e); } finally { @@ -133,9 +133,16 @@ public class ZKMasterClient extends AbstractZKClient { mutex = new InterProcessMutex(getZkClient(), failoverPath); mutex.acquire(); - String serverHost = getHostByEventDataPath(path); - // handle dead server - handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP); + String serverHost = null; + if(StringUtils.isNotEmpty(path)){ + serverHost = getHostByEventDataPath(path); + if(StringUtils.isEmpty(serverHost)){ + logger.error("server down error: unknown path: {}", path); + return; + } + // handle dead server + handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP); + } //failover server if 
(failover) { failoverServerWhenDown(serverHost, zkNodeType); @@ -336,8 +343,11 @@ public class ZKMasterClient extends AbstractZKClient { List needFailoverProcessInstanceList = processService.queryNeedFailoverProcessInstances(masterHost); + logger.info("failover process list size:{} ", needFailoverProcessInstanceList.size()); //updateProcessInstance host is null and insert into command for (ProcessInstance processInstance : needFailoverProcessInstanceList) { + logger.info("failover process instance id: {} host:{}", + processInstance.getId(), processInstance.getHost()); if (Constants.NULL.equals(processInstance.getHost())) { continue; } diff --git a/dolphinscheduler-server/src/main/resources/logback-master.xml b/dolphinscheduler-server/src/main/resources/logback-master.xml index 7410c01f05..2b986ddad2 100644 --- a/dolphinscheduler-server/src/main/resources/logback-master.xml +++ b/dolphinscheduler-server/src/main/resources/logback-master.xml @@ -74,7 +74,6 @@ - diff --git a/dolphinscheduler-server/src/main/resources/logback-worker.xml b/dolphinscheduler-server/src/main/resources/logback-worker.xml index be1d0acde5..a09202fb7e 100644 --- a/dolphinscheduler-server/src/main/resources/logback-worker.xml +++ b/dolphinscheduler-server/src/main/resources/logback-worker.xml @@ -75,7 +75,6 @@ - diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java index 351be8aa65..0f6239af0e 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java @@ -217,6 +217,7 @@ public class DependentTaskTest { taskInstance.setId(252612); taskInstance.setName("C"); taskInstance.setProcessInstanceId(10111); + taskInstance.setTaskJson("{}"); 
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); return taskInstance; } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java index d541f43a3b..d402afcee2 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterCommandTest.java @@ -111,29 +111,4 @@ public class MasterCommandTest { } - @Test - public void testDagHelper(){ - - ProcessDefinition processDefinition = processDefinitionMapper.selectById(19); - - try { - ProcessDag processDag = DagHelper.generateFlowDag(processDefinition.getProcessDefinitionJson(), - new ArrayList<>(), new ArrayList<>(), TaskDependType.TASK_POST); - - DAG dag = DagHelper.buildDagGraph(processDag); - Collection start = DagHelper.getStartVertex("1", dag, null); - - System.out.println(start.toString()); - - Map forbidden = DagHelper.getForbiddenTaskNodeMaps(processDefinition.getProcessDefinitionJson()); - System.out.println(forbidden); - } catch (Exception e) { - e.printStackTrace(); - } - - } - - - - } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java index bf1e7e2d7a..6979a939e7 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java @@ -14,19 +14,41 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.master; -import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.common.enums.*; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVERY_START_NODE_STRING; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; + +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.powermock.api.mockito.PowerMockito.mock; + +import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Schedule; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.runner.MasterExecThread; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.lang.reflect.Field; +import java.lang.reflect.Method; +import java.text.ParseException; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -36,15 +58,6 @@ import org.powermock.api.mockito.PowerMockito; import 
org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.springframework.context.ApplicationContext; -import java.lang.reflect.Field; -import java.lang.reflect.Method; -import java.text.ParseException; -import java.util.*; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.powermock.api.mockito.PowerMockito.mock; /** * test for MasterExecThread @@ -66,7 +79,7 @@ public class MasterExecThreadTest { private ApplicationContext applicationContext; @Before - public void init() throws Exception{ + public void init() throws Exception { processService = mock(ProcessService.class); applicationContext = mock(ApplicationContext.class); @@ -92,7 +105,7 @@ public class MasterExecThreadTest { masterExecThread = PowerMockito.spy(new MasterExecThread( processInstance , processService - ,null, null, config)); + , null, null, config)); // prepareProcess init dag Field dag = MasterExecThread.class.getDeclaredField("dag"); dag.setAccessible(true); @@ -106,18 +119,17 @@ public class MasterExecThreadTest { /** * without schedule - * @throws ParseException */ @Test public void testParallelWithOutSchedule() throws ParseException { - try{ + try { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Method method = MasterExecThread.class.getDeclaredMethod("executeComplementProcess"); method.setAccessible(true); method.invoke(masterExecThread); // one create save, and 1-30 for next save, and last day 20 no save verify(processService, times(20)).saveProcessInstance(processInstance); - }catch (Exception e){ + } catch (Exception e) { e.printStackTrace(); Assert.fail(); } @@ -125,27 +137,86 @@ public class 
MasterExecThreadTest { /** * with schedule - * @throws ParseException */ @Test - public void testParallelWithSchedule() throws ParseException { - try{ + public void testParallelWithSchedule() { + try { Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Method method = MasterExecThread.class.getDeclaredMethod("executeComplementProcess"); method.setAccessible(true); method.invoke(masterExecThread); // one create save, and 9(1 to 20 step 2) for next save, and last day 31 no save verify(processService, times(9)).saveProcessInstance(processInstance); - }catch (Exception e){ + } catch (Exception e) { + Assert.fail(); + } + } + + @Test + public void testParseStartNodeName() throws ParseException { + try { + Map cmdParam = new HashMap<>(); + cmdParam.put(CMD_PARAM_START_NODE_NAMES, "t1,t2,t3"); + Mockito.when(processInstance.getCommandParam()).thenReturn(JSONUtils.toJsonString(cmdParam)); + Class masterExecThreadClass = MasterExecThread.class; + Method method = masterExecThreadClass.getDeclaredMethod("parseStartNodeName", String.class); + method.setAccessible(true); + List nodeNames = (List) method.invoke(masterExecThread, JSONUtils.toJsonString(cmdParam)); + Assert.assertEquals(3, nodeNames.size()); + } catch (Exception e) { + Assert.fail(); + } + } + + @Test + public void testRetryTaskIntervalOverTime() { + try { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(0); + taskInstance.setMaxRetryTimes(0); + taskInstance.setRetryInterval(0); + taskInstance.setState(ExecutionStatus.FAILURE); + Class masterExecThreadClass = MasterExecThread.class; + Method method = masterExecThreadClass.getDeclaredMethod("retryTaskIntervalOverTime", TaskInstance.class); + method.setAccessible(true); + Assert.assertTrue((Boolean) method.invoke(masterExecThread, taskInstance)); + } catch (Exception e) { + Assert.fail(); + } + } + + @Test + public void testGetStartTaskInstanceList() { + try { + 
TaskInstance taskInstance1 = new TaskInstance(); + taskInstance1.setId(1); + TaskInstance taskInstance2 = new TaskInstance(); + taskInstance2.setId(2); + TaskInstance taskInstance3 = new TaskInstance(); + taskInstance3.setId(3); + TaskInstance taskInstance4 = new TaskInstance(); + taskInstance4.setId(4); + Map cmdParam = new HashMap<>(); + cmdParam.put(CMD_PARAM_RECOVERY_START_NODE_STRING, "1,2,3,4"); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance1); + Mockito.when(processService.findTaskInstanceById(2)).thenReturn(taskInstance2); + Mockito.when(processService.findTaskInstanceById(3)).thenReturn(taskInstance3); + Mockito.when(processService.findTaskInstanceById(4)).thenReturn(taskInstance4); + Class masterExecThreadClass = MasterExecThread.class; + Method method = masterExecThreadClass.getDeclaredMethod("getStartTaskInstanceList", String.class); + method.setAccessible(true); + List taskInstances = (List) method.invoke(masterExecThread, JSONUtils.toJsonString(cmdParam)); + Assert.assertEquals(4, taskInstances.size()); + } catch (Exception e) { Assert.fail(); } } - private List zeroSchedulerList(){ + private List zeroSchedulerList() { return Collections.EMPTY_LIST; } - private List oneSchedulerList(){ + private List oneSchedulerList() { List schedulerList = new LinkedList<>(); Schedule schedule = new Schedule(); schedule.setCrontab("0 0 0 1/2 * ?"); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java index 32881b5681..8c2321dd8e 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java @@ -17,18 +17,22 @@ package 
org.apache.dolphinscheduler.server.master.consumer; -import java.util.Date; - import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.common.enums.ResourceType; +import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; +import org.apache.dolphinscheduler.server.entity.DataxTaskExecutionContext; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher; import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager; @@ -38,11 +42,20 @@ import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; import org.apache.dolphinscheduler.server.zk.SpringZKServer; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient; import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; + +import java.util.ArrayList; +import java.util.Date; +import 
java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -51,9 +64,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; - @RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, CuratorZookeeperClient.class, +@ContextConfiguration(classes = {DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, CuratorZookeeperClient.class, NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, TaskPriorityQueueConsumer.class, ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class, CuratorZookeeperClient.class}) @@ -61,7 +73,7 @@ public class TaskPriorityQueueConsumerTest { @Autowired - private TaskPriorityQueue taskPriorityQueue; + private TaskPriorityQueue taskPriorityQueue; @Autowired private TaskPriorityQueueConsumer taskPriorityQueueConsumer; @@ -73,23 +85,21 @@ public class TaskPriorityQueueConsumerTest { private ExecutorDispatcher dispatcher; @Before - public void init(){ + public void init() { Tenant tenant = new Tenant(); tenant.setId(1); tenant.setTenantCode("journey"); - tenant.setTenantName("journey"); tenant.setDescription("journey"); tenant.setQueueId(1); tenant.setCreateTime(new Date()); tenant.setUpdateTime(new Date()); - Mockito.doReturn(tenant).when(processService).getTenantForProcess(1,2); + Mockito.doReturn(tenant).when(processService).getTenantForProcess(1, 2); Mockito.doReturn("default").when(processService).queryUserQueueByProcessInstanceId(1); } - @Test public void testSHELLTask() throws Exception { TaskInstance taskInstance = new TaskInstance(); @@ -98,12 +108,31 @@ 
public class TaskPriorityQueueConsumerTest { taskInstance.setProcessDefinitionId(1); taskInstance.setProcessInstanceId(1); taskInstance.setState(ExecutionStatus.KILL); - taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}"); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false," + + "\"interval\":0}," + + "\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"default\"}"); taskInstance.setProcessInstancePriority(Priority.MEDIUM); taskInstance.setWorkerGroup("default"); taskInstance.setExecutorId(2); ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); processInstance.setTenantId(1); processInstance.setCommandType(CommandType.START_PROCESS); taskInstance.setProcessInstance(processInstance); @@ -114,11 +143,13 @@ public class 
TaskPriorityQueueConsumerTest { taskInstance.setProcessDefine(processDefinition); Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); - taskPriorityQueue.put("2_1_2_1_default"); + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "default"); + taskPriorityQueue.put(taskPriority); - Thread.sleep(10000); - } + TimeUnit.SECONDS.sleep(10); + Assert.assertNotNull(taskInstance); + } @Test public void testSQLTask() throws Exception { @@ -128,7 +159,13 @@ public class TaskPriorityQueueConsumerTest { taskInstance.setProcessDefinitionId(1); taskInstance.setProcessInstanceId(1); taskInstance.setState(ExecutionStatus.KILL); - taskInstance.setTaskJson("{\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-3655\",\"maxRetryTimes\":0,\"name\":\"UDF测试\",\"params\":\"{\\\"postStatements\\\":[],\\\"connParams\\\":\\\"\\\",\\\"receiversCc\\\":\\\"\\\",\\\"udfs\\\":\\\"1\\\",\\\"type\\\":\\\"HIVE\\\",\\\"title\\\":\\\"test\\\",\\\"sql\\\":\\\"select id,name,ds,zodia(ds) from t_journey_user\\\",\\\"preStatements\\\":[],\\\"sqlType\\\":0,\\\"receivers\\\":\\\"825193156@qq.com\\\",\\\"datasource\\\":3,\\\"showType\\\":\\\"TABLE\\\",\\\"localParams\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQL\"}"); + taskInstance.setTaskJson("{\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-3655\",\"maxRetryTimes\":0,\"name\":\"UDF测试\"," + + "\"params\":\"{\\\"postStatements\\\":[],\\\"connParams\\\":\\\"\\\",\\\"receiversCc\\\":\\\"\\\",\\\"udfs\\\":\\\"1\\\",\\\"type\\\":\\\"HIVE\\\",\\\"title\\\":\\\"test\\\"," + + "\\\"sql\\\":\\\"select id,name,ds,zodia(ds) from t_journey_user\\\",\\\"preStatements\\\":[]," + + 
"\\\"sqlType\\\":0,\\\"receivers\\\":\\\"825193156@qq.com\\\",\\\"datasource\\\":3,\\\"showType\\\":\\\"TABLE\\\",\\\"localParams\\\":[]}\"," + + "\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQL\"}"); taskInstance.setProcessInstancePriority(Priority.MEDIUM); taskInstance.setWorkerGroup("default"); taskInstance.setExecutorId(2); @@ -143,23 +180,28 @@ public class TaskPriorityQueueConsumerTest { processDefinition.setProjectId(1); taskInstance.setProcessDefine(processDefinition); Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); - taskPriorityQueue.put("2_1_2_1_default"); + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "default"); + taskPriorityQueue.put(taskPriority); DataSource dataSource = new DataSource(); dataSource.setId(1); dataSource.setName("sqlDatasource"); dataSource.setType(DbType.MYSQL); dataSource.setUserId(2); - dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}"); + dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\"," + + "\"database\":\"dolphinscheduler_qiaozhanwei\"," + + "\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\"," + + "\"user\":\"root\"," + + "\"password\":\"root@123\"}"); dataSource.setCreateTime(new Date()); dataSource.setUpdateTime(new Date()); Mockito.doReturn(dataSource).when(processService).findDataSourceById(1); - Thread.sleep(10000); + TimeUnit.SECONDS.sleep(10); + Assert.assertNotNull(taskInstance); } - @Test public void testDataxTask() throws Exception { TaskInstance taskInstance = new TaskInstance(); @@ -168,7 +210,26 @@ 
public class TaskPriorityQueueConsumerTest { taskInstance.setProcessDefinitionId(1); taskInstance.setProcessInstanceId(1); taskInstance.setState(ExecutionStatus.KILL); - taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-97625\",\"maxRetryTimes\":0,\"name\":\"MySQL数据相互导入\",\"params\":\"{\\\"targetTable\\\":\\\"pv2\\\",\\\"postStatements\\\":[],\\\"jobSpeedRecord\\\":1000,\\\"customConfig\\\":0,\\\"dtType\\\":\\\"MYSQL\\\",\\\"dsType\\\":\\\"MYSQL\\\",\\\"jobSpeedByte\\\":0,\\\"dataSource\\\":80,\\\"dataTarget\\\":80,\\\"sql\\\":\\\"SELECT dt,count FROM pv\\\",\\\"preStatements\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"DATAX\",\"workerGroup\":\"default\"}"); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\"," + + "\"forbidden\":false,\"id\":\"tasks-97625\"," + + "\"maxRetryTimes\":0,\"name\":\"MySQL数据相互导入\"," + + "\"params\":\"{\\\"targetTable\\\":\\\"pv2\\\"," + + " \\\"postStatements\\\":[]," + + " \\\"jobSpeedRecord\\\":1000," + + " \\\"customConfig\\\":0," + + " \\\"dtType\\\":\\\"MYSQL\\\"," + + " \\\"dsType\\\":\\\"MYSQL\\\"," + + " \\\"jobSpeedByte\\\":0," + + " \\\"dataSource\\\":80," + + " \\\"dataTarget\\\":80," + + " \\\"sql\\\":\\\"SELECT dt,count FROM pv\\\"," + + " \\\"preStatements\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"DATAX\"," + + 
"\"workerGroup\":\"default\"}"); taskInstance.setProcessInstancePriority(Priority.MEDIUM); taskInstance.setWorkerGroup("default"); taskInstance.setExecutorId(2); @@ -183,23 +244,26 @@ public class TaskPriorityQueueConsumerTest { processDefinition.setProjectId(1); taskInstance.setProcessDefine(processDefinition); Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); - taskPriorityQueue.put("2_1_2_1_default"); - - + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "default"); + taskPriorityQueue.put(taskPriority); DataSource dataSource = new DataSource(); dataSource.setId(80); dataSource.setName("datax"); dataSource.setType(DbType.MYSQL); dataSource.setUserId(2); - dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}"); + dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\"," + + "\"database\":\"dolphinscheduler_qiaozhanwei\"," + + "\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\"," + + "\"user\":\"root\"," + + "\"password\":\"root@123\"}"); dataSource.setCreateTime(new Date()); dataSource.setUpdateTime(new Date()); Mockito.doReturn(dataSource).when(processService).findDataSourceById(80); - Thread.sleep(10000); + TimeUnit.SECONDS.sleep(10); + Assert.assertNotNull(taskInstance); } - @Test public void testSqoopTask() throws Exception { TaskInstance taskInstance = new TaskInstance(); @@ -208,7 +272,32 @@ public class TaskPriorityQueueConsumerTest { taskInstance.setProcessDefinitionId(1); taskInstance.setProcessInstanceId(1); taskInstance.setState(ExecutionStatus.KILL); - 
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-63634\",\"maxRetryTimes\":0,\"name\":\"MySQL数据导入HDSF\",\"params\":\"{\\\"sourceType\\\":\\\"MYSQL\\\",\\\"targetType\\\":\\\"HDFS\\\",\\\"targetParams\\\":\\\"{\\\\\\\"targetPath\\\\\\\":\\\\\\\"/test/datatest\\\\\\\",\\\\\\\"deleteTargetDir\\\\\\\":true,\\\\\\\"fileType\\\\\\\":\\\\\\\"--as-textfile\\\\\\\",\\\\\\\"compressionCodec\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"fieldsTerminated\\\\\\\":\\\\\\\",\\\\\\\",\\\\\\\"linesTerminated\\\\\\\":\\\\\\\"\\\\\\\\\\\\\\\\n\\\\\\\"}\\\",\\\"modelType\\\":\\\"import\\\",\\\"sourceParams\\\":\\\"{\\\\\\\"srcType\\\\\\\":\\\\\\\"MYSQL\\\\\\\",\\\\\\\"srcDatasource\\\\\\\":1,\\\\\\\"srcTable\\\\\\\":\\\\\\\"t_ds_user\\\\\\\",\\\\\\\"srcQueryType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcQuerySql\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcColumnType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcColumns\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcConditionList\\\\\\\":[],\\\\\\\"mapColumnHive\\\\\\\":[],\\\\\\\"mapColumnJava\\\\\\\":[]}\\\",\\\"localParams\\\":[],\\\"concurrency\\\":1}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQOOP\",\"workerGroup\":\"default\"}"); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\"," + + "\"forbidden\":false,\"id\":\"tasks-63634\"," + + "\"maxRetryTimes\":0,\"name\":\"MySQL数据导入HDSF\"," + + "\"params\":\"{\\\"sourceType\\\":\\\"MYSQL\\\"," + + " \\\"targetType\\\":\\\"HDFS\\\"," + + " \\\"targetParams\\\":\\\"{\\\\\\\"targetPath\\\\\\\":\\\\\\\"/test/datatest\\\\\\\"," + + " 
\\\\\\\"deleteTargetDir\\\\\\\":true,\\\\\\\"fileType\\\\\\\":\\\\\\\"--as-textfile\\\\\\\"," + + " \\\\\\\"compressionCodec\\\\\\\":\\\\\\\"\\\\\\\"," + + " \\\\\\\"fieldsTerminated\\\\\\\":\\\\\\\",\\\\\\\"," + + " \\\\\\\"linesTerminated\\\\\\\":\\\\\\\"\\\\\\\\\\\\\\\\n\\\\\\\"}\\\"," + + " \\\"modelType\\\":\\\"import\\\"," + + " \\\"sourceParams\\\":\\\"{\\\\\\\"srcType\\\\\\\":\\\\\\\"MYSQL\\\\\\\"," + + " \\\\\\\"srcDatasource\\\\\\\":1,\\\\\\\"srcTable\\\\\\\":\\\\\\\"t_ds_user\\\\\\\"," + + " \\\\\\\"srcQueryType\\\\\\\":\\\\\\\"0\\\\\\\"," + + " \\\\\\\"srcQuerySql\\\\\\\":\\\\\\\"\\\\\\\"," + + " \\\\\\\"srcColumnType\\\\\\\":\\\\\\\"0\\\\\\\"," + + " \\\\\\\"srcColumns\\\\\\\":\\\\\\\"\\\\\\\"," + + " \\\\\\\"srcConditionList\\\\\\\":[],\\\\\\\"mapColumnHive\\\\\\\":[],\\\\\\\"mapColumnJava\\\\\\\":[]}\\\"," + + " \\\"localParams\\\":[],\\\"concurrency\\\":1}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SQOOP\"," + + "\"workerGroup\":\"default\"}"); taskInstance.setProcessInstancePriority(Priority.MEDIUM); taskInstance.setWorkerGroup("default"); taskInstance.setExecutorId(2); @@ -223,39 +312,357 @@ public class TaskPriorityQueueConsumerTest { processDefinition.setProjectId(1); taskInstance.setProcessDefine(processDefinition); Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); - taskPriorityQueue.put("2_1_2_1_default"); - + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "default"); + taskPriorityQueue.put(taskPriority); DataSource dataSource = new DataSource(); dataSource.setId(1); dataSource.setName("datax"); dataSource.setType(DbType.MYSQL); dataSource.setUserId(2); - 
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}"); + dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\"," + + "\"database\":\"dolphinscheduler_qiaozhanwei\"," + + "\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\"," + + "\"user\":\"root\"," + + "\"password\":\"root@123\"}"); dataSource.setCreateTime(new Date()); dataSource.setUpdateTime(new Date()); Mockito.doReturn(dataSource).when(processService).findDataSourceById(1); - Thread.sleep(10000); + TimeUnit.SECONDS.sleep(10); + Assert.assertNotNull(taskInstance); } - @Test - public void testTaskInstanceIsFinalState(){ + public void testTaskInstanceIsFinalState() { TaskInstance taskInstance = new TaskInstance(); taskInstance.setId(1); taskInstance.setTaskType("SHELL"); taskInstance.setProcessDefinitionId(1); taskInstance.setProcessInstanceId(1); taskInstance.setState(ExecutionStatus.KILL); - taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}"); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\"," + + "\"forbidden\":false,\"id\":\"tasks-55201\"," 
+ + "\"maxRetryTimes\":0,\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\"," + + "\"retryInterval\":1,\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"default\"}"); taskInstance.setProcessInstancePriority(Priority.MEDIUM); taskInstance.setWorkerGroup("default"); taskInstance.setExecutorId(2); + Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1); + + Boolean state = taskPriorityQueueConsumer.taskInstanceIsFinalState(1); + Assert.assertNotNull(state); + } + + @Test + public void testNotFoundWorkerGroup() throws Exception { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0}," + + "\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"NoWorkGroup\"}"); + taskInstance.setProcessInstancePriority(Priority.MEDIUM); + taskInstance.setWorkerGroup("NoWorkGroup"); + 
taskInstance.setExecutorId(2); + + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setTenantId(1); + processInstance.setCommandType(CommandType.START_PROCESS); + taskInstance.setProcessInstance(processInstance); + taskInstance.setState(ExecutionStatus.DELAY_EXECUTION); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setUserId(2); + processDefinition.setProjectId(1); + taskInstance.setProcessDefine(processDefinition); + + Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); + Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1); + + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "NoWorkGroup"); + taskPriorityQueue.put(taskPriority); + + TimeUnit.SECONDS.sleep(10); + + Assert.assertNotNull(taskInstance); + + } + + @Test + public void testDispatch() throws Exception { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0}," + + "\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"NoWorkGroup\"}"); + 
taskInstance.setProcessInstancePriority(Priority.MEDIUM); + taskInstance.setWorkerGroup("NoWorkGroup"); + taskInstance.setExecutorId(2); + + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setTenantId(1); + processInstance.setCommandType(CommandType.START_PROCESS); + taskInstance.setProcessInstance(processInstance); + taskInstance.setState(ExecutionStatus.DELAY_EXECUTION); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setUserId(2); + processDefinition.setProjectId(1); + taskInstance.setProcessDefine(processDefinition); + + Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); + Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1); + + TaskPriority taskPriority = new TaskPriority(); + taskPriority.setTaskId(1); + boolean res = taskPriorityQueueConsumer.dispatch(taskPriority); + + Assert.assertFalse(res); + } + + @Test + public void testGetTaskExecutionContext() throws Exception { + + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0}," + + "\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + 
"\"type\":\"SHELL\"," + + "\"workerGroup\":\"NoWorkGroup\"}"); + taskInstance.setProcessInstancePriority(Priority.MEDIUM); + taskInstance.setWorkerGroup("NoWorkGroup"); + taskInstance.setExecutorId(2); + + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setTenantId(1); + processInstance.setCommandType(CommandType.START_PROCESS); + taskInstance.setProcessInstance(processInstance); + taskInstance.setState(ExecutionStatus.DELAY_EXECUTION); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setUserId(2); + processDefinition.setProjectId(1); + taskInstance.setProcessDefine(processDefinition); + + Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); + Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1); + + TaskExecutionContext taskExecutionContext = taskPriorityQueueConsumer.getTaskExecutionContext(1); + + Assert.assertNotNull(taskExecutionContext); + } + + @Test + public void testGetResourceFullNames() throws Exception { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[{\\\"id\\\":123},{\\\"res\\\":\\\"/data/file\\\"}]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0}," + + 
"\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"NoWorkGroup\"}"); + + taskInstance.setProcessInstancePriority(Priority.MEDIUM); + taskInstance.setWorkerGroup("NoWorkGroup"); + taskInstance.setExecutorId(2); + // task node + TaskNode taskNode = JSONUtils.parseObject(taskInstance.getTaskJson(), TaskNode.class); + + Map map = taskPriorityQueueConsumer.getResourceFullNames(taskNode); + + List resourcesList = new ArrayList(); + Resource resource = new Resource(); + resource.setFileName("fileName"); + resourcesList.add(resource); + + Mockito.doReturn(resourcesList).when(processService).listResourceByIds(new Integer[]{123}); + Mockito.doReturn("tenantCode").when(processService).queryTenantCodeByResName(resource.getFullName(), ResourceType.FILE); + Assert.assertNotNull(map); + + } + + @Test + public void testVerifyTenantIsNull() throws Exception { + Tenant tenant = null; + + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + taskInstance.setProcessInstance(processInstance); + + boolean res = taskPriorityQueueConsumer.verifyTenantIsNull(tenant,taskInstance); + Assert.assertTrue(res); + + tenant = new Tenant(); + tenant.setId(1); + tenant.setTenantCode("journey"); + tenant.setDescription("journey"); + tenant.setQueueId(1); + tenant.setCreateTime(new Date()); + tenant.setUpdateTime(new Date()); + res = taskPriorityQueueConsumer.verifyTenantIsNull(tenant,taskInstance); + Assert.assertFalse(res); + + } + + @Test + public void testSetDataxTaskRelation() throws Exception { + + DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); + TaskNode taskNode = new TaskNode(); + taskNode.setParams("{\"dataSource\":1,\"dataTarget\":1}"); + DataSource 
dataSource = new DataSource(); + dataSource.setId(1); + dataSource.setConnectionParams(""); + dataSource.setType(DbType.MYSQL); + Mockito.doReturn(dataSource).when(processService).findDataSourceById(1); + + taskPriorityQueueConsumer.setDataxTaskRelation(dataxTaskExecutionContext,taskNode); + + Assert.assertEquals(1,dataxTaskExecutionContext.getDataSourceId()); + Assert.assertEquals(1,dataxTaskExecutionContext.getDataTargetId()); + } + + @Test + public void testRun() throws Exception { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setTaskType("SHELL"); + taskInstance.setProcessDefinitionId(1); + taskInstance.setProcessInstanceId(1); + taskInstance.setState(ExecutionStatus.KILL); + taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\"," + + "\"conditionsTask\":false," + + "\"depList\":[]," + + "\"dependence\":\"{}\"," + + "\"forbidden\":false," + + "\"id\":\"tasks-55201\"," + + "\"maxRetryTimes\":0," + + "\"name\":\"测试任务\"," + + "\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\"," + + "\"preTasks\":\"[]\"," + + "\"retryInterval\":1," + + "\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\"," + + "\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0}," + + "\"timeout\":\"{\\\"enable\\\":false," + + "\\\"strategy\\\":\\\"\\\"}\"," + + "\"type\":\"SHELL\"," + + "\"workerGroup\":\"NoWorkGroup\"}"); + taskInstance.setProcessInstancePriority(Priority.MEDIUM); + taskInstance.setWorkerGroup("NoWorkGroup"); + taskInstance.setExecutorId(2); + + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setTenantId(1); + processInstance.setCommandType(CommandType.START_PROCESS); + taskInstance.setProcessInstance(processInstance); + taskInstance.setState(ExecutionStatus.DELAY_EXECUTION); + ProcessDefinition processDefinition = new 
ProcessDefinition(); + processDefinition.setUserId(2); + processDefinition.setProjectId(1); + taskInstance.setProcessDefine(processDefinition); + + Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1); Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1); - taskPriorityQueueConsumer.taskInstanceIsFinalState(1); + TaskPriority taskPriority = new TaskPriority(2, 1, 2, 1, "NoWorkGroup"); + taskPriorityQueue.put(taskPriority); + + taskPriorityQueueConsumer.run(); + + TimeUnit.SECONDS.sleep(10); + Assert.assertNotEquals(-1,taskPriorityQueue.size()); + } @After @@ -263,6 +670,4 @@ public class TaskPriorityQueueConsumerTest { Stopper.stop(); } - - } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java index 9e41cd68bf..1c9f4922a7 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/assign/RoundRobinSelectorTest.java @@ -23,7 +23,7 @@ import org.junit.Assert; import org.junit.Test; import java.util.Arrays; -import java.util.Collections; +import java.util.List; /** * round robin selector @@ -39,42 +39,59 @@ public class RoundRobinSelectorTest { @Test public void testSelect1() { RoundRobinSelector selector = new RoundRobinSelector(); + // dismiss of server warm-up time + long startTime = System.currentTimeMillis() - 60 * 10 * 1000; + List hostOneList = Arrays.asList( + new Host("192.168.1.1", 80, 20, startTime, "kris"), + new Host("192.168.1.2", 80, 10, startTime, "kris")); + + List hostTwoList = Arrays.asList( + new Host("192.168.1.1", 80, 20, startTime, "kris"), + new Host("192.168.1.2", 80, 10, startTime, "kris"), + 
new Host("192.168.1.3", 80, 10, startTime, "kris")); + Host result; - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.2", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.1", result.getIp()); - // add new host - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.2", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + + // add new host + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), 
"kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.3", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.2", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"), - new Host("192.168.1.3", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostTwoList); Assert.assertEquals("192.168.1.3", result.getIp()); + // remove host3 - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.1", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, 
System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.2", result.getIp()); - result = selector.select(Arrays.asList(new Host("192.168.1.1", 80, 20, System.currentTimeMillis(), "kris"), new Host("192.168.1.2", 80, 10, System.currentTimeMillis(), "kris"))); + + result = selector.select(hostOneList); Assert.assertEquals("192.168.1.1", result.getIp()); } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java new file mode 100644 index 0000000000..76ffe7904a --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessorTest.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.master.processor; + +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; +import org.apache.dolphinscheduler.server.master.cache.impl.TaskInstanceCacheManagerImpl; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.net.InetSocketAddress; +import java.util.Date; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import io.netty.channel.Channel; + +/** + * task ack processor test + */ +@RunWith(PowerMockRunner.class) +@PrepareForTest({SpringApplicationContext.class, TaskResponseEvent.class}) +public class TaskAckProcessorTest { + + private TaskAckProcessor taskAckProcessor; + private TaskResponseService taskResponseService; + private TaskInstanceCacheManagerImpl taskInstanceCacheManager; + private ProcessService processService; + private TaskExecuteAckCommand taskExecuteAckCommand; + private TaskResponseEvent taskResponseEvent; + private Channel channel; + + @Before + public void before() { + PowerMockito.mockStatic(SpringApplicationContext.class); + + taskResponseService = PowerMockito.mock(TaskResponseService.class); + PowerMockito.when(SpringApplicationContext.getBean(TaskResponseService.class)).thenReturn(taskResponseService); + + taskInstanceCacheManager = 
PowerMockito.mock(TaskInstanceCacheManagerImpl.class); + PowerMockito.when(SpringApplicationContext.getBean(TaskInstanceCacheManagerImpl.class)).thenReturn(taskInstanceCacheManager); + + processService = PowerMockito.mock(ProcessService.class); + PowerMockito.when(SpringApplicationContext.getBean(ProcessService.class)).thenReturn(processService); + + taskAckProcessor = new TaskAckProcessor(); + + channel = PowerMockito.mock(Channel.class); + taskResponseEvent = PowerMockito.mock(TaskResponseEvent.class); + + taskExecuteAckCommand = new TaskExecuteAckCommand(); + taskExecuteAckCommand.setStatus(1); + taskExecuteAckCommand.setExecutePath("/dolphinscheduler/worker"); + taskExecuteAckCommand.setHost("localhost"); + taskExecuteAckCommand.setLogPath("/temp/worker.log"); + taskExecuteAckCommand.setStartTime(new Date()); + taskExecuteAckCommand.setTaskInstanceId(1); + } + + @Test + public void testProcess() { +// Command command = taskExecuteAckCommand.convert2Command(); +// Assert.assertEquals(CommandType.TASK_EXECUTE_ACK,command.getType()); +// InetSocketAddress socketAddress = new InetSocketAddress("localhost",12345); +// PowerMockito.when(channel.remoteAddress()).thenReturn(socketAddress); +// PowerMockito.mockStatic(TaskResponseEvent.class); +// +// PowerMockito.when(TaskResponseEvent.newAck(Mockito.any(), Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.anyInt(), channel)) +// .thenReturn(taskResponseEvent); +// TaskInstance taskInstance = PowerMockito.mock(TaskInstance.class); +// PowerMockito.when(processService.findTaskInstanceById(Mockito.any())).thenReturn(taskInstance); +// +// taskAckProcessor.process(channel,command); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java new file mode 100644 index 
0000000000..c7f047569e --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessorTest.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; + +import java.util.ArrayList; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.powermock.api.mockito.PowerMockito; + +import io.netty.channel.Channel; + +/** + * task response processor test + */ +public class TaskKillResponseProcessorTest { + + private TaskKillResponseProcessor taskKillResponseProcessor; + + private TaskKillResponseCommand taskKillResponseCommand; + + private Channel channel; + + @Before + public void before() { + taskKillResponseProcessor = new TaskKillResponseProcessor(); + channel = PowerMockito.mock(Channel.class); + taskKillResponseCommand = new TaskKillResponseCommand(); + taskKillResponseCommand.setAppIds( + new ArrayList() {{ add("task_1"); }}); + 
taskKillResponseCommand.setHost("localhost"); + taskKillResponseCommand.setProcessId(1); + taskKillResponseCommand.setStatus(1); + taskKillResponseCommand.setTaskInstanceId(1); + + } + + @Test + public void testProcess() { + Command command = taskKillResponseCommand.convert2Command(); + Assert.assertEquals(CommandType.TASK_KILL_RESPONSE,command.getType()); + taskKillResponseProcessor.process(channel,command); + } +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java index a2b1b4ecc2..5d10f849c5 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java @@ -14,55 +14,80 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.server.master.processor.queue; +package org.apache.dolphinscheduler.server.master.processor.queue; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.server.registry.DependencyConfig; -import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; -import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; -import org.apache.dolphinscheduler.server.zk.SpringZKServer; -import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient; -import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; -import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; -import org.junit.Assert; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.util.Date; + +import org.junit.After; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; -import java.util.Date; +import io.netty.channel.Channel; -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, TaskResponseService.class, ZookeeperRegistryCenter.class, - ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class, TaskResponseService.class, - CuratorZookeeperClient.class}) +@RunWith(MockitoJUnitRunner.class) public class TaskResponseServiceTest { - @Autowired - private TaskResponseService taskResponseService; + @Mock(name = "processService") + private ProcessService processService; - @Test - public void testAdd(){ - TaskResponseEvent taskResponseEvent = 
TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXECUTION, new Date(), - "", "", "", 1); - taskResponseService.addResponse(taskResponseEvent); - Assert.assertTrue(taskResponseService.getEventQueue().size() == 1); - try { - Thread.sleep(10); - } catch (InterruptedException ignore) { - } - //after sleep, inner worker will take the event - Assert.assertTrue(taskResponseService.getEventQueue().size() == 0); + @InjectMocks + TaskResponseService taskRspService; + + @Mock + private Channel channel; + + private TaskResponseEvent ackEvent; + + private TaskResponseEvent resultEvent; + + private TaskInstance taskInstance; + + @Before + public void before() { + taskRspService.start(); + + ackEvent = TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXECUTION, + new Date(), + "127.*.*.*", + "path", + "logPath", + 22, + channel); + + resultEvent = TaskResponseEvent.newResult(ExecutionStatus.SUCCESS, + new Date(), + 1, + "ids", + 22, + "varPol", + channel); + + taskInstance = new TaskInstance(); + taskInstance.setId(22); + taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION); } @Test - public void testStop(){ - TaskResponseEvent taskResponseEvent = TaskResponseEvent.newAck(ExecutionStatus.RUNNING_EXECUTION, new Date(), - "", "", "", 1); - taskResponseService.addResponse(taskResponseEvent); - taskResponseService.stop(); - Assert.assertTrue(taskResponseService.getEventQueue().size() == 0); + public void testAddResponse() { + Mockito.when(processService.findTaskInstanceById(Mockito.any())).thenReturn(taskInstance); + Mockito.when(channel.writeAndFlush(Mockito.any())).thenReturn(null); + taskRspService.addResponse(ackEvent); + taskRspService.addResponse(resultEvent); + } + + @After + public void after() { + taskRspService.stop(); } + } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThreadTest.java 
index f6fdfaab63..405ad435cc 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThreadTest.java @@ -57,7 +57,7 @@ public class MasterTaskExecThreadTest { this.zookeeperRegistryCenter = PowerMockito.mock(ZookeeperRegistryCenter.class); PowerMockito.when(SpringApplicationContext.getBean(ZookeeperRegistryCenter.class)) .thenReturn(this.zookeeperRegistryCenter); - this.masterTaskExecThread = new MasterTaskExecThread(null); + this.masterTaskExecThread = new MasterTaskExecThread(getTaskInstance()); } @Test @@ -114,6 +114,7 @@ public class MasterTaskExecThreadTest { taskInstance.setTaskType("SHELL"); taskInstance.setId(252612); taskInstance.setName("C"); + taskInstance.setTaskJson("{}"); taskInstance.setProcessInstanceId(10111); taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS); return taskInstance; diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java index a315232796..bea6775e5c 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtilsTest.java @@ -46,9 +46,9 @@ public class FlinkArgsUtilsTest { public ProgramType programType = ProgramType.JAVA; public String mainClass = "com.test"; public ResourceInfo mainJar = null; - public String mainArgs = "testArgs"; + public String mainArgs = "testArgs --input file:///home"; public String queue = "queue1"; - public String others = "--input file:///home"; + public String others = "-p 4"; public String flinkVersion = "<1.10"; @@ -109,20 +109,20 @@ public class FlinkArgsUtilsTest { assertEquals("-ytm", result.get(10)); 
assertEquals(result.get(11),taskManagerMemory); - assertEquals("-d", result.get(12)); + assertEquals("-yqu", result.get(12)); + assertEquals(result.get(13),queue); - assertEquals("-c", result.get(13)); - assertEquals(result.get(14),mainClass); + assertEquals("-d", result.get(14)); - assertEquals(result.get(15),mainJar.getRes()); - assertEquals(result.get(16),mainArgs); + assertEquals(result.get(15),others); - assertEquals("--qu", result.get(17)); - assertEquals(result.get(18),queue); + assertEquals("-c", result.get(16)); + assertEquals(result.get(17),mainClass); - assertEquals(result.get(19),others); + assertEquals(result.get(18),mainJar.getRes()); + assertEquals(result.get(19),mainArgs); - //Others param without --qu + //Others param without -yqu FlinkParameters param1 = new FlinkParameters(); param1.setQueue(queue); param1.setDeployMode(mode); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java index ace5cd8471..4d1ed27619 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/utils/ProcessUtilsTest.java @@ -14,34 +14,126 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.utils; +import static org.powermock.api.mockito.PowerMockito.when; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.utils.HadoopUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; + +import java.util.ArrayList; +import java.util.List; + import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.MockitoAnnotations; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - +@RunWith(PowerMockRunner.class) +@PrepareForTest({System.class, OSUtils.class, HadoopUtils.class}) public class ProcessUtilsTest { - private static final Logger logger = LoggerFactory.getLogger(ProcessUtilsTest.class); + private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } @Test public void getPidsStr() throws Exception { - String pidList = ProcessUtils.getPidsStr(1); + int processId = 1; + String pidList = ProcessUtils.getPidsStr(processId); Assert.assertNotEquals("The child process of process 1 should not be empty", pidList, ""); - logger.info("Sub process list : {}", pidList); + + PowerMockito.mockStatic(OSUtils.class); + when(OSUtils.isMacOS()).thenReturn(true); + when(OSUtils.exeCmd(String.format("%s -p %d", Constants.PSTREE, processId))).thenReturn(null); + String pidListMac = ProcessUtils.getPidsStr(processId); + Assert.assertEquals("", pidListMac); } @Test public void testBuildCommandStr() { List commands = new ArrayList<>(); 
commands.add("sudo"); - Assert.assertEquals(ProcessUtils.buildCommandStr(commands), "sudo"); + commands.add("-u"); + commands.add("tenantCode"); + //allowAmbiguousCommands false + Assert.assertEquals("sudo -u tenantCode", ProcessUtils.buildCommandStr(commands)); + //quota + commands.clear(); + commands.add("\"sudo\""); + Assert.assertEquals("\"sudo\"", ProcessUtils.buildCommandStr(commands)); + + //allowAmbiguousCommands true + commands.clear(); + commands.add("sudo"); + System.setProperty("jdk.lang.Process.allowAmbiguousCommands", "false"); + Assert.assertEquals("\"sudo\"", ProcessUtils.buildCommandStr(commands)); + } + + @Test + public void testKill() { + //get taskExecutionContext + TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); + + //process id eq 0 + taskExecutionContext.setProcessId(0); + ProcessUtils.kill(taskExecutionContext); + + //process id not eq 0 + taskExecutionContext.setProcessId(1); + PowerMockito.mockStatic(OSUtils.class); + try { + when(OSUtils.exeCmd(String.format("%s -sp %d", Constants.PSTREE, 1))).thenReturn("1111"); + when(OSUtils.exeCmd(String.format("%s -p %d", Constants.PSTREE, 1))).thenReturn("1111"); + when(OSUtils.exeCmd("sudo kill -9")).thenReturn("1111"); + } catch (Exception e) { + e.printStackTrace(); + } + taskExecutionContext.setHost("127.0.0.1:8888"); + taskExecutionContext.setLogPath("/log/1.log"); + ProcessUtils.kill(taskExecutionContext); + Assert.assertEquals(1, taskExecutionContext.getProcessId()); } + @Test + public void testCancelApplication() { + List appIds = new ArrayList<>(); + appIds.add("application_1585532379175_228491"); + appIds.add("application_1598885606600_3677"); + String tenantCode = "dev"; + String executePath = "/ds-exec/1/1/1"; + ExecutionStatus running = ExecutionStatus.RUNNING_EXECUTION; + + PowerMockito.mockStatic(HadoopUtils.class); + HadoopUtils hadoop = HadoopUtils.getInstance(); + + try { + PowerMockito.whenNew(HadoopUtils.class).withAnyArguments().thenReturn(hadoop); + } 
catch (Exception e) { + e.printStackTrace(); + } + try { + when(hadoop.getApplicationStatus("application_1585532379175_228491")).thenReturn(running); + when(hadoop.getApplicationStatus("application_1598885606600_3677")).thenReturn(running); + } catch (Exception e) { + e.printStackTrace(); + ProcessUtils.cancelApplication(appIds, logger, tenantCode, executePath); + } + + Assert.assertNotNull(appIds); + } } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManagerTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManagerTest.java new file mode 100644 index 0000000000..d871257e82 --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManagerTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.cache; + +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + * TaskExecutionContextCacheManagerTest + */ +public class TaskExecutionContextCacheManagerTest { + + private TaskExecutionContextCacheManager taskExecutionContextCacheManager; + private TaskExecutionContext taskExecutionContext; + + @Before + public void before() { + taskExecutionContextCacheManager = new TaskExecutionContextCacheManagerImpl(); + } + + @Test + public void testGetByTaskInstanceId() { + taskExecutionContext = new TaskExecutionContext(); + taskExecutionContext.setTaskInstanceId(2); + taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext); + Assert.assertEquals(2, taskExecutionContextCacheManager.getByTaskInstanceId(2).getTaskInstanceId()); + } + + @Test + public void updateTaskExecutionContext() { + taskExecutionContext = new TaskExecutionContext(); + taskExecutionContext.setTaskInstanceId(1); + taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext); + Assert.assertTrue(taskExecutionContextCacheManager.updateTaskExecutionContext(taskExecutionContext)); + taskExecutionContextCacheManager.removeByTaskInstanceId(1); + Assert.assertFalse(taskExecutionContextCacheManager.updateTaskExecutionContext(taskExecutionContext)); + } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java index 8d58459e85..5a5561d1bd 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java +++ 
b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java @@ -14,9 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.worker.processor; -import java.util.Date; +package org.apache.dolphinscheduler.server.worker.processor; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.utils.JSONUtils; @@ -37,6 +36,7 @@ import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseSer import org.apache.dolphinscheduler.server.master.registry.MasterRegistry; import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager; import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry; import org.apache.dolphinscheduler.server.zk.SpringZKServer; @@ -44,10 +44,12 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient; import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator; import org.apache.dolphinscheduler.service.zk.ZookeeperConfig; + +import java.util.Date; + import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; @@ -56,26 +58,28 @@ import io.netty.channel.Channel; /** * test task call back service + * todo refactor it in the form of mock */ @RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration(classes={ - 
TaskCallbackServiceTestConfig.class, - SpringZKServer.class, - SpringApplicationContext.class, - MasterRegistry.class, - WorkerRegistry.class, - ZookeeperRegistryCenter.class, - MasterConfig.class, - WorkerConfig.class, - ZookeeperCachedOperator.class, - ZookeeperConfig.class, - ZookeeperNodeManager.class, - TaskCallbackService.class, - TaskResponseService.class, - TaskAckProcessor.class, - TaskResponseProcessor.class, - TaskExecuteProcessor.class, - CuratorZookeeperClient.class}) +@ContextConfiguration(classes = { + TaskCallbackServiceTestConfig.class, + SpringZKServer.class, + SpringApplicationContext.class, + MasterRegistry.class, + WorkerRegistry.class, + ZookeeperRegistryCenter.class, + MasterConfig.class, + WorkerConfig.class, + ZookeeperCachedOperator.class, + ZookeeperConfig.class, + ZookeeperNodeManager.class, + TaskCallbackService.class, + TaskResponseService.class, + TaskAckProcessor.class, + TaskResponseProcessor.class, + TaskExecuteProcessor.class, + CuratorZookeeperClient.class, + TaskExecutionContextCacheManagerImpl.class}) public class TaskCallbackServiceTest { @Autowired @@ -95,10 +99,11 @@ public class TaskCallbackServiceTest { /** * send ack test + * * @throws Exception */ @Test - public void testSendAck() throws Exception{ + public void testSendAck() throws Exception { final NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(30000); NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); @@ -122,10 +127,11 @@ public class TaskCallbackServiceTest { /** * send result test + * * @throws Exception */ @Test - public void testSendResult() throws Exception{ + public void testSendResult() throws Exception { final NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(30000); NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); @@ -136,7 +142,7 @@ public class TaskCallbackServiceTest { NettyRemotingClient nettyRemotingClient = new 
NettyRemotingClient(clientConfig); Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); - TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(); + TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand(); responseCommand.setTaskInstanceId(1); responseCommand.setEndTime(new Date()); @@ -152,20 +158,13 @@ public class TaskCallbackServiceTest { nettyRemotingClient.close(); } - @Test(expected = IllegalArgumentException.class) - public void testSendAckWithIllegalArgumentException(){ - TaskExecuteAckCommand ackCommand = Mockito.mock(TaskExecuteAckCommand.class); - taskCallbackService.sendAck(1, ackCommand.convert2Command()); - Stopper.stop(); - } - @Test - public void testPause(){ - Assert.assertEquals(5000, taskCallbackService.pause(3));; + public void testPause() { + Assert.assertEquals(5000, taskCallbackService.pause(3)); } @Test - public void testSendAck1(){ + public void testSendAck1() { masterRegistry.registry(); final NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(30000); @@ -177,7 +176,7 @@ public class TaskCallbackServiceTest { NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); -// channel.close(); + // channel.close(); TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); ackCommand.setTaskInstanceId(1); @@ -185,7 +184,7 @@ public class TaskCallbackServiceTest { taskCallbackService.sendAck(1, ackCommand.convert2Command()); - Assert.assertEquals(true, channel.isOpen()); + Assert.assertTrue(channel.isOpen()); Stopper.stop(); @@ -195,7 +194,7 @@ public class TaskCallbackServiceTest { } @Test - public void testTaskExecuteProcessor() throws Exception{ + public void testTaskExecuteProcessor() 
throws Exception { final NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(30000); NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); @@ -207,11 +206,11 @@ public class TaskCallbackServiceTest { TaskExecuteRequestCommand taskExecuteRequestCommand = new TaskExecuteRequestCommand(); - nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command()); + nettyRemotingClient.send(new Host("localhost", 30000), taskExecuteRequestCommand.convert2Command()); taskExecuteRequestCommand.setTaskExecutionContext(JSONUtils.toJsonString(new TaskExecutionContext())); - nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command()); + nettyRemotingClient.send(new Host("localhost", 30000), taskExecuteRequestCommand.convert2Command()); Thread.sleep(5000); @@ -223,40 +222,4 @@ public class TaskCallbackServiceTest { nettyRemotingClient.close(); } -// @Test(expected = IllegalStateException.class) -// public void testSendAckWithIllegalStateException2(){ -// masterRegistry.registry(); -// final NettyServerConfig serverConfig = new NettyServerConfig(); -// serverConfig.setListenPort(30000); -// NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig); -// nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor); -// nettyRemotingServer.start(); -// -// final NettyClientConfig clientConfig = new NettyClientConfig(); -// NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig); -// Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000")); -// taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1)); -// channel.close(); -// TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand(); -// ackCommand.setTaskInstanceId(1); -// ackCommand.setStartTime(new Date()); -// -// nettyRemotingServer.close(); -// -// taskCallbackService.sendAck(1, 
ackCommand.convert2Command()); -// try { -// Thread.sleep(5000); -// } catch (InterruptedException e) { -// e.printStackTrace(); -// } -// -// Stopper.stop(); -// -// try { -// Thread.sleep(5000); -// } catch (InterruptedException e) { -// e.printStackTrace(); -// } -// } - } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessorTest.java new file mode 100644 index 0000000000..36a758ab1f --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessorTest.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.server.worker.processor; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl; +import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.log.LogClientService; + +import java.util.Collections; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import io.netty.channel.Channel; + +/** + * TaskKillProcessorTest + */ +@RunWith(PowerMockRunner.class) +@PrepareForTest({SpringApplicationContext.class, TaskKillProcessor.class, OSUtils.class, ProcessUtils.class, LoggerUtils.class}) +public class TaskKillProcessorTest { + + private TaskKillProcessor taskKillProcessor; + + private TaskExecutionContextCacheManagerImpl taskExecutionContextCacheManager; + + private Channel channel; + + private Command command; + + private TaskExecutionContext taskExecutionContext; + + @Before + public void before() throws Exception { + + TaskCallbackService taskCallbackService = PowerMockito.mock(TaskCallbackService.class); + WorkerConfig workerConfig = 
PowerMockito.mock(WorkerConfig.class); + taskExecutionContextCacheManager = PowerMockito.mock(TaskExecutionContextCacheManagerImpl.class); + + channel = PowerMockito.mock(Channel.class); + command = new Command(); + command.setType(CommandType.TASK_KILL_REQUEST); + TaskKillRequestCommand taskKillRequestCommand = new TaskKillRequestCommand(); + taskKillRequestCommand.setTaskInstanceId(1); + command.setBody(JSONUtils.toJsonString(taskKillRequestCommand).getBytes()); + taskExecutionContext = new TaskExecutionContext(); + taskExecutionContext.setTaskInstanceId(1); + LogClientService logClient = PowerMockito.mock(LogClientService.class); + + NettyRemoteChannel nettyRemoteChannel = PowerMockito.mock(NettyRemoteChannel.class); + PowerMockito.mockStatic(SpringApplicationContext.class); + PowerMockito.mockStatic(OSUtils.class); + PowerMockito.mockStatic(ProcessUtils.class); + PowerMockito.mockStatic(LoggerUtils.class); + PowerMockito.when(SpringApplicationContext.getBean(TaskCallbackService.class)).thenReturn(taskCallbackService); + PowerMockito.when(SpringApplicationContext.getBean(WorkerConfig.class)).thenReturn(workerConfig); + PowerMockito.when(SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class)).thenReturn(taskExecutionContextCacheManager); + PowerMockito.doNothing().when(taskCallbackService).addRemoteChannel(anyInt(), any()); + PowerMockito.whenNew(NettyRemoteChannel.class).withAnyArguments().thenReturn(null); + PowerMockito.when(OSUtils.exeCmd(any())).thenReturn(null); + PowerMockito.when(ProcessUtils.getPidsStr(102)).thenReturn("123"); + PowerMockito.whenNew(LogClientService.class).withAnyArguments().thenReturn(logClient); + PowerMockito.when(logClient.viewLog(any(), anyInt(), any())).thenReturn("test"); + PowerMockito.when(LoggerUtils.getAppIds(any(), any())).thenReturn(Collections.singletonList("id")); + + Command viewLogResponseCommand = new Command(); + viewLogResponseCommand.setBody("success".getBytes()); + + taskKillProcessor = new 
TaskKillProcessor(); + } + + @Test + public void testProcess() { + + PowerMockito.when(taskExecutionContextCacheManager.getByTaskInstanceId(1)).thenReturn(taskExecutionContext); + taskKillProcessor.process(channel, command); + + taskExecutionContext.setProcessId(101); + taskExecutionContext.setHost("127.0.0.1:22"); + taskExecutionContext.setLogPath("/log"); + taskExecutionContext.setExecutePath("/path"); + taskExecutionContext.setTenantCode("ten"); + taskKillProcessor.process(channel, command); + } + +} diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java index 27c10db13d..9b757d4c2b 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java @@ -23,6 +23,7 @@ import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand; @@ -34,7 +35,9 @@ import org.apache.dolphinscheduler.server.worker.task.TaskManager; import org.apache.dolphinscheduler.service.alert.AlertClientService; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import java.util.ArrayList; import java.util.Date; +import java.util.List; import org.junit.Assert; import org.junit.Before; @@ -50,7 +53,7 @@ import org.slf4j.LoggerFactory; * test task execute thread. 
*/ @RunWith(PowerMockRunner.class) -@PrepareForTest({TaskManager.class, JSONUtils.class, CommonUtils.class, SpringApplicationContext.class}) +@PrepareForTest({TaskManager.class, JSONUtils.class, CommonUtils.class, SpringApplicationContext.class, OSUtils.class}) public class TaskExecuteThreadTest { private TaskExecutionContext taskExecutionContext; @@ -115,6 +118,12 @@ public class TaskExecuteThreadTest { PowerMockito.mockStatic(CommonUtils.class); PowerMockito.when(CommonUtils.getSystemEnvPath()).thenReturn("/user_home/.bash_profile"); + + List osUserList = new ArrayList() {{ + add("test"); + }}; + PowerMockito.mockStatic(OSUtils.class); + PowerMockito.when(OSUtils.getUserList()).thenReturn(osUserList); } @Test @@ -122,10 +131,14 @@ public class TaskExecuteThreadTest { taskExecutionContext.setTaskType("SQL"); taskExecutionContext.setStartTime(new Date()); taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION); + taskExecutionContext.setTenantCode("test"); TaskExecuteThread taskExecuteThread = new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger, alertClientService); + taskExecuteThread.run(); + taskExecutionContext.getCurrentExecutionStatus(); + taskExecuteThread.run(); - Assert.assertEquals(ExecutionStatus.SUCCESS, taskExecutionContext.getCurrentExecutionStatus()); + Assert.assertEquals(ExecutionStatus.RUNNING_EXECUTION, taskExecutionContext.getCurrentExecutionStatus()); } @Test @@ -134,10 +147,11 @@ public class TaskExecuteThreadTest { taskExecutionContext.setStartTime(null); taskExecutionContext.setDelayTime(1); taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION); + taskExecutionContext.setTenantCode("test"); TaskExecuteThread taskExecuteThread = new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger, alertClientService); taskExecuteThread.run(); - Assert.assertEquals(ExecutionStatus.SUCCESS, taskExecutionContext.getCurrentExecutionStatus()); + 
Assert.assertEquals(ExecutionStatus.RUNNING_EXECUTION, taskExecutionContext.getCurrentExecutionStatus()); } private class SimpleTask extends AbstractTask { diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java index e5177aa786..8d03c1460a 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java @@ -14,19 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.server.worker.task.datax; +package org.apache.dolphinscheduler.server.worker.task.datax; -import java.lang.reflect.Method; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.UUID; +import static org.apache.dolphinscheduler.common.enums.CommandType.START_PROCESS; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; @@ -39,6 +33,13 @@ import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import 
java.util.UUID; + import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -49,7 +50,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; -import static org.apache.dolphinscheduler.common.enums.CommandType.START_PROCESS; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; /** * DataxTask Tester. @@ -58,7 +60,13 @@ public class DataxTaskTest { private static final Logger logger = LoggerFactory.getLogger(DataxTaskTest.class); - private static final String CONNECTION_PARAMS = "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"; + private static final String CONNECTION_PARAMS = " {\n" + + " \"user\":\"root\",\n" + + " \"password\":\"123456\",\n" + + " \"address\":\"jdbc:mysql://127.0.0.1:3306\",\n" + + " \"database\":\"test\",\n" + + " \"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"\n" + + "}"; private DataxTask dataxTask; @@ -69,7 +77,7 @@ public class DataxTaskTest { private ApplicationContext applicationContext; private TaskExecutionContext taskExecutionContext; - private TaskProps props = new TaskProps(); + private final TaskProps props = new TaskProps(); @Before public void before() @@ -97,12 +105,40 @@ public class DataxTaskTest { props.setTaskTimeout(0); if (customConfig == 1) { props.setTaskParams( - "{\"customConfig\":1, \"localParams\":[{\"prop\":\"test\",\"value\":\"38294729\"}],\"json\":\"{\\\"job\\\":{\\\"setting\\\":{\\\"speed\\\":{\\\"byte\\\":1048576},\\\"errorLimit\\\":{\\\"record\\\":0,\\\"percentage\\\":0.02}},\\\"content\\\":[{\\\"reader\\\":{\\\"name\\\":\\\"rdbmsreader\\\",\\\"parameter\\\":{\\\"username\\\":\\\"xxx\\\",\\\"password\\\":\\\"${test}\\\",\\\"column\\\":[\\\"id\\\",\\\"name\\\"],\\\"splitPk\\\":\\\"pk\\\",\\\"connection\\\":[{\\\"querySql\\\":[\\\"SELECT * from 
dual\\\"],\\\"jdbcUrl\\\":[\\\"jdbc:dm://ip:port/database\\\"]}],\\\"fetchSize\\\":1024,\\\"where\\\":\\\"1 = 1\\\"}},\\\"writer\\\":{\\\"name\\\":\\\"streamwriter\\\",\\\"parameter\\\":{\\\"print\\\":true}}}]}}\"}"); + "{\n" + + " \"customConfig\":1,\n" + + " \"localParams\":[\n" + + " {\n" + + " \"prop\":\"test\",\n" + + " \"value\":\"38294729\"\n" + + " }\n" + + " ],\n" + + " \"json\":\"" + + "{\"job\":{\"setting\":{\"speed\":{\"byte\":1048576},\"errorLimit\":{\"record\":0,\"percentage\":0.02}},\"content\":[" + + "{\"reader\":{\"name\":\"rdbmsreader\",\"parameter\":{\"username\":\"xxx\",\"password\":\"${test}\",\"column\":[\"id\",\"name\"],\"splitPk\":\"pk\",\"" + + "connection\":[{\"querySql\":[\"SELECT * from dual\"],\"jdbcUrl\":[\"jdbc:dm://ip:port/database\"]}],\"fetchSize\":1024,\"where\":\"1 = 1\"}},\"" + + "writer\":{\"name\":\"streamwriter\",\"parameter\":{\"print\":true}}}]}}\"\n" + + "}"); -// "{\"customConfig\":1,\"json\":\"{\\\"job\\\":{\\\"setting\\\":{\\\"speed\\\":{\\\"byte\\\":1048576},\\\"errorLimit\\\":{\\\"record\\\":0,\\\"percentage\\\":0.02}},\\\"content\\\":[{\\\"reader\\\":{\\\"name\\\":\\\"rdbmsreader\\\",\\\"parameter\\\":{\\\"username\\\":\\\"xxx\\\",\\\"password\\\":\\\"xxx\\\",\\\"column\\\":[\\\"id\\\",\\\"name\\\"],\\\"splitPk\\\":\\\"pk\\\",\\\"connection\\\":[{\\\"querySql\\\":[\\\"SELECT * from dual\\\"],\\\"jdbcUrl\\\":[\\\"jdbc:dm://ip:port/database\\\"]}],\\\"fetchSize\\\":1024,\\\"where\\\":\\\"1 = 1\\\"}},\\\"writer\\\":{\\\"name\\\":\\\"streamwriter\\\",\\\"parameter\\\":{\\\"print\\\":true}}}]}}\"}"); } else { props.setTaskParams( - "{\"customConfig\":0,\"targetTable\":\"test\",\"postStatements\":[],\"jobSpeedByte\":1024,\"jobSpeedRecord\":1000,\"dtType\":\"MYSQL\",\"dataSource\":1,\"dsType\":\"MYSQL\",\"dataTarget\":2,\"jobSpeedByte\":0,\"sql\":\"select 1 as test from dual\",\"preStatements\":[\"delete from test\"],\"postStatements\":[\"delete from test\"]}"); + "{\n" + + " \"customConfig\":0,\n" + + " 
\"targetTable\":\"test\",\n" + + " \"postStatements\":[\n" + + " \"delete from test\"\n" + + " ],\n" + + " \"jobSpeedByte\":0,\n" + + " \"jobSpeedRecord\":1000,\n" + + " \"dtType\":\"MYSQL\",\n" + + " \"dataSource\":1,\n" + + " \"dsType\":\"MYSQL\",\n" + + " \"dataTarget\":2,\n" + + " \"sql\":\"select 1 as test from dual\",\n" + + " \"preStatements\":[\n" + + " \"delete from test\"\n" + + " ]\n" + + "}"); } taskExecutionContext = Mockito.mock(TaskExecutionContext.class); @@ -114,7 +150,6 @@ public class DataxTaskTest { Mockito.when(taskExecutionContext.getTaskTimeout()).thenReturn(10000); Mockito.when(taskExecutionContext.getLogPath()).thenReturn("/tmp/dx"); - DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); dataxTaskExecutionContext.setSourcetype(0); dataxTaskExecutionContext.setTargetType(0); @@ -126,7 +161,6 @@ public class DataxTaskTest { dataxTask.init(); props.setCmdTypeIfComplement(START_PROCESS); - Mockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); Mockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); @@ -138,7 +172,6 @@ public class DataxTaskTest { e.printStackTrace(); } - dataxTask = PowerMockito.spy(new DataxTask(taskExecutionContext, logger)); dataxTask.init(); } @@ -405,4 +438,23 @@ public class DataxTaskTest { } } + @Test + public void testLoadJvmEnv() { + DataxTask dataxTask = new DataxTask(null,null); + DataxParameters dataxParameters = new DataxParameters(); + dataxParameters.setXms(0); + dataxParameters.setXmx(-100); + + String actual = dataxTask.loadJvmEnv(dataxParameters); + + String except = " --jvm=\"-Xms1G -Xmx1G\" "; + Assert.assertEquals(except,actual); + + dataxParameters.setXms(13); + dataxParameters.setXmx(14); + actual = dataxTask.loadJvmEnv(dataxParameters); + except = " --jvm=\"-Xms13G -Xmx14G\" "; + Assert.assertEquals(except,actual); + + } } diff 
--git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java index c30f33c683..c5f2de82ea 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTaskTest.java @@ -14,227 +14,101 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.shell; -import org.apache.dolphinscheduler.common.enums.CommandType; -import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.dao.entity.DataSource; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import static org.mockito.ArgumentMatchers.anyString; + import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.server.worker.task.CommandExecuteResult; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; -import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.apache.dolphinscheduler.service.process.ProcessService; -import org.junit.*; + +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; import org.junit.runner.RunWith; import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; -import org.springframework.context.ApplicationContext; - -import java.util.Date; /** - * shell task test + * shell task test. */ @RunWith(PowerMockRunner.class) -@PrepareForTest(OSUtils.class) -@PowerMockIgnore({"javax.management.*"}) +@PrepareForTest({ShellTask.class}) public class ShellTaskTest { private static final Logger logger = LoggerFactory.getLogger(ShellTaskTest.class); private ShellTask shellTask; - - private ProcessService processService; - private ShellCommandExecutor shellCommandExecutor; - - private ApplicationContext applicationContext; private TaskExecutionContext taskExecutionContext; + private CommandExecuteResult commandExecuteResult; @Before public void before() throws Exception { - taskExecutionContext = new TaskExecutionContext(); - - PowerMockito.mockStatic(OSUtils.class); - processService = PowerMockito.mock(ProcessService.class); + System.setProperty("log4j2.disable.jmx", Boolean.TRUE.toString()); shellCommandExecutor = PowerMockito.mock(ShellCommandExecutor.class); - - applicationContext = PowerMockito.mock(ApplicationContext.class); - SpringApplicationContext springApplicationContext = new SpringApplicationContext(); - springApplicationContext.setApplicationContext(applicationContext); - PowerMockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); - - TaskProps props = new TaskProps(); - props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTenantCode("1"); - props.setEnvFile(".dolphinscheduler_env.sh"); - props.setTaskStartTime(new Date()); - props.setTaskTimeout(0); - props.setTaskParams("{\"rawScript\": \" echo 'hello world!'\"}"); - shellTask = new ShellTask(taskExecutionContext, logger); - shellTask.init(); - - PowerMockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); - PowerMockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); - 
PowerMockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); - - String fileName = String.format("%s/%s_node.%s", taskExecutionContext.getExecutePath(), - props.getTaskAppId(), OSUtils.isWindows() ? "bat" : "sh"); - PowerMockito.when(shellCommandExecutor.run("")).thenReturn(null); - } - - private DataSource getDataSource() { - DataSource dataSource = new DataSource(); - dataSource.setType(DbType.MYSQL); - dataSource.setConnectionParams( - "{\"user\":\"root\",\"password\":\"123456\",\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\"}"); - dataSource.setUserId(1); - return dataSource; - } - - private ProcessInstance getProcessInstance() { - ProcessInstance processInstance = new ProcessInstance(); - processInstance.setCommandType(CommandType.START_PROCESS); - processInstance.setScheduleTime(new Date()); - return processInstance; - } - - @After - public void after() {} - - /** - * Method: ShellTask() - */ - @Test - public void testShellTask() - throws Exception { - TaskProps props = new TaskProps(); - props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTenantCode("1"); - ShellTask shellTaskTest = new ShellTask(taskExecutionContext, logger); - Assert.assertNotNull(shellTaskTest); - } - - /** - * Method: init for Unix-like - */ - @Test - public void testInitForUnix() { - try { - PowerMockito.when(OSUtils.isWindows()).thenReturn(false); - shellTask.init(); - Assert.assertTrue(true); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - @Test - public void testInitException() { - TaskProps props = new TaskProps(); - props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTenantCode("1"); - props.setEnvFile(".dolphinscheduler_env.sh"); - props.setTaskStartTime(new Date()); - props.setTaskTimeout(0); - props.setTaskParams("{\"rawScript\": \"\"}"); - ShellTask shellTask = new ShellTask(taskExecutionContext, 
logger); - try { - shellTask.init(); - } catch (Exception e) { - logger.info(e.getMessage(), e); - if (e.getMessage().contains("shell task params is not valid")) { - Assert.assertTrue(true); - } - } - } - - /** - * Method: init for Windows - */ - @Test - public void testInitForWindows() { - try { - PowerMockito.when(OSUtils.isWindows()).thenReturn(true); - shellTask.init(); - Assert.assertTrue(true); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } - } - - /** - * Method: handle() for Unix-like - */ - @Test - public void testHandleForUnix() throws Exception { - try { - PowerMockito.when(OSUtils.isWindows()).thenReturn(false); - TaskProps props = new TaskProps(); - props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTenantCode("1"); - props.setEnvFile(".dolphinscheduler_env.sh"); - props.setTaskStartTime(new Date()); - props.setTaskTimeout(0); - props.setScheduleTime(new Date()); - props.setCmdTypeIfComplement(CommandType.START_PROCESS); - props.setTaskParams("{\"rawScript\": \" echo ${test}\", \"localParams\": [{\"prop\":\"test\", \"direct\":\"IN\", \"type\":\"VARCHAR\", \"value\":\"123\"}]}"); - ShellTask shellTask1 = new ShellTask(taskExecutionContext, logger); - shellTask1.init(); - shellTask1.handle(); - Assert.assertTrue(true); - } catch (Error | Exception e) { - if (!e.getMessage().contains("process error . 
exitCode is : -1") - && !System.getProperty("os.name").startsWith("Windows")) { - logger.error(e.getMessage()); - } - } + PowerMockito.whenNew(ShellCommandExecutor.class).withAnyArguments().thenReturn(shellCommandExecutor); + taskExecutionContext = new TaskExecutionContext(); + taskExecutionContext.setTaskInstanceId(1); + taskExecutionContext.setTaskName("kris test"); + taskExecutionContext.setTaskType("SHELL"); + taskExecutionContext.setHost("127.0.0.1:1234"); + taskExecutionContext.setExecutePath("/tmp"); + taskExecutionContext.setLogPath("/log"); + taskExecutionContext.setTaskJson( + "{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"" + + + "tasks-16849\",\"maxRetryTimes\":0,\"name\":\"shell test 001\",\"params\":\"{\\\"rawScript\\\":\\\"#!/bin/sh\\\\necho $[yyyy-MM-dd HH:mm:ss +3]\\\\necho \\\\\\\" ?? " + + "${time1} \\\\\\\"\\\\necho \\\\\\\" ????? ${time2}\\\\\\\"\\\\n\\\",\\\"localParams\\\":[{\\\"prop\\\":\\\"time1\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":" + + "\\\"VARCHAR\\\",\\\"value\\\":\\\"$[yyyy-MM-dd HH:mm:ss]\\\"},{\\\"prop\\\":\\\"time2\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"${time_gb}\\\"}" + + "],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":" + + "{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}"); + taskExecutionContext.setProcessInstanceId(1); + taskExecutionContext.setGlobalParams("[{\"direct\":\"IN\",\"prop\":\"time_gb\",\"type\":\"VARCHAR\",\"value\":\"2020-12-16 17:18:33\"}]"); + taskExecutionContext.setExecutorId(1); + taskExecutionContext.setCmdTypeIfComplement(5); + taskExecutionContext.setTenantCode("roo"); + taskExecutionContext.setScheduleTime(new Date()); + 
taskExecutionContext.setQueue("default"); + taskExecutionContext.setTaskParams( + "{\"rawScript\":\"#!/bin/sh\\necho $[yyyy-MM-dd HH:mm:ss +3]\\necho \\\" ?? ${time1} \\\"\\necho \\\" ????? ${time2}\\\"\\n\",\"localParams\":" + + + "[{\"prop\":\"time1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"$[yyyy-MM-dd HH:mm:ss]\"},{\"prop\":\"time2\",\"direct\":\"IN\",\"type\":\"VARCHAR" + + "\",\"value\":\"${time_gb}\"}],\"resourceList\":[]}"); + Map definedParams = new HashMap<>(); + definedParams.put("time_gb", "2020-12-16 00:00:00"); + taskExecutionContext.setDefinedParams(definedParams); + PowerMockito.mockStatic(Files.class); + PowerMockito.when(Files.exists(Paths.get(anyString()))).thenReturn(true); + commandExecuteResult = new CommandExecuteResult(); + commandExecuteResult.setAppIds("appId"); + commandExecuteResult.setExitStatusCode(0); + commandExecuteResult.setProcessId(1); } - /** - * Method: handle() for Windows - */ @Test - public void testHandleForWindows() throws Exception { - try { - Assume.assumeTrue(OSUtils.isWindows()); - TaskProps props = new TaskProps(); - props.setTaskAppId(String.valueOf(System.currentTimeMillis())); - props.setTenantCode("1"); - props.setEnvFile(".dolphinscheduler_env.sh"); - props.setTaskStartTime(new Date()); - props.setTaskTimeout(0); - props.setScheduleTime(new Date()); - props.setCmdTypeIfComplement(CommandType.START_PROCESS); - props.setTaskParams("{\"rawScript\": \" echo ${test}\", \"localParams\": [{\"prop\":\"test\", \"direct\":\"IN\", \"type\":\"VARCHAR\", \"value\":\"123\"}]}"); - ShellTask shellTask1 = new ShellTask(taskExecutionContext, logger); - shellTask1.init(); - shellTask1.handle(); - Assert.assertTrue(true); - } catch (Error | Exception e) { - if (!e.getMessage().contains("process error . 
exitCode is : -1")) { - logger.error(e.getMessage()); - } - } + public void testComplementData() throws Exception { + shellTask = new ShellTask(taskExecutionContext, logger); + shellTask.init(); + PowerMockito.when(shellCommandExecutor.run(anyString())).thenReturn(commandExecuteResult); + shellTask.handle(); } - /** - * Method: cancelApplication() - */ @Test - public void testCancelApplication() throws Exception { - try { - shellTask.cancelApplication(true); - Assert.assertTrue(true); - } catch (Error | Exception e) { - logger.error(e.getMessage()); - } + public void testStartProcess() throws Exception { + taskExecutionContext.setCmdTypeIfComplement(0); + shellTask = new ShellTask(taskExecutionContext, logger); + shellTask.init(); + PowerMockito.when(shellCommandExecutor.run(anyString())).thenReturn(commandExecuteResult); + shellTask.handle(); } - } diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java index 521650d3b6..2a3606dddf 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/spark/SparkTaskTest.java @@ -14,128 +14,121 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.server.worker.task.spark; -import org.apache.dolphinscheduler.common.enums.SparkVersion; -import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.task.spark.SparkParameters; -import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.server.utils.ParamUtils; -import org.apache.dolphinscheduler.server.utils.SparkArgsUtils; -import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils; +import org.apache.dolphinscheduler.common.utils.placeholder.PropertyPlaceholderHelper; +import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.util.Date; + import org.junit.Assert; +import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - - +@RunWith(PowerMockRunner.class) +@PrepareForTest({ParameterUtils.class, PlaceholderUtils.class, PropertyPlaceholderHelper.class}) public class SparkTaskTest { private static final Logger logger = LoggerFactory.getLogger(SparkTaskTest.class); - /** - * spark1 command - */ - private static final String SPARK1_COMMAND = "${SPARK_HOME1}/bin/spark-submit"; - - /** - * spark2 command - */ - private static final String SPARK2_COMMAND = "${SPARK_HOME2}/bin/spark-submit"; + private TaskExecutionContext 
taskExecutionContext; + + private ApplicationContext applicationContext; + + private ProcessService processService; + + private SparkTask spark2Task; + + String spark1Params = "{" + + "\"mainArgs\":\"\", " + + "\"driverMemory\":\"1G\", " + + "\"executorMemory\":\"2G\", " + + "\"programType\":\"SCALA\", " + + "\"mainClass\":\"basicetl.GlobalUserCar\", " + + "\"driverCores\":\"2\", " + + "\"deployMode\":\"cluster\", " + + "\"executorCores\":2, " + + "\"mainJar\":{\"res\":\"test-1.0-SNAPSHOT.jar\"}, " + + "\"sparkVersion\":\"SPARK1\", " + + "\"numExecutors\":\"10\", " + + "\"localParams\":[], " + + "\"others\":\"\", " + + "\"resourceList\":[]" + + "}"; + + String spark2Params = "{" + + "\"mainArgs\":\"\", " + + "\"driverMemory\":\"1G\", " + + "\"executorMemory\":\"2G\", " + + "\"programType\":\"SCALA\", " + + "\"mainClass\":\"basicetl.GlobalUserCar\", " + + "\"driverCores\":\"2\", " + + "\"deployMode\":\"cluster\", " + + "\"executorCores\":2, " + + "\"mainJar\":{\"res\":\"test-1.0-SNAPSHOT.jar\"}, " + + "\"sparkVersion\":\"SPARK2\", " + + "\"numExecutors\":\"10\", " + + "\"localParams\":[], " + + "\"others\":\"\", " + + "\"resourceList\":[]" + + "}"; + + @Before + public void setTaskExecutionContext() { + taskExecutionContext = new TaskExecutionContext(); + taskExecutionContext.setTaskParams(spark2Params); + taskExecutionContext.setQueue("dev"); + taskExecutionContext.setTaskAppId(String.valueOf(System.currentTimeMillis())); + taskExecutionContext.setTenantCode("1"); + taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh"); + taskExecutionContext.setStartTime(new Date()); + taskExecutionContext.setTaskTimeout(0); + + processService = Mockito.mock(ProcessService.class); + applicationContext = Mockito.mock(ApplicationContext.class); + SpringApplicationContext springApplicationContext = new SpringApplicationContext(); + springApplicationContext.setApplicationContext(applicationContext); + 
Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); + + spark2Task = new SparkTask(taskExecutionContext, logger); + spark2Task.init(); + } @Test public void testSparkTaskInit() { - TaskProps taskProps = new TaskProps(); - - String spark1Params = "{" + - "\"mainArgs\":\"\", " + - "\"driverMemory\":\"1G\", " + - "\"executorMemory\":\"2G\", " + - "\"programType\":\"SCALA\", " + - "\"mainClass\":\"basicetl.GlobalUserCar\", " + - "\"driverCores\":\"2\", " + - "\"deployMode\":\"cluster\", " + - "\"executorCores\":2, " + - "\"mainJar\":{\"res\":\"test-1.0-SNAPSHOT.jar\"}, " + - "\"sparkVersion\":\"SPARK1\", " + - "\"numExecutors\":\"10\", " + - "\"localParams\":[], " + - "\"others\":\"\", " + - "\"resourceList\":[]" + - "}"; - - String spark2Params = "{" + - "\"mainArgs\":\"\", " + - "\"driverMemory\":\"1G\", " + - "\"executorMemory\":\"2G\", " + - "\"programType\":\"SCALA\", " + - "\"mainClass\":\"basicetl.GlobalUserCar\", " + - "\"driverCores\":\"2\", " + - "\"deployMode\":\"cluster\", " + - "\"executorCores\":2, " + - "\"mainJar\":{\"res\":\"test-1.0-SNAPSHOT.jar\"}, " + - "\"sparkVersion\":\"SPARK2\", " + - "\"numExecutors\":\"10\", " + - "\"localParams\":[], " + - "\"others\":\"\", " + - "\"resourceList\":[]" + - "}"; - - taskProps.setTaskParams(spark2Params); - - logger.info("spark task params {}", taskProps.getTaskParams()); - - SparkParameters sparkParameters = JSONUtils.parseObject(taskProps.getTaskParams(), SparkParameters.class); - - assert sparkParameters != null; - if (!sparkParameters.checkParameters()) { - throw new RuntimeException("spark task params is not valid"); - } - sparkParameters.setQueue(taskProps.getQueue()); - - if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) { - String args = sparkParameters.getMainArgs(); - - /** - * combining local and global parameters - */ - Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), - taskProps.getDefinedParams(), - 
sparkParameters.getLocalParametersMap(), - taskProps.getCmdTypeIfComplement(), - taskProps.getScheduleTime()); - if (paramsMap != null) { - args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap)); - } - sparkParameters.setMainArgs(args); - } - - List args = new ArrayList<>(); - - //spark version - String sparkCommand = SPARK2_COMMAND; - - if (SparkVersion.SPARK1.name().equals(sparkParameters.getSparkVersion())) { - sparkCommand = SPARK1_COMMAND; - } - - args.add(sparkCommand); - - // other parameters - args.addAll(SparkArgsUtils.buildArgs(sparkParameters)); - - String sparkArgs = String.join(" ", args); - - logger.info("spark task command : {}", sparkArgs); - - Assert.assertEquals(SPARK2_COMMAND, sparkArgs.split(" ")[0]); - + TaskExecutionContext sparkTaskCtx = new TaskExecutionContext(); + SparkTask sparkTask = new SparkTask(sparkTaskCtx, logger); + sparkTask.init(); + sparkTask.getParameters(); + Assert.assertNull(sparkTaskCtx.getTaskParams()); + + String spark2Command = spark2Task.buildCommand(); + String spark2Expected = "${SPARK_HOME2}/bin/spark-submit --master yarn --deploy-mode cluster " + + "--class basicetl.GlobalUserCar --driver-cores 2 --driver-memory 1G --num-executors 10 " + + "--executor-cores 2 --executor-memory 2G --queue dev test-1.0-SNAPSHOT.jar"; + Assert.assertEquals(spark2Expected, spark2Command); + + taskExecutionContext.setTaskParams(spark1Params); + + SparkTask spark1Task = new SparkTask(taskExecutionContext, logger); + spark1Task.init(); + String spark1Command = spark1Task.buildCommand(); + String spark1Expected = "${SPARK_HOME1}/bin/spark-submit --master yarn --deploy-mode cluster " + + "--class basicetl.GlobalUserCar --driver-cores 2 --driver-memory 1G --num-executors 10 " + + "--executor-cores 2 --executor-memory 2G --queue dev test-1.0-SNAPSHOT.jar"; + Assert.assertEquals(spark1Expected, spark1Command); } } diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java index 2d0e39aa69..0a4124044a 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.server.worker.task.sqoop; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; @@ -23,6 +24,9 @@ import org.apache.dolphinscheduler.server.entity.TaskExecutionContext; import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; + +import java.util.Date; + import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -33,7 +37,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; -import java.util.Date; /** * sqoop task test @@ -43,14 +46,12 @@ public class SqoopTaskTest { private static final Logger logger = LoggerFactory.getLogger(SqoopTaskTest.class); - private ProcessService processService; - private ApplicationContext applicationContext; private SqoopTask sqoopTask; @Before - public void before() throws Exception{ - processService = Mockito.mock(ProcessService.class); - applicationContext = Mockito.mock(ApplicationContext.class); + public void before() { + ProcessService processService = Mockito.mock(ProcessService.class); + ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class); SpringApplicationContext springApplicationContext = new 
SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); @@ -61,17 +62,17 @@ public class SqoopTaskTest { taskExecutionContext.setEnvFile(".dolphinscheduler_env.sh"); taskExecutionContext.setStartTime(new Date()); taskExecutionContext.setTaskTimeout(0); - taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," + - "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," + - "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," + - "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," + - "\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" + - ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," + - "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," + - "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); - - sqoopTask = new SqoopTask(taskExecutionContext,logger); - //test sqoop tash init method + taskExecutionContext.setTaskParams("{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1," + + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\"," + + "\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\"," + + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[]," + + 
"\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"" + + ",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true," + + "\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\"," + + "\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); + + sqoopTask = new SqoopTask(taskExecutionContext, logger); + //test sqoop task init method sqoopTask.init(); } @@ -79,40 +80,72 @@ public class SqoopTaskTest { * test SqoopJobGenerator */ @Test - public void testGenerator(){ + public void testGenerator() { TaskExecutionContext mysqlTaskExecutionContext = getMysqlTaskExecutionContext(); //sqoop TEMPLATE job - //import mysql to HDFS with hadoo - String mysqlToHdfs = "{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}],\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]," + - "\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters mysqlToHdfsParams = JSONUtils.parseObject(mysqlToHdfs,SqoopParameters.class); + //import mysql to HDFS with hadoop + String mysqlToHdfs = + 
"{\"jobName\":\"sqoop_import\",\"hadoopCustomParams\":[{\"prop\":\"mapreduce.map.memory.mb\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"4096\"}]," + + "\"sqoopAdvancedParams\":[{\"prop\":\"--direct\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}],\"jobType\":\"TEMPLATE\",\"concurrency\":1," + + "\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\"," + + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\"," + + "\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\"," + + "\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters mysqlToHdfsParams = JSONUtils.parseObject(mysqlToHdfs, SqoopParameters.class); SqoopJobGenerator generator = new SqoopJobGenerator(); - String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams,mysqlTaskExecutionContext); - String mysqlToHdfsExpected = "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; + String mysqlToHdfsScript = generator.generateSqoopJob(mysqlToHdfsParams, mysqlTaskExecutionContext); + String mysqlToHdfsExpected = + "sqoop import -D mapred.job.name=sqoop_import -D mapreduce.map.memory.mb=4096 --direct -m 1 --connect \"jdbc:mysql://192.168.0.111:3306/test\" " + + "--username kylo --password \"123456\" --table person_2 --target-dir /ods/tmp/test/person7 
--as-textfile " + + "--delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; Assert.assertEquals(mysqlToHdfsExpected, mysqlToHdfsScript); //export hdfs to mysql using update mode - String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," + - "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," + - "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql,SqoopParameters.class); - String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams,mysqlTaskExecutionContext); - String hdfsToMysqlScriptExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; + String hdfsToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\"," + + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\"," + + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\"," + + "\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\"," + + 
"\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters hdfsToMysqlParams = JSONUtils.parseObject(hdfsToMysql, SqoopParameters.class); + String hdfsToMysqlScript = generator.generateSqoopJob(hdfsToMysqlParams, mysqlTaskExecutionContext); + String hdfsToMysqlScriptExpected = + "sqoop export -D mapred.job.name=sqoop_import -m 1 --export-dir /ods/tmp/test/person7 --connect \"jdbc:mysql://192.168.0.111:3306/test\" " + + "--username kylo --password \"123456\" --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' " + + "--lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; Assert.assertEquals(hdfsToMysqlScriptExpected, hdfsToMysqlScript); //export hive to mysql - String hiveToMysql = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; - SqoopParameters hiveToMysqlParams = JSONUtils.parseObject(hiveToMysql,SqoopParameters.class); - String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams,mysqlTaskExecutionContext); - String hiveToMysqlExpected = "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by 
'@' --lines-terminated-by '\\n'"; + String hiveToMysql = + "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\"," + + "\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\"," + + "\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\"," + + "\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\"," + + "\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\"," + + "\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters hiveToMysqlParams = JSONUtils.parseObject(hiveToMysql, SqoopParameters.class); + String hiveToMysqlScript = generator.generateSqoopJob(hiveToMysqlParams, mysqlTaskExecutionContext); + String hiveToMysqlExpected = + "sqoop export -D mapred.job.name=sqoop_import -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date " + + "--hcatalog-partition-values 2020-02-17 --connect \"jdbc:mysql://192.168.0.111:3306/test\" --username kylo --password \"123456\" --table person_3 " + + "--fields-terminated-by '@' --lines-terminated-by '\\n'"; Assert.assertEquals(hiveToMysqlExpected, hiveToMysqlScript); //import mysql to hive - String mysqlToHive = "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM 
person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; - SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive,SqoopParameters.class); - String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams,mysqlTaskExecutionContext); - String mysqlToHiveExpected = "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; + String mysqlToHive = + "{\"jobName\":\"sqoop_import\",\"jobType\":\"TEMPLATE\",\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\"," + + "\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\"," + + "\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[]," + + "\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\"," + + "\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false," + + 
"\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; + SqoopParameters mysqlToHiveParams = JSONUtils.parseObject(mysqlToHive, SqoopParameters.class); + String mysqlToHiveScript = generator.generateSqoopJob(mysqlToHiveParams, mysqlTaskExecutionContext); + String mysqlToHiveExpected = + "sqoop import -D mapred.job.name=sqoop_import -m 1 --connect \"jdbc:mysql://192.168.0.111:3306/test\" --username kylo --password \"123456\" " + + "--query \"SELECT * FROM person_2 WHERE \\$CONDITIONS\" --map-column-java id=Integer --hive-import --hive-database stg --hive-table person_internal_2 " + + "--create-hive-table --hive-overwrite --delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; Assert.assertEquals(mysqlToHiveExpected, mysqlToHiveScript); //sqoop CUSTOM job @@ -124,16 +157,18 @@ public class SqoopTaskTest { } - /** * get taskExecutionContext include mysql + * * @return TaskExecutionContext */ private TaskExecutionContext getMysqlTaskExecutionContext() { TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); - String mysqlSourceConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; - String mysqlTargetConnectionParams = "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; + String mysqlSourceConnectionParams = + "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; + String mysqlTargetConnectionParams = + 
"{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"; sqoopTaskExecutionContext.setDataSourceId(2); sqoopTaskExecutionContext.setDataTargetId(2); sqoopTaskExecutionContext.setSourcetype(0); @@ -153,7 +188,7 @@ public class SqoopTaskTest { * Method: init */ @Test - public void testInit(){ + public void testInit() { try { sqoopTask.init(); } catch (Exception e) { diff --git a/dolphinscheduler-service/pom.xml b/dolphinscheduler-service/pom.xml index f0e8f408d7..fd1d2737de 100644 --- a/dolphinscheduler-service/pom.xml +++ b/dolphinscheduler-service/pom.xml @@ -20,7 +20,7 @@ dolphinscheduler org.apache.dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT 4.0.0 @@ -56,10 +56,18 @@ org.quartz-scheduler quartz - - c3p0 - c3p0 - + + com.mchange + c3p0 + + + com.mchange + mchange-commons-java + + + com.zaxxer + HikariCP-java6 + diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/exceptions/TaskPriorityQueueException.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/exceptions/TaskPriorityQueueException.java new file mode 100644 index 0000000000..30a72144bb --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/exceptions/TaskPriorityQueueException.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service.exceptions; + +/** + * task priority queue exception + */ +public class TaskPriorityQueueException extends Exception { + + /** + * Construct a new runtime exception with the detail message + * + * @param message message + */ + public TaskPriorityQueueException(String message) { + super(message); + } + + /** + * Construct a new runtime exception with the detail message and cause + * + * @param message message + * @param cause cause + */ + public TaskPriorityQueueException(String message, Throwable cause) { + super(message, cause); + } + +} diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java index 474bf12c77..75753c78d6 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java @@ -14,8 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.service.log; +import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.log.GetLogBytesRequestCommand; @@ -28,12 +30,10 @@ import org.apache.dolphinscheduler.remote.command.log.ViewLogRequestCommand; import org.apache.dolphinscheduler.remote.command.log.ViewLogResponseCommand; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import org.apache.dolphinscheduler.remote.utils.Host; -import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - /** * log client */ @@ -90,7 +90,7 @@ public class LogClientService { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if (response != null) { - RollViewLogResponseCommand rollReviewLog = JsonSerializer.deserialize( + RollViewLogResponseCommand rollReviewLog = JSONUtils.parseObject( response.getBody(), RollViewLogResponseCommand.class); return rollReviewLog.getMsg(); } @@ -119,7 +119,7 @@ public class LogClientService { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if (response != null) { - ViewLogResponseCommand viewLog = JsonSerializer.deserialize( + ViewLogResponseCommand viewLog = JSONUtils.parseObject( response.getBody(), ViewLogResponseCommand.class); return viewLog.getMsg(); } @@ -148,7 +148,7 @@ public class LogClientService { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if (response != null) { - GetLogBytesResponseCommand getLog = JsonSerializer.deserialize( + GetLogBytesResponseCommand getLog = JSONUtils.parseObject( response.getBody(), GetLogBytesResponseCommand.class); return getLog.getData(); } @@ 
-160,7 +160,6 @@ public class LogClientService { return result; } - /** * remove task log * @@ -178,7 +177,7 @@ public class LogClientService { Command command = request.convert2Command(); Command response = this.client.sendSync(address, command, LOG_REQUEST_TIMEOUT); if (response != null) { - RemoveTaskLogResponseCommand taskLogResponse = JsonSerializer.deserialize( + RemoveTaskLogResponseCommand taskLogResponse = JSONUtils.parseObject( response.getBody(), RemoveTaskLogResponseCommand.class); return taskLogResponse.getStatus(); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 5dde6d8ce7..3b0d0bdd0d 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -19,11 +19,11 @@ package org.apache.dolphinscheduler.service.process; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_EMPTY_SUB_PROCESS; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; -import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; +import static 
org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS; import static java.util.stream.Collectors.toSet; @@ -109,11 +109,11 @@ public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); - private final int[] stateArray = new int[] {ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), - ExecutionStatus.RUNNING_EXECUTION.ordinal(), - ExecutionStatus.DELAY_EXECUTION.ordinal(), - ExecutionStatus.READY_PAUSE.ordinal(), - ExecutionStatus.READY_STOP.ordinal()}; + private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(), + ExecutionStatus.RUNNING_EXECUTION.ordinal(), + ExecutionStatus.DELAY_EXECUTION.ordinal(), + ExecutionStatus.READY_PAUSE.ordinal(), + ExecutionStatus.READY_STOP.ordinal()}; @Autowired private UserMapper userMapper; @@ -145,7 +145,6 @@ public class ProcessService { @Autowired private ResourceMapper resourceMapper; - @Autowired private ErrorCommandMapper errorCommandMapper; @@ -158,13 +157,13 @@ public class ProcessService { /** * handle Command (construct ProcessInstance from Command) , wrapped in transaction * - * @param logger logger - * @param host host + * @param logger logger + * @param host host * @param validThreadNum validThreadNum - * @param command found command + * @param command found command * @return process instance */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) { ProcessInstance processInstance = constructProcessInstance(command, host); //cannot construct process instance, return null; @@ -181,7 +180,7 @@ public class ProcessService { 
processInstance.addHistoryCmd(command.getCommandType()); saveProcessInstance(processInstance); this.setSubProcessParam(processInstance); - delCommandByid(command.getId()); + delCommandById(command.getId()); return processInstance; } @@ -191,17 +190,17 @@ public class ProcessService { * @param command command * @param message message */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public void moveToErrorCommand(Command command, String message) { ErrorCommand errorCommand = new ErrorCommand(command, message); this.errorCommandMapper.insert(errorCommand); - delCommandByid(command.getId()); + delCommandById(command.getId()); } /** * set process waiting thread * - * @param command command + * @param command command * @param processInstance processInstance * @return process instance */ @@ -219,7 +218,7 @@ public class ProcessService { /** * check thread num * - * @param command command + * @param command command * @param validThreadNum validThreadNum * @return if thread is enough */ @@ -267,14 +266,14 @@ public class ProcessService { if (cmdTypeMap.containsKey(commandType)) { ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam()); - int processInstanceId = cmdParamObj.path(CMDPARAM_RECOVER_PROCESS_ID_STRING).asInt(); + int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt(); List commands = commandMapper.selectList(null); // for all commands for (Command tmpCommand : commands) { if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) { ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam()); - if (tempObj != null && processInstanceId == tempObj.path(CMDPARAM_RECOVER_PROCESS_ID_STRING).asInt()) { + if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) { isNeedCreate = false; break; } @@ -296,9 +295,6 @@ public class ProcessService { /** * get task node list by definitionId - * - * @param defineId - * 
@return */ public List getTaskNodeListByDefinitionId(Integer defineId) { ProcessDefinition processDefinition = processDefineMapper.selectById(defineId); @@ -375,29 +371,37 @@ public class ProcessService { */ public void removeTaskLogFile(Integer processInstanceId) { - LogClientService logClient = new LogClientService(); + LogClientService logClient = null; - List taskInstanceList = findValidTaskListByProcessId(processInstanceId); + try { + logClient = new LogClientService(); + List taskInstanceList = findValidTaskListByProcessId(processInstanceId); - if (CollectionUtils.isEmpty(taskInstanceList)) { - return; - } + if (CollectionUtils.isEmpty(taskInstanceList)) { + return; + } - for (TaskInstance taskInstance : taskInstanceList) { - String taskLogPath = taskInstance.getLogPath(); - if (StringUtils.isEmpty(taskInstance.getHost())) { - continue; + for (TaskInstance taskInstance : taskInstanceList) { + String taskLogPath = taskInstance.getLogPath(); + if (StringUtils.isEmpty(taskInstance.getHost())) { + continue; + } + int port = Constants.RPC_PORT; + String ip = ""; + try { + ip = Host.of(taskInstance.getHost()).getIp(); + } catch (Exception e) { + // compatible old version + ip = taskInstance.getHost(); + } + + // remove task log from loggerserver + logClient.removeTaskLog(ip, port, taskLogPath); } - int port = Constants.RPC_PORT; - String ip = ""; - try { - ip = Host.of(taskInstance.getHost()).getIp(); - } catch (Exception e) { - // compatible old version - ip = taskInstance.getHost(); + } finally { + if (logClient != null) { + logClient.close(); } - // remove task log from loggerserver - logClient.removeTaskLog(ip, port, taskLogPath); } } @@ -417,7 +421,7 @@ public class ProcessService { * recursive query sub process definition id by parent id. 
* * @param parentId parentId - * @param ids ids + * @param ids ids */ public void recurseFindSubProcessId(int parentId, List ids) { ProcessDefinition processDefinition = processDefineMapper.selectById(parentId); @@ -432,7 +436,7 @@ public class ProcessService { for (TaskNode taskNode : taskNodeList) { String parameter = taskNode.getParams(); ObjectNode parameterJson = JSONUtils.parseObject(parameter); - if (parameterJson.get(CMDPARAM_SUB_PROCESS_DEFINE_ID) != null) { + if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) { SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class); ids.add(subProcessParam.getProcessDefinitionId()); recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids); @@ -448,7 +452,7 @@ public class ProcessService { * create recovery waiting thread command and delete origin command at the same time. * if the recovery command is exists, only update the field update_time * - * @param originCommand originCommand + * @param originCommand originCommand * @param processInstance processInstance */ public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) { @@ -461,20 +465,21 @@ public class ProcessService { return; } Map cmdParam = new HashMap<>(); - cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId())); + cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId())); // process instance quit by "waiting thread" state if (originCommand == null) { Command command = new Command( - CommandType.RECOVER_WAITTING_THREAD, - processInstance.getTaskDependType(), - processInstance.getFailureStrategy(), - processInstance.getExecutorId(), - processInstance.getProcessDefinitionId(), - JSONUtils.toJsonString(cmdParam), - processInstance.getWarningType(), - processInstance.getWarningGroupId(), - processInstance.getScheduleTime(), - processInstance.getProcessInstancePriority() 
+ CommandType.RECOVER_WAITTING_THREAD, + processInstance.getTaskDependType(), + processInstance.getFailureStrategy(), + processInstance.getExecutorId(), + processInstance.getProcessDefinitionId(), + JSONUtils.toJsonString(cmdParam), + processInstance.getWarningType(), + processInstance.getWarningGroupId(), + processInstance.getScheduleTime(), + processInstance.getWorkerGroup(), + processInstance.getProcessInstancePriority() ); saveCommand(command); return; @@ -499,7 +504,7 @@ public class ProcessService { /** * get schedule time from command * - * @param command command + * @param command command * @param cmdParam cmdParam map * @return date */ @@ -517,8 +522,8 @@ public class ProcessService { * generate a new work process instance from command. * * @param processDefinition processDefinition - * @param command command - * @param cmdParam cmdParam map + * @param command command + * @param cmdParam cmdParam map * @return process instance */ private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition, @@ -552,10 +557,10 @@ public class ProcessService { processInstance.setConnects(processDefinition.getConnects()); // curing global params processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - getCommandTypeIfComplement(processInstance, command), - processInstance.getScheduleTime())); + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + getCommandTypeIfComplement(processInstance, command), + processInstance.getScheduleTime())); //copy process define json to process instance processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson()); @@ -575,7 +580,7 @@ public class ProcessService { * use definition creator's tenant. 
* * @param tenantId tenantId - * @param userId userId + * @param userId userId * @return tenant */ public Tenant getTenantForProcess(int tenantId, int userId) { @@ -598,15 +603,15 @@ public class ProcessService { /** * check command parameters is valid * - * @param command command + * @param command command * @param cmdParam cmdParam map * @return whether command param is valid */ private Boolean checkCmdParam(Command command, Map cmdParam) { if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) { if (cmdParam == null - || !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES) - || cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()) { + || !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES) + || cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) { logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } @@ -618,7 +623,7 @@ public class ProcessService { * construct process instance according to one command. 
* * @param command command - * @param host host + * @param host host * @return process instance */ private ProcessInstance constructProcessInstance(Command command, String host) { @@ -639,20 +644,20 @@ public class ProcessService { if (cmdParam != null) { Integer processInstanceId = 0; // recover from failure or pause tasks - if (cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) { - String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING); + if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) { + String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING); processInstanceId = Integer.parseInt(processId); if (processInstanceId == 0) { logger.error("command parameter is error, [ ProcessInstanceId ] is 0"); return null; } - } else if (cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)) { + } else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { // sub process map - String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS); + String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS); processInstanceId = Integer.parseInt(pId); - } else if (cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)) { + } else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) { // waiting thread command - String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD); + String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD); processInstanceId = Integer.parseInt(pId); } if (processInstanceId == 0) { @@ -673,7 +678,7 @@ public class ProcessService { } } // reset command parameter if sub process - if (cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)) { + if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) { processInstance.setCommandParam(command.getCommandParam()); } } else { @@ -700,15 +705,15 @@ public class ProcessService { List failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE); List 
toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE); List killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL); - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); failedList.addAll(killedList); failedList.addAll(toleranceList); for (Integer taskId : failedList) { initTaskInstance(this.findTaskInstanceById(taskId)); } - cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, - String.join(Constants.COMMA, convertIntListToString(failedList))); + cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, + String.join(Constants.COMMA, convertIntListToString(failedList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; @@ -718,16 +723,16 @@ public class ProcessService { break; case RECOVER_SUSPENDED_PROCESS: // find pause tasks and init task's state - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); List suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE); List stopNodeList = findTaskIdByInstanceState(processInstance.getId(), - ExecutionStatus.KILL); + ExecutionStatus.KILL); suspendedNodeList.addAll(stopNodeList); for (Integer taskId : suspendedNodeList) { // initialize the pause state initTaskInstance(this.findTaskInstanceById(taskId)); } - cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); + cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList))); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); processInstance.setRunTimes(runTime + 1); break; @@ -747,8 +752,8 @@ public class ProcessService { break; case REPEAT_RUNNING: // delete the 
recover task names from command parameter - if (cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)) { - cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING); + if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) { + cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING); processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam)); } // delete all the valid tasks when repeat running @@ -775,7 +780,7 @@ public class ProcessService { * return complement data if the process start with complement data * * @param processInstance processInstance - * @param command command + * @param command command * @return command type */ private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) { @@ -790,8 +795,8 @@ public class ProcessService { * initialize complement data parameters * * @param processDefinition processDefinition - * @param processInstance processInstance - * @param cmdParam cmdParam + * @param processInstance processInstance + * @param cmdParam cmdParam */ private void initComplementDataParam(ProcessDefinition processDefinition, ProcessInstance processInstance, @@ -801,14 +806,14 @@ public class ProcessService { } Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE), - YYYY_MM_DD_HH_MM_SS); + YYYY_MM_DD_HH_MM_SS); if (Flag.NO == processInstance.getIsSubProcess()) { processInstance.setScheduleTime(startComplementTime); } processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( - processDefinition.getGlobalParamMap(), - processDefinition.getGlobalParamList(), - CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); + processDefinition.getGlobalParamMap(), + processDefinition.getGlobalParamList(), + CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); } @@ -827,21 +832,21 @@ public class ProcessService { } Map paramMap = JSONUtils.toMap(cmdParam); // write sub process id into cmd param. 
- if (paramMap.containsKey(CMDPARAM_SUB_PROCESS) - && CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))) { - paramMap.remove(CMDPARAM_SUB_PROCESS); - paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); + if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS) + && CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) { + paramMap.remove(CMD_PARAM_SUB_PROCESS); + paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId())); subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap)); subProcessInstance.setIsSubProcess(Flag.YES); this.saveProcessInstance(subProcessInstance); } // copy parent instance user def params to sub process.. - String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID); + String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID); if (StringUtils.isNotEmpty(parentInstanceId)) { ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId)); if (parentInstance != null) { subProcessInstance.setGlobalParams( - joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); + joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams())); this.saveProcessInstance(subProcessInstance); } else { logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); @@ -863,7 +868,7 @@ public class ProcessService { * only the keys doesn't in sub process global would be joined. 
* * @param parentGlobalParams parentGlobalParams - * @param subGlobalParams subGlobalParams + * @param subGlobalParams subGlobalParams * @return global params join */ private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) { @@ -906,16 +911,16 @@ public class ProcessService { * @param taskInstance taskInstance * @return task instance */ - @Transactional(rollbackFor = RuntimeException.class) + @Transactional(rollbackFor = Exception.class) public TaskInstance submitTask(TaskInstance taskInstance) { ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); logger.info("start submit task : {}, instance id:{}, state: {}", - taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); + taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); //submit to db TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance); if (task == null) { logger.error("end submit task to db error, task name:{}, process id:{} state: {} ", - taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); + taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState()); return task; } if (!task.getState().typeIsFinished()) { @@ -923,7 +928,7 @@ public class ProcessService { } logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ", - taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); + taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState()); return task; } @@ -934,7 +939,7 @@ public class ProcessService { * set map {parent instance id, task instance id, 0(child instance id)} * * @param parentInstance parentInstance - * @param parentTask parentTask + * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap 
setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) { @@ -963,7 +968,7 @@ public class ProcessService { * find previous task work process map. * * @param parentProcessInstance parentProcessInstance - * @param parentTask parentTask + * @param parentTask parentTask * @return process instance map */ private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance, @@ -981,7 +986,7 @@ public class ProcessService { } } logger.info("sub process instance is not found,parent task:{},parent instance:{}", - parentTask.getId(), parentProcessInstance.getId()); + parentTask.getId(), parentProcessInstance.getId()); return null; } @@ -989,7 +994,7 @@ public class ProcessService { * create sub work process command * * @param parentProcessInstance parentProcessInstance - * @param task task + * @param task task */ public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) { if (!task.isSubProcess()) { @@ -1015,10 +1020,6 @@ public class ProcessService { /** * complement data needs transform parent parameter to child. 
- * - * @param instanceMap - * @param parentProcessInstance - * @return */ private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance) { // set sub work process command @@ -1037,11 +1038,6 @@ public class ProcessService { /** * create sub work process command - * - * @param parentProcessInstance - * @param childInstance - * @param instanceMap - * @param task */ public Command createSubProcessCommand(ProcessInstance parentProcessInstance, ProcessInstance childInstance, @@ -1050,28 +1046,27 @@ public class ProcessService { CommandType commandType = getSubCommandType(parentProcessInstance, childInstance); TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class); Map subProcessParam = JSONUtils.toMap(taskNode.getParams()); - Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID)); + Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID)); String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance); return new Command( - commandType, - TaskDependType.TASK_POST, - parentProcessInstance.getFailureStrategy(), - parentProcessInstance.getExecutorId(), - childDefineId, - processParam, - parentProcessInstance.getWarningType(), - parentProcessInstance.getWarningGroupId(), - parentProcessInstance.getScheduleTime(), - parentProcessInstance.getProcessInstancePriority() + commandType, + TaskDependType.TASK_POST, + parentProcessInstance.getFailureStrategy(), + parentProcessInstance.getExecutorId(), + childDefineId, + processParam, + parentProcessInstance.getWarningType(), + parentProcessInstance.getWarningGroupId(), + parentProcessInstance.getScheduleTime(), + task.getWorkerGroup(), + parentProcessInstance.getProcessInstancePriority() ); } /** * initialize sub work flow state * child instance state would be initialized when 'recovery from pause/stop/failure' - * - * @param childInstance */ private void 
initSubInstanceState(ProcessInstance childInstance) { if (childInstance != null) { @@ -1084,9 +1079,6 @@ public class ProcessService { * get sub work flow command type * child instance exist: child command = fatherCommand * child instance not exists: child command = fatherCommand[0] - * - * @param parentProcessInstance - * @return */ private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) { CommandType commandType = parentProcessInstance.getCommandType(); @@ -1098,10 +1090,10 @@ public class ProcessService { } /** - * update sub process definition todo + * update sub process definition * * @param parentProcessInstance parentProcessInstance - * @param childDefinitionId childDefinitionId + * @param childDefinitionId childDefinitionId */ private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) { ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId()); @@ -1115,7 +1107,7 @@ public class ProcessService { /** * submit task to mysql * - * @param taskInstance taskInstance + * @param taskInstance taskInstance * @param processInstance processInstance * @return task instance */ @@ -1128,7 +1120,7 @@ public class ProcessService { } else { if (processInstanceState != ExecutionStatus.READY_STOP - && processInstanceState != ExecutionStatus.READY_PAUSE) { + && processInstanceState != ExecutionStatus.READY_PAUSE) { // failure task set invalid taskInstance.setFlag(Flag.NO); updateTaskInstance(taskInstance); @@ -1161,33 +1153,6 @@ public class ProcessService { return taskInstance; } - /** - * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${task executed by ip1},${ip2}... - * The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low. 
- * - * @param taskInstance taskInstance - * @return task zk queue str - */ - public String taskZkInfo(TaskInstance taskInstance) { - - String taskWorkerGroup = getTaskWorkerGroup(taskInstance); - ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId()); - if (processInstance == null) { - logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId()); - return ""; - } - - StringBuilder sb = new StringBuilder(100); - - sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE) - .append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE) - .append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE) - .append(taskInstance.getId()).append(Constants.UNDERLINE) - .append(taskInstance.getWorkerGroup()); - - return sb.toString(); - } - /** * get submit task instance state by the work process state * cannot modify the task state when running/kill/submit success, or this @@ -1196,7 +1161,7 @@ public class ProcessService { * return stop if work process state is ready stop * if all of above are not satisfied, return submit success * - * @param taskInstance taskInstance + * @param taskInstance taskInstance * @param processInstanceState processInstanceState * @return process instance state */ @@ -1206,10 +1171,9 @@ public class ProcessService { // running, delayed or killed // the task already exists in task queue // return state - state == ExecutionStatus.RUNNING_EXECUTION - || state == ExecutionStatus.DELAY_EXECUTION - || state == ExecutionStatus.KILL - || checkTaskExistsInTaskQueue(taskInstance) + state == ExecutionStatus.RUNNING_EXECUTION + || state == ExecutionStatus.DELAY_EXECUTION + || state == ExecutionStatus.KILL ) { return state; } @@ -1218,7 +1182,7 @@ public class ProcessService { if (processInstanceState == ExecutionStatus.READY_PAUSE) { state = ExecutionStatus.PAUSE; } else if (processInstanceState == 
ExecutionStatus.READY_STOP - || !checkProcessStrategy(taskInstance)) { + || !checkProcessStrategy(taskInstance)) { state = ExecutionStatus.KILL; } else { state = ExecutionStatus.SUBMITTED_SUCCESS; @@ -1248,22 +1212,6 @@ public class ProcessService { return true; } - /** - * check the task instance existing in queue - * - * @param taskInstance taskInstance - * @return whether taskinstance exists queue - */ - public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance) { - if (taskInstance.isSubProcess()) { - return false; - } - - String taskZkInfo = taskZkInfo(taskInstance); - - return false; - } - /** * create a new process instance * @@ -1349,7 +1297,7 @@ public class ProcessService { * * @param id id */ - public void delCommandByid(int id) { + public void delCommandById(int id) { commandMapper.deleteById(id); } @@ -1389,7 +1337,7 @@ public class ProcessService { * get id list by task state * * @param instanceId instanceId - * @param state state + * @param state state * @return task instance states */ public List findTaskIdByInstanceState(int instanceId, ExecutionStatus state) { @@ -1433,7 +1381,7 @@ public class ProcessService { * @return create process instance result */ public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) { - Integer count = 0; + int count = 0; if (processInstanceMap != null) { return processInstanceMapMapper.insert(processInstanceMap); } @@ -1444,7 +1392,7 @@ public class ProcessService { * find work process map by parent process id and parent task id. 
* * @param parentWorkProcessId parentWorkProcessId - * @param parentTaskId parentTaskId + * @param parentTaskId parentTaskId * @return process instance map */ public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) { @@ -1466,7 +1414,7 @@ public class ProcessService { * find sub process instance * * @param parentProcessId parentProcessId - * @param parentTaskId parentTaskId + * @param parentTaskId parentTaskId * @return process instance */ public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) { @@ -1498,18 +1446,17 @@ public class ProcessService { /** * change task state * - * @param state state - * @param startTime startTime - * @param host host + * @param state state + * @param startTime startTime + * @param host host * @param executePath executePath - * @param logPath logPath - * @param taskInstId taskInstId + * @param logPath logPath + * @param taskInstId taskInstId */ - public void changeTaskState(ExecutionStatus state, Date startTime, String host, + public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, String executePath, String logPath, int taskInstId) { - TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); taskInstance.setState(state); taskInstance.setStartTime(startTime); taskInstance.setHost(host); @@ -1532,12 +1479,12 @@ public class ProcessService { * update the process instance * * @param processInstanceId processInstanceId - * @param processJson processJson - * @param globalParams globalParams - * @param scheduleTime scheduleTime - * @param flag flag - * @param locations locations - * @param connects connects + * @param processJson processJson + * @param globalParams globalParams + * @param scheduleTime scheduleTime + * @param flag flag + * @param locations locations + * @param connects connects * @return update process instance result */ public int updateProcessInstance(Integer 
processInstanceId, String processJson, @@ -1558,18 +1505,17 @@ public class ProcessService { /** * change task state * - * @param state state - * @param endTime endTime + * @param state state + * @param endTime endTime * @param taskInstId taskInstId - * @param varPool varPool + * @param varPool varPool */ - public void changeTaskState(ExecutionStatus state, + public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date endTime, int processId, String appIds, int taskInstId, String varPool) { - TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId); taskInstance.setPid(processId); taskInstance.setAppLink(appIds); taskInstance.setState(state); @@ -1622,7 +1568,6 @@ public class ProcessService { * @return process instance list */ public List queryNeedFailoverProcessInstances(String host) { - return processInstanceMapper.queryByHostAndStatus(host, stateArray); } @@ -1640,7 +1585,7 @@ public class ProcessService { //2 insert into recover command Command cmd = new Command(); cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId()); - cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); + cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId())); cmd.setExecutorId(processInstance.getExecutorId()); cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS); createCommand(cmd); @@ -1654,7 +1599,7 @@ public class ProcessService { */ public List queryNeedFailoverTaskInstances(String host) { return taskInstanceMapper.queryByHostAndStatus(host, - stateArray); + stateArray); } /** @@ -1671,7 +1616,7 @@ public class ProcessService { * update process instance state by id * * @param processInstanceId processInstanceId - * @param executionStatus executionStatus + * @param executionStatus executionStatus * @return update process result */ public int updateProcessInstanceState(Integer 
processInstanceId, ExecutionStatus executionStatus) { @@ -1701,19 +1646,21 @@ public class ProcessService { * @param ids ids * @return udf function list */ - public List queryUdfFunListByids(int[] ids) { + public List queryUdfFunListByIds(int[] ids) { return udfFuncMapper.queryUdfByIdStr(ids, null); } /** * find tenant code by resource name * - * @param resName resource name + * @param resName resource name * @param resourceType resource type * @return tenant code */ public String queryTenantCodeByResName(String resName, ResourceType resourceType) { - return resourceMapper.queryTenantCodeByResourceName(resName, resourceType.ordinal()); + // in order to query tenant code successful although the version is older + String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName); + return resourceMapper.queryTenantCodeByResourceName(fullName, resourceType.ordinal()); } /** @@ -1724,20 +1671,20 @@ public class ProcessService { */ public List selectAllByProcessDefineId(int[] ids) { return scheduleMapper.selectAllByProcessDefineArray( - ids); + ids); } /** * get dependency cycle by work process define id and scheduler fire time * - * @param masterId masterId + * @param masterId masterId * @param processDefinitionId processDefinitionId - * @param scheduledFireTime the time the task schedule is expected to trigger + * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency * @throws Exception if error throws Exception */ public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception { - List list = getCycleDependencies(masterId, new int[] {processDefinitionId}, scheduledFireTime); + List list = getCycleDependencies(masterId, new int[]{processDefinitionId}, scheduledFireTime); return list.size() > 0 ? 
list.get(0) : null; } @@ -1745,15 +1692,15 @@ public class ProcessService { /** * get dependency cycle list by work process define id list and scheduler fire time * - * @param masterId masterId - * @param ids ids + * @param masterId masterId + * @param ids ids * @param scheduledFireTime the time the task schedule is expected to trigger * @return CycleDependency list * @throws Exception if error throws Exception */ public List getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception { List cycleDependencyList = new ArrayList(); - if (ids == null || ids.length == 0) { + if (null == ids || ids.length == 0) { logger.warn("ids[] is empty!is invalid!"); return cycleDependencyList; } @@ -1823,8 +1770,8 @@ public class ProcessService { */ public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastSchedulerProcess(definitionId, - dateInterval.getStartTime(), - dateInterval.getEndTime()); + dateInterval.getStartTime(), + dateInterval.getEndTime()); } /** @@ -1836,23 +1783,23 @@ public class ProcessService { */ public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) { return processInstanceMapper.queryLastManualProcess(definitionId, - dateInterval.getStartTime(), - dateInterval.getEndTime()); + dateInterval.getStartTime(), + dateInterval.getEndTime()); } /** * find last running process instance * * @param definitionId process definition id - * @param startTime start time - * @param endTime end time + * @param startTime start time + * @param endTime end time * @return process instance */ public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) { return processInstanceMapper.queryLastRunningProcess(definitionId, - startTime, - endTime, - stateArray); + startTime, + endTime, + stateArray); } /** @@ -1883,6 +1830,7 @@ public class ProcessService { */ public String 
getTaskWorkerGroup(TaskInstance taskInstance) { String workerGroup = taskInstance.getWorkerGroup(); + if (StringUtils.isNotBlank(workerGroup)) { return workerGroup; } @@ -1934,7 +1882,7 @@ public class ProcessService { /** * list unauthorized udf function * - * @param userId user id + * @param userId user id * @param needChecks data source id array * @return unauthorized udf function list */ @@ -2007,9 +1955,6 @@ public class ProcessService { /** * format task app id in task instance - * - * @param taskInstance - * @return */ public String formatTaskAppId(TaskInstance taskInstance) { ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId()); @@ -2019,9 +1964,9 @@ public class ProcessService { return ""; } return String.format("%s_%s_%s", - definition.getId(), - processInstanceById.getId(), - taskInstance.getId()); + definition.getId(), + processInstanceById.getId(), + taskInstance.getId()); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java new file mode 100644 index 0000000000..d7a902550f --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/PeerTaskInstancePriorityQueue.java @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.service.queue; + +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.exceptions.TaskPriorityQueueException; + +import java.util.Comparator; +import java.util.Iterator; +import java.util.PriorityQueue; + +/** + * Task instances priority queue implementation + * All the task instances are in the same process instance. + */ +public class PeerTaskInstancePriorityQueue implements TaskPriorityQueue { + /** + * queue size + */ + private static final Integer QUEUE_MAX_SIZE = 3000; + + /** + * queue + */ + private PriorityQueue queue = new PriorityQueue<>(QUEUE_MAX_SIZE, new TaskInfoComparator()); + + /** + * put task instance to priority queue + * + * @param taskInstance taskInstance + * @throws TaskPriorityQueueException + */ + public void put(TaskInstance taskInstance) throws TaskPriorityQueueException { + queue.add(taskInstance); + } + + /** + * take task info + * + * @return task instance + * @throws TaskPriorityQueueException + */ + @Override + public TaskInstance take() throws TaskPriorityQueueException { + return queue.poll(); + } + + /** + * peek taskInfo + * + * @return task instance + */ + public TaskInstance peek() { + return queue.peek(); + } + + /** + * queue size + * + * @return size + */ + public int size() { + return queue.size(); + } + + /** + * whether contains the task instance + * + * @param taskInstance task instance + * @return true is contains + */ + public boolean contains(TaskInstance taskInstance) { + return queue.contains(taskInstance); + 
} + + /** + * remove task + * + * @param taskInstance task instance + * @return true if remove success + */ + public boolean remove(TaskInstance taskInstance) { + return queue.remove(taskInstance); + } + + /** + * get iterator + * + * @return Iterator + */ + public Iterator iterator() { + return queue.iterator(); + } + + /** + * TaskInfoComparator + */ + private class TaskInfoComparator implements Comparator { + + /** + * compare o1 o2 + * + * @param o1 o1 + * @param o2 o2 + * @return compare result + */ + @Override + public int compare(TaskInstance o1, TaskInstance o2) { + return o1.getTaskInstancePriority().compareTo(o2.getTaskInstancePriority()); + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriority.java similarity index 56% rename from dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriority.java index 991eeed493..a872f6db9f 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskPriority.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriority.java @@ -15,14 +15,15 @@ * limitations under the License. 
*/ -package org.apache.dolphinscheduler.server.entity; +package org.apache.dolphinscheduler.service.queue; -import static org.apache.dolphinscheduler.common.Constants.*; +import java.util.Map; +import java.util.Objects; /** * task priority info */ -public class TaskPriority { +public class TaskPriority implements Comparable { /** * processInstancePriority @@ -50,9 +51,9 @@ public class TaskPriority { private String groupName; /** - * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${groupName} + * context */ - private String taskPriorityInfo; + private Map context; public TaskPriority(){} @@ -65,15 +66,6 @@ public class TaskPriority { this.taskInstancePriority = taskInstancePriority; this.taskId = taskId; this.groupName = groupName; - this.taskPriorityInfo = this.processInstancePriority + - UNDERLINE + - this.processInstanceId + - UNDERLINE + - this.taskInstancePriority + - UNDERLINE + - this.taskId + - UNDERLINE + - this.groupName; } public int getProcessInstancePriority() { @@ -104,6 +96,10 @@ public class TaskPriority { return taskId; } + public Map getContext() { + return context; + } + public void setTaskId(int taskId) { this.taskId = taskId; } @@ -116,32 +112,61 @@ public class TaskPriority { this.groupName = groupName; } - public String getTaskPriorityInfo() { - return taskPriorityInfo; + public void setContext(Map context) { + this.context = context; } - public void setTaskPriorityInfo(String taskPriorityInfo) { - this.taskPriorityInfo = taskPriorityInfo; - } + @Override + public int compareTo(TaskPriority other) { + if (this.getProcessInstancePriority() > other.getProcessInstancePriority()) { + return 1; + } + if (this.getProcessInstancePriority() < other.getProcessInstancePriority()) { + return -1; + } - /** - * taskPriorityInfo convert taskPriority - * - * @param taskPriorityInfo taskPriorityInfo - * @return TaskPriority - */ - public static TaskPriority of(String taskPriorityInfo){ - String[] parts = 
taskPriorityInfo.split(UNDERLINE); + if (this.getProcessInstanceId() > other.getProcessInstanceId()) { + return 1; + } + if (this.getProcessInstanceId() < other.getProcessInstanceId()) { + return -1; + } + + if (this.getTaskInstancePriority() > other.getTaskInstancePriority()) { + return 1; + } + if (this.getTaskInstancePriority() < other.getTaskInstancePriority()) { + return -1; + } + + if (this.getTaskId() > other.getTaskId()) { + return 1; + } + if (this.getTaskId() < other.getTaskId()) { + return -1; + } - if (parts.length != 5) { - throw new IllegalArgumentException(String.format("TaskPriority : %s illegal.", taskPriorityInfo)); + return this.getGroupName().compareTo(other.getGroupName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - TaskPriority taskPriority = new TaskPriority( - Integer.parseInt(parts[0]), - Integer.parseInt(parts[1]), - Integer.parseInt(parts[2]), - Integer.parseInt(parts[3]), - parts[4]); - return taskPriority; + TaskPriority that = (TaskPriority) o; + return processInstancePriority == that.processInstancePriority + && processInstanceId == that.processInstanceId + && taskInstancePriority == that.taskInstancePriority + && taskId == that.taskId + && Objects.equals(groupName, that.groupName); + } + + @Override + public int hashCode() { + return Objects.hash(processInstancePriority, processInstanceId, taskInstancePriority, taskId, groupName); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java index 3ad9aef6c5..14c6b382d4 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueue.java @@ -14,31 
+14,38 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.dolphinscheduler.service.queue; +import org.apache.dolphinscheduler.service.exceptions.TaskPriorityQueueException; -public interface TaskPriorityQueue { +/** + * task priority queue + * @param + */ +public interface TaskPriorityQueue { /** * put task info * * @param taskInfo taskInfo - * @throws Exception + * @throws TaskPriorityQueueException */ - void put(String taskInfo) throws Exception; + void put(T taskInfo) throws TaskPriorityQueueException; /** * take taskInfo + * * @return taskInfo - * @throws Exception + * @throws TaskPriorityQueueException */ - String take()throws Exception; + T take() throws TaskPriorityQueueException, InterruptedException; /** * size * * @return size - * @throws Exception + * @throws TaskPriorityQueueException */ - int size() throws Exception; + int size() throws TaskPriorityQueueException; } \ No newline at end of file diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java index 0a0fb1b9b0..694d4c4763 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskPriorityQueueImpl.java @@ -14,22 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.service.queue; +package org.apache.dolphinscheduler.service.queue; -import org.springframework.stereotype.Service; +import org.apache.dolphinscheduler.service.exceptions.TaskPriorityQueueException; -import java.util.*; import java.util.concurrent.PriorityBlockingQueue; -import static org.apache.dolphinscheduler.common.Constants.*; +import org.springframework.stereotype.Service; /** * A singleton of a task queue implemented with zookeeper * tasks queue implementation */ @Service -public class TaskPriorityQueueImpl implements TaskPriorityQueue { +public class TaskPriorityQueueImpl implements TaskPriorityQueue { /** * queue size */ @@ -38,67 +37,38 @@ public class TaskPriorityQueueImpl implements TaskPriorityQueue { /** * queue */ - private PriorityBlockingQueue queue = new PriorityBlockingQueue<>(QUEUE_MAX_SIZE, new TaskInfoComparator()); + private PriorityBlockingQueue queue = new PriorityBlockingQueue<>(QUEUE_MAX_SIZE); /** * put task takePriorityInfo * * @param taskPriorityInfo takePriorityInfo - * @throws Exception + * @throws TaskPriorityQueueException */ @Override - public void put(String taskPriorityInfo) throws Exception { + public void put(TaskPriority taskPriorityInfo) throws TaskPriorityQueueException { queue.put(taskPriorityInfo); } /** * take taskInfo + * * @return taskInfo - * @throws Exception + * @throws TaskPriorityQueueException */ @Override - public String take() throws Exception { + public TaskPriority take() throws TaskPriorityQueueException, InterruptedException { return queue.take(); } /** * queue size + * * @return size - * @throws Exception + * @throws TaskPriorityQueueException */ @Override - public int size() throws Exception { + public int size() throws TaskPriorityQueueException { return queue.size(); } - - /** - * TaskInfoComparator - */ - private class TaskInfoComparator implements Comparator{ - - /** - * compare o1 o2 - * @param o1 o1 - * @param o2 o2 - * @return compare result - */ - @Override - 
public int compare(String o1, String o2) { - String s1 = o1; - String s2 = o2; - String[] s1Array = s1.split(UNDERLINE); - if(s1Array.length > TASK_INFO_LENGTH){ - // warning: if this length > 5, need to be changed - s1 = s1.substring(0, s1.lastIndexOf(UNDERLINE) ); - } - - String[] s2Array = s2.split(UNDERLINE); - if(s2Array.length > TASK_INFO_LENGTH){ - // warning: if this length > 5, need to be changed - s2 = s2.substring(0, s2.lastIndexOf(UNDERLINE) ); - } - - return s1.compareTo(s2); - } - } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClient.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClient.java index c08da0ef72..5a04c5a23b 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClient.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClient.java @@ -24,6 +24,7 @@ import org.apache.curator.framework.state.ConnectionState; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; @@ -32,6 +33,7 @@ import org.springframework.stereotype.Component; import java.nio.charset.StandardCharsets; import java.util.List; +import java.util.concurrent.TimeUnit; import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; @@ -55,9 +57,10 @@ public class CuratorZookeeperClient implements InitializingBean { } private CuratorFramework buildClient() { - logger.info("zookeeper registry center init, server lists is: {}.", zookeeperConfig.getServerList()); + logger.info("zookeeper registry center init, server lists is: [{}]", zookeeperConfig.getServerList()); - CuratorFrameworkFactory.Builder builder = 
CuratorFrameworkFactory.builder().ensembleProvider(new DefaultEnsembleProvider(checkNotNull(zookeeperConfig.getServerList(),"zookeeper quorum can't be null"))) + CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder() + .ensembleProvider(new DefaultEnsembleProvider(checkNotNull(zookeeperConfig.getServerList(), "zookeeper quorum can't be null"))) .retryPolicy(new ExponentialBackoffRetry(zookeeperConfig.getBaseSleepTimeMs(), zookeeperConfig.getMaxRetries(), zookeeperConfig.getMaxSleepMs())); //these has default value @@ -84,7 +87,9 @@ public class CuratorZookeeperClient implements InitializingBean { zkClient = builder.build(); zkClient.start(); try { - zkClient.blockUntilConnected(); + logger.info("trying to connect zookeeper server list:{}", zookeeperConfig.getServerList()); + zkClient.blockUntilConnected(30, TimeUnit.SECONDS); + } catch (final Exception ex) { throw new RuntimeException(ex); } @@ -95,12 +100,14 @@ public class CuratorZookeeperClient implements InitializingBean { checkNotNull(zkClient); zkClient.getConnectionStateListenable().addListener((client, newState) -> { - if(newState == ConnectionState.LOST){ + if (newState == ConnectionState.LOST) { logger.error("connection lost from zookeeper"); - } else if(newState == ConnectionState.RECONNECTED){ + } else if (newState == ConnectionState.RECONNECTED) { logger.info("reconnected to zookeeper"); - } else if(newState == ConnectionState.SUSPENDED){ + } else if (newState == ConnectionState.SUSPENDED) { logger.warn("connection SUSPENDED to zookeeper"); + } else if (newState == ConnectionState.CONNECTED) { + logger.info("connected to zookeeper server list:[{}]", zookeeperConfig.getServerList()); } }); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java index 4fe941503e..6dfce79a3a 100644 --- 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java @@ -16,6 +16,8 @@ */ package org.apache.dolphinscheduler.service.zk; +import org.apache.dolphinscheduler.common.thread.ThreadUtils; + import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCache; @@ -39,14 +41,6 @@ public class ZookeeperCachedOperator extends ZookeeperOperator { */ @Override protected void registerListener() { - treeCache = new TreeCache(getZkClient(), getZookeeperConfig().getDsRoot() + "/nodes"); - logger.info("add listener to zk path: {}", getZookeeperConfig().getDsRoot()); - try { - treeCache.start(); - } catch (Exception e) { - logger.error("add listener to zk path: {} failed", getZookeeperConfig().getDsRoot()); - throw new RuntimeException(e); - } treeCache.getListenable().addListener((client, event) -> { String path = null == event.getData() ? 
"" : event.getData().getPath(); @@ -55,7 +49,18 @@ public class ZookeeperCachedOperator extends ZookeeperOperator { } dataChanged(client, event, path); }); + } + @Override + protected void treeCacheStart() { + treeCache = new TreeCache(zkClient, getZookeeperConfig().getDsRoot() + "/nodes"); + logger.info("add listener to zk path: {}", getZookeeperConfig().getDsRoot()); + try { + treeCache.start(); + } catch (Exception e) { + logger.error("add listener to zk path: {} failed", getZookeeperConfig().getDsRoot()); + throw new RuntimeException(e); + } } //for sub class @@ -80,11 +85,7 @@ public class ZookeeperCachedOperator extends ZookeeperOperator { @Override public void close() { treeCache.close(); - try { - Thread.sleep(500); - } catch (InterruptedException ignore) { - Thread.currentThread().interrupt(); - } + ThreadUtils.sleep(500); super.close(); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java index 7aeb7289c4..e7b049f8bf 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java @@ -50,11 +50,15 @@ public class ZookeeperOperator implements InitializingBean { private final Logger logger = LoggerFactory.getLogger(ZookeeperOperator.class); @Autowired - private CuratorZookeeperClient zookeeperClient; + private ZookeeperConfig zookeeperConfig; + + protected CuratorFramework zkClient; @Override public void afterPropertiesSet() throws Exception { - registerListener(); + this.zkClient = buildClient(); + initStateLister(); + treeCacheStart(); } /** @@ -62,9 +66,62 @@ public class ZookeeperOperator implements InitializingBean { */ protected void registerListener(){} + protected void treeCacheStart(){} + + public void initStateLister() { + 
checkNotNull(zkClient); + + zkClient.getConnectionStateListenable().addListener((client, newState) -> { + if(newState == ConnectionState.LOST){ + logger.error("connection lost from zookeeper"); + } else if(newState == ConnectionState.RECONNECTED){ + logger.info("reconnected to zookeeper"); + } else if(newState == ConnectionState.SUSPENDED){ + logger.warn("connection SUSPENDED to zookeeper"); + } + }); + } + + private CuratorFramework buildClient() { + logger.info("zookeeper registry center init, server lists is: {}.", zookeeperConfig.getServerList()); + + CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder().ensembleProvider(new DefaultEnsembleProvider(checkNotNull(zookeeperConfig.getServerList(),"zookeeper quorum can't be null"))) + .retryPolicy(new ExponentialBackoffRetry(zookeeperConfig.getBaseSleepTimeMs(), zookeeperConfig.getMaxRetries(), zookeeperConfig.getMaxSleepMs())); + + //these has default value + if (0 != zookeeperConfig.getSessionTimeoutMs()) { + builder.sessionTimeoutMs(zookeeperConfig.getSessionTimeoutMs()); + } + if (0 != zookeeperConfig.getConnectionTimeoutMs()) { + builder.connectionTimeoutMs(zookeeperConfig.getConnectionTimeoutMs()); + } + if (StringUtils.isNotBlank(zookeeperConfig.getDigest())) { + builder.authorization("digest", zookeeperConfig.getDigest().getBytes(StandardCharsets.UTF_8)).aclProvider(new ACLProvider() { + + @Override + public List getDefaultAcl() { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + + @Override + public List getAclForPath(final String path) { + return ZooDefs.Ids.CREATOR_ALL_ACL; + } + }); + } + zkClient = builder.build(); + zkClient.start(); + try { + zkClient.blockUntilConnected(); + } catch (final Exception ex) { + throw new RuntimeException(ex); + } + return zkClient; + } + public String get(final String key) { try { - return new String(zookeeperClient.getZkClient().getData().forPath(key), StandardCharsets.UTF_8); + return new String(zkClient.getData().forPath(key), StandardCharsets.UTF_8); 
} catch (Exception ex) { logger.error("get key : {}", key, ex); } @@ -74,7 +131,7 @@ public class ZookeeperOperator implements InitializingBean { public List getChildrenKeys(final String key) { List values; try { - values = zookeeperClient.getZkClient().getChildren().forPath(key); + values = zkClient.getChildren().forPath(key); return values; } catch (InterruptedException ex) { logger.error("getChildrenKeys key : {} InterruptedException", key); @@ -88,7 +145,7 @@ public class ZookeeperOperator implements InitializingBean { public boolean hasChildren(final String key){ Stat stat ; try { - stat = zookeeperClient.getZkClient().checkExists().forPath(key); + stat = zkClient.checkExists().forPath(key); return stat.getNumChildren() >= 1; } catch (Exception ex) { throw new IllegalStateException(ex); @@ -97,7 +154,7 @@ public class ZookeeperOperator implements InitializingBean { public boolean isExisted(final String key) { try { - return zookeeperClient.getZkClient().checkExists().forPath(key) != null; + return zkClient.checkExists().forPath(key) != null; } catch (Exception ex) { logger.error("isExisted key : {}", key, ex); } @@ -107,7 +164,7 @@ public class ZookeeperOperator implements InitializingBean { public void persist(final String key, final String value) { try { if (!isExisted(key)) { - zookeeperClient.getZkClient().create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + zkClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(key, value.getBytes(StandardCharsets.UTF_8)); } else { update(key, value); } @@ -118,11 +175,7 @@ public class ZookeeperOperator implements InitializingBean { public void update(final String key, final String value) { try { - - CuratorOp check = zookeeperClient.getZkClient().transactionOp().check().forPath(key); - CuratorOp setData = zookeeperClient.getZkClient().transactionOp().setData().forPath(key, value.getBytes(StandardCharsets.UTF_8)); - 
zookeeperClient.getZkClient().transaction().forOperations(check, setData); - + zkClient.inTransaction().check().forPath(key).and().setData().forPath(key, value.getBytes(StandardCharsets.UTF_8)).and().commit(); } catch (Exception ex) { logger.error("update key : {} , value : {}", key, value, ex); } @@ -132,12 +185,12 @@ public class ZookeeperOperator implements InitializingBean { try { if (isExisted(key)) { try { - zookeeperClient.getZkClient().delete().deletingChildrenIfNeeded().forPath(key); + zkClient.delete().deletingChildrenIfNeeded().forPath(key); } catch (KeeperException.NoNodeException ignore) { //NOP } } - zookeeperClient.getZkClient().create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + zkClient.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); } catch (final Exception ex) { logger.error("persistEphemeral key : {} , value : {}", key, value, ex); } @@ -149,7 +202,7 @@ public class ZookeeperOperator implements InitializingBean { persistEphemeral(key, value); } else { if (!isExisted(key)) { - zookeeperClient.getZkClient().create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + zkClient.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); } } } catch (final Exception ex) { @@ -159,7 +212,7 @@ public class ZookeeperOperator implements InitializingBean { public void persistEphemeralSequential(final String key, String value) { try { - zookeeperClient.getZkClient().create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); + zkClient.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).forPath(key, value.getBytes(StandardCharsets.UTF_8)); } catch (final Exception ex) { 
logger.error("persistEphemeralSequential key : {}", key, ex); } @@ -168,7 +221,7 @@ public class ZookeeperOperator implements InitializingBean { public void remove(final String key) { try { if (isExisted(key)) { - zookeeperClient.getZkClient().delete().deletingChildrenIfNeeded().forPath(key); + zkClient.delete().deletingChildrenIfNeeded().forPath(key); } } catch (KeeperException.NoNodeException ignore) { //NOP @@ -178,14 +231,14 @@ public class ZookeeperOperator implements InitializingBean { } public CuratorFramework getZkClient() { - return zookeeperClient.getZkClient(); + return zkClient; } public ZookeeperConfig getZookeeperConfig() { - return zookeeperClient.getZookeeperConfig(); + return zookeeperConfig; } public void close() { - CloseableUtils.closeQuietly(zookeeperClient.getZkClient()); + CloseableUtils.closeQuietly(zkClient); } } \ No newline at end of file diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java index 74b52bb316..4ac91f017c 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java @@ -17,30 +17,75 @@ package org.apache.dolphinscheduler.service.process; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; +import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID; + import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; +import org.apache.dolphinscheduler.common.enums.Flag; import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import 
org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; +import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.dao.mapper.UserMapper; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtilsTest; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.JsonNode; /** * process service test */ +@RunWith(MockitoJUnitRunner.class) public class ProcessServiceTest { + private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class); + + @InjectMocks + private ProcessService processService; + + + @Mock + private CommandMapper commandMapper; + + + @Mock + private ErrorCommandMapper errorCommandMapper; + + @Mock + private ProcessDefinitionMapper processDefineMapper; + @Mock + private ProcessInstanceMapper processInstanceMapper; + @Mock + private UserMapper userMapper; + @Mock + TaskInstanceMapper taskInstanceMapper; + @Test public void testCreateSubCommand() { ProcessService processService = new ProcessService(); @@ -89,7 +134,7 @@ public class ProcessServiceTest { 
String endString = "2020-01-10 00:00:00"; parentInstance.setCommandType(CommandType.START_FAILURE_TASK_PROCESS); parentInstance.setHistoryCmd("COMPLEMENT_DATA,START_FAILURE_TASK_PROCESS"); - Map complementMap = new HashMap<>(); + Map complementMap = new HashMap<>(); complementMap.put(Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE, startString); complementMap.put(Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE, endString); parentInstance.setCommandParam(JSONUtils.toJsonString(complementMap)); @@ -113,4 +158,168 @@ public class ProcessServiceTest { ); Assert.assertEquals(CommandType.START_FAILURE_TASK_PROCESS, command.getCommandType()); } + + @Test + public void testVerifyIsNeedCreateCommand() { + + List commands = new ArrayList<>(); + + Command command = new Command(); + command.setCommandType(CommandType.REPEAT_RUNNING); + command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"111\"}"); + commands.add(command); + Mockito.when(commandMapper.selectList(null)).thenReturn(commands); + Assert.assertFalse(processService.verifyIsNeedCreateCommand(command)); + + Command command1 = new Command(); + command1.setCommandType(CommandType.REPEAT_RUNNING); + command1.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"222\"}"); + Assert.assertTrue(processService.verifyIsNeedCreateCommand(command1)); + + Command command2 = new Command(); + command2.setCommandType(CommandType.PAUSE); + Assert.assertTrue(processService.verifyIsNeedCreateCommand(command2)); + } + + @Test + public void testCreateRecoveryWaitingThreadCommand() { + + int id = 123; + Mockito.when(commandMapper.deleteById(id)).thenReturn(1); + ProcessInstance subProcessInstance = new ProcessInstance(); + subProcessInstance.setIsSubProcess(Flag.YES); + Command originCommand = new Command(); + originCommand.setId(id); + processService.createRecoveryWaitingThreadCommand(originCommand, subProcessInstance); + + ProcessInstance processInstance = new ProcessInstance(); + 
processInstance.setId(111); + processService.createRecoveryWaitingThreadCommand(null, subProcessInstance); + + Command recoverCommand = new Command(); + recoverCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD); + processService.createRecoveryWaitingThreadCommand(recoverCommand, subProcessInstance); + + Command repeatRunningCommand = new Command(); + recoverCommand.setCommandType(CommandType.REPEAT_RUNNING); + processService.createRecoveryWaitingThreadCommand(repeatRunningCommand, subProcessInstance); + + ProcessInstance subProcessInstance2 = new ProcessInstance(); + subProcessInstance2.setId(111); + subProcessInstance2.setIsSubProcess(Flag.NO); + processService.createRecoveryWaitingThreadCommand(repeatRunningCommand, subProcessInstance2); + + } + + @Test + public void testHandleCommand() { + + //cannot construct process instance, return null; + String host = "127.0.0.1"; + int validThreadNum = 1; + Command command = new Command(); + command.setProcessDefinitionId(222); + command.setCommandType(CommandType.REPEAT_RUNNING); + command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"111\",\"" + + CMD_PARAM_SUB_PROCESS_DEFINE_ID + "\":\"222\"}"); + Mockito.when(processDefineMapper.selectById(command.getProcessDefinitionId())).thenReturn(null); + Assert.assertNull(processService.handleCommand(logger, host, validThreadNum, command)); + + //there is not enough thread for this command + Command command1 = new Command(); + command1.setProcessDefinitionId(123); + command1.setCommandParam("{\"ProcessInstanceId\":222}"); + command1.setCommandType(CommandType.START_PROCESS); + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(123); + processDefinition.setName("test"); + processDefinition.setVersion(1); + processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":" + + "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{}" + + 
",\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\"" + + ",\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}" + + ",\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\"" + + ",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\"" + + ",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}"); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(222); + Mockito.when(processDefineMapper.selectById(command1.getProcessDefinitionId())).thenReturn(processDefinition); + Mockito.when(processInstanceMapper.queryDetailById(222)).thenReturn(processInstance); + Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command1)); + + Command command2 = new Command(); + command2.setCommandParam("{\"ProcessInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}"); + command2.setProcessDefinitionId(123); + command2.setCommandType(CommandType.RECOVER_SUSPENDED_PROCESS); + + Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command2)); + + Command command3 = new Command(); + command3.setProcessDefinitionId(123); + command3.setCommandParam("{\"WaitingThreadInstanceId\":222}"); + command3.setCommandType(CommandType.START_FAILURE_TASK_PROCESS); + Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command3)); + + Command command4 = new Command(); + command4.setProcessDefinitionId(123); + command4.setCommandParam("{\"WaitingThreadInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}"); + command4.setCommandType(CommandType.REPEAT_RUNNING); + Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command4)); + } + + @Test + public void testGetUserById() { + User user = new User(); + user.setId(123); + Mockito.when(userMapper.selectById(123)).thenReturn(user); + Assert.assertEquals(user, 
processService.getUserById(123)); + } + + @Test + public void testFormatTaskAppId() { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(333); + taskInstance.setProcessDefinitionId(111); + taskInstance.setProcessInstanceId(222); + Mockito.when(processService.findProcessDefineById(taskInstance.getProcessDefinitionId())).thenReturn(null); + Mockito.when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())).thenReturn(null); + Assert.assertEquals("", processService.formatTaskAppId(taskInstance)); + + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(111); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(222); + Mockito.when(processService.findProcessDefineById(taskInstance.getProcessDefinitionId())).thenReturn(processDefinition); + Mockito.when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())).thenReturn(processInstance); + Assert.assertEquals("111_222_333", processService.formatTaskAppId(taskInstance)); + + } + + @Test + public void testRecurseFindSubProcessId() { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":" + + "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\"" + + ",\"dependence\":{},\"description\":\"\",\"id\":\"tasks-76544\"" + + ",\"maxRetryTimes\":\"0\",\"name\":\"test\",\"params\":{\"localParams\":[]," + + "\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[],\"processDefinitionId\"" + + ":\"222\"},\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\"," + + "\"taskInstancePriority\":\"MEDIUM\",\"timeout\":{\"enable\":false,\"interval\":" + + "null,\"strategy\":\"\"},\"type\":\"SHELL\",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}]," + + "\"tenantId\":4,\"timeout\":0}"); + int parentId = 111; + List ids = new ArrayList<>(); + ProcessDefinition processDefinition2 = 
new ProcessDefinition(); + processDefinition2.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\"" + + ":{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{}," + + "\"description\":\"\",\"id\":\"tasks-76544\",\"maxRetryTimes\":\"0\",\"name\":\"test\"," + + "\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}," + + "\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":" + + "\"MEDIUM\",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":" + + "\"SHELL\",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}"); + Mockito.when(processDefineMapper.selectById(parentId)).thenReturn(processDefinition); + Mockito.when(processDefineMapper.selectById(222)).thenReturn(processDefinition2); + processService.recurseFindSubProcessId(parentId, ids); + + } } diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClientTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClientTest.java index c0297799ea..b1c2ec5e25 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClientTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/zk/CuratorZookeeperClientTest.java @@ -59,9 +59,8 @@ public class CuratorZookeeperClientTest { zookeeperConfig.setDsRoot("/dolphinscheduler"); zookeeperConfig.setMaxWaitTime(30000); zookeeperClient.setZookeeperConfig(zookeeperConfig); - System.out.println("start"); zookeeperClient.afterPropertiesSet(); - System.out.println("end"); + Assert.assertNotNull(zookeeperClient.getZkClient()); } } \ No newline at end of file diff --git a/dolphinscheduler-service/src/test/java/queue/PeerTaskInstancePriorityQueueTest.java 
b/dolphinscheduler-service/src/test/java/queue/PeerTaskInstancePriorityQueueTest.java new file mode 100644 index 0000000000..cf39d57b8b --- /dev/null +++ b/dolphinscheduler-service/src/test/java/queue/PeerTaskInstancePriorityQueueTest.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package queue; + +import org.apache.dolphinscheduler.common.enums.Priority; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.queue.PeerTaskInstancePriorityQueue; + +import org.junit.Assert; +import org.junit.Test; + +/** + * Task instances priority queue implementation + * All the task instances are in the same process instance. 
+ */ +public class PeerTaskInstancePriorityQueueTest { + + @Test + public void testPut() throws Exception { + PeerTaskInstancePriorityQueue queue = new PeerTaskInstancePriorityQueue(); + TaskInstance taskInstanceHigPriority = createTaskInstance("high", Priority.HIGH); + TaskInstance taskInstanceMediumPriority = createTaskInstance("high", Priority.MEDIUM); + queue.put(taskInstanceHigPriority); + queue.put(taskInstanceMediumPriority); + Assert.assertEquals(2,queue.size()); + } + + @Test + public void testPeek() throws Exception { + PeerTaskInstancePriorityQueue queue = getPeerTaskInstancePriorityQueue(); + int peekBeforeLength = queue.size(); + queue.peek(); + Assert.assertEquals(peekBeforeLength,queue.size()); + + } + + @Test + public void testTake() throws Exception { + PeerTaskInstancePriorityQueue queue = getPeerTaskInstancePriorityQueue(); + int peekBeforeLength = queue.size(); + queue.take(); + Assert.assertTrue(queue.size() < peekBeforeLength); + } + + /** + * get queue + * + * @return queue + * @throws Exception + */ + private PeerTaskInstancePriorityQueue getPeerTaskInstancePriorityQueue() throws Exception { + PeerTaskInstancePriorityQueue queue = new PeerTaskInstancePriorityQueue(); + TaskInstance taskInstanceHigPriority = createTaskInstance("high", Priority.HIGH); + TaskInstance taskInstanceMediumPriority = createTaskInstance("high", Priority.MEDIUM); + queue.put(taskInstanceHigPriority); + queue.put(taskInstanceMediumPriority); + return queue; + } + + /** + * create task instance + * + * @param name name + * @param priority priority + * @return + */ + private TaskInstance createTaskInstance(String name, Priority priority) { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setName(name); + taskInstance.setTaskInstancePriority(priority); + return taskInstance; + } + +} \ No newline at end of file diff --git a/dolphinscheduler-service/src/test/java/queue/TaskPriorityTest.java 
b/dolphinscheduler-service/src/test/java/queue/TaskPriorityTest.java new file mode 100644 index 0000000000..151177016f --- /dev/null +++ b/dolphinscheduler-service/src/test/java/queue/TaskPriorityTest.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package queue; + +import org.apache.dolphinscheduler.service.queue.TaskPriority; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.junit.Assert; +import org.junit.Test; + +public class TaskPriorityTest { + + @Test + public void testSort() { + TaskPriority priorityOne = new TaskPriority(1, 0, 0, 0, "default"); + TaskPriority priorityTwo = new TaskPriority(2, 0, 0, 0, "default"); + TaskPriority priorityThree = new TaskPriority(3, 0, 0, 0, "default"); + List taskPrioritys = Arrays.asList(priorityOne, priorityThree, priorityTwo); + Collections.sort(taskPrioritys); + Assert.assertEquals( + Arrays.asList(priorityOne, priorityTwo, priorityThree), + taskPrioritys + ); + + priorityOne = new TaskPriority(0, 1, 0, 0, "default"); + priorityTwo = new TaskPriority(0, 2, 0, 0, "default"); + priorityThree = new TaskPriority(0, 3, 0, 0, "default"); + taskPrioritys = Arrays.asList(priorityOne, priorityThree, priorityTwo); + Collections.sort(taskPrioritys); + Assert.assertEquals( + Arrays.asList(priorityOne, priorityTwo, priorityThree), + taskPrioritys + ); + + priorityOne = new TaskPriority(0, 0, 1, 0, "default"); + priorityTwo = new TaskPriority(0, 0, 2, 0, "default"); + priorityThree = new TaskPriority(0, 0, 3, 0, "default"); + taskPrioritys = Arrays.asList(priorityOne, priorityThree, priorityTwo); + Collections.sort(taskPrioritys); + Assert.assertEquals( + Arrays.asList(priorityOne, priorityTwo, priorityThree), + taskPrioritys + ); + + priorityOne = new TaskPriority(0, 0, 0, 1, "default"); + priorityTwo = new TaskPriority(0, 0, 0, 2, "default"); + priorityThree = new TaskPriority(0, 0, 0, 3, "default"); + taskPrioritys = Arrays.asList(priorityOne, priorityThree, priorityTwo); + Collections.sort(taskPrioritys); + Assert.assertEquals( + Arrays.asList(priorityOne, priorityTwo, priorityThree), + taskPrioritys + ); + + priorityOne = new TaskPriority(0, 0, 0, 0, "default_1"); + priorityTwo = new TaskPriority(0, 0, 0, 0, 
"default_2"); + priorityThree = new TaskPriority(0, 0, 0, 0, "default_3"); + taskPrioritys = Arrays.asList(priorityOne, priorityThree, priorityTwo); + Collections.sort(taskPrioritys); + Assert.assertEquals( + Arrays.asList(priorityOne, priorityTwo, priorityThree), + taskPrioritys + ); + } +} diff --git a/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java b/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java index ca6c083a67..2c13afa227 100644 --- a/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java +++ b/dolphinscheduler-service/src/test/java/queue/TaskUpdateQueueTest.java @@ -17,6 +17,7 @@ package queue; +import org.apache.dolphinscheduler.service.queue.TaskPriority; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue; import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl; import org.junit.Test; @@ -31,19 +32,16 @@ public class TaskUpdateQueueTest { @Test public void testQueue() throws Exception{ - // ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskId}_${groupName} - /** * 1_1_2_1_default * 1_1_2_2_default * 1_1_0_3_default * 1_1_0_4_default */ - - String taskInfo1 = "1_1_2_1_default"; - String taskInfo2 = "1_1_2_2_default"; - String taskInfo3 = "1_1_0_3_default"; - String taskInfo4 = "1_1_0_4_default"; + TaskPriority taskInfo1 = new TaskPriority(1, 1, 2, 1, "default"); + TaskPriority taskInfo2 = new TaskPriority(1, 1, 2, 2, "default"); + TaskPriority taskInfo3 = new TaskPriority(1, 1, 0, 3, "default"); + TaskPriority taskInfo4 = new TaskPriority(1, 1, 0, 4, "default"); TaskPriorityQueue queue = new TaskPriorityQueueImpl(); queue.put(taskInfo1); @@ -51,9 +49,9 @@ public class TaskUpdateQueueTest { queue.put(taskInfo3); queue.put(taskInfo4); - assertEquals("1_1_0_3_default", queue.take()); - assertEquals("1_1_0_4_default", queue.take()); - assertEquals("1_1_2_1_default",queue.take()); - assertEquals("1_1_2_2_default",queue.take()); + 
assertEquals(taskInfo3, queue.take()); + assertEquals(taskInfo4, queue.take()); + assertEquals(taskInfo1, queue.take()); + assertEquals(taskInfo2, queue.take()); } } diff --git a/dolphinscheduler-spi/pom.xml b/dolphinscheduler-spi/pom.xml index dd0bae81de..3f18c48b0e 100644 --- a/dolphinscheduler-spi/pom.xml +++ b/dolphinscheduler-spi/pom.xml @@ -20,7 +20,7 @@ org.apache.dolphinscheduler dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT dolphinscheduler-spi ${project.artifactId} diff --git a/pom.xml b/pom.xml index cd281ba988..c203384d74 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 org.apache.dolphinscheduler dolphinscheduler - 1.3.2-SNAPSHOT + 1.3.4-SNAPSHOT pom ${project.artifactId} http://dolphinscheduler.apache.org @@ -59,19 +59,19 @@ UTF-8 UTF-8 4.3.0 - 5.1.5.RELEASE - 2.1.3.RELEASE + 5.1.18.RELEASE + 2.1.17.RELEASE 1.8 1.2.3 2.7.3 - 2.2.3 - 2.9.8 + 2.3.0 + 2.9.10 3.2.0 2.0.1 5.0.5 1.1.22 1.4.200 - 1.6 + 1.11 1.1.1 4.4.1 4.4.1 @@ -118,6 +118,7 @@ 2.5 1.9.3 2.9.2 + 1.5.24 2.0.0 0.184 ${dep.airlift.version} @@ -549,6 +550,12 @@ ${springfox.version} + + io.swagger + swagger-models + ${swagger-models.version} + + com.github.xiaoymin swagger-bootstrap-ui @@ -771,6 +778,7 @@ **/api/controller/ProcessDefinitionControllerTest.java + **/api/controller/TenantControllerTest.java **/api/dto/resources/filter/ResourceFilterTest.java **/api/dto/resources/visitor/ResourceTreeVisitorTest.java **/api/enums/testGetEnum.java @@ -778,8 +786,10 @@ **/api/exceptions/ApiExceptionHandlerTest.java **/api/exceptions/ServiceExceptionTest.java **/api/interceptor/LoginHandlerInterceptorTest.java - **/api/security/PasswordAuthenticatorTest.java - **/api/security/SecurityConfigTest.java + **/api/security/impl/pwd/PasswordAuthenticatorTest.java + **/api/security/impl/ldap/LdapAuthenticatorTest.java + **/api/security/SecurityConfigLDAPTest.java + **/api/security/SecurityConfigPasswordTest.java **/api/service/AccessTokenServiceTest.java **/api/service/AlertGroupServiceTest.java 
**/api/service/BaseDAGServiceTest.java @@ -807,6 +817,7 @@ **/api/service/WorkerGroupServiceTest.java **/api/service/WorkFlowLineageServiceTest.java **/api/controller/ProcessDefinitionControllerTest.java + **/api/controller/TaskInstanceControllerTest.java **/api/controller/WorkFlowLineageControllerTest.java **/api/utils/exportprocess/DataSourceParamTest.java **/api/utils/exportprocess/DependentParamTest.java @@ -819,15 +830,13 @@ **/common/os/OshiTest.java **/common/os/OSUtilsTest.java **/common/shell/ShellExecutorTest.java + **/common/task/DataxParametersTest.java **/common/task/EntityTestUtils.java **/common/task/FlinkParametersTest.java **/common/task/HttpParametersTest.java **/common/task/SqoopParameterEntityTest.java **/common/threadutils/ThreadPoolExecutorsTest.java **/common/threadutils/ThreadUtilsTest.java - **/common/utils/process/ProcessBuilderForWin32Test.java - **/common/utils/process/ProcessEnvironmentForWin32Test.java - **/common/utils/process/ProcessImplForWin32Test.java **/common/utils/CollectionUtilsTest.java **/common/utils/CommonUtilsTest.java **/common/utils/DateUtilsTest.java @@ -839,6 +848,7 @@ **/common/utils/LoggerUtilsTest.java **/common/utils/OSUtilsTest.java **/common/utils/ParameterUtilsTest.java + **/common/utils/TimePlaceholderUtilsTest.java **/common/utils/PreconditionsTest.java **/common/utils/PropertyUtilsTest.java **/common/utils/SchemaUtilsTest.java @@ -894,11 +904,14 @@ **/server/master/MasterExecThreadTest.java **/server/master/ParamsTest.java **/server/master/SubProcessTaskTest.java + **/server/master/processor/TaskAckProcessorTest.java + **/server/master/processor/TaskKillResponseProcessorTest.java + **/server/master/processor/queue/TaskResponseServiceTest.java **/server/register/ZookeeperNodeManagerTest.java **/server/utils/DataxUtilsTest.java **/server/utils/ExecutionContextTestUtils.java **/server/utils/HostTest.java - + **/server/utils/FlinkArgsUtilsTest.java **/server/utils/LogUtilsTest.java 
**/server/utils/ParamUtilsTest.java **/server/utils/ProcessUtilsTest.java @@ -914,6 +927,7 @@ **/server/worker/task/sqoop/SqoopTaskTest.java + **/server/worker/task/shell/ShellTaskTest.java **/server/worker/task/TaskManagerTest.java **/server/worker/EnvFileTest.java **/server/worker/runner/TaskExecuteThreadTest.java @@ -923,6 +937,8 @@ **/service/zk/ZKServerTest.java **/service/zk/CuratorZookeeperClientTest.java **/service/queue/TaskUpdateQueueTest.java + **/service/queue/PeerTaskInstancePriorityQueueTest.java + **/service/alert/AlertClientServiceTest.java **/dao/mapper/DataSourceUserMapperTest.java @@ -1045,7 +1061,7 @@ **/*.txt **/docs/** **/*.babelrc - **/*.eslintrc + **/*.eslint* **/.mvn/jvm.config **/.mvn/wrapper/** **/*.iml diff --git a/script/dolphinscheduler-daemon.sh b/script/dolphinscheduler-daemon.sh index 26c751f6e5..5e7652199e 100644 --- a/script/dolphinscheduler-daemon.sh +++ b/script/dolphinscheduler-daemon.sh @@ -16,7 +16,7 @@ # limitations under the License. # -usage="Usage: dolphinscheduler-daemon.sh (start|stop|status) " +usage="Usage: dolphinscheduler-daemon.sh (start|stop|status) " # if no args specified, show usage if [ $# -le 1 ]; then @@ -45,7 +45,6 @@ export DOLPHINSCHEDULER_LOG_DIR=$DOLPHINSCHEDULER_HOME/logs export DOLPHINSCHEDULER_CONF_DIR=$DOLPHINSCHEDULER_HOME/conf export DOLPHINSCHEDULER_LIB_JARS=$DOLPHINSCHEDULER_HOME/lib/* -export DOLPHINSCHEDULER_OPTS=${DOLPHINSCHEDULER_OPTS:-"-server -Xmx16g -Xms1g -Xss512k -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=10m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70"} export STOP_TIMEOUT=5 if [ ! 
-d "$DOLPHINSCHEDULER_LOG_DIR" ]; then @@ -58,18 +57,33 @@ pid=$DOLPHINSCHEDULER_PID_DIR/dolphinscheduler-$command.pid cd $DOLPHINSCHEDULER_HOME if [ "$command" = "api-server" ]; then + HEAP_INITIAL_SIZE=1g + HEAP_MAX_SIZE=1g + HEAP_NEW_GENERATION__SIZE=500m LOG_FILE="-Dlogging.config=classpath:logback-api.xml -Dspring.profiles.active=api" CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer elif [ "$command" = "master-server" ]; then + HEAP_INITIAL_SIZE=4g + HEAP_MAX_SIZE=4g + HEAP_NEW_GENERATION__SIZE=2g LOG_FILE="-Dlogging.config=classpath:logback-master.xml -Ddruid.mysql.usePingMethod=false" CLASS=org.apache.dolphinscheduler.server.master.MasterServer elif [ "$command" = "worker-server" ]; then + HEAP_INITIAL_SIZE=2g + HEAP_MAX_SIZE=2g + HEAP_NEW_GENERATION__SIZE=1g LOG_FILE="-Dlogging.config=classpath:logback-worker.xml -Ddruid.mysql.usePingMethod=false" CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer elif [ "$command" = "alert-server" ]; then + HEAP_INITIAL_SIZE=1g + HEAP_MAX_SIZE=1g + HEAP_NEW_GENERATION__SIZE=500m LOG_FILE="-Dlogback.configurationFile=conf/logback-alert.xml" CLASS=org.apache.dolphinscheduler.alert.AlertServer elif [ "$command" = "logger-server" ]; then + HEAP_INITIAL_SIZE=1g + HEAP_MAX_SIZE=1g + HEAP_NEW_GENERATION__SIZE=500m CLASS=org.apache.dolphinscheduler.server.log.LoggerServer elif [ "$command" = "zookeeper-server" ]; then #note: this command just for getting a quick experience,not recommended for production. 
this operation will start a standalone zookeeper server @@ -80,6 +94,8 @@ else exit 1 fi +export DOLPHINSCHEDULER_OPTS="-server -Xms$HEAP_INITIAL_SIZE -Xmx$HEAP_MAX_SIZE -Xmn$HEAP_NEW_GENERATION__SIZE -XX:MetaspaceSize=128m -XX:MaxMetaspaceSize=128m -Xss512k -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70 -XX:+PrintGCDetails -Xloggc:gc.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=dump.hprof $DOLPHINSCHEDULER_OPTS" + case $startStop in (start) [ -w "$DOLPHINSCHEDULER_PID_DIR" ] || mkdir -p "$DOLPHINSCHEDULER_PID_DIR" @@ -141,3 +157,5 @@ case $startStop in ;; esac + +echo "End $startStop $command." \ No newline at end of file diff --git a/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql b/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql index ac0851aa65..86b474d2b6 100644 --- a/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql +++ b/sql/create/release-1.0.0_schema/mysql/dolphinscheduler_ddl.sql @@ -381,7 +381,6 @@ DROP TABLE IF EXISTS `t_escheduler_tenant`; CREATE TABLE `t_escheduler_tenant` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', `tenant_code` varchar(64) DEFAULT NULL COMMENT 'tenant code', - `tenant_name` varchar(64) DEFAULT NULL COMMENT 'tenant name', `desc` varchar(256) DEFAULT NULL COMMENT 'description', `queue_id` int(11) DEFAULT NULL COMMENT 'queue id', `create_time` datetime DEFAULT NULL COMMENT 'create time', diff --git a/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql b/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql index 75a5d56e77..14d99e10fb 100644 --- a/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql +++ b/sql/create/release-1.2.0_schema/postgresql/dolphinscheduler_ddl.sql @@ -584,7 +584,6 @@ DROP TABLE IF EXISTS t_ds_tenant; CREATE TABLE t_ds_tenant ( id int NOT NULL , tenant_code varchar(64) DEFAULT NULL , 
- tenant_name varchar(64) DEFAULT NULL , description varchar(256) DEFAULT NULL , queue_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , diff --git a/sql/dolphinscheduler-postgre.sql b/sql/dolphinscheduler-postgre.sql index 078ff7e14c..8347d98f3d 100644 --- a/sql/dolphinscheduler-postgre.sql +++ b/sql/dolphinscheduler-postgre.sql @@ -588,7 +588,6 @@ DROP TABLE IF EXISTS t_ds_tenant; CREATE TABLE t_ds_tenant ( id int NOT NULL , tenant_code varchar(64) DEFAULT NULL , - tenant_name varchar(64) DEFAULT NULL , description varchar(256) DEFAULT NULL , queue_id int DEFAULT NULL , create_time timestamp DEFAULT NULL , diff --git a/sql/dolphinscheduler_mysql.sql b/sql/dolphinscheduler_mysql.sql index 986df1f668..8ac20a2dd2 100644 --- a/sql/dolphinscheduler_mysql.sql +++ b/sql/dolphinscheduler_mysql.sql @@ -29,7 +29,7 @@ CREATE TABLE `QRTZ_BLOB_TRIGGERS` ( PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `SCHED_NAME` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_BLOB_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_BLOB_TRIGGERS @@ -44,7 +44,7 @@ CREATE TABLE `QRTZ_CALENDARS` ( `CALENDAR_NAME` varchar(200) NOT NULL, `CALENDAR` blob NOT NULL, PRIMARY KEY (`SCHED_NAME`,`CALENDAR_NAME`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CALENDARS @@ -62,7 +62,7 @@ CREATE TABLE `QRTZ_CRON_TRIGGERS` ( `TIME_ZONE_ID` varchar(80) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_CRON_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB 
DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_CRON_TRIGGERS @@ -93,7 +93,7 @@ CREATE TABLE `QRTZ_FIRED_TRIGGERS` ( KEY `IDX_QRTZ_FT_JG` (`SCHED_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_FT_T_G` (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), KEY `IDX_QRTZ_FT_TG` (`SCHED_NAME`,`TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_FIRED_TRIGGERS @@ -117,7 +117,7 @@ CREATE TABLE `QRTZ_JOB_DETAILS` ( PRIMARY KEY (`SCHED_NAME`,`JOB_NAME`,`JOB_GROUP`), KEY `IDX_QRTZ_J_REQ_RECOVERY` (`SCHED_NAME`,`REQUESTS_RECOVERY`), KEY `IDX_QRTZ_J_GRP` (`SCHED_NAME`,`JOB_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_JOB_DETAILS @@ -131,7 +131,7 @@ CREATE TABLE `QRTZ_LOCKS` ( `SCHED_NAME` varchar(120) NOT NULL, `LOCK_NAME` varchar(40) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`LOCK_NAME`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_LOCKS @@ -145,7 +145,7 @@ CREATE TABLE `QRTZ_PAUSED_TRIGGER_GRPS` ( `SCHED_NAME` varchar(120) NOT NULL, `TRIGGER_GROUP` varchar(200) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_PAUSED_TRIGGER_GRPS @@ -161,7 +161,7 @@ CREATE TABLE `QRTZ_SCHEDULER_STATE` ( `LAST_CHECKIN_TIME` bigint(13) NOT NULL, `CHECKIN_INTERVAL` bigint(13) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`INSTANCE_NAME`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SCHEDULER_STATE @@ -180,7 +180,7 @@ CREATE TABLE `QRTZ_SIMPLE_TRIGGERS` ( `TIMES_TRIGGERED` bigint(10) NOT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPLE_TRIGGERS_ibfk_1` FOREIGN KEY 
(`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPLE_TRIGGERS @@ -207,7 +207,7 @@ CREATE TABLE `QRTZ_SIMPROP_TRIGGERS` ( `BOOL_PROP_2` varchar(1) DEFAULT NULL, PRIMARY KEY (`SCHED_NAME`,`TRIGGER_NAME`,`TRIGGER_GROUP`), CONSTRAINT `QRTZ_SIMPROP_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) REFERENCES `QRTZ_TRIGGERS` (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_SIMPROP_TRIGGERS @@ -248,7 +248,7 @@ CREATE TABLE `QRTZ_TRIGGERS` ( KEY `IDX_QRTZ_T_NFT_ST_MISFIRE` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_STATE`), KEY `IDX_QRTZ_T_NFT_ST_MISFIRE_GRP` (`SCHED_NAME`,`MISFIRE_INSTR`,`NEXT_FIRE_TIME`,`TRIGGER_GROUP`,`TRIGGER_STATE`), CONSTRAINT `QRTZ_TRIGGERS_ibfk_1` FOREIGN KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) REFERENCES `QRTZ_JOB_DETAILS` (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of QRTZ_TRIGGERS @@ -702,7 +702,7 @@ CREATE TABLE `t_ds_task_instance` ( `alert_flag` tinyint(4) DEFAULT NULL COMMENT 'whether alert', `retry_times` int(4) DEFAULT '0' COMMENT 'task retry times', `pid` int(4) DEFAULT NULL COMMENT 'pid of task', - `app_link` varchar(255) DEFAULT NULL COMMENT 'yarn app id', + `app_link` text COMMENT 'yarn app id', `flag` tinyint(4) DEFAULT '1' COMMENT '0 not available, 1 available', `retry_interval` int(4) DEFAULT NULL COMMENT 'retry interval when task failed ', `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', @@ -729,7 +729,6 @@ DROP TABLE IF EXISTS `t_ds_tenant`; CREATE TABLE `t_ds_tenant` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'key', 
`tenant_code` varchar(64) DEFAULT NULL COMMENT 'tenant code', - `tenant_name` varchar(64) DEFAULT NULL COMMENT 'tenant name', `description` varchar(256) DEFAULT NULL, `queue_id` int(11) DEFAULT NULL COMMENT 'queue id', `create_time` datetime DEFAULT NULL COMMENT 'create time', diff --git a/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql b/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000..4a14f326b9 --- /dev/null +++ b/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ diff --git a/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql b/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000..383d8a4407 --- /dev/null +++ b/sql/upgrade/1.3.2_schema/mysql/dolphinscheduler_dml.sql @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ +SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); +SET FOREIGN_KEY_CHECKS=0; +UPDATE t_ds_user SET phone = '' WHERE phone = 'xx'; \ No newline at end of file diff --git a/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql b/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql new file mode 100644 index 0000000000..4a14f326b9 --- /dev/null +++ b/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_ddl.sql @@ -0,0 +1,16 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ diff --git a/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql b/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql new file mode 100644 index 0000000000..bf043ad469 --- /dev/null +++ b/sql/upgrade/1.3.2_schema/postgresql/dolphinscheduler_dml.sql @@ -0,0 +1,17 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ +UPDATE t_ds_user SET phone = '' WHERE phone = 'xx'; \ No newline at end of file diff --git a/tools/dependencies/known-dependencies.txt b/tools/dependencies/known-dependencies.txt index 9cb67dd505..1b0f880bc5 100755 --- a/tools/dependencies/known-dependencies.txt +++ b/tools/dependencies/known-dependencies.txt @@ -2,27 +2,27 @@ HikariCP-3.2.0.jar animal-sniffer-annotations-1.14.jar ant-1.6.5.jar aopalliance-1.0.jar -apache-el-8.5.35.1.jar +apache-el-8.5.54.jar apacheds-i18n-2.0.0-M15.jar apacheds-kerberos-codec-2.0.0-M15.jar api-asn1-api-1.0.0-M20.jar api-util-1.0.0-M20.jar asm-3.1.jar -aspectjweaver-1.9.2.jar +aspectjweaver-1.9.6.jar audience-annotations-0.5.0.jar avro-1.7.4.jar aws-java-sdk-1.7.4.jar bonecp-0.8.0.RELEASE.jar -byte-buddy-1.9.10.jar +byte-buddy-1.9.16.jar checker-compat-qual-2.0.0.jar classmate-1.4.0.jar clickhouse-jdbc-0.1.52.jar commons-cli-1.2.jar -commons-codec-1.6.jar +commons-codec-1.11.jar commons-collections-3.2.2.jar commons-collections4-4.1.jar commons-compress-1.4.1.jar -commons-compiler-3.0.12.jar +commons-compiler-3.0.16.jar commons-configuration-1.10.jar commons-daemon-1.0.13.jar commons-beanutils-1.7.0.jar @@ -45,7 +45,7 @@ datanucleus-rdbms-4.1.7.jar derby-10.14.2.0.jar error_prone_annotations-2.1.3.jar druid-1.1.22.jar -gson-2.8.5.jar +gson-2.8.6.jar guava-24.1-jre.jar guava-retrying-2.0.0.jar guice-3.0.jar @@ -67,7 +67,7 @@ hadoop-yarn-client-2.7.3.jar hadoop-yarn-common-2.7.3.jar hadoop-yarn-server-common-2.7.3.jar hamcrest-core-1.3.jar -hibernate-validator-6.0.14.Final.jar +hibernate-validator-6.0.20.Final.jar hive-common-2.1.0.jar hive-jdbc-2.1.0.jar hive-metastore-2.1.0.jar @@ -79,20 +79,20 @@ hive-storage-api-2.1.0.jar htrace-core-3.1.0-incubating.jar httpclient-4.4.1.jar httpcore-4.4.1.jar -httpmime-4.5.7.jar +httpmime-4.5.12.jar j2objc-annotations-1.1.jar -jackson-annotations-2.9.8.jar -jackson-core-2.9.8.jar +jackson-annotations-2.9.10.jar +jackson-core-2.9.10.jar jackson-core-asl-1.9.13.jar -jackson-databind-2.9.8.jar 
-jackson-datatype-jdk8-2.9.8.jar -jackson-datatype-jsr310-2.9.8.jar +jackson-databind-2.9.10.jar +jackson-datatype-jdk8-2.9.10.jar +jackson-datatype-jsr310-2.9.10.jar jackson-jaxrs-1.9.13.jar jackson-mapper-asl-1.9.13.jar -jackson-module-parameter-names-2.9.8.jar +jackson-module-parameter-names-2.9.10.jar jackson-xc-1.9.13.jar jamon-runtime-2.3.1.jar -janino-3.0.12.jar +janino-3.0.16.jar java-xmlbuilder-0.4.jar javax.activation-api-1.2.0.jar javax.annotation-api-1.3.2.jar @@ -102,7 +102,7 @@ javax.servlet-api-3.1.0.jar javolution-5.5.1.jar jaxb-api-2.3.1.jar jaxb-impl-2.2.3-1.jar -jboss-logging-3.3.2.Final.jar +jboss-logging-3.3.3.Final.jar jdo-api-3.0.1.jar jersey-client-1.9.jar jersey-core-1.9.jar @@ -112,21 +112,21 @@ jersey-server-1.9.jar jets3t-0.9.0.jar jettison-1.1.jar jetty-6.1.26.jar -jetty-continuation-9.4.14.v20181114.jar -jetty-http-9.4.14.v20181114.jar -jetty-io-9.4.14.v20181114.jar -jetty-security-9.4.14.v20181114.jar -jetty-server-9.4.14.v20181114.jar -jetty-servlet-9.4.14.v20181114.jar -jetty-servlets-9.4.14.v20181114.jar +jetty-continuation-9.4.31.v20200723.jar +jetty-http-9.4.31.v20200723.jar +jetty-io-9.4.31.v20200723.jar +jetty-security-9.4.31.v20200723.jar +jetty-server-9.4.31.v20200723.jar +jetty-servlet-9.4.31.v20200723.jar +jetty-servlets-9.4.31.v20200723.jar jetty-util-6.1.26.jar -jetty-util-9.4.14.v20181114.jar -jetty-webapp-9.4.14.v20181114.jar -jetty-xml-9.4.14.v20181114.jar +jetty-util-9.4.31.v20200723.jar +jetty-webapp-9.4.31.v20200723.jar +jetty-xml-9.4.31.v20200723.jar jline-0.9.94.jar jna-4.5.2.jar jna-platform-4.5.2.jar -joda-time-2.10.1.jar +joda-time-2.10.6.jar jpam-1.1.jar jsch-0.1.42.jar jsp-2.1-6.1.14.jar @@ -135,7 +135,7 @@ jsp-api-2.1.jar jsqlparser-2.1.jar jsr305-3.0.0.jar jta-1.1.jar -jul-to-slf4j-1.7.25.jar +jul-to-slf4j-1.7.30.jar junit-4.12.jar leveldbjni-all-1.8.jar libfb303-0.9.3.jar @@ -157,7 +157,7 @@ mybatis-plus-core-3.2.0.jar mybatis-plus-extension-3.2.0.jar mybatis-spring-2.0.2.jar netty-3.6.2.Final.jar 
-netty-all-4.1.33.Final.jar +netty-all-4.1.52.Final.jar opencsv-2.3.jar oshi-core-3.5.0.jar paranamer-2.3.jar @@ -166,33 +166,33 @@ poi-3.17.jar postgresql-42.1.4.jar presto-jdbc-0.238.1.jar protobuf-java-2.5.0.jar -quartz-2.2.3.jar -quartz-jobs-2.2.3.jar +quartz-2.3.0.jar +quartz-jobs-2.3.0.jar slf4j-api-1.7.5.jar snakeyaml-1.23.jar snappy-0.2.jar snappy-java-1.0.4.1.jar -spring-aop-5.1.5.RELEASE.jar -spring-beans-5.1.5.RELEASE.jar -spring-boot-2.1.3.RELEASE.jar -spring-boot-autoconfigure-2.1.3.RELEASE.jar -spring-boot-starter-2.1.3.RELEASE.jar -spring-boot-starter-aop-2.1.3.RELEASE.jar -spring-boot-starter-jdbc-2.1.3.RELEASE.jar -spring-boot-starter-jetty-2.1.3.RELEASE.jar -spring-boot-starter-json-2.1.3.RELEASE.jar -spring-boot-starter-logging-2.1.3.RELEASE.jar -spring-boot-starter-web-2.1.3.RELEASE.jar -spring-context-5.1.5.RELEASE.jar -spring-core-5.1.5.RELEASE.jar -spring-expression-5.1.5.RELEASE.jar -spring-jcl-5.1.5.RELEASE.jar -spring-jdbc-5.1.5.RELEASE.jar +spring-aop-5.1.18.RELEASE.jar +spring-beans-5.1.18.RELEASE.jar +spring-boot-2.1.17.RELEASE.jar +spring-boot-autoconfigure-2.1.17.RELEASE.jar +spring-boot-starter-2.1.17.RELEASE.jar +spring-boot-starter-aop-2.1.17.RELEASE.jar +spring-boot-starter-jdbc-2.1.17.RELEASE.jar +spring-boot-starter-jetty-2.1.17.RELEASE.jar +spring-boot-starter-json-2.1.17.RELEASE.jar +spring-boot-starter-logging-2.1.17.RELEASE.jar +spring-boot-starter-web-2.1.17.RELEASE.jar +spring-context-5.1.18.RELEASE.jar +spring-core-5.1.18.RELEASE.jar +spring-expression-5.1.18.RELEASE.jar +spring-jcl-5.1.18.RELEASE.jar +spring-jdbc-5.1.18.RELEASE.jar spring-plugin-core-1.2.0.RELEASE.jar spring-plugin-metadata-1.2.0.RELEASE.jar -spring-tx-5.1.5.RELEASE.jar -spring-web-5.1.5.RELEASE.jar -spring-webmvc-5.1.5.RELEASE.jar +spring-tx-5.1.18.RELEASE.jar +spring-web-5.1.18.RELEASE.jar +spring-webmvc-5.1.18.RELEASE.jar springfox-core-2.9.2.jar springfox-schema-2.9.2.jar springfox-spi-2.9.2.jar @@ -202,7 +202,7 @@ springfox-swagger-ui-2.9.2.jar 
springfox-swagger2-2.9.2.jar swagger-annotations-1.5.20.jar swagger-bootstrap-ui-1.9.3.jar -swagger-models-1.5.20.jar +swagger-models-1.5.24.jar tephra-api-0.6.0.jar threetenbp-1.3.6.jar transaction-api-1.1.jar @@ -211,4 +211,4 @@ xercesImpl-2.9.1.jar xml-apis-1.4.01.jar xmlenc-0.52.jar xz-1.0.jar -zookeeper-3.4.14.jar \ No newline at end of file +zookeeper-3.4.14.jar