diff --git a/.github/workflows/ci_e2e.yml b/.github/workflows/ci_e2e.yml new file mode 100644 index 0000000000..13f12641fe --- /dev/null +++ b/.github/workflows/ci_e2e.yml @@ -0,0 +1,67 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +on: ["push", "pull_request"] +env: + DOCKER_DIR: ./docker + LOG_DIR: /tmp/dolphinscheduler + +name: e2e Test + +jobs: + + build: + name: Test + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v1 + with: + submodules: true + - uses: actions/cache@v1 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + - name: Build Image + run: | + export VERSION=`cat $(pwd)/pom.xml| grep "SNAPSHOT" | awk -F "-SNAPSHOT" '{print $1}' | awk -F ">" '{print $2}'` + sh ./dockerfile/hooks/build + - name: Docker Run + run: | + VERSION=`cat $(pwd)/pom.xml| grep "SNAPSHOT" | awk -F "-SNAPSHOT" '{print $1}' | awk -F ">" '{print $2}'` + docker run -dit -e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test -p 8888:8888 dolphinscheduler:$VERSION all + - name: Check Server Status + run: sh ./dockerfile/hooks/check + - name: Prepare e2e env + run: | + sudo apt-get install -y libxss1 libappindicator1 libindicator7 xvfb unzip + 
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb + sudo dpkg -i google-chrome*.deb + sudo apt-get install -f -y + wget -N https://chromedriver.storage.googleapis.com/80.0.3987.106/chromedriver_linux64.zip + unzip chromedriver_linux64.zip + sudo mv -f chromedriver /usr/local/share/chromedriver + sudo ln -s /usr/local/share/chromedriver /usr/local/bin/chromedriver + - name: Run e2e Test + run: cd ./e2e && mvn -B clean test + - name: Collect logs + run: | + mkdir -p ${LOG_DIR} + docker logs dolphinscheduler > ${LOG_DIR}/dolphinscheduler.txt + continue-on-error: true diff --git a/.github/workflows/ci_ut.yml b/.github/workflows/ci_ut.yml index 8013d40f31..6f3eda0362 100644 --- a/.github/workflows/ci_ut.yml +++ b/.github/workflows/ci_ut.yml @@ -20,7 +20,7 @@ env: DOCKER_DIR: ./docker LOG_DIR: /tmp/dolphinscheduler -name: Test Coveralls Parallel +name: Unit Test jobs: diff --git a/README.md b/README.md index 3fbd6345b6..0a9a164b85 100644 --- a/README.md +++ b/README.md @@ -45,17 +45,16 @@ HA is supported by itself | All process definition operations are visualized, dr Overload processing: Task queue mechanism, the number of schedulable tasks on a single machine can be flexibly configured, when too many tasks will be cached in the task queue, will not cause machine jam. 
| One-click deployment | Supports traditional shell tasks, and also support big data platform task scheduling: MR, Spark, SQL (mysql, postgresql, hive, sparksql), Python, Procedure, Sub_Process | | - - ### System partial screenshot -![image](https://user-images.githubusercontent.com/48329107/61368744-1f5f3b00-a8c1-11e9-9cf1-10f8557a6b3b.png) - -![image](https://user-images.githubusercontent.com/48329107/61368966-9dbbdd00-a8c1-11e9-8dcc-a9469d33583e.png) - -![image](https://user-images.githubusercontent.com/48329107/61372146-f347b800-a8c8-11e9-8882-66e8934ada23.png) - - +![home page](https://user-images.githubusercontent.com/15833811/75218288-bf286400-57d4-11ea-8263-d639c6511d5f.jpg) +![dag](https://user-images.githubusercontent.com/15833811/75236750-3374fe80-57f9-11ea-857d-62a66a5a559d.png) +![process definition list page](https://user-images.githubusercontent.com/15833811/75216886-6f479e00-57d0-11ea-92dd-66e7640a186f.png) +![view task log online](https://user-images.githubusercontent.com/15833811/75216924-9900c500-57d0-11ea-91dc-3522a76bdbbe.png) +![resource management](https://user-images.githubusercontent.com/15833811/75216984-be8dce80-57d0-11ea-840d-58546edc8788.png) +![monitor](https://user-images.githubusercontent.com/59273635/75625839-c698a480-5bfc-11ea-8bbe-895b561b337f.png) +![security](https://user-images.githubusercontent.com/15833811/75236441-bfd2f180-57f8-11ea-88bd-f24311e01b7e.png) +![treeview](https://user-images.githubusercontent.com/15833811/75217191-3fe56100-57d1-11ea-8856-f19180d9a879.png) ### Document - Backend deployment documentation @@ -100,16 +99,9 @@ It is because of the shoulders of these open source projects that the birth of t ### Get Help 1. Submit an issue 1. Subscribe the mail list : https://dolphinscheduler.apache.org/en-us/docs/development/subscribe.html. then send mail to dev@dolphinscheduler.apache.org -1. Contact WeChat group manager, ID 510570367. This is for Mandarin(CN) discussion. +1. Contact WeChat(dailidong66). 
This is just for Mandarin(CN) discussion. ### License Please refer to [LICENSE](https://github.com/apache/incubator-dolphinscheduler/blob/dev/LICENSE) file. - - - - - - - diff --git a/README_zh_CN.md b/README_zh_CN.md index e782c1030d..6a4adc8daa 100644 --- a/README_zh_CN.md +++ b/README_zh_CN.md @@ -36,11 +36,19 @@ Dolphin Scheduler Official Website ### 系统部分截图 -![](http://geek.analysys.cn/static/upload/221/2019-03-29/0a9dea80-fb02-4fa5-a812-633b67035ffc.jpeg) +![home page](https://user-images.githubusercontent.com/15833811/75208819-abbad000-57b7-11ea-8d3c-67e7c270671f.jpg) -![](http://geek.analysys.cn/static/upload/221/2019-04-01/83686def-a54f-4169-8cae-77b1f8300cc1.png) +![dag](https://user-images.githubusercontent.com/15833811/75209584-93e44b80-57b9-11ea-952e-537fb24ec72d.jpg) -![](http://geek.analysys.cn/static/upload/221/2019-03-29/83c937c7-1793-4d7a-aa28-b98460329fe0.jpeg) +![log](https://user-images.githubusercontent.com/15833811/75209645-c55d1700-57b9-11ea-94d4-e3fa91ab5218.jpg) + +![gantt](https://user-images.githubusercontent.com/15833811/75209640-c0986300-57b9-11ea-878e-a2098533ad44.jpg) + +![resources](https://user-images.githubusercontent.com/15833811/75209403-11f42280-57b9-11ea-9b59-d4be77063553.jpg) + +![monitor](https://user-images.githubusercontent.com/15833811/75209631-b5ddce00-57b9-11ea-8d22-cdf15cf0ee25.jpg) + +![security](https://user-images.githubusercontent.com/15833811/75209633-baa28200-57b9-11ea-9def-94bef2e212a7.jpg) ### 文档 diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json b/ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json new file mode 100644 index 0000000000..769245b366 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/alerts.json @@ -0,0 +1,158 @@ +{ + "DOLPHIN": { + "service": [], + "DOLPHIN_API": [ + { + "name": "dolphin_api_port_check", + "label": "dolphin_api_port_check", + "description": "dolphin_api_port_check.", + "interval": 10, + "scope": "ANY", + "source": { + "type": "PORT", + "uri": 
"{{dolphin-application-api/server.port}}", + "default_port": 12345, + "reporting": { + "ok": { + "text": "TCP OK - {0:.3f}s response on port {1}" + }, + "warning": { + "text": "TCP OK - {0:.3f}s response on port {1}", + "value": 1.5 + }, + "critical": { + "text": "Connection failed: {0} to {1}:{2}", + "value": 5.0 + } + } + } + } + ], + "DOLPHIN_LOGGER": [ + { + "name": "dolphin_logger_port_check", + "label": "dolphin_logger_port_check", + "description": "dolphin_logger_port_check.", + "interval": 10, + "scope": "ANY", + "source": { + "type": "PORT", + "uri": "{{dolphin-common/loggerserver.rpc.port}}", + "default_port": 50051, + "reporting": { + "ok": { + "text": "TCP OK - {0:.3f}s response on port {1}" + }, + "warning": { + "text": "TCP OK - {0:.3f}s response on port {1}", + "value": 1.5 + }, + "critical": { + "text": "Connection failed: {0} to {1}:{2}", + "value": 5.0 + } + } + } + } + ], + "DOLPHIN_MASTER": [ + { + "name": "DOLPHIN_MASTER_CHECK", + "label": "check dolphin scheduler master status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_MASTER", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ], + "DOLPHIN_WORKER": [ + { + "name": "DOLPHIN_WORKER_CHECK", + "label": "check dolphin scheduler worker status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + 
"display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_WORKER", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ], + "DOLPHIN_ALERT": [ + { + "name": "DOLPHIN_DOLPHIN_ALERT_CHECK", + "label": "check dolphin scheduler alert status", + "description": "", + "interval":10, + "scope": "HOST", + "enabled": true, + "source": { + "type": "SCRIPT", + "path": "DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py", + "parameters": [ + + { + "name": "connection.timeout", + "display_name": "Connection Timeout", + "value": 5.0, + "type": "NUMERIC", + "description": "The maximum time before this alert is considered to be CRITICAL", + "units": "seconds", + "threshold": "CRITICAL" + }, + { + "name": "alertName", + "display_name": "alertName", + "value": "DOLPHIN_ALERT", + "type": "STRING", + "description": "alert name" + } + ] + } + } + ] + } +} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml new file mode 100644 index 0000000000..5b82230148 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-alert.xml @@ -0,0 +1,144 @@ + + + + alert.type + EMAIL + alert type is EMAIL/SMS + + + + mail.protocol + SMTP + + + + + mail.server.host + xxx.xxx.com + + + + + mail.server.port + 25 + + int + + + + + + mail.sender + admin + + + + + mail.user + admin + + + + + mail.passwd + 000000 + + PASSWORD + + password + + + + + + mail.smtp.starttls.enable + true + + boolean + + + + + + mail.smtp.ssl.enable + true + + boolean + + + + + + mail.smtp.ssl.trust + xxx.xxx.com + + + + + + xls.file.path + /tmp/xls + + + + + + enterprise.wechat.enable + false + + + value-list 
+ + + true + + + + false + + + + 1 + + + + + enterprise.wechat.corp.id + wechatId + + + + + enterprise.wechat.secret + secret + + + + + enterprise.wechat.agent.id + agentId + + + + + enterprise.wechat.users + wechatUsers + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml new file mode 100644 index 0000000000..ea4cb82afd --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application-api.xml @@ -0,0 +1,71 @@ + + + + server.port + 12345 + + server port + + + int + + + + server.servlet.session.timeout + 7200 + + int + + + + + + spring.servlet.multipart.max-file-size + 1024 + + MB + int + + + + + + spring.servlet.multipart.max-request-size + 1024 + + MB + int + + + + + + server.jetty.max-http-post-size + 5000000 + + int + + + + + + spring.messages.encoding + UTF-8 + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml new file mode 100644 index 0000000000..6e50a1b649 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-application.xml @@ -0,0 +1,467 @@ + + + + spring.datasource.initialSize + 5 + + Init connection number + + + int + + + + + spring.datasource.minIdle + 5 + + Min connection number + + + int + + + + + spring.datasource.maxActive + 50 + + Max connection number + + + int + + + + + spring.datasource.maxWait + 60000 + + Max wait time for get a connection in milliseconds. + If configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. + If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. 
+ + + int + + + + + spring.datasource.timeBetweenEvictionRunsMillis + 60000 + + Milliseconds for check to close free connections + + + int + + + + + spring.datasource.timeBetweenConnectErrorMillis + 60000 + + The Destroy thread detects the connection interval and closes the physical connection in milliseconds + if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. + + + int + + + + + spring.datasource.minEvictableIdleTimeMillis + 300000 + + The longest time a connection remains idle without being evicted, in milliseconds + + + int + + + + + spring.datasource.validationQuery + SELECT 1 + + The SQL used to check whether the connection is valid requires a query statement. + If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. + + + + + spring.datasource.validationQueryTimeout + 3 + + int + + + Check whether the connection is valid for timeout, in seconds + + + + + spring.datasource.testWhileIdle + true + + boolean + + + When applying for a connection, + if it is detected that the connection is idle longer than time Between Eviction Runs Millis, + validation Query is performed to check whether the connection is valid + + + + + spring.datasource.testOnBorrow + true + + boolean + + + Execute validation to check if the connection is valid when applying for a connection + + + + + spring.datasource.testOnReturn + false + + boolean + + + Execute validation to check if the connection is valid when the connection is returned + + + + + spring.datasource.defaultAutoCommit + true + + boolean + + + + + + + spring.datasource.keepAlive + false + + boolean + + + + + + + + spring.datasource.poolPreparedStatements + true + + boolean + + + Open PSCache, specify count PSCache for every connection + + + + + spring.datasource.maxPoolPreparedStatementPerConnectionSize + 20 + + int + + + + + + spring.datasource.spring.datasource.filters + stat,wall,log4j + + + + + spring.datasource.connectionProperties + 
druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 + + + + + + mybatis-plus.mapper-locations + classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml + + + + + mybatis-plus.typeEnumsPackage + org.apache.dolphinscheduler.*.enums + + + + + mybatis-plus.typeAliasesPackage + org.apache.dolphinscheduler.dao.entity + + Entity scan, where multiple packages are separated by a comma or semicolon + + + + + mybatis-plus.global-config.db-config.id-type + AUTO + + value-list + + + AUTO + + + + INPUT + + + + ID_WORKER + + + + UUID + + + + 1 + + + Primary key type AUTO:" database ID AUTO ", + INPUT:" user INPUT ID", + ID_WORKER:" global unique ID (numeric type unique ID)", + UUID:" global unique ID UUID"; + + + + + mybatis-plus.global-config.db-config.field-strategy + NOT_NULL + + value-list + + + IGNORED + + + + NOT_NULL + + + + NOT_EMPTY + + + + 1 + + + Field policy IGNORED:" ignore judgment ", + NOT_NULL:" not NULL judgment "), + NOT_EMPTY:" not NULL judgment" + + + + + mybatis-plus.global-config.db-config.column-underline + true + + boolean + + + + + + mybatis-plus.global-config.db-config.logic-delete-value + 1 + + int + + + + + + mybatis-plus.global-config.db-config.logic-not-delete-value + 0 + + int + + + + + + mybatis-plus.global-config.db-config.banner + true + + boolean + + + + + + + mybatis-plus.configuration.map-underscore-to-camel-case + true + + boolean + + + + + + mybatis-plus.configuration.cache-enabled + false + + boolean + + + + + + mybatis-plus.configuration.call-setters-on-nulls + true + + boolean + + + + + + mybatis-plus.configuration.jdbc-type-for-null + null + + + + + master.exec.threads + 100 + + int + + + + + + master.exec.task.num + 20 + + int + + + + + + master.heartbeat.interval + 10 + + int + + + + + + master.task.commit.retryTimes + 5 + + int + + + + + + master.task.commit.interval + 1000 + + int + + + + + + master.max.cpuload.avg + 100 + + int + + + + + + master.reserved.memory + 0.1 + + float + + + + + + worker.exec.threads + 100 + + int + + + 
+ + + worker.heartbeat.interval + 10 + + int + + + + + + worker.fetch.task.num + 3 + + int + + + + + + worker.max.cpuload.avg + 100 + + int + + + + + + worker.reserved.memory + 0.1 + + float + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml new file mode 100644 index 0000000000..41e2836e37 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-common.xml @@ -0,0 +1,232 @@ + + + + dolphinscheduler.queue.impl + zookeeper + + Task queue implementation, default "zookeeper" + + + + + zookeeper.dolphinscheduler.root + /dolphinscheduler + + dolphinscheduler root directory + + + + + zookeeper.session.timeout + 300 + + int + + + + + + + zookeeper.connection.timeout + 300 + + int + + + + + + + zookeeper.retry.base.sleep + 100 + + int + + + + + + + zookeeper.retry.max.sleep + 30000 + + int + + + + + + + zookeeper.retry.maxtime + 5 + + int + + + + + + + + res.upload.startup.type + Choose Resource Upload Startup Type + + Resource upload startup type : HDFS,S3,NONE + + NONE + + value-list + + + HDFS + + + + S3 + + + + NONE + + + + 1 + + + + + hdfs.root.user + hdfs + + Users who have permission to create directories under the HDFS root path + + + + + data.store2hdfs.basepath + /dolphinscheduler + + Data base dir, resource file will store to this hadoop hdfs path, self configuration, + please make sure the directory exists on hdfs and have read write permissions。 + "/dolphinscheduler" is recommended + + + + + data.basedir.path + /tmp/dolphinscheduler + + User data directory path, self configuration, + please make sure the directory exists and have read write permissions + + + + + hadoop.security.authentication.startup.state + false + + value-list + + + true + + + + false + + + + 1 + + + + + java.security.krb5.conf.path + /opt/krb5.conf + + java.security.krb5.conf path + + + + + 
login.user.keytab.username + hdfs-mycluster@ESZ.COM + + LoginUserFromKeytab user + + + + + login.user.keytab.path + /opt/hdfs.headless.keytab + + LoginUserFromKeytab path + + + + + resource.view.suffixs + txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties + + + + + fs.defaultFS + hdfs://mycluster:8020 + + HA or single namenode, + If namenode ha needs to copy core-site.xml and hdfs-site.xml to the conf directory, + support s3,for example : s3a://dolphinscheduler + + + + + fs.s3a.endpoint + http://host:9010 + + s3 need,s3 endpoint + + + + + fs.s3a.access.key + A3DXS30FO22544RE + + s3 need,s3 access key + + + + + fs.s3a.secret.key + OloCLq3n+8+sdPHUhJ21XrSxTC+JK + + s3 need,s3 secret key + + + + + loggerserver.rpc.port + 50051 + + intF + + + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml new file mode 100644 index 0000000000..8e14716d05 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-env.xml @@ -0,0 +1,123 @@ + + + + dolphin.database.type + mysql + Dolphin Scheduler DataBase Type Which Is Select + Dolphin Database Type + + value-list + + + mysql + + + + postgresql + + + + 1 + + + + + + dolphin.database.host + + Dolphin Database Host + + + + + dolphin.database.port + + Dolphin Database Port + + + + + dolphin.database.username + + Dolphin Database Username + + + + + dolphin.database.password + + Dolphin Database Password + PASSWORD + + password + + + + + + dolphin.user + + Which user to install and admin dolphin scheduler + Deploy User + + + + dolphin.group + + Which user to install and admin dolphin scheduler + Deploy Group + + + + + dolphinscheduler-env-content + Dolphinscheduler Env template + This is the jinja template for dolphinscheduler.env.sh file + # +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export HADOOP_HOME=/opt/soft/hadoop +export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop +export SPARK_HOME1=/opt/soft/spark1 +export SPARK_HOME2=/opt/soft/spark2 +export PYTHON_HOME=/opt/soft/python +export JAVA_HOME=/opt/soft/java +export HIVE_HOME=/opt/soft/hive +export FLINK_HOME=/opt/soft/flink + + content + false + false + + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml new file mode 100644 index 0000000000..82b59d8827 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/configuration/dolphin-quartz.xml @@ -0,0 +1,131 @@ + + + + org.quartz.scheduler.instanceName + DolphinScheduler + + + + + org.quartz.scheduler.instanceId + AUTO + + + + org.quartz.scheduler.makeSchedulerThreadDaemon + true + + boolean + + + + + org.quartz.jobStore.useProperties + false + + boolean + + + + + org.quartz.threadPool.class + org.quartz.simpl.SimpleThreadPool + + + + org.quartz.threadPool.makeThreadsDaemons + true + + boolean + + + + + org.quartz.threadPool.threadCount + 25 + + int + + + + + org.quartz.threadPool.threadPriority + 5 + + int + + + + + org.quartz.jobStore.class + org.quartz.impl.jdbcjobstore.JobStoreTX + + + + 
org.quartz.jobStore.tablePrefix + QRTZ_ + + + + org.quartz.jobStore.isClustered + true + + boolean + + + + + org.quartz.jobStore.misfireThreshold + 60000 + + int + + + + + org.quartz.jobStore.clusterCheckinInterval + 5000 + + int + + + + + org.quartz.jobStore.dataSource + myDs + + + + org.quartz.dataSource.myDs.connectionProvider.class + org.apache.dolphinscheduler.server.quartz.DruidConnectionProvider + + + + org.quartz.dataSource.myDs.maxConnections + 10 + + int + + + + + org.quartz.dataSource.myDs.validationQuery + select 1 + + + \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml b/ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml new file mode 100644 index 0000000000..0d2bbe3163 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/metainfo.xml @@ -0,0 +1,137 @@ + + + + 2.0 + + + DOLPHIN + Dolphin Scheduler + 分布式易扩展的可视化DAG工作流任务调度系统 + 1.2.1 + + + DOLPHIN_MASTER + DS Master + MASTER + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_LOGGER + DS Logger + SLAVE + 1+ + + + PYTHON + 600 + + + + + DOLPHIN_WORKER + DS Worker + SLAVE + 1+ + + + DOLPHIN/DOLPHIN_LOGGER + host + + true + + + + + + PYTHON + 600 + + + + + DOLPHIN_ALERT + DS Alert + SLAVE + 1 + + + PYTHON + 600 + + + + + DOLPHIN_API + DS_Api + SLAVE + 1 + + + PYTHON + 600 + + + + + + ZOOKEEPER + + + + + any + + + apache-dolphinscheduler-incubating-1.2.1* + + + + + + + dolphin-alert + dolphin-app-api + dolphin-app-dao + dolphin-common + dolphin-env + dolphin-quartz + + + + + theme.json + true + + + + quicklinks + + + quicklinks.json + true + + + + + diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py new file mode 100644 index 0000000000..87cc7b453b --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/alerts/alert_dolphin_scheduler_status.py @@ -0,0 +1,124 @@ +""" +Licensed to the Apache 
Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import socket +import urllib2 +import os +import logging +import ambari_simplejson as json +from resource_management.libraries.script.script import Script +import sys +reload(sys) +sys.setdefaultencoding('utf-8') + +logger = logging.getLogger('ambari_alerts') + +config = Script.get_config() + + +def get_tokens(): + """ + Returns a tuple of tokens in the format {{site/property}} that will be used + to build the dictionary passed into execute + + :rtype tuple + """ + +def get_info(url, connection_timeout): + response = None + + try: + response = urllib2.urlopen(url, timeout=connection_timeout) + json_data = response.read() + return json_data + finally: + if response is not None: + try: + response.close() + except: + pass + + +def execute(configurations={}, parameters={}, host_name=None): + """ + Returns a tuple containing the result code and a pre-formatted result label + + Keyword arguments: + configurations : a mapping of configuration key to value + parameters : a mapping of script parameter key to value + host_name : the name of this host where the alert is running + + :type configurations dict + :type parameters dict + :type host_name str + """ + + alert_name = parameters['alertName'] + + dolphin_pidfile_dir = 
"/opt/soft/run/dolphinscheduler" + + pid = "0" + + + from resource_management.core import sudo + + is_running = True + pid_file_path = "" + if alert_name == 'DOLPHIN_MASTER': + pid_file_path = dolphin_pidfile_dir + "/master-server.pid" + elif alert_name == 'DOLPHIN_WORKER': + pid_file_path = dolphin_pidfile_dir + "/worker-server.pid" + elif alert_name == 'DOLPHIN_ALERT': + pid_file_path = dolphin_pidfile_dir + "/alert-server.pid" + elif alert_name == 'DOLPHIN_LOGGER': + pid_file_path = dolphin_pidfile_dir + "/logger-server.pid" + elif alert_name == 'DOLPHIN_API': + pid_file_path = dolphin_pidfile_dir + "/api-server.pid" + + if not pid_file_path or not os.path.isfile(pid_file_path): + is_running = False + + try: + pid = int(sudo.read_file(pid_file_path)) + except: + is_running = False + + try: + # Kill will not actually kill the process + # From the doc: + # If sig is 0, then no signal is sent, but error checking is still + # performed; this can be used to check for the existence of a + # process ID or process group ID. + sudo.kill(pid, 0) + except OSError: + is_running = False + + if host_name is None: + host_name = socket.getfqdn() + + if not is_running: + result_code = "CRITICAL" + else: + result_code = "OK" + + label = "The comment {0} of DOLPHIN_SCHEDULER on {1} is {2}".format(alert_name, host_name, result_code) + + return ((result_code, [label])) + +if __name__ == "__main__": + pass diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py new file mode 100644 index 0000000000..62255a3432 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_alert_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. 
The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinAlertService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/alert-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/alert-server.pid` | grep `cat {dolphin_pidfile_dir}/alert-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start alert-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop alert-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "alert-server.pid") + + +if __name__ == "__main__": + DolphinAlertService().execute() diff --git 
a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py new file mode 100644 index 0000000000..bdc18fb602 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_api_service.py @@ -0,0 +1,70 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinApiService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + + #init + init_cmd=format("sh " + params.dolphin_home + "/script/create-dolphinscheduler.sh") + Execute(init_cmd, user=params.dolphin_user) + + #upgrade + upgrade_cmd=format("sh " + params.dolphin_home + "/script/upgrade-dolphinscheduler.sh") + Execute(upgrade_cmd, user=params.dolphin_user) + + no_op_test = format("ls {dolphin_pidfile_dir}/api-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/api-server.pid` | grep `cat {dolphin_pidfile_dir}/api-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start api-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop api-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "api-server.pid") + + +if __name__ == "__main__": + DolphinApiService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py new file mode 100644 index 0000000000..235605894f --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_env.py @@ -0,0 +1,121 @@ +""" +Licensed to the Apache Software 
Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * + + +def dolphin_env(): + import params + + Directory(params.dolphin_pidfile_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_log_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_conf_dir, + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + + Directory(params.dolphin_alert_map['xls.file.path'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_common_map['data.basedir.path'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_common_map['data.download.basedir.path'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + Directory(params.dolphin_common_map['process.exec.basepath'], + mode=0777, + owner=params.dolphin_user, + group=params.dolphin_group, + create_parents=True + ) + + + File(format(params.dolphin_env_path), + mode=0777, + content=InlineTemplate(params.dolphin_env_content), + 
owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh"), + mode=0755, + content=Template("dolphin-daemon.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + + File(format(params.dolphin_conf_dir + "/alert.properties"), + mode=0755, + content=Template("alert.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/application.properties"), + mode=0755, + content=Template("application.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/application-api.properties"), + mode=0755, + content=Template("application-api.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/common.properties"), + mode=0755, + content=Template("common.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) + + File(format(params.dolphin_conf_dir + "/quartz.properties"), + mode=0755, + content=Template("quartz.properties.j2"), + owner=params.dolphin_user, + group=params.dolphin_group + ) diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py new file mode 100644 index 0000000000..f1c19bd66f --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_logger_service.py @@ -0,0 +1,61 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinLoggerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/logger-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/logger-server.pid` | grep `cat {dolphin_pidfile_dir}/logger-server.pid` >/dev/null 2>&1") + + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start logger-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop logger-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "logger-server.pid") + + +if __name__ == "__main__": + DolphinLoggerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py new file mode 100644 index 0000000000..6ee7ecfcf3 
--- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_master_service.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinMasterService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/master-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/master-server.pid` | grep `cat {dolphin_pidfile_dir}/master-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start master-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop master-server") + 
Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "master-server.pid") + + +if __name__ == "__main__": + DolphinMasterService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py new file mode 100644 index 0000000000..2d145ee730 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/dolphin_worker_service.py @@ -0,0 +1,60 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import time +from resource_management import * + +from dolphin_env import dolphin_env + + +class DolphinWorkerService(Script): + def install(self, env): + import params + env.set_params(params) + self.install_packages(env) + Execute(('chmod', '-R', '777', params.dolphin_home), user=params.dolphin_user, sudo=True) + + def configure(self, env): + import params + params.pika_slave = True + env.set_params(params) + + dolphin_env() + + def start(self, env): + import params + env.set_params(params) + self.configure(env) + no_op_test = format("ls {dolphin_pidfile_dir}/worker-server.pid >/dev/null 2>&1 && ps `cat {dolphin_pidfile_dir}/worker-server.pid` | grep `cat {dolphin_pidfile_dir}/worker-server.pid` >/dev/null 2>&1") + start_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh start worker-server") + Execute(start_cmd, user=params.dolphin_user, not_if=no_op_test) + + def stop(self, env): + import params + env.set_params(params) + stop_cmd = format("sh " + params.dolphin_bin_dir + "/dolphinscheduler-daemon.sh stop worker-server") + Execute(stop_cmd, user=params.dolphin_user) + time.sleep(5) + + def status(self, env): + import status_params + env.set_params(status_params) + check_process_status(status_params.dolphin_run_dir + "worker-server.pid") + + +if __name__ == "__main__": + DolphinWorkerService().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py new file mode 100644 index 0000000000..230ad14565 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/params.py @@ -0,0 +1,150 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. 
The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +import sys +from resource_management import * +from resource_management.core.logger import Logger +from resource_management.libraries.functions import default + +Logger.initialize_logger() +reload(sys) +sys.setdefaultencoding('utf-8') + +# server configurations +config = Script.get_config() + +# conf_dir = "/etc/" +dolphin_home = "/opt/soft/apache-dolphinscheduler-incubating-1.2.1" +dolphin_conf_dir = dolphin_home + "/conf" +dolphin_log_dir = dolphin_home + "/logs" +dolphin_bin_dir = dolphin_home + "/bin" +dolphin_lib_jars = dolphin_home + "/lib/*" +dolphin_pidfile_dir = "/opt/soft/run/dolphinscheduler" + +rmHosts = default("/clusterHostInfo/rm_host", []) + +# dolphin-env +dolphin_env_map = {} +dolphin_env_map.update(config['configurations']['dolphin-env']) + +# which user to install and admin dolphin scheduler +dolphin_user = dolphin_env_map['dolphin.user'] +dolphin_group = dolphin_env_map['dolphin.group'] + +# .dolphinscheduler_env.sh +dolphin_env_path = dolphin_conf_dir + '/env/dolphinscheduler_env.sh' +dolphin_env_content = dolphin_env_map['dolphinscheduler-env-content'] + +# database config +dolphin_database_config = {} +dolphin_database_config['dolphin_database_type'] = dolphin_env_map['dolphin.database.type'] +dolphin_database_config['dolphin_database_host'] = dolphin_env_map['dolphin.database.host'] +dolphin_database_config['dolphin_database_port'] = dolphin_env_map['dolphin.database.port'] 
+dolphin_database_config['dolphin_database_username'] = dolphin_env_map['dolphin.database.username'] +dolphin_database_config['dolphin_database_password'] = dolphin_env_map['dolphin.database.password'] + +if 'mysql' == dolphin_database_config['dolphin_database_type']: + dolphin_database_config['dolphin_database_driver'] = 'com.mysql.jdbc.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.StdJDBCDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:mysql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8' +else: + dolphin_database_config['dolphin_database_driver'] = 'org.postgresql.Driver' + dolphin_database_config['driverDelegateClass'] = 'org.quartz.impl.jdbcjobstore.PostgreSQLDelegate' + dolphin_database_config['dolphin_database_url'] = 'jdbc:postgresql://' + dolphin_env_map['dolphin.database.host'] \ + + ':' + dolphin_env_map['dolphin.database.port'] \ + + '/dolphinscheduler' + +# application-alert.properties +dolphin_alert_map = {} +wechat_push_url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token' +wechat_token_url = 'https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret' +wechat_team_send_msg = '{\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"}' +wechat_user_send_msg = '{\"touser\":\"$toUser\",\"agentid\":\"$agentId\",\"msgtype\":\"markdown\",\"markdown\":{\"content\":\"$msg\"}}' + +dolphin_alert_map['enterprise.wechat.push.url'] = wechat_push_url +dolphin_alert_map['enterprise.wechat.token.url'] = wechat_token_url +dolphin_alert_map['enterprise.wechat.team.send.msg'] = wechat_team_send_msg +dolphin_alert_map['enterprise.wechat.user.send.msg'] = wechat_user_send_msg +dolphin_alert_map.update(config['configurations']['dolphin-alert']) + +# application-api.properties 
+dolphin_app_api_map = {} +dolphin_app_api_map['logging.config'] = 'classpath:apiserver_logback.xml' +dolphin_app_api_map['spring.messages.basename'] = 'i18n/messages' +dolphin_app_api_map['server.servlet.context-path'] = '/dolphinscheduler/' +dolphin_app_api_map.update(config['configurations']['dolphin-application-api']) + +# application-dao.properties +dolphin_application_map = {} +dolphin_application_map['spring.datasource.type'] = 'com.alibaba.druid.pool.DruidDataSource' +dolphin_application_map['spring.datasource.driver-class-name'] = dolphin_database_config['dolphin_database_driver'] +dolphin_application_map['spring.datasource.url'] = dolphin_database_config['dolphin_database_url'] +dolphin_application_map['spring.datasource.username'] = dolphin_database_config['dolphin_database_username'] +dolphin_application_map['spring.datasource.password'] = dolphin_database_config['dolphin_database_password'] +dolphin_application_map.update(config['configurations']['dolphin-application']) + +# common.properties +dolphin_common_map = {} + +if 'yarn-site' in config['configurations'] and \ + 'yarn.resourcemanager.webapp.address' in config['configurations']['yarn-site']: + yarn_resourcemanager_webapp_address = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'] + yarn_application_status_address = 'http://' + yarn_resourcemanager_webapp_address + '/ws/v1/cluster/apps/%s' + dolphin_common_map['yarn.application.status.address'] = yarn_application_status_address + +rmHosts = default("/clusterHostInfo/rm_host", []) +if len(rmHosts) > 1: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = ','.join(rmHosts) +else: + dolphin_common_map['yarn.resourcemanager.ha.rm.ids'] = '' + +dolphin_common_map_tmp = config['configurations']['dolphin-common'] +data_basedir_path = dolphin_common_map_tmp['data.basedir.path'] +process_exec_basepath = data_basedir_path + '/exec' +data_download_basedir_path = data_basedir_path + '/download' 
+dolphin_common_map['process.exec.basepath'] = process_exec_basepath +dolphin_common_map['data.download.basedir.path'] = data_download_basedir_path +dolphin_common_map['dolphinscheduler.env.path'] = dolphin_env_path + +zookeeperHosts = default("/clusterHostInfo/zookeeper_hosts", []) +if len(zookeeperHosts) > 0 and "clientPort" in config['configurations']['zoo.cfg']: + clientPort = config['configurations']['zoo.cfg']['clientPort'] + zookeeperPort = ":" + clientPort + "," + dolphin_common_map['zookeeper.quorum'] = zookeeperPort.join(zookeeperHosts) + ":" + clientPort + +dolphin_common_map.update(config['configurations']['dolphin-common']) + +# quartz.properties +dolphin_quartz_map = {} +dolphin_quartz_map['org.quartz.jobStore.driverDelegateClass'] = dolphin_database_config['driverDelegateClass'] +dolphin_quartz_map['org.quartz.dataSource.myDs.driver'] = dolphin_database_config['dolphin_database_driver'] +dolphin_quartz_map['org.quartz.dataSource.myDs.URL'] = dolphin_database_config['dolphin_database_url'] +dolphin_quartz_map['org.quartz.dataSource.myDs.user'] = dolphin_database_config['dolphin_database_username'] +dolphin_quartz_map['org.quartz.dataSource.myDs.password'] = dolphin_database_config['dolphin_database_password'] +dolphin_quartz_map.update(config['configurations']['dolphin-quartz']) + +# if 'ganglia_server_host' in config['clusterHostInfo'] and \ +# len(config['clusterHostInfo']['ganglia_server_host'])>0: +# ganglia_installed = True +# ganglia_server = config['clusterHostInfo']['ganglia_server_host'][0] +# ganglia_report_interval = 60 +# else: +# ganglia_installed = False diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py new file mode 100644 index 0000000000..0e12f69932 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/service_check.py @@ -0,0 +1,31 @@ +""" +Licensed to the Apache Software Foundation (ASF) 
under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +""" + +from resource_management import * +from resource_management.libraries.functions import get_unique_id_and_date + +class ServiceCheck(Script): + def service_check(self, env): + import params + #env.set_params(params) + + # Execute(format("which pika_server")) + +if __name__ == "__main__": + ServiceCheck().execute() diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py new file mode 100644 index 0000000000..24b2c8b1bc --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/scripts/status_params.py @@ -0,0 +1,23 @@ +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from resource_management import * + +config = Script.get_config() + +dolphin_run_dir = "/opt/soft/run/dolphinscheduler/" diff --git a/dockerfile/conf/dolphinscheduler/conf/config/run_config.conf b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 similarity index 90% rename from dockerfile/conf/dolphinscheduler/conf/config/run_config.conf rename to ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 index 69a28db458..73840b8c18 100644 --- a/dockerfile/conf/dolphinscheduler/conf/config/run_config.conf +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/alert.properties.j2 @@ -15,7 +15,6 @@ # limitations under the License. # -masters=ark0,ark1 -workers=ark2,ark3,ark4 -alertServer=ark3 -apiServers=ark1 \ No newline at end of file +{% for key, value in dolphin_alert_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/config/install_config.conf b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 similarity index 89% rename from dockerfile/conf/dolphinscheduler/conf/config/install_config.conf rename to ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 index 196a78f49c..70118003b9 100644 --- a/dockerfile/conf/dolphinscheduler/conf/config/install_config.conf +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application-api.properties.j2 @@ -15,6 +15,6 @@ # limitations under the License. 
# -installPath=/data1_1T/dolphinscheduler -deployUser=dolphinscheduler -ips=ark0,ark1,ark2,ark3,ark4 +{% for key, value in dolphin_app_api_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/env/.escheduler_env.sh b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 similarity index 85% rename from dockerfile/conf/dolphinscheduler/conf/env/.escheduler_env.sh rename to ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 index 5b85917fc2..7bb9f8aff3 100644 --- a/dockerfile/conf/dolphinscheduler/conf/env/.escheduler_env.sh +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/application.properties.j2 @@ -15,6 +15,6 @@ # limitations under the License. # -export PYTHON_HOME=/usr/bin/python -export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -export PATH=$PYTHON_HOME:$JAVA_HOME/bin:$PATH \ No newline at end of file +{% for key, value in dolphin_application_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/env/.dolphinscheduler_env.sh b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 similarity index 81% rename from dockerfile/conf/dolphinscheduler/conf/env/.dolphinscheduler_env.sh rename to ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 index 8e842fe28e..2220c4effa 100644 --- a/dockerfile/conf/dolphinscheduler/conf/env/.dolphinscheduler_env.sh +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/common.properties.j2 @@ -15,7 +15,6 @@ # limitations under the License. 
# -export PYTHON_HOME=/usr/bin/python -export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -export PATH=$PYTHON_HOME:$JAVA_HOME/bin:$PATH -export DATAX_HOME=/opt/datax/bin/datax.py \ No newline at end of file +{% for key, value in dolphin_common_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 new file mode 100644 index 0000000000..ab99ffda47 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/dolphin-daemon.j2 @@ -0,0 +1,119 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +usage="Usage: dolphinscheduler-daemon.sh (start|stop) " + +# if no args specified, show usage +if [ $# -le 1 ]; then + echo $usage + exit 1 +fi + +startStop=$1 +shift +command=$1 +shift + +echo "Begin $startStop $command......" + +BIN_DIR=`dirname $0` +BIN_DIR=`cd "$BIN_DIR"; pwd` +DOLPHINSCHEDULER_HOME=$BIN_DIR/.. 
+ +export HOSTNAME=`hostname` + +DOLPHINSCHEDULER_LIB_JARS={{dolphin_lib_jars}} + +DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms4g -Xss512k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" +STOP_TIMEOUT=5 + +log={{dolphin_log_dir}}/dolphinscheduler-$command-$HOSTNAME.out +pid={{dolphin_pidfile_dir}}/$command.pid + +cd $DOLPHINSCHEDULER_HOME + +if [ "$command" = "api-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/apiserver_logback.xml -Dspring.profiles.active=api" + CLASS=org.apache.dolphinscheduler.api.ApiApplicationServer +elif [ "$command" = "master-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/master_logback.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.master.MasterServer +elif [ "$command" = "worker-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/worker_logback.xml -Ddruid.mysql.usePingMethod=false" + CLASS=org.apache.dolphinscheduler.server.worker.WorkerServer +elif [ "$command" = "alert-server" ]; then + LOG_FILE="-Dlogback.configurationFile={{dolphin_conf_dir}}/alert_logback.xml" + CLASS=org.apache.dolphinscheduler.alert.AlertServer +elif [ "$command" = "logger-server" ]; then + CLASS=org.apache.dolphinscheduler.server.rpc.LoggerServer +elif [ "$command" = "combined-server" ]; then + LOG_FILE="-Dlogging.config={{dolphin_conf_dir}}/combined_logback.xml -Dspring.profiles.active=api -Dserver.is-combined-server=true" + CLASS=org.apache.dolphinscheduler.api.CombinedApplicationServer +else + echo "Error: No command named \`$command' was found." + exit 1 +fi + +case $startStop in + (start) + + if [ -f $pid ]; then + if kill -0 `cat $pid` > /dev/null 2>&1; then + echo $command running as process `cat $pid`. Stop it first. 
+ exit 1 + fi + fi + + echo starting $command, logging to $log + + exec_command="$LOG_FILE $DOLPHINSCHEDULER_OPTS -classpath {{dolphin_conf_dir}}:{{dolphin_lib_jars}} $CLASS" + + echo "nohup java $exec_command > $log 2>&1 < /dev/null &" + nohup java $exec_command > $log 2>&1 < /dev/null & + echo $! > $pid + ;; + + (stop) + + if [ -f $pid ]; then + TARGET_PID=`cat $pid` + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo stopping $command + kill $TARGET_PID + sleep $STOP_TIMEOUT + if kill -0 $TARGET_PID > /dev/null 2>&1; then + echo "$command did not stop gracefully after $STOP_TIMEOUT seconds: killing with kill -9" + kill -9 $TARGET_PID + fi + else + echo no $command to stop + fi + rm -f $pid + else + echo no $command to stop + fi + ;; + + (*) + echo $usage + exit 1 + ;; + +esac + +echo "End $startStop $command." \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 new file mode 100644 index 0000000000..e027a263b5 --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/package/templates/quartz.properties.j2 @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +{% for key, value in dolphin_quartz_map.iteritems() -%} + {{key}}={{value}} +{% endfor %} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json b/ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json new file mode 100755 index 0000000000..8753004fef --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/quicklinks/quicklinks.json @@ -0,0 +1,26 @@ +{ + "name": "default", + "description": "default quick links configuration", + "configuration": { + "protocol": + { + "type":"http" + }, + + "links": [ + { + "name": "dolphin-application-ui", + "label": "DolphinApplication UI", + "requires_user_name": "false", + "component_name": "DOLPHIN_API", + "url": "%@://%@:%@/dolphinscheduler/ui/view/login/index.html", + "port":{ + "http_property": "server.port", + "http_default_port": "12345", + "regex": "^(\\d+)$", + "site": "dolphin-application-api" + } + } + ] + } +} \ No newline at end of file diff --git a/ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json b/ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json new file mode 100644 index 0000000000..23e46076aa --- /dev/null +++ b/ambari_plugin/common-services/DOLPHIN/1.2.1/themes/theme.json @@ -0,0 +1,605 @@ +{ + "name": "default", + "description": "Default theme for Dolphin Scheduler service", + "configuration": { + "layouts": [ + { + "name": "default", + "tabs": [ + { + "name": "settings", + "display-name": "Settings", + "layout": { + "tab-rows": "3", + "tab-columns": "3", + "sections": [ + { + "name": "dolphin-env-config", + "display-name": "Dolphin Env Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "2", + "subsections": [ + { + "name": "env-row1-col1", + "display-name": "Deploy User Info", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "env-row1-col2", + "display-name": 
"System Env Optimization", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dolphin-database-config", + "display-name": "Database Config", + "row-index": "1", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "database-row1-col1", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col2", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "database-row1-col3", + "row-index": "0", + "column-index": "2", + "row-span": "1", + "column-span": "1" + } + ] + }, + { + "name": "dynamic-config", + "row-index": "2", + "column-index": "0", + "row-span": "1", + "column-span": "2", + "section-rows": "1", + "section-columns": "3", + "subsections": [ + { + "name": "dynamic-row1-col1", + "display-name": "Resource FS Config", + "row-index": "0", + "column-index": "0", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col2", + "display-name": "Kerberos Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + }, + { + "name": "dynamic-row1-col3", + "display-name": "Wechat Info", + "row-index": "0", + "column-index": "1", + "row-span": "1", + "column-span": "1" + } + ] + } + ] + } + } + ] + } + ], + "placement": { + "configuration-layout": "default", + "configs": [ + { + "config": "dolphin-env/dolphin.database.type", + "subsection-name": "database-row1-col1" + }, + { + "config": "dolphin-env/dolphin.database.host", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.port", + "subsection-name": "database-row1-col2" + }, + { + "config": "dolphin-env/dolphin.database.username", + "subsection-name": "database-row1-col3" + }, + { + "config": "dolphin-env/dolphin.database.password", + "subsection-name": "database-row1-col3" + 
}, + { + "config": "dolphin-env/dolphin.user", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphin.group", + "subsection-name": "env-row1-col1" + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "subsection-name": "env-row1-col2" + }, + { + "config": "dolphin-common/res.upload.startup.type", + "subsection-name": "dynamic-row1-col1" + }, + { + "config": "dolphin-common/hdfs.root.user", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.defaultFS", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === HDFS", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": 
"dolphin-common/fs.s3a.access.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "subsection-name": "dynamic-row1-col1", + "depends-on": [ + { + "configs":[ + "dolphin-common/res.upload.startup.type" + ], + "if": "${dolphin-common/res.upload.startup.type} === S3", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "subsection-name": "dynamic-row1-col2" + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.username", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": "${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-common/login.user.keytab.path", + "subsection-name": "dynamic-row1-col2", + "depends-on": [ + { + "configs":[ + "dolphin-common/hadoop.security.authentication.startup.state" + ], + "if": 
"${dolphin-common/hadoop.security.authentication.startup.state}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "subsection-name": "dynamic-row1-col3" + }, + { + "config": "dolphin-alert/enterprise.wechat.corp.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "subsection-name": "dynamic-row1-col3", + "depends-on": [ + { + "configs":[ + "dolphin-alert/enterprise.wechat.enable" + ], + "if": "${dolphin-alert/enterprise.wechat.enable}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + } + ] + }, + "widgets": [ + { + "config": "dolphin-env/dolphin.database.type", + "widget": { + 
"type": "combo" + } + }, + { + "config": "dolphin-env/dolphin.database.host", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.port", + "widget": { + "type": "text-field", + "units": [ + { + "unit-name": "int" + } + ] + } + }, + { + "config": "dolphin-env/dolphin.database.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.database.password", + "widget": { + "type": "password" + } + }, + { + "config": "dolphin-env/dolphin.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphin.group", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-env/dolphinscheduler-env-content", + "widget": { + "type": "text-area" + } + }, + { + "config": "dolphin-common/res.upload.startup.type", + "widget": { + "type": "combo" + } + }, + { + "config": "dolphin-common/hdfs.root.user", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/data.store2hdfs.basepath", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.defaultFS", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.endpoint", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.access.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/fs.s3a.secret.key", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/hadoop.security.authentication.startup.state", + "widget": { + "type": "toggle" + } + }, + { + "config": "dolphin-common/java.security.krb5.conf.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.username", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-common/login.user.keytab.path", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.enable", + "widget": { + "type": "toggle" + } + }, + { + "config": 
"dolphin-alert/enterprise.wechat.corp.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.secret", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.agent.id", + "widget": { + "type": "text-field" + } + }, + { + "config": "dolphin-alert/enterprise.wechat.users", + "widget": { + "type": "text-field" + } + } + ] + } +} diff --git a/ambari_plugin/readme.pdf b/ambari_plugin/readme.pdf new file mode 100644 index 0000000000..1209375701 Binary files /dev/null and b/ambari_plugin/readme.pdf differ diff --git a/ambari_plugin/statcks/DOLPHIN/metainfo.xml b/ambari_plugin/statcks/DOLPHIN/metainfo.xml new file mode 100755 index 0000000000..c41db5f513 --- /dev/null +++ b/ambari_plugin/statcks/DOLPHIN/metainfo.xml @@ -0,0 +1,26 @@ + + + + 2.0 + + + DOLPHIN + common-services/DOLPHIN/1.2.1 + + + \ No newline at end of file diff --git a/dockerfile/Dockerfile b/dockerfile/Dockerfile index 217b2c052f..1fc064c489 100644 --- a/dockerfile/Dockerfile +++ b/dockerfile/Dockerfile @@ -15,122 +15,78 @@ # limitations under the License. # -FROM ubuntu:18.04 - -ENV LANG=C.UTF-8 -ENV DEBIAN_FRONTEND=noninteractive - -ARG version -ARG tar_version - -#1,install jdk - -RUN apt-get update \ - && apt-get -y install openjdk-8-jdk \ - && rm -rf /var/lib/apt/lists/* - -ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64 +FROM nginx:alpine + +ARG VERSION + +ENV TZ Asia/Shanghai +ENV LANG C.UTF-8 +ENV DEBIAN_FRONTEND noninteractive + +#1. install dos2unix shadow bash openrc python sudo vim wget iputils net-tools ssh pip kazoo. +#If install slowly, you can replcae alpine's mirror with aliyun's mirror, Example: +#RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories +RUN apk update && \ + apk add dos2unix shadow bash openrc python sudo vim wget iputils net-tools openssh-server py2-pip && \ + apk add --update procps && \ + openrc boot && \ + pip install kazoo + +#2. 
install jdk +RUN apk add openjdk8 +ENV JAVA_HOME /usr/lib/jvm/java-1.8-openjdk ENV PATH $JAVA_HOME/bin:$PATH - -#install wget -RUN apt-get update && \ - apt-get -y install wget -#2,install ZK - +#3. install zk RUN cd /opt && \ - wget https://www-us.apache.org/dist/zookeeper/zookeeper-3.4.14/zookeeper-3.4.14.tar.gz && \ - tar -zxvf zookeeper-3.4.14.tar.gz && \ - mv zookeeper-3.4.14 zookeeper && \ - rm -rf ./zookeeper-*tar.gz && \ + wget https://downloads.apache.org/zookeeper/zookeeper-3.5.7/apache-zookeeper-3.5.7-bin.tar.gz && \ + tar -zxvf apache-zookeeper-3.5.7-bin.tar.gz && \ + mv apache-zookeeper-3.5.7-bin zookeeper && \ mkdir -p /tmp/zookeeper && \ + rm -rf ./zookeeper-*tar.gz && \ rm -rf /opt/zookeeper/conf/zoo_sample.cfg - -ADD ./dockerfile/conf/zookeeper/zoo.cfg /opt/zookeeper/conf -ENV ZK_HOME=/opt/zookeeper -ENV PATH $PATH:$ZK_HOME/bin - -#3,install maven -RUN cd /opt && \ - wget http://apache-mirror.rbc.ru/pub/apache/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz && \ - tar -zxvf apache-maven-3.3.9-bin.tar.gz && \ - mv apache-maven-3.3.9 maven && \ - rm -rf ./apache-maven-*tar.gz && \ - rm -rf /opt/maven/conf/settings.xml -ADD ./dockerfile/conf/maven/settings.xml /opt/maven/conf -ENV MAVEN_HOME=/opt/maven -ENV PATH $PATH:$MAVEN_HOME/bin - -#4,install node -RUN cd /opt && \ - wget https://nodejs.org/download/release/v8.9.4/node-v8.9.4-linux-x64.tar.gz && \ - tar -zxvf node-v8.9.4-linux-x64.tar.gz && \ - mv node-v8.9.4-linux-x64 node && \ - rm -rf ./node-v8.9.4-*tar.gz -ENV NODE_HOME=/opt/node -ENV PATH $PATH:$NODE_HOME/bin - -#5,install postgresql -RUN apt-get update && \ - apt-get install -y postgresql postgresql-contrib sudo && \ - sed -i 's/localhost/*/g' /etc/postgresql/10/main/postgresql.conf - -#6,install nginx -RUN apt-get update && \ - apt-get install -y nginx && \ - rm -rf /var/lib/apt/lists/* && \ - echo "\ndaemon off;" >> /etc/nginx/nginx.conf && \ - chown -R www-data:www-data /var/lib/nginx - -#7,install sudo,python,vim,ping and ssh 
command -RUN apt-get update && \ - apt-get -y install sudo && \ - apt-get -y install python && \ - apt-get -y install vim && \ - apt-get -y install iputils-ping && \ - apt-get -y install net-tools && \ - apt-get -y install openssh-server && \ - apt-get -y install python-pip && \ - pip install kazoo - -#8,add dolphinscheduler source code to /opt/dolphinscheduler_source -ADD . /opt/dolphinscheduler_source - - -#9,backend compilation -RUN cd /opt/dolphinscheduler_source && \ - mvn clean package -Prelease -Dmaven.test.skip=true - -#10,frontend compilation -RUN chmod -R 777 /opt/dolphinscheduler_source/dolphinscheduler-ui && \ - cd /opt/dolphinscheduler_source/dolphinscheduler-ui && \ - rm -rf /opt/dolphinscheduler_source/dolphinscheduler-ui/node_modules && \ - npm install node-sass --unsafe-perm && \ - npm install && \ - npm run build - -#11,modify dolphinscheduler configuration file -#backend configuration -RUN tar -zxvf /opt/dolphinscheduler_source/dolphinscheduler-dist/dolphinscheduler-backend/target/apache-dolphinscheduler-incubating-${tar_version}-dolphinscheduler-backend-bin.tar.gz -C /opt && \ - mv /opt/apache-dolphinscheduler-incubating-${tar_version}-dolphinscheduler-backend-bin /opt/dolphinscheduler && \ - rm -rf /opt/dolphinscheduler/conf - -ADD ./dockerfile/conf/dolphinscheduler/conf /opt/dolphinscheduler/conf -#frontend nginx configuration -ADD ./dockerfile/conf/nginx/dolphinscheduler.conf /etc/nginx/conf.d - -#12,open port -EXPOSE 2181 2888 3888 3306 80 12345 8888 - -COPY ./dockerfile/startup.sh /root/startup.sh -#13,modify permissions and set soft links -RUN chmod +x /root/startup.sh && \ - chmod +x /opt/dolphinscheduler/script/create-dolphinscheduler.sh && \ - chmod +x /opt/zookeeper/bin/zkServer.sh && \ - chmod +x /opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh && \ - rm -rf /bin/sh && \ - ln -s /bin/bash /bin/sh && \ - mkdir -p /tmp/xls - +ADD ./conf/zookeeper/zoo.cfg /opt/zookeeper/conf +ENV ZK_HOME /opt/zookeeper +ENV PATH $ZK_HOME/bin:$PATH + 
+#4. install pg +RUN apk add postgresql postgresql-contrib + +#5. add dolphinscheduler +ADD ./apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz /opt/ +RUN mv /opt/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin/ /opt/dolphinscheduler/ +ENV DOLPHINSCHEDULER_HOME /opt/dolphinscheduler + +#6. modify nginx +RUN echo "daemon off;" >> /etc/nginx/nginx.conf && \ + rm -rf /etc/nginx/conf.d/* +ADD ./conf/nginx/dolphinscheduler.conf /etc/nginx/conf.d + +#7. add configuration and modify permissions and set soft links +ADD ./startup-init-conf.sh /root/startup-init-conf.sh +ADD ./startup.sh /root/startup.sh +ADD ./conf/dolphinscheduler/*.tpl /opt/dolphinscheduler/conf/ +ADD ./conf/dolphinscheduler/env/dolphinscheduler_env /opt/dolphinscheduler/conf/env/ +RUN chmod +x /root/startup-init-conf.sh && \ + chmod +x /root/startup.sh && \ + chmod +x /opt/dolphinscheduler/conf/env/dolphinscheduler_env && \ + chmod +x /opt/dolphinscheduler/script/*.sh && \ + chmod +x /opt/dolphinscheduler/bin/*.sh && \ + chmod +x /opt/zookeeper/bin/*.sh && \ + dos2unix /root/startup-init-conf.sh && \ + dos2unix /root/startup.sh && \ + dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env && \ + dos2unix /opt/dolphinscheduler/script/*.sh && \ + dos2unix /opt/dolphinscheduler/bin/*.sh && \ + dos2unix /opt/zookeeper/bin/*.sh && \ + rm -rf /bin/sh && \ + ln -s /bin/bash /bin/sh && \ + mkdir -p /tmp/xls + +#8. remove apk index cache +RUN rm -rf /var/cache/apk/* + +#9. expose port +EXPOSE 2181 2888 3888 5432 12345 8888 ENTRYPOINT ["/root/startup.sh"] \ No newline at end of file diff --git a/dockerfile/README.md b/dockerfile/README.md index 33b58cacde..b5a9d0d3aa 100644 --- a/dockerfile/README.md +++ b/dockerfile/README.md @@ -1,11 +1,306 @@ -## Build Image +## What is Dolphin Scheduler? 
+ +Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing. + +Github URL: https://github.com/apache/incubator-dolphinscheduler + +Official Website: https://dolphinscheduler.apache.org + +![Dolphin Scheduler](https://dolphinscheduler.apache.org/img/hlogo_colorful.svg) + +[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md) +[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md) + +## How to use this docker image + +#### You can start a dolphinscheduler instance +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test \ +-p 8888:8888 \ +dolphinscheduler all +``` + +The default postgres user `root`, postgres password `root` and database `dolphinscheduler` are created in the `startup.sh`. + +The default zookeeper is created in the `startup.sh`. + +#### Or via Environment Variables **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`ZOOKEEPER_QUORUM`** + +You can specify **existing postgres service**. Example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 8888:8888 \ +dolphinscheduler all +``` + +You can specify **existing zookeeper service**. Example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 8888:8888 \ +dolphinscheduler all +``` + +#### Or start a standalone dolphinscheduler server + +You can start a standalone dolphinscheduler server. 
+ +* Start a **master server**, For example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler master-server +``` + +* Start a **worker server**, For example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler worker-server +``` + +* Start a **api server**, For example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 12345:12345 \ +dolphinscheduler api-server +``` + +* Start a **alert server**, For example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler alert-server +``` + +* Start a **frontend**, For example: + +``` +$ docker run -dit --name dolphinscheduler \ +-e FRONTEND_API_SERVER_HOST="192.168.x.x" -e FRONTEND_API_SERVER_PORT="12345" \ +-p 8888:8888 \ +dolphinscheduler frontend +``` + +**Note**: You must be specify `POSTGRESQL_HOST` `POSTGRESQL_PORT` `ZOOKEEPER_QUORUM` when start a standalone dolphinscheduler server. + +## How to build a docker image + +You can build a docker image in A Unix-like operating system, You can also build it in Windows operating system. 
+ +In Unix-Like, Example: + +```bash +$ cd path/incubator-dolphinscheduler +$ sh ./dockerfile/hooks/build +``` + +In Windows, Example: + +```bat +c:\incubator-dolphinscheduler>.\dockerfile\hooks\build.bat +``` + +Please read `./dockerfile/hooks/build` `./dockerfile/hooks/build.bat` script files if you don't understand + +## Environment Variables + +The Dolphin Scheduler image uses several environment variables which are easy to miss. While none of the variables are required, they may significantly aid you in using the image. + +**`POSTGRESQL_HOST`** + +This environment variable sets the host for PostgreSQL. The default value is `127.0.0.1`. + +**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`. + +**`POSTGRESQL_PORT`** + +This environment variable sets the port for PostgreSQL. The default value is `5432`. + +**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`, `api-server`, `alert-server`. + +**`POSTGRESQL_USERNAME`** + +This environment variable sets the username for PostgreSQL. The default value is `root`. + +**`POSTGRESQL_PASSWORD`** + +This environment variable sets the password for PostgreSQL. The default value is `root`. + +**`DOLPHINSCHEDULER_ENV_PATH`** + +This environment variable sets the runtime environment for task. The default value is `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`. + +**`TASK_QUEUE`** + +This environment variable sets the task queue for `master-server` and `worker-serverr`. The default value is `zookeeper`. + +**`ZOOKEEPER_QUORUM`** + +This environment variable sets zookeeper quorum for `master-server` and `worker-serverr`. The default value is `127.0.0.1:2181`. + +**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `master-server`, `worker-server`. 
+ +**`MASTER_EXEC_THREADS`** + +This environment variable sets exec thread num for `master-server`. The default value is `100`. + +**`MASTER_EXEC_TASK_NUM`** + +This environment variable sets exec task num for `master-server`. The default value is `20`. + +**`MASTER_HEARTBEAT_INTERVAL`** + +This environment variable sets heartbeat interval for `master-server`. The default value is `10`. + +**`MASTER_TASK_COMMIT_RETRYTIMES`** + +This environment variable sets task commit retry times for `master-server`. The default value is `5`. + +**`MASTER_TASK_COMMIT_INTERVAL`** + +This environment variable sets task commit interval for `master-server`. The default value is `1000`. + +**`MASTER_MAX_CPULOAD_AVG`** + +This environment variable sets max cpu load avg for `master-server`. The default value is `100`. + +**`MASTER_RESERVED_MEMORY`** + +This environment variable sets reserved memory for `master-server`. The default value is `0.1`. + +**`WORKER_EXEC_THREADS`** + +This environment variable sets exec thread num for `worker-server`. The default value is `100`. + +**`WORKER_HEARTBEAT_INTERVAL`** + +This environment variable sets heartbeat interval for `worker-server`. The default value is `10`. + +**`WORKER_FETCH_TASK_NUM`** + +This environment variable sets fetch task num for `worker-server`. The default value is `3`. + +**`WORKER_MAX_CPULOAD_AVG`** + +This environment variable sets max cpu load avg for `worker-server`. The default value is `100`. + +**`WORKER_RESERVED_MEMORY`** + +This environment variable sets reserved memory for `worker-server`. The default value is `0.1`. + +**`XLS_FILE_PATH`** + +This environment variable sets xls file path for `alert-server`. The default value is `/tmp/xls`. + +**`MAIL_SERVER_HOST`** + +This environment variable sets mail server host for `alert-server`. The default value is empty. + +**`MAIL_SERVER_PORT`** + +This environment variable sets mail server port for `alert-server`. The default value is empty. 
+ +**`MAIL_SENDER`** + +This environment variable sets mail sender for `alert-server`. The default value is empty. + +**`MAIL_USER=`** + +This environment variable sets mail user for `alert-server`. The default value is empty. + +**`MAIL_PASSWD`** + +This environment variable sets mail password for `alert-server`. The default value is empty. + +**`MAIL_SMTP_STARTTLS_ENABLE`** + +This environment variable sets SMTP tls for `alert-server`. The default value is `true`. + +**`MAIL_SMTP_SSL_ENABLE`** + +This environment variable sets SMTP ssl for `alert-server`. The default value is `false`. + +**`MAIL_SMTP_SSL_TRUST`** + +This environment variable sets SMTP ssl truest for `alert-server`. The default value is empty. + +**`ENTERPRISE_WECHAT_ENABLE`** + +This environment variable sets enterprise wechat enable for `alert-server`. The default value is `false`. + +**`ENTERPRISE_WECHAT_CORP_ID`** + +This environment variable sets enterprise wechat corp id for `alert-server`. The default value is empty. + +**`ENTERPRISE_WECHAT_SECRET`** + +This environment variable sets enterprise wechat secret for `alert-server`. The default value is empty. + +**`ENTERPRISE_WECHAT_AGENT_ID`** + +This environment variable sets enterprise wechat agent id for `alert-server`. The default value is empty. + +**`ENTERPRISE_WECHAT_USERS`** + +This environment variable sets enterprise wechat users for `alert-server`. The default value is empty. + +**`FRONTEND_API_SERVER_HOST`** + +This environment variable sets api server host for `frontend`. The default value is `127.0.0.1`. + +**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `api-server`. + +**`FRONTEND_API_SERVER_PORT`** + +This environment variable sets api server port for `frontend`. The default value is `123451`. + +**Note**: You must be specify it when start a standalone dolphinscheduler server. Like `api-server`. 
+ +## Initialization scripts + +If you would like to do additional initialization in an image derived from this one, add one or more environment variable under `/root/start-init-conf.sh`, and modify template files in `/opt/dolphinscheduler/conf/*.tpl`. + +For example, to add an environment variable `API_SERVER_PORT` in `/root/start-init-conf.sh`: + +``` +export API_SERVER_PORT=5555 +``` + +and to modify `/opt/dolphinscheduler/conf/application-api.properties.tpl` template file, add server port: ``` - cd .. - docker build -t dolphinscheduler --build-arg version=1.1.0 --build-arg tar_version=1.1.0-SNAPSHOT -f dockerfile/Dockerfile . - docker run -p 12345:12345 -p 8888:8888 --rm --name dolphinscheduler -d dolphinscheduler +server.port=${API_SERVER_PORT} ``` -* Visit the url: http://127.0.0.1:8888 -* UserName:admin Password:dolphinscheduler123 -## Note -* MacOS: The memory of docker needs to be set to 4G, default 2G. Steps: Preferences -> Advanced -> adjust resources -> Apply & Restart +`/root/start-init-conf.sh` will dynamically generate config file: + +```sh +echo "generate app config" +ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do +eval "cat << EOF +$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line}) +EOF +" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*} +done + +echo "generate nginx config" +sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf +sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf +``` diff --git a/dockerfile/README_zh_CN.md b/dockerfile/README_zh_CN.md new file mode 100644 index 0000000000..5830e30b07 --- /dev/null +++ b/dockerfile/README_zh_CN.md @@ -0,0 +1,306 @@ +## Dolphin Scheduler是什么? 
+ +一个分布式易扩展的可视化DAG工作流任务调度系统。致力于解决数据处理流程中错综复杂的依赖关系,使调度系统在数据处理流程中`开箱即用`。 + +Github URL: https://github.com/apache/incubator-dolphinscheduler + +Official Website: https://dolphinscheduler.apache.org + +![Dolphin Scheduler](https://dolphinscheduler.apache.org/img/hlogo_colorful.svg) + +[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md) +[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md) + +## 如何使用docker镜像 + +#### 你可以运行一个dolphinscheduler实例 +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_USERNAME=test -e POSTGRESQL_PASSWORD=test \ +-p 8888:8888 \ +dolphinscheduler all +``` + +在`startup.sh`脚本中,默认的创建`Postgres`的用户、密码和数据库,默认值分别为:`root`、`root`、`dolphinscheduler`。 + +同时,默认的`Zookeeper`也会在`startup.sh`脚本中被创建。 + +#### 或者通过环境变量 **`POSTGRESQL_HOST`** **`POSTGRESQL_PORT`** **`ZOOKEEPER_QUORUM`** 使用已存在的服务 + +你可以指定一个已经存在的 **`Postgres`** 服务. 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 8888:8888 \ +dolphinscheduler all +``` + +你也可以指定一个已经存在的 **Zookeeper** 服务. 
如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 8888:8888 \ +dolphinscheduler all +``` + +#### 或者运行dolphinscheduler中的部分服务 + +你能够运行dolphinscheduler中的部分服务。 + +* 启动一个 **master server**, 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler master-server +``` + +* 启动一个 **worker server**, 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e ZOOKEEPER_QUORUM="l92.168.x.x:2181" +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler worker-server +``` + +* 启动一个 **api server**, 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +-p 12345:12345 \ +dolphinscheduler api-server +``` + +* 启动一个 **alert server**, 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e POSTGRESQL_HOST="192.168.x.x" -e POSTGRESQL_PORT="5432" \ +-e POSTGRESQL_USERNAME="test" -e POSTGRESQL_PASSWORD="test" \ +dolphinscheduler alert-server +``` + +* 启动一个 **frontend**, 如下: + +``` +$ docker run -dit --name dolphinscheduler \ +-e FRONTEND_API_SERVER_HOST="192.168.x.x" -e FRONTEND_API_SERVER_PORT="12345" \ +-p 8888:8888 \ +dolphinscheduler frontend +``` + +**注意**: 当你运行dolphinscheduler中的部分服务时,你必须指定这些环境变量 `POSTGRESQL_HOST` `POSTGRESQL_PORT` `ZOOKEEPER_QUORUM`。 + +## 如何构建一个docker镜像 + +你能够在类Unix系统和Windows系统中构建一个docker镜像。 + +类Unix系统, 如下: + +```bash +$ cd path/incubator-dolphinscheduler +$ sh ./dockerfile/hooks/build +``` + +Windows系统, 如下: + +```bat +c:\incubator-dolphinscheduler>.\dockerfile\hooks\build.bat +``` + +如果你不理解这些脚本 `./dockerfile/hooks/build` `./dockerfile/hooks/build.bat`,请阅读里面的内容。 
+ +## 环境变量 + +Dolphin Scheduler镜像使用了几个容易遗漏的环境变量。虽然这些变量不是必须的,但是可以帮助你更容易配置镜像并根据你的需求定义相应的服务配置。 + +**`POSTGRESQL_HOST`** + +配置`PostgreSQL`的`HOST`, 默认值 `127.0.0.1`。 + +**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。 + +**`POSTGRESQL_PORT`** + +配置`PostgreSQL`的`PORT`, 默认值 `5432`。 + +**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`、`api-server`、`alert-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。 + +**`POSTGRESQL_USERNAME`** + +配置`PostgreSQL`的`USERNAME`, 默认值 `root`。 + +**`POSTGRESQL_PASSWORD`** + +配置`PostgreSQL`的`PASSWORD`, 默认值 `root`。 + +**`DOLPHINSCHEDULER_ENV_PATH`** + +任务执行时的环境变量配置文件, 默认值 `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`。 + +**`TASK_QUEUE`** + +配置`master-server`和`worker-server`的`Zookeeper`任务队列名, 默认值 `zookeeper`。 + +**`ZOOKEEPER_QUORUM`** + +配置`master-server`和`worker-server`的`Zookeeper`地址, 默认值 `127.0.0.1:2181`。 + +**注意**: 当运行`dolphinscheduler`中`master-server`、`worker-server`这些服务时,必须指定这个环境变量,以便于你更好的搭建分布式服务。 + +**`MASTER_EXEC_THREADS`** + +配置`master-server`中的执行线程数量,默认值 `100`。 + +**`MASTER_EXEC_TASK_NUM`** + +配置`master-server`中的执行任务数量,默认值 `20`。 + +**`MASTER_HEARTBEAT_INTERVAL`** + +配置`master-server`中的心跳交互时间,默认值 `10`。 + +**`MASTER_TASK_COMMIT_RETRYTIMES`** + +配置`master-server`中的任务提交重试次数,默认值 `5`。 + +**`MASTER_TASK_COMMIT_INTERVAL`** + +配置`master-server`中的任务提交交互时间,默认值 `1000`。 + +**`MASTER_MAX_CPULOAD_AVG`** + +配置`master-server`中的CPU中的`load average`值,默认值 `100`。 + +**`MASTER_RESERVED_MEMORY`** + +配置`master-server`的保留内存,默认值 `0.1`。 + +**`WORKER_EXEC_THREADS`** + +配置`worker-server`中的执行线程数量,默认值 `100`。 + +**`WORKER_HEARTBEAT_INTERVAL`** + +配置`worker-server`中的心跳交互时间,默认值 `10`。 + +**`WORKER_FETCH_TASK_NUM`** + +配置`worker-server`中的获取任务的数量,默认值 `3`。 + +**`WORKER_MAX_CPULOAD_AVG`** + +配置`worker-server`中的CPU中的最大`load average`值,默认值 `100`。 + +**`WORKER_RESERVED_MEMORY`** + +配置`worker-server`的保留内存,默认值 `0.1`。 + +**`XLS_FILE_PATH`** + +配置`alert-server`的`XLS`文件的存储路径,默认值 `/tmp/xls`。 + 
+**`MAIL_SERVER_HOST`** + +配置`alert-server`的邮件服务地址,默认值 `空`。 + +**`MAIL_SERVER_PORT`** + +配置`alert-server`的邮件服务端口,默认值 `空`。 + +**`MAIL_SENDER`** + +配置`alert-server`的邮件发送人,默认值 `空`。 + +**`MAIL_USER`** + +配置`alert-server`的邮件服务用户名,默认值 `空`。 + +**`MAIL_PASSWD`** + +配置`alert-server`的邮件服务用户密码,默认值 `空`。 + +**`MAIL_SMTP_STARTTLS_ENABLE`** + +配置`alert-server`的邮件服务是否启用TLS,默认值 `true`。 + +**`MAIL_SMTP_SSL_ENABLE`** + +配置`alert-server`的邮件服务是否启用SSL,默认值 `false`。 + +**`MAIL_SMTP_SSL_TRUST`** + +配置`alert-server`的邮件服务SSL的信任地址,默认值 `空`。 + +**`ENTERPRISE_WECHAT_ENABLE`** + +配置`alert-server`是否启用企业微信告警,默认值 `false`。 + +**`ENTERPRISE_WECHAT_CORP_ID`** + +配置`alert-server`的企业微信`ID`,默认值 `空`。 + +**`ENTERPRISE_WECHAT_SECRET`** + +配置`alert-server`的企业微信`SECRET`,默认值 `空`。 + +**`ENTERPRISE_WECHAT_AGENT_ID`** + +配置`alert-server`的企业微信`AGENT_ID`,默认值 `空`。 + +**`ENTERPRISE_WECHAT_USERS`** + +配置`alert-server`的企业微信`USERS`,默认值 `空`。 + +**`FRONTEND_API_SERVER_HOST`** + +配置`frontend`的连接`api-server`的地址,默认值 `127.0.0.1`。 + +**注意**: 当单独运行`api-server`时,你应该指定`api-server`这个值。 + +**`FRONTEND_API_SERVER_PORT`** + +配置`frontend`的连接`api-server`的端口,默认值 `12345`。 + +**注意**: 当单独运行`api-server`时,你应该指定`api-server`这个值。 + +## 初始化脚本 + +如果你想在编译的时候或者运行的时候附加一些其它的操作及新增一些环境变量,你可以在`/root/start-init-conf.sh`文件中进行修改,同时如果涉及到配置文件的修改,请在`/opt/dolphinscheduler/conf/*.tpl`中修改相应的配置文件 + +例如,在`/root/start-init-conf.sh`添加一个环境变量`API_SERVER_PORT`: + +``` +export API_SERVER_PORT=5555 +``` + +当添加以上环境变量后,你应该在相应的模板文件`/opt/dolphinscheduler/conf/application-api.properties.tpl`中添加这个环境变量配置: +``` +server.port=${API_SERVER_PORT} +``` + +`/root/start-init-conf.sh`将根据模板文件动态的生成配置文件: + +```sh +echo "generate app config" +ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do +eval "cat << EOF +$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line}) +EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*} +done + +echo "generate nginx config" +sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf 
+sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf +``` diff --git a/dockerfile/conf/dolphinscheduler/conf/alert.properties b/dockerfile/conf/dolphinscheduler/alert.properties.tpl similarity index 68% rename from dockerfile/conf/dolphinscheduler/conf/alert.properties rename to dockerfile/conf/dolphinscheduler/alert.properties.tpl index 276ef3132a..b940ecd203 100644 --- a/dockerfile/conf/dolphinscheduler/conf/alert.properties +++ b/dockerfile/conf/dolphinscheduler/alert.properties.tpl @@ -14,33 +14,33 @@ # See the License for the specific language governing permissions and # limitations under the License. # - #alert type is EMAIL/SMS alert.type=EMAIL +# alter msg template, default is html template +#alert.template=html # mail server configuration mail.protocol=SMTP -mail.server.host=smtp.126.com -mail.server.port= -mail.sender=dolphinscheduler@126.com -mail.user=dolphinscheduler@126.com -mail.passwd=escheduler123 - +mail.server.host=${MAIL_SERVER_HOST} +mail.server.port=${MAIL_SERVER_PORT} +mail.sender=${MAIL_SENDER} +mail.user=${MAIL_USER} +mail.passwd=${MAIL_PASSWD} # TLS -mail.smtp.starttls.enable=false +mail.smtp.starttls.enable=${MAIL_SMTP_STARTTLS_ENABLE} # SSL -mail.smtp.ssl.enable=true -mail.smtp.ssl.trust=smtp.126.com +mail.smtp.ssl.enable=${MAIL_SMTP_SSL_ENABLE} +mail.smtp.ssl.trust=${MAIL_SMTP_SSL_TRUST} #xls file path,need create if not exist -xls.file.path=/tmp/xls +xls.file.path=${XLS_FILE_PATH} # Enterprise WeChat configuration -enterprise.wechat.enable=false -enterprise.wechat.corp.id=xxxxxxx -enterprise.wechat.secret=xxxxxxx -enterprise.wechat.agent.id=xxxxxxx -enterprise.wechat.users=xxxxxxx +enterprise.wechat.enable=${ENTERPRISE_WECHAT_ENABLE} +enterprise.wechat.corp.id=${ENTERPRISE_WECHAT_CORP_ID} +enterprise.wechat.secret=${ENTERPRISE_WECHAT_SECRET} +enterprise.wechat.agent.id=${ENTERPRISE_WECHAT_AGENT_ID} +enterprise.wechat.users=${ENTERPRISE_WECHAT_USERS} 
enterprise.wechat.token.url=https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=$corpId&corpsecret=$secret enterprise.wechat.push.url=https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=$token enterprise.wechat.team.send.msg={\"toparty\":\"$toParty\",\"agentid\":\"$agentId\",\"msgtype\":\"text\",\"text\":{\"content\":\"$msg\"},\"safe\":\"0\"} diff --git a/dockerfile/conf/dolphinscheduler/conf/application-api.properties b/dockerfile/conf/dolphinscheduler/application-api.properties.tpl similarity index 90% rename from dockerfile/conf/dolphinscheduler/conf/application-api.properties rename to dockerfile/conf/dolphinscheduler/application-api.properties.tpl index ead8dd872e..424ea55f7d 100644 --- a/dockerfile/conf/dolphinscheduler/conf/application-api.properties +++ b/dockerfile/conf/dolphinscheduler/application-api.properties.tpl @@ -14,27 +14,24 @@ # See the License for the specific language governing permissions and # limitations under the License. # - -logging.config=classpath:apiserver_logback.xml - # server port server.port=12345 - # session config server.servlet.session.timeout=7200 - +# servlet config server.servlet.context-path=/dolphinscheduler/ - # file size limit for upload spring.servlet.multipart.max-file-size=1024MB spring.servlet.multipart.max-request-size=1024MB - -#post content +# post content server.jetty.max-http-post-size=5000000 - +# i18n spring.messages.encoding=UTF-8 - #i18n classpath folder , file prefix messages, if have many files, use "," seperator spring.messages.basename=i18n/messages +# Authentication types (supported types: PASSWORD) +security.authentication.type=PASSWORD + + diff --git a/dockerfile/conf/dolphinscheduler/conf/application-dao.properties b/dockerfile/conf/dolphinscheduler/application.properties.tpl similarity index 72% rename from dockerfile/conf/dolphinscheduler/conf/application-dao.properties rename to dockerfile/conf/dolphinscheduler/application.properties.tpl index 166c36fbf0..c643c414cd 100644 --- 
a/dockerfile/conf/dolphinscheduler/conf/application-dao.properties +++ b/dockerfile/conf/dolphinscheduler/application.properties.tpl @@ -17,70 +17,57 @@ # base spring data source configuration spring.datasource.type=com.alibaba.druid.pool.DruidDataSource -# postgresql +# postgre spring.datasource.driver-class-name=org.postgresql.Driver -spring.datasource.url=jdbc:postgresql://127.0.0.1:5432/dolphinscheduler -spring.datasource.username=root -spring.datasource.password=root@123 - +spring.datasource.url=jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/dolphinscheduler?characterEncoding=utf8 +# mysql +#spring.datasource.driver-class-name=com.mysql.jdbc.Driver +#spring.datasource.url=jdbc:mysql://192.168.xx.xx:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8 +spring.datasource.username=${POSTGRESQL_USERNAME} +spring.datasource.password=${POSTGRESQL_PASSWORD} # connection configuration spring.datasource.initialSize=5 # min connection number spring.datasource.minIdle=5 # max connection number spring.datasource.maxActive=50 - # max wait time for get a connection in milliseconds. if configuring maxWait, fair locks are enabled by default and concurrency efficiency decreases. # If necessary, unfair locks can be used by configuring the useUnfairLock attribute to true. spring.datasource.maxWait=60000 - # milliseconds for check to close free connections spring.datasource.timeBetweenEvictionRunsMillis=60000 - # the Destroy thread detects the connection interval and closes the physical connection in milliseconds if the connection idle time is greater than or equal to minEvictableIdleTimeMillis. spring.datasource.timeBetweenConnectErrorMillis=60000 - # the longest time a connection remains idle without being evicted, in milliseconds spring.datasource.minEvictableIdleTimeMillis=300000 - #the SQL used to check whether the connection is valid requires a query statement. If validation Query is null, testOnBorrow, testOnReturn, and testWhileIdle will not work. 
spring.datasource.validationQuery=SELECT 1 - #check whether the connection is valid for timeout, in seconds spring.datasource.validationQueryTimeout=3 - # when applying for a connection, if it is detected that the connection is idle longer than time Between Eviction Runs Millis, # validation Query is performed to check whether the connection is valid spring.datasource.testWhileIdle=true - #execute validation to check if the connection is valid when applying for a connection spring.datasource.testOnBorrow=true #execute validation to check if the connection is valid when the connection is returned spring.datasource.testOnReturn=false spring.datasource.defaultAutoCommit=true spring.datasource.keepAlive=true - # open PSCache, specify count PSCache for every connection spring.datasource.poolPreparedStatements=true spring.datasource.maxPoolPreparedStatementPerConnectionSize=20 - spring.datasource.spring.datasource.filters=stat,wall,log4j spring.datasource.connectionProperties=druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000 #mybatis mybatis-plus.mapper-locations=classpath*:/org.apache.dolphinscheduler.dao.mapper/*.xml - mybatis-plus.typeEnumsPackage=org.apache.dolphinscheduler.*.enums - #Entity scan, where multiple packages are separated by a comma or semicolon mybatis-plus.typeAliasesPackage=org.apache.dolphinscheduler.dao.entity - #Primary key type AUTO:" database ID AUTO ", INPUT:" user INPUT ID", ID_WORKER:" global unique ID (numeric type unique ID)", UUID:" global unique ID UUID"; mybatis-plus.global-config.db-config.id-type=AUTO - #Field policy IGNORED:" ignore judgment ",NOT_NULL:" not NULL judgment "),NOT_EMPTY:" not NULL judgment" mybatis-plus.global-config.db-config.field-strategy=NOT_NULL - #The hump underline is converted mybatis-plus.global-config.db-config.column-underline=true mybatis-plus.global-config.db-config.logic-delete-value=-1 @@ -92,12 +79,37 @@ mybatis-plus.configuration.cache-enabled=false 
mybatis-plus.configuration.call-setters-on-nulls=true mybatis-plus.configuration.jdbc-type-for-null=null +# master settings +# master execute thread num +master.exec.threads=${MASTER_EXEC_THREADS} +# master execute task number in parallel +master.exec.task.num=${MASTER_EXEC_TASK_NUM} +# master heartbeat interval +master.heartbeat.interval=${MASTER_HEARTBEAT_INTERVAL} +# master commit task retry times +master.task.commit.retryTimes=${MASTER_TASK_COMMIT_RETRYTIMES} +# master commit task interval +master.task.commit.interval=${MASTER_TASK_COMMIT_INTERVAL} +# only less than cpu avg load, master server can work. default value : the number of cpu cores * 2 +master.max.cpuload.avg=${MASTER_MAX_CPULOAD_AVG} +# only larger than reserved memory, master server can work. default value : physical memory * 1/10, unit is G. +master.reserved.memory=${MASTER_RESERVED_MEMORY} + +# worker settings +# worker execute thread num +worker.exec.threads=${WORKER_EXEC_THREADS} +# worker heartbeat interval +worker.heartbeat.interval=${WORKER_HEARTBEAT_INTERVAL} +# submit the number of tasks at a time +worker.fetch.task.num=${WORKER_FETCH_TASK_NUM} +# only less than cpu avg load, worker server can work. default value : the number of cpu cores * 2 +worker.max.cpuload.avg=${WORKER_MAX_CPULOAD_AVG} +# only larger than reserved memory, worker server can work. default value : physical memory * 1/6, unit is G. +worker.reserved.memory=${WORKER_RESERVED_MEMORY} + # data quality analysis is not currently in use. 
please ignore the following configuration -# task record flag +# task record task.record.flag=false task.record.datasource.url=jdbc:mysql://192.168.xx.xx:3306/etl?characterEncoding=UTF-8 task.record.datasource.username=xx task.record.datasource.password=xx - -# Logger Config -#logging.level.org.apache.dolphinscheduler.dao=debug diff --git a/dockerfile/conf/dolphinscheduler/conf/common/common.properties b/dockerfile/conf/dolphinscheduler/common.properties.tpl similarity index 55% rename from dockerfile/conf/dolphinscheduler/conf/common/common.properties rename to dockerfile/conf/dolphinscheduler/common.properties.tpl index 24844f693b..ea03e0b78e 100644 --- a/dockerfile/conf/dolphinscheduler/conf/common/common.properties +++ b/dockerfile/conf/dolphinscheduler/common.properties.tpl @@ -16,44 +16,69 @@ # #task queue implementation, default "zookeeper" -dolphinscheduler.queue.impl=zookeeper +dolphinscheduler.queue.impl=${TASK_QUEUE} -# user data directory path, self configuration, please make sure the directory exists and have read write permissions -data.basedir.path=/tmp/dolphinscheduler +#zookeeper cluster. multiple are separated by commas. eg. 192.168.xx.xx:2181,192.168.xx.xx:2181,192.168.xx.xx:2181 +zookeeper.quorum=${ZOOKEEPER_QUORUM} +#dolphinscheduler root directory +zookeeper.dolphinscheduler.root=/dolphinscheduler +#dolphinscheduler failover directory +zookeeper.session.timeout=300 +zookeeper.connection.timeout=300 +zookeeper.retry.base.sleep=100 +zookeeper.retry.max.sleep=30000 +zookeeper.retry.maxtime=5 -# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions -data.download.basedir.path=/tmp/dolphinscheduler/download +#============================================================================ +# System +#============================================================================ +# system env path. 
self configuration, please make sure the directory and file exists and have read write execute permissions +dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH} +#resource.view.suffixs +resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties +# is development state? default "false" +development.state=true -# process execute directory. self configuration, please make sure the directory exists and have read write permissions -process.exec.basepath=/tmp/dolphinscheduler/exec +# resource upload startup type : HDFS,S3,NONE +res.upload.startup.type=NONE +#============================================================================ +# HDFS +#============================================================================ # Users who have permission to create directories under the HDFS root path hdfs.root.user=hdfs - # data base dir, resource file will store to this hadoop hdfs path, self configuration, please make sure the directory exists on hdfs and have read write permissions。"/dolphinscheduler" is recommended data.store2hdfs.basepath=/dolphinscheduler - -# resource upload startup type : HDFS,S3,NONE -res.upload.startup.type=NONE - +# user data directory path, self configuration, please make sure the directory exists and have read write permissions +data.basedir.path=/tmp/dolphinscheduler +# directory path for user data download. self configuration, please make sure the directory exists and have read write permissions +data.download.basedir.path=/tmp/dolphinscheduler/download +# process execute directory. 
self configuration, please make sure the directory exists and have read write permissions +process.exec.basepath=/tmp/dolphinscheduler/exec # whether kerberos starts hadoop.security.authentication.startup.state=false - # java.security.krb5.conf path java.security.krb5.conf.path=/opt/krb5.conf - # loginUserFromKeytab user login.user.keytab.username=hdfs-mycluster@ESZ.COM - # loginUserFromKeytab path login.user.keytab.path=/opt/hdfs.headless.keytab -# system env path. self configuration, please make sure the directory and file exists and have read write execute permissions -dolphinscheduler.env.path=/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh +#============================================================================ +# S3 +#============================================================================ +# ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml +# to the conf directory,support s3,for example : s3a://dolphinscheduler +fs.defaultFS=hdfs://mycluster:8020 +# s3 need,s3 endpoint +fs.s3a.endpoint=http://192.168.199.91:9010 +# s3 need,s3 access key +fs.s3a.access.key=A3DXS30FO22544RE +# s3 need,s3 secret key +fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK +#resourcemanager ha note this need ips , this empty if single +yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx +# If it is a single resourcemanager, you only need to configure one host name. If it is resourcemanager HA, the default configuration is fine +yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s -#resource.view.suffixs -resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml - -# is development state? 
default "false" -development.state=true diff --git a/dockerfile/conf/dolphinscheduler/conf/alert_logback.xml b/dockerfile/conf/dolphinscheduler/conf/alert_logback.xml deleted file mode 100644 index 35e19865b9..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/alert_logback.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - ${log.base}/dolphinscheduler-alert.log - - ${log.base}/dolphinscheduler-alert.%d{yyyy-MM-dd_HH}.%i.log - 20 - 64MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/apiserver_logback.xml b/dockerfile/conf/dolphinscheduler/conf/apiserver_logback.xml deleted file mode 100644 index 36719671c9..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/apiserver_logback.xml +++ /dev/null @@ -1,60 +0,0 @@ - - - - - - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - INFO - - ${log.base}/dolphinscheduler-api-server.log - - ${log.base}/dolphinscheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log - 168 - 64MB - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/combined_logback.xml b/dockerfile/conf/dolphinscheduler/conf/combined_logback.xml deleted file mode 100644 index 7a9a5b4621..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/combined_logback.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - - - - - - - - %highlight([%level]) %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{10}:[%line] - %msg%n - - UTF-8 - - - - - INFO - - - - taskAppId - ${log.base} - - - - ${log.base}/${taskAppId}.log - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - true - - - - - - ${log.base}/dolphinscheduler-combined.log - - INFO - - - - 
${log.base}/dolphinscheduler-combined.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - -       - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - -    - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/common/hadoop/hadoop.properties b/dockerfile/conf/dolphinscheduler/conf/common/hadoop/hadoop.properties deleted file mode 100644 index 2c19b4a52e..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/common/hadoop/hadoop.properties +++ /dev/null @@ -1,35 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# ha or single namenode,If namenode ha needs to copy core-site.xml and hdfs-site.xml -# to the conf directory,support s3,for example : s3a://dolphinscheduler -fs.defaultFS=hdfs://mycluster:8020 - -# s3 need,s3 endpoint -fs.s3a.endpoint=http://192.168.199.91:9010 - -# s3 need,s3 access key -fs.s3a.access.key=A3DXS30FO22544RE - -# s3 need,s3 secret key -fs.s3a.secret.key=OloCLq3n+8+sdPHUhJ21XrSxTC+JK - -#resourcemanager ha note this need ips , this empty if single -yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx - -# If it is a single resourcemanager, you only need to configure one host name. 
If it is resourcemanager HA, the default configuration is fine -yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/i18n/messages.properties b/dockerfile/conf/dolphinscheduler/conf/i18n/messages.properties deleted file mode 100644 index be880ba26d..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/i18n/messages.properties +++ /dev/null @@ -1,252 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -QUERY_SCHEDULE_LIST_NOTES=query schedule list -EXECUTE_PROCESS_TAG=execute process related operation -PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation -RUN_PROCESS_INSTANCE_NOTES=run process instance -START_NODE_LIST=start node list(node name) -TASK_DEPEND_TYPE=task depend type -COMMAND_TYPE=command type -RUN_MODE=run mode -TIMEOUT=timeout -EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=execute action to process instance -EXECUTE_TYPE=execute type -START_CHECK_PROCESS_DEFINITION_NOTES=start check process definition -GET_RECEIVER_CC_NOTES=query receiver cc -DESC=description -GROUP_NAME=group name -GROUP_TYPE=group type -QUERY_ALERT_GROUP_LIST_NOTES=query alert group list -UPDATE_ALERT_GROUP_NOTES=update alert group -DELETE_ALERT_GROUP_BY_ID_NOTES=delete alert group by id -VERIFY_ALERT_GROUP_NAME_NOTES=verify alert group name, check alert group exist or not -GRANT_ALERT_GROUP_NOTES=grant alert group -USER_IDS=user id list -ALERT_GROUP_TAG=alert group related operation -CREATE_ALERT_GROUP_NOTES=create alert group -WORKER_GROUP_TAG=worker group related operation -SAVE_WORKER_GROUP_NOTES=create worker group -WORKER_GROUP_NAME=worker group name -WORKER_IP_LIST=worker ip list, eg. 
192.168.1.1,192.168.1.2 -QUERY_WORKER_GROUP_PAGING_NOTES=query worker group paging -QUERY_WORKER_GROUP_LIST_NOTES=query worker group list -DELETE_WORKER_GROUP_BY_ID_NOTES=delete worker group by id -DATA_ANALYSIS_TAG=analysis related operation of task state -COUNT_TASK_STATE_NOTES=count task state -COUNT_PROCESS_INSTANCE_NOTES=count process instance state -COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user -COUNT_COMMAND_STATE_NOTES=count command state -COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue\ - -ACCESS_TOKEN_TAG=access token related operation -MONITOR_TAG=monitor related operation -MASTER_LIST_NOTES=master server list -WORKER_LIST_NOTES=worker server list -QUERY_DATABASE_STATE_NOTES=query database state -QUERY_ZOOKEEPER_STATE_NOTES=QUERY ZOOKEEPER STATE -TASK_STATE=task instance state -SOURCE_TABLE=SOURCE TABLE -DEST_TABLE=dest table -TASK_DATE=task date -QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=query history task record list paging -DATA_SOURCE_TAG=data source related operation -CREATE_DATA_SOURCE_NOTES=create data source -DATA_SOURCE_NAME=data source name -DATA_SOURCE_NOTE=data source desc -DB_TYPE=database type -DATA_SOURCE_HOST=DATA SOURCE HOST -DATA_SOURCE_PORT=data source port -DATABASE_NAME=database name -QUEUE_TAG=queue related operation -QUERY_QUEUE_LIST_NOTES=query queue list -QUERY_QUEUE_LIST_PAGING_NOTES=query queue list paging -CREATE_QUEUE_NOTES=create queue -YARN_QUEUE_NAME=yarn(hadoop) queue name -QUEUE_ID=queue id -TENANT_DESC=tenant desc -QUERY_TENANT_LIST_PAGING_NOTES=query tenant list paging -QUERY_TENANT_LIST_NOTES=query tenant list -UPDATE_TENANT_NOTES=update tenant -DELETE_TENANT_NOTES=delete tenant -RESOURCES_TAG=resource center related operation -CREATE_RESOURCE_NOTES=create resource -RESOURCE_TYPE=resource file type -RESOURCE_NAME=resource name -RESOURCE_DESC=resource file desc -RESOURCE_FILE=resource file -RESOURCE_ID=resource id -QUERY_RESOURCE_LIST_NOTES=query resource list 
-DELETE_RESOURCE_BY_ID_NOTES=delete resource by id -VIEW_RESOURCE_BY_ID_NOTES=view resource by id -ONLINE_CREATE_RESOURCE_NOTES=online create resource -SUFFIX=resource file suffix -CONTENT=resource file content -UPDATE_RESOURCE_NOTES=edit resource file online -DOWNLOAD_RESOURCE_NOTES=download resource file -CREATE_UDF_FUNCTION_NOTES=create udf function -UDF_TYPE=UDF type -FUNC_NAME=function name -CLASS_NAME=package and class name -ARG_TYPES=arguments -UDF_DESC=udf desc -VIEW_UDF_FUNCTION_NOTES=view udf function -UPDATE_UDF_FUNCTION_NOTES=update udf function -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging -VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name -DELETE_UDF_FUNCTION_NOTES=delete udf function -AUTHORIZED_FILE_NOTES=authorized file -UNAUTHORIZED_FILE_NOTES=unauthorized file -AUTHORIZED_UDF_FUNC_NOTES=authorized udf func -UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func -VERIFY_QUEUE_NOTES=verify queue -TENANT_TAG=tenant related operation -CREATE_TENANT_NOTES=create tenant -TENANT_CODE=tenant code -TENANT_NAME=tenant name -QUEUE_NAME=queue name -PASSWORD=password -DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} -PROJECT_TAG=project related operation -CREATE_PROJECT_NOTES=create project -PROJECT_DESC=project description -UPDATE_PROJECT_NOTES=update project -PROJECT_ID=project id -QUERY_PROJECT_BY_ID_NOTES=query project info by project id -QUERY_PROJECT_LIST_PAGING_NOTES=QUERY PROJECT LIST PAGING -DELETE_PROJECT_BY_ID_NOTES=delete project by id -QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project -QUERY_ALL_PROJECT_LIST_NOTES=query all project list -QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project -TASK_RECORD_TAG=task record related operation -QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging -CREATE_TOKEN_NOTES=create token ,note: please login first -QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging -SCHEDULE=schedule -WARNING_TYPE=warning type(sending strategy) 
-WARNING_GROUP_ID=warning group id -FAILURE_STRATEGY=failure strategy -RECEIVERS=receivers -RECEIVERS_CC=receivers cc -WORKER_GROUP_ID=worker server group id -PROCESS_INSTANCE_PRIORITY=process instance priority -UPDATE_SCHEDULE_NOTES=update schedule -SCHEDULE_ID=schedule id -ONLINE_SCHEDULE_NOTES=online schedule -OFFLINE_SCHEDULE_NOTES=offline schedule -QUERY_SCHEDULE_NOTES=query schedule -QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging -LOGIN_TAG=User login related operations -USER_NAME=user name -PROJECT_NAME=project name -CREATE_PROCESS_DEFINITION_NOTES=create process definition -PROCESS_DEFINITION_NAME=process definition name -PROCESS_DEFINITION_JSON=process definition detail info (json format) -PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) -PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) -PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) -PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) -PROCESS_DEFINITION_DESC=process definition desc -PROCESS_DEFINITION_TAG=process definition related opertation -SIGNOUT_NOTES=logout -USER_PASSWORD=user password -UPDATE_PROCESS_INSTANCE_NOTES=update process instance -QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name -LOGIN_NOTES=user login -UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition -PROCESS_DEFINITION_ID=process definition id -PROCESS_DEFINITION_IDS=process definition ids -RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id -QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging -QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list -PAGE_NO=page no -PROCESS_INSTANCE_ID=process instance id 
-PROCESS_INSTANCE_JSON=process instance info(json format) -SCHEDULE_TIME=schedule time -SYNC_DEFINE=update the information of the process instance to the process definition\ - -RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance -SEARCH_VAL=search val -USER_ID=user id -PAGE_SIZE=page size -LIMIT=limit -VIEW_TREE_NOTES=view tree -GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id -PROCESS_DEFINITION_ID_LIST=process definition id list -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id -DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id -BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids -QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id -DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id -TASK_ID=task instance id -SKIP_LINE_NUM=skip line num -QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log -DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log -USERS_TAG=users related operation -SCHEDULER_TAG=scheduler related operation -CREATE_SCHEDULE_NOTES=create schedule -CREATE_USER_NOTES=create user -TENANT_ID=tenant id -QUEUE=queue -EMAIL=email -PHONE=phone -QUERY_USER_LIST_NOTES=query user list -UPDATE_USER_NOTES=update user -DELETE_USER_BY_ID_NOTES=delete user by id -GRANT_PROJECT_NOTES=GRANT PROJECT -PROJECT_IDS=project ids(string format, multiple projects separated by ",") -GRANT_RESOURCE_NOTES=grant resource file -RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") -GET_USER_INFO_NOTES=get user info -LIST_USER_NOTES=list user -VERIFY_USER_NAME_NOTES=verify user name -UNAUTHORIZED_USER_NOTES=cancel authorization -ALERT_GROUP_ID=alert group id -AUTHORIZED_USER_NOTES=authorized user -GRANT_UDF_FUNC_NOTES=grant udf function -UDF_IDS=udf ids(string format, multiple udf functions separated by ",") 
-GRANT_DATASOURCE_NOTES=grant datasource -DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") -QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id -QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id -QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables -VIEW_GANTT_NOTES=view gantt -SUB_PROCESS_INSTANCE_ID=sub process instance id -TASK_NAME=task instance name -TASK_INSTANCE_TAG=task instance related operation -LOGGER_TAG=log related operation -PROCESS_INSTANCE_TAG=process instance related operation -EXECUTION_STATUS=runing status for workflow and task nodes -HOST=ip address of running task -START_DATE=start date -END_DATE=end date -QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id -UPDATE_DATA_SOURCE_NOTES=update data source -DATA_SOURCE_ID=DATA SOURCE ID -QUERY_DATA_SOURCE_NOTES=query data source by id -QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=query data source list by database type -QUERY_DATA_SOURCE_LIST_PAGING_NOTES=query data source list paging -CONNECT_DATA_SOURCE_NOTES=CONNECT DATA SOURCE -CONNECT_DATA_SOURCE_TEST_NOTES=connect data source test -DELETE_DATA_SOURCE_NOTES=delete data source -VERIFY_DATA_SOURCE_NOTES=verify data source -UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source -AUTHORIZED_DATA_SOURCE_NOTES=authorized data source -DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id diff --git a/dockerfile/conf/dolphinscheduler/conf/i18n/messages_en_US.properties b/dockerfile/conf/dolphinscheduler/conf/i18n/messages_en_US.properties deleted file mode 100644 index 24c0843c10..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/i18n/messages_en_US.properties +++ /dev/null @@ -1,252 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -QUERY_SCHEDULE_LIST_NOTES=query schedule list -EXECUTE_PROCESS_TAG=execute process related operation -PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation -RUN_PROCESS_INSTANCE_NOTES=run process instance -START_NODE_LIST=start node list(node name) -TASK_DEPEND_TYPE=task depend type -COMMAND_TYPE=command type -RUN_MODE=run mode -TIMEOUT=timeout -EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=execute action to process instance -EXECUTE_TYPE=execute type -START_CHECK_PROCESS_DEFINITION_NOTES=start check process definition -GET_RECEIVER_CC_NOTES=query receiver cc -DESC=description -GROUP_NAME=group name -GROUP_TYPE=group type -QUERY_ALERT_GROUP_LIST_NOTES=query alert group list -UPDATE_ALERT_GROUP_NOTES=update alert group -DELETE_ALERT_GROUP_BY_ID_NOTES=delete alert group by id -VERIFY_ALERT_GROUP_NAME_NOTES=verify alert group name, check alert group exist or not -GRANT_ALERT_GROUP_NOTES=grant alert group -USER_IDS=user id list -ALERT_GROUP_TAG=alert group related operation -CREATE_ALERT_GROUP_NOTES=create alert group -WORKER_GROUP_TAG=worker group related operation -SAVE_WORKER_GROUP_NOTES=create worker group -WORKER_GROUP_NAME=worker group name -WORKER_IP_LIST=worker ip list, eg. 
192.168.1.1,192.168.1.2 -QUERY_WORKER_GROUP_PAGING_NOTES=query worker group paging -QUERY_WORKER_GROUP_LIST_NOTES=query worker group list -DELETE_WORKER_GROUP_BY_ID_NOTES=delete worker group by id -DATA_ANALYSIS_TAG=analysis related operation of task state -COUNT_TASK_STATE_NOTES=count task state -COUNT_PROCESS_INSTANCE_NOTES=count process instance state -COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user -COUNT_COMMAND_STATE_NOTES=count command state -COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue\ - -ACCESS_TOKEN_TAG=access token related operation -MONITOR_TAG=monitor related operation -MASTER_LIST_NOTES=master server list -WORKER_LIST_NOTES=worker server list -QUERY_DATABASE_STATE_NOTES=query database state -QUERY_ZOOKEEPER_STATE_NOTES=QUERY ZOOKEEPER STATE -TASK_STATE=task instance state -SOURCE_TABLE=SOURCE TABLE -DEST_TABLE=dest table -TASK_DATE=task date -QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=query history task record list paging -DATA_SOURCE_TAG=data source related operation -CREATE_DATA_SOURCE_NOTES=create data source -DATA_SOURCE_NAME=data source name -DATA_SOURCE_NOTE=data source desc -DB_TYPE=database type -DATA_SOURCE_HOST=DATA SOURCE HOST -DATA_SOURCE_PORT=data source port -DATABASE_NAME=database name -QUEUE_TAG=queue related operation -QUERY_QUEUE_LIST_NOTES=query queue list -QUERY_QUEUE_LIST_PAGING_NOTES=query queue list paging -CREATE_QUEUE_NOTES=create queue -YARN_QUEUE_NAME=yarn(hadoop) queue name -QUEUE_ID=queue id -TENANT_DESC=tenant desc -QUERY_TENANT_LIST_PAGING_NOTES=query tenant list paging -QUERY_TENANT_LIST_NOTES=query tenant list -UPDATE_TENANT_NOTES=update tenant -DELETE_TENANT_NOTES=delete tenant -RESOURCES_TAG=resource center related operation -CREATE_RESOURCE_NOTES=create resource -RESOURCE_TYPE=resource file type -RESOURCE_NAME=resource name -RESOURCE_DESC=resource file desc -RESOURCE_FILE=resource file -RESOURCE_ID=resource id -QUERY_RESOURCE_LIST_NOTES=query resource list 
-DELETE_RESOURCE_BY_ID_NOTES=delete resource by id -VIEW_RESOURCE_BY_ID_NOTES=view resource by id -ONLINE_CREATE_RESOURCE_NOTES=online create resource -SUFFIX=resource file suffix -CONTENT=resource file content -UPDATE_RESOURCE_NOTES=edit resource file online -DOWNLOAD_RESOURCE_NOTES=download resource file -CREATE_UDF_FUNCTION_NOTES=create udf function -UDF_TYPE=UDF type -FUNC_NAME=function name -CLASS_NAME=package and class name -ARG_TYPES=arguments -UDF_DESC=udf desc -VIEW_UDF_FUNCTION_NOTES=view udf function -UPDATE_UDF_FUNCTION_NOTES=update udf function -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=query udf function list paging -VERIFY_UDF_FUNCTION_NAME_NOTES=verify udf function name -DELETE_UDF_FUNCTION_NOTES=delete udf function -AUTHORIZED_FILE_NOTES=authorized file -UNAUTHORIZED_FILE_NOTES=unauthorized file -AUTHORIZED_UDF_FUNC_NOTES=authorized udf func -UNAUTHORIZED_UDF_FUNC_NOTES=unauthorized udf func -VERIFY_QUEUE_NOTES=verify queue -TENANT_TAG=tenant related operation -CREATE_TENANT_NOTES=create tenant -TENANT_CODE=tenant code -TENANT_NAME=tenant name -QUEUE_NAME=queue name -PASSWORD=password -DATA_SOURCE_OTHER=jdbc connection params, format:{"key1":"value1",...} -PROJECT_TAG=project related operation -CREATE_PROJECT_NOTES=create project -PROJECT_DESC=project description -UPDATE_PROJECT_NOTES=update project -PROJECT_ID=project id -QUERY_PROJECT_BY_ID_NOTES=query project info by project id -QUERY_PROJECT_LIST_PAGING_NOTES=QUERY PROJECT LIST PAGING -QUERY_ALL_PROJECT_LIST_NOTES=query all project list -DELETE_PROJECT_BY_ID_NOTES=delete project by id -QUERY_UNAUTHORIZED_PROJECT_NOTES=query unauthorized project -QUERY_AUTHORIZED_PROJECT_NOTES=query authorized project -TASK_RECORD_TAG=task record related operation -QUERY_TASK_RECORD_LIST_PAGING_NOTES=query task record list paging -CREATE_TOKEN_NOTES=create token ,note: please login first -QUERY_ACCESS_TOKEN_LIST_NOTES=query access token list paging -SCHEDULE=schedule -WARNING_TYPE=warning type(sending strategy) 
-WARNING_GROUP_ID=warning group id -FAILURE_STRATEGY=failure strategy -RECEIVERS=receivers -RECEIVERS_CC=receivers cc -WORKER_GROUP_ID=worker server group id -PROCESS_INSTANCE_PRIORITY=process instance priority -UPDATE_SCHEDULE_NOTES=update schedule -SCHEDULE_ID=schedule id -ONLINE_SCHEDULE_NOTES=online schedule -OFFLINE_SCHEDULE_NOTES=offline schedule -QUERY_SCHEDULE_NOTES=query schedule -QUERY_SCHEDULE_LIST_PAGING_NOTES=query schedule list paging -LOGIN_TAG=User login related operations -USER_NAME=user name -PROJECT_NAME=project name -CREATE_PROCESS_DEFINITION_NOTES=create process definition -PROCESS_DEFINITION_NAME=process definition name -PROCESS_DEFINITION_JSON=process definition detail info (json format) -PROCESS_DEFINITION_LOCATIONS=process definition node locations info (json format) -PROCESS_INSTANCE_LOCATIONS=process instance node locations info (json format) -PROCESS_DEFINITION_CONNECTS=process definition node connects info (json format) -PROCESS_INSTANCE_CONNECTS=process instance node connects info (json format) -PROCESS_DEFINITION_DESC=process definition desc -PROCESS_DEFINITION_TAG=process definition related opertation -SIGNOUT_NOTES=logout -USER_PASSWORD=user password -UPDATE_PROCESS_INSTANCE_NOTES=update process instance -QUERY_PROCESS_INSTANCE_LIST_NOTES=query process instance list -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=verify proccess definition name -LOGIN_NOTES=user login -UPDATE_PROCCESS_DEFINITION_NOTES=update proccess definition -PROCESS_DEFINITION_ID=process definition id -PROCESS_DEFINITION_IDS=process definition ids -RELEASE_PROCCESS_DEFINITION_NOTES=release proccess definition -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=query proccess definition by id -QUERY_PROCCESS_DEFINITION_LIST_NOTES=query proccess definition list -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=query proccess definition list paging -QUERY_ALL_DEFINITION_LIST_NOTES=query all definition list -PAGE_NO=page no -PROCESS_INSTANCE_ID=process instance id 
-PROCESS_INSTANCE_JSON=process instance info(json format) -SCHEDULE_TIME=schedule time -SYNC_DEFINE=update the information of the process instance to the process definition\ - -RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance -SEARCH_VAL=search val -USER_ID=user id -PAGE_SIZE=page size -LIMIT=limit -VIEW_TREE_NOTES=view tree -GET_NODE_LIST_BY_DEFINITION_ID_NOTES=get task node list by process definition id -PROCESS_DEFINITION_ID_LIST=process definition id list -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=query proccess definition all by project id -DELETE_PROCESS_DEFINITION_BY_ID_NOTES=delete process definition by process definition id -BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=batch delete process definition by process definition ids -QUERY_PROCESS_INSTANCE_BY_ID_NOTES=query process instance by process instance id -DELETE_PROCESS_INSTANCE_BY_ID_NOTES=delete process instance by process instance id -TASK_ID=task instance id -SKIP_LINE_NUM=skip line num -QUERY_TASK_INSTANCE_LOG_NOTES=query task instance log -DOWNLOAD_TASK_INSTANCE_LOG_NOTES=download task instance log -USERS_TAG=users related operation -SCHEDULER_TAG=scheduler related operation -CREATE_SCHEDULE_NOTES=create schedule -CREATE_USER_NOTES=create user -TENANT_ID=tenant id -QUEUE=queue -EMAIL=email -PHONE=phone -QUERY_USER_LIST_NOTES=query user list -UPDATE_USER_NOTES=update user -DELETE_USER_BY_ID_NOTES=delete user by id -GRANT_PROJECT_NOTES=GRANT PROJECT -PROJECT_IDS=project ids(string format, multiple projects separated by ",") -GRANT_RESOURCE_NOTES=grant resource file -RESOURCE_IDS=resource ids(string format, multiple resources separated by ",") -GET_USER_INFO_NOTES=get user info -LIST_USER_NOTES=list user -VERIFY_USER_NAME_NOTES=verify user name -UNAUTHORIZED_USER_NOTES=cancel authorization -ALERT_GROUP_ID=alert group id -AUTHORIZED_USER_NOTES=authorized user -GRANT_UDF_FUNC_NOTES=grant udf function -UDF_IDS=udf ids(string format, multiple udf functions separated by ",") 
-GRANT_DATASOURCE_NOTES=grant datasource -DATASOURCE_IDS=datasource ids(string format, multiple datasources separated by ",") -QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=query subprocess instance by task instance id -QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=query parent process instance info by sub process instance id -QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=query process instance global variables and local variables -VIEW_GANTT_NOTES=view gantt -SUB_PROCESS_INSTANCE_ID=sub process instance id -TASK_NAME=task instance name -TASK_INSTANCE_TAG=task instance related operation -LOGGER_TAG=log related operation -PROCESS_INSTANCE_TAG=process instance related operation -EXECUTION_STATUS=runing status for workflow and task nodes -HOST=ip address of running task -START_DATE=start date -END_DATE=end date -QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=query task list by process instance id -UPDATE_DATA_SOURCE_NOTES=update data source -DATA_SOURCE_ID=DATA SOURCE ID -QUERY_DATA_SOURCE_NOTES=query data source by id -QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=query data source list by database type -QUERY_DATA_SOURCE_LIST_PAGING_NOTES=query data source list paging -CONNECT_DATA_SOURCE_NOTES=CONNECT DATA SOURCE -CONNECT_DATA_SOURCE_TEST_NOTES=connect data source test -DELETE_DATA_SOURCE_NOTES=delete data source -VERIFY_DATA_SOURCE_NOTES=verify data source -UNAUTHORIZED_DATA_SOURCE_NOTES=unauthorized data source -AUTHORIZED_DATA_SOURCE_NOTES=authorized data source -DELETE_SCHEDULER_BY_ID_NOTES=delete scheduler by id diff --git a/dockerfile/conf/dolphinscheduler/conf/i18n/messages_zh_CN.properties b/dockerfile/conf/dolphinscheduler/conf/i18n/messages_zh_CN.properties deleted file mode 100644 index 5f24a6fedd..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/i18n/messages_zh_CN.properties +++ /dev/null @@ -1,250 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -QUERY_SCHEDULE_LIST_NOTES=查询定时列表 -PROCESS_INSTANCE_EXECUTOR_TAG=流程实例执行相关操作 -RUN_PROCESS_INSTANCE_NOTES=运行流程实例 -START_NODE_LIST=开始节点列表(节点name) -TASK_DEPEND_TYPE=任务依赖类型 -COMMAND_TYPE=指令类型 -RUN_MODE=运行模式 -TIMEOUT=超时时间 -EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES=执行流程实例的各种操作(暂停、停止、重跑、恢复等) -EXECUTE_TYPE=执行类型 -START_CHECK_PROCESS_DEFINITION_NOTES=检查流程定义 -DESC=备注(描述) -GROUP_NAME=组名称 -GROUP_TYPE=组类型 -QUERY_ALERT_GROUP_LIST_NOTES=告警组列表\ - -UPDATE_ALERT_GROUP_NOTES=编辑(更新)告警组 -DELETE_ALERT_GROUP_BY_ID_NOTES=删除告警组通过ID -VERIFY_ALERT_GROUP_NAME_NOTES=检查告警组是否存在 -GRANT_ALERT_GROUP_NOTES=授权告警组 -USER_IDS=用户ID列表 -ALERT_GROUP_TAG=告警组相关操作 -WORKER_GROUP_TAG=Worker分组管理 -SAVE_WORKER_GROUP_NOTES=创建Worker分组\ - -WORKER_GROUP_NAME=Worker分组名称 -WORKER_IP_LIST=Worker ip列表,注意:多个IP地址以逗号分割\ - -QUERY_WORKER_GROUP_PAGING_NOTES=Worker分组管理 -QUERY_WORKER_GROUP_LIST_NOTES=查询worker group分组 -DELETE_WORKER_GROUP_BY_ID_NOTES=删除worker group通过ID -DATA_ANALYSIS_TAG=任务状态分析相关操作 -COUNT_TASK_STATE_NOTES=任务状态统计 -COUNT_PROCESS_INSTANCE_NOTES=统计流程实例状态 -COUNT_PROCESS_DEFINITION_BY_USER_NOTES=统计用户创建的流程定义 -COUNT_COMMAND_STATE_NOTES=统计命令状态 -COUNT_QUEUE_STATE_NOTES=统计队列里任务状态 -ACCESS_TOKEN_TAG=access token相关操作,需要先登录 -MONITOR_TAG=监控相关操作 -MASTER_LIST_NOTES=master服务列表 -WORKER_LIST_NOTES=worker服务列表 -QUERY_DATABASE_STATE_NOTES=查询数据库状态 
-QUERY_ZOOKEEPER_STATE_NOTES=查询Zookeeper状态 -TASK_STATE=任务实例状态 -SOURCE_TABLE=源表 -DEST_TABLE=目标表 -TASK_DATE=任务时间 -QUERY_HISTORY_TASK_RECORD_LIST_PAGING_NOTES=分页查询历史任务记录列表 -DATA_SOURCE_TAG=数据源相关操作 -CREATE_DATA_SOURCE_NOTES=创建数据源 -DATA_SOURCE_NAME=数据源名称 -DATA_SOURCE_NOTE=数据源描述 -DB_TYPE=数据源类型 -DATA_SOURCE_HOST=IP主机名 -DATA_SOURCE_PORT=数据源端口 -DATABASE_NAME=数据库名 -QUEUE_TAG=队列相关操作 -QUERY_QUEUE_LIST_NOTES=查询队列列表 -QUERY_QUEUE_LIST_PAGING_NOTES=分页查询队列列表 -CREATE_QUEUE_NOTES=创建队列 -YARN_QUEUE_NAME=hadoop yarn队列名 -QUEUE_ID=队列ID -TENANT_DESC=租户描述 -QUERY_TENANT_LIST_PAGING_NOTES=分页查询租户列表 -QUERY_TENANT_LIST_NOTES=查询租户列表 -UPDATE_TENANT_NOTES=更新租户 -DELETE_TENANT_NOTES=删除租户 -RESOURCES_TAG=资源中心相关操作 -CREATE_RESOURCE_NOTES=创建资源 -RESOURCE_TYPE=资源文件类型 -RESOURCE_NAME=资源文件名称 -RESOURCE_DESC=资源文件描述 -RESOURCE_FILE=资源文件 -RESOURCE_ID=资源ID -QUERY_RESOURCE_LIST_NOTES=查询资源列表 -DELETE_RESOURCE_BY_ID_NOTES=删除资源通过ID -VIEW_RESOURCE_BY_ID_NOTES=浏览资源通通过ID -ONLINE_CREATE_RESOURCE_NOTES=在线创建资源 -SUFFIX=资源文件后缀 -CONTENT=资源文件内容 -UPDATE_RESOURCE_NOTES=在线更新资源文件 -DOWNLOAD_RESOURCE_NOTES=下载资源文件 -CREATE_UDF_FUNCTION_NOTES=创建UDF函数 -UDF_TYPE=UDF类型 -FUNC_NAME=函数名称 -CLASS_NAME=包名类名 -ARG_TYPES=参数 -UDF_DESC=udf描述,使用说明 -VIEW_UDF_FUNCTION_NOTES=查看udf函数 -UPDATE_UDF_FUNCTION_NOTES=更新udf函数 -QUERY_UDF_FUNCTION_LIST_PAGING_NOTES=分页查询udf函数列表 -VERIFY_UDF_FUNCTION_NAME_NOTES=验证udf函数名 -DELETE_UDF_FUNCTION_NOTES=删除UDF函数 -AUTHORIZED_FILE_NOTES=授权文件 -UNAUTHORIZED_FILE_NOTES=取消授权文件 -AUTHORIZED_UDF_FUNC_NOTES=授权udf函数 -UNAUTHORIZED_UDF_FUNC_NOTES=取消udf函数授权 -VERIFY_QUEUE_NOTES=验证队列 -TENANT_TAG=租户相关操作 -CREATE_TENANT_NOTES=创建租户 -TENANT_CODE=租户编码 -TENANT_NAME=租户名称 -QUEUE_NAME=队列名 -PASSWORD=密码 -DATA_SOURCE_OTHER=jdbc连接参数,格式为:{"key1":"value1",...} -PROJECT_TAG=项目相关操作 -CREATE_PROJECT_NOTES=创建项目 -PROJECT_DESC=项目描述 -UPDATE_PROJECT_NOTES=更新项目 -PROJECT_ID=项目ID -QUERY_PROJECT_BY_ID_NOTES=通过项目ID查询项目信息 -QUERY_PROJECT_LIST_PAGING_NOTES=分页查询项目列表 -QUERY_ALL_PROJECT_LIST_NOTES=查询所有项目 -DELETE_PROJECT_BY_ID_NOTES=删除项目通过ID 
-QUERY_UNAUTHORIZED_PROJECT_NOTES=查询未授权的项目 -QUERY_AUTHORIZED_PROJECT_NOTES=查询授权项目 -TASK_RECORD_TAG=任务记录相关操作 -QUERY_TASK_RECORD_LIST_PAGING_NOTES=分页查询任务记录列表 -CREATE_TOKEN_NOTES=创建token,注意需要先登录 -QUERY_ACCESS_TOKEN_LIST_NOTES=分页查询access token列表 -SCHEDULE=定时 -WARNING_TYPE=发送策略 -WARNING_GROUP_ID=发送组ID -FAILURE_STRATEGY=失败策略 -RECEIVERS=收件人 -RECEIVERS_CC=收件人(抄送) -WORKER_GROUP_ID=Worker Server分组ID -PROCESS_INSTANCE_PRIORITY=流程实例优先级 -UPDATE_SCHEDULE_NOTES=更新定时 -SCHEDULE_ID=定时ID -ONLINE_SCHEDULE_NOTES=定时上线 -OFFLINE_SCHEDULE_NOTES=定时下线 -QUERY_SCHEDULE_NOTES=查询定时 -QUERY_SCHEDULE_LIST_PAGING_NOTES=分页查询定时 -LOGIN_TAG=用户登录相关操作 -USER_NAME=用户名 -PROJECT_NAME=项目名称 -CREATE_PROCESS_DEFINITION_NOTES=创建流程定义 -PROCESS_DEFINITION_NAME=流程定义名称 -PROCESS_DEFINITION_JSON=流程定义详细信息(json格式) -PROCESS_DEFINITION_LOCATIONS=流程定义节点坐标位置信息(json格式) -PROCESS_INSTANCE_LOCATIONS=流程实例节点坐标位置信息(json格式) -PROCESS_DEFINITION_CONNECTS=流程定义节点图标连接信息(json格式) -PROCESS_INSTANCE_CONNECTS=流程实例节点图标连接信息(json格式) -PROCESS_DEFINITION_DESC=流程定义描述信息 -PROCESS_DEFINITION_TAG=流程定义相关操作 -SIGNOUT_NOTES=退出登录 -USER_PASSWORD=用户密码 -UPDATE_PROCESS_INSTANCE_NOTES=更新流程实例 -QUERY_PROCESS_INSTANCE_LIST_NOTES=查询流程实例列表 -VERIFY_PROCCESS_DEFINITION_NAME_NOTES=验证流程定义名字 -LOGIN_NOTES=用户登录 -UPDATE_PROCCESS_DEFINITION_NOTES=更新流程定义 -PROCESS_DEFINITION_ID=流程定义ID -RELEASE_PROCCESS_DEFINITION_NOTES=发布流程定义 -QUERY_PROCCESS_DEFINITION_BY_ID_NOTES=查询流程定义通过流程定义ID -QUERY_PROCCESS_DEFINITION_LIST_NOTES=查询流程定义列表 -QUERY_PROCCESS_DEFINITION_LIST_PAGING_NOTES=分页查询流程定义列表 -QUERY_ALL_DEFINITION_LIST_NOTES=查询所有流程定义 -PAGE_NO=页码号 -PROCESS_INSTANCE_ID=流程实例ID -PROCESS_INSTANCE_IDS=流程实例ID集合 -PROCESS_INSTANCE_JSON=流程实例信息(json格式) -SCHEDULE_TIME=定时时间 -SYNC_DEFINE=更新流程实例的信息是否同步到流程定义 -RECOVERY_PROCESS_INSTANCE_FLAG=是否恢复流程实例 -SEARCH_VAL=搜索值 -USER_ID=用户ID -PAGE_SIZE=页大小 -LIMIT=显示多少条 -VIEW_TREE_NOTES=树状图 -GET_NODE_LIST_BY_DEFINITION_ID_NOTES=获得任务节点列表通过流程定义ID -PROCESS_DEFINITION_ID_LIST=流程定义id列表 -QUERY_PROCCESS_DEFINITION_All_BY_PROJECT_ID_NOTES=查询流程定义通过项目ID 
-BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES=批量删除流程定义通过流程定义ID集合 -DELETE_PROCESS_DEFINITION_BY_ID_NOTES=删除流程定义通过流程定义ID -QUERY_PROCESS_INSTANCE_BY_ID_NOTES=查询流程实例通过流程实例ID -DELETE_PROCESS_INSTANCE_BY_ID_NOTES=删除流程实例通过流程实例ID -TASK_ID=任务实例ID -SKIP_LINE_NUM=忽略行数 -QUERY_TASK_INSTANCE_LOG_NOTES=查询任务实例日志 -DOWNLOAD_TASK_INSTANCE_LOG_NOTES=下载任务实例日志 -USERS_TAG=用户相关操作 -SCHEDULER_TAG=定时相关操作 -CREATE_SCHEDULE_NOTES=创建定时 -CREATE_USER_NOTES=创建用户 -TENANT_ID=租户ID -QUEUE=使用的队列 -EMAIL=邮箱 -PHONE=手机号 -QUERY_USER_LIST_NOTES=查询用户列表 -UPDATE_USER_NOTES=更新用户 -DELETE_USER_BY_ID_NOTES=删除用户通过ID -GRANT_PROJECT_NOTES=授权项目 -PROJECT_IDS=项目IDS(字符串格式,多个项目以","分割) -GRANT_RESOURCE_NOTES=授权资源文件 -RESOURCE_IDS=资源ID列表(字符串格式,多个资源ID以","分割) -GET_USER_INFO_NOTES=获取用户信息 -LIST_USER_NOTES=用户列表 -VERIFY_USER_NAME_NOTES=验证用户名 -UNAUTHORIZED_USER_NOTES=取消授权 -ALERT_GROUP_ID=报警组ID -AUTHORIZED_USER_NOTES=授权用户 -GRANT_UDF_FUNC_NOTES=授权udf函数 -UDF_IDS=udf函数id列表(字符串格式,多个udf函数ID以","分割) -GRANT_DATASOURCE_NOTES=授权数据源 -DATASOURCE_IDS=数据源ID列表(字符串格式,多个数据源ID以","分割) -QUERY_SUBPROCESS_INSTANCE_BY_TASK_ID_NOTES=查询子流程实例通过任务实例ID -QUERY_PARENT_PROCESS_INSTANCE_BY_SUB_PROCESS_INSTANCE_ID_NOTES=查询父流程实例信息通过子流程实例ID -QUERY_PROCESS_INSTANCE_GLOBAL_VARIABLES_AND_LOCAL_VARIABLES_NOTES=查询流程实例全局变量和局部变量 -VIEW_GANTT_NOTES=浏览Gantt图 -SUB_PROCESS_INSTANCE_ID=子流程是咧ID -TASK_NAME=任务实例名 -TASK_INSTANCE_TAG=任务实例相关操作 -LOGGER_TAG=日志相关操作 -PROCESS_INSTANCE_TAG=流程实例相关操作 -EXECUTION_STATUS=工作流和任务节点的运行状态 -HOST=运行任务的主机IP地址 -START_DATE=开始时间 -END_DATE=结束时间 -QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_NOTES=通过流程实例ID查询任务列表 -UPDATE_DATA_SOURCE_NOTES=更新数据源 -DATA_SOURCE_ID=数据源ID -QUERY_DATA_SOURCE_NOTES=查询数据源通过ID -QUERY_DATA_SOURCE_LIST_BY_TYPE_NOTES=查询数据源列表通过数据源类型 -QUERY_DATA_SOURCE_LIST_PAGING_NOTES=分页查询数据源列表 -CONNECT_DATA_SOURCE_NOTES=连接数据源 -CONNECT_DATA_SOURCE_TEST_NOTES=连接数据源测试 -DELETE_DATA_SOURCE_NOTES=删除数据源 -VERIFY_DATA_SOURCE_NOTES=验证数据源 -UNAUTHORIZED_DATA_SOURCE_NOTES=未授权的数据源 -AUTHORIZED_DATA_SOURCE_NOTES=授权的数据源 -DELETE_SCHEDULER_BY_ID_NOTES=根据定时id删除定时数据 diff --git 
a/dockerfile/conf/dolphinscheduler/conf/mail_templates/alert_mail_template.ftl b/dockerfile/conf/dolphinscheduler/conf/mail_templates/alert_mail_template.ftl deleted file mode 100644 index c638609090..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/mail_templates/alert_mail_template.ftl +++ /dev/null @@ -1,17 +0,0 @@ -<#-- - ~ Licensed to the Apache Software Foundation (ASF) under one or more - ~ contributor license agreements. See the NOTICE file distributed with - ~ this work for additional information regarding copyright ownership. - ~ The ASF licenses this file to You under the Apache License, Version 2.0 - ~ (the "License"); you may not use this file except in compliance with - ~ the License. You may obtain a copy of the License at - ~ - ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ - ~ Unless required by applicable law or agreed to in writing, software - ~ distributed under the License is distributed on an "AS IS" BASIS, - ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ~ See the License for the specific language governing permissions and - ~ limitations under the License. ---> - dolphinscheduler<#if title??> ${title}<#if content??> ${content}
\ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/master_logback.xml b/dockerfile/conf/dolphinscheduler/conf/master_logback.xml deleted file mode 100644 index 12bcd658e1..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/master_logback.xml +++ /dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - ${log.base}/dolphinscheduler-master.log - - INFO - - - ${log.base}/dolphinscheduler-master.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml deleted file mode 100644 index 29c8dfa5a3..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml deleted file mode 100644 index 8ee335b6ff..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapper.xml +++ /dev/null @@ -1,47 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml deleted file mode 100644 index 703b685157..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/AlertMapper.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - \ No newline at end of file diff 
--git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml deleted file mode 100644 index 66e6c3edd3..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/CommandMapper.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml deleted file mode 100644 index b296d5fc3e..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml +++ /dev/null @@ -1,79 +0,0 @@ - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml deleted file mode 100644 index a43cbeca91..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/DataSourceUserMapper.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - delete from t_ds_relation_datasource_user - where user_id = #{userId} - - - - delete from t_ds_relation_datasource_user - where datasource_id = #{datasourceId} - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml deleted file mode 100644 index 2f5ae7104a..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ErrorCommandMapper.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - \ No newline at end of file diff --git 
a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml deleted file mode 100644 index 1b97c07676..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessDefinitionMapper.xml +++ /dev/null @@ -1,96 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml deleted file mode 100644 index d217665eab..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapMapper.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - delete - from t_ds_relation_process_instance - where parent_process_instance_id=#{parentProcessId} - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml deleted file mode 100644 index 2e63867d33..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml +++ /dev/null @@ -1,182 +0,0 @@ - - - - - - - - - - - - - - - update t_ds_process_instance - set host=null - where host =#{host} and state in - - #{i} - - - - update t_ds_process_instance - set state = #{destState} - where state = #{originState} - - - - update t_ds_process_instance - set tenant_id = #{destTenantId} - where tenant_id = #{originTenantId} - - - - update t_ds_process_instance - set worker_group_id = #{destWorkerGroupId} - where worker_group_id = #{originWorkerGroupId} - - - - - - - - - - \ No newline at end of file diff --git 
a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml deleted file mode 100644 index 5ab0756250..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectMapper.xml +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml deleted file mode 100644 index 006cf080eb..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ProjectUserMapper.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - - delete from t_ds_relation_project_user - where 1=1 - and user_id = #{userId} - - and project_id = #{projectId} - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml deleted file mode 100644 index 423b0dd04d..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/QueueMapper.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml deleted file mode 100644 index 146daa0632..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml +++ /dev/null @@ -1,74 +0,0 @@ - - - - - - - - - - - - diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml 
b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml deleted file mode 100644 index 6a89e47c2f..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ResourceUserMapper.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - delete - from t_ds_relation_resources_user - where 1 = 1 - - and user_id = #{userId} - - - and resources_id = #{resourceId} - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.xml deleted file mode 100644 index 402c864251..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/ScheduleMapper.xml +++ /dev/null @@ -1,58 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml deleted file mode 100644 index 4fa7f309dc..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/SessionMapper.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml deleted file mode 100644 index 3a1fddd288..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml +++ /dev/null @@ -1,129 +0,0 @@ - - - - - - - update t_ds_task_instance - set state = #{destStatus} - where host = #{host} - and state in - - #{i} - - - - - - - - - - diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml 
b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml deleted file mode 100644 index fc9219ce86..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/TenantMapper.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml deleted file mode 100644 index 61b4e2c372..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UDFUserMapper.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - delete from t_ds_relation_udfs_user - where user_id = #{userId} - - - delete from t_ds_relation_udfs_user - where udf_id = #{udfFuncId} - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml deleted file mode 100644 index 04926d132e..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml +++ /dev/null @@ -1,71 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapper.xml deleted file mode 100644 index cbb448275c..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserAlertGroupMapper.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - delete from t_ds_relation_user_alertgroup - where alertgroup_id = #{alertgroupId} - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml 
b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml deleted file mode 100644 index 6046ad22eb..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/UserMapper.xml +++ /dev/null @@ -1,72 +0,0 @@ - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.xml b/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.xml deleted file mode 100644 index 84dd4db88d..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/org/apache/dolphinscheduler/dao/mapper/WorkerGroupMapper.xml +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml b/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml deleted file mode 100644 index bf4a651e7c..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/worker_logback.xml +++ /dev/null @@ -1,79 +0,0 @@ - - - - - - - - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - - - - INFO - - - - taskAppId - ${log.base} - - - - ${log.base}/${taskAppId}.log - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - - true - - - - - - ${log.base}/dolphinscheduler-worker.log - - INFO - - - - ${log.base}/dolphinscheduler-worker.%d{yyyy-MM-dd_HH}.%i.log - 168 - 200MB - -       - - - [%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n - - UTF-8 - -    - - - - - - - - \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/conf/zookeeper.properties b/dockerfile/conf/dolphinscheduler/conf/zookeeper.properties deleted file mode 100644 index 5e9df1c863..0000000000 --- a/dockerfile/conf/dolphinscheduler/conf/zookeeper.properties +++ /dev/null @@ -1,42 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor 
license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -#zookeeper cluster -zookeeper.quorum=127.0.0.1:2181 - -#dolphinscheduler root directory -zookeeper.dolphinscheduler.root=/dolphinscheduler - -#zookeeper server dirctory -zookeeper.dolphinscheduler.dead.servers=/dolphinscheduler/dead-servers -zookeeper.dolphinscheduler.masters=/dolphinscheduler/masters -zookeeper.dolphinscheduler.workers=/dolphinscheduler/workers - -#zookeeper lock dirctory -zookeeper.dolphinscheduler.lock.masters=/dolphinscheduler/lock/masters -zookeeper.dolphinscheduler.lock.workers=/dolphinscheduler/lock/workers - -#dolphinscheduler failover directory -zookeeper.dolphinscheduler.lock.failover.masters=/dolphinscheduler/lock/failover/masters -zookeeper.dolphinscheduler.lock.failover.workers=/dolphinscheduler/lock/failover/workers -zookeeper.dolphinscheduler.lock.failover.startup.masters=/dolphinscheduler/lock/failover/startup-masters - -#dolphinscheduler failover directory -zookeeper.session.timeout=300 -zookeeper.connection.timeout=300 -zookeeper.retry.sleep=1000 -zookeeper.retry.maxtime=5 \ No newline at end of file diff --git a/dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env b/dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env new file mode 100644 index 0000000000..070c438bb6 --- /dev/null +++ 
b/dockerfile/conf/dolphinscheduler/env/dolphinscheduler_env @@ -0,0 +1,26 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +export HADOOP_HOME=/opt/soft/hadoop +export HADOOP_CONF_DIR=/opt/soft/hadoop/etc/hadoop +export SPARK_HOME1=/opt/soft/spark1 +export SPARK_HOME2=/opt/soft/spark2 +export PYTHON_HOME=/opt/soft/python +export JAVA_HOME=/opt/soft/java +export HIVE_HOME=/opt/soft/hive +export FLINK_HOME=/opt/soft/flink +export PATH=$HADOOP_HOME/bin:$SPARK_HOME1/bin:$SPARK_HOME2/bin:$PYTHON_HOME:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$PATH diff --git a/dockerfile/conf/dolphinscheduler/conf/quartz.properties b/dockerfile/conf/dolphinscheduler/quartz.properties.tpl similarity index 81% rename from dockerfile/conf/dolphinscheduler/conf/quartz.properties rename to dockerfile/conf/dolphinscheduler/quartz.properties.tpl index a83abad5bc..de5496bc3d 100644 --- a/dockerfile/conf/dolphinscheduler/conf/quartz.properties +++ b/dockerfile/conf/dolphinscheduler/quartz.properties.tpl @@ -18,6 +18,13 @@ #============================================================================ # Configure Main Scheduler Properties #============================================================================ +#org.quartz.jobStore.driverDelegateClass = 
org.quartz.impl.jdbcjobstore.StdJDBCDelegate +org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate +# postgre +org.quartz.dataSource.myDs.driver = org.postgresql.Driver +org.quartz.dataSource.myDs.URL = jdbc:postgresql://${POSTGRESQL_HOST}:${POSTGRESQL_PORT}/dolphinscheduler?characterEncoding=utf8 +org.quartz.dataSource.myDs.user = ${POSTGRESQL_USERNAME} +org.quartz.dataSource.myDs.password = ${POSTGRESQL_PASSWORD} org.quartz.scheduler.instanceName = DolphinScheduler org.quartz.scheduler.instanceId = AUTO org.quartz.scheduler.makeSchedulerThreadDaemon = true @@ -26,7 +33,6 @@ org.quartz.jobStore.useProperties = false #============================================================================ # Configure ThreadPool #============================================================================ - org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool org.quartz.threadPool.makeThreadsDaemons = true org.quartz.threadPool.threadCount = 25 @@ -35,22 +41,17 @@ org.quartz.threadPool.threadPriority = 5 #============================================================================ # Configure JobStore #============================================================================ - org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX -org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate org.quartz.jobStore.tablePrefix = QRTZ_ org.quartz.jobStore.isClustered = true org.quartz.jobStore.misfireThreshold = 60000 org.quartz.jobStore.clusterCheckinInterval = 5000 +org.quartz.jobStore.acquireTriggersWithinLock=true org.quartz.jobStore.dataSource = myDs #============================================================================ -# Configure Datasources +# Configure Datasources #============================================================================ -org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.dao.quartz.DruidConnectionProvider 
-org.quartz.dataSource.myDs.driver = org.postgresql.Driver -org.quartz.dataSource.myDs.URL=jdbc:postgresql://127.0.0.1:5432/dolphinscheduler -org.quartz.dataSource.myDs.user=root -org.quartz.dataSource.myDs.password=root@123 +org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider org.quartz.dataSource.myDs.maxConnections = 10 -org.quartz.dataSource.myDs.validationQuery = select 1 +org.quartz.dataSource.myDs.validationQuery = select 1 \ No newline at end of file diff --git a/dockerfile/conf/maven/settings.xml b/dockerfile/conf/maven/settings.xml deleted file mode 100644 index 6bdea4a1bf..0000000000 --- a/dockerfile/conf/maven/settings.xml +++ /dev/null @@ -1,263 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - nexus-aliyun - central - Nexus aliyun - http://maven.aliyun.com/nexus/content/groups/public - - - - - - - - - - - - diff --git a/dockerfile/conf/nginx/dolphinscheduler.conf b/dockerfile/conf/nginx/dolphinscheduler.conf index 03f87e6b52..9c2c3913dc 100644 --- a/dockerfile/conf/nginx/dolphinscheduler.conf +++ b/dockerfile/conf/nginx/dolphinscheduler.conf @@ -21,11 +21,11 @@ server { #charset koi8-r; #access_log /var/log/nginx/host.access.log main; location / { - root /opt/dolphinscheduler_source/dolphinscheduler-ui/dist; + root /opt/dolphinscheduler/ui; index index.html index.html; } location /dolphinscheduler { - proxy_pass http://127.0.0.1:12345; + proxy_pass http://FRONTEND_API_SERVER_HOST:FRONTEND_API_SERVER_PORT; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; proxy_set_header x_real_ipP $remote_addr; diff --git a/dockerfile/hooks/build b/dockerfile/hooks/build index 8b7d5329dc..05fa09d0c9 100644 --- a/dockerfile/hooks/build +++ b/dockerfile/hooks/build @@ -16,9 +16,38 @@ # limitations under the License. 
# +set -e + echo "------ dolphinscheduler start - build -------" printenv -docker build --build-arg version=$version --build-arg tar_version=$tar_version -t $DOCKER_REPO:$version . +if [ -z "${VERSION}" ] +then + echo "set default environment variable [VERSION]" + VERSION=$(cat $(pwd)/sql/soft_version) +fi + +if [ "${DOCKER_REPO}x" = "x" ] +then + echo "set default environment variable [DOCKER_REPO]" + DOCKER_REPO='dolphinscheduler' +fi + +echo "Version: $VERSION" +echo "Repo: $DOCKER_REPO" + +echo -e "Current Directory is $(pwd)\n" + +# maven package(Project Directory) +echo -e "mvn -B clean compile package -Prelease -Dmaven.test.skip=true" +mvn -B clean compile package -Prelease -Dmaven.test.skip=true + +# mv dolphinscheduler-bin.tar.gz file to dockerfile directory +echo -e "mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz $(pwd)/dockerfile/\n" +mv $(pwd)/dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${VERSION}-SNAPSHOT-dolphinscheduler-bin.tar.gz $(pwd)/dockerfile/ + +# docker build +echo -e "docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/dockerfile/\n" +docker build --build-arg VERSION=${VERSION} -t $DOCKER_REPO:${VERSION} $(pwd)/dockerfile/ echo "------ dolphinscheduler end - build -------" diff --git a/dockerfile/hooks/build.bat b/dockerfile/hooks/build.bat new file mode 100644 index 0000000000..b15c7b00df --- /dev/null +++ b/dockerfile/hooks/build.bat @@ -0,0 +1,56 @@ +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. 
You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. +:: +@echo off + +echo "------ dolphinscheduler start - build -------" +set + +if not defined VERSION ( + echo "set environment variable [VERSION]" + for /f %%l in (%cd%\sql\soft_version) do (set VERSION=%%l) +) + +if not defined DOCKER_REPO ( + echo "set environment variable [DOCKER_REPO]" + set DOCKER_REPO='dolphinscheduler' +) + +echo "Version: %VERSION%" +echo "Repo: %DOCKER_REPO%" + +echo "Current Directory is %cd%" + +:: maven package(Project Directory) +echo "call mvn clean compile package -Prelease" +call mvn clean compile package -Prelease -DskipTests=true +if "%errorlevel%"=="1" goto :mvnFailed + +:: move dolphinscheduler-bin.tar.gz file to dockerfile directory +echo "move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-incubating-%VERSION%-SNAPSHOT-dolphinscheduler-bin.tar.gz %cd%\dockerfile\" +move %cd%\dolphinscheduler-dist\target\apache-dolphinscheduler-incubating-%VERSION%-SNAPSHOT-dolphinscheduler-bin.tar.gz %cd%\dockerfile\ + +:: docker build +echo "docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\dockerfile\" +docker build --build-arg VERSION=%VERSION% -t %DOCKER_REPO%:%VERSION% %cd%\dockerfile\ +if "%errorlevel%"=="1" goto :dockerBuildFailed + +echo "------ dolphinscheduler end - build -------" + +:mvnFailed +echo "MAVEN PACKAGE FAILED!" + +:dockerBuildFailed +echo "DOCKER BUILD FAILED!" 
\ No newline at end of file diff --git a/dockerfile/hooks/check b/dockerfile/hooks/check new file mode 100644 index 0000000000..fbc3f8bd44 --- /dev/null +++ b/dockerfile/hooks/check @@ -0,0 +1,27 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +echo "------ dolphinscheduler check - server - status -------" +sleep 20 +server_num=$(docker top `docker container list | grep startup | awk '{print $1}'`| grep java | grep "dolphinscheduler" | awk -F 'classpath ' '{print $2}' | awk '{print $2}' | sort | uniq -c | wc -l) +if [ $server_num -eq 5 ] +then + echo "Server all start successfully" +else + echo "Server start failed "$server_num + exit 1 +fi diff --git a/dockerfile/hooks/push b/dockerfile/hooks/push index 6146727d45..41a25c54fe 100644 --- a/dockerfile/hooks/push +++ b/dockerfile/hooks/push @@ -19,6 +19,6 @@ echo "------ push start -------" printenv -docker push $DOCKER_REPO:$version +docker push $DOCKER_REPO:${VERSION} echo "------ push end -------" diff --git a/dockerfile/hooks/push.bat b/dockerfile/hooks/push.bat new file mode 100644 index 0000000000..458a693f97 --- /dev/null +++ b/dockerfile/hooks/push.bat @@ -0,0 +1,23 @@ +:: Licensed to the Apache Software Foundation (ASF) under one or more +:: contributor license agreements. See the NOTICE file distributed with +:: this work for additional information regarding copyright ownership. +:: The ASF licenses this file to You under the Apache License, Version 2.0 +:: (the "License"); you may not use this file except in compliance with +:: the License. You may obtain a copy of the License at +:: +:: http://www.apache.org/licenses/LICENSE-2.0 +:: +:: Unless required by applicable law or agreed to in writing, software +:: distributed under the License is distributed on an "AS IS" BASIS, +:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +:: See the License for the specific language governing permissions and +:: limitations under the License. 
+:: +@echo off + +echo "------ push start -------" +set + +docker push %DOCKER_REPO%:%VERSION% + +echo "------ push end -------" diff --git a/dockerfile/startup-init-conf.sh b/dockerfile/startup-init-conf.sh new file mode 100644 index 0000000000..db37976168 --- /dev/null +++ b/dockerfile/startup-init-conf.sh @@ -0,0 +1,100 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +set -e + +echo "init env variables" + +# Define parameters default value. 
+#============================================================================ +# Database Source +#============================================================================ +export POSTGRESQL_HOST=${POSTGRESQL_HOST:-"127.0.0.1"} +export POSTGRESQL_PORT=${POSTGRESQL_PORT:-"5432"} +export POSTGRESQL_USERNAME=${POSTGRESQL_USERNAME:-"root"} +export POSTGRESQL_PASSWORD=${POSTGRESQL_PASSWORD:-"root"} + +#============================================================================ +# System +#============================================================================ +export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"} + +#============================================================================ +# Zookeeper +#============================================================================ +export TASK_QUEUE=${TASK_QUEUE:-"zookeeper"} +export ZOOKEEPER_QUORUM=${ZOOKEEPER_QUORUM:-"127.0.0.1:2181"} + +#============================================================================ +# Master Server +#============================================================================ +export MASTER_EXEC_THREADS=${MASTER_EXEC_THREADS:-"100"} +export MASTER_EXEC_TASK_NUM=${MASTER_EXEC_TASK_NUM:-"20"} +export MASTER_HEARTBEAT_INTERVAL=${MASTER_HEARTBEAT_INTERVAL:-"10"} +export MASTER_TASK_COMMIT_RETRYTIMES=${MASTER_TASK_COMMIT_RETRYTIMES:-"5"} +export MASTER_TASK_COMMIT_INTERVAL=${MASTER_TASK_COMMIT_INTERVAL:-"1000"} +export MASTER_MAX_CPULOAD_AVG=${MASTER_MAX_CPULOAD_AVG:-"100"} +export MASTER_RESERVED_MEMORY=${MASTER_RESERVED_MEMORY:-"0.1"} + +#============================================================================ +# Worker Server +#============================================================================ +export WORKER_EXEC_THREADS=${WORKER_EXEC_THREADS:-"100"} +export WORKER_HEARTBEAT_INTERVAL=${WORKER_HEARTBEAT_INTERVAL:-"10"} +export WORKER_FETCH_TASK_NUM=${WORKER_FETCH_TASK_NUM:-"3"} +export 
WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"} +export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"} + +#============================================================================ +# Alert Server +#============================================================================ +# XLS FILE +export XLS_FILE_PATH=${XLS_FILE_PATH:-"/tmp/xls"} +# mail +export MAIL_SERVER_HOST=${MAIL_SERVER_HOST:-""} +export MAIL_SERVER_PORT=${MAIL_SERVER_PORT:-""} +export MAIL_SENDER=${MAIL_SENDER:-""} +export MAIL_USER=${MAIL_USER:-""} +export MAIL_PASSWD=${MAIL_PASSWD:-""} +export MAIL_SMTP_STARTTLS_ENABLE=${MAIL_SMTP_STARTTLS_ENABLE:-"true"} +export MAIL_SMTP_SSL_ENABLE=${MAIL_SMTP_SSL_ENABLE:-"false"} +export MAIL_SMTP_SSL_TRUST=${MAIL_SMTP_SSL_TRUST:-""} +# wechat +export ENTERPRISE_WECHAT_ENABLE=${ENTERPRISE_WECHAT_ENABLE:-"false"} +export ENTERPRISE_WECHAT_CORP_ID=${ENTERPRISE_WECHAT_CORP_ID:-""} +export ENTERPRISE_WECHAT_SECRET=${ENTERPRISE_WECHAT_SECRET:-""} +export ENTERPRISE_WECHAT_AGENT_ID=${ENTERPRISE_WECHAT_AGENT_ID:-""} +export ENTERPRISE_WECHAT_USERS=${ENTERPRISE_WECHAT_USERS:-""} + +#============================================================================ +# Frontend +#============================================================================ +export FRONTEND_API_SERVER_HOST=${FRONTEND_API_SERVER_HOST:-"127.0.0.1"} +export FRONTEND_API_SERVER_PORT=${FRONTEND_API_SERVER_PORT:-"12345"} + +echo "generate app config" +ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do +eval "cat << EOF +$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line}) +EOF +" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*} +done + +echo "generate nginx config" +sed -i "s/FRONTEND_API_SERVER_HOST/${FRONTEND_API_SERVER_HOST}/g" /etc/nginx/conf.d/dolphinscheduler.conf +sed -i "s/FRONTEND_API_SERVER_PORT/${FRONTEND_API_SERVER_PORT}/g" /etc/nginx/conf.d/dolphinscheduler.conf \ No newline at end of file diff --git a/dockerfile/startup.sh b/dockerfile/startup.sh index 
cc98d07e57..610a86f5d3 100644 --- a/dockerfile/startup.sh +++ b/dockerfile/startup.sh @@ -17,59 +17,176 @@ # set -e - echo "start postgresql service" - /etc/init.d/postgresql restart - echo "create user and init db" - sudo -u postgres psql <<'ENDSSH' -create user root with password 'root@123'; -create database dolphinscheduler owner root; -grant all privileges on database dolphinscheduler to root; -\q -ENDSSH - echo "import sql data" - /opt/dolphinscheduler/script/create-dolphinscheduler.sh - -/opt/zookeeper/bin/zkServer.sh restart - -sleep 90 - -echo "start api-server" -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh stop api-server -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh start api-server - - - -echo "start master-server" -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh stop master-server -python /opt/dolphinscheduler/script/del-zk-node.py 127.0.0.1 /dolphinscheduler/masters -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh start master-server - -echo "start worker-server" -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh stop worker-server -python /opt/dolphinscheduler/script/del-zk-node.py 127.0.0.1 /dolphinscheduler/workers -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh start worker-server - -echo "start logger-server" -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh stop logger-server -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh start logger-server - - -echo "start alert-server" -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh stop alert-server -/opt/dolphinscheduler/bin/dolphinscheduler-daemon.sh start alert-server - - - - - -echo "start nginx" -/etc/init.d/nginx stop -nginx & +DOLPHINSCHEDULER_BIN=${DOLPHINSCHEDULER_HOME}/bin +DOLPHINSCHEDULER_SCRIPT=${DOLPHINSCHEDULER_HOME}/script +DOLPHINSCHEDULER_LOGS=${DOLPHINSCHEDULER_HOME}/logs + +# start postgresql +initPostgreSQL() { + echo "checking postgresql" + if [ -n "$(ifconfig | grep ${POSTGRESQL_HOST})" ]; then + echo "start postgresql service" + 
rc-service postgresql restart + + # role if not exists, create + flag=$(sudo -u postgres psql -tAc "SELECT 1 FROM pg_roles WHERE rolname='${POSTGRESQL_USERNAME}'") + if [ -z "${flag}" ]; then + echo "create user" + sudo -u postgres psql -tAc "create user ${POSTGRESQL_USERNAME} with password '${POSTGRESQL_PASSWORD}'" + fi + + # database if not exists, create + flag=$(sudo -u postgres psql -tAc "select 1 from pg_database where datname='dolphinscheduler'") + if [ -z "${flag}" ]; then + echo "init db" + sudo -u postgres psql -tAc "create database dolphinscheduler owner ${POSTGRESQL_USERNAME}" + fi + + # grant + sudo -u postgres psql -tAc "grant all privileges on database dolphinscheduler to ${POSTGRESQL_USERNAME}" + fi + + echo "connect postgresql service" + v=$(sudo -u postgres PGPASSWORD=${POSTGRESQL_PASSWORD} psql -h ${POSTGRESQL_HOST} -U ${POSTGRESQL_USERNAME} -d dolphinscheduler -tAc "select 1") + if [ "$(echo '${v}' | grep 'FATAL' | wc -l)" -eq 1 ]; then + echo "Can't connect to database...${v}" + exit 1 + fi + echo "import sql data" + ${DOLPHINSCHEDULER_SCRIPT}/create-dolphinscheduler.sh +} + +# start zk +initZK() { + echo -e "checking zookeeper" + if [[ "${ZOOKEEPER_QUORUM}" = "127.0.0.1:2181" || "${ZOOKEEPER_QUORUM}" = "localhost:2181" ]]; then + echo "start local zookeeper" + /opt/zookeeper/bin/zkServer.sh restart + else + echo "connect remote zookeeper" + echo "${ZOOKEEPER_QUORUM}" | awk -F ',' 'BEGIN{ i=1 }{ while( i <= NF ){ print $i; i++ } }' | while read line; do + while ! nc -z ${line%:*} ${line#*:}; do + counter=$((counter+1)) + if [ $counter == 30 ]; then + log "Error: Couldn't connect to zookeeper." + exit 1 + fi + log "Trying to connect to zookeeper at ${line}. Attempt $counter." 
+ sleep 5 + done + done + fi +} + +# start nginx +initNginx() { + echo "start nginx" + nginx & +} + +# start master-server +initMasterServer() { + echo "start master-server" + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop master-server + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start master-server +} + +# start worker-server +initWorkerServer() { + echo "start worker-server" + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop worker-server + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start worker-server +} + +# start api-server +initApiServer() { + echo "start api-server" + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop api-server + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start api-server +} + +# start logger-server +initLoggerServer() { + echo "start logger-server" + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop logger-server + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start logger-server +} + +# start alert-server +initAlertServer() { + echo "start alert-server" + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh stop alert-server + ${DOLPHINSCHEDULER_BIN}/dolphinscheduler-daemon.sh start alert-server +} + +# print usage +printUsage() { + echo -e "Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system," + echo -e "dedicated to solving the complex dependencies in data processing, making the scheduling system out of the box for data processing.\n" + echo -e "Usage: [ all | master-server | worker-server | api-server | alert-server | frontend ]\n" + printf "%-13s: %s\n" "all" "Run master-server, worker-server, api-server, alert-server and frontend." + printf "%-13s: %s\n" "master-server" "MasterServer is mainly responsible for DAG task split, task submission monitoring." + printf "%-13s: %s\n" "worker-server" "WorkerServer is mainly responsible for task execution and providing log services.." 
+ printf "%-13s: %s\n" "api-server" "ApiServer is mainly responsible for processing requests from the front-end UI layer." + printf "%-13s: %s\n" "alert-server" "AlertServer mainly include Alarms." + printf "%-13s: %s\n" "frontend" "Frontend mainly provides various visual operation interfaces of the system." +} + +# init config file +source /root/startup-init-conf.sh + +LOGFILE=/var/log/nginx/access.log +case "$1" in + (all) + initZK + initPostgreSQL + initMasterServer + initWorkerServer + initApiServer + initAlertServer + initLoggerServer + initNginx + LOGFILE=/var/log/nginx/access.log + ;; + (master-server) + initZK + initPostgreSQL + initMasterServer + LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-master.log + ;; + (worker-server) + initZK + initPostgreSQL + initWorkerServer + initLoggerServer + LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-worker.log + ;; + (api-server) + initPostgreSQL + initApiServer + LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-api-server.log + ;; + (alert-server) + initPostgreSQL + initAlertServer + LOGFILE=${DOLPHINSCHEDULER_LOGS}/dolphinscheduler-alert.log + ;; + (frontend) + initNginx + LOGFILE=/var/log/nginx/access.log + ;; + (help) + printUsage + exit 1 + ;; + (*) + printUsage + exit 1 + ;; +esac + +echo "tee begin" +exec tee ${LOGFILE} -while true -do - sleep 101 -done -exec "$@" diff --git a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java index 55c0f9ffbf..94d95b3c26 100644 --- a/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java +++ b/dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/Constants.java @@ -20,7 +20,9 @@ package org.apache.dolphinscheduler.alert.utils; * constants */ public class Constants { - + private Constants() { + throw new IllegalStateException("Constants class"); + } /** * alert properties path */ 
diff --git a/dolphinscheduler-api/pom.xml b/dolphinscheduler-api/pom.xml index c10f443384..41971734ec 100644 --- a/dolphinscheduler-api/pom.xml +++ b/dolphinscheduler-api/pom.xml @@ -31,12 +31,6 @@ org.apache.dolphinscheduler dolphinscheduler-alert - - - org.apache.dolphinscheduler - dolphinscheduler-dao - - @@ -129,13 +123,13 @@ - com.github.xiaoymin - swagger-bootstrap-ui + org.apache.dolphinscheduler + dolphinscheduler-service - org.apache.dolphinscheduler - dolphinscheduler-rpc + com.github.xiaoymin + swagger-bootstrap-ui @@ -152,6 +146,12 @@ org.apache.hadoop hadoop-common + + + javax.servlet + servlet-api + + @@ -162,11 +162,23 @@ org.apache.hadoop hadoop-hdfs + + + servlet-api + javax.servlet + + org.apache.hadoop hadoop-yarn-common + + + servlet-api + javax.servlet + + @@ -174,14 +186,15 @@ hadoop-aws - - javax.servlet - servlet-api - - org.mortbay.jetty jsp-2.1 + + + org.mortbay.jetty + servlet-api-2.5 + + diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index 150c647f99..102f116575 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -22,12 +22,12 @@ import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; -import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; import org.apache.dolphinscheduler.common.utils.ParameterUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.User; import 
io.swagger.annotations.*; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -75,6 +75,7 @@ public class ProcessInstanceController extends BaseController{ @ApiImplicitParams({ @ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), + @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"), @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), @ApiImplicitParam(name = "host", value = "HOST", type ="String"), @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), @@ -88,6 +89,7 @@ public class ProcessInstanceController extends BaseController{ @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName, @RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "executorName", required = false) String executorName, @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, @RequestParam(value = "host", required = false) String host, @RequestParam(value = "startDate", required = false) String startTime, @@ -96,12 +98,12 @@ public class ProcessInstanceController extends BaseController{ @RequestParam("pageSize") Integer pageSize){ try{ logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + - "search value:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", - loginUser.getUserName(), projectName, processDefinitionId, searchVal, stateType,host, + "search value:{},executor 
name:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", + loginUser.getUserName(), projectName, processDefinitionId, searchVal, executorName,stateType,host, startTime, endTime, pageNo, pageSize); searchVal = ParameterUtils.handleEscapes(searchVal); Map result = processInstanceService.queryProcessInstanceList( - loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, executorName, stateType, host, pageNo, pageSize); return returnDataListPaging(result); }catch (Exception e){ logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java index 5f63d744cf..276d2ff7da 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/TaskInstanceController.java @@ -69,6 +69,7 @@ public class TaskInstanceController extends BaseController{ @ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID",required = false, dataType = "Int", example = "100"), @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type ="String"), @ApiImplicitParam(name = "taskName", value = "TASK_NAME", type ="String"), + @ApiImplicitParam(name = "executorName", value = "EXECUTOR_NAME", type ="String"), @ApiImplicitParam(name = "stateType", value = "EXECUTION_STATUS", type ="ExecutionStatus"), @ApiImplicitParam(name = "host", value = "HOST", type ="String"), @ApiImplicitParam(name = "startDate", value = "START_DATE", type ="String"), @@ -83,6 +84,7 @@ public class TaskInstanceController extends BaseController{ @RequestParam(value = "processInstanceId", 
required = false, defaultValue = "0") Integer processInstanceId, @RequestParam(value = "searchVal", required = false) String searchVal, @RequestParam(value = "taskName", required = false) String taskName, + @RequestParam(value = "executorName", required = false) String executorName, @RequestParam(value = "stateType", required = false) ExecutionStatus stateType, @RequestParam(value = "host", required = false) String host, @RequestParam(value = "startDate", required = false) String startTime, @@ -91,11 +93,11 @@ public class TaskInstanceController extends BaseController{ @RequestParam("pageSize") Integer pageSize){ try{ - logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, state type:{}, host:{}, start:{}, end:{}", - projectName, processInstanceId, searchVal, taskName, stateType, host, startTime, endTime); + logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, executor name: {},state type:{}, host:{}, start:{}, end:{}", + projectName, processInstanceId, searchVal, taskName, executorName, stateType, host, startTime, endTime); searchVal = ParameterUtils.handleEscapes(searchVal); Map result = taskInstanceService.queryTaskListPaging( - loginUser, projectName, processInstanceId, taskName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); + loginUser, projectName, processInstanceId, taskName, executorName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); return returnDataListPaging(result); }catch (Exception e){ logger.error(Status.QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/log/LogClient.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/log/LogClient.java deleted file mode 100644 index 3452060ec9..0000000000 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/log/LogClient.java +++ /dev/null @@ -1,137 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.api.log; - -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import org.apache.dolphinscheduler.rpc.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.TimeUnit; - -/** - * log client - */ -public class LogClient { - - private static final Logger logger = LoggerFactory.getLogger(LogClient.class); - - private final ManagedChannel channel; - private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; - - /** - * construct client connecting to HelloWorld server at {@code host:port} - * - * @param host host - * @param port port - */ - public LogClient(String host, int port) { - this(ManagedChannelBuilder.forAddress(host, port) - // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid - // needing certificates. 
- .usePlaintext(true)); - } - - /** - * construct client for accessing RouteGuide server using the existing channel - * - */ - LogClient(ManagedChannelBuilder channelBuilder) { - /** - * set max read size - */ - channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); - channel = channelBuilder.build(); - blockingStub = LogViewServiceGrpc.newBlockingStub(channel); - } - - /** - * shutdown - * - * @throws InterruptedException InterruptedException - */ - public void shutdown() throws InterruptedException { - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } - - /** - * roll view log - * - * @param path path - * @param skipLineNum skip line number - * @param limit limit - * @return log content - */ - public String rollViewLog(String path,int skipLineNum,int limit) { - logger.info("roll view log : path {},skipLineNum {} ,limit {}", path, skipLineNum, limit); - LogParameter pathParameter = LogParameter - .newBuilder() - .setPath(path) - .setSkipLineNum(skipLineNum) - .setLimit(limit) - .build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.rollViewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("roll view log error", e); - return null; - } - } - - /** - * view log - * - * @param path path - * @return log content - */ - public String viewLog(String path) { - logger.info("view log path {}",path); - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.viewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("view log error", e); - return null; - } - } - - /** - * get log size - * - * @param path log path - * @return log content bytes - */ - public byte[] getLogBytes(String path) { - logger.info("log path {}",path); - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetByteInfo retByteInfo; - try { - retByteInfo = 
blockingStub.getLogBytes(pathParameter); - return retByteInfo.getData().toByteArray(); - } catch (StatusRuntimeException e) { - logger.error("log size error", e); - return null; - } - } - -} \ No newline at end of file diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java index af66591bed..de2c8d9cea 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/BaseDAGService.java @@ -20,12 +20,11 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; -import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.utils.DagHelper; -import java.util.ArrayList; import java.util.List; /** @@ -48,41 +47,8 @@ public class BaseDAGService extends BaseService{ List taskNodeList = processData.getTasks(); - List taskNodeRelations = new ArrayList<>(); + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - //Traversing node information and building relationships - for (TaskNode taskNode : taskNodeList) { - String preTasks = taskNode.getPreTasks(); - List preTasksList = JSONUtils.toList(preTasks, String.class); - - //if previous tasks not empty - if (preTasksList != null) { - for (String depNode : preTasksList) { - taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); - } - } - } - - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - 
processDag.setNodes(taskNodeList); - - - // generate detail Dag, to be executed - DAG dag = new DAG<>(); - - if (CollectionUtils.isNotEmpty(processDag.getNodes())) { - for (TaskNode node : processDag.getNodes()) { - dag.addNode(node.getName(), node); - } - } - - if (CollectionUtils.isNotEmpty(processDag.getEdges())) { - for (TaskNodeRelation edge : processDag.getEdges()) { - dag.addEdge(edge.getStartNode(), edge.getEndNode()); - } - } - - return dag; + return DagHelper.buildDagGraph(processDag); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java index b95782711f..bafe833fab 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java @@ -24,13 +24,13 @@ import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; -import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -69,7 +69,7 @@ public class DataAnalysisService extends 
BaseService{ TaskInstanceMapper taskInstanceMapper; @Autowired - ProcessDao processDao; + ProcessService processService; /** * statistical task instance status data @@ -296,7 +296,7 @@ public class DataAnalysisService extends BaseService{ if(projectId !=0){ projectIds.add(projectId); }else if(loginUser.getUserType() == UserType.GENERAL_USER){ - projectIds = processDao.getProjectIdListHavePerm(loginUser.getId()); + projectIds = processService.getProjectIdListHavePerm(loginUser.getId()); if(projectIds.size() ==0 ){ projectIds.add(0); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index 5d33b46bd2..f6d8903dd8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -21,10 +21,9 @@ import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.job.db.*; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.datasource.*; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.User; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java index 257f15d580..152292a21b 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java @@ -25,12 +25,12 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; -import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -67,7 +67,7 @@ public class ExecutorService extends BaseService{ @Autowired - private ProcessDao processDao; + private ProcessService processService; /** * execute process instance @@ -117,7 +117,7 @@ public class ExecutorService extends BaseService{ } if (!checkTenantSuitable(processDefinition)){ - logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", + logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); return result; @@ -186,13 +186,13 @@ public class ExecutorService extends BaseService{ return checkResult; } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (processInstance == 
null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } - ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); if(executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE){ result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); if (result.get(Constants.STATUS) != Status.SUCCESS) { @@ -206,7 +206,7 @@ public class ExecutorService extends BaseService{ return checkResult; } if (!checkTenantSuitable(processDefinition)){ - logger.error("there is not any vaild tenant for the process definition: id:{},name:{}, ", + logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); } @@ -227,7 +227,7 @@ public class ExecutorService extends BaseService{ } else { processInstance.setCommandType(CommandType.STOP); processInstance.addHistoryCmd(CommandType.STOP); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP); } break; @@ -237,12 +237,12 @@ public class ExecutorService extends BaseService{ } else { processInstance.setCommandType(CommandType.PAUSE); processInstance.addHistoryCmd(CommandType.PAUSE); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE); } break; default: - logger.error(String.format("unknown execute type : %s", executeType.toString())); + logger.error("unknown execute type : {}", executeType); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); break; @@ -257,7 
+257,7 @@ public class ExecutorService extends BaseService{ */ private boolean checkTenantSuitable(ProcessDefinition processDefinition) { // checkTenantExists(); - Tenant tenant = processDao.getTenantForProcess(processDefinition.getTenantId(), + Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), processDefinition.getUserId()); if(tenant == null){ return false; @@ -319,7 +319,7 @@ public class ExecutorService extends BaseService{ private Map updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { Map result = new HashMap<>(5); - int update = processDao.updateProcessInstanceState(processInstanceId, executionStatus); + int update = processService.updateProcessInstanceState(processInstanceId, executionStatus); if (update > 0) { putMsg(result, Status.SUCCESS); } else { @@ -347,12 +347,12 @@ public class ExecutorService extends BaseService{ CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId)); command.setExecutorId(loginUser.getId()); - if(!processDao.verifyIsNeedCreateCommand(command)){ + if(!processService.verifyIsNeedCreateCommand(command)){ putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId); return result; } - int create = processDao.createCommand(command); + int create = processService.createCommand(command); if (create > 0) { putMsg(result, Status.SUCCESS); @@ -376,7 +376,7 @@ public class ExecutorService extends BaseService{ putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id"); } List ids = new ArrayList<>(); - processDao.recurseFindSubProcessId(processDefineId, ids); + processService.recurseFindSubProcessId(processDefineId, ids); Integer[] idArray = ids.toArray(new Integer[ids.size()]); if (ids.size() > 0){ List processDefinitionList; @@ -506,9 +506,9 @@ public class ExecutorService extends BaseService{ cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, 
DateUtils.dateToString(end)); command.setCommandParam(JSONUtils.toJson(cmdParam)); - return processDao.createCommand(command); + return processService.createCommand(command); }else if (runMode == RunMode.RUN_MODE_PARALLEL){ - List schedules = processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); + List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId); List listDate = new LinkedList<>(); if(!CollectionUtils.isEmpty(schedules)){ for (Schedule item : schedules) { @@ -521,7 +521,7 @@ public class ExecutorService extends BaseService{ cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date)); command.setCommandParam(JSONUtils.toJson(cmdParam)); - processDao.createCommand(command); + processService.createCommand(command); } return listDate.size(); }else{ @@ -532,19 +532,19 @@ public class ExecutorService extends BaseService{ cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); command.setCommandParam(JSONUtils.toJson(cmdParam)); - processDao.createCommand(command); + processService.createCommand(command); start = DateUtils.getSomeDay(start, 1); } return runCunt; } } }else{ - logger.error("there is not vaild schedule date for the process definition: id:{},date:{}", + logger.error("there is not valid schedule date for the process definition: id:{},date:{}", processDefineId, schedule); } }else{ command.setCommandParam(JSONUtils.toJson(cmdParam)); - return processDao.createCommand(command); + return processService.createCommand(command); } return 0; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java index 2587290fd3..1f65208240 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/LoggerService.java @@ -17,17 +17,19 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.log.LogClient; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.log.LogClientService; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import javax.annotation.PreDestroy; + /** * log service */ @@ -37,7 +39,18 @@ public class LoggerService { private static final Logger logger = LoggerFactory.getLogger(LoggerService.class); @Autowired - private ProcessDao processDao; + private ProcessService processService; + + private final LogClientService logClient; + + public LoggerService(){ + logClient = new LogClientService(); + } + + @PreDestroy + public void close(){ + logClient.close(); + } /** * view log @@ -49,7 +62,7 @@ public class LoggerService { */ public Result queryLog(int taskInstId, int skipLineNum, int limit) { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); if (taskInstance == null){ return new Result(Status.TASK_INSTANCE_NOT_FOUND.getCode(), Status.TASK_INSTANCE_NOT_FOUND.getMsg()); @@ -64,12 +77,9 @@ public class LoggerService { Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); logger.info("log host : {} , logPath : {} , 
logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT); - - LogClient logClient = new LogClient(host, Constants.RPC_PORT); - String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit); + String log = logClient.rollViewLog(host, Constants.RPC_PORT, taskInstance.getLogPath(),skipLineNum,limit); result.setData(log); logger.info(log); - return result; } @@ -80,17 +90,11 @@ public class LoggerService { * @return log byte array */ public byte[] getLogBytes(int taskInstId) { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); if (taskInstance == null){ throw new RuntimeException("task instance is null"); } - String host = taskInstance.getHost(); - if(StringUtils.isEmpty(host)){ - throw new RuntimeException("task instance host is null"); - } - - LogClient logClient = new LogClient(host, Constants.RPC_PORT); - return logClient.getLogBytes(taskInstance.getLogPath()); + return logClient.getLogBytes(host, Constants.RPC_PORT, taskInstance.getLogPath()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java index 8a762d7557..22e3593a52 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java @@ -43,9 +43,10 @@ import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; 
+import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -94,7 +95,7 @@ public class ProcessDefinitionService extends BaseDAGService { private ScheduleMapper scheduleMapper; @Autowired - private ProcessDao processDao; + private ProcessService processService; @Autowired private WorkerGroupMapper workerGroupMapper; @@ -283,7 +284,7 @@ public class ProcessDefinitionService extends BaseDAGService { if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { return checkProcessJson; } - ProcessDefinition processDefinition = processDao.findProcessDefineById(id); + ProcessDefinition processDefinition = processService.findProcessDefineById(id); if (processDefinition == null) { // check process definition exists putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); @@ -296,7 +297,7 @@ public class ProcessDefinitionService extends BaseDAGService { putMsg(result, Status.SUCCESS); } - ProcessDefinition processDefine = processDao.findProcessDefineById(id); + ProcessDefinition processDefine = processService.findProcessDefineById(id); Date now = new Date(); processDefine.setId(id); @@ -442,6 +443,13 @@ public class ProcessDefinitionService extends BaseDAGService { } ReleaseState state = ReleaseState.getEnum(releaseState); + + // check state + if (null == state) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState"); + return result; + } + ProcessDefinition processDefinition = processDefineMapper.selectById(id); switch (state) { @@ -458,7 +466,7 @@ public class ProcessDefinitionService extends BaseDAGService { ); for(Schedule schedule:scheduleList){ - logger.info("set schedule offline, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id); + logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", 
project.getId(), schedule.getId(), id); // set status schedule.setReleaseState(ReleaseState.OFFLINE); scheduleMapper.updateById(schedule); @@ -941,11 +949,16 @@ public class ProcessDefinitionService extends BaseDAGService { return result; } - String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + //process data check + if (null == processData) { + logger.error("process data is null"); + putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson); + return result; + } + List taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks(); result.put(Constants.DATA_LIST, taskNodeList); @@ -967,14 +980,13 @@ public class ProcessDefinitionService extends BaseDAGService { Map> taskNodeMap = new HashMap<>(); String[] idList = defineIdList.split(","); - List definitionIdList = Arrays.asList(idList); List idIntList = new ArrayList<>(); - for(String definitionId : definitionIdList) { + for(String definitionId : idList) { idIntList.add(Integer.parseInt(definitionId)); } Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]); List processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray); - if (processDefinitionList == null || processDefinitionList.size() ==0) { + if (CollectionUtils.isEmpty(processDefinitionList)) { logger.info("process definition not exists"); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList); return result; @@ -1024,9 +1036,10 @@ public class ProcessDefinitionService extends BaseDAGService { Map result = new HashMap<>(); ProcessDefinition processDefinition = processDefineMapper.selectById(processId); - if (processDefinition == null) { + if (null == processDefinition) { logger.info("process define not exists"); - throw new RuntimeException("process define not exists"); + putMsg(result,Status.PROCESS_DEFINE_NOT_EXIST, processDefinition); + return result; } DAG dag 
= genDagGraph(processDefinition); /** @@ -1114,10 +1127,10 @@ public class ProcessDefinitionService extends BaseDAGService { pTreeViewDto.getChildren().add(treeViewDto); } postNodeList = dag.getSubsequentNodes(nodeName); - if (postNodeList != null && postNodeList.size() > 0) { + if (CollectionUtils.isNotEmpty(postNodeList)) { for (String nextNodeName : postNodeList) { List treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); - if (treeViewDtoList != null && treeViewDtoList.size() > 0) { + if (CollectionUtils.isNotEmpty(treeViewDtoList)) { treeViewDtoList.add(treeViewDto); waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); } else { @@ -1129,7 +1142,6 @@ public class ProcessDefinitionService extends BaseDAGService { } runningNodeMap.remove(nodeName); } - if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { break; } else { @@ -1154,75 +1166,29 @@ public class ProcessDefinitionService extends BaseDAGService { private DAG genDagGraph(ProcessDefinition processDefinition) throws Exception { String processDefinitionJson = processDefinition.getProcessDefinitionJson(); - ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); - List taskNodeList = processData.getTasks(); - - processDefinition.setGlobalParamList(processData.getGlobalParams()); - + //check process data + if (null != processData) { + List taskNodeList = processData.getTasks(); + processDefinition.setGlobalParamList(processData.getGlobalParams()); + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); - List taskNodeRelations = new ArrayList<>(); - - // Traverse node information and build relationships - for (TaskNode taskNode : taskNodeList) { - String preTasks = taskNode.getPreTasks(); - List preTasksList = JSONUtils.toList(preTasks, String.class); - - // If the dependency is not empty - if (preTasksList != null) { - for (String depNode : preTasksList) { - taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); - } 
- } + // Generate concrete Dag to be executed + return DagHelper.buildDagGraph(processDag); } - ProcessDag processDag = new ProcessDag(); - processDag.setEdges(taskNodeRelations); - processDag.setNodes(taskNodeList); - - - // Generate concrete Dag to be executed - return genDagGraph(processDag); - - + return new DAG<>(); } - /** - * Generate the DAG of process - * - * @return DAG - */ - private DAG genDagGraph(ProcessDag processDag) { - DAG dag = new DAG<>(); - - /** - * Add the ndoes - */ - if (CollectionUtils.isNotEmpty(processDag.getNodes())) { - for (TaskNode node : processDag.getNodes()) { - dag.addNode(node.getName(), node); - } - } - - /** - * Add the edges - */ - if (CollectionUtils.isNotEmpty(processDag.getEdges())) { - for (TaskNodeRelation edge : processDag.getEdges()) { - dag.addEdge(edge.getStartNode(), edge.getEndNode()); - } - } - return dag; - } /** * whether the graph has a ring * - * @param taskNodeResponseList - * @return + * @param taskNodeResponseList task node response list + * @return if graph has cycle flag */ private boolean graphHasCycle(List taskNodeResponseList) { DAG graph = new DAG<>(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java index 87e1a0ede1..4f81d89505 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java @@ -30,15 +30,15 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; import org.apache.dolphinscheduler.common.utils.*; import 
org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import com.alibaba.fastjson.JSON; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -72,7 +72,7 @@ public class ProcessInstanceService extends BaseDAGService { ProjectService projectService; @Autowired - ProcessDao processDao; + ProcessService processService; @Autowired ProcessInstanceMapper processInstanceMapper; @@ -95,6 +95,9 @@ public class ProcessInstanceService extends BaseDAGService { @Autowired WorkerGroupMapper workerGroupMapper; + @Autowired + UsersService usersService; + /** * query process instance by id * @@ -112,7 +115,7 @@ public class ProcessInstanceService extends BaseDAGService { if (resultEnum != Status.SUCCESS) { return checkResult; } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); String workerGroupName = ""; if(processInstance.getWorkerGroupId() == -1){ workerGroupName = DEFAULT; @@ -125,7 +128,7 @@ public class ProcessInstanceService extends BaseDAGService { } } processInstance.setWorkerGroupName(workerGroupName); - ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); processInstance.setReceivers(processDefinition.getReceivers()); processInstance.setReceiversCc(processDefinition.getReceiversCc()); 
result.put(Constants.DATA_LIST, processInstance); @@ -151,7 +154,7 @@ public class ProcessInstanceService extends BaseDAGService { */ public Map queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, String startDate, String endDate, - String searchVal, ExecutionStatus stateType, String host, + String searchVal, String executorName,ExecutionStatus stateType, String host, Integer pageNo, Integer pageSize) { Map result = new HashMap<>(5); @@ -182,25 +185,31 @@ public class ProcessInstanceService extends BaseDAGService { putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); return result; } + Page page = new Page(pageNo, pageSize); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + int executorId = usersService.getUserIdByName(executorName); IPage processInstanceList = processInstanceMapper.queryProcessInstanceListPaging(page, - project.getId(), processDefineId, searchVal, statusArray, host, start, end); + project.getId(), processDefineId, searchVal, executorId,statusArray, host, start, end); List processInstances = processInstanceList.getRecords(); for(ProcessInstance processInstance: processInstances){ processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(),processInstance.getEndTime())); + User executor = usersService.queryUser(processInstance.getExecutorId()); + if (null != executor) { + processInstance.setExecutorName(executor.getUserName()); + } } - Set exclusionSet = new HashSet(); + Set exclusionSet = new HashSet<>(); exclusionSet.add(Constants.CLASS); exclusionSet.add("locations"); exclusionSet.add("connects"); exclusionSet.add("processInstanceJson"); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); pageInfo.setTotalCount((int) processInstanceList.getTotal()); pageInfo.setLists(CollectionUtils.getListByExclusion(processInstances, exclusionSet)); result.put(Constants.DATA_LIST, pageInfo); @@ -228,8 +237,8 @@ public class ProcessInstanceService extends BaseDAGService { if 
(resultEnum != Status.SUCCESS) { return checkResult; } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); - List taskInstanceList = processDao.findValidTaskListByProcessId(processId); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); + List taskInstanceList = processService.findValidTaskListByProcessId(processId); AddDependResultForTaskList(taskInstanceList); Map resultMap = new HashMap<>(); resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); @@ -304,7 +313,7 @@ public class ProcessInstanceService extends BaseDAGService { return checkResult; } - TaskInstance taskInstance = processDao.findTaskInstanceById(taskId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskId); if (taskInstance == null) { putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); return result; @@ -314,7 +323,7 @@ public class ProcessInstanceService extends BaseDAGService { return result; } - ProcessInstance subWorkflowInstance = processDao.findSubProcessInstance( + ProcessInstance subWorkflowInstance = processService.findSubProcessInstance( taskInstance.getProcessInstanceId(), taskInstance.getId()); if (subWorkflowInstance == null) { putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); @@ -356,7 +365,7 @@ public class ProcessInstanceService extends BaseDAGService { } //check process instance exists - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); if (processInstance == null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; @@ -380,7 +389,7 @@ public class ProcessInstanceService extends BaseDAGService { String globalParams = null; String originDefParams = null; int timeout = processInstance.getTimeout(); - ProcessDefinition processDefinition = 
processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); if (StringUtils.isNotEmpty(processInstanceJson)) { ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); //check workflow json is valid @@ -396,7 +405,7 @@ public class ProcessInstanceService extends BaseDAGService { processInstance.getCmdTypeIfComplement(), schedule); timeout = processData.getTimeout(); processInstance.setTimeout(timeout); - Tenant tenant = processDao.getTenantForProcess(processData.getTenantId(), + Tenant tenant = processService.getTenantForProcess(processData.getTenantId(), processDefinition.getUserId()); if(tenant != null){ processInstance.setTenantCode(tenant.getTenantCode()); @@ -406,7 +415,7 @@ public class ProcessInstanceService extends BaseDAGService { } // int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson, // globalParams, schedule, flag, locations, connects); - int update = processDao.updateProcessInstance(processInstance); + int update = processService.updateProcessInstance(processInstance); int updateDefine = 1; if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) { processDefinition.setProcessDefinitionJson(processInstanceJson); @@ -445,7 +454,7 @@ public class ProcessInstanceService extends BaseDAGService { return checkResult; } - ProcessInstance subInstance = processDao.findProcessInstanceDetailById(subId); + ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId); if (subInstance == null) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); return result; @@ -455,7 +464,7 @@ public class ProcessInstanceService extends BaseDAGService { return result; } - ProcessInstance parentWorkflowInstance = processDao.findParentProcessInstance(subId); + ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId); 
if (parentWorkflowInstance == null) { putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); return result; @@ -476,7 +485,7 @@ public class ProcessInstanceService extends BaseDAGService { * @return delete result code */ @Transactional(rollbackFor = Exception.class) - public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId,ITaskQueue tasksQueue) { + public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId, ITaskQueue tasksQueue) { Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -486,8 +495,8 @@ public class ProcessInstanceService extends BaseDAGService { if (resultEnum != Status.SUCCESS) { return checkResult; } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstanceId); + ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); + List taskInstanceList = processService.findValidTaskListByProcessId(processInstanceId); if (null == processInstance) { putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); @@ -512,7 +521,7 @@ public class ProcessInstanceService extends BaseDAGService { .append(taskInstance.getId()) .append(UNDERLINE); - int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance); + int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance); WorkerGroup workerGroup = workerGroupMapper.selectById(taskWorkerGroupId); if(workerGroup == null){ @@ -541,9 +550,9 @@ public class ProcessInstanceService extends BaseDAGService { } // delete database cascade - int delete = processDao.deleteWorkProcessInstanceById(processInstanceId); - processDao.deleteAllSubWorkProcessByParentId(processInstanceId); - processDao.deleteWorkProcessMapByParentId(processInstanceId); + int delete = 
processService.deleteWorkProcessInstanceById(processInstanceId); + processService.deleteAllSubWorkProcessByParentId(processInstanceId); + processService.deleteWorkProcessMapByParentId(processInstanceId); if (delete > 0) { putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java index 29a16447e1..c1adb8874d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ResourcesService.java @@ -119,7 +119,7 @@ public class ResourcesService extends BaseService { putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); return result; } - if (file.getSize() > Constants.maxFileSize) { + if (file.getSize() > Constants.MAX_FILE_SIZE) { logger.error("file size is too large: {}", file.getOriginalFilename()); putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); return result; @@ -547,7 +547,7 @@ public class ResourcesService extends BaseService { } } catch (Exception e) { - logger.error(String.format("Resource %s read failed", hdfsFileName), e); + logger.error("Resource {} read failed", hdfsFileName, e); putMsg(result, Status.HDFS_OPERATION_ERROR); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java index bdce9470ca..72122100a1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/SchedulerService.java @@ -26,7 +26,6 @@ import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.common.utils.DateUtils; import 
org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.Schedule; @@ -34,11 +33,12 @@ import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; -import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; -import org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob; -import org.apache.dolphinscheduler.dao.quartz.QuartzExecutors; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; +import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,7 +68,7 @@ public class SchedulerService extends BaseService { private MonitorService monitorService; @Autowired - private ProcessDao processDao; + private ProcessService processService; @Autowired private ScheduleMapper scheduleMapper; @@ -119,7 +119,7 @@ public class SchedulerService extends BaseService { } // check work flow define release state - ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); + ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); if (result.get(Constants.STATUS) != Status.SUCCESS) { return result; @@ -221,7 
+221,7 @@ public class SchedulerService extends BaseService { return result; } - ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); return result; @@ -321,7 +321,7 @@ public class SchedulerService extends BaseService { putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); return result; } - ProcessDefinition processDefinition = processDao.findProcessDefineById(scheduleObj.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId()); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); return result; @@ -338,7 +338,7 @@ public class SchedulerService extends BaseService { } // check sub process definition release state List subProcessDefineIds = new ArrayList<>(); - processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); + processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]); if (subProcessDefineIds.size() > 0){ List subProcessDefinitionList = @@ -423,7 +423,7 @@ public class SchedulerService extends BaseService { return result; } - ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); + ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); return result; @@ -472,7 +472,7 @@ public class SchedulerService extends BaseService { logger.info("set schedule, project id: {}, 
scheduleId: {}", projectId, scheduleId); - Schedule schedule = processDao.querySchedule(scheduleId); + Schedule schedule = processService.querySchedule(scheduleId); if (schedule == null) { logger.warn("process schedule info not exists"); return; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java index 74afa2a44e..e4fec54395 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/TaskInstanceService.java @@ -17,6 +17,8 @@ package org.apache.dolphinscheduler.api.service; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; @@ -24,15 +26,12 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; -import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; -import com.baomidou.mybatisplus.core.metadata.IPage; -import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; @@ -56,11 +55,17 @@ public class TaskInstanceService extends BaseService { ProjectService projectService; @Autowired - ProcessDao processDao; + ProcessService processService; @Autowired TaskInstanceMapper taskInstanceMapper; + @Autowired + ProcessInstanceService processInstanceService; + + @Autowired + UsersService usersService; + /** * query task list by project, process instance, task name, task start time, task end time, task status, keyword paging @@ -79,8 +84,8 @@ public class TaskInstanceService extends BaseService { * @return task list page */ public Map queryTaskListPaging(User loginUser, String projectName, - Integer processInstanceId, String taskName, String startDate, String endDate, - String searchVal, ExecutionStatus stateType,String host, + Integer processInstanceId, String taskName, String executorName, String startDate, + String endDate, String searchVal, ExecutionStatus stateType,String host, Integer pageNo, Integer pageSize) { Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -112,17 +117,23 @@ public class TaskInstanceService extends BaseService { } Page page = new Page(pageNo, pageSize); + PageInfo pageInfo = new PageInfo(pageNo, pageSize); + int executorId = usersService.getUserIdByName(executorName); + IPage taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging( - page, project.getId(), processInstanceId, searchVal, taskName, statusArray, host, start, end + page, project.getId(), processInstanceId, searchVal, taskName, executorId, statusArray, host, start, end ); - PageInfo pageInfo = new PageInfo(pageNo, pageSize); Set exclusionSet = new HashSet<>(); exclusionSet.add(Constants.CLASS); exclusionSet.add("taskJson"); List taskInstanceList = taskInstanceIPage.getRecords(); + for(TaskInstance taskInstance : taskInstanceList){ - taskInstance.setDuration(DateUtils.differSec(taskInstance.getStartTime(), - taskInstance.getEndTime())); + 
taskInstance.setDuration(DateUtils.differSec(taskInstance.getStartTime(), taskInstance.getEndTime())); + User executor = usersService.queryUser(taskInstance.getExecutorId()); + if (null != executor) { + taskInstance.setExecutorName(executor.getUserName()); + } } pageInfo.setTotalCount((int)taskInstanceIPage.getTotal()); pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(),exclusionSet)); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java index fb8dcc97ab..1e5ec9e369 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java @@ -191,6 +191,26 @@ public class UsersService extends BaseService { return userMapper.queryUserByNamePassword(name, md5); } + /** + * get user id by user name + * @param name user name + * @return if name empty 0, user not exists -1, user exist user id + */ + public int getUserIdByName(String name) { + //executor name query + int executorId = 0; + if (StringUtils.isNotEmpty(name)) { + User executor = queryUser(name); + if (null != executor) { + executorId = executor.getId(); + } else { + executorId = -1; + } + } + + return executorId; + } + /** * query user list * diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java index 7099378b1d..a888712511 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/CheckUtils.java @@ -35,7 +35,9 @@ import java.util.regex.Pattern; */ public class CheckUtils { - + private CheckUtils() { + throw new IllegalStateException("CheckUtils 
class"); + } /** * check username * @@ -84,7 +86,7 @@ public class CheckUtils { * @return true if other parameters are valid, otherwise return false */ public static boolean checkOtherParams(String otherParams) { - return StringUtils.isNotEmpty(otherParams) && !JSONUtils.checkJsonVaild(otherParams); + return StringUtils.isNotEmpty(otherParams) && !JSONUtils.checkJsonValid(otherParams); } /** diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java index b04e773aea..340a389d1c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMain.java @@ -29,13 +29,17 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketTimeoutException; - +import java.util.Objects; public class FourLetterWordMain { private static final int DEFAULT_SOCKET_TIMEOUT = 5000; protected static final Logger LOG = LoggerFactory.getLogger(FourLetterWordMain.class); + private FourLetterWordMain() { + throw new IllegalStateException("FourLetterWordMain class"); + } + /** * Send the 4letterword * @param host the destination host @@ -48,6 +52,7 @@ public class FourLetterWordMain { throws IOException { return send4LetterWord(host, port, cmd, DEFAULT_SOCKET_TIMEOUT); } + /** * Send the 4letterword * @param host the destination host @@ -59,28 +64,30 @@ public class FourLetterWordMain { */ public static String send4LetterWord(String host, int port, String cmd, int timeout) throws IOException { + Objects.requireNonNull(cmd, "cmd must not be null"); LOG.info("connecting to {} {}", host, port); InetSocketAddress hostaddress= host != null ? 
new InetSocketAddress(host, port) : new InetSocketAddress(InetAddress.getByName(null), port); - try (Socket sock = new Socket(); - OutputStream outstream = sock.getOutputStream(); - BufferedReader reader = - new BufferedReader( - new InputStreamReader(sock.getInputStream()))) { + try (Socket sock = new Socket()) { sock.setSoTimeout(timeout); sock.connect(hostaddress, timeout); + OutputStream outstream = sock.getOutputStream(); outstream.write(cmd.getBytes()); outstream.flush(); // this replicates NC - close the output stream before reading sock.shutdownOutput(); - StringBuilder sb = new StringBuilder(); - String line; - while((line = reader.readLine()) != null) { - sb.append(line + "\n"); + try (BufferedReader reader = + new BufferedReader( + new InputStreamReader(sock.getInputStream()))) { + StringBuilder sb = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + sb.append(line + "\n"); + } + return sb.toString(); } - return sb.toString(); } catch (SocketTimeoutException e) { throw new IOException("Exception while executing four letter word: " + cmd, e); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitor.java index 66f57f6a11..f91d3bc68c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/ZookeeperMonitor.java @@ -18,9 +18,9 @@ package org.apache.dolphinscheduler.api.utils; import org.apache.dolphinscheduler.common.enums.ZKNodeType; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; import org.apache.dolphinscheduler.common.model.Server; import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; @@ -34,7 +34,7 @@ import java.util.List; * monitor zookeeper info */ @Component -public class ZookeeperMonitor extends AbstractZKClient{ +public class ZookeeperMonitor extends AbstractZKClient { private static final Logger LOG = LoggerFactory.getLogger(ZookeeperMonitor.class); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java index c7afd76cc6..6f308e7b17 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java @@ -21,15 +21,15 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; -import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.CommandCount; import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; import org.apache.dolphinscheduler.dao.entity.Project; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -78,7 +78,7 @@ public class DataAnalysisServiceTest { ITaskQueue taskQueue; @Mock - 
ProcessDao processDao; + ProcessService processService; private Project project; diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java index 66c7a3ebab..07d7477930 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java @@ -22,10 +22,10 @@ import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.ReleaseState; import org.apache.dolphinscheduler.common.enums.RunMode; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -52,7 +52,7 @@ public class ExecutorService2Test { private ExecutorService executorService; @Mock - private ProcessDao processDao; + private ProcessService processService; @Mock private ProcessDefinitionMapper processDefinitionMapper; @@ -100,8 +100,8 @@ public class ExecutorService2Test { Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkProjectAndAuth()); Mockito.when(processDefinitionMapper.selectById(processDefinitionId)).thenReturn(processDefinition); - Mockito.when(processDao.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant()); - Mockito.when(processDao.createCommand(any(Command.class))).thenReturn(1); + Mockito.when(processService.getTenantForProcess(tenantId, 
userId)).thenReturn(new Tenant()); + Mockito.when(processService.createCommand(any(Command.class))).thenReturn(1); } /** @@ -111,7 +111,7 @@ public class ExecutorService2Test { @Test public void testNoComplement() throws ParseException { try { - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.START_PROCESS, null, null, @@ -119,7 +119,7 @@ public class ExecutorService2Test { "", "", RunMode.RUN_MODE_SERIAL, Priority.LOW, 0, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - verify(processDao, times(1)).createCommand(any(Command.class)); + verify(processService, times(1)).createCommand(any(Command.class)); }catch (Exception e){ Assert.assertTrue(false); } @@ -132,7 +132,7 @@ public class ExecutorService2Test { @Test public void testDateError() throws ParseException { try { - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, "2020-01-31 23:00:00,2020-01-01 00:00:00", CommandType.COMPLEMENT_DATA, null, null, @@ -140,7 +140,7 @@ public class ExecutorService2Test { "", "", RunMode.RUN_MODE_SERIAL, Priority.LOW, 0, 110); Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS)); - verify(processDao, times(0)).createCommand(any(Command.class)); + verify(processService, times(0)).createCommand(any(Command.class)); }catch (Exception e){ Assert.assertTrue(false); } @@ -153,7 +153,7 @@ public class 
ExecutorService2Test { @Test public void testSerial() throws ParseException { try { - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, @@ -161,7 +161,7 @@ public class ExecutorService2Test { "", "", RunMode.RUN_MODE_SERIAL, Priority.LOW, 0, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - verify(processDao, times(1)).createCommand(any(Command.class)); + verify(processService, times(1)).createCommand(any(Command.class)); }catch (Exception e){ Assert.assertTrue(false); } @@ -174,7 +174,7 @@ public class ExecutorService2Test { @Test public void testParallelWithOutSchedule() throws ParseException { try{ - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, @@ -182,7 +182,7 @@ public class ExecutorService2Test { "", "", RunMode.RUN_MODE_PARALLEL, Priority.LOW, 0, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - verify(processDao, times(31)).createCommand(any(Command.class)); + verify(processService, times(31)).createCommand(any(Command.class)); }catch (Exception e){ Assert.assertTrue(false); } @@ -195,7 +195,7 @@ public class ExecutorService2Test { @Test public void testParallelWithSchedule() throws ParseException { try{ - 
Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Map result = executorService.execProcessInstance(loginUser, projectName, processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA, null, null, @@ -203,7 +203,7 @@ public class ExecutorService2Test { "", "", RunMode.RUN_MODE_PARALLEL, Priority.LOW, 0, 110); Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS)); - verify(processDao, times(15)).createCommand(any(Command.class)); + verify(processService, times(15)).createCommand(any(Command.class)); }catch (Exception e){ Assert.assertTrue(false); } diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java index c6ab6f8e74..20571577e3 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/LoggerServiceTest.java @@ -18,8 +18,8 @@ package org.apache.dolphinscheduler.api.service; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -40,14 +40,14 @@ public class LoggerServiceTest { @InjectMocks private LoggerService loggerService; @Mock - private ProcessDao processDao; + private ProcessService processService; @Test public void testQueryDataSourceList(){ TaskInstance taskInstance = new TaskInstance(); - 
Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); Result result = loggerService.queryLog(2,1,1); //TASK_INSTANCE_NOT_FOUND Assert.assertEquals(Status.TASK_INSTANCE_NOT_FOUND.getCode(),result.getCode().intValue()); @@ -59,7 +59,7 @@ public class LoggerServiceTest { //SUCCESS taskInstance.setHost("127.0.0.1"); taskInstance.setLogPath("/temp/log"); - Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); result = loggerService.queryLog(1,1,1); Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); } @@ -68,7 +68,7 @@ public class LoggerServiceTest { public void testGetLogBytes(){ TaskInstance taskInstance = new TaskInstance(); - Mockito.when(processDao.findTaskInstanceById(1)).thenReturn(taskInstance); + Mockito.when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); //task instance is null try{ diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java index a4b07e1835..1e6ee13c57 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java @@ -16,7 +16,6 @@ */ package org.apache.dolphinscheduler.api.service; -import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONArray; import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.api.ApiApplicationServer; @@ -27,9 +26,9 @@ import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.FileUtils; import 
org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.http.entity.ContentType; import org.json.JSONException; import org.junit.Assert; @@ -79,7 +78,13 @@ public class ProcessDefinitionServiceTest { private WorkerGroupMapper workerGroupMapper; @Mock - private ProcessDao processDao; + private ProcessService processService; + + @Mock + private ProcessInstanceMapper processInstanceMapper; + + @Mock + private TaskInstanceMapper taskInstanceMapper; private String sqlDependentJson = "{\"globalParams\":[]," + "\"tasks\":[{\"type\":\"SQL\",\"id\":\"tasks-27297\",\"name\":\"sql\"," + @@ -98,8 +103,14 @@ public class ProcessDefinitionServiceTest { "\"timeout\":{\"strategy\":\"\",\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; + private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; + @Test - public void queryProccessDefinitionList() throws Exception { + public void testQueryProccessDefinitionList() { String projectName = "project_test1"; Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); @@ -111,15 +122,23 @@ public class ProcessDefinitionServiceTest { Map result = new HashMap<>(5); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + //project not 
found Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); + + //project check auth success + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + List resourceList = new ArrayList<>(); + resourceList.add(getProcessDefinition()); + Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList); + Map checkSuccessRes = processDefinitionService.queryProccessDefinitionList(loginUser,"project_test1"); + Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS)); } @Test - public void queryProcessDefinitionListPagingTest() throws Exception { + public void testQueryProcessDefinitionListPaging() { String projectName = "project_test1"; Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); @@ -132,12 +151,46 @@ public class ProcessDefinitionServiceTest { Map result = new HashMap<>(5); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + //project not found Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "",1, 5,0); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + + } + + @Test + public void testQueryProcessDefinitionById() { + String projectName = "project_test1"; + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + + Project project = getProject(projectName); + + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + Map result = new HashMap<>(5); + putMsg(result, 
Status.PROJECT_NOT_FOUNT, projectName); + + //project check auth fail + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Map map = processDefinitionService.queryProccessDefinitionById(loginUser, + "project_test1", 1); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); + + //project check auth success, instance not exist + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(processDefineMapper.selectById(1)).thenReturn(null); + Map instanceNotexitRes = processDefinitionService.queryProccessDefinitionById(loginUser, + "project_test1", 1); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, instanceNotexitRes.get(Constants.STATUS)); + + //instance exit + Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition()); + Map successRes = processDefinitionService.queryProccessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); } @Test @@ -150,14 +203,264 @@ public class ProcessDefinitionServiceTest { loginUser.setId(-1); loginUser.setUserType(UserType.GENERAL_USER); + //project check auth fail Map result = new HashMap<>(5); putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); - Map map = processDefinitionService.deleteProcessDefinitionById(loginUser, "project_test1", 6); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + + //project check auth success, instance not exist + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Mockito.when(processDefineMapper.selectById(1)).thenReturn(null); + Map instanceNotexitRes = 
processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 1); + Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotexitRes.get(Constants.STATUS)); + + ProcessDefinition processDefinition = getProcessDefinition(); + //user no auth + loginUser.setUserType(UserType.GENERAL_USER); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Map userNoAuthRes = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoAuthRes.get(Constants.STATUS)); + //process definition online + loginUser.setUserType(UserType.ADMIN_USER); + processDefinition.setReleaseState(ReleaseState.ONLINE); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Map dfOnlineRes = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.PROCESS_DEFINE_STATE_ONLINE, dfOnlineRes.get(Constants.STATUS)); + + //scheduler list elements > 1 + processDefinition.setReleaseState(ReleaseState.OFFLINE); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + List schedules = new ArrayList<>(); + schedules.add(getSchedule()); + schedules.add(getSchedule()); + Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules); + Map schedulerGreaterThanOneRes = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR, schedulerGreaterThanOneRes.get(Constants.STATUS)); + + //scheduler online + schedules.clear(); + Schedule schedule = getSchedule(); + schedule.setReleaseState(ReleaseState.ONLINE); + schedules.add(schedule); + Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules); + Map schedulerOnlineRes = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + 
Assert.assertEquals(Status.SCHEDULE_CRON_STATE_ONLINE, schedulerOnlineRes.get(Constants.STATUS)); + + //delete fail + schedules.clear(); + schedule.setReleaseState(ReleaseState.OFFLINE); + schedules.add(schedule); + Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules); + Mockito.when(processDefineMapper.deleteById(46)).thenReturn(0); + Map deleteFail = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR, deleteFail.get(Constants.STATUS)); + + //delete success + Mockito.when(processDefineMapper.deleteById(46)).thenReturn(1); + Map deleteSuccess = processDefinitionService.deleteProcessDefinitionById(loginUser, + "project_test1", 46); + Assert.assertEquals(Status.SUCCESS, deleteSuccess.get(Constants.STATUS)); + } + + @Test + public void testReleaseProcessDefinition() { + String projectName = "project_test1"; + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + + Project project = getProject(projectName); + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + //project check auth fail + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Map map = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", + 6, ReleaseState.OFFLINE.getCode()); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); + + //project check auth success, processs definition online + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition()); + Mockito.when(processDefineMapper.updateById(getProcessDefinition())).thenReturn(1); + Map onlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", + 46, 
ReleaseState.ONLINE.getCode()); + Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS)); + + //process definition offline + List schedules = new ArrayList<>(); + Schedule schedule = getSchedule(); + schedules.add(schedule); + Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules); + Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1); + Map offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", + 46, ReleaseState.OFFLINE.getCode()); + Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS)); + + //release error code + Map failRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1", + 46, 2); + Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS)); + } + + @Test + public void testVerifyProcessDefinitionName() { + String projectName = "project_test1"; + Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); + + Project project = getProject(projectName); + User loginUser = new User(); + loginUser.setId(-1); + loginUser.setUserType(UserType.GENERAL_USER); + + //project check auth fail + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + Mockito.when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + Map map = processDefinitionService.verifyProccessDefinitionName(loginUser, + "project_test1", "test_pdf"); Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); + + //project check auth success, process not exist + putMsg(result, Status.SUCCESS, projectName); + Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(null); + Map processNotExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser, + "project_test1", "test_pdf"); + Assert.assertEquals(Status.SUCCESS, 
processNotExistRes.get(Constants.STATUS)); + + //process exist + Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_pdf")).thenReturn(getProcessDefinition()); + Map processExistRes = processDefinitionService.verifyProccessDefinitionName(loginUser, + "project_test1", "test_pdf"); + Assert.assertEquals(Status.PROCESS_INSTANCE_EXIST, processExistRes.get(Constants.STATUS)); + } + + @Test + public void testCheckProcessNodeList() { + + Map dataNotValidRes = processDefinitionService.checkProcessNodeList(null, ""); + Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS)); + + //task not empty + String processDefinitionJson = shellJson; + ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); + assert processData != null; + Map taskEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson); + Assert.assertEquals(Status.SUCCESS, taskEmptyRes.get(Constants.STATUS)); + + //task empty + processData.setTasks(null); + Map taskNotEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson); + Assert.assertEquals(Status.DATA_IS_NULL, taskNotEmptyRes.get(Constants.STATUS)); + + //json abnormal + String abnormalJson = processDefinitionJson.replaceAll("SHELL",""); + processData = JSONUtils.parseObject(abnormalJson, ProcessData.class); + Map abnormalTaskRes = processDefinitionService.checkProcessNodeList(processData, abnormalJson); + Assert.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID, abnormalTaskRes.get(Constants.STATUS)); + } + + @Test + public void testGetTaskNodeListByDefinitionId() throws Exception { + //process definition not exist + Mockito.when(processDefineMapper.selectById(46)).thenReturn(null); + Map processDefinitionNullRes = processDefinitionService.getTaskNodeListByDefinitionId(46); + Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS)); + + //process data null + 
ProcessDefinition processDefinition = getProcessDefinition(); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Map successRes = processDefinitionService.getTaskNodeListByDefinitionId(46); + Assert.assertEquals(Status.DATA_IS_NOT_VALID, successRes.get(Constants.STATUS)); + + //success + processDefinition.setProcessDefinitionJson(shellJson); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Map dataNotValidRes = processDefinitionService.getTaskNodeListByDefinitionId(46); + Assert.assertEquals(Status.SUCCESS, dataNotValidRes.get(Constants.STATUS)); + } + + @Test + public void testGetTaskNodeListByDefinitionIdList() throws Exception { + //process definition not exist + String defineIdList = "46"; + Integer[] idArray = {46}; + Mockito.when(processDefineMapper.queryDefinitionListByIdList(idArray)).thenReturn(null); + Map processNotExistRes = processDefinitionService.getTaskNodeListByDefinitionIdList(defineIdList); + Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processNotExistRes.get(Constants.STATUS)); + + //process definition exist + ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setProcessDefinitionJson(shellJson); + List processDefinitionList = new ArrayList<>(); + processDefinitionList.add(processDefinition); + Mockito.when(processDefineMapper.queryDefinitionListByIdList(idArray)).thenReturn(processDefinitionList); + Map successRes = processDefinitionService.getTaskNodeListByDefinitionIdList(defineIdList); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testQueryProccessDefinitionAllByProjectId() { + int projectId = 1; + ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setProcessDefinitionJson(shellJson); + List processDefinitionList = new ArrayList<>(); + processDefinitionList.add(processDefinition); + 
Mockito.when(processDefineMapper.queryAllDefinitionList(projectId)).thenReturn(processDefinitionList); + Map successRes = processDefinitionService.queryProccessDefinitionAllByProjectId(projectId); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testViewTree() throws Exception { + //process definition not exist + ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setProcessDefinitionJson(shellJson); + Mockito.when(processDefineMapper.selectById(46)).thenReturn(null); + Map processDefinitionNullRes = processDefinitionService.viewTree(46, 10); + Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS)); + + List processInstanceList = new ArrayList<>(); + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setName("test_instance"); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + processInstance.setHost("192.168.xx.xx"); + processInstance.setStartTime(new Date()); + processInstance.setEndTime(new Date()); + processInstanceList.add(processInstance); + + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setStartTime(new Date()); + taskInstance.setEndTime(new Date()); + taskInstance.setTaskType("SHELL"); + taskInstance.setId(1); + taskInstance.setName("test_task_instance"); + taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + taskInstance.setHost("192.168.xx.xx"); + + //task instance not exist + Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition); + Mockito.when(processInstanceMapper.queryByProcessDefineId(46, 10)).thenReturn(processInstanceList); + Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(null); + Map taskNullRes = processDefinitionService.viewTree(46, 10); + Assert.assertEquals(Status.SUCCESS, taskNullRes.get(Constants.STATUS)); + + //task instance exist + 
Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(taskInstance); + Map taskNotNuLLRes = processDefinitionService.viewTree(46, 10); + Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS)); } /** @@ -190,13 +493,9 @@ public class ProcessDefinitionServiceTest { @Test public void testAddExportTaskNodeSpecialParam() throws JSONException { - String shellJson = "{\"globalParams\":[],\"tasks\":[{\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + - "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + - "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + - "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + - "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; + String shellData = shellJson; - String resultStr = processDefinitionService.addExportTaskNodeSpecialParam(shellJson); + String resultStr = processDefinitionService.addExportTaskNodeSpecialParam(shellData); JSONAssert.assertEquals(shellJson, resultStr, false); } @@ -440,7 +739,7 @@ public class ProcessDefinitionServiceTest { Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName)); Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); - Mockito.when(processDao.findProcessDefineById(1)).thenReturn(getProcessDefinition()); + Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition()); Map updateResult = processDefinitionService.updateProcessDefinition(loginUser, projectName, 1, "test", sqlDependentJson, "", "", ""); @@ -466,7 +765,7 @@ public class ProcessDefinitionServiceTest { private ProcessDefinition getProcessDefinition(){ ProcessDefinition processDefinition = new ProcessDefinition(); processDefinition.setId(46); - 
processDefinition.setName("testProject"); + processDefinition.setName("test_pdf"); processDefinition.setProjectId(2); processDefinition.setTenantId(1); processDefinition.setDescription(""); diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java index e0a52bb3a7..959dca21bf 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessInstanceServiceTest.java @@ -16,68 +16,514 @@ */ package org.apache.dolphinscheduler.api.service; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.common.Constants; -import org.apache.dolphinscheduler.common.enums.DependResult; -import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.enums.UserType; -import org.apache.dolphinscheduler.dao.entity.User; -import com.alibaba.fastjson.JSON; +import org.apache.dolphinscheduler.common.enums.*; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.*; +import org.apache.dolphinscheduler.dao.mapper.*; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; 
-import org.springframework.test.context.junit4.SpringRunner; import java.io.IOException; -import java.util.Map; +import java.text.MessageFormat; +import java.text.ParseException; +import java.util.*; -@RunWith(SpringRunner.class) +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.Silent.class) @SpringBootTest(classes = ApiApplicationServer.class) public class ProcessInstanceServiceTest { private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class); - @Autowired + @InjectMocks ProcessInstanceService processInstanceService; + @Mock + ProjectMapper projectMapper; + + @Mock + ProjectService projectService; + + @Mock + ProcessService processService; + + @Mock + ProcessInstanceMapper processInstanceMapper; + + @Mock + ProcessDefinitionMapper processDefineMapper; + + @Mock + ProcessDefinitionService processDefinitionService; + + @Mock + ExecutorService execService; + + @Mock + TaskInstanceMapper taskInstanceMapper; + + @Mock + LoggerService loggerService; + + @Mock + WorkerGroupMapper workerGroupMapper; + + @Mock + UsersService usersService; + + private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; + + @Test - public void viewVariables() { - try { - Map map = processInstanceService.viewVariables(-1); - Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); - }catch (Exception e){ - logger.error(e.getMessage(), e); - } + public void 
testQueryProcessInstanceList() { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.queryProcessInstanceList(loginUser, projectName, 46, "2020-01-01 00:00:00", + "2020-01-02 00:00:00", "", "test_user", ExecutionStatus.SUBMITTED_SUCCESS, + "192.168.xx.xx", 1, 10); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //project auth success + putMsg(result, Status.SUCCESS, projectName); + Project project = getProject(projectName); + Date start = DateUtils.getScheduleDate("2020-01-01 00:00:00"); + Date end = DateUtils.getScheduleDate("2020-01-02 00:00:00"); + ProcessInstance processInstance = getProcessInstance(); + List processInstanceList = new ArrayList<>(); + Page pageReturn = new Page<>(1, 10); + processInstanceList.add(processInstance); + pageReturn.setRecords(processInstanceList); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); + when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); + when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(-1), Mockito.any(), + eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); + when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser); + Map successRes = processInstanceService.queryProcessInstanceList(loginUser, projectName, 1, "2020-01-01 00:00:00", + "2020-01-02 00:00:00", "", loginUser.getUserName(), 
ExecutionStatus.SUBMITTED_SUCCESS, + "192.168.xx.xx", 1, 10); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + + //executor null + when(usersService.queryUser(loginUser.getId())).thenReturn(null); + when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(-1); + Map executorExistRes = processInstanceService.queryProcessInstanceList(loginUser, projectName, 1, "2020-01-01 00:00:00", + "2020-01-02 00:00:00", "", "admin", ExecutionStatus.SUBMITTED_SUCCESS, + "192.168.xx.xx", 1, 10); + Assert.assertEquals(Status.SUCCESS, executorExistRes.get(Constants.STATUS)); + + //executor name empty + when(processInstanceMapper.queryProcessInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(0), Mockito.any(), + eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); + Map executorEmptyRes = processInstanceService.queryProcessInstanceList(loginUser, projectName, 1, "2020-01-01 00:00:00", + "2020-01-02 00:00:00", "", "", ExecutionStatus.SUBMITTED_SUCCESS, + "192.168.xx.xx", 1, 10); + Assert.assertEquals(Status.SUCCESS, executorEmptyRes.get(Constants.STATUS)); + } @Test - public void testDependResult(){ + public void testQueryProcessInstanceById() { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //project auth success + ProcessInstance processInstance = getProcessInstance(); + processInstance.setWorkerGroupId(-1); + processInstance.setReceivers("xxx@qq.com"); + processInstance.setReceiversCc("xxx@qq.com"); + 
processInstance.setProcessDefinitionId(46); + putMsg(result, Status.SUCCESS, projectName); + Project project = getProject(projectName); + ProcessDefinition processDefinition = getProcessDefinition(); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findProcessInstanceDetailById(processInstance.getId())).thenReturn(processInstance); + when(processService.findProcessDefineById(processInstance.getProcessDefinitionId())).thenReturn(processDefinition); + Map successRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + + //worker group null + processInstance.setWorkerGroupId(1); + when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null); + Map workerNullRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); + Assert.assertEquals(Status.SUCCESS, workerNullRes.get(Constants.STATUS)); + + //worker group exist + WorkerGroup workerGroup = getWorkGroup(); + when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(workerGroup); + processInstance.setWorkerGroupId(1); + when(workerGroupMapper.selectById(processInstance.getWorkerGroupId())).thenReturn(null); + Map workerExistRes = processInstanceService.queryProcessInstanceById(loginUser, projectName, 1); + Assert.assertEquals(Status.SUCCESS, workerExistRes.get(Constants.STATUS)); + } + + @Test + public void testQueryTaskListByProcessId() throws IOException { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map 
proejctAuthFailRes = processInstanceService.queryTaskListByProcessId(loginUser, projectName, 1); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //project auth success + putMsg(result, Status.SUCCESS, projectName); + Project project = getProject(projectName); + ProcessInstance processInstance = getProcessInstance(); + processInstance.setState(ExecutionStatus.SUCCESS); + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setTaskType(TaskType.SHELL.getDescp()); + List taskInstanceList = new ArrayList<>(); + taskInstanceList.add(taskInstance); + Result res = new Result(); + res.setCode(Status.SUCCESS.ordinal()); + res.setData("xxx"); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findProcessInstanceDetailById(processInstance.getId())).thenReturn(processInstance); + when(processService.findValidTaskListByProcessId(processInstance.getId())).thenReturn(taskInstanceList); + when(loggerService.queryLog(taskInstance.getId(), 0, 4098)).thenReturn(res); + Map successRes = processInstanceService.queryTaskListByProcessId(loginUser, projectName, 1); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + + @Test + public void testParseLogForDependentResult() { String logString = "[INFO] 2019-03-19 17:11:08.475 org.apache.dolphinscheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" + "[INFO] 2019-03-19 17:11:08.476 org.apache.dolphinscheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" + "[root@node2 current]# "; try { Map resultMap = processInstanceService.parseLogForDependentResult(logString); - Assert.assertEquals(resultMap.size() , 1); + Assert.assertEquals(1, resultMap.size()); } catch (IOException e) { } } @Test - 
public void queryProcessInstanceList() throws Exception { + public void testQuerySubProcessInstanceByTaskId() { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //task null + Project project = getProject(projectName); + putMsg(result, Status.SUCCESS, projectName); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findTaskInstanceById(1)).thenReturn(null); + Map taskNullRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_EXISTS, taskNullRes.get(Constants.STATUS)); + //task not sub process + TaskInstance taskInstance = getTaskInstance(); + taskInstance.setTaskType(TaskType.HTTP.getDescp()); + taskInstance.setProcessInstanceId(1); + when(processService.findTaskInstanceById(1)).thenReturn(taskInstance); + Map notSubprocessRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); + Assert.assertEquals(Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, notSubprocessRes.get(Constants.STATUS)); + + //sub process not exist + TaskInstance subTask = getTaskInstance(); + subTask.setTaskType(TaskType.SUB_PROCESS.getDescp()); + subTask.setProcessInstanceId(1); + when(processService.findTaskInstanceById(subTask.getId())).thenReturn(subTask); + when(processService.findSubProcessInstance(subTask.getProcessInstanceId(), 
subTask.getId())).thenReturn(null); + Map subprocessNotExistRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); + Assert.assertEquals(Status.SUB_PROCESS_INSTANCE_NOT_EXIST, subprocessNotExistRes.get(Constants.STATUS)); + + //sub process exist + ProcessInstance processInstance = getProcessInstance(); + when(processService.findSubProcessInstance(taskInstance.getProcessInstanceId(), taskInstance.getId())).thenReturn(processInstance); + Map subprocessExistRes = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, 1); + Assert.assertEquals(Status.SUCCESS, subprocessExistRes.get(Constants.STATUS)); + } + + @Test + public void testUpdateProcessInstance() throws ParseException { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, + shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //process instance null + Project project = getProject(projectName); + putMsg(result, Status.SUCCESS, projectName); + ProcessInstance processInstance = getProcessInstance(); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findProcessInstanceDetailById(1)).thenReturn(null); + Map processInstanceNullRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, + shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, 
processInstanceNullRes.get(Constants.STATUS)); + + //process instance not finish + when(processService.findProcessInstanceDetailById(1)).thenReturn(processInstance); + processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + Map processInstanceNotFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, + shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); + Assert.assertEquals(Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstanceNotFinishRes.get(Constants.STATUS)); + + //process instance finish + processInstance.setState(ExecutionStatus.SUCCESS); + processInstance.setTimeout(3000); + processInstance.setCommandType(CommandType.STOP); + ProcessDefinition processDefinition = getProcessDefinition(); + processDefinition.setUserId(1); + Tenant tenant = new Tenant(); + tenant.setId(1); + tenant.setTenantCode("test_tenant"); + when(processService.findProcessDefineById(processInstance.getProcessDefinitionId())).thenReturn(processDefinition); + when(processService.getTenantForProcess(Mockito.anyInt(), Mockito.anyInt())).thenReturn(tenant); + when(processService.updateProcessInstance(processInstance)).thenReturn(1); + when(processDefinitionService.checkProcessNodeList(Mockito.any(), eq(shellJson))).thenReturn(result); + Map processInstanceFinishRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, + shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); + Assert.assertEquals(Status.UPDATE_PROCESS_INSTANCE_ERROR, processInstanceFinishRes.get(Constants.STATUS)); + + //success + when(processDefineMapper.updateById(processDefinition)).thenReturn(1); + Map successRes = processInstanceService.updateProcessInstance(loginUser, projectName, 1, + shellJson, "2020-02-21 00:00:00", true, Flag.YES, "", ""); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testQueryParentInstanceBySubId() { + String projectName = "project_test1"; + User loginUser = getAdminUser(); 
+ Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //process instance null + Project project = getProject(projectName); + putMsg(result, Status.SUCCESS, projectName); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findProcessInstanceDetailById(1)).thenReturn(null); + Map processInstanceNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS)); + + //not sub process + ProcessInstance processInstance = getProcessInstance(); + processInstance.setIsSubProcess(Flag.NO); + when(processService.findProcessInstanceDetailById(1)).thenReturn(processInstance); + Map notSubProcessRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, notSubProcessRes.get(Constants.STATUS)); + + //sub process + processInstance.setIsSubProcess(Flag.YES); + when(processService.findParentProcessInstance(1)).thenReturn(null); + Map subProcessNullRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); + Assert.assertEquals(Status.SUB_PROCESS_INSTANCE_NOT_EXIST, subProcessNullRes.get(Constants.STATUS)); + + //success + when(processService.findParentProcessInstance(1)).thenReturn(processInstance); + Map successRes = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, 1); + 
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testDeleteProcessInstanceById() { + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser, null, projectName)).thenReturn(result); + Map proejctAuthFailRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any()); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + //process instance null + Project project = getProject(projectName); + putMsg(result, Status.SUCCESS, projectName); + when(projectMapper.queryByName(projectName)).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result); + when(processService.findProcessInstanceDetailById(1)).thenReturn(null); + Map processInstanceNullRes = processInstanceService.deleteProcessInstanceById(loginUser, projectName, 1, Mockito.any()); + Assert.assertEquals(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceNullRes.get(Constants.STATUS)); + } + + @Test + public void testViewVariables() throws Exception { + //process instance not null + ProcessInstance processInstance = getProcessInstance(); + processInstance.setCommandType(CommandType.SCHEDULER); + processInstance.setScheduleTime(new Date()); + processInstance.setProcessInstanceJson(shellJson); + processInstance.setGlobalParams(""); + when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance); + Map successRes = processInstanceService.viewVariables(1); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + @Test + public void testViewGantt() throws Exception { + ProcessInstance processInstance = getProcessInstance(); + 
processInstance.setProcessInstanceJson(shellJson); + TaskInstance taskInstance = getTaskInstance(); + taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); + taskInstance.setStartTime(new Date()); + when(processInstanceMapper.queryDetailById(1)).thenReturn(processInstance); + when(taskInstanceMapper.queryByInstanceIdAndName(Mockito.anyInt(), Mockito.any())).thenReturn(taskInstance); + Map successRes = processInstanceService.viewGantt(1); + Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + } + + /** + * get Mock Admin User + * + * @return admin user + */ + private User getAdminUser() { User loginUser = new User(); - loginUser.setId(27); + loginUser.setId(-1); + loginUser.setUserName("admin"); loginUser.setUserType(UserType.GENERAL_USER); - Map map = processInstanceService.queryProcessInstanceList(loginUser, "project_test1", 0, "", "", "", ExecutionStatus.FAILURE, "", 1, 5); + return loginUser; + } + + /** + * get mock Project + * + * @param projectName projectName + * @return Project + */ + private Project getProject(String projectName) { + Project project = new Project(); + project.setId(1); + project.setName(projectName); + project.setUserId(1); + return project; + } + + /** + * get Mock process instance + * + * @return process instance + */ + private ProcessInstance getProcessInstance() { + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setName("test_process_instance"); + processInstance.setStartTime(new Date()); + processInstance.setEndTime(new Date()); + return processInstance; + } + + /** + * get mock processDefinition + * + * @return ProcessDefinition + */ + private ProcessDefinition getProcessDefinition() { + ProcessDefinition processDefinition = new ProcessDefinition(); + processDefinition.setId(46); + processDefinition.setName("test_pdf"); + processDefinition.setProjectId(2); + processDefinition.setTenantId(1); + processDefinition.setDescription(""); + return 
processDefinition; + } + + /** + * get Mock worker group + * + * @return worker group + */ + private WorkerGroup getWorkGroup() { + WorkerGroup workerGroup = new WorkerGroup(); + workerGroup.setId(1); + workerGroup.setName("test_workergroup"); + return workerGroup; + } - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - logger.info(JSON.toJSONString(map)); + /** + * get Mock task instance + * + * @return task instance + */ + private TaskInstance getTaskInstance() { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setName("test_task_instance"); + taskInstance.setStartTime(new Date()); + taskInstance.setEndTime(new Date()); + taskInstance.setExecutorId(-1); + return taskInstance; } + + private void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java index 931f2cea37..ebb6139577 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/TaskInstanceServiceTest.java @@ -16,47 +16,177 @@ */ package org.apache.dolphinscheduler.api.service; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import org.apache.dolphinscheduler.api.ApiApplicationServer; import org.apache.dolphinscheduler.api.enums.Status; -import org.apache.dolphinscheduler.api.utils.PageInfo; import org.apache.dolphinscheduler.common.Constants; +import 
org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.Project; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; +import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.test.context.junit4.SpringRunner; -import java.util.Map; +import java.text.MessageFormat; +import java.util.*; -@RunWith(SpringRunner.class) +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.Silent.class) @SpringBootTest(classes = ApiApplicationServer.class) public class TaskInstanceServiceTest { private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceTest.class); - @Autowired + @InjectMocks private TaskInstanceService taskInstanceService; + @Mock + ProjectMapper projectMapper; + + @Mock + ProjectService projectService; + + @Mock + ProcessService processService; + + @Mock + TaskInstanceMapper taskInstanceMapper; + + @Mock + ProcessInstanceService processInstanceService; + + @Mock + UsersService usersService; + @Test public void queryTaskListPaging(){ + String projectName = "project_test1"; + User loginUser = getAdminUser(); + Map result = new HashMap<>(5); + 
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); + + //project auth fail + when(projectMapper.queryByName(projectName)).thenReturn(null); + when(projectService.checkProjectAndAuth(loginUser,null,projectName)).thenReturn(result); + Map proejctAuthFailRes = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", + "test_user", "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); + Assert.assertEquals(Status.PROJECT_NOT_FOUNT, proejctAuthFailRes.get(Constants.STATUS)); + + + //project + putMsg(result, Status.SUCCESS, projectName); + Project project = getProject(projectName); + Date start = DateUtils.getScheduleDate("2020-01-01 00:00:00"); + Date end = DateUtils.getScheduleDate("2020-01-02 00:00:00"); + ProcessInstance processInstance = getProcessInstance(); + TaskInstance taskInstance = getTaskInstance(); + List taskInstanceList = new ArrayList<>(); + Page pageReturn = new Page<>(1, 10); + taskInstanceList.add(taskInstance); + pageReturn.setRecords(taskInstanceList); + when(projectMapper.queryByName(Mockito.anyString())).thenReturn(project); + when(projectService.checkProjectAndAuth(loginUser,project,projectName)).thenReturn(result); + when(usersService.queryUser(loginUser.getId())).thenReturn(loginUser); + when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(loginUser.getId()); + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + eq(0), Mockito.any(), eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); + when(usersService.queryUser(processInstance.getExecutorId())).thenReturn(loginUser); + when(processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId())).thenReturn(processInstance); + + Map successRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + 
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS)); + + //executor name empty + when(taskInstanceMapper.queryTaskInstanceListPaging(Mockito.any(Page.class), eq(project.getId()), eq(1), eq(""), eq(""), + eq(0), Mockito.any(), eq("192.168.xx.xx"), eq(start), eq(end))).thenReturn(pageReturn); + Map executorEmptyRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + Assert.assertEquals(Status.SUCCESS, executorEmptyRes.get(Constants.STATUS)); + + //executor null + when(usersService.queryUser(loginUser.getId())).thenReturn(null); + when(usersService.getUserIdByName(loginUser.getUserName())).thenReturn(-1); + Map executorNullRes = taskInstanceService.queryTaskListPaging(loginUser, projectName, 1, "", + "test_user", "2020-01-01 00:00:00", "2020-01-02 00:00:00", "", ExecutionStatus.SUCCESS, "192.168.xx.xx", 1, 20); + Assert.assertEquals(Status.SUCCESS, executorNullRes.get(Constants.STATUS)); + } + + /** + * get Mock Admin User + * @return admin user + */ + private User getAdminUser() { User loginUser = new User(); loginUser.setId(-1); + loginUser.setUserName("admin"); loginUser.setUserType(UserType.GENERAL_USER); + return loginUser; + } - Map map = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "", - "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20); - Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS)); - PageInfo pageInfo = (PageInfo) map.get("data"); + /** + * get mock Project + * @param projectName projectName + * @return Project + */ + private Project getProject(String projectName){ + Project project = new Project(); + project.setId(1); + project.setName(projectName); + project.setUserId(1); + return project; + } - if(pageInfo != null){ - logger.info(pageInfo.getLists().toString()); - } + /** + * get Mock process instance + * @return process instance + */ + 
private ProcessInstance getProcessInstance() { + ProcessInstance processInstance = new ProcessInstance(); + processInstance.setId(1); + processInstance.setName("test_process_instance"); + processInstance.setStartTime(new Date()); + processInstance.setEndTime(new Date()); + processInstance.setExecutorId(-1); + return processInstance; + } + /** + * get Mock task instance + * @return task instance + */ + private TaskInstance getTaskInstance() { + TaskInstance taskInstance = new TaskInstance(); + taskInstance.setId(1); + taskInstance.setName("test_task_instance"); + taskInstance.setStartTime(new Date()); + taskInstance.setEndTime(new Date()); + taskInstance.setExecutorId(-1); + return taskInstance; + } + private void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java index 30aabe93f2..efe9022ad7 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java @@ -148,6 +148,28 @@ public class UsersServiceTest { Assert.assertTrue(queryUser!=null); } + @Test + public void testGetUserIdByName() { + User user = new User(); + user.setId(1); + user.setUserType(UserType.ADMIN_USER); + user.setUserName("test_user"); + + //user name null + int userId = usersService.getUserIdByName(""); + Assert.assertEquals(0, userId); + + //user not exist + when(usersService.queryUser(user.getUserName())).thenReturn(null); + int userNotExistId = 
usersService.getUserIdByName(user.getUserName()); + Assert.assertEquals(-1, userNotExistId); + + //user exist + when(usersService.queryUser(user.getUserName())).thenReturn(user); + int userExistId = usersService.getUserIdByName(user.getUserName()); + Assert.assertEquals(user.getId(), userExistId); + } + @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java index b0b4319fb4..24a0ed31d6 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/CheckUtilsTest.java @@ -17,8 +17,26 @@ package org.apache.dolphinscheduler.api.utils; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.ProgramType; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.datax.DataxParameters; +import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; +import org.apache.dolphinscheduler.common.task.http.HttpParameters; +import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; +import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; +import org.apache.dolphinscheduler.common.task.python.PythonParameters; +import org.apache.dolphinscheduler.common.task.shell.ShellParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; +import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import 
org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -78,6 +96,14 @@ public class CheckUtilsTest { } + @Test + public void testCheckOtherParams() { + assertFalse(CheckUtils.checkOtherParams(null)); + assertFalse(CheckUtils.checkOtherParams("")); + assertTrue(CheckUtils.checkOtherParams("xxx")); + assertFalse(CheckUtils.checkOtherParams("{}")); + assertFalse(CheckUtils.checkOtherParams("{\"key1\":111}")); + } /** * check passwd */ @@ -106,5 +132,90 @@ public class CheckUtilsTest { assertTrue(CheckUtils.checkPhone("17362537263")); } + @Test + public void testCheckTaskNodeParameters() { + + assertFalse(CheckUtils.checkTaskNodeParameters(null,null)); + assertFalse(CheckUtils.checkTaskNodeParameters(null,"unKnown")); + assertFalse(CheckUtils.checkTaskNodeParameters("unKnown","unKnown")); + assertFalse(CheckUtils.checkTaskNodeParameters("unKnown",null)); + + // sub SubProcessParameters + SubProcessParameters subProcessParameters = new SubProcessParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(subProcessParameters), TaskType.SUB_PROCESS.toString())); + subProcessParameters.setProcessDefinitionId(1234); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(subProcessParameters), TaskType.SUB_PROCESS.toString())); + + // ShellParameters + ShellParameters shellParameters = new ShellParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(shellParameters), TaskType.SHELL.toString())); + shellParameters.setRawScript(""); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(shellParameters), TaskType.SHELL.toString())); + shellParameters.setRawScript("sss"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(shellParameters), 
TaskType.SHELL.toString())); + + // ProcedureParameters + ProcedureParameters procedureParameters = new ProcedureParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(procedureParameters), TaskType.PROCEDURE.toString())); + procedureParameters.setDatasource(1); + procedureParameters.setType("xx"); + procedureParameters.setMethod("yy"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(procedureParameters), TaskType.PROCEDURE.toString())); + + // SqlParameters + SqlParameters sqlParameters = new SqlParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(sqlParameters), TaskType.SQL.toString())); + sqlParameters.setDatasource(1); + sqlParameters.setType("xx"); + sqlParameters.setSql("yy"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(sqlParameters), TaskType.SQL.toString())); + + // MapreduceParameters + MapreduceParameters mapreduceParameters = new MapreduceParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(mapreduceParameters), TaskType.MR.toString())); + mapreduceParameters.setMainJar(new ResourceInfo()); + mapreduceParameters.setProgramType(ProgramType.JAVA); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(mapreduceParameters), TaskType.MR.toString())); + + // SparkParameters + SparkParameters sparkParameters = new SparkParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(sparkParameters), TaskType.SPARK.toString())); + sparkParameters.setMainJar(new ResourceInfo()); + sparkParameters.setProgramType(ProgramType.SCALA); + sparkParameters.setSparkVersion("1.1.1"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(sparkParameters), TaskType.SPARK.toString())); + + // PythonParameters + PythonParameters pythonParameters = new PythonParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(pythonParameters), 
TaskType.PYTHON.toString())); + pythonParameters.setRawScript("ss"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(pythonParameters), TaskType.PYTHON.toString())); + + // DependentParameters + DependentParameters dependentParameters = new DependentParameters(); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(dependentParameters), TaskType.DEPENDENT.toString())); + + // FlinkParameters + FlinkParameters flinkParameters = new FlinkParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(flinkParameters), TaskType.FLINK.toString())); + flinkParameters.setMainJar(new ResourceInfo()); + flinkParameters.setProgramType(ProgramType.JAVA); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(flinkParameters), TaskType.FLINK.toString())); + + // HTTP + HttpParameters httpParameters = new HttpParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(httpParameters), TaskType.HTTP.toString())); + httpParameters.setUrl("httpUrl"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(httpParameters), TaskType.HTTP.toString())); + + // DataxParameters + DataxParameters dataxParameters = new DataxParameters(); + assertFalse(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(dataxParameters), TaskType.DATAX.toString())); + dataxParameters.setDataSource(111); + dataxParameters.setDataTarget(333); + dataxParameters.setSql("sql"); + dataxParameters.setTargetTable("tar"); + assertTrue(CheckUtils.checkTaskNodeParameters(JSONUtils.toJsonString(dataxParameters), TaskType.DATAX.toString())); + } } \ No newline at end of file diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java new file mode 100644 index 0000000000..e8adc6ca9c --- /dev/null +++ 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/utils/FourLetterWordMainTest.java @@ -0,0 +1,218 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.utils; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.powermock.api.mockito.PowerMockito; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.net.SocketTimeoutException; + +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.when; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({FourLetterWordMain.class, Socket.class}) +public class FourLetterWordMainTest { + + private static final Logger 
logger = + LoggerFactory.getLogger(FourLetterWordMainTest.class); + private static final String NEW_LINE = "\n"; + + @InjectMocks + private FourLetterWordMain fourLetterWord; + @Mock + private Socket socket; + @Mock + private InetSocketAddress socketAddress; + + private final String localHost = "127.0.0.1"; + private final int zkPort = 2181; + private ByteArrayOutputStream byteArrayOutputStream; + private InputStream inputStream; + + private String cmd; + private String testResult; + private String expectedStr; + + @Before + public void setUp() { + // mock socket class + PowerMockito.mockStatic(Socket.class); + try { + PowerMockito.whenNew(Socket.class).withNoArguments() + .thenReturn(socket); + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * None mock test method, just to check zookeeper status. + * Comment @Before notation to run this test. + * Zookeeper status will be as: + * Zookeeper version: 3.4.11 ... + * Received: 6739707 + * Sent: 6739773 + * Connections: 20 + * Outstanding: 0 + * Zxid: 0x9ba + * Mode: standalone + * Node count: 263 + */ + public void testCmd() { + // "192.168.64.11" + // final String zkHost = localHost; + final String zkHost = "192.168.64.11"; + cmd = "srvr"; + try { + // Change localhost to right zk host ip. 
+ final String result = FourLetterWordMain + .send4LetterWord(zkHost, zkPort, cmd); + logger.info(cmd + ": " + result + "<<<"); + } catch (Exception e) { + e.printStackTrace(); + } + } + + @Test + public void testEmptyCmd() { + cmd = ""; + expectedStr = ""; + testSend4LetterWord(cmd, expectedStr); + } + + @Test + public void testNullCmd() { + cmd = null; + + try { + testResult = FourLetterWordMain + .send4LetterWord(localHost, zkPort, cmd); + } catch (Exception e) { + testResult = e.getMessage(); + } + + logger.info("testNullCmd result: " + testResult); + assertEquals("cmd must not be null", testResult); + } + + @Test + public void testNullSocketOutput() { + cmd = "test null socket output"; + expectedStr = null; + testSend4LetterWord(cmd, expectedStr); + } + + @Test + public void testOneLineOutput() { + cmd = "line 1"; + + // line end without \n + expectedStr = "line 1" + NEW_LINE; + testSend4LetterWord(cmd, expectedStr); + + // line end with \n + expectedStr = "line 1\n" + NEW_LINE; + testSend4LetterWord(cmd, expectedStr); + } + + @Test + public void testMultiline() { + cmd = "line 1 " + NEW_LINE + + "line 2 " + NEW_LINE + + "line 3 " + NEW_LINE; + + expectedStr = cmd + NEW_LINE; + testSend4LetterWord(cmd, expectedStr); + + expectedStr = NEW_LINE + NEW_LINE + NEW_LINE; + testSend4LetterWord(cmd, expectedStr); + } + + @Test + public void testSocketTimeOut() { + cmd = "test socket time out"; + + try { + doThrow(new SocketTimeoutException()) + .when(socket) + .connect(any(InetSocketAddress.class), Mockito.anyInt()); + testResult = FourLetterWordMain + .send4LetterWord(localHost, zkPort, cmd); + } catch (Exception e) { + testResult = e.getMessage(); + } + + logger.info("testSocketTimeOut result: " + testResult); + assertEquals( + "Exception while executing four letter word: " + cmd, + testResult + ); + } + + /** + * Test FourLetterWordMain.send4LetterWord() with input cmd and output + * string. 
+ * @param cmd + * @param expectedStr + */ + public void testSend4LetterWord(String cmd, String expectedStr) { + try { + final byte[] strBytes = cmd.getBytes(); + byteArrayOutputStream = new ByteArrayOutputStream(strBytes.length); + byteArrayOutputStream.write(strBytes, 0, strBytes.length); + + inputStream = new ByteArrayInputStream(expectedStr.getBytes()); + + when(socket.getOutputStream()) + .thenReturn(byteArrayOutputStream); + when(socket.getInputStream()).thenReturn(inputStream); + + final String result = FourLetterWordMain + .send4LetterWord(localHost, zkPort, cmd); + logger.info( + "testSend4LetterWord: " + + "cmd: " + cmd + + ", expectedStr: " + expectedStr + + ", result: " + result + "." + ); + Assert.assertEquals(expectedStr, result); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/dolphinscheduler-common/pom.xml b/dolphinscheduler-common/pom.xml index bd2448eee7..e7789f724b 100644 --- a/dolphinscheduler-common/pom.xml +++ b/dolphinscheduler-common/pom.xml @@ -85,21 +85,7 @@ com.fasterxml.jackson.core jackson-databind - - org.apache.curator - curator-client - ${curator.version} - - - log4j-1.2-api - org.apache.logging.log4j - - - io.netty - netty - - - + org.apache.commons commons-collections4 @@ -260,11 +246,6 @@ - - javax.servlet - javax.servlet-api - - org.apache.hadoop hadoop-hdfs @@ -548,6 +529,10 @@ log4j-web org.apache.logging.log4j + + jasper-compiler + tomcat + @@ -601,11 +586,6 @@ compile - - org.springframework - spring-context - - org.codehaus.janino janino diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java index 9cb1a5821e..73125f4926 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java @@ -25,7 +25,9 @@ import java.util.regex.Pattern; * 
Constants */ public final class Constants { - + private Constants() { + throw new IllegalStateException("Constants class"); + } /** * common properties path */ @@ -124,49 +126,41 @@ public final class Constants { /** * MasterServer directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "zookeeper.dolphinscheduler.masters"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/masters"; /** * WorkerServer directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "zookeeper.dolphinscheduler.workers"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/workers"; /** * all servers directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "zookeeper.dolphinscheduler.dead.servers"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers"; /** * MasterServer lock directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "zookeeper.dolphinscheduler.lock.masters"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters"; /** * WorkerServer lock directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_WORKERS = "zookeeper.dolphinscheduler.lock.workers"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_WORKERS = "/lock/workers"; /** * MasterServer failover directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "zookeeper.dolphinscheduler.lock.failover.masters"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters"; /** * WorkerServer failover directory registered in zookeeper */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "zookeeper.dolphinscheduler.lock.failover.workers"; public static final 
String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers"; /** * MasterServer startup failover runing and fault tolerance process */ - //public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "zookeeper.dolphinscheduler.lock.failover.startup.masters"; public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters"; /** @@ -354,87 +348,87 @@ public final class Constants { /** * heartbeat threads number */ - public static final int defaulWorkerHeartbeatThreadNum = 1; + public static final int DEFAUL_WORKER_HEARTBEAT_THREAD_NUM = 1; /** * heartbeat interval */ - public static final int defaultWorkerHeartbeatInterval = 60; + public static final int DEFAULT_WORKER_HEARTBEAT_INTERVAL = 60; /** * worker fetch task number */ - public static final int defaultWorkerFetchTaskNum = 1; + public static final int DEFAULT_WORKER_FETCH_TASK_NUM = 1; /** * worker execute threads number */ - public static final int defaultWorkerExecThreadNum = 10; + public static final int DEFAULT_WORKER_EXEC_THREAD_NUM = 10; /** * master cpu load */ - public static final int defaultMasterCpuLoad = Runtime.getRuntime().availableProcessors() * 2; + public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2; /** * master reserved memory */ - public static final double defaultMasterReservedMemory = OSUtils.totalMemorySize() / 10; + public static final double DEFAULT_MASTER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10; /** * worker cpu load */ - public static final int defaultWorkerCpuLoad = Runtime.getRuntime().availableProcessors() * 2; + public static final int DEFAULT_WORKER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2; /** * worker reserved memory */ - public static final double defaultWorkerReservedMemory = OSUtils.totalMemorySize() / 10; + public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10; /** 
* master execute threads number */ - public static final int defaultMasterExecThreadNum = 100; + public static final int DEFAULT_MASTER_EXEC_THREAD_NUM = 100; /** * default master concurrent task execute num */ - public static final int defaultMasterTaskExecNum = 20; + public static final int DEFAULT_MASTER_TASK_EXEC_NUM = 20; /** * default log cache rows num,output when reach the number */ - public static final int defaultLogRowsNum = 4 * 16; + public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16; /** * log flush interval,output when reach the interval */ - public static final int defaultLogFlushInterval = 1000; + public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000; /** * default master heartbeat thread number */ - public static final int defaulMasterHeartbeatThreadNum = 1; + public static final int DEFAULT_MASTER_HEARTBEAT_THREAD_NUM = 1; /** * default master heartbeat interval */ - public static final int defaultMasterHeartbeatInterval = 60; + public static final int DEFAULT_MASTER_HEARTBEAT_INTERVAL = 60; /** * default master commit retry times */ - public static final int defaultMasterCommitRetryTimes = 5; + public static final int DEFAULT_MASTER_COMMIT_RETRY_TIMES = 5; /** * default master commit retry interval */ - public static final int defaultMasterCommitRetryInterval = 3000; + public static final int DEFAULT_MASTER_COMMIT_RETRY_INTERVAL = 3000; /** * time unit secong to minutes @@ -474,9 +468,9 @@ public final class Constants { public static final String THREAD_NAME_MASTER_SERVER = "Master-Server"; public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server"; - public static String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd"; + public static final String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd"; - public static String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd"; + public static final String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd"; /** @@ -874,7 +868,7 @@ public final class Constants { 
public static final String FLINK_JOB_MANAGE_MEM = "-yjm"; public static final String FLINK_TASK_MANAGE_MEM = "-ytm"; - public static final String FLINK_detach = "-d"; + public static final String FLINK_DETACH = "-d"; public static final String FLINK_MAIN_CLASS = "-c"; @@ -989,7 +983,7 @@ public final class Constants { * session timeout */ public static final int SESSION_TIME_OUT = 7200; - public static final int maxFileSize = 1024 * 1024 * 1024; + public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024; public static final String UDF = "UDF"; public static final String CLASS = "class"; public static final String RECEIVERS = "receivers"; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java new file mode 100644 index 0000000000..13820b4bab --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/QueryType.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.enums; + +public enum QueryType { + + FORM, + SQL; + + public static QueryType getEnum(int value){ + for (QueryType e:QueryType.values()) { + if(e.ordinal() == value) { + return e; + } + } + //For values out of enum scope + return null; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java index b996c3aec9..1f85432bd2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java @@ -34,6 +34,8 @@ public enum TaskType { * 8 FLINK * 9 HTTP * 10 DATAX + * 11 CONDITIONS + * 12 SQOOP */ SHELL(0, "shell"), SQL(1, "sql"), @@ -45,7 +47,9 @@ public enum TaskType { DEPENDENT(7, "dependent"), FLINK(8, "flink"), HTTP(9, "http"), - DATAX(10, "datax"); + DATAX(10, "datax"), + CONDITIONS(11, "conditions"), + SQOOP(12, "sqoop"); TaskType(int code, String descp){ this.code = code; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java index 484a2f7ac8..6c09064eae 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/DependentItem.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.common.model; import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; /** * dependent item @@ -28,6 +29,7 @@ public class DependentItem { private String cycle; private String dateValue; private DependResult dependResult; + private ExecutionStatus status; public String getKey(){ @@ -77,4 +79,12 @@ 
public class DependentItem { public void setDependResult(DependResult dependResult) { this.dependResult = dependResult; } + + public ExecutionStatus getStatus() { + return status; + } + + public void setStatus(ExecutionStatus status) { + this.status = status; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java index 40efd0a24f..b45bd8aeb8 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.common.model; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.Priority; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; +import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; @@ -108,6 +109,11 @@ public class TaskNode { @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) private String dependence; + + @JsonDeserialize(using = JSONUtils.JsonDataDeserializer.class) + @JsonSerialize(using = JSONUtils.JsonDataSerializer.class) + private String conditionResult; + /** * task instance priority */ @@ -230,6 +236,7 @@ public class TaskNode { Objects.equals(extras, taskNode.extras) && Objects.equals(runFlag, taskNode.runFlag) && Objects.equals(dependence, taskNode.dependence) && + Objects.equals(conditionResult, taskNode.conditionResult) && Objects.equals(workerGroupId, taskNode.workerGroupId) && CollectionUtils.equalLists(depList, taskNode.depList); } @@ -292,6 +299,10 @@ public class TaskNode { return new TaskTimeoutParameter(false); } + public boolean 
isConditionsTask(){ + return this.getType().toUpperCase().equals(TaskType.CONDITIONS.toString()); + } + @Override public String toString() { return "TaskNode{" + @@ -321,4 +332,12 @@ public class TaskNode { public void setWorkerGroupId(int workerGroupId) { this.workerGroupId = workerGroupId; } + + public String getConditionResult() { + return conditionResult; + } + + public void setConditionResult(String conditionResult) { + this.conditionResult = conditionResult; + } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java new file mode 100644 index 0000000000..5714b5ef3e --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.conditions; + +import org.apache.dolphinscheduler.common.enums.DependentRelation; +import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.task.AbstractParameters; + +import java.util.List; + +public class ConditionsParameters extends AbstractParameters { + + //depend node list and state, only need task name + private List dependTaskList; + private DependentRelation dependRelation; + + // node list to run when success + private List successNode; + + // node list to run when failed + private List failedNode; + + + @Override + public boolean checkParameters() { + return true; + } + + @Override + public List getResourceFilesList() { + return null; + } + + public List getDependTaskList() { + return dependTaskList; + } + + public void setDependTaskList(List dependTaskList) { + this.dependTaskList = dependTaskList; + } + + public DependentRelation getDependRelation() { + return dependRelation; + } + + public void setDependRelation(DependentRelation dependRelation) { + this.dependRelation = dependRelation; + } + + public List getSuccessNode() { + return successNode; + } + + public void setSuccessNode(List successNode) { + this.successNode = successNode; + } + + public List getFailedNode() { + return failedNode; + } + + public void setFailedNode(List failedNode) { + this.failedNode = failedNode; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java new file mode 100644 index 0000000000..fb65df6c1b --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/SqoopParameters.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.sqoop; + +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.utils.StringUtils; + +import java.util.ArrayList; +import java.util.List; + +/** + * sqoop parameters + */ +public class SqoopParameters extends AbstractParameters { + + /** + * model type + */ + private String modelType; + /** + * concurrency + */ + private int concurrency; + /** + * source type + */ + private String sourceType; + /** + * target type + */ + private String targetType; + /** + * source params + */ + private String sourceParams; + /** + * target params + */ + private String targetParams; + + public String getModelType() { + return modelType; + } + + public void setModelType(String modelType) { + this.modelType = modelType; + } + + public int getConcurrency() { + return concurrency; + } + + public void setConcurrency(int concurrency) { + this.concurrency = concurrency; + } + + public String getSourceType() { + return sourceType; + } + + public void setSourceType(String sourceType) { + this.sourceType = sourceType; + } + + public String getTargetType() { + return targetType; + } + + public void setTargetType(String targetType) { + this.targetType = targetType; + } + + public String getSourceParams() 
{ + return sourceParams; + } + + public void setSourceParams(String sourceParams) { + this.sourceParams = sourceParams; + } + + public String getTargetParams() { + return targetParams; + } + + public void setTargetParams(String targetParams) { + this.targetParams = targetParams; + } + + @Override + public boolean checkParameters() { + return StringUtils.isNotEmpty(modelType)&& + concurrency != 0 && + StringUtils.isNotEmpty(sourceType)&& + StringUtils.isNotEmpty(targetType)&& + StringUtils.isNotEmpty(sourceParams)&& + StringUtils.isNotEmpty(targetParams); + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZk.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHdfsParameter.java similarity index 66% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZk.java rename to dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHdfsParameter.java index 5c3db2d5d1..07f1157be0 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZk.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHdfsParameter.java @@ -14,30 +14,23 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +package org.apache.dolphinscheduler.common.task.sqoop.sources; /** - * demo for using zkServer + * source hdfs parameter */ -public class TestZk { +public class SourceHdfsParameter { - @Before - public void before(){ - ZKServer.start(); - } + /** + * export dir + */ + private String exportDir; - @Test - public void test(){ - Assert.assertTrue(ZKServer.isStarted()); + public String getExportDir() { + return exportDir; } - @After - public void after(){ - ZKServer.stop(); + public void setExportDir(String exportDir) { + this.exportDir = exportDir; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHiveParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHiveParameter.java new file mode 100644 index 0000000000..a37840f9ea --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceHiveParameter.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.sqoop.sources; + +/** + * source hive parameter + */ +public class SourceHiveParameter { + + /** + * hive database + */ + private String hiveDatabase; + /** + * hive table + */ + private String hiveTable; + /** + * hive partition key + */ + private String hivePartitionKey; + /** + * hive partition value + */ + private String hivePartitionValue; + + public String getHiveDatabase() { + return hiveDatabase; + } + + public void setHiveDatabase(String hiveDatabase) { + this.hiveDatabase = hiveDatabase; + } + + public String getHiveTable() { + return hiveTable; + } + + public void setHiveTable(String hiveTable) { + this.hiveTable = hiveTable; + } + + public String getHivePartitionKey() { + return hivePartitionKey; + } + + public void setHivePartitionKey(String hivePartitionKey) { + this.hivePartitionKey = hivePartitionKey; + } + + public String getHivePartitionValue() { + return hivePartitionValue; + } + + public void setHivePartitionValue(String hivePartitionValue) { + this.hivePartitionValue = hivePartitionValue; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceMysqlParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceMysqlParameter.java new file mode 100644 index 0000000000..f80d681b59 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/sources/SourceMysqlParameter.java @@ -0,0 +1,136 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.sqoop.sources; + +import org.apache.dolphinscheduler.common.process.Property; + +import java.util.List; + +/** + * source mysql parameter + */ +public class SourceMysqlParameter { + + /** + * src datasource + */ + private int srcDatasource; + /** + * src table + */ + private String srcTable; + /** + * src query type + */ + private int srcQueryType; + /** + * src query sql + */ + private String srcQuerySql; + /** + * src column type + */ + private int srcColumnType; + /** + * src columns + */ + private String srcColumns; + /** + * src condition list + */ + private List srcConditionList; + /** + * map column hive + */ + private List mapColumnHive; + /** + * map column java + */ + private List mapColumnJava; + + public int getSrcDatasource() { + return srcDatasource; + } + + public void setSrcDatasource(int srcDatasource) { + this.srcDatasource = srcDatasource; + } + + public String getSrcTable() { + return srcTable; + } + + public void setSrcTable(String srcTable) { + this.srcTable = srcTable; + } + + public int getSrcQueryType() { + return srcQueryType; + } + + public void setSrcQueryType(int srcQueryType) { + this.srcQueryType = srcQueryType; + } + + public String getSrcQuerySql() { + return srcQuerySql; + } + + public void setSrcQuerySql(String srcQuerySql) { + this.srcQuerySql = srcQuerySql; + } + + public int getSrcColumnType() { + return srcColumnType; + } + + public void setSrcColumnType(int srcColumnType) { + this.srcColumnType = srcColumnType; + } + + public String getSrcColumns() { + return 
srcColumns; + } + + public void setSrcColumns(String srcColumns) { + this.srcColumns = srcColumns; + } + + public List getSrcConditionList() { + return srcConditionList; + } + + public void setSrcConditionList(List srcConditionList) { + this.srcConditionList = srcConditionList; + } + + public List getMapColumnHive() { + return mapColumnHive; + } + + public void setMapColumnHive(List mapColumnHive) { + this.mapColumnHive = mapColumnHive; + } + + public List getMapColumnJava() { + return mapColumnJava; + } + + public void setMapColumnJava(List mapColumnJava) { + this.mapColumnJava = mapColumnJava; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHdfsParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHdfsParameter.java new file mode 100644 index 0000000000..524921dcee --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHdfsParameter.java @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task.sqoop.targets; + +/** + * target hdfs parameter + */ +public class TargetHdfsParameter { + + /** + * target dir + */ + private String targetPath; + /** + * delete target dir + */ + private boolean deleteTargetDir; + /** + * file type + */ + private String fileType; + /** + * compression codec + */ + private String compressionCodec; + /** + * fields terminated + */ + private String fieldsTerminated; + /** + * lines terminated + */ + private String linesTerminated; + + public String getTargetPath() { + return targetPath; + } + + public void setTargetPath(String targetPath) { + this.targetPath = targetPath; + } + + public boolean isDeleteTargetDir() { + return deleteTargetDir; + } + + public void setDeleteTargetDir(boolean deleteTargetDir) { + this.deleteTargetDir = deleteTargetDir; + } + + public String getFileType() { + return fileType; + } + + public void setFileType(String fileType) { + this.fileType = fileType; + } + + public String getCompressionCodec() { + return compressionCodec; + } + + public void setCompressionCodec(String compressionCodec) { + this.compressionCodec = compressionCodec; + } + + public String getFieldsTerminated() { + return fieldsTerminated; + } + + public void setFieldsTerminated(String fieldsTerminated) { + this.fieldsTerminated = fieldsTerminated; + } + + public String getLinesTerminated() { + return linesTerminated; + } + + public void setLinesTerminated(String linesTerminated) { + this.linesTerminated = linesTerminated; + } + +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHiveParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHiveParameter.java new file mode 100644 index 0000000000..f9bfde3374 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetHiveParameter.java @@ -0,0 +1,120 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.sqoop.targets; + +/** + * target hive parameter + */ +public class TargetHiveParameter { + + /** + * hive database + */ + private String hiveDatabase; + /** + * hive table + */ + private String hiveTable; + /** + * create hive table + */ + private boolean createHiveTable; + /** + * drop delimiter + */ + private boolean dropDelimiter; + /** + * hive overwrite + */ + private boolean hiveOverWrite; + /** + * replace delimiter + */ + private String replaceDelimiter; + /** + * hive partition key + */ + private String hivePartitionKey; + /** + * hive partition value + */ + private String hivePartitionValue; + + public String getHiveDatabase() { + return hiveDatabase; + } + + public void setHiveDatabase(String hiveDatabase) { + this.hiveDatabase = hiveDatabase; + } + + public String getHiveTable() { + return hiveTable; + } + + public void setHiveTable(String hiveTable) { + this.hiveTable = hiveTable; + } + + public boolean isCreateHiveTable() { + return createHiveTable; + } + + public void setCreateHiveTable(boolean createHiveTable) { + this.createHiveTable = createHiveTable; + } + + public boolean isDropDelimiter() { + return 
dropDelimiter; + } + + public void setDropDelimiter(boolean dropDelimiter) { + this.dropDelimiter = dropDelimiter; + } + + public boolean isHiveOverWrite() { + return hiveOverWrite; + } + + public void setHiveOverWrite(boolean hiveOverWrite) { + this.hiveOverWrite = hiveOverWrite; + } + + public String getReplaceDelimiter() { + return replaceDelimiter; + } + + public void setReplaceDelimiter(String replaceDelimiter) { + this.replaceDelimiter = replaceDelimiter; + } + + public String getHivePartitionKey() { + return hivePartitionKey; + } + + public void setHivePartitionKey(String hivePartitionKey) { + this.hivePartitionKey = hivePartitionKey; + } + + public String getHivePartitionValue() { + return hivePartitionValue; + } + + public void setHivePartitionValue(String hivePartitionValue) { + this.hivePartitionValue = hivePartitionValue; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java new file mode 100644 index 0000000000..47126ae993 --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/sqoop/targets/TargetMysqlParameter.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.common.task.sqoop.targets; + +/** + * target mysql parameter + */ +public class TargetMysqlParameter { + + /** + * target datasource + */ + private int targetDatasource; + /** + * target table + */ + private String targetTable; + /** + * target columns + */ + private String targetColumns; + /** + * fields terminated + */ + private String fieldsTerminated; + /** + * lines terminated + */ + private String linesTerminated; + /** + * pre query + */ + private String preQuery; + /** + * is update + */ + private boolean isUpdate; + /** + * target update key + */ + private String targetUpdateKey; + /** + * target update mode + */ + private String targetUpdateMode; + + public int getTargetDatasource() { + return targetDatasource; + } + + public void setTargetDatasource(int targetDatasource) { + this.targetDatasource = targetDatasource; + } + + public String getTargetTable() { + return targetTable; + } + + public void setTargetTable(String targetTable) { + this.targetTable = targetTable; + } + + public String getTargetColumns() { + return targetColumns; + } + + public void setTargetColumns(String targetColumns) { + this.targetColumns = targetColumns; + } + + public String getFieldsTerminated() { + return fieldsTerminated; + } + + public void setFieldsTerminated(String fieldsTerminated) { + this.fieldsTerminated = fieldsTerminated; + } + + public String getLinesTerminated() { + return linesTerminated; + } + + public void setLinesTerminated(String linesTerminated) { + this.linesTerminated = linesTerminated; + } + + public 
String getPreQuery() { + return preQuery; + } + + public void setPreQuery(String preQuery) { + this.preQuery = preQuery; + } + + public boolean isUpdate() { + return isUpdate; + } + + public void setUpdate(boolean update) { + isUpdate = update; + } + + public String getTargetUpdateKey() { + return targetUpdateKey; + } + + public void setTargetUpdateKey(String targetUpdateKey) { + this.targetUpdateKey = targetUpdateKey; + } + + public String getTargetUpdateMode() { + return targetUpdateMode; + } + + public void setTargetUpdateMode(String targetUpdateMode) { + this.targetUpdateMode = targetUpdateMode; + } +} diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java index 21e3ce2248..c7784de8dd 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/subprocess/SubProcessParameters.java @@ -38,7 +38,7 @@ public class SubProcessParameters extends AbstractParameters { @Override public boolean checkParameters() { - return this.processDefinitionId != 0; + return this.processDefinitionId != null && this.processDefinitionId != 0; } @Override diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java index 9c02111c36..22c58640cc 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CollectionUtils.java @@ -37,6 +37,9 @@ import java.util.*; */ public class CollectionUtils { + private CollectionUtils() { + throw new 
IllegalStateException("CollectionUtils class"); + } /** * Returns a new {@link Collection} containing a minus a subset of * b. Only the elements of b that satisfy the predicate @@ -139,26 +142,6 @@ public class CollectionUtils { cardinalityB = CollectionUtils.getCardinalityMap(b); } - /** - * Returns the maximum frequency of an object. - * - * @param obj the object - * @return the maximum frequency of the object - */ - private int max(final Object obj) { - return Math.max(freqA(obj), freqB(obj)); - } - - /** - * Returns the minimum frequency of an object. - * - * @param obj the object - * @return the minimum frequency of the object - */ - private int min(final Object obj) { - return Math.min(freqA(obj), freqB(obj)); - } - /** * Returns the frequency of this object in collection A. * @@ -225,7 +208,7 @@ public class CollectionUtils { if (a.size() != b.size()) { return false; } - final CardinalityHelper helper = new CardinalityHelper(a, b); + final CardinalityHelper helper = new CardinalityHelper<>(a, b); if (helper.cardinalityA.size() != helper.cardinalityB.size()) { return false; } @@ -250,7 +233,7 @@ public class CollectionUtils { * @return the populated cardinality map */ public static Map getCardinalityMap(final Iterable coll) { - final Map count = new HashMap(); + final Map count = new HashMap<>(); for (final O obj : coll) { count.put(obj, count.getOrDefault(obj, 0) + 1); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java index 842c74edc0..b4b89bfe26 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java @@ -20,8 +20,6 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ResUploadType; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.File; @@ -29,8 +27,9 @@ import java.io.File; * common utils */ public class CommonUtils { - - private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class); + private CommonUtils() { + throw new IllegalStateException("CommonUtils class"); + } /** * @return get the path of system environment variables diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 9e9e4f6546..ec523b1ff2 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -126,7 +126,7 @@ public class JSONUtils { * @param json json * @return true if valid */ - public static boolean checkJsonVaild(String json) { + public static boolean checkJsonValid(String json) { if (StringUtils.isEmpty(json)) { return false; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java index 0c061fc0ba..acfca77c8c 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -40,6 +40,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.regex.Pattern; /** * os utils @@ -138,7 +139,7 @@ public class OSUtils { if (isMacOS()) { return getUserListFromMac(); } else if (isWindows()) { - // do something + return getUserListFromWindows(); } else { return getUserListFromLinux(); } @@ -185,6 
+186,47 @@ public class OSUtils { return Collections.emptyList(); } + /** + * get user list from windows + * @return user list + * @throws IOException + */ + private static List getUserListFromWindows() throws IOException { + String result = exeCmd("net user"); + String[] lines = result.split("\n"); + + int startPos = 0; + int endPos = lines.length - 2; + for (int i = 0; i < lines.length; i++) { + if (lines[i].isEmpty()) { + continue; + } + + int count = 0; + if (lines[i].charAt(0) == '-') { + for (int j = 0; j < lines[i].length(); j++) { + if (lines[i].charAt(i) == '-') { + count++; + } + } + } + + if (count == lines[i].length()) { + startPos = i + 1; + break; + } + } + + List users = new ArrayList<>(); + while (startPos <= endPos) { + Pattern pattern = Pattern.compile("\\s+"); + users.addAll(Arrays.asList(pattern.split(lines[startPos]))); + startPos++; + } + + return users; + } + /** * create user * @param userName user name @@ -200,7 +242,7 @@ public class OSUtils { if (isMacOS()) { createMacUser(userName, userGroup); } else if (isWindows()) { - // do something + createWindowsUser(userName, userGroup); } else { createLinuxUser(userName, userGroup); } @@ -243,16 +285,46 @@ public class OSUtils { OSUtils.exeCmd(appendGroupCmd); } + /** + * create windows user + * @param userName user name + * @param userGroup user group + * @throws IOException in case of an I/O error + */ + private static void createWindowsUser(String userName, String userGroup) throws IOException { + logger.info("create windows os user : {}", userName); + String userCreateCmd = String.format("net user \"%s\" /add", userName); + String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName); + + logger.info("execute create user command : {}", userCreateCmd); + OSUtils.exeCmd(userCreateCmd); + + logger.info("execute append user to group : {}", appendGroupCmd); + OSUtils.exeCmd(appendGroupCmd); + } + /** * get system group information * @return system group info * 
@throws IOException errors */ public static String getGroup() throws IOException { - String result = exeCmd("groups"); - if (StringUtils.isNotEmpty(result)) { - String[] groupInfo = result.split(" "); - return groupInfo[0]; + if (isWindows()) { + String currentProcUserName = System.getProperty("user.name"); + String result = exeCmd(String.format("net user \"%s\"", currentProcUserName)); + String line = result.split("\n")[22]; + String group = Pattern.compile("\\s+").split(line)[1]; + if (group.charAt(0) == '*') { + return group.substring(1); + } else { + return group; + } + } else { + String result = exeCmd("groups"); + if (StringUtils.isNotEmpty(result)) { + String[] groupInfo = result.split(" "); + return groupInfo[0]; + } } return null; @@ -370,25 +442,14 @@ public class OSUtils { double systemCpuLoad; double systemReservedMemory; - if(isMaster){ - systemCpuLoad = conf.getDouble(Constants.MASTER_MAX_CPULOAD_AVG, Constants.defaultMasterCpuLoad); - systemReservedMemory = conf.getDouble(Constants.MASTER_RESERVED_MEMORY, Constants.defaultMasterReservedMemory); + if(Boolean.TRUE.equals(isMaster)){ + systemCpuLoad = conf.getDouble(Constants.MASTER_MAX_CPULOAD_AVG, Constants.DEFAULT_MASTER_CPU_LOAD); + systemReservedMemory = conf.getDouble(Constants.MASTER_RESERVED_MEMORY, Constants.DEFAULT_MASTER_RESERVED_MEMORY); }else{ - systemCpuLoad = conf.getDouble(Constants.WORKER_MAX_CPULOAD_AVG, Constants.defaultWorkerCpuLoad); - systemReservedMemory = conf.getDouble(Constants.WORKER_RESERVED_MEMORY, Constants.defaultWorkerReservedMemory); - } - - // judging usage - double loadAverage = OSUtils.loadAverage(); - // - double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); - - if(loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory){ - logger.warn("load or availablePhysicalMemorySize(G) is too high, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage); - return false; - }else{ - return 
true; + systemCpuLoad = conf.getDouble(Constants.WORKER_MAX_CPULOAD_AVG, Constants.DEFAULT_WORKER_CPU_LOAD); + systemReservedMemory = conf.getDouble(Constants.WORKER_RESERVED_MEMORY, Constants.DEFAULT_WORKER_RESERVED_MEMORY); } + return checkResource(systemCpuLoad,systemReservedMemory); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java index a6dd53ea22..7f2888384e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java @@ -61,7 +61,7 @@ public class ParameterUtils { try { cronTime = DateUtils.parseDate(cronTimeStr, new String[]{Constants.PARAMETER_FORMAT_TIME}); } catch (ParseException e) { - logger.error(String.format("parse %s exception", cronTimeStr), e); + logger.error("parse {} exception", cronTimeStr, e); } } else { cronTime = new Date(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java index 92337f5de6..e59cbd1b96 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/Preconditions.java @@ -16,7 +16,6 @@ */ package org.apache.dolphinscheduler.common.utils; -import org.springframework.lang.Nullable; /** * A collection of static utility methods to validate input. @@ -57,7 +56,7 @@ public final class Preconditions { * * @throws NullPointerException Thrown, if the passed reference was null. 
*/ - public static T checkNotNull(T reference, @Nullable String errorMessage) { + public static T checkNotNull(T reference, String errorMessage) { if (reference == null) { throw new NullPointerException(String.valueOf(errorMessage)); } @@ -84,8 +83,8 @@ public final class Preconditions { * @throws NullPointerException Thrown, if the passed reference was null. */ public static T checkNotNull(T reference, - @Nullable String errorMessageTemplate, - @Nullable Object... errorMessageArgs) { + String errorMessageTemplate, + Object... errorMessageArgs) { if (reference == null) { throw new NullPointerException(format(errorMessageTemplate, errorMessageArgs)); @@ -121,7 +120,7 @@ public final class Preconditions { * * @throws IllegalArgumentException Thrown, if the condition is violated. */ - public static void checkArgument(boolean condition, @Nullable Object errorMessage) { + public static void checkArgument(boolean condition, Object errorMessage) { if (!condition) { throw new IllegalArgumentException(String.valueOf(errorMessage)); } @@ -141,8 +140,8 @@ public final class Preconditions { * @throws IllegalArgumentException Thrown, if the condition is violated. */ public static void checkArgument(boolean condition, - @Nullable String errorMessageTemplate, - @Nullable Object... errorMessageArgs) { + String errorMessageTemplate, + Object... errorMessageArgs) { if (!condition) { throw new IllegalArgumentException(format(errorMessageTemplate, errorMessageArgs)); @@ -177,7 +176,7 @@ public final class Preconditions { * * @throws IllegalStateException Thrown, if the condition is violated. */ - public static void checkState(boolean condition, @Nullable Object errorMessage) { + public static void checkState(boolean condition, Object errorMessage) { if (!condition) { throw new IllegalStateException(String.valueOf(errorMessage)); } @@ -197,8 +196,8 @@ public final class Preconditions { * @throws IllegalStateException Thrown, if the condition is violated. 
*/ public static void checkState(boolean condition, - @Nullable String errorMessageTemplate, - @Nullable Object... errorMessageArgs) { + String errorMessageTemplate, + Object... errorMessageArgs) { if (!condition) { throw new IllegalStateException(format(errorMessageTemplate, errorMessageArgs)); @@ -231,7 +230,7 @@ public final class Preconditions { * @throws IllegalArgumentException Thrown, if size is negative. * @throws IndexOutOfBoundsException Thrown, if the index negative or greater than or equal to size */ - public static void checkElementIndex(int index, int size, @Nullable String errorMessage) { + public static void checkElementIndex(int index, int size, String errorMessage) { checkArgument(size >= 0, "Size was negative."); if (index < 0 || index >= size) { throw new IndexOutOfBoundsException(String.valueOf(errorMessage) + " Index: " + index + ", Size: " + size); @@ -248,7 +247,7 @@ public final class Preconditions { * *

This method is taken quasi verbatim from the Guava Preconditions class. */ - private static String format(@Nullable String template, @Nullable Object... args) { + private static String format( String template, Object... args) { final int numArgs = args == null ? 0 : args.length; template = String.valueOf(template); // null -> "null" diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java index a2ae6a68e3..a3492f49fa 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.common.utils; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; @@ -28,6 +29,7 @@ import org.apache.dolphinscheduler.common.task.python.PythonParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -71,6 +73,10 @@ public class TaskParametersUtils { return JSONUtils.parseObject(parameter, HttpParameters.class); case DATAX: return JSONUtils.parseObject(parameter, 
DataxParameters.class); + case CONDITIONS: + return JSONUtils.parseObject(parameter, ConditionsParameters.class); + case SQOOP: + return JSONUtils.parseObject(parameter, SqoopParameters.class); default: return null; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java index b52707bfde..0c756cb0b3 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/PlaceholderUtils.java @@ -90,7 +90,7 @@ public class PlaceholderUtils { try { return paramsMap.get(placeholderName); } catch (Exception ex) { - logger.error(String.format("resolve placeholder '%s' in [ %s ]" , placeholderName, value), ex); + logger.error("resolve placeholder '{}' in [ {} ]" , placeholderName, value, ex); return null; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java index 1751df53c6..15e3282d38 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/placeholder/TimePlaceholderUtils.java @@ -278,7 +278,7 @@ public class TimePlaceholderUtils { try { return calculateTime(placeholderName, date); } catch (Exception ex) { - logger.error(String.format("resolve placeholder '%s' in [ %s ]" , placeholderName, value), ex); + logger.error("resolve placeholder '{}' in [ {} ]" , placeholderName, value, ex); return null; } } diff --git 
a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/EntityTestUtils.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/EntityTestUtils.java new file mode 100644 index 0000000000..5d867bc4d9 --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/EntityTestUtils.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.*; + +/** + * entity test utils + */ +public class EntityTestUtils { + + private static final Map OBJECT_MAP = new HashMap<>(); + + private static final String SKIP_METHOD = "getClass,notify,notifyAll,wait,equals,hashCode,clone"; + + static { + OBJECT_MAP.put("java.lang.Long", 1L); + OBJECT_MAP.put("java.lang.String", "test"); + OBJECT_MAP.put("java.lang.Integer", 1); + OBJECT_MAP.put("int", 1); + OBJECT_MAP.put("long", 1L); + OBJECT_MAP.put("java.util.Date", new Date()); + OBJECT_MAP.put("char", '1'); + OBJECT_MAP.put("java.util.Map", new HashMap()); + OBJECT_MAP.put("boolean", true); + } + + public static void run(List classList) + throws IllegalAccessException, InvocationTargetException, InstantiationException { + for (Class temp : classList) { + Object tempInstance = new Object(); + Constructor[] constructors = temp.getConstructors(); + for (Constructor constructor : constructors) { + final Class[] parameterTypes = constructor.getParameterTypes(); + if (parameterTypes.length == 0) { + tempInstance = constructor.newInstance(); + } else { + Object[] objects = new Object[parameterTypes.length]; + for (int i = 0; i < parameterTypes.length; i++) { + objects[i] = OBJECT_MAP.get(parameterTypes[i].getName()); + } + tempInstance = constructor.newInstance(objects); + } + } + + Method[] methods = temp.getMethods(); + for (final Method method : methods) { + if (SKIP_METHOD.contains(method.getName())) { + break; + } + final Class[] parameterTypes = method.getParameterTypes(); + if (parameterTypes.length != 0) { + Object[] objects = new Object[parameterTypes.length]; + for (int i = 0; i < parameterTypes.length; i++) { + objects[i] = OBJECT_MAP.get(parameterTypes[i].getName()); + } + method.invoke(tempInstance, objects); + } else { + method.invoke(tempInstance); + } + } + } + } 
+} \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqoopParameterEntityTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqoopParameterEntityTest.java new file mode 100644 index 0000000000..5f35e89ddd --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/SqoopParameterEntityTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common.task; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; +import org.junit.Assert; +import org.junit.Test; +import java.util.ArrayList; +import java.util.List; + +/** + * sqoop parameter entity test + */ +public class SqoopParameterEntityTest { + + @Test + public void testEntity(){ + try { + List classList = new ArrayList<>(); + classList.add(SourceMysqlParameter.class); + classList.add(SourceHiveParameter.class); + classList.add(SourceHdfsParameter.class); + classList.add(SqoopParameters.class); + classList.add(TargetMysqlParameter.class); + classList.add(TargetHiveParameter.class); + classList.add(TargetHdfsParameter.class); + EntityTestUtils.run(classList); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java index 265f7eabcd..9879154889 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/threadutils/ThreadPoolExecutorsTest.java @@ -48,7 +48,7 @@ public class ThreadPoolExecutorsTest { class Thread2 extends Thread { @Override public void run() { - logger.info(String.format("ThreadPoolExecutors 
instance's hashcode is: %s ",ThreadPoolExecutors.getInstance("a",2).hashCode())); + logger.info("ThreadPoolExecutors instance's hashcode is: {} ",ThreadPoolExecutors.getInstance("a",2).hashCode()); } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java index 7321879ab8..99685265e6 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CollectionUtilsTest.java @@ -76,11 +76,11 @@ public class CollectionUtilsTest { a = CollectionUtils.stringToMap("a=b;c=d", null); Assert.assertTrue(a.isEmpty()); a = CollectionUtils.stringToMap("a=b;c=d;e=f", ";"); - Assert.assertEquals(a.size(), 3); + Assert.assertEquals(3, a.size()); a = CollectionUtils.stringToMap("a;b=f", ";"); Assert.assertTrue(a.isEmpty()); a = CollectionUtils.stringToMap("a=b;c=d;e=f;", ";", "test"); - Assert.assertEquals(a.size(), 3); + Assert.assertEquals(3, a.size()); Assert.assertNotNull(a.get("testa")); } @@ -91,14 +91,14 @@ public class CollectionUtilsTest { originList.add(1); originList.add(2); List> ret = CollectionUtils.getListByExclusion(originList, null); - Assert.assertEquals(ret.size(), 2); + Assert.assertEquals(2, ret.size()); ret = CollectionUtils.getListByExclusion(originList, new HashSet<>()); - Assert.assertEquals(ret.size(), 2); + Assert.assertEquals(2, ret.size()); Assert.assertFalse(ret.get(0).isEmpty()); Set exclusion = new HashSet<>(); exclusion.add(Constants.CLASS); ret = CollectionUtils.getListByExclusion(originList, exclusion); - Assert.assertEquals(ret.size(), 2); + Assert.assertEquals(2, ret.size()); Assert.assertTrue(ret.get(0).isEmpty()); } @@ -108,5 +108,38 @@ public class CollectionUtilsTest { Assert.assertFalse(CollectionUtils.isNotEmpty(list)); 
Assert.assertFalse(CollectionUtils.isNotEmpty(null)); } + @Test + public void isEmpty(){ + List list = new ArrayList<>(); + Assert.assertTrue(CollectionUtils.isEmpty(list)); + Assert.assertTrue(CollectionUtils.isEmpty(null)); + list.add(1); + Assert.assertFalse(CollectionUtils.isEmpty(list)); + } + @Test + public void isEqualCollection() { + List a = new ArrayList<>(); + a.add(1); + List b = new ArrayList<>(); + b.add(1); + Assert.assertTrue(CollectionUtils.isEqualCollection(a,b)); + b.add(2); + Assert.assertFalse(CollectionUtils.isEqualCollection(a,b)); + } + @Test + public void getCardinalityMap(){ + List a = new ArrayList<>(); + a.add(1); + a.add(2); + a.add(2); + a.add(3); + a.add(3); + a.add(3); + Map cardinalityMap = CollectionUtils.getCardinalityMap(a); + Assert.assertEquals(3, cardinalityMap.size()); + Assert.assertEquals(1, cardinalityMap.get(1).intValue()); + Assert.assertEquals(2, cardinalityMap.get(2).intValue()); + Assert.assertEquals(3, cardinalityMap.get(3).intValue()); + } } diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java index f38b9b4c3b..42c9958810 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java @@ -16,6 +16,7 @@ */ package org.apache.dolphinscheduler.common.utils; +import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,24 +29,58 @@ import java.net.UnknownHostException; */ public class CommonUtilsTest { private static final Logger logger = LoggerFactory.getLogger(CommonUtilsTest.class); + @Test + public void getSystemEnvPath() { + logger.info(CommonUtils.getSystemEnvPath()); + Assert.assertTrue(true); + } + @Test + public void getQueueImplValue(){ + 
logger.info(CommonUtils.getQueueImplValue()); + Assert.assertTrue(true); + } + @Test + public void isDevelopMode() { + logger.info("develop mode: {}",CommonUtils.isDevelopMode()); + Assert.assertTrue(true); + } + @Test + public void getKerberosStartupState(){ + logger.info("kerberos startup state: {}",CommonUtils.getKerberosStartupState()); + Assert.assertTrue(true); + } + @Test + public void loadKerberosConf(){ + try { + CommonUtils.loadKerberosConf(); + Assert.assertTrue(true); + } catch (Exception e) { + Assert.fail("load Kerberos Conf failed"); + } + } + @Test public void getHdfsDataBasePath() { logger.info(HadoopUtils.getHdfsDataBasePath()); + Assert.assertTrue(true); } @Test public void getDownloadFilename() { logger.info(FileUtils.getDownloadFilename("a.txt")); + Assert.assertTrue(true); } @Test public void getUploadFilename() { logger.info(FileUtils.getUploadFilename("1234", "a.txt")); + Assert.assertTrue(true); } @Test public void getHdfsDir() { logger.info(HadoopUtils.getHdfsResDir("1234")); + Assert.assertTrue(true); } @Test @@ -57,5 +92,6 @@ public class CommonUtilsTest { } catch (UnknownHostException e) { e.printStackTrace(); } + Assert.assertTrue(true); } } \ No newline at end of file diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java index 799874ad71..bd924e4852 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/JSONUtilsTest.java @@ -117,9 +117,9 @@ public class JSONUtilsTest { } @Test - public void testCheckJsonVaild() { - Assert.assertTrue(JSONUtils.checkJsonVaild("3")); - Assert.assertFalse(JSONUtils.checkJsonVaild("")); + public void testCheckJsonValid() { + Assert.assertTrue(JSONUtils.checkJsonValid("3")); + 
Assert.assertFalse(JSONUtils.checkJsonValid("")); } @Test diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java index 391fb594f0..7106804aaf 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/OSUtilsTest.java @@ -16,10 +16,15 @@ */ package org.apache.dolphinscheduler.common.utils; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.dolphinscheduler.common.Constants; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + +import java.io.IOException; import java.util.List; public class OSUtilsTest { @@ -31,4 +36,88 @@ public class OSUtilsTest { Assert.assertNotEquals("System user list should not be empty", userList.size(), 0); logger.info("OS user list : {}", userList.toString()); } + + @Test + public void testOSMetric(){ + double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize(); + Assert.assertTrue(availablePhysicalMemorySize > 0.0f); + double totalMemorySize = OSUtils.totalMemorySize(); + Assert.assertTrue(totalMemorySize > 0.0f); + double loadAverage = OSUtils.loadAverage(); + logger.info("loadAverage {}", loadAverage); + double memoryUsage = OSUtils.memoryUsage(); + Assert.assertTrue(memoryUsage > 0.0f); + double cpuUsage = OSUtils.cpuUsage(); + Assert.assertTrue(cpuUsage > 0.0f); + } + + @Test + public void getGroup() { + try { + String group = OSUtils.getGroup(); + Assert.assertNotNull(group); + } catch (IOException e) { + Assert.fail("get group failed " + e.getMessage()); + } + } + + @Test + public void createUser() { + boolean result = OSUtils.createUser("test123"); + Assert.assertTrue(result); + } + + @Test + 
public void exeCmd() { + if(OSUtils.isMacOS() || !OSUtils.isWindows()){ + try { + String result = OSUtils.exeCmd("echo helloWorld"); + Assert.assertEquals("helloWorld\n",result); + } catch (IOException e) { + Assert.fail("exeCmd " + e.getMessage()); + } + } + } + @Test + public void getProcessID(){ + int processId = OSUtils.getProcessID(); + Assert.assertNotEquals(0, processId); + } + @Test + public void getHost(){ + String host = OSUtils.getHost(); + Assert.assertNotNull(host); + Assert.assertNotEquals("", host); + } + @Test + public void checkResource(){ + boolean resource = OSUtils.checkResource(100,0); + Assert.assertTrue(resource); + resource = OSUtils.checkResource(0,Double.MAX_VALUE); + Assert.assertFalse(resource); + + Configuration configuration = new PropertiesConfiguration(); + + configuration.setProperty(Constants.MASTER_MAX_CPULOAD_AVG,100); + configuration.setProperty(Constants.MASTER_RESERVED_MEMORY,0); + resource = OSUtils.checkResource(configuration,true); + Assert.assertTrue(resource); + + configuration.setProperty(Constants.MASTER_MAX_CPULOAD_AVG,0); + configuration.setProperty(Constants.MASTER_RESERVED_MEMORY,Double.MAX_VALUE); + resource = OSUtils.checkResource(configuration,true); + Assert.assertFalse(resource); + + configuration.setProperty(Constants.WORKER_MAX_CPULOAD_AVG,100); + configuration.setProperty(Constants.WORKER_RESERVED_MEMORY,0); + resource = OSUtils.checkResource(configuration,false); + Assert.assertTrue(resource); + + configuration.setProperty(Constants.WORKER_MAX_CPULOAD_AVG,0); + configuration.setProperty(Constants.WORKER_RESERVED_MEMORY,Double.MAX_VALUE); + resource = OSUtils.checkResource(configuration,false); + Assert.assertFalse(resource); + + } + } diff --git a/dolphinscheduler-dao/pom.xml b/dolphinscheduler-dao/pom.xml index b3b22c128d..20d19410e2 100644 --- a/dolphinscheduler-dao/pom.xml +++ b/dolphinscheduler-dao/pom.xml @@ -116,21 +116,6 @@ cron-utils - - org.quartz-scheduler - quartz - - - c3p0 - c3p0 - - - - - - 
org.quartz-scheduler - quartz-jobs - commons-configuration commons-configuration diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java index 41a9b3a566..a46e5aabcc 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/BaseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/BaseDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; /** * data source base class diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java similarity index 95% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java index fe76497ff8..e159f81d2e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/ClickHouseDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ClickHouseDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; @@ -38,7 +38,7 @@ public class ClickHouseDataSource extends BaseDataSource { @Override public String getJdbcUrl() { String jdbcUrl = getAddress(); - if (jdbcUrl.lastIndexOf("/") != (jdbcUrl.length() - 1)) { + if (jdbcUrl.lastIndexOf('/') != (jdbcUrl.length() - 1)) { jdbcUrl += "/"; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java index 6fdc233455..a3bc6a0150 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/ConnectionFactory.java @@ -20,8 +20,6 @@ import com.alibaba.druid.pool.DruidDataSource; import com.baomidou.mybatisplus.core.MybatisConfiguration; import com.baomidou.mybatisplus.extension.plugins.PaginationInterceptor; import com.baomidou.mybatisplus.extension.spring.MybatisSqlSessionFactoryBean; -import org.apache.commons.configuration.ConfigurationException; -import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.dolphinscheduler.common.Constants; import org.apache.ibatis.mapping.Environment; import org.apache.ibatis.session.SqlSession; @@ -31,8 +29,6 @@ import org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory; import org.mybatis.spring.SqlSessionTemplate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.context.annotation.Bean; -import org.springframework.stereotype.Service; import javax.sql.DataSource; @@ -117,7 +113,6 @@ public class ConnectionFactory extends SpringConnectionFactory{ 
sqlSessionFactoryBean.setTypeEnumsPackage("org.apache.dolphinscheduler.*.enums"); sqlSessionFactory = sqlSessionFactoryBean.getObject(); - return sqlSessionFactory; } } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DB2ServerDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DB2ServerDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java index 44ee200c5d..3c2366b5b0 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DB2ServerDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DB2ServerDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java similarity index 98% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java index 48ec319eaa..9571f9c9f6 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/DataSourceFactory.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/DataSourceFactory.java @@ -14,11 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java similarity index 93% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java index cddedd1f73..0a8f527536 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/HiveDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java @@ -14,14 +14,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.sql.*; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; /** * data source of hive diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java index fa149e67e2..6e2fbe3dd8 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/MySQLDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/MySQLDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java index c3dc3a96df..cefaf879b5 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/OracleDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/OracleDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java index 4989e7681e..176cba2587 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/PostgreDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/PostgreDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java index 8554992efc..07770c06a7 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SQLServerDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SQLServerDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java rename to dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java index 5d10c63e5d..81a5ac6f04 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/job/db/SparkDataSource.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/SparkDataSource.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.job.db; +package org.apache.dolphinscheduler.dao.datasource; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.StringUtils; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java index 54c96e932d..fb5d0cda42 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/ProcessInstance.java @@ -139,6 +139,12 @@ public class ProcessInstance { */ private int executorId; + /** + * executor name + */ + @TableField(exist = false) + private String executorName; + /** * tenant code */ @@ -472,6 +478,14 @@ public class ProcessInstance { return historyCmd; } + public String getExecutorName() { + return executorName; + } + + public void setExecutorName(String executorName) { + this.executorName = executorName; + } + public void setHistoryCmd(String historyCmd) { this.historyCmd = historyCmd; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java index 2db1eda8f4..c692575e3a 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java @@ -196,6 +196,17 @@ public class TaskInstance { */ private int workerGroupId; + /** + * executor id + */ + private int executorId; + + /** + * executor name + */ + @TableField(exist = false) + private String executorName; + public void init(String host,Date startTime,String executePath){ @@ -363,7 +374,7 @@ public class TaskInstance { public Boolean isSubProcess(){ - return 
TaskType.SUB_PROCESS.toString().equals(this.taskType.toUpperCase()); + return TaskType.SUB_PROCESS.getDescp().equals(this.taskType); } public String getDependency(){ @@ -415,6 +426,22 @@ public class TaskInstance { this.retryInterval = retryInterval; } + public int getExecutorId() { + return executorId; + } + + public void setExecutorId(int executorId) { + this.executorId = executorId; + } + + public String getExecutorName() { + return executorName; + } + + public void setExecutorName(String executorName) { + this.executorName = executorName; + } + public Boolean isTaskComplete() { return this.getState().typeIsPause() @@ -466,6 +493,14 @@ public class TaskInstance { this.workerGroupId = workerGroupId; } + public String getDependentResult() { + return dependentResult; + } + + public void setDependentResult(String dependentResult) { + this.dependentResult = dependentResult; + } + @Override public String toString() { return "TaskInstance{" + @@ -485,27 +520,21 @@ public class TaskInstance { ", logPath='" + logPath + '\'' + ", retryTimes=" + retryTimes + ", alertFlag=" + alertFlag + - ", flag=" + flag + ", processInstance=" + processInstance + ", processDefine=" + processDefine + ", pid=" + pid + ", appLink='" + appLink + '\'' + ", flag=" + flag + - ", dependency=" + dependency + + ", dependency='" + dependency + '\'' + ", duration=" + duration + ", maxRetryTimes=" + maxRetryTimes + ", retryInterval=" + retryInterval + ", taskInstancePriority=" + taskInstancePriority + ", processInstancePriority=" + processInstancePriority + - ", workGroupId=" + workerGroupId + + ", dependentResult='" + dependentResult + '\'' + + ", workerGroupId=" + workerGroupId + + ", executorId=" + executorId + + ", executorName='" + executorName + '\'' + '}'; } - - public String getDependentResult() { - return dependentResult; - } - - public void setDependentResult(String dependentResult) { - this.dependentResult = dependentResult; - } } diff --git 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java index 18b643513f..c358cab3f3 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/CommandMapper.java @@ -16,13 +16,10 @@ */ package org.apache.dolphinscheduler.dao.mapper; +import com.baomidou.mybatisplus.core.mapper.BaseMapper; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.CommandCount; -import com.baomidou.mybatisplus.core.conditions.Wrapper; -import com.baomidou.mybatisplus.core.mapper.BaseMapper; -import com.baomidou.mybatisplus.core.toolkit.Constants; import org.apache.ibatis.annotations.Param; -import org.apache.ibatis.annotations.Select; import java.util.Date; import java.util.List; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java index 167928cb4b..9a5f261254 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.java @@ -82,6 +82,7 @@ public interface ProcessInstanceMapper extends BaseMapper { @Param("projectId") int projectId, @Param("processDefinitionId") Integer processDefinitionId, @Param("searchVal") String searchVal, + @Param("executorId") Integer executorId, @Param("states") int[] statusArray, @Param("host") String host, @Param("startTime") Date startTime, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java index 12ce29faf4..ac23b25c9c 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.java @@ -16,13 +16,12 @@ */ package org.apache.dolphinscheduler.dao.mapper; -import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; -import org.apache.dolphinscheduler.dao.entity.TaskInstance; import com.baomidou.mybatisplus.core.mapper.BaseMapper; import com.baomidou.mybatisplus.core.metadata.IPage; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.Flag; -import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.ibatis.annotations.Param; import java.util.Date; @@ -64,6 +63,7 @@ public interface TaskInstanceMapper extends BaseMapper { @Param("processInstanceId") Integer processInstanceId, @Param("searchVal") String searchVal, @Param("taskName") String taskName, + @Param("executorId") int executorId, @Param("states") int[] statusArray, @Param("host") String host, @Param("startTime") Date startTime, diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java index ac38ddd2e8..7a4dc655f7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DagHelper.java @@ -319,18 +319,14 @@ public class DagHelper { DAG dag = new DAG<>(); - /** - * add vertex - */ + //add vertex if (CollectionUtils.isNotEmpty(processDag.getNodes())){ for (TaskNode node : processDag.getNodes()){ 
dag.addNode(node.getName(),node); } } - /** - * add edge - */ + //add edge if (CollectionUtils.isNotEmpty(processDag.getEdges())){ for (TaskNodeRelation edge : processDag.getEdges()){ dag.addEdge(edge.getStartNode(),edge.getEndNode()); @@ -338,4 +334,31 @@ public class DagHelper { } return dag; } + + /** + * get process dag + * @param taskNodeList task node list + * @return Process dag + */ + public static ProcessDag getProcessDag(List taskNodeList) { + List taskNodeRelations = new ArrayList<>(); + + // Traverse node information and build relationships + for (TaskNode taskNode : taskNodeList) { + String preTasks = taskNode.getPreTasks(); + List preTasksList = JSONUtils.toList(preTasks, String.class); + + // If the dependency is not empty + if (preTasksList != null) { + for (String depNode : preTasksList) { + taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); + } + } + } + + ProcessDag processDag = new ProcessDag(); + processDag.setEdges(taskNodeRelations); + processDag.setNodes(taskNodeList); + return processDag; + } } diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml index d37ea92140..3559ca9c85 100644 --- a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapper.xml @@ -90,6 +90,9 @@ and instance.host like concat('%', #{host}, '%') + + and instance.executor_id = #{executorId} + order by instance.start_time desc diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml index 3a1fddd288..143761bf8c 100644 --- 
a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapper.xml @@ -124,6 +124,9 @@ and instance.host like concat('%', #{host}, '%') + + and instance.executor_id = #{executorId} + order by instance.start_time desc diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java new file mode 100644 index 0000000000..e165da1e88 --- /dev/null +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.dao.entity; + +import org.junit.Assert; +import org.junit.Test; + +public class TaskInstanceTest { + + /** + * task instance sub process + */ + @Test + public void testTaskInstanceIsSubProcess() { + TaskInstance taskInstance = new TaskInstance(); + + //sub process + taskInstance.setTaskType("sub process"); + Assert.assertTrue(taskInstance.isSubProcess()); + + //not sub process + taskInstance.setTaskType("http"); + Assert.assertFalse(taskInstance.isSubProcess()); + } +} diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java index 57b002e18d..5ba2936aaf 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ConnectionFactoryTest.java @@ -32,6 +32,6 @@ public class ConnectionFactoryTest { @Test public void testConnection()throws Exception{ Connection connection = ConnectionFactory.getDataSource().getPooledConnection().getConnection(); - Assert.assertEquals(connection != null , true); + Assert.assertTrue(connection != null); } } \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java index 951b3f0e38..3b307cc2ad 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ProcessInstanceMapperTest.java @@ -165,6 +165,7 @@ public class ProcessInstanceMapperTest { processDefinition.getProjectId(), processInstance.getProcessDefinitionId(), processInstance.getName(), + 0, stateArray, 
processInstance.getHost(), null, diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java index c60cc3a655..51a22b756e 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/TaskInstanceMapperTest.java @@ -121,7 +121,7 @@ public class TaskInstanceMapperTest { } /** - * test find vaild task list by process instance id + * test find valid task list by process instance id */ @Test public void testFindValidTaskListByProcessId() { @@ -282,6 +282,7 @@ public class TaskInstanceMapperTest { task.getProcessInstanceId(), "", "", + 0, new int[0], "", null,null diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java index a1e3f819e3..95c7d2f086 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java +++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/utils/DagHelperTest.java @@ -24,6 +24,8 @@ import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessData; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.junit.Assert; import org.junit.Test; @@ -37,7 +39,6 @@ import java.util.Map; * dag helper test */ public class DagHelperTest { - /** * test task node can submit * @throws JsonProcessingException if error throws JsonProcessingException @@ -131,4 +132,20 @@ public 
class DagHelperTest { return DagHelper.buildDagGraph(processDag); } + @Test + public void testBuildDagGraph() { + String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," + + "\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," + + "\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," + + "\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," + + "\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}"; + + ProcessData processData = JSONUtils.parseObject(shellJson, ProcessData.class); + assert processData != null; + List taskNodeList = processData.getTasks(); + ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); + DAG dag = DagHelper.buildDagGraph(processDag); + Assert.assertNotNull(dag); + } + } diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index b43daff41b..80ddfec9b0 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -41,7 +41,6 @@ org.apache.dolphinscheduler dolphinscheduler-api - @@ -97,7 +96,6 @@ - diff --git a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml index b4326c6795..28bbb361cd 100644 --- a/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml +++ b/dolphinscheduler-dist/src/main/assembly/dolphinscheduler-binary.xml @@ -112,6 +112,21 @@ + + + + ${basedir}/../dolphinscheduler-service/src/main/resources + + **/*.properties + **/*.xml + **/*.json + **/*.yml + + conf + + + + ${basedir}/../dolphinscheduler-server/target/dolphinscheduler-server-${project.version} diff --git a/dolphinscheduler-remote/pom.xml b/dolphinscheduler-remote/pom.xml new file mode 100644 index 0000000000..b67b033ffa --- /dev/null +++ b/dolphinscheduler-remote/pom.xml @@ -0,0 
+1,44 @@ + + + + + dolphinscheduler + org.apache.dolphinscheduler + 1.2.1-SNAPSHOT + + 4.0.0 + + dolphinscheduler-remote + + dolphinscheduler-remote + + http://www.example.com + + + UTF-8 + 1.7 + 1.7 + + + + + io.netty + netty-all + + + org.slf4j + slf4j-api + + + com.alibaba + fastjson + + + junit + junit + test + + + + diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java new file mode 100644 index 0000000000..96258d752a --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java @@ -0,0 +1,352 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote; + +import io.netty.bootstrap.Bootstrap; +import io.netty.channel.*; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.SocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import org.apache.dolphinscheduler.remote.codec.NettyDecoder; +import org.apache.dolphinscheduler.remote.codec.NettyEncoder; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.exceptions.RemotingException; +import org.apache.dolphinscheduler.remote.exceptions.RemotingTimeoutException; +import org.apache.dolphinscheduler.remote.exceptions.RemotingTooMuchRequestException; +import org.apache.dolphinscheduler.remote.future.InvokeCallback; +import org.apache.dolphinscheduler.remote.future.ReleaseSemaphore; +import org.apache.dolphinscheduler.remote.future.ResponseFuture; +import org.apache.dolphinscheduler.remote.handler.NettyClientHandler; +import org.apache.dolphinscheduler.remote.utils.Address; +import org.apache.dolphinscheduler.remote.utils.CallerThreadExecutePolicy; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.InetSocketAddress; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * remoting netty client + */ +public class NettyRemotingClient { + + private final Logger logger = LoggerFactory.getLogger(NettyRemotingClient.class); + + /** + * client bootstrap + */ + private final Bootstrap bootstrap = new Bootstrap(); + + /** + * encoder + */ + private final NettyEncoder encoder = new NettyEncoder(); + + /** + * channels + */ + private final ConcurrentHashMap channels = new ConcurrentHashMap(128); + + /** + * started flag + */ + private final AtomicBoolean isStarted = new AtomicBoolean(false); 
+ + /** + * worker group + */ + private final NioEventLoopGroup workerGroup; + + /** + * client config + */ + private final NettyClientConfig clientConfig; + + /** + * saync semaphore + */ + private final Semaphore asyncSemaphore = new Semaphore(200, true); + + /** + * callback thread executor + */ + private final ExecutorService callbackExecutor; + + /** + * client handler + */ + private final NettyClientHandler clientHandler; + + /** + * response future executor + */ + private final ScheduledExecutorService responseFutureExecutor; + + /** + * client init + * @param clientConfig client config + */ + public NettyRemotingClient(final NettyClientConfig clientConfig){ + this.clientConfig = clientConfig; + this.workerGroup = new NioEventLoopGroup(clientConfig.getWorkerThreads(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyClient_%d", this.threadIndex.incrementAndGet())); + } + }); + this.callbackExecutor = new ThreadPoolExecutor(5, 10, 1, TimeUnit.MINUTES, + new LinkedBlockingQueue<>(1000), new NamedThreadFactory("CallbackExecutor", 10), + new CallerThreadExecutePolicy()); + this.clientHandler = new NettyClientHandler(this, callbackExecutor); + + this.responseFutureExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("ResponseFutureExecutor")); + + this.start(); + } + + /** + * start + */ + private void start(){ + + this.bootstrap + .group(this.workerGroup) + .channel(NioSocketChannel.class) + .option(ChannelOption.SO_KEEPALIVE, clientConfig.isSoKeepalive()) + .option(ChannelOption.TCP_NODELAY, clientConfig.isTcpNoDelay()) + .option(ChannelOption.SO_SNDBUF, clientConfig.getSendBufferSize()) + .option(ChannelOption.SO_RCVBUF, clientConfig.getReceiveBufferSize()) + .handler(new ChannelInitializer() { + @Override + public void initChannel(SocketChannel ch) throws Exception { + ch.pipeline().addLast( + new 
NettyDecoder(), + clientHandler, + encoder); + } + }); + this.responseFutureExecutor.scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + ResponseFuture.scanFutureTable(); + } + }, 5000, 1000, TimeUnit.MILLISECONDS); + // + isStarted.compareAndSet(false, true); + } + + /** + * async send + * @param address address + * @param command command + * @param timeoutMillis timeoutMillis + * @param invokeCallback callback function + * @throws InterruptedException + * @throws RemotingException + */ + public void sendAsync(final Address address, final Command command, + final long timeoutMillis, + final InvokeCallback invokeCallback) throws InterruptedException, RemotingException { + final Channel channel = getChannel(address); + if (channel == null) { + throw new RemotingException("network error"); + } + /** + * request unique identification + */ + final long opaque = command.getOpaque(); + /** + * control concurrency number + */ + boolean acquired = this.asyncSemaphore.tryAcquire(timeoutMillis, TimeUnit.MILLISECONDS); + if(acquired){ + final ReleaseSemaphore releaseSemaphore = new ReleaseSemaphore(this.asyncSemaphore); + + /** + * response future + */ + final ResponseFuture responseFuture = new ResponseFuture(opaque, + timeoutMillis, + invokeCallback, + releaseSemaphore); + try { + channel.writeAndFlush(command).addListener(new ChannelFutureListener(){ + + @Override + public void operationComplete(ChannelFuture future) throws Exception { + if(future.isSuccess()){ + responseFuture.setSendOk(true); + return; + } else { + responseFuture.setSendOk(false); + } + responseFuture.setCause(future.cause()); + responseFuture.putResponse(null); + try { + responseFuture.executeInvokeCallback(); + } catch (Throwable ex){ + logger.error("execute callback error", ex); + } finally{ + responseFuture.release(); + } + } + }); + } catch (Throwable ex){ + responseFuture.release(); + throw new RemotingException(String.format("send command to address: %s failed", address), ex); 
+ } + } else{ + String message = String.format("try to acquire async semaphore timeout: %d, waiting thread num: %d, total permits: %d", + timeoutMillis, asyncSemaphore.getQueueLength(), asyncSemaphore.availablePermits()); + throw new RemotingTooMuchRequestException(message); + } + } + + /** + * sync send + * @param address address + * @param command command + * @param timeoutMillis timeoutMillis + * @return command + * @throws InterruptedException + * @throws RemotingException + */ + public Command sendSync(final Address address, final Command command, final long timeoutMillis) throws InterruptedException, RemotingException { + final Channel channel = getChannel(address); + if (channel == null) { + throw new RemotingException(String.format("connect to : %s fail", address)); + } + final long opaque = command.getOpaque(); + final ResponseFuture responseFuture = new ResponseFuture(opaque, timeoutMillis, null, null); + channel.writeAndFlush(command).addListener(new ChannelFutureListener() { + @Override + public void operationComplete(ChannelFuture future) throws Exception { + if(future.isSuccess()){ + responseFuture.setSendOk(true); + return; + } else { + responseFuture.setSendOk(false); + } + responseFuture.setCause(future.cause()); + responseFuture.putResponse(null); + logger.error("send command {} to address {} failed", command, address); + } + }); + /** + * sync wait for result + */ + Command result = responseFuture.waitResponse(); + if(result == null){ + if(responseFuture.isSendOK()){ + throw new RemotingTimeoutException(address.toString(), timeoutMillis, responseFuture.getCause()); + } else{ + throw new RemotingException(address.toString(), responseFuture.getCause()); + } + } + return result; + } + + /** + * get channel + * @param address + * @return + */ + public Channel getChannel(Address address) { + Channel channel = channels.get(address); + if(channel != null && channel.isActive()){ + return channel; + } + return createChannel(address, true); + } + + /** + * 
create channel + * @param address address + * @param isSync sync flag + * @return channel + */ + public Channel createChannel(Address address, boolean isSync) { + ChannelFuture future; + try { + synchronized (bootstrap){ + future = bootstrap.connect(new InetSocketAddress(address.getHost(), address.getPort())); + } + if(isSync){ + future.sync(); + } + if (future.isSuccess()) { + Channel channel = future.channel(); + channels.put(address, channel); + return channel; + } + } catch (Exception ex) { + logger.info("connect to {} error {}", address, ex); + } + return null; + } + + /** + * close + */ + public void close() { + if(isStarted.compareAndSet(true, false)){ + try { + closeChannels(); + if(workerGroup != null){ + this.workerGroup.shutdownGracefully(); + } + if(callbackExecutor != null){ + this.callbackExecutor.shutdownNow(); + } + if(this.responseFutureExecutor != null){ + this.responseFutureExecutor.shutdownNow(); + } + } catch (Exception ex) { + logger.error("netty client close exception", ex); + } + logger.info("netty client closed"); + } + } + + /** + * close channels + */ + private void closeChannels(){ + for (Channel channel : this.channels.values()) { + channel.close(); + } + this.channels.clear(); + } + + /** + * close channel + * @param address address + */ + public void closeChannel(Address address){ + Channel channel = this.channels.remove(address); + if(channel != null){ + channel.close(); + } + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java new file mode 100644 index 0000000000..29b2317633 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java @@ -0,0 +1,220 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.ChannelOption; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.channel.socket.nio.NioSocketChannel; +import org.apache.dolphinscheduler.remote.codec.NettyDecoder; +import org.apache.dolphinscheduler.remote.codec.NettyEncoder; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.remote.handler.NettyServerHandler; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.Constants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * remoting netty server + */ +public class NettyRemotingServer { + + private final Logger logger = 
LoggerFactory.getLogger(NettyRemotingServer.class); + + /** + * server bootstrap + */ + private final ServerBootstrap serverBootstrap = new ServerBootstrap(); + + /** + * encoder + */ + private final NettyEncoder encoder = new NettyEncoder(); + + /** + * default executor + */ + private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS); + + /** + * boss group + */ + private final NioEventLoopGroup bossGroup; + + /** + * worker group + */ + private final NioEventLoopGroup workGroup; + + /** + * server config + */ + private final NettyServerConfig serverConfig; + + /** + * server handler + */ + private final NettyServerHandler serverHandler = new NettyServerHandler(this); + + /** + * started flag + */ + private final AtomicBoolean isStarted = new AtomicBoolean(false); + + /** + * server init + * + * @param serverConfig server config + */ + public NettyRemotingServer(final NettyServerConfig serverConfig){ + this.serverConfig = serverConfig; + + this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet())); + } + }); + + this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() { + private AtomicInteger threadIndex = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet())); + } + }); + } + + /** + * server start + */ + public void start(){ + + if(this.isStarted.get()){ + return; + } + + this.serverBootstrap + .group(this.bossGroup, this.workGroup) + .channel(NioServerSocketChannel.class) + .option(ChannelOption.SO_REUSEADDR, true) + .option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog()) + .childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive()) + 
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay()) + .childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize()) + .childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize()) + .childHandler(new ChannelInitializer() { + + @Override + protected void initChannel(NioSocketChannel ch) throws Exception { + initNettyChannel(ch); + } + }); + + ChannelFuture future; + try { + future = serverBootstrap.bind(serverConfig.getListenPort()).sync(); + } catch (Exception e) { + logger.error("NettyRemotingServer bind fail {}, exit", e); + throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort())); + } + if (future.isSuccess()) { + logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort()); + } else if (future.cause() != null) { + throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()), future.cause()); + } else { + throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort())); + } + // + isStarted.compareAndSet(false, true); + } + + /** + * init netty channel + * @param ch socket channel + * @throws Exception + */ + private void initNettyChannel(NioSocketChannel ch) throws Exception{ + ChannelPipeline pipeline = ch.pipeline(); + pipeline.addLast("encoder", encoder); + pipeline.addLast("decoder", new NettyDecoder()); + pipeline.addLast("handler", serverHandler); + } + + /** + * register processor + * @param commandType command type + * @param processor processor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { + this.registerProcessor(commandType, processor, null); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + * @param executor thread executor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor 
processor, final ExecutorService executor) { + this.serverHandler.registerProcessor(commandType, processor, executor); + } + + /** + * get default thread executor + * @return thread executor + */ + public ExecutorService getDefaultExecutor() { + return defaultExecutor; + } + + public void close() { + if(isStarted.compareAndSet(true, false)){ + try { + if(bossGroup != null){ + this.bossGroup.shutdownGracefully(); + } + if(workGroup != null){ + this.workGroup.shutdownGracefully(); + } + if(defaultExecutor != null){ + defaultExecutor.shutdown(); + } + } catch (Exception ex) { + logger.error("netty server close exception", ex); + } + logger.info("netty server closed"); + } + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java new file mode 100644 index 0000000000..caa4fbdd17 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.codec; + + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.ReplayingDecoder; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandHeader; +import org.apache.dolphinscheduler.remote.command.CommandType; + +import java.util.List; + +/** + * netty decoder + */ +public class NettyDecoder extends ReplayingDecoder { + + public NettyDecoder(){ + super(State.MAGIC); + } + + private final CommandHeader commandHeader = new CommandHeader(); + + /** + * decode + * + * @param ctx channel handler context + * @param in byte buffer + * @param out out content + * @throws Exception + */ + @Override + protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws Exception { + switch (state()){ + case MAGIC: + checkMagic(in.readByte()); + checkpoint(State.COMMAND); + case COMMAND: + commandHeader.setType(in.readByte()); + checkpoint(State.OPAQUE); + case OPAQUE: + commandHeader.setOpaque(in.readLong()); + checkpoint(State.BODY_LENGTH); + case BODY_LENGTH: + commandHeader.setBodyLength(in.readInt()); + checkpoint(State.BODY); + case BODY: + byte[] body = new byte[commandHeader.getBodyLength()]; + in.readBytes(body); + // + Command packet = new Command(); + packet.setType(commandType(commandHeader.getType())); + packet.setOpaque(commandHeader.getOpaque()); + packet.setBody(body); + out.add(packet); + // + checkpoint(State.MAGIC); + } + } + + /** + * get command type + * @param type type + * @return + */ + private CommandType commandType(byte type){ + for(CommandType ct : CommandType.values()){ + if(ct.ordinal() == type){ + return ct; + } + } + return null; + } + + /** + * check magic + * @param magic magic + */ + private void checkMagic(byte magic) { + if (magic != Command.MAGIC) { + throw new IllegalArgumentException("illegal packet [magic]" + magic); + } + } + + enum State{ + MAGIC, + 
COMMAND, + OPAQUE, + BODY_LENGTH, + BODY; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java new file mode 100644 index 0000000000..4e9836a26f --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyEncoder.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.codec; + +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandler.Sharable; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToByteEncoder; +import org.apache.dolphinscheduler.remote.command.Command; + +/** + * netty encoder + */ +@Sharable +public class NettyEncoder extends MessageToByteEncoder { + + /** + * encode + * + * @param ctx channel handler context + * @param msg command + * @param out byte buffer + * @throws Exception + */ + @Override + protected void encode(ChannelHandlerContext ctx, Command msg, ByteBuf out) throws Exception { + if(msg == null){ + throw new Exception("encode msg is null"); + } + out.writeByte(Command.MAGIC); + out.writeByte(msg.getType().ordinal()); + out.writeLong(msg.getOpaque()); + out.writeInt(msg.getBody().length); + out.writeBytes(msg.getBody()); + } + +} + diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java new file mode 100644 index 0000000000..86ba79c884 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Command.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.command; + +import com.sun.org.apache.regexp.internal.RE; + +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * receive task log request command and content fill + * for netty data serializable transfer + */ +public class Command implements Serializable { + + private static final AtomicLong REQUEST_ID = new AtomicLong(1); + + public static final byte MAGIC = (byte) 0xbabe; + + public Command(){ + this.opaque = REQUEST_ID.getAndIncrement(); + } + + public Command(long opaque){ + this.opaque = opaque; + } + + /** + * command type + */ + private CommandType type; + + /** + * request unique identification + */ + private long opaque; + + /** + * data body + */ + private byte[] body; + + public CommandType getType() { + return type; + } + + public void setType(CommandType type) { + this.type = type; + } + + public long getOpaque() { + return opaque; + } + + public void setOpaque(long opaque) { + this.opaque = opaque; + } + + public byte[] getBody() { + return body; + } + + public void setBody(byte[] body) { + this.body = body; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (int) (opaque ^ (opaque >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Command other = (Command) obj; + return opaque == other.opaque; + } + + @Override + public String toString() { + return "Command [type=" + type + ", opaque=" + opaque + ", bodyLen=" + (body == null ? 
0 : body.length) + "]"; + } + +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java new file mode 100644 index 0000000000..78948a5c0c --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandHeader.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.command; + +import java.io.Serializable; + +/** + * command header + */ +public class CommandHeader implements Serializable { + + /** + * type + */ + private byte type; + + /** + * request unique identification + */ + private long opaque; + + /** + * body length + */ + private int bodyLength; + + public int getBodyLength() { + return bodyLength; + } + + public void setBodyLength(int bodyLength) { + this.bodyLength = bodyLength; + } + + public byte getType() { + return type; + } + + public void setType(byte type) { + this.type = type; + } + + public long getOpaque() { + return opaque; + } + + public void setOpaque(long opaque) { + this.opaque = opaque; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java new file mode 100644 index 0000000000..b1b24d3303 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/CommandType.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.dolphinscheduler.remote.command; public enum CommandType { /** * roll view log request */ ROLL_VIEW_LOG_REQUEST, /** * roll view log response */ ROLL_VIEW_LOG_RESPONSE, /** * view whole log request */ VIEW_WHOLE_LOG_REQUEST, /** * view whole log response */ VIEW_WHOLE_LOG_RESPONSE, /** * get log bytes request */ GET_LOG_BYTES_REQUEST, /** * get log bytes response */ GET_LOG_BYTES_RESPONSE, WORKER_REQUEST, MASTER_RESPONSE, /** * execute task request */ EXECUTE_TASK_REQUEST, /** * execute task response */ EXECUTE_TASK_RESPONSE, /** * ping */ PING, /** * pong */ PONG; } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java new file mode 100644 index 0000000000..beec055403 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskRequestCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.List; import java.util.concurrent.atomic.AtomicLong; /** * execute task request command */ public class ExecuteTaskRequestCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * application name */ private String applicationName; /** * group name */ private String groupName; /** * task name */ private String taskName; /** * connector port */ private int connectorPort; /** * description info */ private String description; /** * class name */ private String className; /** * method name */ private String methodName; /** * parameters */ private String params; /** * shard itemds */ private List shardItems; public List getShardItems() { return shardItems; } public void setShardItems(List shardItems) { this.shardItems = shardItems; } public String getParams() { return params; } public void setParams(String params) { this.params = params; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public String getApplicationName() { return applicationName; } public void setApplicationName(String applicationName) { this.applicationName = applicationName; } public String getGroupName() { return groupName; } public void setGroupName(String groupName) { this.groupName = groupName; } public String getTaskName() { return taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public int getConnectorPort() { return connectorPort; } public void setConnectorPort(int connectorPort) { this.connectorPort = connectorPort; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public String getClassName() { return className; } public void setClassName(String className) { this.className = 
className; } public String getMethodName() { return methodName; } public void setMethodName(String methodName) { this.methodName = methodName; } /** * package request command * * @return command */ public Command convert2Command(){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_REQUEST); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java new file mode 100644 index 0000000000..7e35fa6e75 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/ExecuteTaskResponseCommand.java @@ -0,0 +1 @@ +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package org.apache.dolphinscheduler.remote.command; import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; import java.io.Serializable; import java.util.concurrent.atomic.AtomicLong; /** * execute task response command */ public class ExecuteTaskResponseCommand implements Serializable { /** * task id */ private String taskId; /** * attempt id */ private String attemptId; /** * return result */ private Object result; /** * received time */ private long receivedTime; /** * execute count */ private int executeCount; /** * execute time */ private long executeTime; public String getAttemptId() { return attemptId; } public void setAttemptId(String attemptId) { this.attemptId = attemptId; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } public Object getResult() { return result; } public void setResult(Object result) { this.result = result; } public long getReceivedTime() { return receivedTime; } public void setReceivedTime(long receivedTime) { this.receivedTime = receivedTime; } public int getExecuteCount() { return executeCount; } public void setExecuteCount(int executeCount) { this.executeCount = executeCount; } public long getExecuteTime() { return executeTime; } public void setExecuteTime(long executeTime) { this.executeTime = executeTime; } public Command convert2Command(long opaque){ Command command = new Command(); command.setType(CommandType.EXECUTE_TASK_RESPONSE); byte[] body = FastJsonSerializer.serialize(this); command.setBody(body); return command; } } \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java new file mode 100644 index 0000000000..c50413e98a --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Ping.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; + +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * ping machine + */ +public class Ping implements Serializable { + + /** + * ping body + */ + protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; + + /** + * request command body + */ + private static byte[] EMPTY_BODY_ARRAY = new byte[0]; + + private static final ByteBuf PING_BUF; + + static { + ByteBuf ping = Unpooled.buffer(); + ping.writeByte(Command.MAGIC); + ping.writeByte(CommandType.PING.ordinal()); + ping.writeLong(0); + ping.writeInt(0); + ping.writeBytes(EMPTY_BODY); + PING_BUF = Unpooled.unreleasableBuffer(ping).asReadOnly(); + } + + /** + * ping content + * @return result + */ + public static ByteBuf pingContent(){ + return PING_BUF.duplicate(); + } + + /** + * create ping command + * + * @return command + */ + public static Command create(){ + Command command = new Command(); + command.setType(CommandType.PING); + command.setBody(EMPTY_BODY_ARRAY); + return command; + } +} diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java new file mode 100644 index 0000000000..e52cef6d92 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/Pong.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; + +import java.io.Serializable; + +/** + * Pong return after ping + */ +public class Pong implements Serializable { + + /** + * pong body + */ + protected static ByteBuf EMPTY_BODY = Unpooled.EMPTY_BUFFER; + + /** + * pong command body + */ + private static byte[] EMPTY_BODY_ARRAY = new byte[0]; + + /** + * ping byte buffer + */ + private static final ByteBuf PONG_BUF; + + static { + ByteBuf ping = Unpooled.buffer(); + ping.writeByte(Command.MAGIC); + ping.writeByte(CommandType.PONG.ordinal()); + ping.writeLong(0); + ping.writeInt(0); + ping.writeBytes(EMPTY_BODY); + PONG_BUF = Unpooled.unreleasableBuffer(ping).asReadOnly(); + } + + /** + * ping content + * @return result + */ + public static ByteBuf pingContent(){ + return PONG_BUF.duplicate(); + } + + /** + * package pong command + * + * @param opaque request unique identification + * @return command + */ + public static Command create(long opaque){ + Command command = new Command(opaque); + command.setType(CommandType.PONG); + command.setBody(EMPTY_BODY_ARRAY); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java new file mode 100644 index 0000000000..4cc32ed42a --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesRequestCommand.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * get log bytes request command + */ +public class GetLogBytesRequestCommand implements Serializable { + + /** + * log path + */ + private String path; + + public GetLogBytesRequestCommand() { + } + + public GetLogBytesRequestCommand(String path) { + this.path = path; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + /** + * package request command + * + * @return command + */ + public Command convert2Command(){ + Command command = new Command(); + command.setType(CommandType.GET_LOG_BYTES_REQUEST); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java new file mode 100644 index 0000000000..deaf9b8d85 --- /dev/null +++ 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/GetLogBytesResponseCommand.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; + +/** + * get log bytes response command + */ +public class GetLogBytesResponseCommand implements Serializable { + + /** + * log byte data + */ + private byte[] data; + + public GetLogBytesResponseCommand() { + } + + public GetLogBytesResponseCommand(byte[] data) { + this.data = data; + } + + public byte[] getData() { + return data; + } + + public void setData(byte[] data) { + this.data = data; + } + + /** + * package response command + * + * @param opaque request unique identification + * @return command + */ + public Command convert2Command(long opaque){ + Command command = new Command(opaque); + command.setType(CommandType.GET_LOG_BYTES_RESPONSE); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } + +} 
diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java new file mode 100644 index 0000000000..621d35a804 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogRequestCommand.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * roll view log request command + */ +public class RollViewLogRequestCommand implements Serializable { + + /** + * log path + */ + private String path; + + /** + * skip line number + */ + private int skipLineNum; + + /** + * query line number + */ + private int limit; + + public RollViewLogRequestCommand() { + } + + public RollViewLogRequestCommand(String path, int skipLineNum, int limit) { + this.path = path; + this.skipLineNum = skipLineNum; + this.limit = limit; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public int getSkipLineNum() { + return skipLineNum; + } + + public void setSkipLineNum(int skipLineNum) { + this.skipLineNum = skipLineNum; + } + + public int getLimit() { + return limit; + } + + public void setLimit(int limit) { + this.limit = limit; + } + + /** + * package request command + * + * @return command + */ + public Command convert2Command(){ + Command command = new Command(); + command.setType(CommandType.ROLL_VIEW_LOG_REQUEST); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java new file mode 100644 index 0000000000..591d787200 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/RollViewLogResponseCommand.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; + +/** + * roll view log response command + */ +public class RollViewLogResponseCommand implements Serializable { + + /** + * response data + */ + private String msg; + + public RollViewLogResponseCommand() { + } + + public RollViewLogResponseCommand(String msg) { + this.msg = msg; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + /** + * package response command + * + * @param opaque request unique identification + * @return command + */ + public Command convert2Command(long opaque){ + Command command = new Command(opaque); + command.setType(CommandType.ROLL_VIEW_LOG_RESPONSE); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java new file mode 100644 index 0000000000..8835348ee3 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogRequestCommand.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; +import java.util.concurrent.atomic.AtomicLong; + +/** + * view log request command + */ +public class ViewLogRequestCommand implements Serializable { + + /** + * log path + */ + private String path; + + public ViewLogRequestCommand() { + } + + public ViewLogRequestCommand(String path) { + this.path = path; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + /** + * package request command + * + * @return command + */ + public Command convert2Command(){ + Command command = new Command(); + command.setType(CommandType.VIEW_WHOLE_LOG_REQUEST); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java new file mode 100644 index 0000000000..dffadade26 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/command/log/ViewLogResponseCommand.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.command.log; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; + +import java.io.Serializable; + +/** + * view log response command + */ +public class ViewLogResponseCommand implements Serializable { + + /** + * response data + */ + private String msg; + + public ViewLogResponseCommand() { + } + + public ViewLogResponseCommand(String msg) { + this.msg = msg; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } + + /** + * package response command + * + * @param opaque request unique identification + * @return command + */ + public Command convert2Command(long opaque){ + Command command = new Command(opaque); + command.setType(CommandType.VIEW_WHOLE_LOG_RESPONSE); + byte[] body = FastJsonSerializer.serialize(this); + command.setBody(body); + return command; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyClientConfig.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyClientConfig.java new file mode 100644 index 0000000000..831e05f7e7 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyClientConfig.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.config; + +import org.apache.dolphinscheduler.remote.utils.Constants; + +/** + * netty client config + */ +public class NettyClientConfig { + + /** + * worker threads,default get machine cpus + */ + private int workerThreads = Constants.CPUS; + + /** + * whether tpc delay + */ + private boolean tcpNoDelay = true; + + /** + * whether keep alive + */ + private boolean soKeepalive = true; + + /** + * send buffer size + */ + private int sendBufferSize = 65535; + + /** + * receive buffer size + */ + private int receiveBufferSize = 65535; + + public int getWorkerThreads() { + return workerThreads; + } + + public void setWorkerThreads(int workerThreads) { + this.workerThreads = workerThreads; + } + + public boolean isTcpNoDelay() { + return tcpNoDelay; + } + + public void setTcpNoDelay(boolean tcpNoDelay) { + this.tcpNoDelay = tcpNoDelay; + } + + public boolean isSoKeepalive() { + return soKeepalive; + } + + public void setSoKeepalive(boolean soKeepalive) { + this.soKeepalive = soKeepalive; + } + + public int getSendBufferSize() { + return sendBufferSize; + } + + public void setSendBufferSize(int sendBufferSize) { + this.sendBufferSize = sendBufferSize; + } + + public int getReceiveBufferSize() { + return receiveBufferSize; + } + + public 
void setReceiveBufferSize(int receiveBufferSize) { + this.receiveBufferSize = receiveBufferSize; + } + +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyServerConfig.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyServerConfig.java new file mode 100644 index 0000000000..4ec8a0f7a7 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/config/NettyServerConfig.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.config; + +import org.apache.dolphinscheduler.remote.utils.Constants; + +/** + * netty server config + */ +public class NettyServerConfig { + + /** + * init the server connectable queue + */ + private int soBacklog = 1024; + + /** + * whether tpc delay + */ + private boolean tcpNoDelay = true; + + /** + * whether keep alive + */ + private boolean soKeepalive = true; + + /** + * send buffer size + */ + private int sendBufferSize = 65535; + + /** + * receive buffer size + */ + private int receiveBufferSize = 65535; + + /** + * worker threads,default get machine cpus + */ + private int workerThread = Constants.CPUS; + + /** + * listen port + */ + private int listenPort = 12346; + + public int getListenPort() { + return listenPort; + } + + public void setListenPort(int listenPort) { + this.listenPort = listenPort; + } + + public int getSoBacklog() { + return soBacklog; + } + + public void setSoBacklog(int soBacklog) { + this.soBacklog = soBacklog; + } + + public boolean isTcpNoDelay() { + return tcpNoDelay; + } + + public void setTcpNoDelay(boolean tcpNoDelay) { + this.tcpNoDelay = tcpNoDelay; + } + + public boolean isSoKeepalive() { + return soKeepalive; + } + + public void setSoKeepalive(boolean soKeepalive) { + this.soKeepalive = soKeepalive; + } + + public int getSendBufferSize() { + return sendBufferSize; + } + + public void setSendBufferSize(int sendBufferSize) { + this.sendBufferSize = sendBufferSize; + } + + public int getReceiveBufferSize() { + return receiveBufferSize; + } + + public void setReceiveBufferSize(int receiveBufferSize) { + this.receiveBufferSize = receiveBufferSize; + } + + public int getWorkerThread() { + return workerThread; + } + + public void setWorkerThread(int workerThread) { + this.workerThread = workerThread; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingException.java 
b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingException.java new file mode 100644 index 0000000000..29d48db8f8 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingException.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.exceptions; + +/** + * remote exception + */ +public class RemotingException extends Exception { + + public RemotingException() { + super(); + } + + /** Constructs a new runtime exception with the specified detail message. + * The cause is not initialized, and may subsequently be initialized by a + * call to {@link #initCause}. + * + * @param message the detail message. The detail message is saved for + * later retrieval by the {@link #getMessage()} method. + */ + public RemotingException(String message) { + super(message); + } + + /** + * Constructs a new runtime exception with the specified detail message and + * cause.

Note that the detail message associated with + * {@code cause} is not automatically incorporated in + * this runtime exception's detail message. + * + * @param message the detail message (which is saved for later retrieval + * by the {@link #getMessage()} method). + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). (A null value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public RemotingException(String message, Throwable cause) { + super(message, cause); + } + + /** Constructs a new runtime exception with the specified cause and a + * detail message of (cause==null ? null : cause.toString()) + * (which typically contains the class and detail message of + * cause). This constructor is useful for runtime exceptions + * that are little more than wrappers for other throwables. + * + * @param cause the cause (which is saved for later retrieval by the + * {@link #getCause()} method). (A null value is + * permitted, and indicates that the cause is nonexistent or + * unknown.) + * @since 1.4 + */ + public RemotingException(Throwable cause) { + super(cause); + } + + /** + * Constructs a new runtime exception with the specified detail + * message, cause, suppression enabled or disabled, and writable + * stack trace enabled or disabled. + * + * @param message the detail message. + * @param cause the cause. (A {@code null} value is permitted, + * and indicates that the cause is nonexistent or unknown.) 
+ * @param enableSuppression whether or not suppression is enabled + * or disabled + * @param writableStackTrace whether or not the stack trace should + * be writable + * + * @since 1.7 + */ + protected RemotingException(String message, Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTimeoutException.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTimeoutException.java new file mode 100644 index 0000000000..3d91ba57f6 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTimeoutException.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.exceptions; + + +/** + * timeout exception + */ +public class RemotingTimeoutException extends RemotingException{ + + public RemotingTimeoutException(String message) { + super(message); + } + + + public RemotingTimeoutException(String address, long timeoutMillis) { + this(address, timeoutMillis, null); + } + + public RemotingTimeoutException(String address, long timeoutMillis, Throwable cause) { + super(String.format("wait response on the channel %s timeout %s", address, timeoutMillis), cause); + } +} diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZkServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTooMuchRequestException.java similarity index 65% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZkServer.java rename to dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTooMuchRequestException.java index d1a0526309..82cc3f4dbf 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/TestZkServer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/exceptions/RemotingTooMuchRequestException.java @@ -14,30 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +package org.apache.dolphinscheduler.remote.exceptions; /** - * demo for using zkServer + * too much request exception */ -public class TestZkServer { - - @Before - public void before(){ - ZKServer.start(); - } - - @Test - public void test(){ - Assert.assertTrue(ZKServer.isStarted()); - } +public class RemotingTooMuchRequestException extends RemotingException{ - @After - public void after(){ - ZKServer.stop(); + public RemotingTooMuchRequestException(String message) { + super(message); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/InvokeCallback.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/InvokeCallback.java new file mode 100644 index 0000000000..84cdae867b --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/InvokeCallback.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.future; + +/** + * invoke callback + */ +public interface InvokeCallback { + + /** + * operation + * + * @param responseFuture responseFuture + */ + void operationComplete(final ResponseFuture responseFuture); + +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ReleaseSemaphore.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ReleaseSemaphore.java new file mode 100644 index 0000000000..95a04b1f1a --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ReleaseSemaphore.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.future; + +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * release semaphore + */ +public class ReleaseSemaphore { + + private final Semaphore semaphore; + + private final AtomicBoolean released; + + public ReleaseSemaphore(Semaphore semaphore){ + this.semaphore = semaphore; + this.released = new AtomicBoolean(false); + } + + public void release(){ + if(this.released.compareAndSet(false, true)){ + this.semaphore.release(); + } + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java new file mode 100644 index 0000000000..ca304646e4 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.remote.future; + +import org.apache.dolphinscheduler.remote.command.Command; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.*; + +/** + * response future + */ +public class ResponseFuture { + + private final static Logger LOGGER = LoggerFactory.getLogger(ResponseFuture.class); + + private final static ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); + + /** + * request unique identification + */ + private final long opaque; + + /** + * timeout + */ + private final long timeoutMillis; + + /** + * invokeCallback function + */ + private final InvokeCallback invokeCallback; + + /** + * releaseSemaphore + */ + private final ReleaseSemaphore releaseSemaphore; + + private final CountDownLatch latch = new CountDownLatch(1); + + private final long beginTimestamp = System.currentTimeMillis(); + + /** + * response command + */ + private volatile Command responseCommand; + + private volatile boolean sendOk = true; + + private volatile Throwable cause; + + public ResponseFuture(long opaque, long timeoutMillis, InvokeCallback invokeCallback, ReleaseSemaphore releaseSemaphore) { + this.opaque = opaque; + this.timeoutMillis = timeoutMillis; + this.invokeCallback = invokeCallback; + this.releaseSemaphore = releaseSemaphore; + FUTURE_TABLE.put(opaque, this); + } + + /** + * wait for response + * + * @return command + * @throws InterruptedException + */ + public Command waitResponse() throws InterruptedException { + this.latch.await(timeoutMillis, TimeUnit.MILLISECONDS); + return this.responseCommand; + } + + /** + * put response + * + * @param responseCommand responseCommand + */ + public void putResponse(final Command responseCommand) { + this.responseCommand = responseCommand; + this.latch.countDown(); + FUTURE_TABLE.remove(opaque); + } + + public static ResponseFuture 
getFuture(long opaque){ + return FUTURE_TABLE.get(opaque); + } + + /** + * whether timeout + * @return timeout + */ + public boolean isTimeout() { + long diff = System.currentTimeMillis() - this.beginTimestamp; + return diff > this.timeoutMillis; + } + + /** + * execute invoke callback + */ + public void executeInvokeCallback() { + if (invokeCallback != null) { + invokeCallback.operationComplete(this); + } + } + + public boolean isSendOK() { + return sendOk; + } + + public void setSendOk(boolean sendOk) { + this.sendOk = sendOk; + } + + public void setCause(Throwable cause) { + this.cause = cause; + } + + public Throwable getCause() { + return cause; + } + + public long getOpaque() { + return opaque; + } + + public long getTimeoutMillis() { + return timeoutMillis; + } + + public long getBeginTimestamp() { + return beginTimestamp; + } + + public Command getResponseCommand() { + return responseCommand; + } + + public void setResponseCommand(Command responseCommand) { + this.responseCommand = responseCommand; + } + + public InvokeCallback getInvokeCallback() { + return invokeCallback; + } + + /** + * release + */ + public void release() { + if(this.releaseSemaphore != null){ + this.releaseSemaphore.release(); + } + } + + @Override + public String toString() { + return "ResponseFuture{" + + "opaque=" + opaque + + ", timeoutMillis=" + timeoutMillis + + ", invokeCallback=" + invokeCallback + + ", releaseSemaphore=" + releaseSemaphore + + ", latch=" + latch + + ", beginTimestamp=" + beginTimestamp + + ", responseCommand=" + responseCommand + + ", sendOk=" + sendOk + + ", cause=" + cause + + '}'; + } + + /** + * scan future table + */ + public static void scanFutureTable(){ + final List futureList = new LinkedList<>(); + Iterator> it = FUTURE_TABLE.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry next = it.next(); + ResponseFuture future = next.getValue(); + if ((future.getBeginTimestamp() + future.getTimeoutMillis() + 1000) <= System.currentTimeMillis()) { + 
futureList.add(future); + it.remove(); + LOGGER.warn("remove timeout request : {}", future); + } + } + for (ResponseFuture future : futureList) { + try { + future.release(); + future.executeInvokeCallback(); + } catch (Throwable ex) { + LOGGER.warn("scanFutureTable, execute callback error", ex); + } + } + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java new file mode 100644 index 0000000000..d5d0d4df83 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.handler; + +import io.netty.channel.*; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.future.ResponseFuture; +import org.apache.dolphinscheduler.remote.utils.ChannelUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ExecutorService; + +/** + * netty client request handler + */ +@ChannelHandler.Sharable +public class NettyClientHandler extends ChannelInboundHandlerAdapter { + + private final Logger logger = LoggerFactory.getLogger(NettyClientHandler.class); + + /** + * netty client + */ + private final NettyRemotingClient nettyRemotingClient; + + /** + * callback thread executor + */ + private final ExecutorService callbackExecutor; + + public NettyClientHandler(NettyRemotingClient nettyRemotingClient, ExecutorService callbackExecutor){ + this.nettyRemotingClient = nettyRemotingClient; + this.callbackExecutor = callbackExecutor; + } + + /** + * When the current channel is not active, + * the current channel has reached the end of its life cycle + * + * @param ctx channel handler context + * @throws Exception + */ + @Override + public void channelInactive(ChannelHandlerContext ctx) throws Exception { + nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel())); + ctx.channel().close(); + } + + /** + * The current channel reads data from the remote + * + * @param ctx channel handler context + * @param msg message + * @throws Exception + */ + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + processReceived((Command)msg); + } + + /** + * process received logic + * + * @param responseCommand responseCommand + */ + private void processReceived(final Command responseCommand) { + ResponseFuture future = ResponseFuture.getFuture(responseCommand.getOpaque()); + if(future != null){ + 
future.setResponseCommand(responseCommand); + future.release(); + if(future.getInvokeCallback() != null){ + this.callbackExecutor.submit(new Runnable() { + @Override + public void run() { + future.executeInvokeCallback(); + } + }); + } else{ + future.putResponse(responseCommand); + } + } else{ + logger.warn("receive response {}, but not matched any request ", responseCommand); + } + } + + /** + * caught exception + * @param ctx channel handler context + * @param cause cause + * @throws Exception + */ + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + logger.error("exceptionCaught : {}", cause); + nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel())); + ctx.channel().close(); + } + + /** + * channel write changed + * + * @param ctx channel handler context + * @throws Exception + */ + @Override + public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { + Channel ch = ctx.channel(); + ChannelConfig config = ch.config(); + + if (!ch.isWritable()) { + if (logger.isWarnEnabled()) { + logger.warn("{} is not writable, over high water level : {}", + new Object[]{ch, config.getWriteBufferHighWaterMark()}); + } + + config.setAutoRead(false); + } else { + if (logger.isWarnEnabled()) { + logger.warn("{} is writable, to low water : {}", + new Object[]{ch, config.getWriteBufferLowWaterMark()}); + } + config.setAutoRead(true); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java new file mode 100644 index 0000000000..eabd6560de --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.handler; + +import io.netty.channel.*; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.ChannelUtils; +import org.apache.dolphinscheduler.remote.utils.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionException; + +/** + * netty server request handler + */ +@ChannelHandler.Sharable +public class NettyServerHandler extends ChannelInboundHandlerAdapter { + + private final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class); + + /** + * netty remote server + */ + private final NettyRemotingServer nettyRemotingServer; + + /** + * server processors queue + */ + private final ConcurrentHashMap> processors = new ConcurrentHashMap(); + + public NettyServerHandler(NettyRemotingServer nettyRemotingServer){ + this.nettyRemotingServer = nettyRemotingServer; + } + + /** + * When the current channel is not 
active, + * the current channel has reached the end of its life cycle + * @param ctx channel handler context + * @throws Exception + */ + @Override + public void channelInactive(ChannelHandlerContext ctx) throws Exception { + ctx.channel().close(); + } + + /** + * The current channel reads data from the remote end + * + * @param ctx channel handler context + * @param msg message + * @throws Exception + */ + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { + processReceived(ctx.channel(), (Command)msg); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) { + this.registerProcessor(commandType, processor, null); + } + + /** + * register processor + * + * @param commandType command type + * @param processor processor + * @param executor thread executor + */ + public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) { + ExecutorService executorRef = executor; + if(executorRef == null){ + executorRef = nettyRemotingServer.getDefaultExecutor(); + } + this.processors.putIfAbsent(commandType, new Pair(processor, executorRef)); + } + + /** + * process received logic + * @param channel channel + * @param msg message + */ + private void processReceived(final Channel channel, final Command msg) { + final CommandType commandType = msg.getType(); + final Pair pair = processors.get(commandType); + if (pair != null) { + Runnable r = new Runnable() { + + @Override + public void run() { + try { + pair.getLeft().process(channel, msg); + } catch (Throwable ex) { + logger.error("process msg {} error : {}", msg, ex); + } + } + }; + try { + pair.getRight().submit(r); + } catch (RejectedExecutionException e) { + logger.warn("thread pool is full, discard msg {} from {}", msg, 
ChannelUtils.getRemoteAddress(channel)); + } + } else { + logger.warn("commandType {} not support", commandType); + } + } + + /** + * caught exception + * + * @param ctx channel handler context + * @param cause cause + * @throws Exception + */ + @Override + public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception { + logger.error("exceptionCaught : {}", cause); + ctx.channel().close(); + } + + /** + * channel write changed + * + * @param ctx channel handler context + * @throws Exception + */ + @Override + public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { + Channel ch = ctx.channel(); + ChannelConfig config = ch.config(); + + if (!ch.isWritable()) { + if (logger.isWarnEnabled()) { + logger.warn("{} is not writable, over high water level : {}", + new Object[]{ch, config.getWriteBufferHighWaterMark()}); + } + + config.setAutoRead(false); + } else { + if (logger.isWarnEnabled()) { + logger.warn("{} is writable, to low water : {}", + new Object[]{ch, config.getWriteBufferLowWaterMark()}); + } + config.setAutoRead(true); + } + } +} \ No newline at end of file diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/NettyRequestProcessor.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/NettyRequestProcessor.java new file mode 100644 index 0000000000..6966b53d17 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/NettyRequestProcessor.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.processor; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.remote.command.Command; + +/** + * netty request processor + */ +public interface NettyRequestProcessor { + + /** + * process logic + * @param channel channel + * @param command command + */ + void process(final Channel channel, final Command command); +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java new file mode 100644 index 0000000000..f61dcd615c --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Address.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.remote.utils; + +import java.io.Serializable; + +/** + * server address + */ +public class Address implements Serializable { + + /** + * host + */ + private String host; + + /** + * port + */ + private int port; + + public Address(){ + //NOP + } + + public Address(String host, int port){ + this.host = host; + this.port = port; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public int getPort() { + return port; + } + + public void setPort(int port) { + this.port = port; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((host == null) ? 0 : host.hashCode()); + result = prime * result + port; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Address other = (Address) obj; + if (host == null) { + if (other.host != null) { + return false; + } + } else if (!host.equals(other.host)) { + return false; + } + return port == other.port; + } + + @Override + public String toString() { + return "Address [host=" + host + ", port=" + port + "]"; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java new file mode 100644 index 0000000000..048ea86acb --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.ThreadPoolExecutor; + +/** + * caller thread execute + */ +public class CallerThreadExecutePolicy implements RejectedExecutionHandler { + + private final Logger logger = LoggerFactory.getLogger(CallerThreadExecutePolicy.class); + + @Override + public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { + logger.warn("queue is full, trigger caller thread execute"); + r.run(); + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java new file mode 100644 index 0000000000..d7af5fe165 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.utils; + +import io.netty.channel.Channel; + +import java.net.InetSocketAddress; + +/** + * channel utils + */ +public class ChannelUtils { + + /** + * get local address + * + * @param channel channel + * @return local address + */ + public static String getLocalAddress(Channel channel){ + return ((InetSocketAddress)channel.localAddress()).getAddress().getHostAddress(); + } + + /** + * get remote address + * @param channel channel + * @return remote address + */ + public static String getRemoteAddress(Channel channel){ + return ((InetSocketAddress)channel.remoteAddress()).getAddress().getHostAddress(); + } + + /** + * channel to address + * @param channel channel + * @return address + */ + public static Address toAddress(Channel channel){ + InetSocketAddress socketAddress = ((InetSocketAddress)channel.remoteAddress()); + return new Address(socketAddress.getAddress().getHostAddress(), socketAddress.getPort()); + } + +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java new file mode 100644 index 0000000000..5733b17790 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Constants.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.utils; + +import java.nio.charset.Charset; + + +/** + * constant + */ +public class Constants { + + public static final String COMMA = ","; + + public static final String SLASH = "/"; + + /** + * charset + */ + public static final Charset UTF8 = Charset.forName("UTF-8"); + + /** + * cpus + */ + public static final int CPUS = Runtime.getRuntime().availableProcessors(); + +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java new file mode 100644 index 0000000000..e96796a05c --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/FastJsonSerializer.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.dolphinscheduler.remote.utils;
+
+import com.alibaba.fastjson.JSON;
+
+/**
+ * json serialize or deserialize
+ */
+public class FastJsonSerializer {
+
+    /**
+     * serialize to byte
+     *
+     * @param obj object
+     * @param <T> object type
+     * @return byte array
+     */
+    public static <T> byte[] serialize(T obj) {
+        String json = JSON.toJSONString(obj);
+        return json.getBytes(Constants.UTF8);
+    }
+
+    /**
+     * serialize to string
+     * @param obj object
+     * @param <T> object type
+     * @return string
+     */
+    public static <T> String serializeToString(T obj) {
+        return JSON.toJSONString(obj);
+    }
+
+    /**
+     * deserialize
+     *
+     * @param src byte array
+     * @param clazz class
+     * @param <T> deserialize type
+     * @return deserialize type
+     */
+    public static <T> T deserialize(byte[] src, Class<T> clazz) {
+        return JSON.parseObject(new String(src, Constants.UTF8), clazz);
+    }
+
+}
diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java
new file mode 100644
index 0000000000..2f0d05ebd4
--- /dev/null
+++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/NamedThreadFactory.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.remote.utils; + +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * thread factory + */ +public class NamedThreadFactory implements ThreadFactory { + + private final AtomicInteger increment = new AtomicInteger(1); + + /** + * name + */ + private final String name; + + /** + * count + */ + private final int count; + + public NamedThreadFactory(String name){ + this(name, 0); + } + + public NamedThreadFactory(String name, int count){ + this.name = name; + this.count = count; + } + + /** + * create thread + * @param r runnable + * @return thread + */ + @Override + public Thread newThread(Runnable r) { + final String threadName = count > 0 ? 
String.format(name + "_%d_%d", count, increment.getAndIncrement()) + : String.format(name + "_%d", increment.getAndIncrement()); + Thread t = new Thread(r, threadName); + t.setDaemon(true); + return t; + } +} diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java new file mode 100644 index 0000000000..2042191486 --- /dev/null +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/Pair.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.dolphinscheduler.remote.utils;
+
+
+/**
+ * key value pair
+ *
+ * @param <L> generic type
+ * @param <R> generic type
+ */
+public class Pair<L, R> {
+
+    private L left;
+
+    private R right;
+
+    public Pair(L left, R right) {
+        this.left = left;
+        this.right = right;
+    }
+
+    public L getLeft() {
+        return left;
+    }
+
+    public void setLeft(L left) {
+        this.left = left;
+    }
+
+    public R getRight() {
+        return right;
+    }
+
+    public void setRight(R right) {
+        this.right = right;
+    }
+}
diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/FastJsonSerializerTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/FastJsonSerializerTest.java
new file mode 100644
index 0000000000..97166cca70
--- /dev/null
+++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/FastJsonSerializerTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.dolphinscheduler.remote; + + +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.junit.Assert; +import org.junit.Test; + +public class FastJsonSerializerTest { + + @Test + public void testSerialize(){ + TestObj testObj = new TestObj(); + testObj.setAge(12); + byte[] serializeByte = FastJsonSerializer.serialize(testObj); + + // + TestObj deserialize = FastJsonSerializer.deserialize(serializeByte, TestObj.class); + + Assert.assertEquals(testObj.getAge(), deserialize.getAge()); + } + + static class TestObj { + + private int age; + + public int getAge() { + return age; + } + + public void setAge(int age) { + this.age = age; + } + + @Override + public String toString() { + return "TestObj{" + + "age=" + age + + '}'; + } + } +} diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java new file mode 100644 index 0000000000..ef46c2c781 --- /dev/null +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/NettyRemotingClientTest.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.Ping; +import org.apache.dolphinscheduler.remote.command.Pong; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.apache.dolphinscheduler.remote.future.InvokeCallback; +import org.apache.dolphinscheduler.remote.future.ResponseFuture; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.Address; +import org.junit.Assert; +import org.junit.Test; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicLong; + +/** + * netty remote client test + */ +public class NettyRemotingClientTest { + + + /** + * test sned sync + */ + @Test + public void testSendSync(){ + NettyServerConfig serverConfig = new NettyServerConfig(); + + NettyRemotingServer server = new NettyRemotingServer(serverConfig); + server.registerProcessor(CommandType.PING, new NettyRequestProcessor() { + @Override + public void process(Channel channel, Command command) { + channel.writeAndFlush(Pong.create(command.getOpaque())); + } + }); + + + server.start(); + // + final NettyClientConfig clientConfig = new NettyClientConfig(); + NettyRemotingClient client = new NettyRemotingClient(clientConfig); + Command commandPing = Ping.create(); + try { + Command response = client.sendSync(new Address("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000); + Assert.assertEquals(commandPing.getOpaque(), response.getOpaque()); + } catch (Exception e) { + e.printStackTrace(); + } + server.close(); + client.close(); + } + + /** + * test sned async + */ 
+ @Test + public void testSendAsync(){ + NettyServerConfig serverConfig = new NettyServerConfig(); + + NettyRemotingServer server = new NettyRemotingServer(serverConfig); + server.registerProcessor(CommandType.PING, new NettyRequestProcessor() { + @Override + public void process(Channel channel, Command command) { + channel.writeAndFlush(Pong.create(command.getOpaque())); + } + }); + server.start(); + // + final NettyClientConfig clientConfig = new NettyClientConfig(); + NettyRemotingClient client = new NettyRemotingClient(clientConfig); + CountDownLatch latch = new CountDownLatch(1); + Command commandPing = Ping.create(); + try { + final AtomicLong opaque = new AtomicLong(0); + client.sendAsync(new Address("127.0.0.1", serverConfig.getListenPort()), commandPing, 2000, new InvokeCallback() { + @Override + public void operationComplete(ResponseFuture responseFuture) { + opaque.set(responseFuture.getOpaque()); + latch.countDown(); + } + }); + latch.await(); + Assert.assertEquals(commandPing.getOpaque(), opaque.get()); + } catch (Exception e) { + e.printStackTrace(); + } + server.close(); + client.close(); + } +} diff --git a/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/ResponseFutureTest.java b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/ResponseFutureTest.java new file mode 100644 index 0000000000..8836043257 --- /dev/null +++ b/dolphinscheduler-remote/src/test/java/org/apache/dolphinscheduler/remote/ResponseFutureTest.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.remote; + + +import org.apache.dolphinscheduler.remote.future.InvokeCallback; +import org.apache.dolphinscheduler.remote.future.ResponseFuture; +import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory; +import org.junit.Assert; +import org.junit.Test; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +public class ResponseFutureTest { + + @Test + public void testScanFutureTable(){ + ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("executor-service")); + executorService.scheduleAtFixedRate(new Runnable() { + @Override + public void run() { + ResponseFuture.scanFutureTable(); + } + }, 3000, 1000, TimeUnit.MILLISECONDS); + + CountDownLatch latch = new CountDownLatch(1); + InvokeCallback invokeCallback = new InvokeCallback() { + @Override + public void operationComplete(ResponseFuture responseFuture) { + latch.countDown(); + } + }; + ResponseFuture future = new ResponseFuture(1, 2000, invokeCallback, null); + try { + latch.await(5000, TimeUnit.MILLISECONDS); + Assert.assertTrue(ResponseFuture.getFuture(1) == null); + } catch (InterruptedException e) { + e.printStackTrace(); + } + executorService.shutdownNow(); + } +} diff --git a/dolphinscheduler-rpc/pom.xml b/dolphinscheduler-rpc/pom.xml deleted file mode 100644 index 680a4a24c0..0000000000 --- a/dolphinscheduler-rpc/pom.xml +++ /dev/null @@ -1,113 +0,0 @@ - - 
- - - - org.apache.dolphinscheduler - dolphinscheduler - 1.2.1-SNAPSHOT - - 4.0.0 - - dolphinscheduler-rpc - - dolphinscheduler-rpc - https://github.com/apache/incubator-dolphinscheduler - - - UTF-8 - 1.8 - 1.8 - - 3.5.1 - 1.9.0 - - - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - io.grpc - grpc-netty - ${grpc.version} - - - io.grpc - grpc-protobuf - ${grpc.version} - - - io.grpc - grpc-stub - ${grpc.version} - - - - com.google.guava - guava - - - - - - - kr.motd.maven - os-maven-plugin - 1.5.0.Final - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - 0.5.0 - - com.google.protobuf:protoc:3.5.1-1:exe:${os.detected.classifier} - grpc-java - io.grpc:protoc-gen-grpc-java:${grpc.version}:exe:${os.detected.classifier} - - - - compile - - compile - - - - compile-custom - - compile-custom - - - - - - org.apache.maven.plugins - maven-compiler-plugin - - ${java.version} - ${java.version} - ${project.build.sourceEncoding} - - - - - diff --git a/dolphinscheduler-rpc/src/main/proto/scheduler.proto b/dolphinscheduler-rpc/src/main/proto/scheduler.proto deleted file mode 100644 index b8b595cb2a..0000000000 --- a/dolphinscheduler-rpc/src/main/proto/scheduler.proto +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -syntax = "proto3"; - -package schduler; - -option java_multiple_files = true; -option java_package = "org.apache.dolphinscheduler.rpc"; -option java_outer_classname = "SchdulerProto"; - - -/** - * return str info - */ -message RetStrInfo { - /** - * str msg info - */ - string msg = 1 ; -} - -/** - * return byte info - */ -message RetByteInfo { - /** - * byte data info - */ - bytes data = 1; -} - -/** - * log parameter - */ -message LogParameter { - - /** - * path - */ - string path = 1 ; - - /** - * skip line num - */ - int32 skipLineNum = 2 ; - - /** - * display limt num - */ - int32 limit = 3 ; -} - - -/** - * path parameter - */ -message PathParameter { - - /** - * path - */ - string path = 1 ; -} - -/** - * log view service - */ -service LogViewService { - - /** - * roll view log - */ - rpc rollViewLog(LogParameter) returns (RetStrInfo) {}; - - /** - * view all log - */ - rpc viewLog(PathParameter) returns (RetStrInfo) {}; - - /** - * get log bytes - */ - rpc getLogBytes(PathParameter) returns (RetByteInfo) {}; -} - diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index 751fd919a8..080b87ebaa 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -71,7 +71,7 @@ org.apache.dolphinscheduler - dolphinscheduler-rpc + dolphinscheduler-service org.apache.curator diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java new file mode 100644 index 0000000000..4e4404ea1c --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerRequestProcessor.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.log; + +import io.netty.channel.Channel; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.command.log.*; +import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * logger request process logic + */ +public class LoggerRequestProcessor implements NettyRequestProcessor { + + private final Logger logger = LoggerFactory.getLogger(LoggerRequestProcessor.class); + + private final ThreadPoolExecutor executor; + + public LoggerRequestProcessor(){ + this.executor = new ThreadPoolExecutor(4, 4, 10, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100)); + } + + @Override + public void process(Channel 
channel, Command command) {
+        logger.info("received command : {}", command);
+
+        /**
+         * request task log command type
+         */
+        final CommandType commandType = command.getType();
+        switch (commandType){
+            case GET_LOG_BYTES_REQUEST:
+                GetLogBytesRequestCommand getLogRequest = FastJsonSerializer.deserialize(
+                        command.getBody(), GetLogBytesRequestCommand.class);
+                byte[] bytes = getFileContentBytes(getLogRequest.getPath());
+                GetLogBytesResponseCommand getLogResponse = new GetLogBytesResponseCommand(bytes);
+                channel.writeAndFlush(getLogResponse.convert2Command(command.getOpaque()));
+                break;
+            case VIEW_WHOLE_LOG_REQUEST:
+                ViewLogRequestCommand viewLogRequest = FastJsonSerializer.deserialize(
+                        command.getBody(), ViewLogRequestCommand.class);
+                String msg = readWholeFileContent(viewLogRequest.getPath());
+                ViewLogResponseCommand viewLogResponse = new ViewLogResponseCommand(msg);
+                channel.writeAndFlush(viewLogResponse.convert2Command(command.getOpaque()));
+                break;
+            case ROLL_VIEW_LOG_REQUEST:
+                RollViewLogRequestCommand rollViewLogRequest = FastJsonSerializer.deserialize(
+                        command.getBody(), RollViewLogRequestCommand.class);
+                List<String> lines = readPartFileContent(rollViewLogRequest.getPath(),
+                        rollViewLogRequest.getSkipLineNum(), rollViewLogRequest.getLimit());
+                StringBuilder builder = new StringBuilder();
+                for (String line : lines){
+                    builder.append(line + "\r\n");
+                }
+                RollViewLogResponseCommand rollViewLogRequestResponse = new RollViewLogResponseCommand(builder.toString());
+                channel.writeAndFlush(rollViewLogRequestResponse.convert2Command(command.getOpaque()));
+                break;
+            default:
+                throw new IllegalArgumentException("unknown commandType");
+        }
+    }
+
+    public ExecutorService getExecutor(){
+        return this.executor;
+    }
+
+    /**
+     * get files content bytes,for down load file
+     *
+     * @param filePath file path
+     * @return byte array of file
+     * @throws Exception exception
+     */
+    private byte[] getFileContentBytes(String filePath){
+        InputStream in = null;
+
ByteArrayOutputStream bos = null;
+        try {
+            in = new FileInputStream(filePath);
+            bos = new ByteArrayOutputStream();
+            byte[] buf = new byte[1024];
+            int len;
+            while ((len = in.read(buf)) != -1) {
+                bos.write(buf, 0, len);
+            }
+            return bos.toByteArray();
+        }catch (IOException e){
+            logger.error("get file bytes error",e);
+        }finally {
+            if (bos != null){
+                try {
+                    bos.close();
+                } catch (IOException ignore) {}
+            }
+            if (in != null){
+                try {
+                    in.close();
+                } catch (IOException ignore) {}
+            }
+        }
+        return new byte[0];
+    }
+
+    /**
+     * read part file content,can skip any line and read some lines
+     *
+     * @param filePath file path
+     * @param skipLine skip line
+     * @param limit read lines limit
+     * @return part file content
+     */
+    private List<String> readPartFileContent(String filePath,
+                                             int skipLine,
+                                             int limit){
+        try (Stream<String> stream = Files.lines(Paths.get(filePath))) {
+            return stream.skip(skipLine).limit(limit).collect(Collectors.toList());
+        } catch (IOException e) {
+            logger.error("read file error",e);
+        }
+        return Collections.EMPTY_LIST;
+    }
+
+    /**
+     * read whole file content
+     *
+     * @param filePath file path
+     * @return whole file content
+     */
+    private String readWholeFileContent(String filePath){
+        BufferedReader br = null;
+        String line;
+        StringBuilder sb = new StringBuilder();
+        try {
+            br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+            while ((line = br.readLine()) != null){
+                sb.append(line + "\r\n");
+            }
+            return sb.toString();
+        }catch (IOException e){
+            logger.error("read file error",e);
+        }finally {
+            try {
+                if (br != null){
+                    br.close();
+                }
+            } catch (IOException ignore) {}
+        }
+        return "";
+    }
+}
diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java
new file mode 100644
index 0000000000..3520fb09ec
--- /dev/null
+++
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/log/LoggerServer.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.log; + + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.remote.NettyRemotingServer; +import org.apache.dolphinscheduler.remote.command.CommandType; +import org.apache.dolphinscheduler.remote.config.NettyServerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * logger server + */ +public class LoggerServer { + + private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class); + + /** + * netty server + */ + private final NettyRemotingServer server; + + /** + * netty server config + */ + private final NettyServerConfig serverConfig; + + /** + * loggger request processor + */ + private final LoggerRequestProcessor requestProcessor; + + public LoggerServer(){ + this.serverConfig = new NettyServerConfig(); + this.serverConfig.setListenPort(Constants.RPC_PORT); + this.server = new NettyRemotingServer(serverConfig); + this.requestProcessor = new LoggerRequestProcessor(); + 
this.server.registerProcessor(CommandType.GET_LOG_BYTES_REQUEST, requestProcessor, requestProcessor.getExecutor()); + this.server.registerProcessor(CommandType.ROLL_VIEW_LOG_REQUEST, requestProcessor, requestProcessor.getExecutor()); + this.server.registerProcessor(CommandType.VIEW_WHOLE_LOG_REQUEST, requestProcessor, requestProcessor.getExecutor()); + } + + /** + * main launches the server from the command line. + * @param args arguments + */ + public static void main(String[] args) { + final LoggerServer server = new LoggerServer(); + server.start(); + } + + /** + * server start + */ + public void start() { + this.server.start(); + logger.info("logger server started, listening on port : {}" , Constants.RPC_PORT); + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + LoggerServer.this.stop(); + } + }); + } + + /** + * stop + */ + public void stop() { + this.server.close(); + logger.info("logger server shut down"); + } + +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java index 0647b9450b..6b5063cba4 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -22,14 +22,14 @@ import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import 
org.apache.dolphinscheduler.server.master.runner.MasterSchedulerThread; -import org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob; -import org.apache.dolphinscheduler.dao.quartz.QuartzExecutors; import org.apache.dolphinscheduler.server.zk.ZKMasterClient; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; +import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,10 +66,10 @@ public class MasterServer implements IStoppable { private ScheduledExecutorService heartbeatMasterService; /** - * dolphinscheduler database interface + * process service */ @Autowired - protected ProcessDao processDao; + protected ProcessService processService; /** * master exec thread pool @@ -77,17 +77,18 @@ public class MasterServer implements IStoppable { private ExecutorService masterSchedulerService; /** - * spring application context - * only use it for initialization + * master config */ @Autowired - private SpringApplicationContext springApplicationContext; + private MasterConfig masterConfig; + /** - * master config + * spring application context + * only use it for initialization */ @Autowired - private MasterConfig masterConfig; + private SpringApplicationContext springApplicationContext; /** @@ -111,7 +112,7 @@ public class MasterServer implements IStoppable { masterSchedulerService = ThreadUtils.newDaemonSingleThreadExecutor("Master-Scheduler-Thread"); - heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.defaulMasterHeartbeatThreadNum); + heartbeatMasterService = ThreadUtils.newDaemonThreadScheduledExecutor("Master-Main-Thread",Constants.DEFAULT_MASTER_HEARTBEAT_THREAD_NUM); // heartbeat thread implement Runnable heartBeatThread = heartBeatThread(); @@ 
-126,7 +127,7 @@ public class MasterServer implements IStoppable { // master scheduler thread MasterSchedulerThread masterSchedulerThread = new MasterSchedulerThread( zkMasterClient, - processDao, + processService, masterConfig.getMasterExecThreads()); // submit master scheduler thread @@ -136,7 +137,7 @@ public class MasterServer implements IStoppable { // what system should do if exception try { logger.info("start Quartz server..."); - ProcessScheduleJob.init(processDao); + ProcessScheduleJob.init(processService); QuartzExecutors.getInstance().start(); } catch (Exception e) { try { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java index c1552c4621..f8fcb1456d 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java @@ -16,15 +16,15 @@ */ package org.apache.dolphinscheduler.server.master.runner; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; -import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.BeanContext; import org.apache.dolphinscheduler.server.master.config.MasterConfig; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import 
org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,9 +41,9 @@ public class MasterBaseTaskExecThread implements Callable { private static final Logger logger = LoggerFactory.getLogger(MasterBaseTaskExecThread.class); /** - * process dao + * process service */ - protected ProcessDao processDao; + protected ProcessService processService; /** * alert database access @@ -81,7 +81,7 @@ public class MasterBaseTaskExecThread implements Callable { * @param processInstance process instance */ public MasterBaseTaskExecThread(TaskInstance taskInstance, ProcessInstance processInstance){ - this.processDao = BeanContext.getBean(ProcessDao.class); + this.processService = BeanContext.getBean(ProcessService.class); this.alertDao = BeanContext.getBean(AlertDao.class); this.processInstance = processInstance; this.taskQueue = TaskQueueFactory.getTaskQueueInstance(); @@ -121,14 +121,14 @@ public class MasterBaseTaskExecThread implements Callable { try { if(!submitDB){ // submit task to db - task = processDao.submitTask(taskInstance, processInstance); + task = processService.submitTask(taskInstance, processInstance); if(task != null && task.getId() != 0){ submitDB = true; } } if(submitDB && !submitQueue){ // submit task to queue - submitQueue = processDao.submitTaskToQueue(task); + submitQueue = processService.submitTaskToQueue(task); } if(submitDB && submitQueue){ return task; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java index 2b1ff4d23f..4b22b27bec 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java @@ -25,17 +25,19 @@ import 
org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.model.TaskNodeRelation; import org.apache.dolphinscheduler.common.process.ProcessDag; +import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.utils.DagHelper; -import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.utils.AlertManager; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -108,6 +110,11 @@ public class MasterExecThread implements Runnable { */ private Map forbiddenTaskList = new ConcurrentHashMap<>(); + /** + * skip task map + */ + private Map skipTaskNodeList = new ConcurrentHashMap<>(); + /** * recover tolerance fault task list */ @@ -124,9 +131,9 @@ public class MasterExecThread implements Runnable { private DAG dag; /** - * process dao + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * master config @@ -136,10 +143,10 @@ public class MasterExecThread implements Runnable { /** * constructor of MasterExecThread * @param processInstance process instance - * @param processDao process dao + * @param processService process dao */ - public MasterExecThread(ProcessInstance 
processInstance,ProcessDao processDao){ - this.processDao = processDao; + public MasterExecThread(ProcessInstance processInstance, ProcessService processService){ + this.processService = processService; this.processInstance = processInstance; this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class); @@ -177,7 +184,7 @@ public class MasterExecThread implements Runnable { logger.error("process execute failed, process id:{}", processInstance.getId()); processInstance.setState(ExecutionStatus.FAILURE); processInstance.setEndTime(new Date()); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); }finally { taskExecService.shutdown(); // post handle @@ -205,11 +212,11 @@ public class MasterExecThread implements Runnable { Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE)); Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE)); - processDao.saveProcessInstance(processInstance); + processService.saveProcessInstance(processInstance); // get schedules int processDefinitionId = processInstance.getProcessDefinitionId(); - List schedules = processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); + List schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId); List listDate = Lists.newLinkedList(); if(!CollectionUtils.isEmpty(schedules)){ for (Schedule schedule : schedules) { @@ -223,7 +230,7 @@ public class MasterExecThread implements Runnable { iterator = listDate.iterator(); scheduleDate = iterator.next(); processInstance.setScheduleTime(scheduleDate); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); }else{ scheduleDate = processInstance.getScheduleTime(); if(scheduleDate == null){ @@ -239,7 +246,7 @@ public class MasterExecThread implements Runnable { logger.error("process {} dag is null, please check out 
parameters", processInstance.getId()); processInstance.setState(ExecutionStatus.SUCCESS); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); return; } @@ -281,10 +288,10 @@ public class MasterExecThread implements Runnable { processInstance.setCommandParam(JSONUtils.toJson(cmdParam)); } - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance taskInstance : taskInstanceList){ taskInstance.setFlag(Flag.NO); - processDao.updateTaskInstance(taskInstance); + processService.updateTaskInstance(taskInstance); } processInstance.setState(ExecutionStatus.RUNNING_EXEUTION); processInstance.setGlobalParams(ParameterUtils.curingGlobalParams( @@ -292,7 +299,7 @@ public class MasterExecThread implements Runnable { processInstance.getProcessDefinition().getGlobalParamList(), CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime())); - processDao.saveProcessInstance(processInstance); + processService.saveProcessInstance(processInstance); } // flow end @@ -320,11 +327,11 @@ public class MasterExecThread implements Runnable { */ private void endProcess() { processInstance.setEndTime(new Date()); - processDao.updateProcessInstance(processInstance); + processService.updateProcessInstance(processInstance); if(processInstance.getState().typeIsWaittingThread()){ - processDao.createRecoveryWaitingThreadCommand(null, processInstance); + processService.createRecoveryWaitingThreadCommand(null, processInstance); } - List taskInstances = processDao.findValidTaskListByProcessId(processInstance.getId()); + List taskInstances = processService.findValidTaskListByProcessId(processInstance.getId()); alertManager.sendAlertProcessInstance(processInstance, taskInstances); } @@ -361,7 +368,7 @@ public class MasterExecThread implements Runnable { dependFailedTask.clear(); 
completeTaskList.clear(); errorTaskList.clear(); - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance task : taskInstanceList){ if(task.isTaskComplete()){ completeTaskList.put(task.getName(), task); @@ -417,7 +424,7 @@ public class MasterExecThread implements Runnable { * @return TaskInstance */ private TaskInstance findTaskIfExists(String taskName){ - List taskInstanceList = processDao.findValidTaskListByProcessId(this.processInstance.getId()); + List taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId()); for(TaskInstance taskInstance : taskInstanceList){ if(taskInstance.getName().equals(taskName)){ return taskInstance; @@ -433,7 +440,7 @@ public class MasterExecThread implements Runnable { * @return TaskInstance */ private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName, - TaskNode taskNode, String parentNodeName) { + TaskNode taskNode) { TaskInstance taskInstance = findTaskIfExists(nodeName); if(taskInstance == null){ @@ -483,58 +490,140 @@ public class MasterExecThread implements Runnable { } /** - * get post task instance by node - * @param dag dag - * @param parentNodeName parent node name - * @return task instance list + * is there have conditions after the parent node + * @param parentNodeName + * @return */ - private List getPostTaskInstanceByNode(DAG dag, String parentNodeName){ + private boolean haveConditionsAfterNode(String parentNodeName){ - List postTaskList = new ArrayList<>(); + boolean result = false; Collection startVertex = DagHelper.getStartVertex(parentNodeName, dag, completeTaskList); if(startVertex == null){ - return postTaskList; + return result; + } + for(String nodeName : startVertex){ + TaskNode taskNode = dag.getNode(nodeName); + if(taskNode.getType().equals(TaskType.CONDITIONS.toString())){ + result = true; + break; 
+ } } + return result; + } - for (String nodeName : startVertex){ - // encapsulation task instance - TaskInstance taskInstance = createTaskInstance(processInstance, nodeName , - dag.getNode(nodeName),parentNodeName); - postTaskList.add(taskInstance); + /** + * if all of the task dependence are skip, skip it too. + * @param taskNode + * @return + */ + private boolean isTaskNodeNeedSkip(TaskNode taskNode){ + if(CollectionUtils.isEmpty(taskNode.getDepList())){ + return false; } - return postTaskList; + for(String depNode : taskNode.getDepList()){ + if(!skipTaskNodeList.containsKey(depNode)){ + return false; + } + } + return true; } /** - * return start task node list - * @return task instance list + * set task node skip if dependence all skip + * @param taskNodesSkipList */ - private List getStartSubmitTaskList(){ + private void setTaskNodeSkip(List taskNodesSkipList){ + for(String skipNode : taskNodesSkipList){ + skipTaskNodeList.putIfAbsent(skipNode, dag.getNode(skipNode)); + Collection postNodeList = DagHelper.getStartVertex(skipNode, dag, completeTaskList); + List postSkipList = new ArrayList<>(); + for(String post : postNodeList){ + TaskNode postNode = dag.getNode(post); + if(isTaskNodeNeedSkip(postNode)){ + postSkipList.add(post); + } + } + setTaskNodeSkip(postSkipList); + } + } - List startTaskList = getPostTaskInstanceByNode(dag, null); - HashMap successTaskMaps = new HashMap<>(); - List resultList = new ArrayList<>(); - while(Stopper.isRunning()){ - for(TaskInstance task : startTaskList){ - if(task.getState().typeIsSuccess()){ - successTaskMaps.put(task.getName(), task); - }else if(!completeTaskList.containsKey(task.getName()) && !errorTaskList.containsKey(task.getName())){ - resultList.add(task); + /** + * parse condition task find the branch process + * set skip flag for another one. 
+ * @param nodeName + * @return + */ + private List parseConditionTask(String nodeName){ + List conditionTaskList = new ArrayList<>(); + TaskNode taskNode = dag.getNode(nodeName); + if(!taskNode.isConditionsTask()){ + return conditionTaskList; + } + ConditionsParameters conditionsParameters = + JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class); + + TaskInstance taskInstance = completeTaskList.get(nodeName); + if(taskInstance == null){ + logger.error("task instance cannot find, please check it!", nodeName); + return conditionTaskList; + } + + if(taskInstance.getState().typeIsSuccess()){ + conditionTaskList = conditionsParameters.getSuccessNode(); + setTaskNodeSkip(conditionsParameters.getFailedNode()); + }else if(taskInstance.getState().typeIsFailure()){ + conditionTaskList = conditionsParameters.getFailedNode(); + setTaskNodeSkip(conditionsParameters.getSuccessNode()); + }else{ + conditionTaskList.add(nodeName); + } + return conditionTaskList; + } + + /** + * parse post node list of previous node + * if condition node: return process according to the settings + * if post node completed, return post nodes of the completed node + * @param previousNodeName + * @return + */ + private List parsePostNodeList(String previousNodeName){ + List postNodeList = new ArrayList<>(); + + TaskNode taskNode = dag.getNode(previousNodeName); + if(taskNode != null && taskNode.isConditionsTask()){ + return parseConditionTask(previousNodeName); + } + Collection postNodeCollection = DagHelper.getStartVertex(previousNodeName, dag, completeTaskList); + List postSkipList = new ArrayList<>(); + // delete success node, parse the past nodes + // if conditions node, + // 1. parse the branch process according the conditions setting + // 2. 
set skip flag on anther branch process + for(String postNode : postNodeCollection){ + if(completeTaskList.containsKey(postNode)){ + TaskInstance postTaskInstance = completeTaskList.get(postNode); + if(dag.getNode(postNode).isConditionsTask()){ + List conditionTaskNodeList = parseConditionTask(postNode); + for(String conditions : conditionTaskNodeList){ + postNodeList.addAll(parsePostNodeList(conditions)); + } + }else if(postTaskInstance.getState().typeIsSuccess()){ + postNodeList.addAll(parsePostNodeList(postNode)); + }else{ + postNodeList.add(postNode); } - } - startTaskList.clear(); - if(successTaskMaps.size() == 0){ - break; - } - Set taskNameKeys = successTaskMaps.keySet(); - for(String taskName : taskNameKeys){ - startTaskList.addAll(getPostTaskInstanceByNode(dag, taskName)); + }else if(isTaskNodeNeedSkip(dag.getNode(postNode))){ + postSkipList.add(postNode); + setTaskNodeSkip(postSkipList); + postSkipList.clear(); + }else{ + postNodeList.add(postNode); } - successTaskMaps.clear(); } - return resultList; + return postNodeList; } /** @@ -543,14 +632,17 @@ public class MasterExecThread implements Runnable { */ private void submitPostNode(String parentNodeName){ - List submitTaskList = null; - if(parentNodeName == null){ - submitTaskList = getStartSubmitTaskList(); - }else{ - submitTaskList = getPostTaskInstanceByNode(dag, parentNodeName); + List submitTaskNodeList = parsePostNodeList(parentNodeName); + + List taskInstances = new ArrayList<>(); + for(String taskNode : submitTaskNodeList){ + taskInstances.add(createTaskInstance(processInstance, taskNode, + dag.getNode(taskNode))); } + // if previous node success , post node submit - for(TaskInstance task : submitTaskList){ + for(TaskInstance task : taskInstances){ + if(readyToSubmitTaskList.containsKey(task.getName())){ continue; } @@ -574,27 +666,31 @@ public class MasterExecThread implements Runnable { private DependResult isTaskDepsComplete(String taskName) { Collection startNodes = dag.getBeginNode(); - // if 
the vertex returns true directly + // if vertex,returns true directly if(startNodes.contains(taskName)){ return DependResult.SUCCESS; } TaskNode taskNode = dag.getNode(taskName); - List depsNameList = taskNode.getDepList(); - for(String depsNode : depsNameList ){ + List depNameList = taskNode.getDepList(); + for(String depsNode : depNameList ){ - if(forbiddenTaskList.containsKey(depsNode)){ + if(forbiddenTaskList.containsKey(depsNode) || + skipTaskNodeList.containsKey(depsNode)){ continue; } // dependencies must be fully completed if(!completeTaskList.containsKey(depsNode)){ return DependResult.WAITING; } - ExecutionStatus taskState = completeTaskList.get(depsNode).getState(); - if(taskState.typeIsFailure()){ - return DependResult.FAILED; + ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState(); + // conditions task would not return failed. + if(depTaskState.typeIsFailure()){ + if(!haveConditionsAfterNode(depsNode) && !dag.getNode(depsNode).isConditionsTask()){ + return DependResult.FAILED; + } } - if(taskState.typeIsPause() || taskState.typeIsCancel()){ + if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){ return DependResult.WAITING; } } @@ -706,7 +802,7 @@ public class MasterExecThread implements Runnable { * @return process instance execution status */ private ExecutionStatus getProcessInstanceState(){ - ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); + ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId()); ExecutionStatus state = instance.getState(); if(activeTaskNode.size() > 0){ @@ -784,10 +880,10 @@ public class MasterExecThread implements Runnable { processInstance.getState().toString(), state.toString(), processInstance.getCommandType().toString()); processInstance.setState(state); - ProcessInstance instance = processDao.findProcessInstanceById(processInstance.getId()); + ProcessInstance instance = 
processService.findProcessInstanceById(processInstance.getId()); instance.setState(state); instance.setProcessDefinition(processInstance.getProcessDefinition()); - processDao.updateProcessInstance(instance); + processService.updateProcessInstance(instance); processInstance = instance; } } @@ -845,7 +941,7 @@ public class MasterExecThread implements Runnable { // send warning email if process time out. if( !sendTimeWarning && checkProcessTimeOut(processInstance) ){ alertManager.sendProcessTimeoutAlert(processInstance, - processDao.findProcessDefineById(processInstance.getProcessDefinitionId())); + processService.findProcessDefineById(processInstance.getProcessDefinitionId())); sendTimeWarning = true; } for(Map.Entry> entry: activeTaskNode.entrySet()) { @@ -877,11 +973,15 @@ public class MasterExecThread implements Runnable { if(task.taskCanRetry()){ addTaskToStandByList(task); }else{ - // node failure, based on failure strategy - errorTaskList.put(task.getName(), task); completeTaskList.put(task.getName(), task); - if(processInstance.getFailureStrategy() == FailureStrategy.END){ - killTheOtherTasks(); + if( task.getTaskType().equals(TaskType.CONDITIONS.toString()) || + haveConditionsAfterNode(task.getName())) { + submitPostNode(task.getName()); + }else{ + errorTaskList.put(task.getName(), task); + if(processInstance.getFailureStrategy() == FailureStrategy.END){ + killTheOtherTasks(); + } } } continue; @@ -903,7 +1003,7 @@ public class MasterExecThread implements Runnable { if(completeTask.getState()== ExecutionStatus.PAUSE){ completeTask.setState(ExecutionStatus.KILL); completeTaskList.put(entry.getKey(), completeTask); - processDao.updateTaskInstance(completeTask); + processService.updateTaskInstance(completeTask); } } } @@ -961,7 +1061,7 @@ public class MasterExecThread implements Runnable { Future future = entry.getValue(); TaskInstance taskInstance = taskExecThread.getTaskInstance(); - taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); + 
taskInstance = processService.findTaskInstanceById(taskInstance.getId()); if(taskInstance.getState().typeIsFinished()){ continue; } @@ -1031,7 +1131,7 @@ public class MasterExecThread implements Runnable { } try { Integer intId = Integer.valueOf(taskId); - TaskInstance task = processDao.findTaskInstanceById(intId); + TaskInstance task = processService.findTaskInstanceById(intId); if(task == null){ logger.error("start node id cannot be found: {}", taskId); }else { diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java index a873fb786d..cc5a7e76e4 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerThread.java @@ -22,13 +22,13 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.zk.ZKMasterClient; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,7 +53,7 @@ public class MasterSchedulerThread 
implements Runnable { /** * dolphinscheduler database interface */ - private final ProcessDao processDao; + private final ProcessService processService; /** * zookeeper master client @@ -74,11 +74,11 @@ public class MasterSchedulerThread implements Runnable { /** * constructor of MasterSchedulerThread * @param zkClient zookeeper master client - * @param processDao process dao + * @param processService process service * @param masterExecThreadNum master exec thread num */ - public MasterSchedulerThread(ZKMasterClient zkClient, ProcessDao processDao, int masterExecThreadNum){ - this.processDao = processDao; + public MasterSchedulerThread(ZKMasterClient zkClient, ProcessService processService, int masterExecThreadNum){ + this.processService = processService; this.zkMasterClient = zkClient; this.masterExecThreadNum = masterExecThreadNum; this.masterExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Exec-Thread",masterExecThreadNum); @@ -115,19 +115,19 @@ public class MasterSchedulerThread implements Runnable { ThreadPoolExecutor poolExecutor = (ThreadPoolExecutor) masterExecService; int activeCount = poolExecutor.getActiveCount(); // make sure to scan and delete command table in one transaction - Command command = processDao.findOneCommand(); + Command command = processService.findOneCommand(); if (command != null) { - logger.info(String.format("find one command: id: %d, type: %s", command.getId(),command.getCommandType().toString())); + logger.info("find one command: id: {}, type: {}", command.getId(),command.getCommandType()); try{ - processInstance = processDao.handleCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount, command); + processInstance = processService.handleCommand(logger, OSUtils.getHost(), this.masterExecThreadNum - activeCount, command); if (processInstance != null) { logger.info("start master exec thread , split DAG ..."); - masterExecService.execute(new MasterExecThread(processInstance,processDao)); + 
masterExecService.execute(new MasterExecThread(processInstance, processService)); } }catch (Exception e){ logger.error("scan command error ", e); - processDao.moveToErrorCommand(command, e.toString()); + processService.moveToErrorCommand(command, e.toString()); } } else{ //indicate that no command ,sleep for 1s diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java index f2ee66b64a..66d1a3f4c2 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java @@ -82,7 +82,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { result = waitTaskQuit(); } taskInstance.setEndTime(new Date()); - processDao.updateTaskInstance(taskInstance); + processService.updateTaskInstance(taskInstance); logger.info("task :{} id:{}, process id:{}, exec thread completed ", this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); return result; @@ -94,7 +94,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { */ public Boolean waitTaskQuit(){ // query new state - taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); + taskInstance = processService.findTaskInstanceById(taskInstance.getId()); logger.info("wait task: process id: {}, task id:{}, task name:{} complete", this.taskInstance.getProcessInstanceId(), this.taskInstance.getId(), this.taskInstance.getName()); // task time out @@ -126,15 +126,15 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread { if (remainTime < 0) { logger.warn("task id: {} execution time out",taskInstance.getId()); // process define - ProcessDefinition processDefine = 
processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); + ProcessDefinition processDefine = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); // send warn mail alertDao.sendTaskTimeoutAlert(processInstance.getWarningGroupId(),processDefine.getReceivers(),processDefine.getReceiversCc(),taskInstance.getId(),taskInstance.getName()); checkTimeout = false; } } // updateProcessInstance task instance - taskInstance = processDao.findTaskInstanceById(taskInstance.getId()); - processInstance = processDao.findProcessInstanceById(processInstance.getId()); + taskInstance = processService.findTaskInstanceById(taskInstance.getId()); + processInstance = processService.findProcessInstanceById(processInstance.getId()); Thread.sleep(Constants.SLEEP_TIME_MILLIS); } catch (Exception e) { logger.error("exception",e); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java index 0026de7c25..fc16b5112b 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/SubProcessTaskExecThread.java @@ -64,7 +64,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { } setTaskInstanceState(); waitTaskQuit(); - subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); // at the end of the subflow , the task state is changed to the subflow state if(subProcessInstance != null){ @@ -75,7 +75,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { } } taskInstance.setEndTime(new Date()); - 
processDao.updateTaskInstance(taskInstance); + processService.updateTaskInstance(taskInstance); logger.info("subflow task :{} id:{}, process id:{}, exec thread completed ", this.taskInstance.getName(),taskInstance.getId(), processInstance.getId() ); result = true; @@ -96,14 +96,14 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { * @return */ private Boolean setTaskInstanceState(){ - subProcessInstance = processDao.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); + subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); if(subProcessInstance == null || taskInstance.getState().typeIsFinished()){ return false; } taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION); taskInstance.setStartTime(new Date()); - processDao.updateTaskInstance(taskInstance); + processService.updateTaskInstance(taskInstance); return true; } @@ -111,7 +111,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { * updateProcessInstance parent state */ private void updateParentProcessState(){ - ProcessInstance parentProcessInstance = processDao.findProcessInstanceById(this.processInstance.getId()); + ProcessInstance parentProcessInstance = processService.findProcessInstanceById(this.processInstance.getId()); if(parentProcessInstance == null){ logger.error("parent work flow instance is null , please check it! 
work flow id {}", processInstance.getId()); @@ -145,7 +145,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { continue; } } - subProcessInstance = processDao.findProcessInstanceById(subProcessInstance.getId()); + subProcessInstance = processService.findProcessInstanceById(subProcessInstance.getId()); updateParentProcessState(); if (subProcessInstance.getState().typeIsFinished()){ break; @@ -171,7 +171,7 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { return; } subProcessInstance.setState(ExecutionStatus.READY_STOP); - processDao.updateProcessInstance(subProcessInstance); + processService.updateProcessInstance(subProcessInstance); } /** @@ -183,6 +183,6 @@ public class SubProcessTaskExecThread extends MasterBaseTaskExecThread { return; } subProcessInstance.setState(ExecutionStatus.READY_PAUSE); - processDao.updateProcessInstance(subProcessInstance); + processService.updateProcessInstance(subProcessInstance); } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/ZKMonitorImpl.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/ZKMonitorImpl.java index 927074012d..5acc8fd931 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/ZKMonitorImpl.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/monitor/ZKMonitorImpl.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.monitor; -import org.apache.dolphinscheduler.common.zk.ZookeeperOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperOperator; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java deleted file mode 100644 index 
1c6c97b88f..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LogClient.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.dolphinscheduler.server.rpc; - -import io.grpc.ManagedChannel; -import io.grpc.ManagedChannelBuilder; -import io.grpc.StatusRuntimeException; -import org.apache.dolphinscheduler.rpc.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.TimeUnit; - -/** - * log client - */ -public class LogClient { - - /** - * logger of LogClient - */ - private static final Logger logger = LoggerFactory.getLogger(LogClient.class); - - /** - * managed channel - */ - private final ManagedChannel channel; - - /** - * blocking stub - */ - private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; - - /** - * Construct client connecting to HelloWorld server at host:port. - * - * @param host host - * @param port port - */ - public LogClient(String host, int port) { - this(ManagedChannelBuilder.forAddress(host, port) - // Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid - // needing certificates. 
- .usePlaintext(true)); - } - - /** - * Construct client for accessing RouteGuide server using the existing channel. - * - * @param channelBuilder channel builder - */ - LogClient(ManagedChannelBuilder channelBuilder) { - /** - * set max message read size - */ - channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); - channel = channelBuilder.build(); - blockingStub = LogViewServiceGrpc.newBlockingStub(channel); - } - - /** - * shut down channel - * - * @throws InterruptedException interrupted exception - */ - public void shutdown() throws InterruptedException { - channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); - } - - /** - * roll view log - * - * @param path log path - * @param skipLineNum skip line num - * @param limit limit - * @return log content - */ - public String rollViewLog(String path,int skipLineNum,int limit) { - logger.info("roll view log , path : {},skipLineNum : {} ,limit :{}", path, skipLineNum, limit); - LogParameter pathParameter = LogParameter - .newBuilder() - .setPath(path) - .setSkipLineNum(skipLineNum) - .setLimit(limit) - .build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.rollViewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("roll view log failed", e); - return null; - } - } - - /** - * view all log - * - * @param path log path - * @return log content - */ - public String viewLog(String path) { - logger.info("view log path : {}",path); - - PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); - RetStrInfo retStrInfo; - try { - retStrInfo = blockingStub.viewLog(pathParameter); - return retStrInfo.getMsg(); - } catch (StatusRuntimeException e) { - logger.error("view log failed", e); - return null; - } - } - - /** - * get log bytes - * - * @param path log path - * @return log content - */ - public byte[] getLogBytes(String path) { - logger.info("get log bytes {}",path); - - PathParameter pathParameter = 
PathParameter.newBuilder().setPath(path).build(); - RetByteInfo retByteInfo; - try { - retByteInfo = blockingStub.getLogBytes(pathParameter); - return retByteInfo.getData().toByteArray(); - } catch (StatusRuntimeException e) { - logger.error("get log bytes failed ", e); - return null; - } - } -} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LoggerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LoggerServer.java deleted file mode 100644 index 5ec5df92fc..0000000000 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/rpc/LoggerServer.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.dolphinscheduler.server.rpc; - -import io.grpc.stub.StreamObserver; -import org.apache.dolphinscheduler.common.Constants; -import com.google.protobuf.ByteString; -import io.grpc.Server; -import io.grpc.ServerBuilder; -import org.apache.dolphinscheduler.rpc.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -/** - * logger server - */ -public class LoggerServer { - - private static final Logger logger = LoggerFactory.getLogger(LoggerServer.class); - - /** - * server - */ - private Server server; - - /** - * server start - * @throws IOException io exception - */ - public void start() throws IOException { - /* The port on which the server should run */ - int port = Constants.RPC_PORT; - server = ServerBuilder.forPort(port) - .addService(new LogViewServiceGrpcImpl()) - .build() - .start(); - logger.info("server started, listening on port : {}" , port); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - // Use stderr here since the logger may have been reset by its JVM shutdown hook. - logger.info("shutting down gRPC server since JVM is shutting down"); - LoggerServer.this.stop(); - logger.info("server shut down"); - } - }); - } - - /** - * stop - */ - private void stop() { - if (server != null) { - server.shutdown(); - } - } - - /** - * await termination on the main thread since the grpc library uses daemon threads. - */ - private void blockUntilShutdown() throws InterruptedException { - if (server != null) { - server.awaitTermination(); - } - } - - /** - * main launches the server from the command line. - */ - - /** - * main launches the server from the command line. 
- * @param args arguments - * @throws IOException io exception - * @throws InterruptedException interrupted exception - */ - public static void main(String[] args) throws IOException, InterruptedException { - final LoggerServer server = new LoggerServer(); - server.start(); - server.blockUntilShutdown(); - } - - /** - * Log View Service Grpc Implementation - */ - static class LogViewServiceGrpcImpl extends LogViewServiceGrpc.LogViewServiceImplBase { - @Override - public void rollViewLog(LogParameter request, StreamObserver responseObserver) { - - logger.info("log parameter path : {} ,skip line : {}, limit : {}", - request.getPath(), - request.getSkipLineNum(), - request.getLimit()); - List list = readFile(request.getPath(), request.getSkipLineNum(), request.getLimit()); - StringBuilder sb = new StringBuilder(); - boolean errorLineFlag = false; - for (String line : list){ - sb.append(line + "\r\n"); - } - RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(sb.toString()).build(); - responseObserver.onNext(retInfoBuild); - responseObserver.onCompleted(); - } - - @Override - public void viewLog(PathParameter request, StreamObserver responseObserver) { - logger.info("task path is : {} " , request.getPath()); - RetStrInfo retInfoBuild = RetStrInfo.newBuilder().setMsg(readFile(request.getPath())).build(); - responseObserver.onNext(retInfoBuild); - responseObserver.onCompleted(); - } - - @Override - public void getLogBytes(PathParameter request, StreamObserver responseObserver) { - try { - ByteString bytes = ByteString.copyFrom(getFileBytes(request.getPath())); - RetByteInfo.Builder builder = RetByteInfo.newBuilder(); - builder.setData(bytes); - responseObserver.onNext(builder.build()); - responseObserver.onCompleted(); - }catch (Exception e){ - logger.error("get log bytes failed",e); - } - } - } - - /** - * get files bytes - * - * @param path path - * @return byte array of file - * @throws Exception exception - */ - private static byte[] getFileBytes(String path){ - 
InputStream in = null; - ByteArrayOutputStream bos = null; - try { - in = new FileInputStream(path); - bos = new ByteArrayOutputStream(); - byte[] buf = new byte[1024]; - int len = 0; - while ((len = in.read(buf)) != -1) { - bos.write(buf, 0, len); - } - return bos.toByteArray(); - }catch (IOException e){ - logger.error("get file bytes error",e); - }finally { - if (bos != null){ - try { - bos.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - if (in != null){ - try { - in.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - return null; - } - - /** - * read file content - * - * @param path - * @param skipLine - * @param limit - * @return - */ - private static List readFile(String path,int skipLine,int limit){ - try (Stream stream = Files.lines(Paths.get(path))) { - return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); - } catch (IOException e) { - logger.error("read file failed",e); - } - return null; - } - - /** - * read file content - * - * @param path path - * @return string of file content - * @throws Exception exception - */ - private static String readFile(String path){ - BufferedReader br = null; - String line = null; - StringBuilder sb = new StringBuilder(); - try { - br = new BufferedReader(new InputStreamReader(new FileInputStream(path))); - boolean errorLineFlag = false; - while ((line = br.readLine()) != null){ - sb.append(line + "\r\n"); - } - - return sb.toString(); - }catch (IOException e){ - logger.error("read file failed",e); - }finally { - try { - if (br != null){ - br.close(); - } - } catch (IOException e) { - logger.error(e.getMessage(),e); - } - } - return null; - } - -} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java index ab34ddfc2b..4c33ef8db2 100644 --- 
a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/FlinkArgsUtils.java @@ -87,7 +87,7 @@ public class FlinkArgsUtils { args.add(taskManagerMemory); } - args.add(Constants.FLINK_detach); //-d + args.add(Constants.FLINK_DETACH); //-d } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java index fd0a08cd8e..e0c00c55d9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/ProcessUtils.java @@ -22,8 +22,8 @@ import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.server.rpc.LogClient; import org.apache.commons.io.FileUtils; +import org.apache.dolphinscheduler.service.log.LogClientService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -375,9 +375,16 @@ public class ProcessUtils { public static void killYarnJob(TaskInstance taskInstance) { try { Thread.sleep(Constants.SLEEP_TIME_MILLIS); - LogClient logClient = new LogClient(taskInstance.getHost(), Constants.RPC_PORT); - - String log = logClient.viewLog(taskInstance.getLogPath()); + LogClientService logClient = null; + String log = null; + try { + logClient = new LogClientService(); + log = logClient.viewLog(taskInstance.getHost(), Constants.RPC_PORT, taskInstance.getLogPath()); + } finally { + if(logClient != null){ + logClient.close(); + } + } if (StringUtils.isNotEmpty(log)) { List appIds = LoggerUtils.getAppIds(log, logger); String workerDir = 
taskInstance.getExecutePath(); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java index 7264c2f59d..5550e750b5 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/RemoveZKNode.java @@ -16,7 +16,7 @@ */ package org.apache.dolphinscheduler.server.utils; -import org.apache.dolphinscheduler.common.zk.ZookeeperOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperOperator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java index d270880408..ace93079ff 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java @@ -22,22 +22,22 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.IStoppable; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.TaskType; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; -import org.apache.dolphinscheduler.common.queue.TaskQueueFactory; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadPoolExecutors; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; -import 
org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.utils.ProcessUtils; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.worker.runner.FetchTaskThread; import org.apache.dolphinscheduler.server.zk.ZKWorkerClient; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -73,10 +73,10 @@ public class WorkerServer implements IStoppable { /** - * process database access + * process service */ @Autowired - private ProcessDao processDao; + private ProcessService processService; /** * alert database access @@ -104,13 +104,6 @@ public class WorkerServer implements IStoppable { */ private ExecutorService fetchTaskExecutorService; - /** - * spring application context - * only use it for initialization - */ - @Autowired - private SpringApplicationContext springApplicationContext; - /** * CountDownLatch latch */ @@ -122,6 +115,13 @@ public class WorkerServer implements IStoppable { @Autowired private WorkerConfig workerConfig; + /** + * spring application context + * only use it for initialization + */ + @Autowired + private SpringApplicationContext springApplicationContext; + /** * master server startup * @@ -149,7 +149,7 @@ public class WorkerServer implements IStoppable { this.fetchTaskExecutorService = 
ThreadUtils.newDaemonSingleThreadExecutor("Worker-Fetch-Thread-Executor"); - heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.defaulWorkerHeartbeatThreadNum); + heartbeatWorkerService = ThreadUtils.newDaemonThreadScheduledExecutor("Worker-Heartbeat-Thread-Executor", Constants.DEFAUL_WORKER_HEARTBEAT_THREAD_NUM); // heartbeat thread implement Runnable heartBeatThread = heartBeatThread(); @@ -167,7 +167,7 @@ public class WorkerServer implements IStoppable { killExecutorService.execute(killProcessThread); // new fetch task thread - FetchTaskThread fetchTaskThread = new FetchTaskThread(zkWorkerClient, processDao, taskQueue); + FetchTaskThread fetchTaskThread = new FetchTaskThread(zkWorkerClient, processService, taskQueue); // submit fetch task thread fetchTaskExecutorService.execute(fetchTaskThread); @@ -297,7 +297,7 @@ public class WorkerServer implements IStoppable { Set taskInfoSet = taskQueue.smembers(Constants.DOLPHINSCHEDULER_TASKS_KILL); if (CollectionUtils.isNotEmpty(taskInfoSet)){ for (String taskInfo : taskInfoSet){ - killTask(taskInfo, processDao); + killTask(taskInfo, processService); removeKillInfoFromQueue(taskInfo); } } @@ -319,7 +319,7 @@ public class WorkerServer implements IStoppable { * @param taskInfo task info * @param pd process dao */ - private void killTask(String taskInfo, ProcessDao pd) { + private void killTask(String taskInfo, ProcessService pd) { logger.info("get one kill command from tasks kill queue: " + taskInfo); String[] taskInfoArray = taskInfo.split("-"); if(taskInfoArray.length != 2){ @@ -357,7 +357,7 @@ public class WorkerServer implements IStoppable { * @param taskInstance * @param pd process dao */ - private void deleteTaskFromQueue(TaskInstance taskInstance, ProcessDao pd){ + private void deleteTaskFromQueue(TaskInstance taskInstance, ProcessService pd){ // creating distributed locks, lock path /dolphinscheduler/lock/worker InterProcessMutex mutex = null; 
logger.info("delete task from tasks queue: " + taskInstance.getId()); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java index 221ad069bb..013db83761 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/FetchTaskThread.java @@ -19,17 +19,18 @@ package org.apache.dolphinscheduler.server.worker.runner; import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.*; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.Tenant; import org.apache.dolphinscheduler.dao.entity.WorkerGroup; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import org.apache.dolphinscheduler.server.zk.ZKWorkerClient; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -63,7 +64,7 @@ public class FetchTaskThread implements Runnable{ /** * process database access */ - private final ProcessDao processDao; + private final ProcessService processService; /** 
* worker thread pool executor @@ -91,10 +92,10 @@ public class FetchTaskThread implements Runnable{ private WorkerConfig workerConfig; public FetchTaskThread(ZKWorkerClient zkWorkerClient, - ProcessDao processDao, + ProcessService processService, ITaskQueue taskQueue){ this.zkWorkerClient = zkWorkerClient; - this.processDao = processDao; + this.processService = processService; this.taskQueue = taskQueue; this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class); this.taskNum = workerConfig.getWorkerFetchTaskNum(); @@ -112,12 +113,12 @@ public class FetchTaskThread implements Runnable{ */ private boolean checkWorkerGroup(TaskInstance taskInstance, String host){ - int taskWorkerGroupId = processDao.getTaskWorkerGroupId(taskInstance); + int taskWorkerGroupId = processService.getTaskWorkerGroupId(taskInstance); if(taskWorkerGroupId <= 0){ return true; } - WorkerGroup workerGroup = processDao.queryWorkerGroupById(taskWorkerGroupId); + WorkerGroup workerGroup = processService.queryWorkerGroupById(taskWorkerGroupId); if(workerGroup == null ){ logger.info("task {} cannot find the worker group, use all worker instead.", taskInstance.getId()); return true; @@ -184,7 +185,7 @@ public class FetchTaskThread implements Runnable{ // mainly to wait for the master insert task to succeed waitForTaskInstance(); - taskInstance = processDao.getTaskInstanceDetailByTaskId(taskInstId); + taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstId); // verify task instance is null if (verifyTaskInstanceIsNull(taskInstance)) { @@ -200,7 +201,7 @@ public class FetchTaskThread implements Runnable{ // if process definition is null ,process definition already deleted int userId = taskInstance.getProcessDefine() == null ? 
0 : taskInstance.getProcessDefine().getUserId(); - Tenant tenant = processDao.getTenantForProcess( + Tenant tenant = processService.getTenantForProcess( taskInstance.getProcessInstance().getTenantId(), userId); @@ -212,7 +213,7 @@ public class FetchTaskThread implements Runnable{ } // set queue for process instance, user-specified queue takes precedence over tenant queue - String userQueue = processDao.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); + String userQueue = processService.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId()); taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue); taskInstance.getProcessInstance().setTenantCode(tenant.getTenantCode()); @@ -234,7 +235,7 @@ public class FetchTaskThread implements Runnable{ logger.info("task : {} ready to submit to task scheduler thread",taskInstId); // submit task - workerExecService.submit(new TaskScheduleThread(taskInstance, processDao)); + workerExecService.submit(new TaskScheduleThread(taskInstance, processService)); // remove node from zk removeNodeFromTaskQueue(taskQueueStr); @@ -259,7 +260,7 @@ public class FetchTaskThread implements Runnable{ removeNodeFromTaskQueue(taskQueueStr); if (taskInstance != null){ - processDao.changeTaskState(ExecutionStatus.FAILURE, + processService.changeTaskState(ExecutionStatus.FAILURE, taskInstance.getStartTime(), taskInstance.getHost(), null, @@ -347,7 +348,7 @@ public class FetchTaskThread implements Runnable{ int retryTimes = 30; while (taskInstance == null && retryTimes > 0) { Thread.sleep(Constants.SLEEP_TIME_MILLIS); - taskInstance = processDao.findTaskInstanceById(taskInstId); + taskInstance = processService.findTaskInstanceById(taskInstId); retryTimes--; } } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java index f179d6344a..5e68acf94e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskScheduleThread.java @@ -31,15 +31,15 @@ import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter; import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; -import org.apache.dolphinscheduler.dao.permission.PermissionCheck; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.common.log.TaskLogDiscriminator; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskManager; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.permission.PermissionCheck; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,9 +64,9 @@ public class TaskScheduleThread implements Runnable { private TaskInstance taskInstance; /** - * process database access + * process service */ - private final ProcessDao processDao; + private final ProcessService processService; /** * abstract task @@ -77,10 +77,10 @@ public class TaskScheduleThread implements Runnable { * constructor * * @param taskInstance task instance - * @param processDao process dao + * @param processService process dao */ - public TaskScheduleThread(TaskInstance taskInstance, ProcessDao processDao){ - this.processDao = 
processDao; + public TaskScheduleThread(TaskInstance taskInstance, ProcessService processService){ + this.processService = processService; this.taskInstance = taskInstance; } @@ -152,7 +152,7 @@ public class TaskScheduleThread implements Runnable { logger.error("task scheduler failure", e); kill(); // update task instance state - processDao.changeTaskState(ExecutionStatus.FAILURE, + processService.changeTaskState(ExecutionStatus.FAILURE, new Date(), taskInstance.getId()); } @@ -161,11 +161,10 @@ public class TaskScheduleThread implements Runnable { taskInstance.getId(), task.getExitStatus()); // update task instance state - processDao.changeTaskState(task.getExitStatus(), + processService.changeTaskState(task.getExitStatus(), new Date(), taskInstance.getId()); } - /** * get global paras map * @return @@ -191,14 +190,14 @@ public class TaskScheduleThread implements Runnable { // update task status is running if(taskType.equals(TaskType.SQL.name()) || taskType.equals(TaskType.PROCEDURE.name())){ - processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, + processService.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, taskInstance.getStartTime(), taskInstance.getHost(), null, getTaskLogPath(), taskInstance.getId()); }else{ - processDao.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, + processService.changeTaskState(ExecutionStatus.RUNNING_EXEUTION, taskInstance.getStartTime(), taskInstance.getHost(), taskInstance.getExecutePath(), @@ -212,21 +211,29 @@ public class TaskScheduleThread implements Runnable { * @return log path */ private String getTaskLogPath() { - String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) - .getLogger("ROOT") - .getAppender("TASKLOGFILE")) - .getDiscriminator()).getLogBase(); - if (baseLog.startsWith(Constants.SINGLE_SLASH)){ - return baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - taskInstance.getProcessInstanceId() + 
Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; + String logPath; + try{ + String baseLog = ((TaskLogDiscriminator) ((SiftingAppender) ((LoggerContext) LoggerFactory.getILoggerFactory()) + .getLogger("ROOT") + .getAppender("TASKLOGFILE")) + .getDiscriminator()).getLogBase(); + if (baseLog.startsWith(Constants.SINGLE_SLASH)){ + logPath = baseLog + Constants.SINGLE_SLASH + + taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + + taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskInstance.getId() + ".log"; + }else{ + logPath = System.getProperty("user.dir") + Constants.SINGLE_SLASH + + baseLog + Constants.SINGLE_SLASH + + taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + + taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + + taskInstance.getId() + ".log"; + } + }catch (Exception e){ + logger.error("logger" + e); + logPath = ""; } - return System.getProperty("user.dir") + Constants.SINGLE_SLASH + - baseLog + Constants.SINGLE_SLASH + - taskInstance.getProcessDefinitionId() + Constants.SINGLE_SLASH + - taskInstance.getProcessInstanceId() + Constants.SINGLE_SLASH + - taskInstance.getId() + ".log"; + return logPath; } /** @@ -311,7 +318,7 @@ public class TaskScheduleThread implements Runnable { if (!resFile.exists()) { try { // query the tenant code of the resource according to the name of the resource - String tentnCode = processDao.queryTenantCodeByResName(res); + String tentnCode = processService.queryTenantCodeByResName(res); String resHdfsPath = HadoopUtils.getHdfsFilename(tentnCode, res); logger.info("get resource file from hdfs :{}", resHdfsPath); @@ -334,7 +341,7 @@ public class TaskScheduleThread implements Runnable { private void checkDownloadPermission(List projectRes) throws Exception { int userId = taskInstance.getProcessInstance().getExecutorId(); String[] resNames = projectRes.toArray(new String[projectRes.size()]); - PermissionCheck permissionCheck = new 
PermissionCheck<>(AuthorizationType.RESOURCE_FILE,processDao,resNames,userId,logger); + PermissionCheck permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE, processService,resNames,userId,logger); permissionCheck.checkPermission(); } } \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java index 04098215dd..bac498c150 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java @@ -21,10 +21,10 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.io.*; @@ -121,10 +121,10 @@ public abstract class AbstractCommandExecutor { * task specific execution logic * * @param execCommand exec command - * @param processDao process dao + * @param processService process dao * @return exit status code */ - public int run(String execCommand, ProcessDao processDao) { + public int run(String execCommand, ProcessService processService) { int exitStatusCode; try { @@ -147,7 +147,7 @@ public abstract class AbstractCommandExecutor { // get process id int pid = getProcessId(process); - processDao.updatePidByTaskInstId(taskInstId, pid, ""); + 
processService.updatePidByTaskInstId(taskInstId, pid, ""); logger.info("process start, process id is: {}", pid); @@ -161,10 +161,10 @@ public abstract class AbstractCommandExecutor { exitStatusCode = process.exitValue(); logger.info("process has exited, work dir:{}, pid:{} ,exitStatusCode:{}", taskDir, pid,exitStatusCode); //update process state to db - exitStatusCode = updateState(processDao, exitStatusCode, pid, taskInstId); + exitStatusCode = updateState(processService, exitStatusCode, pid, taskInstId); } else { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); if (taskInstance == null) { logger.error("task instance id:{} not exist", taskInstId); } else { @@ -176,7 +176,7 @@ public abstract class AbstractCommandExecutor { } catch (InterruptedException e) { exitStatusCode = -1; - logger.error(String.format("interrupt exception: {}, task may be cancelled or killed",e.getMessage()), e); + logger.error("interrupt exception: {}, task may be cancelled or killed", e.getMessage(), e); throw new RuntimeException("interrupt exception. 
exitCode is : " + exitStatusCode); } catch (Exception e) { exitStatusCode = -1; @@ -219,23 +219,23 @@ public abstract class AbstractCommandExecutor { /** * update process state to db * - * @param processDao process dao + * @param processService process dao * @param exitStatusCode exit status code * @param pid process id * @param taskInstId task instance id * @return exit status code */ - private int updateState(ProcessDao processDao, int exitStatusCode, int pid, int taskInstId) { + private int updateState(ProcessService processService, int exitStatusCode, int pid, int taskInstId) { //get yarn state by log if (exitStatusCode == 0) { - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); logger.info("process id is {}", pid); List appIds = getAppLinks(taskInstance.getLogPath()); if (appIds.size() > 0) { String appUrl = String.join(Constants.COMMA, appIds); logger.info("yarn log url:{}",appUrl); - processDao.updatePidByTaskInstId(taskInstId, pid, appUrl); + processService.updatePidByTaskInstId(taskInstId, pid, appUrl); } // check if all operations are completed @@ -408,7 +408,7 @@ public abstract class AbstractCommandExecutor { } } } catch (Exception e) { - logger.error(String.format("yarn applications: %s status failed ", appIds.toString()),e); + logger.error("yarn applications: {} status failed ", appIds,e); result = false; } return result; @@ -458,7 +458,7 @@ public abstract class AbstractCommandExecutor { lineList.add(line); } } catch (Exception e) { - logger.error(String.format("read file: %s failed : ",filename),e); + logger.error("read file: {} failed",filename,e); } finally { if(br != null){ try { @@ -535,7 +535,7 @@ public abstract class AbstractCommandExecutor { /** * when log buffer siz or flush time reach condition , then flush */ - if (logBuffer.size() >= Constants.defaultLogRowsNum || now - lastFlushTime > Constants.defaultLogFlushInterval) { + if 
(logBuffer.size() >= Constants.DEFAULT_LOG_ROWS_NUM || now - lastFlushTime > Constants.DEFAULT_LOG_FLUSH_INTERVAL) { lastFlushTime = now; /** log handle */ logHandler.accept(logBuffer); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java index f2772d0747..3795506b78 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java @@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.common.enums.TaskRecordStatus; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters; @@ -30,6 +31,7 @@ import org.apache.dolphinscheduler.common.task.python.PythonParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import org.apache.dolphinscheduler.common.task.spark.SparkParameters; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.TaskRecordDao; import org.apache.dolphinscheduler.server.utils.ParamUtils; @@ -198,6 +200,12 @@ public abstract class AbstractTask { case DATAX: paramsClass = DataxParameters.class; break; + case SQOOP: + paramsClass = SqoopParameters.class; + break; + case CONDITIONS: + paramsClass = 
ConditionsParameters.class; + break; default: logger.error("not support this task type: {}", taskType); throw new IllegalArgumentException("not support this task type"); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java index 6846617408..39f4dfbb97 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractYarnTask.java @@ -16,10 +16,10 @@ */ package org.apache.dolphinscheduler.server.worker.task; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.utils.ProcessUtils; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; /** @@ -39,7 +39,7 @@ public abstract class AbstractYarnTask extends AbstractTask { /** * process database access */ - protected ProcessDao processDao; + protected ProcessService processService; /** * Abstract Yarn Task @@ -48,7 +48,7 @@ public abstract class AbstractYarnTask extends AbstractTask { */ public AbstractYarnTask(TaskProps taskProps, Logger logger) { super(taskProps, logger); - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskProps.getTaskDir(), taskProps.getTaskAppId(), @@ -64,7 +64,7 @@ public abstract class AbstractYarnTask extends AbstractTask { public void handle() throws Exception { try { // construct process - exitStatusCode = 
shellCommandExecutor.run(buildCommand(), processDao); + exitStatusCode = shellCommandExecutor.run(buildCommand(), processService); } catch (Exception e) { logger.error("yarn process failure", e); exitStatusCode = -1; @@ -82,7 +82,7 @@ public abstract class AbstractYarnTask extends AbstractTask { cancel = true; // cancel process shellCommandExecutor.cancelApplication(); - TaskInstance taskInstance = processDao.findTaskInstanceById(taskProps.getTaskInstId()); + TaskInstance taskInstance = processService.findTaskInstanceById(taskProps.getTaskInstId()); if (status && taskInstance != null){ ProcessUtils.killYarnJob(taskInstance); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java index 67deb7a3fa..ad62b77655 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/TaskManager.java @@ -19,6 +19,7 @@ package org.apache.dolphinscheduler.server.worker.task; import org.apache.dolphinscheduler.common.enums.TaskType; import org.apache.dolphinscheduler.common.utils.EnumUtils; +import org.apache.dolphinscheduler.server.worker.task.conditions.ConditionsTask; import org.apache.dolphinscheduler.server.worker.task.dependent.DependentTask; import org.apache.dolphinscheduler.server.worker.task.datax.DataxTask; import org.apache.dolphinscheduler.server.worker.task.flink.FlinkTask; @@ -29,6 +30,7 @@ import org.apache.dolphinscheduler.server.worker.task.python.PythonTask; import org.apache.dolphinscheduler.server.worker.task.shell.ShellTask; import org.apache.dolphinscheduler.server.worker.task.spark.SparkTask; import org.apache.dolphinscheduler.server.worker.task.sql.SqlTask; +import org.apache.dolphinscheduler.server.worker.task.sqoop.SqoopTask; import org.slf4j.Logger; 
/** @@ -68,6 +70,10 @@ public class TaskManager { return new HttpTask(props, logger); case DATAX: return new DataxTask(props, logger); + case SQOOP: + return new SqoopTask(props, logger); + case CONDITIONS: + return new ConditionsTask(props, logger); default: logger.error("unsupport task type: {}", taskType); throw new IllegalArgumentException("not support task type"); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java new file mode 100644 index 0000000000..cbe82ce20a --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/conditions/ConditionsTask.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.conditions; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.DependResult; +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.model.DependentItem; +import org.apache.dolphinscheduler.common.model.DependentTaskModel; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.utils.DependentUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.server.worker.task.AbstractTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class ConditionsTask extends AbstractTask { + + + /** + * dependent parameters + */ + private DependentParameters dependentParameters; + + /** + * process dao + */ + private ProcessService processService; + + /** + * taskInstance + */ + private TaskInstance taskInstance; + + /** + * processInstance + */ + private ProcessInstance processInstance; + + /** + * + */ + private Map completeTaskList = new ConcurrentHashMap<>(); + + /** + * constructor + * + * @param taskProps task props + * @param logger logger + */ + public ConditionsTask(TaskProps taskProps, Logger logger) { + super(taskProps, logger); + } + + @Override + public void init() throws Exception { + logger.info("conditions task initialize"); + + this.processService = 
SpringApplicationContext.getBean(ProcessService.class); + + this.dependentParameters = JSONUtils.parseObject(this.taskProps.getDependence(), DependentParameters.class); + + this.taskInstance = processService.findTaskInstanceById(taskProps.getTaskInstId()); + + if(taskInstance == null){ + throw new Exception("cannot find the task instance!"); + } + + List taskInstanceList = processService.findValidTaskListByProcessId(taskInstance.getProcessInstanceId()); + for(TaskInstance task : taskInstanceList){ + this.completeTaskList.putIfAbsent(task.getName(), task.getState()); + } + } + + @Override + public void handle() throws Exception { + + String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskProps.getTaskAppId()); + Thread.currentThread().setName(threadLoggerInfoName); + + List modelResultList = new ArrayList<>(); + for(DependentTaskModel dependentTaskModel : dependentParameters.getDependTaskList()){ + + List itemDependResult = new ArrayList<>(); + for(DependentItem item : dependentTaskModel.getDependItemList()){ + itemDependResult.add(getDependResultForItem(item)); + } + DependResult modelResult = DependentUtils.getDependResultForRelation(dependentTaskModel.getRelation(), itemDependResult); + modelResultList.add(modelResult); + } + DependResult result = DependentUtils.getDependResultForRelation( + dependentParameters.getRelation(), modelResultList + ); + logger.info("the conditions task depend result : {}", result); + exitStatusCode = (result == DependResult.SUCCESS) ? 
+ Constants.EXIT_CODE_SUCCESS : Constants.EXIT_CODE_FAILURE; + } + + private DependResult getDependResultForItem(DependentItem item){ + + DependResult dependResult = DependResult.SUCCESS; + if(!completeTaskList.containsKey(item.getDepTasks())){ + logger.info("depend item: {} have not completed yet.", item.getDepTasks()); + dependResult = DependResult.FAILED; + return dependResult; + } + ExecutionStatus executionStatus = completeTaskList.get(item.getDepTasks()); + if(executionStatus != item.getStatus()){ + logger.info("depend item : {} expect status: {}, actual status: {}" ,item.getDepTasks(), item.getStatus().toString(), executionStatus.toString()); + dependResult = DependResult.FAILED; + } + logger.info("depend item: {}, depend result: {}", + item.getDepTasks(), dependResult); + return dependResult; + } + + @Override + public AbstractParameters getParameters() { + return null; + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java index 0de2bbc7c6..ef941cd062 100755 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTask.java @@ -39,23 +39,23 @@ import java.util.Set; import org.apache.commons.io.FileUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.job.db.BaseDataSource; -import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import 
org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.utils.ParamUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import com.alibaba.druid.sql.ast.SQLStatement; @@ -106,9 +106,9 @@ public class DataxTask extends AbstractTask { private ShellCommandExecutor shellCommandExecutor; /** - * process database access + * process dao */ - private ProcessDao processDao; + private ProcessService processService; /** * constructor @@ -128,7 +128,7 @@ public class DataxTask extends AbstractTask { props.getTaskInstId(), props.getTenantCode(), props.getEnvFile(), props.getTaskStartTime(), props.getTaskTimeout(), logger); - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); } /** @@ -160,7 +160,7 @@ public class DataxTask extends AbstractTask { // run datax process String jsonFilePath = buildDataxJsonFile(); String shellCommandFilePath = buildShellCommandFile(jsonFilePath); - exitStatusCode = shellCommandExecutor.run(shellCommandFilePath, processDao); + 
exitStatusCode = shellCommandExecutor.run(shellCommandFilePath, processService); } catch (Exception e) { exitStatusCode = -1; @@ -220,11 +220,11 @@ public class DataxTask extends AbstractTask { */ private List buildDataxJobContentJson() throws SQLException { - DataSource dataSource = processDao.findDataSourceById(dataXParameters.getDataSource()); + DataSource dataSource = processService.findDataSourceById(dataXParameters.getDataSource()); BaseDataSource dataSourceCfg = DataSourceFactory.getDatasource(dataSource.getType(), dataSource.getConnectionParams()); - DataSource dataTarget = processDao.findDataSourceById(dataXParameters.getDataTarget()); + DataSource dataTarget = processService.findDataSourceById(dataXParameters.getDataTarget()); BaseDataSource dataTargetCfg = DataSourceFactory.getDatasource(dataTarget.getType(), dataTarget.getConnectionParams()); @@ -355,7 +355,7 @@ public class DataxTask extends AbstractTask { String dataxCommand = sbr.toString(); // find process instance by task id - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); // combining local and global parameters Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java index 4be65ed49d..b08cabc2e9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentExecute.java @@ -23,10 +23,10 @@ import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import 
org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.DependentItem; import org.apache.dolphinscheduler.common.utils.DependentUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,9 +37,9 @@ import java.util.*; */ public class DependentExecute { /** - * process dao + * process service */ - private final ProcessDao processDao = SpringApplicationContext.getBean(ProcessDao.class); + private final ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); /** * depend item list @@ -108,7 +108,7 @@ public class DependentExecute { result = getDependResultByState(processInstance.getState()); }else{ TaskInstance taskInstance = null; - List taskInstanceList = processDao.findValidTaskListByProcessId(processInstance.getId()); + List taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId()); for(TaskInstance task : taskInstanceList){ if(task.getName().equals(dependentItem.getDepTasks())){ @@ -141,16 +141,16 @@ public class DependentExecute { */ private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) { - ProcessInstance runningProcess = processDao.findLastRunningProcess(definitionId, dateInterval); + ProcessInstance runningProcess = processService.findLastRunningProcess(definitionId, dateInterval); if(runningProcess != null){ return runningProcess; } - ProcessInstance lastSchedulerProcess = processDao.findLastSchedulerProcessInterval( + ProcessInstance lastSchedulerProcess = processService.findLastSchedulerProcessInterval( 
definitionId, dateInterval ); - ProcessInstance lastManualProcess = processDao.findLastManualProcessInterval( + ProcessInstance lastManualProcess = processService.findLastManualProcessInterval( definitionId, dateInterval ); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java index 9af29e01dd..f074d57e6c 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/dependent/DependentTask.java @@ -25,11 +25,11 @@ import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; import org.apache.dolphinscheduler.common.thread.Stopper; import org.apache.dolphinscheduler.common.utils.DependentUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.util.*; @@ -63,9 +63,9 @@ public class DependentTask extends AbstractTask { private Date dependentDate; /** - * process dao + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * constructor @@ -88,7 +88,7 @@ public class DependentTask extends AbstractTask { taskModel.getDependItemList(), taskModel.getRelation())); } - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = 
SpringApplicationContext.getBean(ProcessService.class); if(taskProps.getScheduleTime() != null){ this.dependentDate = taskProps.getScheduleTime(); @@ -107,7 +107,7 @@ public class DependentTask extends AbstractTask { try{ TaskInstance taskInstance = null; while(Stopper.isRunning()){ - taskInstance = processDao.findTaskInstanceById(this.taskProps.getTaskInstId()); + taskInstance = processService.findTaskInstanceById(this.taskProps.getTaskInstId()); if(taskInstance == null){ exitStatusCode = -1; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java index 0fa9e11ce5..c562fbe4dd 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/flink/FlinkTask.java @@ -68,7 +68,7 @@ public class FlinkTask extends AbstractYarnTask { if (StringUtils.isNotEmpty(flinkParameters.getMainArgs())) { String args = flinkParameters.getMainArgs(); // get process instance by task instance id - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); /** * combining local and global parameters diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java index 97e6cb7bee..c925f90b9e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/http/HttpTask.java @@ -28,13 +28,13 @@ import 
org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.http.HttpParameters; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; import org.apache.dolphinscheduler.common.utils.StringUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.http.HttpEntity; import org.apache.http.ParseException; import org.apache.http.client.config.RequestConfig; @@ -66,9 +66,9 @@ public class HttpTask extends AbstractTask { private HttpParameters httpParameters; /** - * process database access + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * Convert mill seconds to second unit @@ -92,7 +92,7 @@ public class HttpTask extends AbstractTask { */ public HttpTask(TaskProps props, Logger logger) { super(props, logger); - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override @@ -138,7 +138,7 @@ public class HttpTask extends AbstractTask { */ protected CloseableHttpResponse sendRequest(CloseableHttpClient client) throws IOException { RequestBuilder builder = createRequestBuilder(); - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); Map paramsMap = 
ParamUtils.convert(taskProps.getUserDefParamsMap(), taskProps.getDefinedParams(), diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java index 9b4952bbd2..fb881453e9 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/processdure/ProcedureTask.java @@ -22,19 +22,19 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.DataType; import org.apache.dolphinscheduler.common.enums.Direct; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; -import org.apache.dolphinscheduler.common.job.db.BaseDataSource; -import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; import org.apache.dolphinscheduler.common.utils.CollectionUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import 
org.slf4j.Logger; import java.sql.*; @@ -56,9 +56,9 @@ public class ProcedureTask extends AbstractTask { private ProcedureParameters procedureParameters; /** - * process database access + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * base datasource @@ -82,7 +82,7 @@ public class ProcedureTask extends AbstractTask { throw new RuntimeException("procedure task params is not valid"); } - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override @@ -97,7 +97,7 @@ public class ProcedureTask extends AbstractTask { procedureParameters.getMethod(), procedureParameters.getLocalParams()); - DataSource dataSource = processDao.findDataSourceById(procedureParameters.getDatasource()); + DataSource dataSource = processService.findDataSourceById(procedureParameters.getDatasource()); if (dataSource == null){ logger.error("datasource not exists"); exitStatusCode = -1; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java index 585d62f154..fc212f866b 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/python/PythonTask.java @@ -22,12 +22,12 @@ import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.python.PythonParameters; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.server.utils.ParamUtils; import 
org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.PythonCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.util.Map; @@ -53,9 +53,9 @@ public class PythonTask extends AbstractTask { private PythonCommandExecutor pythonCommandExecutor; /** - * process database access + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * constructor @@ -76,7 +76,7 @@ public class PythonTask extends AbstractTask { taskProps.getTaskStartTime(), taskProps.getTaskTimeout(), logger); - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override @@ -94,7 +94,7 @@ public class PythonTask extends AbstractTask { public void handle() throws Exception { try { // construct process - exitStatusCode = pythonCommandExecutor.run(buildCommand(), processDao); + exitStatusCode = pythonCommandExecutor.run(buildCommand(), processService); } catch (Exception e) { logger.error("python task failure", e); exitStatusCode = -1; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java index 789a0c5302..5704c8052e 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/shell/ShellTask.java @@ -23,12 +23,12 @@ import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.shell.ShellParameters; import 
org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.io.File; @@ -64,7 +64,7 @@ public class ShellTask extends AbstractTask { /** * process database access */ - private ProcessDao processDao; + private ProcessService processService; /** * constructor @@ -84,7 +84,7 @@ public class ShellTask extends AbstractTask { taskProps.getTaskStartTime(), taskProps.getTaskTimeout(), logger); - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); } @Override @@ -102,7 +102,7 @@ public class ShellTask extends AbstractTask { public void handle() throws Exception { try { // construct process - exitStatusCode = shellCommandExecutor.run(buildCommand(), processDao); + exitStatusCode = shellCommandExecutor.run(buildCommand(), processService); } catch (Exception e) { logger.error("shell task failure", e); exitStatusCode = -1; diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java index eba05a0d21..aae11f5530 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java +++ 
b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java @@ -27,8 +27,6 @@ import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.ShowType; import org.apache.dolphinscheduler.common.enums.TaskTimeoutStrategy; import org.apache.dolphinscheduler.common.enums.UdfType; -import org.apache.dolphinscheduler.common.job.db.BaseDataSource; -import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.sql.SqlBinds; @@ -36,16 +34,19 @@ import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sql.SqlType; import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.AlertDao; -import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.UdfFunc; import org.apache.dolphinscheduler.dao.entity.User; -import org.apache.dolphinscheduler.dao.permission.PermissionCheck; import org.apache.dolphinscheduler.server.utils.ParamUtils; import org.apache.dolphinscheduler.server.utils.UDFUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.permission.PermissionCheck; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.sql.*; @@ -67,9 +68,9 @@ public class SqlTask extends AbstractTask { 
private SqlParameters sqlParameters; /** - * process database access + * process service */ - private ProcessDao processDao; + private ProcessService processService; /** * alert dao @@ -96,7 +97,7 @@ public class SqlTask extends AbstractTask { if (!sqlParameters.checkParameters()) { throw new RuntimeException("sql task params is not valid"); } - this.processDao = SpringApplicationContext.getBean(ProcessDao.class); + this.processService = SpringApplicationContext.getBean(ProcessService.class); this.alertDao = SpringApplicationContext.getBean(AlertDao.class); } @@ -122,7 +123,7 @@ public class SqlTask extends AbstractTask { return; } - dataSource= processDao.findDataSourceById(sqlParameters.getDatasource()); + dataSource= processService.findDataSourceById(sqlParameters.getDatasource()); // data source is null if (dataSource == null){ @@ -171,7 +172,7 @@ public class SqlTask extends AbstractTask { } // check udf permission checkUdfPermission(ArrayUtils.toObject(idsArray)); - List udfFuncList = processDao.queryUdfFunListByids(idsArray); + List udfFuncList = processService.queryUdfFunListByids(idsArray); createFuncs = UDFUtils.createFuncs(udfFuncList, taskProps.getTenantCode(), logger); } @@ -383,7 +384,7 @@ public class SqlTask extends AbstractTask { public void sendAttachment(String title,String content){ // process instance - ProcessInstance instance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance instance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); List users = alertDao.queryUserByAlertGroupId(instance.getWarningGroupId()); @@ -470,10 +471,10 @@ public class SqlTask extends AbstractTask { */ private void checkUdfPermission(Integer[] udfFunIds) throws Exception{ // process instance - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); int userId = 
processInstance.getExecutorId(); - PermissionCheck permissionCheckUdf = new PermissionCheck(AuthorizationType.UDF,processDao,udfFunIds,userId,logger); + PermissionCheck permissionCheckUdf = new PermissionCheck(AuthorizationType.UDF, processService,udfFunIds,userId,logger); permissionCheckUdf.checkPermission(); } @@ -484,10 +485,10 @@ public class SqlTask extends AbstractTask { */ private void checkDataSourcePermission(int dataSourceId) throws Exception{ // process instance - ProcessInstance processInstance = processDao.findProcessInstanceByTaskId(taskProps.getTaskInstId()); + ProcessInstance processInstance = processService.findProcessInstanceByTaskId(taskProps.getTaskInstId()); int userId = processInstance.getExecutorId(); - PermissionCheck permissionCheckDataSource = new PermissionCheck(AuthorizationType.DATASOURCE,processDao,new Integer[]{dataSourceId},userId,logger); + PermissionCheck permissionCheckDataSource = new PermissionCheck(AuthorizationType.DATASOURCE, processService,new Integer[]{dataSourceId},userId,logger); permissionCheckDataSource.checkPermission(); } diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java new file mode 100644 index 0000000000..64bc7924d2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTask.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.sqoop; + +import com.alibaba.fastjson.JSON; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.server.utils.ParamUtils; +import org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; +import org.slf4j.Logger; +import java.util.Map; + +/** + * sqoop task extends the shell task + */ +public class SqoopTask extends AbstractYarnTask { + + private SqoopParameters sqoopParameters; + + public SqoopTask(TaskProps props, Logger logger){ + super(props,logger); + } + + @Override + public void init() throws Exception { + logger.info("sqoop task params {}", taskProps.getTaskParams()); + sqoopParameters = + JSON.parseObject(taskProps.getTaskParams(),SqoopParameters.class); + if (!sqoopParameters.checkParameters()) { + throw new RuntimeException("sqoop task params is not valid"); + } + + } + + @Override + protected String buildCommand() throws Exception { + //get sqoop scripts + SqoopJobGenerator generator = new SqoopJobGenerator(); + String script = generator.generateSqoopJob(sqoopParameters); + + Map paramsMap = ParamUtils.convert(taskProps.getUserDefParamsMap(), + taskProps.getDefinedParams(), + 
sqoopParameters.getLocalParametersMap(), + taskProps.getCmdTypeIfComplement(), + taskProps.getScheduleTime()); + + if(paramsMap != null){ + String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); + logger.info("sqoop script: {}", resultScripts); + return resultScripts; + } + + return null; + } + + @Override + public AbstractParameters getParameters() { + return sqoopParameters; + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java new file mode 100644 index 0000000000..4944bac5ba --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/CommonGenerator.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * common script generator + */ +public class CommonGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + public String generate(SqoopParameters sqoopParameters) { + StringBuilder result = new StringBuilder(); + try{ + result.append("sqoop ") + .append(sqoopParameters.getModelType()); + if(sqoopParameters.getConcurrency() >0){ + result.append(" -m ") + .append(sqoopParameters.getConcurrency()); + } + }catch (Exception e){ + logger.error(e.getMessage()); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java new file mode 100644 index 0000000000..6c1d1fdca8 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ISourceGenerator.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; + +/** + * Source Generator Interface + */ +public interface ISourceGenerator { + + /** + * generate the source script + * @param sqoopParameters sqoop params + * @return + */ + String generate(SqoopParameters sqoopParameters); +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java new file mode 100644 index 0000000000..be307af5f2 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/ITargetGenerator.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; + +/** + * Target Generator Interface + */ +public interface ITargetGenerator { + + /** + * generate the target script + * @param sqoopParameters sqoop params + * @return + */ + String generate(SqoopParameters sqoopParameters); +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java new file mode 100644 index 0000000000..24c76e027d --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/SqoopJobGenerator.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HdfsSourceGenerator; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.HiveSourceGenerator; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources.MysqlSourceGenerator; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets.HdfsTargetGenerator; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets.HiveTargetGenerator; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets.MysqlTargetGenerator; + +/** + * Sqoop Job Scripts Generator + */ +public class SqoopJobGenerator { + + private static final String MYSQL = "MYSQL"; + private static final String HIVE = "HIVE"; + private static final String HDFS = "HDFS"; + + /** + * target script generator + */ + private ITargetGenerator targetGenerator; + /** + * source script generator + */ + private ISourceGenerator sourceGenerator; + /** + * common script generator + */ + private CommonGenerator commonGenerator; + + public SqoopJobGenerator(){ + commonGenerator = new CommonGenerator(); + } + + private void createSqoopJobGenerator(String sourceType,String targetType){ + sourceGenerator = createSourceGenerator(sourceType); + targetGenerator = createTargetGenerator(targetType); + } + + /** + * get the final sqoop scripts + * @param sqoopParameters + * @return + */ + public String generateSqoopJob(SqoopParameters sqoopParameters){ + createSqoopJobGenerator(sqoopParameters.getSourceType(),sqoopParameters.getTargetType()); + if(sourceGenerator == null || targetGenerator == null){ + return null; + } + + return commonGenerator.generate(sqoopParameters) + + sourceGenerator.generate(sqoopParameters) + + targetGenerator.generate(sqoopParameters); + } + + /** + * get the source generator 
+ * @param sourceType + * @return + */ + private ISourceGenerator createSourceGenerator(String sourceType){ + switch (sourceType){ + case MYSQL: + return new MysqlSourceGenerator(); + case HIVE: + return new HiveSourceGenerator(); + case HDFS: + return new HdfsSourceGenerator(); + default: + return null; + } + } + + /** + * get the target generator + * @param targetType + * @return + */ + private ITargetGenerator createTargetGenerator(String targetType){ + switch (targetType){ + case MYSQL: + return new MysqlTargetGenerator(); + case HIVE: + return new HiveTargetGenerator(); + case HDFS: + return new HdfsTargetGenerator(); + default: + return null; + } + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java new file mode 100644 index 0000000000..47b01363e6 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HdfsSourceGenerator.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; + +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * hdfs source generator + */ +public class HdfsSourceGenerator implements ISourceGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + StringBuilder result = new StringBuilder(); + try{ + SourceHdfsParameter sourceHdfsParameter + = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceHdfsParameter.class); + + if(sourceHdfsParameter != null){ + if(StringUtils.isNotEmpty(sourceHdfsParameter.getExportDir())){ + result.append(" --export-dir ") + .append(sourceHdfsParameter.getExportDir()); + }else{ + throw new Exception("--export-dir is null"); + } + + } + }catch (Exception e){ + logger.error("get hdfs source failed",e); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java new file mode 100644 index 0000000000..91363e296a --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/HiveSourceGenerator.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * hive source generator + */ +public class HiveSourceGenerator implements ISourceGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + StringBuilder sb = new StringBuilder(); + try{ + SourceHiveParameter sourceHiveParameter + = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceHiveParameter.class); + if(sourceHiveParameter != null){ + if(StringUtils.isNotEmpty(sourceHiveParameter.getHiveDatabase())){ + sb.append(" --hcatalog-database ").append(sourceHiveParameter.getHiveDatabase()); + } + + if(StringUtils.isNotEmpty(sourceHiveParameter.getHiveTable())){ + sb.append(" --hcatalog-table ").append(sourceHiveParameter.getHiveTable()); + } + + 
if(StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionKey())&& + StringUtils.isNotEmpty(sourceHiveParameter.getHivePartitionValue())){ + sb.append(" --hcatalog-partition-keys ").append(sourceHiveParameter.getHivePartitionKey()) + .append(" --hcatalog-partition-values ").append(sourceHiveParameter.getHivePartitionValue()); + } + } + }catch (Exception e){ + logger.error(e.getMessage()); + } + + return sb.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java new file mode 100644 index 0000000000..050fef7cc7 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/sources/MysqlSourceGenerator.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.sources; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.enums.QueryType; +import org.apache.dolphinscheduler.common.process.Property; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ISourceGenerator; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; + +/** + * mysql source generator + */ +public class MysqlSourceGenerator implements ISourceGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + StringBuilder result = new StringBuilder(); + try { + SourceMysqlParameter sourceMysqlParameter + = JSONUtils.parseObject(sqoopParameters.getSourceParams(),SourceMysqlParameter.class); + + if(sourceMysqlParameter != null){ + ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); + DataSource dataSource= processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource()); + BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), + dataSource.getConnectionParams()); + if(baseDataSource != null){ + result.append(" --connect ") + .append(baseDataSource.getJdbcUrl()) + .append(" --username ") + .append(baseDataSource.getUser()) + .append(" --password ") + .append(baseDataSource.getPassword()); + + 
if(sourceMysqlParameter.getSrcQueryType() == QueryType.FORM.ordinal()){ + if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcTable())){ + result.append(" --table ").append(sourceMysqlParameter.getSrcTable()); + } + + if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcColumns())){ + result.append(" --columns ").append(sourceMysqlParameter.getSrcColumns()); + } + + }else if(sourceMysqlParameter.getSrcQueryType() == QueryType.SQL.ordinal()){ + if(StringUtils.isNotEmpty(sourceMysqlParameter.getSrcQuerySql())){ + + String srcQuery = sourceMysqlParameter.getSrcQuerySql(); + if(srcQuery.toLowerCase().contains("where")){ + srcQuery += " AND "+"$CONDITIONS"; + }else{ + srcQuery += " WHERE $CONDITIONS"; + } + result.append(" --query \'"+srcQuery+"\'"); + } + } + + List mapColumnHive = sourceMysqlParameter.getMapColumnHive(); + + if(mapColumnHive != null && !mapColumnHive.isEmpty()){ + String columnMap = ""; + for(Property item:mapColumnHive){ + columnMap = item.getProp()+"="+ item.getValue()+","; + } + + if(StringUtils.isNotEmpty(columnMap)){ + result.append(" --map-column-hive ") + .append(columnMap.substring(0,columnMap.length()-1)); + } + } + + List mapColumnJava = sourceMysqlParameter.getMapColumnJava(); + + if(mapColumnJava != null && !mapColumnJava.isEmpty()){ + String columnMap = ""; + for(Property item:mapColumnJava){ + columnMap = item.getProp()+"="+ item.getValue()+","; + } + + if(StringUtils.isNotEmpty(columnMap)){ + result.append(" --map-column-java ") + .append(columnMap.substring(0,columnMap.length()-1)); + } + } + } + } + }catch (Exception e){ + logger.error(e.getMessage()); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java new file mode 100644 index 0000000000..411e9b4450 --- /dev/null 
+++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HdfsTargetGenerator.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * hdfs target generator + */ +public class HdfsTargetGenerator implements ITargetGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + StringBuilder result = new StringBuilder(); + try{ + TargetHdfsParameter targetHdfsParameter = + JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetHdfsParameter.class); + + if(targetHdfsParameter != null){ + + if(StringUtils.isNotEmpty(targetHdfsParameter.getTargetPath())){ + 
result.append(" --target-dir ").append(targetHdfsParameter.getTargetPath()); + } + + if(StringUtils.isNotEmpty(targetHdfsParameter.getCompressionCodec())){ + result.append(" --compression-codec ").append(targetHdfsParameter.getCompressionCodec()); + } + + if(StringUtils.isNotEmpty(targetHdfsParameter.getFileType())){ + result.append(" ").append(targetHdfsParameter.getFileType()); + } + + if(targetHdfsParameter.isDeleteTargetDir()){ + result.append(" --delete-target-dir"); + } + + if(StringUtils.isNotEmpty(targetHdfsParameter.getFieldsTerminated())){ + result.append(" --fields-terminated-by '").append(targetHdfsParameter.getFieldsTerminated()).append("'"); + } + + if(StringUtils.isNotEmpty(targetHdfsParameter.getLinesTerminated())){ + result.append(" --lines-terminated-by '").append(targetHdfsParameter.getLinesTerminated()).append("'"); + } + + result.append(" --null-non-string 'NULL' --null-string 'NULL'"); + } + }catch(Exception e){ + logger.error(e.getMessage()); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java new file mode 100644 index 0000000000..ad59173ad0 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/HiveTargetGenerator.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * hive target generator + */ +public class HiveTargetGenerator implements ITargetGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + + StringBuilder result = new StringBuilder(); + + try{ + TargetHiveParameter targetHiveParameter = + JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetHiveParameter.class); + if(targetHiveParameter != null){ + + result.append(" --hive-import "); + + if(StringUtils.isNotEmpty(targetHiveParameter.getHiveDatabase())&& + StringUtils.isNotEmpty(targetHiveParameter.getHiveTable())){ + result.append(" --hive-table ") + .append(targetHiveParameter.getHiveDatabase()) + .append(".") + .append(targetHiveParameter.getHiveTable()); + } + + if(targetHiveParameter.isCreateHiveTable()){ + result.append(" --create-hive-table"); + } + + if(targetHiveParameter.isDropDelimiter()){ + result.append(" --hive-drop-import-delims"); + } + + if(targetHiveParameter.isHiveOverWrite()){ + result.append(" --hive-overwrite -delete-target-dir"); + } 
+ + if(StringUtils.isNotEmpty(targetHiveParameter.getReplaceDelimiter())){ + result.append(" --hive-delims-replacement ").append(targetHiveParameter.getReplaceDelimiter()); + } + + if(StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionKey())&& + StringUtils.isNotEmpty(targetHiveParameter.getHivePartitionValue())){ + result.append(" --hive-partition-key ") + .append(targetHiveParameter.getHivePartitionKey()) + .append(" --hive-partition-value ") + .append(targetHiveParameter.getHivePartitionValue()); + } + + } + }catch(Exception e){ + logger.error(e.getMessage()); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java new file mode 100644 index 0000000000..0733338812 --- /dev/null +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sqoop/generator/targets/MysqlTargetGenerator.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop.generator.targets; + +import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.ITargetGenerator; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * mysql target generator + */ +public class MysqlTargetGenerator implements ITargetGenerator { + + private Logger logger = LoggerFactory.getLogger(getClass()); + + @Override + public String generate(SqoopParameters sqoopParameters) { + + StringBuilder result = new StringBuilder(); + try{ + + TargetMysqlParameter targetMysqlParameter = + JSONUtils.parseObject(sqoopParameters.getTargetParams(),TargetMysqlParameter.class); + + if(targetMysqlParameter != null && targetMysqlParameter.getTargetDatasource() != 0){ + + ProcessService processService = SpringApplicationContext.getBean(ProcessService.class); + DataSource dataSource= processService.findDataSourceById(targetMysqlParameter.getTargetDatasource()); + // get datasource + BaseDataSource baseDataSource = DataSourceFactory.getDatasource(dataSource.getType(), + dataSource.getConnectionParams()); + + if(baseDataSource != null){ + result.append(" --connect ") + .append(baseDataSource.getJdbcUrl()) + .append(" --username ") + .append(baseDataSource.getUser()) + .append(" --password ") + .append(baseDataSource.getPassword()) + .append(" --table ") + 
.append(targetMysqlParameter.getTargetTable()); + + if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetColumns())){ + result.append(" --columns ").append(targetMysqlParameter.getTargetColumns()); + } + + if(StringUtils.isNotEmpty(targetMysqlParameter.getFieldsTerminated())){ + result.append(" --fields-terminated-by '").append(targetMysqlParameter.getFieldsTerminated()).append("'"); + } + + if(StringUtils.isNotEmpty(targetMysqlParameter.getLinesTerminated())){ + result.append(" --lines-terminated-by '").append(targetMysqlParameter.getLinesTerminated()).append("'"); + } + + if(targetMysqlParameter.isUpdate()){ + if(StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateKey())&& + StringUtils.isNotEmpty(targetMysqlParameter.getTargetUpdateMode())){ + result.append(" --update-key ").append(targetMysqlParameter.getTargetUpdateKey()) + .append(" --update-mode ").append(targetMysqlParameter.getTargetUpdateMode()); + } + } + } + } + }catch (Exception e){ + logger.error(e.getMessage()); + } + + return result.toString(); + } +} diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java index c6a71ed066..fe4ec9130a 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java @@ -21,10 +21,8 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.ZKNodeType; import org.apache.dolphinscheduler.common.model.Server; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; import org.apache.dolphinscheduler.dao.AlertDao; import org.apache.dolphinscheduler.dao.DaoFactory; -import org.apache.dolphinscheduler.dao.ProcessDao; import 
org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.server.utils.ProcessUtils; @@ -32,6 +30,8 @@ import org.apache.commons.lang.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.curator.utils.ThreadUtils; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -70,10 +70,10 @@ public class ZKMasterClient extends AbstractZKClient { */ private AlertDao alertDao = null; /** - * flow database access + * process service */ @Autowired - private ProcessDao processDao; + private ProcessService processService; /** * default constructor @@ -374,7 +374,7 @@ public class ZKMasterClient extends AbstractZKClient { private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception { logger.info("start worker[{}] failover ...", workerHost); - List needFailoverTaskInstanceList = processDao.queryNeedFailoverTaskInstances(workerHost); + List needFailoverTaskInstanceList = processService.queryNeedFailoverTaskInstances(workerHost); for(TaskInstance taskInstance : needFailoverTaskInstanceList){ if(needCheckWorkerAlive){ if(!checkTaskInstanceNeedFailover(taskInstance)){ @@ -382,7 +382,7 @@ public class ZKMasterClient extends AbstractZKClient { } } - ProcessInstance instance = processDao.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); + ProcessInstance instance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId()); if(instance!=null){ taskInstance.setProcessInstance(instance); } @@ -390,7 +390,7 @@ public class ZKMasterClient extends AbstractZKClient { ProcessUtils.killYarnJob(taskInstance); 
taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE); - processDao.saveTaskInstance(taskInstance); + processService.saveTaskInstance(taskInstance); } logger.info("end worker[{}] failover ...", workerHost); } @@ -403,11 +403,11 @@ public class ZKMasterClient extends AbstractZKClient { private void failoverMaster(String masterHost) { logger.info("start master failover ..."); - List needFailoverProcessInstanceList = processDao.queryNeedFailoverProcessInstances(masterHost); + List needFailoverProcessInstanceList = processService.queryNeedFailoverProcessInstances(masterHost); //updateProcessInstance host is null and insert into command for(ProcessInstance processInstance : needFailoverProcessInstanceList){ - processDao.processNeedFailoverProcessInstances(processInstance); + processService.processNeedFailoverProcessInstances(processInstance); } logger.info("master failover end"); diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java index 88abfa3071..7ddee3b2a1 100644 --- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java +++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKWorkerClient.java @@ -19,9 +19,9 @@ package org.apache.dolphinscheduler.server.zk; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ZKNodeType; -import org.apache.dolphinscheduler.common.zk.AbstractZKClient; import org.apache.commons.lang.StringUtils; import org.apache.curator.framework.CuratorFramework; +import org.apache.dolphinscheduler.service.zk.AbstractZKClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; diff --git 
a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java index d7c3de13a5..d2a0fb2407 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/MasterExecThreadTest.java @@ -20,13 +20,13 @@ import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.graph.DAG; import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Schedule; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.runner.MasterExecThread; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -57,7 +57,7 @@ public class MasterExecThreadTest { private ProcessInstance processInstance; - private ProcessDao processDao; + private ProcessService processService; private int processDefinitionId = 1; @@ -67,7 +67,7 @@ public class MasterExecThreadTest { @Before public void init() throws Exception{ - processDao = mock(ProcessDao.class); + processService = mock(ProcessService.class); applicationContext = mock(ApplicationContext.class); config = new MasterConfig(); @@ -91,7 +91,7 @@ public class MasterExecThreadTest { processDefinition.setGlobalParamList(Collections.EMPTY_LIST); 
Mockito.when(processInstance.getProcessDefinition()).thenReturn(processDefinition); - masterExecThread = PowerMockito.spy(new MasterExecThread(processInstance, processDao)); + masterExecThread = PowerMockito.spy(new MasterExecThread(processInstance, processService)); // prepareProcess init dag Field dag = MasterExecThread.class.getDeclaredField("dag"); dag.setAccessible(true); @@ -110,12 +110,12 @@ public class MasterExecThreadTest { @Test public void testParallelWithOutSchedule() throws ParseException { try{ - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList()); Method method = MasterExecThread.class.getDeclaredMethod("executeComplementProcess"); method.setAccessible(true); method.invoke(masterExecThread); // one create save, and 1-30 for next save, and last day 31 no save - verify(processDao, times(31)).saveProcessInstance(processInstance); + verify(processService, times(31)).saveProcessInstance(processInstance); }catch (Exception e){ e.printStackTrace(); Assert.assertTrue(false); @@ -129,12 +129,12 @@ public class MasterExecThreadTest { @Test public void testParallelWithSchedule() throws ParseException { try{ - Mockito.when(processDao.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); + Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList()); Method method = MasterExecThread.class.getDeclaredMethod("executeComplementProcess"); method.setAccessible(true); method.invoke(masterExecThread); // one create save, and 15(1 to 31 step 2) for next save, and last day 31 no save - verify(processDao, times(15)).saveProcessInstance(processInstance); + verify(processService, times(15)).saveProcessInstance(processInstance); }catch (Exception e){ 
Assert.assertTrue(false); } @@ -151,4 +151,5 @@ public class MasterExecThreadTest { schedulerList.add(schedule); return schedulerList; } + } \ No newline at end of file diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java index 04c844827f..5d4263644b 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/shell/ShellCommandExecutorTest.java @@ -20,13 +20,13 @@ import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskManager; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -43,11 +43,11 @@ public class ShellCommandExecutorTest { private static final Logger logger = LoggerFactory.getLogger(ShellCommandExecutorTest.class); - private ProcessDao processDao = null; + private ProcessService processService = null; @Before public void before(){ - processDao = SpringApplicationContext.getBean(ProcessDao.class); + processService = 
SpringApplicationContext.getBean(ProcessService.class); } @Test @@ -65,7 +65,7 @@ public class ShellCommandExecutorTest { - TaskInstance taskInstance = processDao.findTaskInstanceById(7657); + TaskInstance taskInstance = processService.findTaskInstanceById(7657); String taskJson = taskInstance.getTaskJson(); TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java index 7da3f710b6..c395eabe51 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/sql/SqlExecutorTest.java @@ -21,13 +21,13 @@ import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.model.TaskNode; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.common.utils.LoggerUtils; import org.apache.dolphinscheduler.server.worker.task.AbstractTask; import org.apache.dolphinscheduler.server.worker.task.TaskManager; import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -44,11 +44,11 @@ public class SqlExecutorTest { private static final Logger logger = LoggerFactory.getLogger(SqlExecutorTest.class); - private ProcessDao processDao = null; + private ProcessService processService 
= null; @Before public void before(){ - processDao = SpringApplicationContext.getBean(ProcessDao.class); + processService = SpringApplicationContext.getBean(ProcessService.class); } @Test @@ -109,7 +109,7 @@ public class SqlExecutorTest { taskProps.setCmdTypeIfComplement(CommandType.START_PROCESS); - TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); + TaskInstance taskInstance = processService.findTaskInstanceById(taskInstId); String taskJson = taskInstance.getTaskJson(); TaskNode taskNode = JSONObject.parseObject(taskJson, TaskNode.class); diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java index 7a6073e05d..bd7f27530a 100644 --- a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/datax/DataxTaskTest.java @@ -25,15 +25,15 @@ import java.util.List; import com.alibaba.fastjson.JSONObject; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.DbType; -import org.apache.dolphinscheduler.common.job.db.BaseDataSource; -import org.apache.dolphinscheduler.common.job.db.DataSourceFactory; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; -import org.apache.dolphinscheduler.dao.ProcessDao; +import org.apache.dolphinscheduler.dao.datasource.BaseDataSource; +import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory; import org.apache.dolphinscheduler.dao.entity.DataSource; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.server.utils.DataxUtils; import org.apache.dolphinscheduler.server.worker.task.ShellCommandExecutor; import org.apache.dolphinscheduler.server.worker.task.TaskProps; 
+import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -53,7 +53,7 @@ public class DataxTaskTest { private DataxTask dataxTask; - private ProcessDao processDao; + private ProcessService processService; private ShellCommandExecutor shellCommandExecutor; @@ -62,13 +62,13 @@ public class DataxTaskTest { @Before public void before() throws Exception { - processDao = Mockito.mock(ProcessDao.class); + processService = Mockito.mock(ProcessService.class); shellCommandExecutor = Mockito.mock(ShellCommandExecutor.class); applicationContext = Mockito.mock(ApplicationContext.class); SpringApplicationContext springApplicationContext = new SpringApplicationContext(); springApplicationContext.setApplicationContext(applicationContext); - Mockito.when(applicationContext.getBean(ProcessDao.class)).thenReturn(processDao); + Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); TaskProps props = new TaskProps(); props.setTaskDir("/tmp"); @@ -83,12 +83,12 @@ public class DataxTaskTest { dataxTask = PowerMockito.spy(new DataxTask(props, logger)); dataxTask.init(); - Mockito.when(processDao.findDataSourceById(1)).thenReturn(getDataSource()); - Mockito.when(processDao.findDataSourceById(2)).thenReturn(getDataSource()); - Mockito.when(processDao.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); + Mockito.when(processService.findDataSourceById(1)).thenReturn(getDataSource()); + Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); + Mockito.when(processService.findProcessInstanceByTaskId(1)).thenReturn(getProcessInstance()); String fileName = String.format("%s/%s_node.sh", props.getTaskDir(), props.getTaskAppId()); - Mockito.when(shellCommandExecutor.run(fileName, processDao)).thenReturn(0); + Mockito.when(shellCommandExecutor.run(fileName, 
processService)).thenReturn(0); } private DataSource getDataSource() { diff --git a/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java new file mode 100644 index 0000000000..f8688e7c0c --- /dev/null +++ b/dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/task/sqoop/SqoopTaskTest.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.server.worker.task.sqoop; + +import com.alibaba.fastjson.JSONObject; +import org.apache.dolphinscheduler.common.enums.DbType; +import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHdfsParameter; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceHiveParameter; +import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHdfsParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetHiveParameter; +import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.server.worker.task.TaskProps; +import org.apache.dolphinscheduler.server.worker.task.sqoop.generator.SqoopJobGenerator; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContext; + +import java.util.*; + +/** + * sqoop task test + */ +@RunWith(MockitoJUnitRunner.Silent.class) +public class SqoopTaskTest { + + private static final Logger logger = LoggerFactory.getLogger(SqoopTaskTest.class); + + private ProcessService processService; + private ApplicationContext applicationContext; + private SqoopTask sqoopTask; + + @Before + public void before() throws Exception{ + processService = Mockito.mock(ProcessService.class); + Mockito.when(processService.findDataSourceById(2)).thenReturn(getDataSource()); + applicationContext = Mockito.mock(ApplicationContext.class); + 
SpringApplicationContext springApplicationContext = new SpringApplicationContext(); + springApplicationContext.setApplicationContext(applicationContext); + Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService); + + TaskProps props = new TaskProps(); + props.setTaskDir("/tmp"); + props.setTaskAppId(String.valueOf(System.currentTimeMillis())); + props.setTaskInstId(1); + props.setTenantCode("1"); + props.setEnvFile(".dolphinscheduler_env.sh"); + props.setTaskStartTime(new Date()); + props.setTaskTimeout(0); + props.setTaskParams("{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"); + + sqoopTask = new SqoopTask(props,logger); + sqoopTask.init(); + } + + @Test + public void testGenerator(){ + String data1 = 
"{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HDFS\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"0\\\",\\\"srcQuerySql\\\":\\\"\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[]}\",\"targetParams\":\"{\\\"targetPath\\\":\\\"/ods/tmp/test/person7\\\",\\\"deleteTargetDir\\\":true,\\\"fileType\\\":\\\"--as-textfile\\\",\\\"compressionCodec\\\":\\\"\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters sqoopParameters1 = JSONObject.parseObject(data1,SqoopParameters.class); + + SqoopJobGenerator generator = new SqoopJobGenerator(); + String script = generator.generateSqoopJob(sqoopParameters1); + String expected = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_2 --target-dir /ods/tmp/test/person7 --as-textfile --delete-target-dir --fields-terminated-by '@' --lines-terminated-by '\\n' --null-non-string 'NULL' --null-string 'NULL'"; + Assert.assertEquals(expected, script); + + String data2 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HDFS\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"exportDir\\\":\\\"/ods/tmp/test/person7\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"id,name,age,sex,create_time\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":true,\\\"targetUpdateKey\\\":\\\"id\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters sqoopParameters2 = JSONObject.parseObject(data2,SqoopParameters.class); + + String script2 = generator.generateSqoopJob(sqoopParameters2); + String expected2 = "sqoop export -m 1 --export-dir /ods/tmp/test/person7 
--connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --columns id,name,age,sex,create_time --fields-terminated-by '@' --lines-terminated-by '\\n' --update-key id --update-mode allowinsert"; + Assert.assertEquals(expected2, script2); + + String data3 = "{\"concurrency\":1,\"modelType\":\"export\",\"sourceType\":\"HIVE\",\"targetType\":\"MYSQL\",\"sourceParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-17\\\"}\",\"targetParams\":\"{\\\"targetDatasource\\\":2,\\\"targetTable\\\":\\\"person_3\\\",\\\"targetColumns\\\":\\\"\\\",\\\"preQuery\\\":\\\"\\\",\\\"isUpdate\\\":false,\\\"targetUpdateKey\\\":\\\"\\\",\\\"targetUpdateMode\\\":\\\"allowinsert\\\",\\\"fieldsTerminated\\\":\\\"@\\\",\\\"linesTerminated\\\":\\\"\\\\\\\\n\\\"}\",\"localParams\":[]}"; + SqoopParameters sqoopParameters3 = JSONObject.parseObject(data3,SqoopParameters.class); + + String script3 = generator.generateSqoopJob(sqoopParameters3); + String expected3 = "sqoop export -m 1 --hcatalog-database stg --hcatalog-table person_internal --hcatalog-partition-keys date --hcatalog-partition-values 2020-02-17 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --table person_3 --fields-terminated-by '@' --lines-terminated-by '\\n'"; + Assert.assertEquals(expected3, script3); + + String data4 = "{\"concurrency\":1,\"modelType\":\"import\",\"sourceType\":\"MYSQL\",\"targetType\":\"HIVE\",\"sourceParams\":\"{\\\"srcDatasource\\\":2,\\\"srcTable\\\":\\\"person_2\\\",\\\"srcQueryType\\\":\\\"1\\\",\\\"srcQuerySql\\\":\\\"SELECT * FROM 
person_2\\\",\\\"srcColumnType\\\":\\\"0\\\",\\\"srcColumns\\\":\\\"\\\",\\\"srcConditionList\\\":[],\\\"mapColumnHive\\\":[],\\\"mapColumnJava\\\":[{\\\"prop\\\":\\\"id\\\",\\\"direct\\\":\\\"IN\\\",\\\"type\\\":\\\"VARCHAR\\\",\\\"value\\\":\\\"Integer\\\"}]}\",\"targetParams\":\"{\\\"hiveDatabase\\\":\\\"stg\\\",\\\"hiveTable\\\":\\\"person_internal_2\\\",\\\"createHiveTable\\\":true,\\\"dropDelimiter\\\":false,\\\"hiveOverWrite\\\":true,\\\"replaceDelimiter\\\":\\\"\\\",\\\"hivePartitionKey\\\":\\\"date\\\",\\\"hivePartitionValue\\\":\\\"2020-02-16\\\"}\",\"localParams\":[]}"; + SqoopParameters sqoopParameters4 = JSONObject.parseObject(data4,SqoopParameters.class); + + String script4 = generator.generateSqoopJob(sqoopParameters4); + String expected4 = "sqoop import -m 1 --connect jdbc:mysql://192.168.0.111:3306/test --username kylo --password 123456 --query 'SELECT * FROM person_2 WHERE $CONDITIONS' --map-column-java id=Integer --hive-import --hive-table stg.person_internal_2 --create-hive-table --hive-overwrite -delete-target-dir --hive-partition-key date --hive-partition-value 2020-02-16"; + Assert.assertEquals(expected4, script4); + + } + + private DataSource getDataSource() { + DataSource dataSource = new DataSource(); + dataSource.setType(DbType.MYSQL); + dataSource.setConnectionParams( + "{\"address\":\"jdbc:mysql://192.168.0.111:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://192.168.0.111:3306/test\",\"user\":\"kylo\",\"password\":\"123456\"}"); + dataSource.setUserId(1); + return dataSource; + } + + @Test + public void testGetParameters() { + Assert.assertNotNull(sqoopTask.getParameters()); + } + + /** + * Method: init + */ + @Test + public void testInit(){ + try { + sqoopTask.init(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + +} diff --git a/dolphinscheduler-service/pom.xml b/dolphinscheduler-service/pom.xml new file mode 100644 index 0000000000..7d775d5497 --- /dev/null +++ b/dolphinscheduler-service/pom.xml @@ -0,0 
+1,56 @@ + + + + + dolphinscheduler + org.apache.dolphinscheduler + 1.2.1-SNAPSHOT + + 4.0.0 + + dolphinscheduler-service + + dolphinscheduler-service + + + + org.apache.dolphinscheduler + dolphinscheduler-remote + + + org.apache.dolphinscheduler + dolphinscheduler-dao + + + org.apache.curator + curator-client + ${curator.version} + + + log4j-1.2-api + org.apache.logging.log4j + + + io.netty + netty + + + + + org.quartz-scheduler + quartz + + + c3p0 + c3p0 + + + + + + org.quartz-scheduler + quartz-jobs + + + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SpringApplicationContext.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/bean/SpringApplicationContext.java similarity index 96% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SpringApplicationContext.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/bean/SpringApplicationContext.java index 97618e1b39..ddf1fecf76 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/SpringApplicationContext.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/bean/SpringApplicationContext.java @@ -14,14 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.utils; +package org.apache.dolphinscheduler.service.bean; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.stereotype.Component; - @Component public class SpringApplicationContext implements ApplicationContextAware { diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java new file mode 100644 index 0000000000..5daf535625 --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClientService.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.service.log; + +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.remote.NettyRemotingClient; +import org.apache.dolphinscheduler.remote.command.Command; +import org.apache.dolphinscheduler.remote.command.log.*; +import org.apache.dolphinscheduler.remote.config.NettyClientConfig; +import org.apache.dolphinscheduler.remote.utils.Address; +import org.apache.dolphinscheduler.remote.utils.FastJsonSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * log client + */ +public class LogClientService { + + private static final Logger logger = LoggerFactory.getLogger(LogClientService.class); + + private final NettyClientConfig clientConfig; + + private final NettyRemotingClient client; + + /** + * request time out + */ + private final long logRequestTimeout = 10 * 1000; + + /** + * construct client + */ + public LogClientService() { + this.clientConfig = new NettyClientConfig(); + this.clientConfig.setWorkerThreads(4); + this.client = new NettyRemotingClient(clientConfig); + } + + /** + * close + */ + public void close() { + this.client.close(); + logger.info("logger client closed"); + } + + /** + * roll view log + * @param host host + * @param port port + * @param path path + * @param skipLineNum skip line number + * @param limit limit + * @return log content + */ + public String rollViewLog(String host, int port, String path,int skipLineNum,int limit) { + logger.info("roll view log, host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit); + RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit); + String result = ""; + final Address address = new Address(host, port); + try { + Command command = request.convert2Command(); + Command response = this.client.sendSync(address, command, logRequestTimeout); + if(response != null){ + RollViewLogResponseCommand rollReviewLog = 
FastJsonSerializer.deserialize( + response.getBody(), RollViewLogResponseCommand.class); + return rollReviewLog.getMsg(); + } + } catch (Exception e) { + logger.error("roll view log error", e); + } finally { + this.client.closeChannel(address); + } + return result; + } + + /** + * view log + * @param host host + * @param port port + * @param path path + * @return log content + */ + public String viewLog(String host, int port, String path) { + logger.info("view log path {}", path); + ViewLogRequestCommand request = new ViewLogRequestCommand(path); + String result = ""; + final Address address = new Address(host, port); + try { + Command command = request.convert2Command(); + Command response = this.client.sendSync(address, command, logRequestTimeout); + if(response != null){ + ViewLogResponseCommand viewLog = FastJsonSerializer.deserialize( + response.getBody(), ViewLogResponseCommand.class); + return viewLog.getMsg(); + } + } catch (Exception e) { + logger.error("view log error", e); + } finally { + this.client.closeChannel(address); + } + return result; + } + + /** + * get log size + * @param host host + * @param port port + * @param path log path + * @return log content bytes + */ + public byte[] getLogBytes(String host, int port, String path) { + logger.info("log path {}", path); + GetLogBytesRequestCommand request = new GetLogBytesRequestCommand(path); + byte[] result = null; + final Address address = new Address(host, port); + try { + Command command = request.convert2Command(); + Command response = this.client.sendSync(address, command, logRequestTimeout); + if(response != null){ + GetLogBytesResponseCommand getLog = FastJsonSerializer.deserialize( + response.getBody(), GetLogBytesResponseCommand.class); + return getLog.getData(); + } + } catch (Exception e) { + logger.error("get log size error", e); + } finally { + this.client.closeChannel(address); + } + return result; + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogPromise.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogPromise.java new file mode 100644 index 0000000000..98ee3fdbbf --- /dev/null +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogPromise.java @@ -0,0 +1,109 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.service.log; + +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +/** + * log asyc callback + */ +public class LogPromise { + + private static final ConcurrentHashMap PROMISES = new ConcurrentHashMap<>(); + + /** + * request unique identification + */ + private long opaque; + + /** + * start timemillis + */ + private final long start; + + /** + * timeout + */ + private final long timeout; + + /** + * latch + */ + private final CountDownLatch latch; + + /** + * result + */ + private Object result; + + public LogPromise(long opaque, long timeout){ + this.opaque = opaque; + this.timeout = timeout; + this.start = System.currentTimeMillis(); + this.latch = new CountDownLatch(1); + PROMISES.put(opaque, this); + } + + + /** + * notify client finish + * @param opaque unique identification + * @param result result + */ + public static void notify(long opaque, Object result){ + LogPromise promise = PROMISES.remove(opaque); + if(promise != null){ + promise.doCountDown(result); + } + } + + /** + * countdown + * + * @param result result + */ + private void doCountDown(Object result){ + this.result = result; + this.latch.countDown(); + } + + /** + * whether timeout + * @return timeout + */ + public boolean isTimeout(){ + return System.currentTimeMillis() - start > timeout; + } + + /** + * get result + * @return + */ + public Object getResult(){ + try { + latch.await(timeout, TimeUnit.MILLISECONDS); + } catch (InterruptedException ignore) { + } + PROMISES.remove(opaque); + return this.result; + } + + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/permission/PermissionCheck.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java similarity index 80% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/permission/PermissionCheck.java 
rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java index 63d4c1c8af..027666f053 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/permission/PermissionCheck.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/permission/PermissionCheck.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.permission; +package org.apache.dolphinscheduler.service.permission; import org.apache.dolphinscheduler.common.enums.AuthorizationType; import org.apache.dolphinscheduler.common.enums.UserType; import org.apache.dolphinscheduler.common.utils.CollectionUtils; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.service.process.ProcessService; import org.slf4j.Logger; import java.util.List; @@ -38,7 +38,7 @@ public class PermissionCheck { /** * Authorization Type */ - private ProcessDao processDao; + private ProcessService processService; /** * need check array @@ -53,23 +53,23 @@ public class PermissionCheck { /** * permission check * @param authorizationType authorization type - * @param processDao process dao + * @param processService process dao */ - public PermissionCheck(AuthorizationType authorizationType, ProcessDao processDao) { + public PermissionCheck(AuthorizationType authorizationType, ProcessService processService) { this.authorizationType = authorizationType; - this.processDao = processDao; + this.processService = processService; } /** * permission check * @param authorizationType - * @param processDao + * @param processService * @param needChecks * @param userId */ - public PermissionCheck(AuthorizationType authorizationType, ProcessDao processDao, T[] needChecks, int userId) { + public PermissionCheck(AuthorizationType authorizationType, 
ProcessService processService, T[] needChecks, int userId) { this.authorizationType = authorizationType; - this.processDao = processDao; + this.processService = processService; this.needChecks = needChecks; this.userId = userId; } @@ -77,14 +77,14 @@ public class PermissionCheck { /** * permission check * @param authorizationType - * @param processDao + * @param processService * @param needChecks * @param userId * @param logger */ - public PermissionCheck(AuthorizationType authorizationType, ProcessDao processDao, T[] needChecks, int userId,Logger logger) { + public PermissionCheck(AuthorizationType authorizationType, ProcessService processService, T[] needChecks, int userId, Logger logger) { this.authorizationType = authorizationType; - this.processDao = processDao; + this.processService = processService; this.needChecks = needChecks; this.userId = userId; this.logger = logger; @@ -98,12 +98,12 @@ public class PermissionCheck { this.authorizationType = authorizationType; } - public ProcessDao getProcessDao() { - return processDao; + public ProcessService getProcessService() { + return processService; } - public void setProcessDao(ProcessDao processDao) { - this.processDao = processDao; + public void setProcessService(ProcessService processService) { + this.processService = processService; } public T[] getNeedChecks() { @@ -142,9 +142,9 @@ public class PermissionCheck { public void checkPermission() throws Exception{ if(this.needChecks.length > 0){ // get user type in order to judge whether the user is admin - User user = processDao.getUserById(userId); + User user = processService.getUserById(userId); if (user.getUserType() != UserType.ADMIN_USER){ - List unauthorizedList = processDao.listUnauthorized(userId,needChecks,authorizationType); + List unauthorizedList = processService.listUnauthorized(userId,needChecks,authorizationType); // if exist unauthorized resource if(CollectionUtils.isNotEmpty(unauthorizedList)){ logger.error("user {} didn't has permission of 
{}: {}", user.getUserName(), authorizationType.getDescp(),unauthorizedList.toString()); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/ProcessDao.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java similarity index 98% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/ProcessDao.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 820b2fdaf4..b589cd4295 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/ProcessDao.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao; +package org.apache.dolphinscheduler.service.process; import com.alibaba.fastjson.JSONObject; import com.cronutils.model.Cron; @@ -24,16 +24,12 @@ import org.apache.dolphinscheduler.common.enums.*; import org.apache.dolphinscheduler.common.model.DateInterval; import org.apache.dolphinscheduler.common.model.TaskNode; import org.apache.dolphinscheduler.common.process.Property; -import org.apache.dolphinscheduler.common.queue.ITaskQueue; import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.common.utils.IpUtils; -import org.apache.dolphinscheduler.common.utils.JSONUtils; -import org.apache.dolphinscheduler.common.utils.ParameterUtils; -import org.apache.dolphinscheduler.common.utils.StringUtils; +import org.apache.dolphinscheduler.common.utils.*; import org.apache.dolphinscheduler.dao.entity.*; import org.apache.dolphinscheduler.dao.mapper.*; -import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; +import 
org.apache.dolphinscheduler.service.quartz.cron.CronUtils; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,7 +47,7 @@ import static org.apache.dolphinscheduler.common.Constants.*; * process relative dao that some mappers in this. */ @Component -public class ProcessDao { +public class ProcessService { private final Logger logger = LoggerFactory.getLogger(getClass()); @@ -481,7 +477,7 @@ public class ProcessDao { if(cmdParam == null || !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES) || cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){ - logger.error(String.format("command node depend type is %s, but start nodes is null ", command.getTaskDependType().toString())); + logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } } @@ -504,7 +500,7 @@ public class ProcessDao { if(command.getProcessDefinitionId() != 0){ processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId()); if(processDefinition == null){ - logger.error(String.format("cannot find the work process define! define id : %d", command.getProcessDefinitionId())); + logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId()); return null; } } @@ -954,6 +950,7 @@ public class ProcessDao { } } } + taskInstance.setExecutorId(processInstance.getExecutorId()); taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority()); taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState)); taskInstance.setSubmitTime(new Date()); @@ -976,21 +973,21 @@ public class ProcessDao { return true; } if(taskInstance.getState().typeIsFinished()){ - logger.info(String.format("submit to task queue, but task [%s] state [%s] is already finished. 
", taskInstance.getName(), taskInstance.getState().toString())); + logger.info("submit to task queue, but task [{}] state [{}] is already finished. ", taskInstance.getName(), taskInstance.getState()); return true; } // task cannot submit when running if(taskInstance.getState() == ExecutionStatus.RUNNING_EXEUTION){ - logger.info(String.format("submit to task queue, but task [%s] state already be running. ", taskInstance.getName())); + logger.info("submit to task queue, but task [{}] state already be running. ", taskInstance.getName()); return true; } if(checkTaskExistsInTaskQueue(taskInstance)){ - logger.info(String.format("submit to task queue, but task [%s] already exists in the queue.", taskInstance.getName())); + logger.info("submit to task queue, but task [{}] already exists in the queue.", taskInstance.getName()); return true; } logger.info("task ready to queue: {}" , taskInstance); boolean insertQueueResult = taskQueue.add(DOLPHINSCHEDULER_TASKS_QUEUE, taskZkInfo(taskInstance)); - logger.info(String.format("master insert into queue success, task : %s", taskInstance.getName()) ); + logger.info("master insert into queue success, task : {}", taskInstance.getName()); return insertQueueResult; }catch (Exception e){ logger.error("submit task to queue Exception: ", e); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/DruidConnectionProvider.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java similarity index 99% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/DruidConnectionProvider.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java index 8a4ceba927..d51e8e82bf 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/DruidConnectionProvider.java +++ 
b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/DruidConnectionProvider.java @@ -14,11 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.quartz; +package org.apache.dolphinscheduler.service.quartz; import com.alibaba.druid.pool.DruidDataSource; import org.quartz.SchedulerException; import org.quartz.utils.ConnectionProvider; + import java.sql.Connection; import java.sql.SQLException; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/ProcessScheduleJob.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java similarity index 83% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/ProcessScheduleJob.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java index ac461296a9..69a80e65f5 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/ProcessScheduleJob.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/ProcessScheduleJob.java @@ -14,17 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.quartz; +package org.apache.dolphinscheduler.service.quartz; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.CommandType; import org.apache.dolphinscheduler.common.enums.ReleaseState; -import org.apache.dolphinscheduler.dao.ProcessDao; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.Schedule; -import org.quartz.*; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.quartz.Job; +import org.quartz.JobDataMap; +import org.quartz.JobExecutionContext; +import org.quartz.JobExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.util.Assert; @@ -42,17 +45,17 @@ public class ProcessScheduleJob implements Job { private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class); /** - * process dao + * process service */ - private static ProcessDao processDao; + private static ProcessService processService; /** * init - * @param processDao process dao + * @param processService process dao */ - public static void init(ProcessDao processDao) { - ProcessScheduleJob.processDao = processDao; + public static void init(ProcessService processService) { + ProcessScheduleJob.processService = processService; } /** @@ -64,7 +67,7 @@ public class ProcessScheduleJob implements Job { @Override public void execute(JobExecutionContext context) throws JobExecutionException { - Assert.notNull(processDao, "please call init() method first"); + Assert.notNull(processService, "please call init() method first"); JobDataMap dataMap = context.getJobDetail().getJobDataMap(); @@ -80,7 +83,7 @@ public class ProcessScheduleJob implements Job { logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId); // query schedule - Schedule schedule = 
processDao.querySchedule(scheduleId); + Schedule schedule = processService.querySchedule(scheduleId); if (schedule == null) { logger.warn("process schedule does not exist in db,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); deleteJob(projectId, scheduleId); @@ -88,7 +91,7 @@ public class ProcessScheduleJob implements Job { } - ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); + ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId()); // release state : online/offline ReleaseState releaseState = processDefinition.getReleaseState(); if (processDefinition == null || releaseState == ReleaseState.OFFLINE) { @@ -108,7 +111,7 @@ public class ProcessScheduleJob implements Job { command.setWarningType(schedule.getWarningType()); command.setProcessInstancePriority(schedule.getProcessInstancePriority()); - processDao.createCommand(command); + processService.createCommand(command); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/QuartzExecutors.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java similarity index 97% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/QuartzExecutors.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java index 054d7903fc..60cdb1dd97 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/quartz/QuartzExecutors.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java @@ -14,12 +14,12 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.quartz; +package org.apache.dolphinscheduler.service.quartz; +import org.apache.commons.lang.StringUtils; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Schedule; -import org.apache.commons.lang.StringUtils; import org.quartz.*; import org.quartz.impl.StdSchedulerFactory; import org.quartz.impl.matchers.GroupMatcher; @@ -223,7 +223,7 @@ public class QuartzExecutors { } } catch (SchedulerException e) { - logger.error(String.format("delete job : %s failed",jobName), e); + logger.error("delete job : {} failed",jobName, e); } finally { lock.writeLock().unlock(); } @@ -247,7 +247,7 @@ public class QuartzExecutors { return scheduler.deleteJobs(jobKeys); } catch (SchedulerException e) { - logger.error(String.format("delete all jobs in job group: %s failed",jobGroupName), e); + logger.error("delete all jobs in job group: {} failed",jobGroupName, e); } finally { lock.writeLock().unlock(); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java similarity index 99% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java index 0cda336d7d..0a2e31b610 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/AbstractCycle.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/AbstractCycle.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.utils.cron; +package org.apache.dolphinscheduler.service.quartz.cron; -import org.apache.dolphinscheduler.common.enums.CycleEnum; import com.cronutils.model.Cron; import com.cronutils.model.field.CronField; import com.cronutils.model.field.CronFieldName; import com.cronutils.model.field.expression.*; +import org.apache.dolphinscheduler.common.enums.CycleEnum; /** * Cycle diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java similarity index 95% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java index 8a9087a33c..d03a4a5cdc 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CronUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CronUtils.java @@ -14,15 +14,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.dao.utils.cron; +package org.apache.dolphinscheduler.service.quartz.cron; -import org.apache.dolphinscheduler.common.enums.CycleEnum; -import org.apache.dolphinscheduler.common.thread.Stopper; -import org.apache.dolphinscheduler.common.utils.DateUtils; import com.cronutils.model.Cron; import com.cronutils.model.definition.CronDefinitionBuilder; import com.cronutils.parser.CronParser; +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import org.apache.dolphinscheduler.common.thread.Stopper; +import org.apache.dolphinscheduler.common.utils.DateUtils; import org.quartz.CronExpression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,14 +31,16 @@ import java.text.ParseException; import java.util.*; import static com.cronutils.model.CronType.QUARTZ; -import static org.apache.dolphinscheduler.dao.utils.cron.CycleFactory.*; +import static org.apache.dolphinscheduler.service.quartz.cron.CycleFactory.*; /** * cron utils */ public class CronUtils { - + private CronUtils() { + throw new IllegalStateException("CronUtils class"); + } private static final Logger logger = LoggerFactory.getLogger(CronUtils.class); @@ -169,7 +171,7 @@ public class CronUtils { cronExpression = parse2CronExpression(cron); }catch (ParseException e){ logger.error(e.getMessage(), e); - return Collections.EMPTY_LIST; + return Collections.emptyList(); } return getSelfFireDateList(startTime, endTime, cronExpression); } @@ -202,7 +204,7 @@ public class CronUtils { calendar.add(Calendar.DATE, 1); break; default: - logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name()); + logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum); break; } maxExpirationTime = calendar.getTime(); diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java 
b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java similarity index 97% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java index 10906b42a3..1f807dce7f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleFactory.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleFactory.java @@ -14,18 +14,20 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.utils.cron; +package org.apache.dolphinscheduler.service.quartz.cron; -import org.apache.dolphinscheduler.common.enums.CycleEnum; import com.cronutils.model.Cron; import com.cronutils.model.field.expression.Always; import com.cronutils.model.field.expression.QuestionMark; +import org.apache.dolphinscheduler.common.enums.CycleEnum; /** * Crontab Cycle Tool Factory */ public class CycleFactory { - + private CycleFactory() { + throw new IllegalStateException("CycleFactory class"); + } /** * min * @param cron cron diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleLinks.java similarity index 97% rename from dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleLinks.java index 63824bda8e..9f01b18868 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/cron/CycleLinks.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/cron/CycleLinks.java @@ -14,10 +14,10 @@ 
* See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.utils.cron; +package org.apache.dolphinscheduler.service.quartz.cron; -import org.apache.dolphinscheduler.common.enums.CycleEnum; import com.cronutils.model.Cron; +import org.apache.dolphinscheduler.common.enums.CycleEnum; import java.util.ArrayList; import java.util.List; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java index 5beb8111ad..bed8a11247 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/ITaskQueue.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/ITaskQueue.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.queue; +package org.apache.dolphinscheduler.service.queue; import java.util.List; import java.util.Set; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java similarity index 93% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java index 0a2d943118..6be419f5a9 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueFactory.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueFactory.java @@ -14,11 +14,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.queue; +package org.apache.dolphinscheduler.service.queue; -import org.apache.dolphinscheduler.common.utils.CommonUtils; import org.apache.commons.lang.StringUtils; -import org.apache.dolphinscheduler.common.utils.SpringApplicationContext; +import org.apache.dolphinscheduler.common.utils.CommonUtils; +import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java similarity index 97% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java index d442c13ebc..9c1d318ea5 100644 --- 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/queue/TaskQueueZkImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/TaskQueueZkImpl.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.queue; +package org.apache.dolphinscheduler.service.queue; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.IpUtils; import org.apache.dolphinscheduler.common.utils.OSUtils; -import org.apache.dolphinscheduler.common.zk.ZookeeperOperator; +import org.apache.dolphinscheduler.service.zk.ZookeeperOperator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -269,7 +269,7 @@ public class TaskQueueZkImpl implements ITaskQueue { zookeeperOperator.remove(taskIdPath); }catch(Exception e){ - logger.error(String.format("delete task:%s from zookeeper fail, exception:" ,nodeValue) ,e); + logger.error("delete task:{} from zookeeper fail, exception:" ,nodeValue ,e); } } @@ -318,7 +318,7 @@ public class TaskQueueZkImpl implements ITaskQueue { zookeeperOperator.remove(path + value); }catch(Exception e){ - logger.error(String.format("delete task:" + value + " exception"),e); + logger.error("delete task:{} exception",value,e); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java similarity index 99% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java index f62e106680..135bfdabc6 100644 --- 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/AbstractZKClient.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.zk; +package org.apache.dolphinscheduler.service.zk; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.imps.CuratorFrameworkState; @@ -31,12 +31,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; + import static org.apache.dolphinscheduler.common.Constants.*; /** * abstract zookeeper client */ -public abstract class AbstractZKClient extends ZookeeperCachedOperator{ +public abstract class AbstractZKClient extends ZookeeperCachedOperator { private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/DefaultEnsembleProvider.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/DefaultEnsembleProvider.java similarity index 96% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/DefaultEnsembleProvider.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/DefaultEnsembleProvider.java index 0cf06c0503..9eedf7a4ca 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/DefaultEnsembleProvider.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/DefaultEnsembleProvider.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; +package org.apache.dolphinscheduler.service.zk; import org.apache.curator.ensemble.EnsembleProvider; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperCachedOperator.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java similarity index 90% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperCachedOperator.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java index 5aa25552d7..dccb768f8b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperCachedOperator.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperCachedOperator.java @@ -14,22 +14,17 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; +package org.apache.dolphinscheduler.service.zk; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.TreeCache; import org.apache.curator.framework.recipes.cache.TreeCacheEvent; -import org.apache.curator.framework.recipes.cache.TreeCacheListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import java.nio.charset.StandardCharsets; -import java.util.concurrent.ConcurrentHashMap; - -import static org.apache.dolphinscheduler.common.utils.Preconditions.*; -import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; @Component public class ZookeeperCachedOperator extends ZookeeperOperator { diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperConfig.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java similarity index 98% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperConfig.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java index 75a9f6c5f4..c6bdfc3b02 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperConfig.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperConfig.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; +package org.apache.dolphinscheduler.service.zk; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.PropertySource; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperOperator.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java similarity index 98% rename from dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperOperator.java rename to dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java index 9442afd7a0..a2cabce805 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/zk/ZookeeperOperator.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZookeeperOperator.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.zk; +package org.apache.dolphinscheduler.service.zk; import org.apache.commons.lang.StringUtils; import org.apache.curator.framework.CuratorFramework; @@ -33,12 +33,10 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; -import org.springframework.util.CollectionUtils; import java.nio.charset.StandardCharsets; import java.util.List; -import static org.apache.dolphinscheduler.common.utils.Preconditions.*; import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; /** diff --git a/dolphinscheduler-common/src/main/resources/quartz.properties b/dolphinscheduler-service/src/main/resources/quartz.properties similarity index 96% rename from dolphinscheduler-common/src/main/resources/quartz.properties rename to dolphinscheduler-service/src/main/resources/quartz.properties index 2e3a2a0dc1..9c8930b647 100644 --- a/dolphinscheduler-common/src/main/resources/quartz.properties +++ b/dolphinscheduler-service/src/main/resources/quartz.properties @@ -59,6 +59,6 @@ org.quartz.jobStore.dataSource = myDs #============================================================================ # Configure Datasources #============================================================================ -org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.dao.quartz.DruidConnectionProvider +org.quartz.dataSource.myDs.connectionProvider.class = org.apache.dolphinscheduler.service.quartz.DruidConnectionProvider org.quartz.dataSource.myDs.maxConnections = 10 org.quartz.dataSource.myDs.validationQuery = select 1 \ No newline at end of file diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java b/dolphinscheduler-service/src/test/java/cron/CronUtilsTest.java similarity index 90% rename from 
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java rename to dolphinscheduler-service/src/test/java/cron/CronUtilsTest.java index 1135cf20f5..6a402b5e67 100644 --- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/cron/CronUtilsTest.java +++ b/dolphinscheduler-service/src/test/java/cron/CronUtilsTest.java @@ -14,11 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.dao.cron; +package cron; -import org.apache.dolphinscheduler.common.enums.CycleEnum; -import org.apache.dolphinscheduler.common.utils.DateUtils; -import org.apache.dolphinscheduler.dao.utils.cron.CronUtils; import com.cronutils.builder.CronBuilder; import com.cronutils.model.Cron; import com.cronutils.model.CronType; @@ -26,6 +23,9 @@ import com.cronutils.model.definition.CronDefinitionBuilder; import com.cronutils.model.field.CronField; import com.cronutils.model.field.CronFieldName; import com.cronutils.model.field.expression.*; +import org.apache.dolphinscheduler.common.enums.CycleEnum; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -61,7 +61,7 @@ public class CronUtilsTest { String cronAsString = cron.asString(); // 0 */5 * * * ? * Every five minutes(once every 5 minutes) - Assert.assertEquals(cronAsString, "0 */5 * * * ? *"); + Assert.assertEquals("0 */5 * * * ? *", cronAsString); } @@ -74,12 +74,12 @@ public class CronUtilsTest { String strCrontab = "0 1 2 3 * ? 
*"; Cron depCron = CronUtils.parse2Cron(strCrontab); - Assert.assertEquals(depCron.retrieve(CronFieldName.SECOND).getExpression().asString(), "0"); - Assert.assertEquals(depCron.retrieve(CronFieldName.MINUTE).getExpression().asString(), "1"); - Assert.assertEquals(depCron.retrieve(CronFieldName.HOUR).getExpression().asString(), "2"); - Assert.assertEquals(depCron.retrieve(CronFieldName.DAY_OF_MONTH).getExpression().asString(), "3"); - Assert.assertEquals(depCron.retrieve(CronFieldName.MONTH).getExpression().asString(), "*"); - Assert.assertEquals(depCron.retrieve(CronFieldName.YEAR).getExpression().asString(), "*"); + Assert.assertEquals("0", depCron.retrieve(CronFieldName.SECOND).getExpression().asString()); + Assert.assertEquals("1", depCron.retrieve(CronFieldName.MINUTE).getExpression().asString()); + Assert.assertEquals("2", depCron.retrieve(CronFieldName.HOUR).getExpression().asString()); + Assert.assertEquals("3", depCron.retrieve(CronFieldName.DAY_OF_MONTH).getExpression().asString()); + Assert.assertEquals("*", depCron.retrieve(CronFieldName.MONTH).getExpression().asString()); + Assert.assertEquals("*", depCron.retrieve(CronFieldName.YEAR).getExpression().asString()); } /** @@ -89,13 +89,13 @@ public class CronUtilsTest { @Test public void testScheduleType() throws ParseException { CycleEnum cycleEnum = CronUtils.getMaxCycle(CronUtils.parse2Cron("0 */1 * * * ? *")); - Assert.assertEquals(cycleEnum.name(), "MINUTE"); + Assert.assertEquals("MINUTE", cycleEnum.name()); CycleEnum cycleEnum2 = CronUtils.getMaxCycle("0 * * * * ? *"); - Assert.assertEquals(cycleEnum2.name(), "MINUTE"); + Assert.assertEquals("MINUTE", cycleEnum2.name()); CycleEnum cycleEnum3 = CronUtils.getMiniCycle(CronUtils.parse2Cron("0 * * * * ? 
*")); - Assert.assertEquals(cycleEnum3.name(), "MINUTE"); + Assert.assertEquals("MINUTE", cycleEnum3.name()); } /** @@ -164,6 +164,7 @@ public class CronUtilsTest { logger.info("can't get scheduleType"); } } + Assert.assertTrue(true); } @Test diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/BaseTaskQueueTest.java b/dolphinscheduler-service/src/test/java/queue/BaseTaskQueueTest.java similarity index 90% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/BaseTaskQueueTest.java rename to dolphinscheduler-service/src/test/java/queue/BaseTaskQueueTest.java index 433e4fa30f..a0cc457e22 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/BaseTaskQueueTest.java +++ b/dolphinscheduler-service/src/test/java/queue/BaseTaskQueueTest.java @@ -14,9 +14,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.queue; +package queue; -import org.apache.dolphinscheduler.common.zk.ZKServer; +import org.apache.dolphinscheduler.service.queue.ITaskQueue; +import org.apache.dolphinscheduler.service.queue.TaskQueueFactory; import org.junit.*; /** diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueZKImplTest.java b/dolphinscheduler-service/src/test/java/queue/TaskQueueZKImplTest.java similarity index 99% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueZKImplTest.java rename to dolphinscheduler-service/src/test/java/queue/TaskQueueZKImplTest.java index b34a7d6924..d29c5aa610 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/queue/TaskQueueZKImplTest.java +++ b/dolphinscheduler-service/src/test/java/queue/TaskQueueZKImplTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under 
the License. */ -package org.apache.dolphinscheduler.common.queue; +package queue; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.utils.IpUtils; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/ZKServer.java b/dolphinscheduler-service/src/test/java/queue/ZKServer.java similarity index 99% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/ZKServer.java rename to dolphinscheduler-service/src/test/java/queue/ZKServer.java index fc39e62ed8..65fb95c02b 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/zk/ZKServer.java +++ b/dolphinscheduler-service/src/test/java/queue/ZKServer.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.dolphinscheduler.common.zk; +package queue; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PreconditionsTest.java b/dolphinscheduler-service/src/test/java/utils/PreconditionsTest.java similarity index 97% rename from dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PreconditionsTest.java rename to dolphinscheduler-service/src/test/java/utils/PreconditionsTest.java index dcb0e1370e..a1b85f1b12 100644 --- a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/PreconditionsTest.java +++ b/dolphinscheduler-service/src/test/java/utils/PreconditionsTest.java @@ -14,8 +14,9 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.dolphinscheduler.common.utils; +package utils; +import org.apache.dolphinscheduler.common.utils.Preconditions; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; @@ -35,7 +36,7 @@ public class PreconditionsTest { public void testCheckNotNull() throws Exception { String testReference = "test reference"; //test reference is not null - Assert.assertEquals(testReference,Preconditions.checkNotNull(testReference)); + Assert.assertEquals(testReference, Preconditions.checkNotNull(testReference)); Assert.assertEquals(testReference,Preconditions.checkNotNull(testReference,"reference is null")); Assert.assertEquals(testReference,Preconditions.checkNotNull(testReference,"%s is null",testReference)); diff --git a/dolphinscheduler-ui/.env b/dolphinscheduler-ui/.env index 4c7e96e795..e676be6059 100644 --- a/dolphinscheduler-ui/.env +++ b/dolphinscheduler-ui/.env @@ -17,4 +17,4 @@ API_BASE = http://192.168.xx.xx:12345 # If IP access is required for local development, remove the "#" -#DEV_HOST = 192.168.xx.xx \ No newline at end of file +#DEV_HOST = 192.168.xx.xx diff --git a/dolphinscheduler-ui/package.json b/dolphinscheduler-ui/package.json index 421fd394d6..da15b722fc 100644 --- a/dolphinscheduler-ui/package.json +++ b/dolphinscheduler-ui/package.json @@ -53,7 +53,7 @@ "html-loader": "^0.5.5", "html-webpack-plugin": "^3.2.0", "mini-css-extract-plugin": "^0.8.2", - "node-sass": "^4.13.0", + "node-sass": "^4.13.1", "postcss-loader": "^3.0.0", "progress-bar-webpack-plugin": "^1.12.1", "rimraf": "^2.6.2", diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js old mode 100644 new mode 100755 index e8187043bf..a9a51aa2b1 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js @@ -283,6 +283,14 @@ let tasksType = { 'DATAX': { desc: 'DataX', color: '#1fc747' + }, + 
'SQOOP': { + desc: 'SQOOP', + color: '#E46F13' + }, + 'CONDITIONS': { + desc: 'CONDITIONS', + color: '#E46F13' } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js index 8225673ecd..240f3246aa 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.js @@ -49,10 +49,16 @@ Dag.prototype.setConfig = function (o) { * create dag */ Dag.prototype.create = function () { + let self = this jsPlumb.ready(() => { JSP.init({ dag: this.dag, - instance: this.instance + instance: this.instance, + options: { + onRemoveNodes ($id) { + self.dag.removeEventModelById($id) + } + } }) // init event @@ -108,7 +114,7 @@ Dag.prototype.backfill = function (arg) { tmp.push(locationsValue2[i]) } } - + function copy (array) { let newArray = [] for(let item of array) { @@ -117,7 +123,7 @@ Dag.prototype.backfill = function (arg) { return newArray; } - + let newArr = copy(arr) function getNewArr() { for(let i= 0; i1) { dataObject[Object.keys(locationsValue1)[0]].y = (countTree/2)*120+50 } - + locationsValue = dataObject + let self = this jsPlumb.ready(() => { JSP.init({ dag: this.dag, - instance: this.instance + instance: this.instance, + options: { + onRemoveNodes ($id) { + self.dag.removeEventModelById($id) + } + } }) // Backfill JSP.jspBackfill({ @@ -298,10 +310,16 @@ Dag.prototype.backfill = function (arg) { }) }) } else { + let self = this jsPlumb.ready(() => { JSP.init({ dag: this.dag, - instance: this.instance + instance: this.instance, + options: { + onRemoveNodes ($id) { + self.dag.removeEventModelById($id) + } + } }) // Backfill JSP.jspBackfill({ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss old mode 100644 new mode 100755 index 420bae8c89..6d97856960 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss 
+++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss @@ -104,6 +104,12 @@ .icos-DATAX { background: url("../img/toolbar_DATAX.png") no-repeat 50% 50%; } + .icos-SQOOP { + background: url("../img/toolbar_SQOOP.png") no-repeat 50% 50%; + } + .icos-CONDITIONS { + background: url("../img/toobar_CONDITIONS.png") no-repeat 50% 50%; + } .toolbar { width: 60px; height: 100%; diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue old mode 100644 new mode 100755 index 40b6d85198..d912a9a884 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue @@ -61,7 +61,7 @@

-
+ - 0) { + rearNode.forEach(v => { + let rearobj = {} + rearobj.value = $(`#${v}`).find('.name-p').text() + rearobj.label = $(`#${v}`).find('.name-p').text() + rearList.push(rearobj) + }) + } else { + rearList = [] + } + let targetarr = $(`#${id}`).attr('data-targetarr') + if (targetarr) { + let nodearr = targetarr.split(',') + nodearr.forEach(v => { + let nodeobj = {} + nodeobj.value = $(`#${v}`).find('.name-p').text() + nodeobj.label = $(`#${v}`).find('.name-p').text() + preNode.push(nodeobj) + }) + } else { + preNode = [] + } if (eventModel) { eventModel.remove() } @@ -486,6 +514,7 @@ } this.taskId = id + type = type || self.dagBarId eventModel = this.$drawer({ closable: false, @@ -522,11 +551,18 @@ }, props: { id: id, - taskType: type || self.dagBarId, - self: self + taskType: type, + self: self, + preNode: preNode, + rearList: rearList } }) }) + }, + removeEventModelById ($id) { + if(eventModel && this.taskId == $id){ + eventModel.remove() + } } }, watch: { @@ -580,6 +616,9 @@ clearInterval(this.setIntervalP) }, destroyed () { + if (eventModel) { + eventModel.remove() + } }, computed: { ...mapState('dag', ['tasks', 'locations', 'connects', 'isEditDag', 'name']) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue old mode 100644 new mode 100755 index e05de8e880..2500ce5772 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue @@ -109,6 +109,43 @@ ({{$t('Minute')}})
+
+
+ {{$t('State')}} +
+
+ + + + + + + {{$t('Branch flow')}} + + + + +
+
+ +
+
+ {{$t('State')}} +
+
+ + + + + + + {{$t('Branch flow')}} + + + + +
+
+ + + +
@@ -229,6 +280,8 @@ import mDependent from './tasks/dependent' import mHttp from './tasks/http' import mDatax from './tasks/datax' + import mConditions from './tasks/CONDITIONS' + import mSqoop from './tasks/sqoop' import mSubProcess from './tasks/sub_process' import mSelectInput from './_source/selectInput' import mTimeoutAlarm from './_source/timeoutAlarm' @@ -245,13 +298,21 @@ // loading spinnerLoading: false, // node name - name: ``, + name: '', // description description: '', // Node echo data backfillItem: {}, // Resource(list) resourcesList: [], + successNode: 'success', + failedNode: 'failed', + successBranch: '', + failedBranch: '', + conditionResult: { + 'successNode': [], + 'failedNode': [] + }, // dependence dependence: {}, // cache dependence @@ -271,7 +332,17 @@ // Task priority taskInstancePriority: 'MEDIUM', // worker group id - workerGroupId: -1 + workerGroupId: -1, + stateList:[ + { + value: 'success', + label: `${i18n.$t('success')}` + }, + { + value: 'failed', + label: `${i18n.$t('failed')}` + } + ] } }, /** @@ -282,7 +353,9 @@ props: { id: Number, taskType: String, - self: Object + self: Object, + preNode: Array, + rearList: Array }, methods: { /** @@ -391,6 +464,10 @@ this.$message.warning(`${i18n.$t('Please enter name (required)')}`) return false } + if (this.successBranch !='' && this.successBranch == this.failedBranch) { + this.$message.warning(`${i18n.$t('Cannot select the same node for successful branch flow and failed branch flow')}`) + return false + } if (this.name === this.backfillItem.name) { return true } @@ -419,6 +496,8 @@ } $(`#${this.id}`).find('span').text(this.name) + this.conditionResult.successNode[0] = this.successBranch + this.conditionResult.failedNode[0] = this.failedBranch // Store the corresponding node data structure this.$emit('addTaskInfo', { item: { @@ -428,12 +507,15 @@ params: this.params, description: this.description, runFlag: this.runFlag, + conditionResult: this.conditionResult, dependence: this.dependence, 
maxRetryTimes: this.maxRetryTimes, retryInterval: this.retryInterval, timeout: this.timeout, taskInstancePriority: this.taskInstancePriority, - workerGroupId: this.workerGroupId + workerGroupId: this.workerGroupId, + status: this.status, + branch: this.branch }, fromThis: this }) @@ -518,7 +600,10 @@ this.description = o.description this.maxRetryTimes = o.maxRetryTimes this.retryInterval = o.retryInterval - + if(o.conditionResult) { + this.successBranch = o.conditionResult.successNode[0] + this.failedBranch = o.conditionResult.failedNode[0] + } // If the workergroup has been deleted, set the default workergroup var hasMatch = false; for (let i = 0; i < this.store.state.security.workerGroupsListAll.length; i++) { @@ -589,6 +674,8 @@ mDependent, mHttp, mDatax, + mSqoop, + mConditions, mSelectInput, mTimeoutAlarm, mPriority, diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js old mode 100644 new mode 100755 index fc8fe654d2..cdf632f13d --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/commcon.js @@ -232,6 +232,16 @@ const positionList = [ code: "Headers" } ] +const nodeStatusList = [ + { + value: 'SUCCESS', + label: `${i18n.$t('success')}` + }, + { + value: 'FAILURE', + label: `${i18n.$t('failed')}` + } +] export { cycleList, @@ -239,5 +249,6 @@ export { typeList, directList, sqlTypeList, - positionList + positionList, + nodeStatusList } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue new file mode 100644 index 0000000000..4afb8b46c5 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/_source/nodeStatus.vue 
@@ -0,0 +1,231 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue new file mode 100644 index 0000000000..4ac04d91a6 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/conditions.vue @@ -0,0 +1,265 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + + \ No newline at end of file diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue new file mode 100644 index 0000000000..eead745a06 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/sqoop.vue @@ -0,0 +1,981 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js old mode 100644 new mode 100755 index 6523a1c298..598c94209e --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js @@ -58,11 +58,13 @@ let JSP = function () { /** * dag init */ -JSP.prototype.init = function ({ dag, instance }) { +JSP.prototype.init = function ({ dag, instance, options }) { // Get the dag component instance this.dag = dag // Get jsplumb instance this.JspInstance = instance + // Get JSP options + this.options = options || {} // Register jsplumb connection type and configuration this.JspInstance.registerConnectionType('basic', { anchor: 'Continuous', @@ -133,15 +135,6 @@ JSP.prototype.draggable = function () { helper: 'clone', containment: $('.dag-model'), stop: function (e, ui) { - self.tasksEvent(selfId) - - // Dom structure is not generated without pop-up form form - if ($(`#${selfId}`).html()) { - // dag event - findComponentDownward(self.dag.$root, 'dag-chart')._createNodes({ - id: selfId - }) - } }, drag: function () { $('body').find('.tooltip.fade.top.in').remove() @@ -176,6 +169,16 @@ JSP.prototype.draggable = function () { self.initNode(thisDom[thisDom.length - 1]) }) selfId = id + + self.tasksEvent(selfId) + + // Dom structure is not generated without pop-up form form + if ($(`#${selfId}`).html()) { + // dag event + findComponentDownward(self.dag.$root, 'dag-chart')._createNodes({ + id: selfId + }) + } } }) } @@ -195,7 +198,8 @@ JSP.prototype.jsonHandle = function ({ largeJson, locations }) { targetarr: locations[v.id]['targetarr'], isAttachment: this.config.isAttachment, taskType: v.type, - runFlag: v.runFlag + runFlag: v.runFlag, + nodenumber: locations[v.id]['nodenumber'], })) // contextmenu event @@ -494,6 +498,9 @@ 
JSP.prototype.removeNodes = function ($id) { // delete dom $(`#${$id}`).remove() + + // callback onRemoveNodes event + this.options&&this.options.onRemoveNodes&&this.options.onRemoveNodes($id) } /** @@ -511,6 +518,9 @@ JSP.prototype.removeConnect = function ($connect) { targetarr = _.filter(targetarr, v => v !== sourceId) $(`#${targetId}`).attr('data-targetarr', targetarr.toString()) } + if ($(`#${sourceId}`).attr('data-tasks-type')=='CONDITIONS') { + $(`#${sourceId}`).attr('data-nodenumber',Number($(`#${sourceId}`).attr('data-nodenumber'))-1) + } this.JspInstance.deleteConnection($connect) this.selectedElement = {} @@ -566,6 +576,7 @@ JSP.prototype.copyNodes = function ($id) { [newId]: { name: newName, targetarr: '', + nodenumber: 0, x: newX, y: newY } @@ -652,6 +663,7 @@ JSP.prototype.saveStore = function () { locations[v.id] = { name: v.name, targetarr: v.targetarr, + nodenumber: v.nodenumber, x: v.x, y: v.y } @@ -705,6 +717,12 @@ JSP.prototype.handleEvent = function () { return false } + if ($(`#${sourceId}`).attr('data-tasks-type')=='CONDITIONS' && $(`#${sourceId}`).attr('data-nodenumber')==2) { + return false + } else { + $(`#${sourceId}`).attr('data-nodenumber',Number($(`#${sourceId}`).attr('data-nodenumber'))+1) + } + // Storage node dependency information saveTargetarr(sourceId, targetId) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js old mode 100644 new mode 100755 index c10dfda5d6..4b485fec0b --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js @@ -43,9 +43,9 @@ const rtBantpl = () => { /** * return node html */ -const rtTasksTpl = ({ id, name, x, y, targetarr, isAttachment, taskType, runFlag }) => { +const rtTasksTpl = ({ id, name, x, y, targetarr, isAttachment, taskType, runFlag, nodenumber }) => { let tpl = `` - tpl += `
` + tpl += `
` tpl += `
` tpl += `
` tpl += `
` @@ -73,6 +73,7 @@ const tasksAll = () => { id: e.attr('id'), name: e.find('.name-p').text(), targetarr: e.attr('data-targetarr') || '', + nodenumber: e.attr('data-nodenumber'), x: parseInt(e.css('left'), 10), y: parseInt(e.css('top'), 10) }) diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_CONDITIONS.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_CONDITIONS.png new file mode 100644 index 0000000000..e8c5e38339 Binary files /dev/null and b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toobar_CONDITIONS.png differ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQOOP.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQOOP.png new file mode 100644 index 0000000000..2ab3b6bd4a Binary files /dev/null and b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_SQOOP.png differ diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue index 2f30f0bea4..1ef2e1f3e4 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/index.vue @@ -54,6 +54,9 @@
+
+ +
@@ -80,7 +83,9 @@ // search value searchVal: '', // host - host: '' + host: '', + // executor name + executorName: '' } } }, diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue index 1410a67f90..57ae6bd685 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/_source/list.vue @@ -28,6 +28,9 @@ {{$t('Process Name')}} + + {{$t('Executor')}} + {{$t('Run Type')}} @@ -67,6 +70,10 @@ {{item.name}} + + {{item.executorName}} + - + {{_rtRunningType(item.commandType)}} {{item.scheduleTime | formatDate}} diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue index 7bcf9ac26b..b95d4ed720 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/instance/pages/list/index.vue @@ -71,7 +71,9 @@ // Start Time startDate: '', // End Time - endDate: '' + endDate: '', + // Exectuor Name + executorName: '' } } }, diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue index bb972c3f3b..f7be553568 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue @@ -28,6 +28,9 @@ {{$t('Process Instance')}} + + {{$t('Executor')}} + {{$t('Node Type')}} @@ -64,6 +67,10 @@ {{item.name}} {{item.processInstanceName}} + + {{item.executorName}} + - + {{item.taskType}} diff --git 
a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue index a23eee5fa0..4cb166647e 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/index.vue @@ -68,7 +68,9 @@ // start date startDate: '', // end date - endDate: '' + endDate: '', + // Exectuor Name + executorName: '' } } }, @@ -147,4 +149,4 @@ }, components: { mList, mInstanceConditions, mSpin, mListConstruction, mSecondaryMenu, mNoData } } - \ No newline at end of file + diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js old mode 100644 new mode 100755 index 6ceabed8c1..b914b86740 --- a/dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/dag/mutations.js @@ -134,6 +134,7 @@ export default { state.locations[payload.id] = _.assign(state.locations[payload.id], { name: dom.find('.name-p').text(), targetarr: dom.attr('data-targetarr'), + nodenumber: dom.attr('data-nodenumber'), x: parseInt(dom.css('left'), 10), y: parseInt(dom.css('top'), 10) }) diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js old mode 100644 new mode 100755 index 0402d7e398..e8ac57adc0 --- a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js @@ -240,6 +240,7 @@ export default { 'Service-Master': 'Service-Master', 'Service-Worker': 'Service-Worker', 'Process Name': 'Process Name', + 'Executor': 'Executor', 'Run Type': 'Run Type', 'Scheduling Time': 'Scheduling Time', 'Run Times': 'Run Times', @@ -518,5 +519,54 @@ export default { 'SpeedRecord': 'speed(record count)', '0 means unlimited by byte': '0 means unlimited', '0 means unlimited by count': 
'0 means unlimited', - 'Modify User': 'Modify User' + 'Modify User': 'Modify User', + 'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)', + 'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)', + 'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)', + 'Please enter Target Dir(required)': 'Please enter Target Dir(required)', + 'Please enter Export Dir(required)': 'Please enter Export Dir(required)', + 'Please enter Hive Database(required)': 'Please enter Hive Databasec(required)', + 'Please enter Hive Table(required)': 'Please enter Hive Table(required)', + 'Please enter Hive Partition Keys': 'Please enter Hive Partition Key', + 'Please enter Hive Partition Values': 'Please enter Partition Value', + 'Please enter Replace Delimiter': 'Please enter Replace Delimiter', + 'Please enter Fields Terminated': 'Please enter Fields Terminated', + 'Please enter Lines Terminated': 'Please enter Lines Terminated', + 'Please enter Concurrency': 'Please enter Concurrency', + 'Please enter Update Key': 'Please enter Update Key', + 'Direct': 'Direct', + 'Type': 'Type', + 'ModelType': 'ModelType', + 'ColumnType': 'ColumnType', + 'Database': 'Database', + 'Column': 'Column', + 'Map Column Hive': 'Map Column Hive', + 'Map Column Java': 'Map Column Java', + 'Export Dir': 'Export Dir', + 'Hive partition Keys': 'Hive partition Keys', + 'Hive partition Values': 'Hive partition Values', + 'FieldsTerminated': 'FieldsTerminated', + 'LinesTerminated': 'LinesTerminated', + 'IsUpdate': 'IsUpdate', + 'UpdateKey': 'UpdateKey', + 'UpdateMode': 'UpdateMode', + 'Target Dir': 'Target Dir', + 'DeleteTargetDir': 'DeleteTargetDir', + 'FileType': 'FileType', + 'CompressionCodec': 'CompressionCodec', + 'CreateHiveTable': 'CreateHiveTable', + 'DropDelimiter': 'DropDelimiter', + 'OverWriteSrc': 'OverWriteSrc', + 'ReplaceDelimiter': 'ReplaceDelimiter', + 'Concurrency': 'Concurrency', + 'Form': 'Form', + 'OnlyUpdate': 
'OnlyUpdate', + 'AllowInsert': 'AllowInsert', + 'Data Source': 'Data Source', + 'Data Target': 'Data Target', + 'All Columns': 'All Columns', + 'Some Columns': 'Some Columns', + 'Modify User': 'Modify User', + 'Branch flow': 'Branch flow', + 'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow' } diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js old mode 100644 new mode 100755 index 95eb4a1081..c72090657b --- a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js +++ b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js @@ -239,6 +239,7 @@ export default { 'Service-Master': '服务管理-Master', 'Service-Worker': '服务管理-Worker', 'Process Name': '工作流名称', + 'Executor': '执行用户', 'Run Type': '运行类型', 'Scheduling Time': '调度时间', 'Run Times': '运行次数', @@ -518,5 +519,54 @@ export default { 'SpeedRecord': '限流(记录数)', '0 means unlimited by byte': 'KB,0代表不限制', '0 means unlimited by count': '0代表不限制', - 'Modify User': '修改用户' + 'Modify User': '修改用户', + 'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)', + 'Please enter Mysql Table(required)': '请输入Mysql表名(必填)', + 'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开', + 'Please enter Target Dir(required)': '请输入目标路径(必填)', + 'Please enter Export Dir(required)': '请输入数据源路径(必填)', + 'Please enter Hive Database(required)': '请输入Hive数据库(必填)', + 'Please enter Hive Table(required)': '请输入Hive表名(必填)', + 'Please enter Hive Partition Keys': '请输入分区键', + 'Please enter Hive Partition Values': '请输入分区值', + 'Please enter Replace Delimiter': '请输入替换分隔符', + 'Please enter Fields Terminated': '请输入列分隔符', + 'Please enter Lines Terminated': '请输入行分隔符', + 'Please enter Concurrency': '请输入并发度', + 'Please enter Update Key': '请输入更新列', + 'Direct': '流向', + 'Type': '类型', + 'ModelType': '模式', + 'ColumnType': '列类型', + 'Database': '数据库', + 'Column': '列', + 'Map Column Hive': 'Hive类型映射', + 
'Map Column Java': 'Java类型映射', + 'Export Dir': '数据源路径', + 'Hive partition Keys': 'Hive 分区键', + 'Hive partition Values': 'Hive 分区值', + 'FieldsTerminated': '列分隔符', + 'LinesTerminated': '行分隔符', + 'IsUpdate': '是否更新', + 'UpdateKey': '更新列', + 'UpdateMode': '更新类型', + 'Target Dir': '目标路径', + 'DeleteTargetDir': '是否删除目录', + 'FileType': '保存格式', + 'CompressionCodec': '压缩类型', + 'CreateHiveTable': '是否创建新表', + 'DropDelimiter': '是否删除分隔符', + 'OverWriteSrc': '是否覆盖数据源', + 'ReplaceDelimiter': '替换分隔符', + 'Concurrency': '并发度', + 'Form': '表单', + 'OnlyUpdate': '只更新', + 'AllowInsert': '无更新便插入', + 'Data Source': '数据来源', + 'Data Target': '数据目的', + 'All Columns': '全表导入', + 'Some Columns': '选择列', + 'Modify User': '修改用户', + 'Branch flow': '分支流转', + 'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点' } diff --git a/e2e/pom.xml b/e2e/pom.xml new file mode 100644 index 0000000000..ea67c0ca16 --- /dev/null +++ b/e2e/pom.xml @@ -0,0 +1,137 @@ + + + 4.0.0 + org.apache.dolphinscheduler-e2e + dolphinscheduler-e2e + 1.0.0 + + + 6.14.3 + 3.141.59 + 22.0 + 2.6 + 1.1.4 + 2.8.0 + 4.2.1 + testng.xml + + + + + + org.seleniumhq.selenium + selenium-java + ${selenium.version} + + + com.google.guava + guava + ${guava.version} + + + com.google.inject + guice + ${inject.version} + + + + org.testng + testng + ${testng.version} + + + + org.uncommons + reportng + ${reportng.version} + test + + + org.testng + testng + + + + + + commons-io + commons-io + ${commons-io.version} + + + + org.apache.servicemix.bundles + org.apache.servicemix.bundles.jedis + 2.6.2_1 + + + org.apache.commons + commons-pool2 + ${commons-pool2.version} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.0 + + 1.8 + 1.8 + UTF-8 + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.20.1 + + + ${xml.file} + + -Dfile.encoding=UTF-8 + + + usedefaultlisteners + + false + + + listener + org.uncommons.reportng.HTMLReporter,org.uncommons.reportng.JUnitXMLReporter + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/e2e/src/main/java/org/apache/dolphinscheduler/constant/TestConstant.java b/e2e/src/main/java/org/apache/dolphinscheduler/constant/TestConstant.java new file mode 100644 index 0000000000..8a3b8eb6b3 --- /dev/null +++ b/e2e/src/main/java/org/apache/dolphinscheduler/constant/TestConstant.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.constant; + +public class TestConstant { + /** + * 1000 + */ + public static final int ONE_THOUSANG = 1000; + + + + /** + * 3000 + */ + public static final int THREE_THOUSANG = 3000; + + /** + * 10000 + */ + public static final int TEN_THOUSANG = 10000; + +} diff --git a/e2e/src/main/java/org/apache/dolphinscheduler/util/PropertiesReader.java b/e2e/src/main/java/org/apache/dolphinscheduler/util/PropertiesReader.java new file mode 100644 index 0000000000..cbf7d07bd7 --- /dev/null +++ b/e2e/src/main/java/org/apache/dolphinscheduler/util/PropertiesReader.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.util; + +import java.io.*; +import java.util.Properties; + +/** + * read properties + */ +public class PropertiesReader { + private static Properties properties = new Properties(); + + /** + * @param propertiesPath properties path + * @return Properties + * @throws IOException IOException + */ + public static Properties readProperties(String propertiesPath) throws IOException { + System.out.println("read properties "); + InputStream inputStream = new FileInputStream(propertiesPath); + InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8"); + BufferedReader bufferedReader = new BufferedReader(inputStreamReader); + properties.load(bufferedReader); + return properties; + } + + /*** + * @param key key + * @return value + */ + public static String getKey(String key) { + return properties.getProperty(key); + } +} diff --git a/e2e/src/main/java/org/apache/dolphinscheduler/util/RedisUtil.java b/e2e/src/main/java/org/apache/dolphinscheduler/util/RedisUtil.java new file mode 100644 index 0000000000..7f9340f0ca --- /dev/null +++ b/e2e/src/main/java/org/apache/dolphinscheduler/util/RedisUtil.java @@ -0,0 +1,204 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.util; + +import org.apache.dolphinscheduler.constant.TestConstant; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.JedisPoolConfig; + +/** + * redis util + */ +public class RedisUtil { + /** + * redis ip + */ + private static String redisIp; + + /** + * redis port + */ + private static int redisPort; + + /** + * redis password + */ + private static String redisPwd; + + /** + * redis pool config + */ + private static JedisPoolConfig jedisPoolConfig; + + /** + * redis pool + */ + private static JedisPool jedisPool; + + /** + * jedis connection + */ + private Jedis jedis; + + /** + * jedis expire time + */ + private int jedisExpireTime; + + /** + * jedis max total + */ + private static int jedisPoolMaxTotal; + + /** + * jedis max idle + */ + private static int jedisPoolMaxIdle; + + /** + * jedis max wait time + */ + private static int jedisPoolMaxWaitMillis; + + /** + * Whether to perform a valid check when calling the borrowObject method + */ + private static boolean jedisPoolTestOnBorrow; + + /** + * Whether to perform a valid check when calling the returnObject method + */ + private static boolean jedisPoolTestOnReturn; + + /** + * storage local thread + */ + public static ThreadLocal 
threadLocal = new ThreadLocal<>(); + + /* + * redis init + */ + static { + // redis properties + redisIp = PropertiesReader.getKey("redis.ip"); + redisPort = Integer.valueOf(PropertiesReader.getKey("redis.port")); + redisPwd = PropertiesReader.getKey("redis.pwd"); + //redis pool properties + jedisPoolMaxTotal = Integer.valueOf(PropertiesReader.getKey("jedis.pool.maxTotal")); + jedisPoolMaxIdle = Integer.valueOf(PropertiesReader.getKey("jedis.pool.maxIdle")); + jedisPoolMaxWaitMillis = Integer.valueOf(PropertiesReader.getKey("jedis.pool.maxWaitMillis")); + jedisPoolTestOnBorrow = Boolean.valueOf(PropertiesReader.getKey("jedis.pool.testOnBorrow")); + jedisPoolTestOnReturn = Boolean.valueOf(PropertiesReader.getKey("jedis.pool.testOnReturn")); + // redis pool start properties + jedisPoolConfig = new JedisPoolConfig(); + jedisPoolConfig.setMaxTotal(jedisPoolMaxTotal); + jedisPoolConfig.setMaxIdle(jedisPoolMaxIdle); + jedisPoolConfig.setMaxWaitMillis(jedisPoolMaxWaitMillis); + jedisPoolConfig.setTestOnBorrow(jedisPoolTestOnBorrow); + jedisPoolConfig.setTestOnReturn(jedisPoolTestOnReturn); + // connect redis + try { + System.out.println("redis init"); + if (redisPwd.isEmpty()) + jedisPool = new JedisPool(jedisPoolConfig, redisIp, redisPort, TestConstant.THREE_THOUSANG); + else { + jedisPool = new JedisPool(jedisPoolConfig, redisIp, redisPort, TestConstant.TEN_THOUSANG, redisPwd); + } + } catch (Exception e) { + e.printStackTrace(); + System.out.println("redis connect fail"); + } + } + + /** + * get redis pool + * + * @return redis pool + */ + public static JedisPool getJedisPool() { + return jedisPool; + } + + /** + * get jedis connection + * + * @return jedis connection + */ + public Jedis getNewJedis() { + Jedis newJedis = null; + try { + newJedis = jedisPool.getResource(); + } catch (Exception e) { + e.printStackTrace(); + System.out.println("redis connection fail"); + } + System.out.println("redis connection success"); + return newJedis; + } + + /** + * get jedis 
connection + * + * @return jedis connection + */ + public Jedis getJedis() { + return jedis; + } + + public void setJedisAndExpire(Jedis jedis) { + this.jedis = jedis; + threadLocal.set(jedis); + // jedis expire time(s) + jedisExpireTime = Integer.valueOf(PropertiesReader.getKey("jedis.expireTime")); + System.out.println("redisUtil sets up a redis connection"); + } + + /** + * set key + * + * @param key key + * @param value value + * + */ + + public void setKey(String key, String value) { + jedis.set(key, value); + // set expire time 1h + jedis.expire(key, jedisExpireTime); + } + + /** + * get key + * + * @param key key + * @return value + */ + public String getKey(String key) { + return jedis.get(key); + } + + /** + * Return jedis connection + */ + public void returnJedis() { + if (jedis != null) { + jedis.close(); + } + System.out.println("jedis has been returned"); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java b/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java new file mode 100644 index 0000000000..7d3ab9b837 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseDriver.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.base; + + +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.util.PropertiesReader; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.WebDriver; +import org.openqa.selenium.chrome.ChromeDriver; +import org.openqa.selenium.chrome.ChromeOptions; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.openqa.selenium.PageLoadStrategy; + + +/** + * base driver class + */ +public class BaseDriver { + /** + * driver + */ + private WebDriver driver; + + /** + * chrome driver path + */ + private String chromeDriverPath; + + + /** + * implicitly wait time(s) + */ + private long implicitlyWait; + + /** + * page load timeout(s) + */ + private long pageLoadTimeout; + + /** + * script Timeout(s) + */ + private long setScriptTimeout; + + + /** + * Local thread storage is used to store the driver + */ + public static ThreadLocal threadLocal = new ThreadLocal<>(); + + /** + *Initialization parameters + */ + public BaseDriver() throws IOException { + /* driver test class path */ + chromeDriverPath = PropertiesReader.getKey("driver.chromeDriver"); + + /* wait time */ + implicitlyWait = Long.valueOf(PropertiesReader.getKey("driver.timeouts.implicitlyWait")); + pageLoadTimeout = Long.valueOf(PropertiesReader.getKey("driver.timeouts.pageLoadTimeout")); + setScriptTimeout = Long.valueOf(PropertiesReader.getKey("driver.timeouts.setScriptTimeout")); + } + + + /** + * start chrome browser + */ + public void startBrowser() throws Exception { + // set chrome driver + System.setProperty("webdriver.chrome.driver", chromeDriverPath); + ChromeOptions chromeOptions = new ChromeOptions(); + chromeOptions.setPageLoadStrategy(PageLoadStrategy.NONE); + chromeOptions.addArguments("--no-sandbox"); + chromeOptions.addArguments("--disable-dev-shm-usage"); + chromeOptions.addArguments("--headless"); + chromeOptions.addArguments("--disable-gpu"); + 
chromeOptions.addArguments("--whitelisted-ips"); + chromeOptions.addArguments("--disable-infobars"); + chromeOptions.addArguments("--disable-browser-side-navigation"); + driver = new ChromeDriver(chromeOptions); + + /* driver setting wait time */ + // implicitly wait time + driver.manage().timeouts().implicitlyWait(implicitlyWait, TimeUnit.SECONDS); + + // page load timeout + driver.manage().timeouts().pageLoadTimeout(pageLoadTimeout, TimeUnit.SECONDS); + + // page load timeout + driver.manage().timeouts().pageLoadTimeout(pageLoadTimeout, TimeUnit.SECONDS); + + // script timeout + driver.manage().timeouts().setScriptTimeout(setScriptTimeout, TimeUnit.SECONDS); + + // window maximize + driver.manage().window().maximize(); + + // set threadLocal + threadLocal.set(driver); + } + + /** + * get webDriver + * + * @return driver + */ + public WebDriver getDriver() { + return driver; + } + + /** + * set webDriver + * + * @param driver driver + */ + public void setDriver(WebDriver driver) { + this.driver = driver; + // Thread local storage + threadLocal.set(driver); + } + + /** + * close browser + */ + public void closeBrowser() throws InterruptedException { + // JS Show a pop-up box to indicate the end of the test + Thread.sleep(TestConstant.ONE_THOUSANG); + ((JavascriptExecutor) driver).executeScript("alert('Test completed, browser closes after 3s')"); + Thread.sleep(TestConstant.THREE_THOUSANG); + if (driver != null) { + driver.quit(); + } + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java new file mode 100644 index 0000000000..c12c19fc1d --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/base/BaseTest.java @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.base; + + +import org.apache.dolphinscheduler.page.LoginPage; +import org.apache.dolphinscheduler.util.PropertiesReader; +import org.openqa.selenium.WebDriver; +import org.testng.annotations.*; + +import java.io.IOException; +import java.util.Properties; + +/** + * base test class + */ +public class BaseTest { + /** + * properties + */ + private static Properties properties; + + + /** + * baseDriver + */ + private BaseDriver baseDriver; + + /** + * driver + */ + public WebDriver driver; + + /** + * Executed before executing a test suite  + * Read the test configuration file + * + * @param propertiesPath properties path + * @throws IOException IOException + */ + @BeforeSuite(alwaysRun = true) + @Parameters({"propertiesPath"}) + public void beforeSuite(@Optional("src/test/resources/config/config.properties") String propertiesPath) throws IOException { + // read properties + properties = PropertiesReader.readProperties(propertiesPath); + } + + /** + * Executed before executing a testcase + */ + @BeforeTest(alwaysRun = true) + public void beforeTest() throws Exception { + //base driver + baseDriver = new BaseDriver(); + baseDriver.startBrowser(); + driver = baseDriver.getDriver(); + } + + /** + * Executed before executing a class method in a test case + */ + @BeforeClass(alwaysRun = true) + public void setUp() throws IOException, InterruptedException { 
+ LoginPage loginPage = new LoginPage(driver); + loginPage.jumpPage(); + loginPage.login(); + } + + + /** + * Execute after executing a class method in a test case + */ + @AfterClass(alwaysRun = true) + public void afterClass() { + // logout + } + + /** + * Execute after executing a testcase + */ + @AfterTest(alwaysRun = true) + public void afterTest() throws InterruptedException { + // close browser + baseDriver.closeBrowser(); + } + + /** + * Execute after executing a testsuite + */ + @AfterSuite(alwaysRun = true) + public void afterSuite() { + } +} \ No newline at end of file diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java b/e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java new file mode 100644 index 0000000000..072ccb652e --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java @@ -0,0 +1,374 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common; + +import org.apache.dolphinscheduler.util.PropertiesReader; +import org.apache.dolphinscheduler.util.RedisUtil; +import org.openqa.selenium.*; +import org.openqa.selenium.interactions.Actions; +import org.openqa.selenium.support.ui.ExpectedConditions; +import org.openqa.selenium.support.ui.WebDriverWait; +import redis.clients.jedis.Jedis; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * Encapsulate the most basic operations on the interface in the browser + */ +public class BrowserCommon { + /** + * driver + */ + protected WebDriver driver; + + /** + * actions + */ + protected Actions actions; + + /** + * Javascript + */ + protected JavascriptExecutor je; + + /** + * Show wait + */ + protected WebDriverWait wait; + + /** + * Jedis + */ + protected Jedis jedis; + + /** + * redis util + */ + protected RedisUtil redisUtil; + + /** + * @param driver driver + */ + public BrowserCommon(WebDriver driver) { + this.driver = driver; + this.actions = new Actions(driver); + this.je = ((JavascriptExecutor) driver); + // show wait timeout + long timeout = Long.valueOf(PropertiesReader.getKey("driver.timeouts.webDriverWait")); + wait = new WebDriverWait(driver, timeout); + } + + /** + * @param driver driver + * @param jedis jedis + */ + public BrowserCommon(WebDriver driver, Jedis jedis) { + this.driver = driver; + this.actions = new Actions(driver); + this.je = ((JavascriptExecutor) driver); + // show wait timeout + long timeout = Long.valueOf(PropertiesReader.getKey("driver.timeouts.webDriverWait")); + wait = new WebDriverWait(driver, timeout); + this.jedis = jedis; + } + + /** + * @param driver driver + * @param redisUtil redisUtil + */ + public BrowserCommon(WebDriver driver, RedisUtil redisUtil) { + this.driver = driver; + this.actions = new Actions(driver); + this.je = ((JavascriptExecutor) driver); + // show wait timeout + long timeout = 
Long.valueOf(PropertiesReader.getKey("driver.timeouts.webDriverWait")); + wait = new WebDriverWait(driver, timeout); + } + + + /** + * Get WebElement element object through element positioning + * + * @param locator By + * @return WebElement + */ + + public WebElement locateElement(By locator) { + return wait.until(ExpectedConditions.presenceOfElementLocated(locator)); + } + + /** + * Click button element + * @param locator By + * @return clickButton + */ + public WebElement clickButton(By locator) { + WebElement buttonElement = locateElement(locator); + wait.until(ExpectedConditions.elementToBeClickable(locator)); + ExpectedConditions.elementToBeClickable(locator); + buttonElement.click(); + return buttonElement; + } + + /** + * Click element + * + * @param locator By + * @return inputElement + */ + public WebElement clickElement(By locator) { + WebElement clickElement = locateElement(locator); + clickElement.click(); + return clickElement; + } + + /** + * input element + * + * @param locator By + * @param content Input content + * @return inputElement + */ + public WebElement sendInput(By locator, String content) { + WebElement inputElement = locateElement(locator); + inputElement.clear(); + inputElement.sendKeys(content); + return inputElement; + } + /** + * clear element + * + * @param locator By + */ + public WebElement clearInput(By locator) { + WebElement clearElement = locateElement(locator); + clearElement.click(); + clearElement.sendKeys(Keys.chord(Keys.CONTROL, "a")); + clearElement.sendKeys(Keys.BACK_SPACE); + return clearElement; + } + + /** + * input codeMirror + * + * @param codeMirrorLocator By codeMirror + * @param codeMirrorLineLocator By codeMirrorLine + + */ + public void inputCodeMirror(By codeMirrorLocator,By codeMirrorLineLocator,String content) { + WebElement codeMirrorElement = locateElement(codeMirrorLocator); + WebElement codeMirrorLineElement = locateElement(codeMirrorLineLocator); + codeMirrorElement.click(); + 
codeMirrorLineElement.sendKeys(content); + } + + /** + * move to element + * @param locator BY + * @return actions + */ + public Actions moveToElement(By locator){ + return actions.moveToElement(locateElement(locator)); + } + + /** + * mouse drag element + * + * @param source_locator BY + * @param target_locator BY + */ + public void dragAndDrop(By source_locator, By target_locator){ + WebElement sourceElement = locateElement(source_locator); + WebElement targetElement = locateElement(target_locator); + actions.dragAndDrop(sourceElement, targetElement).perform(); + actions.release(); + } + + public void moveToDragElement(By target_locator, int X, int Y){ + WebElement targetElement = locateElement(target_locator); + actions.dragAndDropBy(targetElement, X, Y).perform(); + actions.release(); + } + + + /** + * jump page + * + * @param url url + */ + public void jumpPage(String url) { + driver.get(url); + } + + + /** + * Find the next handle, recommended for two windows + * + * @return driver + */ + public WebDriver switchNextHandle() { + // Current window handle + String currentHandle = driver.getWindowHandle(); + // All window handle + Set allHandles = driver.getWindowHandles(); + // Finding the next handle + for (String handle : allHandles) { + if (!handle.equals(currentHandle)) { + return driver.switchTo().window(handle); + } + } + return driver; + } + + /** + * Multi-window switch handle, according to the handle number passed in + * + * @param num Number starts from 1 + * @return driver + */ + public WebDriver switchHandle(int num) { + // current handle + String currentHandle = driver.getWindowHandle(); + // all handle + Set allHandlesSet = driver.getWindowHandles(); + List allHandlesList = new ArrayList<>(allHandlesSet); + // switch handle + return driver.switchTo().window(allHandlesList.get(num - 1)); + } + + /** + * Switch frame structure + * + * @param locator frame + * @return driver + */ + public WebDriver switchFrame(By locator) { + return 
driver.switchTo().frame(locateElement(locator)); + } + + /** + * Switch parent frame structure + * + * @return driver + */ + public WebDriver switchParentFrame() { + return driver.switchTo().parentFrame(); + } + + /** + * Switch out of frame structure + * + * @return driver + */ + public WebDriver switchOutOfFrame() { + return driver.switchTo().defaultContent(); + } + + + /** + * execute JS Script + * + * @param script JS script + */ + public void executeScript(String script) { + je.executeScript(script); + } + + /** + * execute JS Script + * + * @param script JS script + * @param args Object element array + */ + public void executeScript(String script, Object... args) { + je.executeScript(script, args); + } + + /** + * Page slide to top + */ + public void scrollToTop() { + executeScript("window.scrollTo(0, 0)"); + } + + /** + * Page slides to the bottom + */ + public void scrollToBottom() { + executeScript("window.scrollTo(0, document.body.scrollHeight)"); + } + + public void scrollToElementBottom() { + + WebElement webElement = driver.findElement(By.xpath("/html/body/div[4]/div/div[2]/div/div[2]/div/div[7]/div[3]")); + ((JavascriptExecutor) driver).executeScript("arguments[0].scrollIntoView(true);", webElement); + } + + + /** + * Page swipe makes the top of the element align with the top of the page + * + * @param by Elements that need to be aligned with the top of the page + */ + public void scrollElementTopToTop(By by) { + executeScript("arguments[0].scrollIntoView(true);", driver.findElement(by)); + } + + /** + * Page sliding makes the bottom of the element aligned with the bottom of the page + * + * @param by Elements that need to be aligned with the bottom of the page + */ + public void scrollElementBottomToBottom(By by) { + executeScript("arguments[0].scrollIntoView(false);", driver.findElement(by)); + } + + + /** + * Determine if the current page title is the specified title + * + * @param title title + * @return boolean + */ + + public boolean 
ifTitleIs(String title) { + return wait.until(ExpectedConditions.titleIs(title)); + } + + /** + * Determines whether the current page title contains the specified text + * + * @param text text + * @return boolean + */ + public boolean ifTitleContains(String text) { + return wait.until(ExpectedConditions.titleContains(text)); + } + + /** + * Determines whether the text value of an element on the current page is the specified text + * + * @param locator By + * @param text text + * @return boolean + */ + public boolean ifTextExists(By locator, String text) { + return wait.until(ExpectedConditions.textToBePresentInElementLocated(locator, text)); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/common/PageCommon.java b/e2e/src/test/java/org/apache/dolphinscheduler/common/PageCommon.java new file mode 100644 index 0000000000..72dc03a77c --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/common/PageCommon.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.common; + +import org.apache.dolphinscheduler.util.RedisUtil; +import org.openqa.selenium.WebDriver; +import redis.clients.jedis.Jedis; + + +/** + * Encapsulate the operation methods that can be used for each module page + */ +public class PageCommon extends BrowserCommon { + /** + * @param driver driver + */ + public PageCommon(WebDriver driver) { + super(driver); + } + + /** + * @param driver driver + * @param jedis jedis + */ + public PageCommon(WebDriver driver, Jedis jedis) { + super(driver, jedis); + } + + /** + * @param driver driver + * @param redisUtil redisUtil + */ + public PageCommon(WebDriver driver, RedisUtil redisUtil) { + super(driver, redisUtil); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java new file mode 100644 index 0000000000..532849565c --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/LoginData.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.data; + +import org.apache.dolphinscheduler.util.PropertiesReader; + +/** + * Landing page object: data + */ + +public class LoginData { + /** + * Login URL + */ + public static final String URL = PropertiesReader.getKey("LOGIN_URL"); + + + /** + * Login username + */ + public static final String USER = PropertiesReader.getKey("USER_NAME"); + + /** + * Login password + */ + public static final String PASSWORD = PropertiesReader.getKey("PASSWORD"); + + public static final String TENANT = "Tenant Manage - DolphinScheduler"; +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatProjectData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatProjectData.java new file mode 100644 index 0000000000..8f6c9c8e97 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatProjectData.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.data.project; + +public class CreatProjectData { + // create project name + public static final String PROJECT_NAME = "selenium_project"; + // create project description + public static final String DESCRIPTION = "test create project description"; + // project page title + public static final String PROJECT_TITLE = "项目 - DolphinScheduler"; +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java new file mode 100644 index 0000000000..765a54f406 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/project/CreatWorkflowData.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.data.project; + +public class CreatWorkflowData { + //input shell task name + public static final String SHELL_TASK_NAME = "shell task description test"; + + //input shell task description + public static final String SHELL_TASK_DESCRIPTION = "shell task description test"; + + //input timeout + public static final String INPUT_TIMEOUT = "60"; + + //input shell script + public static final String SHELL_SCRIPT = "echo 1111111"; + + //input custom parameters + public static final String INPUT_CUSTOM_PARAMETERS = "selenium_parameter"; + + //input custom parameters value + public static final String INPUT_CUSTOM_PARAMETERS_VALUE = "selenium_parameter_123"; + + //input add custom parameters + public static final String INPUT_ADD_CUSTOM_PARAMETERS = "selenium_parameter_delete"; + + //input add custom parameters value + public static final String INPUT_ADD_CUSTOM_PARAMETERS_VALUE = "selenium_parameter_delete_456"; + + //create workflow title + public static final String WORKFLOW_TITLE = "创建流程定义 - DolphinScheduler"; +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TenantManageData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TenantManageData.java new file mode 100644 index 0000000000..e6f6ee6b86 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/TenantManageData.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Tenant page object: data + */ +package org.apache.dolphinscheduler.data.security; + +import org.apache.dolphinscheduler.data.LoginData; + +public class TenantManageData { + /** + * Tenant URL + */ + public static final String TENANAT_URL = LoginData.URL + "/ui/#/security/tenant"; + + /** + * Tenant Code + */ + public static final String TENANAT_CODE = "dolphinscheduler_tenant_code2"; + + /** + * Tenant Name + */ + public static final String TENANAT_NAME = "dolphinscheduler_tenant_Name"; + + /** + * Queue + */ + public static final String QUEUE = "default"; + + /** + * Description + */ + public static final String DESCRIPTION = "creat tenant test"; + + public static final String TENANAT_MANAGE = "Tenant Manage - DolphinScheduler"; + + + +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java new file mode 100644 index 0000000000..03c985fd81 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/data/security/UserManageData.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.data.security; + +import org.apache.dolphinscheduler.data.LoginData; + +public class UserManageData { + public static final String USER_URL = LoginData.URL + "/ui/#/security/users"; + + public static final String USERNAME = "selenium111"; + + public static final String PASSWORD = "123456qwe"; + + public static final String EMAIL = "123456789@qq.com"; + + public static final String PHONE = "15811112222"; + + public static final String USER_MANAGE = "用户管理 - DolphinScheduler"; + +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/LoginLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/LoginLocator.java new file mode 100644 index 0000000000..32a82bbbc8 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/LoginLocator.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.locator; + +import org.openqa.selenium.By; + +/** + * Page object: element positioning + */ + +public class LoginLocator { + public static final By LOGIN_INPUT_USER = By.xpath("//input[@class='input-element suffix']"); + + public static final By LOGIN_INPUT_PASSWORD = By.xpath("//input[@class='input-element suffix']"); + + public static final By LOGIN_BUTTON = By.xpath("//button"); + + public static final By LOGIN_BUTTON_MOVE = By.xpath("//button[contains(.,' Loading...')]"); +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateProjectLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateProjectLocator.java new file mode 100644 index 0000000000..d2c0d8412c --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateProjectLocator.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.locator.project; + +import org.openqa.selenium.By; + +public class CreateProjectLocator { + //click project manage + public static final By PROJECT_MANAGE = By.xpath("//div[2]/div/a/span"); + + //click create project button + public static final By CREATE_PROJECT_BUTTON = By.xpath("//button/span"); + + //input project name + public static final By PROJECT_NAME = By.xpath("//div[2]/div/div/div[2]/div/input"); + + //input project description + public static final By PROJECT_DESCRIPTION = By.xpath("//textarea"); + + //submit button + public static final By SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span"); +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateWorkflowLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateWorkflowLocator.java new file mode 100644 index 0000000000..f063d6ef61 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/project/CreateWorkflowLocator.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.locator.project; + +import org.openqa.selenium.By; + +public class CreateWorkflowLocator { + // click project name + public static final By CLICK_PROJECT_NAME = By.xpath("//span/a"); + + // click workflow define + public static final By CLICK_WORKFLOW_DEFINE = By.xpath("//li/span"); + + // click create workflow button + public static final By CLICK_CREATE_WORKFLOW_BUTTON = By.xpath("//button/span"); + + //mouse down at shell + public static final By MOUSE_DOWN_AT_SHELL = By.xpath("//div[@id='SHELL']/div/div"); + + //mouse down at spark + public static final By MOUSE_DOWN_AT_SPARK = By.xpath("//div[5]/div/div"); + + //mouse move at DAG + public static final By MOUSE_MOVE_SHELL_AT_DAG = By.xpath("//div[@id='canvas']"); + + //input shell task _name + public static final By INPUT_SHELL_TASK_NAME = By.xpath("//input"); + + //click stop run type + public static final By CLICK_STOP_RUN_TYPE = By.xpath("//label[2]/span/input"); + + //click normal run type + public static final By CLICK_NORMAL_RUN_TYPE = By.xpath("//span/input"); + + //input shell task description + public static final By INPUT_SHELL_TASK_DESCRIPTION = By.xpath("//label/div/textarea"); + + //click task priority + public static final By CLICK_TASK_PRIORITY = By.xpath("//span/div/div/div/div/div"); + + //select task priority + public static final By SELECT_TASK_PRIORITY = By.xpath("//li[2]/li/span"); + + //click work group + public static final By CLICK_WORK_GROUP = By.xpath("//div/div/input"); + + //select work group + public static final By SELECT_WORK_GROUP = By.xpath("//div[4]/div[2]/div/div[1]/div/input"); + + //select number of failed retries + public static final By SELECT_FAIL_RETRIES_NUMBER = By.xpath("//div[5]/div[2]/div[1]/div[1]/div/input"); + + //select failed retry interval + public static final By SELECT_FAIL_RETRIES_INTERVAL = By.xpath("//div[5]/div[2]/div[2]/div[1]/div/input"); + + //click timeout alarm + public static final By CLICK_TIMEOUT_ALARM = 
By.xpath("//label/div/span/span"); + + //select timeout fail + public static final By SELECT_TIMEOUT_FAIL = By.xpath("//div/div/label[2]/span/input"); + + //cancel timeout alarm + public static final By CANCEL_TIMEOUT_ALARM = By.xpath("//div/div/label/span/input"); + + //select timeout alarm + public static final By SELECT_TIMEOUT_ALARM = By.xpath("//div/div/label/span/input"); + + //input timeout + public static final By SELECT_TIMEOUT = By.xpath("//div[3]/div[2]/label/div/input"); + + //click codeMirror + public static final By CLICK_CODE_MIRROR = By.xpath("//div[5]/div/pre"); + + //input script + public static final By INPUT_SCRIPT = By.xpath("//div[2]/div/div/div/div/div/textarea"); + + //click custom parameters + public static final By CLICK_CUSTOM_PARAMETERS = By.xpath("//span/a/em"); + + //input custom parameters + public static final By INPUT_CUSTOM_PARAMETERS = By.xpath("//div[2]/div/div/div/div/div/input"); + + //input custom parameters value + public static final By INPUT_CUSTOM_PARAMETERS_VALUE = By.xpath("//div[2]/input"); + + //click add custom parameters + public static final By CLICK_ADD_CUSTOM_PARAMETERS = By.xpath("//span[2]/a/em"); + + //input add custom parameters + public static final By INPUT_ADD_CUSTOM_PARAMETERS = By.xpath("//div[2]/div/div/div/div[2]/div/input"); + + //input add custom parameters value + public static final By INPUT_ADD_CUSTOM_PARAMETERS_VALUE = By.xpath("//div[2]/div[2]/input"); + + //delete custom parameters + public static final By CLICK_DELETE_CUSTOM_PARAMETERS = By.xpath("//div[2]/span/a/em"); + + //click submit button + public static final By CLICK_SUBMIT_BUTTON = By.xpath("//button[2]/span"); +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java new file mode 100644 index 0000000000..7d9c8a57ce --- /dev/null +++ 
b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/TenantManageLocator.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.locator.security; + +import org.openqa.selenium.By; + +public class TenantManageLocator{ + public static final By CREATE_TENANT_BUTTON = By.xpath("//button[@class='ans-btn ans-btn-ghost ans-btn-small']"); + + public static final By TENANT_INPUT_CODE = By.xpath("//div[2]/div/div/div[2]/div/input"); + + public static final By TENANT_INPUT_NAME = By.xpath("//div[2]/div[2]/div/input"); + + public static final By QUEUE = By.xpath("//textarea"); + + public static final By DESCRIPTION = By.xpath("//textarea"); + + public static final By SUBMIT_BUTTON = By.xpath("//div[3]/button[2]/span"); +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java new file mode 100644 index 0000000000..0d84692cb2 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/locator/security/UserManageLocator.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.locator.security; + +import org.openqa.selenium.By; + +public class UserManageLocator { + + public static final By CLICK_USER_MANAGE = By.xpath("//div[3]/div/a/div/a/span"); + + public static final By CLICK_CREATE_USER_BUTTON = By.xpath("//span[contains(.,'创建用户')]"); + + public static final By INPUT_USERNAME = By.xpath("//div[2]/div/div/div[2]/div/input"); + + public static final By INPUT_PASSWORD = By.xpath("//div[2]/div[2]/div/input"); + + public static final By CLICK_TENANT = By.xpath("//div[3]/div[2]/div/div/div/input"); + + public static final By SELECT_TENANT = By.xpath("//div[3]/div[2]/div/div[2]/div/div/div/ul/li/span"); + + public static final By CLICK_QUEUE = By.xpath("//div[4]/div[2]/div/div/div/input"); + + public static final By SELECT_QUEUE = By.xpath("//div[4]/div[2]/div/div[2]/div/div/div/ul/li/span"); + + public static final By TENANT_INPUT_EMAIL = By.xpath("//div[5]/div[2]/div/input"); + + public static final By TENANT_INPUT_PHONE = By.xpath("//div[6]/div[2]/div/input"); + + public static final By SUBMIT = By.xpath("//div[3]/button[2]/span"); + +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/LoginPage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/LoginPage.java new file 
mode 100644 index 0000000000..cd6b318651 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/LoginPage.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.page; + +import org.apache.dolphinscheduler.common.PageCommon; +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.LoginData; +import org.apache.dolphinscheduler.locator.LoginLocator; +import org.apache.dolphinscheduler.util.RedisUtil; +import org.openqa.selenium.WebDriver; + + + +public class LoginPage extends PageCommon { + /** + * Unique constructor + * @param driver driver + */ + public LoginPage(WebDriver driver) { + super(driver); + } + + + /** + * jump page + */ + public void jumpPage() { + System.out.println("jump login page"); + super.jumpPage(LoginData.URL); + } + + /** + * login + * + * @return Whether to enter the specified page after searching + */ + public boolean login() throws InterruptedException { + System.out.println("LoginPage"); + // login data + sendInput(LoginLocator.LOGIN_INPUT_USER, LoginData.USER); + sendInput(LoginLocator.LOGIN_INPUT_PASSWORD, LoginData.PASSWORD); + + // click login button + 
clickButton(LoginLocator.LOGIN_BUTTON); + + moveToElement(LoginLocator.LOGIN_BUTTON_MOVE); + + // Whether to enter the specified page after login + return ifTitleContains(LoginData.TENANT); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateProjectPage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateProjectPage.java new file mode 100644 index 0000000000..8dd1010a82 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateProjectPage.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.page.project; + +import org.apache.dolphinscheduler.common.PageCommon; +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.project.CreatProjectData; +import org.apache.dolphinscheduler.locator.project.CreateProjectLocator; +import org.openqa.selenium.WebDriver; + +public class CreateProjectPage extends PageCommon { + public CreateProjectPage(WebDriver driver) { + super(driver); + } + /** + * jump page + */ + public void jumpProjectManagePage() throws InterruptedException { + Thread.sleep(TestConstant.ONE_THOUSANG); + clickElement(CreateProjectLocator.PROJECT_MANAGE); + Thread.sleep(TestConstant.ONE_THOUSANG); + } + + /** + * creatTenant + * + * @return Whether to enter the specified page after creat tenant + */ + public boolean createProject() throws InterruptedException { + //click create project + clickElement(CreateProjectLocator.CREATE_PROJECT_BUTTON); + Thread.sleep(TestConstant.ONE_THOUSANG); + + // input create project data + sendInput(CreateProjectLocator.PROJECT_NAME, CreatProjectData.PROJECT_NAME); + sendInput(CreateProjectLocator.PROJECT_DESCRIPTION, CreatProjectData.DESCRIPTION); + + // click submit button + clickButton(CreateProjectLocator.SUBMIT_BUTTON); + + // Whether to enter the specified page after submit + return ifTitleContains(CreatProjectData.PROJECT_TITLE); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateWorkflowPage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateWorkflowPage.java new file mode 100644 index 0000000000..aeec83cd19 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/project/CreateWorkflowPage.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.page.project; + +import org.apache.dolphinscheduler.common.PageCommon; +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.project.CreatWorkflowData; +import org.apache.dolphinscheduler.locator.project.CreateWorkflowLocator; +import org.openqa.selenium.WebDriver; + +public class CreateWorkflowPage extends PageCommon { + public CreateWorkflowPage(WebDriver driver) { + super(driver); + } + /** + * jump page + */ + + public boolean createWorkflow() throws InterruptedException { + // click project name + clickElement(CreateWorkflowLocator.CLICK_PROJECT_NAME); + Thread.sleep(TestConstant.ONE_THOUSANG); + + + // click workflow define + clickElement(CreateWorkflowLocator.CLICK_WORKFLOW_DEFINE); + Thread.sleep(TestConstant.ONE_THOUSANG); + + // click create workflow button + clickElement(CreateWorkflowLocator.CLICK_CREATE_WORKFLOW_BUTTON); + Thread.sleep(TestConstant.ONE_THOUSANG); + + //drag shell_task + dragAndDrop(CreateWorkflowLocator.MOUSE_DOWN_AT_SHELL,CreateWorkflowLocator.MOUSE_MOVE_SHELL_AT_DAG); + + //input shell task _name + sendInput(CreateWorkflowLocator.INPUT_SHELL_TASK_NAME , CreatWorkflowData.SHELL_TASK_NAME); + + //click stop run type + clickElement(CreateWorkflowLocator.CLICK_STOP_RUN_TYPE); + + //click normal run type + clickElement(CreateWorkflowLocator.CLICK_NORMAL_RUN_TYPE); + + //input 
shell task description + sendInput(CreateWorkflowLocator.INPUT_SHELL_TASK_DESCRIPTION , CreatWorkflowData.SHELL_TASK_DESCRIPTION); + + //select task priority + clickElement(CreateWorkflowLocator.CLICK_TASK_PRIORITY); + clickElement(CreateWorkflowLocator.SELECT_TASK_PRIORITY); + + //select work group + clickElement(CreateWorkflowLocator.CLICK_WORK_GROUP); + clickElement(CreateWorkflowLocator.SELECT_WORK_GROUP); + + //select number of failed retries + clickElement(CreateWorkflowLocator.SELECT_FAIL_RETRIES_NUMBER); + + //select failed retry interval + clickElement(CreateWorkflowLocator.SELECT_FAIL_RETRIES_INTERVAL); + + + //click timeout alarm + clickElement(CreateWorkflowLocator.CLICK_TIMEOUT_ALARM); + + + //select timeout fail + clickElement(CreateWorkflowLocator.SELECT_TIMEOUT_FAIL); + + + //cancel timeout alarm + clickElement(CreateWorkflowLocator.CANCEL_TIMEOUT_ALARM); + + + //select timeout alarm + clickElement(CreateWorkflowLocator.SELECT_TIMEOUT_ALARM); + + //clear timeout + clearInput(CreateWorkflowLocator.SELECT_TIMEOUT); + clearInput(CreateWorkflowLocator.SELECT_TIMEOUT); + + //input timeout + sendInput(CreateWorkflowLocator.SELECT_TIMEOUT,CreatWorkflowData.INPUT_TIMEOUT); + + //click codeMirror and input script + inputCodeMirror(CreateWorkflowLocator.CLICK_CODE_MIRROR, CreateWorkflowLocator.INPUT_SCRIPT,CreatWorkflowData.SHELL_SCRIPT); + scrollToElementBottom(); + Thread.sleep(TestConstant.ONE_THOUSANG); + + //click custom parameters + clickElement(CreateWorkflowLocator.CLICK_CUSTOM_PARAMETERS); + + //input custom parameters + sendInput(CreateWorkflowLocator.INPUT_CUSTOM_PARAMETERS, CreatWorkflowData.INPUT_CUSTOM_PARAMETERS); + + //input custom parameters value + sendInput(CreateWorkflowLocator.INPUT_CUSTOM_PARAMETERS_VALUE, CreatWorkflowData.INPUT_CUSTOM_PARAMETERS_VALUE); + + //click add custom parameters + clickElement(CreateWorkflowLocator.CLICK_ADD_CUSTOM_PARAMETERS); + + scrollToElementBottom(); + Thread.sleep(TestConstant.ONE_THOUSANG); + + //input 
add custom parameters + sendInput(CreateWorkflowLocator.INPUT_ADD_CUSTOM_PARAMETERS,CreatWorkflowData.INPUT_ADD_CUSTOM_PARAMETERS); + + //input add custom parameters value + sendInput(CreateWorkflowLocator.INPUT_ADD_CUSTOM_PARAMETERS_VALUE,CreatWorkflowData.INPUT_ADD_CUSTOM_PARAMETERS_VALUE); + + //click delete custom parameters + clickElement(CreateWorkflowLocator.CLICK_DELETE_CUSTOM_PARAMETERS); + Thread.sleep(TestConstant.ONE_THOUSANG); + + //click submit button + clickElement(CreateWorkflowLocator.CLICK_SUBMIT_BUTTON); + Thread.sleep(TestConstant.ONE_THOUSANG); + + moveToDragElement(CreateWorkflowLocator.MOUSE_MOVE_SHELL_AT_DAG,-300,-100); + + return ifTitleContains(CreatWorkflowData.WORKFLOW_TITLE); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java new file mode 100644 index 0000000000..4c88f6575e --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/TenantManagePage.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.page.security; + +import org.apache.dolphinscheduler.common.PageCommon; +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.LoginData; +import org.apache.dolphinscheduler.data.security.TenantManageData; +import org.apache.dolphinscheduler.locator.LoginLocator; +import org.apache.dolphinscheduler.locator.security.TenantManageLocator; +import org.apache.dolphinscheduler.util.RedisUtil; +import org.openqa.selenium.WebDriver; + +public class TenantManagePage extends PageCommon { + /** + * Unique constructor + * @param driver driver + */ + public TenantManagePage(WebDriver driver) { + super(driver); + } + + + /** + * jump page + */ + public void jumpPage() { + System.out.println("jump tenant page"); + super.jumpPage(TenantManageData.TENANAT_URL); + } + + /** + * createTenant + * + * @return Whether to enter the specified page after creat tenant + */ + public boolean createTenant() throws InterruptedException { + Thread.sleep(TestConstant.ONE_THOUSANG); + + //create tenant + clickButton(TenantManageLocator.CREATE_TENANT_BUTTON); + + // tenant data + sendInput(TenantManageLocator.TENANT_INPUT_CODE, TenantManageData.TENANAT_CODE); + sendInput(TenantManageLocator.TENANT_INPUT_NAME, TenantManageData.TENANAT_NAME); + sendInput(TenantManageLocator.QUEUE, TenantManageData.QUEUE); + sendInput(TenantManageLocator.DESCRIPTION, TenantManageData.DESCRIPTION); + + // click button + clickButton(TenantManageLocator.SUBMIT_BUTTON); + + // Whether to enter the specified page after submit + return ifTitleContains(TenantManageData.TENANAT_MANAGE); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java new file mode 100644 index 0000000000..3e2be2d126 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/page/security/UserManagePage.java @@ -0,0 +1,67 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.dolphinscheduler.page.security; + +import org.apache.dolphinscheduler.common.PageCommon; +import org.apache.dolphinscheduler.constant.TestConstant; +import org.apache.dolphinscheduler.data.security.UserManageData; +import org.apache.dolphinscheduler.locator.security.UserManageLocator; +import org.openqa.selenium.WebDriver; + +public class UserManagePage extends PageCommon { + public UserManagePage(WebDriver driver) { + super(driver); + } + /** + * jump page + */ + public void jumpPage() { + System.out.println("jump tenant page"); + super.jumpPage(UserManageData.USER_URL); + } + + /** + * creatTenant + * + * @return Whether to enter the specified page after creat tenant + */ + public boolean createUser() throws InterruptedException { + Thread.sleep(TestConstant.ONE_THOUSANG); + // click user manage + clickElement(UserManageLocator.CLICK_USER_MANAGE); + Thread.sleep(TestConstant.ONE_THOUSANG); + + // click create user button + clickButton(UserManageLocator.CLICK_CREATE_USER_BUTTON); + + // input user data + sendInput(UserManageLocator.INPUT_USERNAME, UserManageData.USERNAME); + sendInput(UserManageLocator.INPUT_PASSWORD, UserManageData.PASSWORD); + 
clickButton(UserManageLocator.CLICK_TENANT); + clickButton(UserManageLocator.SELECT_TENANT); + clickButton(UserManageLocator.CLICK_QUEUE); + clickButton(UserManageLocator.SELECT_QUEUE); + sendInput(UserManageLocator.TENANT_INPUT_EMAIL, UserManageData.EMAIL); + sendInput(UserManageLocator.TENANT_INPUT_PHONE, UserManageData.PHONE); + + // click button + clickButton(UserManageLocator.SUBMIT); + + // Whether to enter the specified page after submit + return ifTitleContains(UserManageData.USER_MANAGE); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/LoginTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/LoginTest.java new file mode 100644 index 0000000000..bd3c31d38b --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/LoginTest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.testcase; + +import org.apache.dolphinscheduler.base.BaseTest; +import org.apache.dolphinscheduler.page.LoginPage; +import org.testng.annotations.Test; + + +public class LoginTest extends BaseTest { + private LoginPage loginPage; + @Test(description = "LoginTest", priority = 1) + public void testLogin() throws InterruptedException { + // init login page + loginPage = new LoginPage(driver); + + // enter login page + loginPage.jumpPage(); + + //assert login page + assert loginPage.login(); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateProjectTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateProjectTest.java new file mode 100644 index 0000000000..8abd09d37c --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateProjectTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.testcase.project; + +import org.apache.dolphinscheduler.base.BaseTest; +import org.apache.dolphinscheduler.page.project.CreateProjectPage; +import org.testng.annotations.Test; + +public class CreateProjectTest extends BaseTest { + private CreateProjectPage createProjectPage; + + @Test(description = "TenantTest", priority = 1) + public void testUserManage() throws InterruptedException { + createProjectPage = new CreateProjectPage(driver); + // enter user manage page + createProjectPage.jumpProjectManagePage(); + //assert user manage page + assert createProjectPage.createProject(); + } + +} + diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateWorkflowTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateWorkflowTest.java new file mode 100644 index 0000000000..6ac13f8124 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/project/CreateWorkflowTest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.testcase.project; + +import org.apache.dolphinscheduler.base.BaseTest; +import org.apache.dolphinscheduler.page.project.CreateProjectPage; +import org.apache.dolphinscheduler.page.project.CreateWorkflowPage; +import org.testng.annotations.Test; + +public class CreateWorkflowTest extends BaseTest { + private CreateWorkflowPage createWorkflowPage; + private CreateProjectPage createProjectPage; + + + @Test(description = "CreateWorkflowTest", priority = 1) + public void CreateWorkflowTest() throws InterruptedException { + createProjectPage = new CreateProjectPage(driver); + createProjectPage.jumpProjectManagePage(); + createWorkflowPage = new CreateWorkflowPage(driver); + //assert create workflow + assert createWorkflowPage.createWorkflow(); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/TenantManageTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/TenantManageTest.java new file mode 100644 index 0000000000..7124b4e094 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/TenantManageTest.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.testcase.security; + +import org.apache.dolphinscheduler.base.BaseTest; +import org.apache.dolphinscheduler.page.security.TenantManagePage; +import org.testng.annotations.Test; + +public class TenantManageTest extends BaseTest { + private TenantManagePage tenantManagePage; + + @Test(description = "TenantTest", priority = 1) + public void testTenantManage() throws InterruptedException { + tenantManagePage = new TenantManagePage(driver); + // enter tenant manage page + tenantManagePage.jumpPage(); + //assert tenant manage page + assert tenantManagePage.createTenant(); + } +} diff --git a/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/UserManageTest.java b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/UserManageTest.java new file mode 100644 index 0000000000..834ebdbbf0 --- /dev/null +++ b/e2e/src/test/java/org/apache/dolphinscheduler/testcase/security/UserManageTest.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.dolphinscheduler.testcase.security; + +import org.apache.dolphinscheduler.base.BaseTest; +import org.apache.dolphinscheduler.page.security.UserManagePage; +import org.testng.annotations.Test; + +public class UserManageTest extends BaseTest { + private UserManagePage userManagePage; + + @Test(description = "TenantTest", priority = 1) + public void testUserManage() throws InterruptedException { + userManagePage = new UserManagePage(driver); + // enter user manage page + userManagePage.jumpPage(); + //assert user manage page + assert userManagePage.createUser(); + } +} diff --git a/e2e/src/test/resources/config/config.properties b/e2e/src/test/resources/config/config.properties new file mode 100644 index 0000000000..6a01234d01 --- /dev/null +++ b/e2e/src/test/resources/config/config.properties @@ -0,0 +1,58 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +############### project ############## +# login url +LOGIN_URL=http://127.0.0.1:8888/dolphinscheduler/ +#login username +USER_NAME=admin +#login password +PASSWORD=dolphinscheduler123 + +############### web driver ############## +# driver path +driver.chromeDriver=/usr/local/bin/chromedriver +# implicitly wait(s) +driver.timeouts.implicitlyWait=10 +# show wait(s) +driver.timeouts.webDriverWait=10 +# page load timeout(s) +driver.timeouts.pageLoadTimeout=10 +# JS wait timeouts(s) +driver.timeouts.setScriptTimeout=10 + + +############### redis ############## +# redis ip +redis.ip=127.0.0.1 +# redis port +redis.port=6379 +# redis password +redis.pwd= +############### redis pool ############## +# jedis expireTime(s) +jedis.expireTime=3600 +# jedis maxTotal +jedis.pool.maxTotal=3000 +# jedis maxIdle +jedis.pool.maxIdle=1000 +# jedis maxWaitMillis +jedis.pool.maxWaitMillis=10000 +# jedis Whether to perform a valid check when calling the borrowObject method +jedis.pool.testOnBorrow=true +# jedis Whether to perform a valid check when calling the returnObject method +jedis.pool.testOnReturn=true diff --git a/e2e/suite.xml b/e2e/suite.xml new file mode 100644 index 0000000000..d9d7ae6845 --- /dev/null +++ b/e2e/suite.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/e2e/testng.xml b/e2e/testng.xml new file mode 100644 index 0000000000..757ffab248 --- /dev/null +++ b/e2e/testng.xml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/install.sh b/install.sh index 354cdd2be3..8735543c47 100644 --- a/install.sh +++ b/install.sh @@ -422,17 +422,8 @@ fi done -# 3,stop server -echo "3,stop server" -sh ${workDir}/script/stop-all.sh - -# 4,delete zk node -echo "4,delete zk node" - -sh ${workDir}/script/remove-zk-node.sh $zkRoot - -# 5,scp resources -echo "5,scp resources" +# 3,scp resources +echo "3,scp resources" sh ${workDir}/script/scp-hosts.sh if [ $? 
-eq 0 ] then @@ -442,6 +433,18 @@ else exit -1 fi + +# 4,stop server +echo "4,stop server" +sh ${workDir}/script/stop-all.sh + + +# 5,delete zk node +echo "5,delete zk node" + +sh ${workDir}/script/remove-zk-node.sh $zkRoot + + # 6,startup echo "6,startup" sh ${workDir}/script/start-all.sh \ No newline at end of file diff --git a/pom.xml b/pom.xml index 53184e7412..8a1e9b98f2 100644 --- a/pom.xml +++ b/pom.xml @@ -229,7 +229,12 @@ org.apache.dolphinscheduler - dolphinscheduler-rpc + dolphinscheduler-remote + ${project.version} + + + org.apache.dolphinscheduler + dolphinscheduler-service ${project.version} @@ -237,7 +242,6 @@ dolphinscheduler-alert ${project.version} - org.apache.curator curator-framework @@ -680,8 +684,10 @@ **/common/threadutils/*.java **/common/graph/*.java **/common/queue/*.java + **/common/task/SqoopParameterEntityTest.java **/api/utils/CheckUtilsTest.java **/api/utils/FileUtilsTest.java + **/api/utils/FourLetterWordTest.java **/api/utils/exportprocess/DataSourceParamTest.java **/api/utils/exportprocess/DependentParamTest.java **/api/enums/*.java @@ -702,6 +708,8 @@ **/api/service/BaseDAGServiceTest.java **/api/service/LoggerServiceTest.java **/api/service/DataAnalysisServiceTest.java + **/api/service/ProcessInstanceServiceTest.java + **/api/service/TaskInstanceServiceTest.java **/alert/utils/ExcelUtilsTest.java **/alert/utils/FuncUtilsTest.java **/alert/utils/JSONUtilsTest.java @@ -714,10 +722,13 @@ **/dao/mapper/AlertGroupMapperTest.java **/dao/mapper/AlertMapperTest.java **/dao/mapper/CommandMapperTest.java + **/dao/entity/TaskInstanceTest.java **/dao/cron/CronUtilsTest.java + **/dao/utils/DagHelperTest.java **/alert/template/AlertTemplateFactoryTest.java **/alert/template/impl/DefaultHTMLTemplateTest.java **/server/worker/task/datax/DataxTaskTest.java + **/server/worker/task/sqoop/SqoopTaskTest.java **/server/utils/DataxUtilsTest.java @@ -773,9 +784,6 @@ **/dolphinscheduler-ui/src/view/common/outro.inc 
**/dolphinscheduler-ui/src/view/common/meta.inc **/dolphinscheduler-ui/src/combo/1.0.0/3rd.css - - **/dolphinscheduler-rpc/src/main/java/org/apache/dolphinscheduler/rpc/LogViewServiceGrpc.java - true @@ -862,8 +870,9 @@ dolphinscheduler-api dolphinscheduler-dao dolphinscheduler-alert - dolphinscheduler-rpc dolphinscheduler-dist + dolphinscheduler-remote + dolphinscheduler-service diff --git a/script/dolphinscheduler-daemon.sh b/script/dolphinscheduler-daemon.sh index d4db103fe1..3e7f10783a 100644 --- a/script/dolphinscheduler-daemon.sh +++ b/script/dolphinscheduler-daemon.sh @@ -44,7 +44,7 @@ export DOLPHINSCHEDULER_LOG_DIR=$DOLPHINSCHEDULER_HOME/logs export DOLPHINSCHEDULER_CONF_DIR=$DOLPHINSCHEDULER_HOME/conf export DOLPHINSCHEDULER_LIB_JARS=$DOLPHINSCHEDULER_HOME/lib/* -export DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms4g -Xss512k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" +export DOLPHINSCHEDULER_OPTS="-server -Xmx16g -Xms1g -Xss512k -XX:+DisableExplicitGC -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:LargePageSizeInBytes=128m -XX:+UseFastAccessorMethods -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70" export STOP_TIMEOUT=5 if [ ! 
-d "$DOLPHINSCHEDULER_LOG_DIR" ]; then @@ -69,10 +69,7 @@ elif [ "$command" = "alert-server" ]; then LOG_FILE="-Dserver=alert-server" CLASS=org.apache.dolphinscheduler.alert.AlertServer elif [ "$command" = "logger-server" ]; then - CLASS=org.apache.dolphinscheduler.server.rpc.LoggerServer -elif [ "$command" = "combined-server" ]; then - LOG_FILE="-Dlogging.config=classpath:combined_logback.xml -Dspring.profiles.active=api -Dserver.is-combined-server=true" - CLASS=org.apache.dolphinscheduler.api.CombinedApplicationServer + CLASS=org.apache.dolphinscheduler.server.log.LoggerServer else echo "Error: No command named \`$command' was found." exit 1 diff --git a/sql/dolphinscheduler-postgre.sql b/sql/dolphinscheduler-postgre.sql index c68fd17be1..88d62737de 100644 --- a/sql/dolphinscheduler-postgre.sql +++ b/sql/dolphinscheduler-postgre.sql @@ -574,6 +574,7 @@ CREATE TABLE t_ds_task_instance ( max_retry_times int DEFAULT NULL , task_instance_priority int DEFAULT NULL , worker_group_id int DEFAULT '-1' , + executor_id int DEFAULT NULL , PRIMARY KEY (id) ) ; diff --git a/sql/dolphinscheduler_mysql.sql b/sql/dolphinscheduler_mysql.sql index ea0f9cb022..68393702fc 100644 --- a/sql/dolphinscheduler_mysql.sql +++ b/sql/dolphinscheduler_mysql.sql @@ -616,6 +616,7 @@ CREATE TABLE `t_ds_task_instance` ( `max_retry_times` int(2) DEFAULT NULL COMMENT 'max retry times', `task_instance_priority` int(11) DEFAULT NULL COMMENT 'task instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest', `worker_group_id` int(11) DEFAULT '-1' COMMENT 'worker group id', + `executor_id` int(11) DEFAULT NULL COMMENT 'executor id', PRIMARY KEY (`id`), KEY `process_instance_id` (`process_instance_id`) USING BTREE, KEY `task_instance_index` (`process_definition_id`,`process_instance_id`) USING BTREE, diff --git a/sql/upgrade/1.2.2_schema/mysql/dolphinscheduler_ddl.sql b/sql/upgrade/1.2.2_schema/mysql/dolphinscheduler_ddl.sql index 9fe246a8c2..790a9a8ada 100644 --- 
a/sql/upgrade/1.2.2_schema/mysql/dolphinscheduler_ddl.sql +++ b/sql/upgrade/1.2.2_schema/mysql/dolphinscheduler_ddl.sql @@ -35,3 +35,23 @@ d// delimiter ; CALL uc_dolphin_T_t_ds_process_definition_A_modify_by; DROP PROCEDURE uc_dolphin_T_t_ds_process_definition_A_modify_by; + +-- uc_dolphin_T_t_ds_process_definition_A_modify_by +drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_executor_id; +delimiter d// +CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_executor_id() + BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_task_instance' + AND TABLE_SCHEMA=(SELECT DATABASE()) + AND COLUMN_NAME ='executor_id') + THEN + ALTER TABLE t_ds_task_instance ADD `executor_id` int(11) DEFAULT NULL COMMENT 'executor id'; + END IF; + END; + +d// + +delimiter ; +CALL uc_dolphin_T_t_ds_task_instance_A_executor_id; +DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_executor_id; \ No newline at end of file diff --git a/sql/upgrade/1.2.2_schema/postgresql/dolphinscheduler_ddl.sql b/sql/upgrade/1.2.2_schema/postgresql/dolphinscheduler_ddl.sql index 7fc12900e4..cbe7c22bbe 100644 --- a/sql/upgrade/1.2.2_schema/postgresql/dolphinscheduler_ddl.sql +++ b/sql/upgrade/1.2.2_schema/postgresql/dolphinscheduler_ddl.sql @@ -32,3 +32,20 @@ delimiter ; SELECT uc_dolphin_T_t_ds_process_definition_A_modify_by(); DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_process_definition_A_modify_by(); +-- uc_dolphin_T_t_ds_process_definition_A_modify_by +delimiter d// +CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_executor_id() RETURNS void AS $$ +BEGIN + IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS + WHERE TABLE_NAME='t_ds_task_instance' + AND COLUMN_NAME ='executor_id') + THEN + ALTER TABLE t_ds_task_instance ADD COLUMN executor_id int DEFAULT NULL; + END IF; +END; +$$ LANGUAGE plpgsql; +d// + +delimiter ; +SELECT uc_dolphin_T_t_ds_task_instance_A_executor_id(); +DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_executor_id();