From 05a0d3892a514a0ed2764d894272e35e606b1496 Mon Sep 17 00:00:00 2001
From: break60 <790061044@qq.com>
Date: Thu, 21 May 2020 18:55:57 +0800
Subject: [PATCH 1/3] Worker group adds check (#2787)

* Worker group adds check

* fix
---
 .../pages/dag/_source/formModel/formModel.vue | 17 +++++++++++++++--
 .../src/js/module/i18n/locale/en_US.js        |  3 ++-
 .../src/js/module/i18n/locale/zh_CN.js        |  3 ++-
 3 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue
index 459f9a8605..b079e941fe 100644
--- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue
+++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue
@@ -484,6 +484,16 @@
       }
       return true
     },
+    _verifWorkGroup() {
+      let item = this.store.state.security.workerGroupsListAll.find(item => {
+        return item.id == this.workerGroup;
+      });
+      if(item==undefined) {
+        this.$message.warning(`${i18n.$t('The Worker group no longer exists, please select the correct Worker group!')}`)
+        return false;
+      }
+      return true
+    },
     /**
      * Global verification procedure
      */
@@ -492,6 +502,10 @@
       if (!this._verifName()) {
         return
       }
+      // verif workGroup
+      if(!this._verifWorkGroup()) {
+        return
+      }
       // Verify task alarm parameters
       if (!this.$refs['timeout']._verification()) {
         return
       }
@@ -619,8 +633,7 @@
             break;
           }
         }
-
-        if(!hasMatch){
+        if(!hasMatch && o.workerGroupId!=undefined){
          this.workerGroup = 'default'
        } else {
          this.workerGroup = o.workerGroup
diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
index 678acb1283..fdb897fc9c 100755
--- a/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
+++ b/dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js
@@ -592,5 +592,6 @@
   'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
   'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
   'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
-  'Please delete all non-existent resources': 'Please delete all non-existent resources'
+  'Please delete all non-existent resources': 'Please delete all non-existent resources',
+  'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!'
 }
diff --git a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
index 628ee6151e..2cdf09db7b 100755
--- a/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
+++ b/dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js
@@ -592,5 +592,6 @@
   'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
   'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
   'Unauthorized or deleted resources': '未授权或已删除资源',
-  'Please delete all non-existent resources': '请删除所有未授权或已删除资源'
+  'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
+  'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!'
 }
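
Note on the change above: the new _verifWorkGroup() guard is simply a lookup of the currently selected worker group in the list loaded from the server, with a warning when that group has been deleted. Below is a minimal standalone sketch of the same check, with hypothetical sample data in place of the Vuex store (the real code reads this.store.state.security.workerGroupsListAll and raises the new i18n warning):

```
// Sketch of the existence check added in formModel.vue (sample data is hypothetical).
function workerGroupExists (workerGroupsListAll, selectedWorkerGroup) {
  // find() returns undefined when the selected group is no longer in the list,
  // which is the case the patch warns about before the task definition is saved.
  return workerGroupsListAll.find(item => item.id === selectedWorkerGroup) !== undefined
}

// Usage: the previously selected 'gpu' group was deleted on the server side.
const groups = [{ id: 'default' }, { id: 'cpu_intensive' }]
console.log(workerGroupExists(groups, 'default')) // true  -> validation passes
console.log(workerGroupExists(groups, 'gpu'))     // false -> show 'The Worker group no longer exists, ...'
```
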
From 51df429a803189118abc0e271efe41a60e67fc06 Mon Sep 17 00:00:00 2001
From: qiaozhanwei
Date: Thu, 21 May 2020 19:01:01 +0800
Subject: [PATCH 2/3] add not worker log and remove worker invalid property
 (#2784)

* add LoggerServerTest UT

* add LoggerServerTest UT

* add LoggerServerTest UT
  add RemoveTaskLogRequestCommandTest UT
  add RemoveTaskLogResponseCommandTest

* master select worker filter high load worker #2704

* master select worker filter high load worker #2704

* master select worker filter high load worker #2704

* master select worker filter high load worker #2704

* master select worker filter high load worker #2704

* master select worker filter high load worker #2704

* add not worker log and remove worker invalid property

Co-authored-by: qiaozhanwei
---
 .../server/master/dispatch/ExecutorDispatcher.java           | 5 ++++-
 .../server/master/processor/TaskAckProcessor.java            | 2 +-
 dolphinscheduler-server/src/main/resources/worker.properties | 3 ---
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java
index 605297aadf..34c8c8e285 100644
--- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java
+++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java
@@ -81,9 +81,12 @@ public class ExecutorDispatcher implements InitializingBean {
         /**
          * host select
          */
+
         Host host = hostManager.select(context);
         if (StringUtils.isEmpty(host.getAddress())) {
-            throw new ExecuteException(String.format("fail to execute : %s due to no worker ", context.getCommand()));
+            throw new ExecuteException(String.format("fail to execute : %s due to no suitable worker , " +
+                    "current task need to %s worker group execute",
+                    context.getCommand(),context.getWorkerGroup()));
         }
         context.setHost(host);
         executorManager.beforeExecute(context);
diff --git a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java
index 3460248dfb..7bc37590aa 100644
--- a/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java
+++ b/dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskAckProcessor.java
@@ -70,7 +70,7 @@ public class TaskAckProcessor implements NettyRequestProcessor {
     }

     /**
-     * task ack process
+     * task ack process
      * @param channel channel channel
      * @param command command TaskExecuteAckCommand
      */
diff --git a/dolphinscheduler-server/src/main/resources/worker.properties b/dolphinscheduler-server/src/main/resources/worker.properties
index 9bbf90102c..eb01bbb3ab 100644
--- a/dolphinscheduler-server/src/main/resources/worker.properties
+++ b/dolphinscheduler-server/src/main/resources/worker.properties
@@ -21,9 +21,6 @@
 # worker heartbeat interval
 #worker.heartbeat.interval=10

-# submit the number of tasks at a time TODO
-#worker.fetch.task.num = 3
-
 # only less than cpu avg load, worker server can work. default value -1: the number of cpu cores * 2
 #worker.max.cpuload.avg= -1
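
Note on the ExecutorDispatcher change above: when host selection returns an empty address, the master now fails the dispatch with a message that also names the worker group the task requires, so the user can tell which group has no live worker. Below is a minimal sketch of that select-or-fail flow, written in JavaScript to match the sketch after patch 1 (the registry object and function name are hypothetical; the real code is Java and delegates selection to hostManager.select(context)):

```
// Hypothetical sketch: pick a worker host for the requested worker group and
// fail with a message that names the group when no worker is registered for it.
function selectHost (workersByGroup, workerGroup, command) {
  const hosts = workersByGroup[workerGroup] || []
  if (hosts.length === 0) {
    // mirrors the new error text: the failure now says which worker group the task needs
    throw new Error(`fail to execute : ${command} due to no suitable worker, ` +
      `current task need to ${workerGroup} worker group execute`)
  }
  return hosts[0] // the real dispatcher chooses among hosts via its host manager, not simply the first entry
}

// Usage: only the 'default' group has a registered worker.
const registry = { 'default': ['192.168.1.10:1234'] }
console.log(selectHost(registry, 'default', 'TASK_EXECUTE_REQUEST')) // dispatches to 192.168.1.10:1234
try {
  selectHost(registry, 'gpu', 'TASK_EXECUTE_REQUEST')
} catch (e) {
  console.log(e.message) // tells the user the 'gpu' worker group has no suitable worker
}
```
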
From cada877079e7168d7b30c6f3333dfd23a92db37d Mon Sep 17 00:00:00 2001
From: zhangchunyang
Date: Thu, 21 May 2020 19:09:11 +0800
Subject: [PATCH 3/3] Modify the ambari-plugin readme for packing RPM packages
 and add a description of the replication plug-in directory (#2786)

---
 ambari_plugin/README.md | 20 +++++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/ambari_plugin/README.md b/ambari_plugin/README.md
index bd634d4a29..2905580431 100644
--- a/ambari_plugin/README.md
+++ b/ambari_plugin/README.md
@@ -9,11 +9,25 @@
 1. Prepare the RPM package

-   - Run the command ```mvn -U clean install rpm:attached-rpm -Prpmbuild -Dmaven.test.skip=true -X``` in the source directory dolphinscheduler-dist to generate it (under dolphinscheduler-dist/target/rpm/apache-dolphinscheduler-incubating/RPMS/noarch)
+   - Run the command ```mvn -U clean install -Prpmbuild -Dmaven.test.skip=true -X``` in the project root directory to generate it (under dolphinscheduler-dist/target/rpm/apache-dolphinscheduler-incubating/RPMS/noarch)

-2. Create the DS installation user -- permissions
+2. Create the DS installation user -- with read and write permissions on the installation directory (/opt/soft)

-3. Initialize the database information
+3. RPM installation
+
+   - Manual installation (recommended):
+     - Copy the prepared RPM package to every node of the cluster
+     - Run ```rpm -ivh apache-dolphinscheduler-incubating-xxx.noarch.rpm``` as the DS installation user
+   - Automatic installation through Ambari:
+     - Every node of the cluster needs a local yum repository configured
+     - Copy the prepared RPM package into the local yum repository of each node
+
+4. Copy the plugin directories
+
+   - Copy the directory ambari_plugin/common-services/DOLPHIN to ambari-server/resources/common-services/
+   - Copy the directory ambari_plugin/statcks/DOLPHIN to ambari-server/resources/stacks/HDP/2.6/services/ -- choose the stack version (2.6 here) according to your environment
+
+5. Initialize the database information

    ```
    -- Create the database for Dolphin Scheduler: dolphinscheduler