From 2e4f9f19ceccdce67d87fbb9faee74fe091f2055 Mon Sep 17 00:00:00 2001
From: lgcareer <18610854716@163.com>
Date: Thu, 24 Sep 2020 15:28:02 +0800
Subject: [PATCH] [Fix-#3713][common]Fix that catfile method Stream not closed (#3810)

* [Bug-3713][HadoopUtils] catfile method Stream not closed (#3715)

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* dag add close button

* reset last version

* reset last version

* dag add close buttion

dag add close buttion

* update CLICK_SAVE_WORKFLOW_BUTTON xpath

* updae CLICK_SAVE_WORKFLOW_BUTTON xpath

* updae CLICK_SAVE_WORKFLOW_BUTTON xpath

* updae CLICK_SAVE_WORKFLOW_BUTTON xpath

* Update CreateWorkflowLocator.java

modify submit workflow button

* Update CreateWorkflowLocator.java

* Update CreateWorkflowLocator.java

modify CLICK_ADD_BUTTON

* Update CreateWorkflowLocator.java

delete print

* Update CreateWorkflowLocator.java

1

* Update CreateWorkflowLocator.java

1

* Setting '-XX:+DisableExplicitGC ' causes netty memory leaks

in addition update '- XX: largepagesizeinbytes = 128M' to '- XX: largepagesizeinbytes = 10M'

* Update dag.vue

* Update dag.vue

* Update dag.vue

* Update CreateWorkflowLocator.java

* Revert "Setting '-XX:+DisableExplicitGC ' causes netty memory leaks"

This reverts commit 3a2cba7a

* Setting '-XX:+DisableExplicitGC ' causes netty memory leaks

in addition update '- XX: largepagesizeinbytes = 128M' to '- XX: largepagesizeinbytes = 10M'

* Update dolphinscheduler-daemon.sh

* catfile method Stream not closed

* catfile method Stream not closed

Co-authored-by: dailidong
Co-authored-by: xingchun-chen <55787491+xingchun-chen@users.noreply.github.com>

* [Fix-#3713][common]Fix that catfile method Stream not closed

Co-authored-by: BoYiZhang <39816903+BoYiZhang@users.noreply.github.com>
Co-authored-by: dailidong
Co-authored-by: xingchun-chen <55787491+xingchun-chen@users.noreply.github.com>
---
 .../common/utils/HadoopUtils.java | 26 +++++++++----------
 1 file changed, 12 insertions(+), 14 deletions(-)

diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
index b4eebd6fc5..9f35f45b69 100644
--- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
+++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java
@@ -195,7 +195,7 @@ public class HadoopUtils implements Closeable {
          */
         String appUrl = "";
 
-        if (StringUtils.isEmpty(rmHaIds)){
+        if (StringUtils.isEmpty(rmHaIds)) {
             //single resourcemanager enabled
             appUrl = appAddress;
             yarnEnabled = true;
@@ -206,7 +206,7 @@
             logger.info("application url : {}", appUrl);
         }
 
-        if(StringUtils.isBlank(appUrl)){
+        if (StringUtils.isBlank(appUrl)) {
             throw new Exception("application url is blank");
         }
         return String.format(appUrl, applicationId);
@@ -226,11 +226,11 @@
             return new byte[0];
         }
 
-        FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
-        return IOUtils.toByteArray(fsDataInputStream);
+        try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) {
+            return IOUtils.toByteArray(fsDataInputStream);
+        }
     }
 
-
     /**
      * cat file on hdfs
      *
@@ -493,20 +493,19 @@ public class HadoopUtils implements Closeable {
         return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
     }
 
-
     /**
      * get hdfs file name
     *
-     * @param resourceType  resource type
-     * @param tenantCode  tenant code
-     * @param fileName  file name
+     * @param resourceType resource type
+     * @param tenantCode   tenant code
+     * @param fileName     file name
      * @return hdfs file name
      */
    public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
         if (fileName.startsWith("/")) {
-            fileName = fileName.replaceFirst("/","");
+            fileName = fileName.replaceFirst("/", "");
         }
-        return String.format("%s/%s", getHdfsDir(resourceType,tenantCode), fileName);
+        return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
     }
 
     /**
@@ -518,7 +517,7 @@ public class HadoopUtils implements Closeable {
      */
     public static String getHdfsResourceFileName(String tenantCode, String fileName) {
         if (fileName.startsWith("/")) {
-            fileName = fileName.replaceFirst("/","");
+            fileName = fileName.replaceFirst("/", "");
         }
         return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
     }
@@ -532,7 +531,7 @@ public class HadoopUtils implements Closeable {
      */
     public static String getHdfsUdfFileName(String tenantCode, String fileName) {
         if (fileName.startsWith("/")) {
-            fileName = fileName.replaceFirst("/","");
+            fileName = fileName.replaceFirst("/", "");
         }
         return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
     }
@@ -545,7 +544,6 @@ public class HadoopUtils implements Closeable {
         return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
     }
 
-
     /**
      * getAppAddress
     *
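
Note on the fix itself: the only behavioral change in this patch is in catFile, where the bare fs.open(...) call is wrapped in try-with-resources so the FSDataInputStream is closed even if IOUtils.toByteArray throws. A minimal, self-contained sketch of that pattern follows; the wrapper class, constructor, null/empty guard, and the exact method signature are illustrative assumptions rather than code taken verbatim from HadoopUtils, and IOUtils is assumed to be org.apache.commons.io.IOUtils as used by the hunk above.

    import java.io.IOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    /** Illustrative wrapper; HadoopUtils itself keeps the FileSystem in a field named fs. */
    public class CatFileSketch {

        private final FileSystem fs;

        public CatFileSketch(FileSystem fs) {
            this.fs = fs;
        }

        /**
         * Read a whole HDFS file into memory.
         * try-with-resources guarantees fsDataInputStream.close() runs on every path,
         * including when IOUtils.toByteArray throws an IOException.
         */
        public byte[] catFile(String hdfsFilePath) throws IOException {
            if (hdfsFilePath == null || hdfsFilePath.isEmpty()) {
                // guard roughly mirroring the "return new byte[0]" context shown in the hunk at line 226
                return new byte[0];
            }
            try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) {
                return IOUtils.toByteArray(fsDataInputStream);
            }
        }
    }

Before the patch, an exception inside toByteArray left the stream (and its underlying connection to the datanode) open; with try-with-resources the compiler emits the equivalent finally block automatically, which is what closes the stream on the failure path.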