
[Bug-3713][HadoopUtils] catfile method Stream not closed (#3715)

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* fix bug

Delete invalid field: executorcores

Modify verification prompt

* dag  add close button

* reset last version

* reset last version

* dag add close button

dag add close button

* update CLICK_SAVE_WORKFLOW_BUTTON xpath

* update CLICK_SAVE_WORKFLOW_BUTTON xpath

* update CLICK_SAVE_WORKFLOW_BUTTON xpath

* update CLICK_SAVE_WORKFLOW_BUTTON xpath

* Update CreateWorkflowLocator.java

modify submit workflow button

* Update CreateWorkflowLocator.java

* Update CreateWorkflowLocator.java

modify CLICK_ADD_BUTTON

* Update CreateWorkflowLocator.java

delete print

* Update CreateWorkflowLocator.java

1

* Update CreateWorkflowLocator.java

1

* Setting '-XX:+DisableExplicitGC' causes netty memory leaks

in addition

update '-XX:LargePageSizeInBytes=128M' to '-XX:LargePageSizeInBytes=10M'

* Update dag.vue

* Update dag.vue

* Update dag.vue

* Update CreateWorkflowLocator.java

* Revert "Setting '-XX:+DisableExplicitGC ' causes netty memory leaks"

This reverts commit 3a2cba7a

* Setting '-XX:+DisableExplicitGC' causes netty memory leaks

in addition

update '-XX:LargePageSizeInBytes=128M' to '-XX:LargePageSizeInBytes=10M' (see the sketch right after this commit message)

* Update dolphinscheduler-daemon.sh

* catfile method Stream not closed

* catfile method Stream not closed

Co-authored-by: dailidong <dailidong66@gmail.com>
Co-authored-by: xingchun-chen <55787491+xingchun-chen@users.noreply.github.com>
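
The '-XX:+DisableExplicitGC' item above concerns direct (off-heap) memory: when the direct-memory limit is reached, the JDK falls back to System.gc() to reclaim unreachable direct buffers, and Netty allocates such buffers heavily. The sketch below is only an illustration of that interaction and is not part of this commit; the class name, buffer sizes, and JVM options are chosen for the demo.

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

// Hypothetical demo (not part of this commit). Try, for example:
//   java -XX:MaxDirectMemorySize=64m DirectMemoryDemo                          (typically completes)
//   java -XX:MaxDirectMemorySize=64m -XX:+DisableExplicitGC DirectMemoryDemo   (prone to OOM)
// The second run tends to fail with "OutOfMemoryError: Direct buffer memory",
// because the JDK frees unreachable direct buffers via System.gc(), which
// -XX:+DisableExplicitGC turns into a no-op.
public class DirectMemoryDemo {
    public static void main(String[] args) {
        for (int round = 0; round < 10; round++) {
            List<ByteBuffer> buffers = new ArrayList<>();
            for (int i = 0; i < 32; i++) {
                buffers.add(ByteBuffer.allocateDirect(1024 * 1024)); // 1 MiB off-heap each
            }
            // The buffers become unreachable here, but their native memory is only
            // released after a GC lets their Cleaners run.
            buffers.clear();
        }
    }
}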
BoYiZhang, 4 years ago, committed via GitHub · commit 7af20ca3af
1 changed file: dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (45 changed lines)
@@ -14,26 +14,31 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.dolphinscheduler.common.utils;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import org.apache.commons.io.IOUtils;
+import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
+import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
@@ -43,7 +48,13 @@ import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
/**
* hadoop utils
@@ -102,7 +113,6 @@ public class HadoopUtils implements Closeable {
}
}
/**
* init hadoop configuration
*/
@@ -167,7 +177,6 @@ public class HadoopUtils implements Closeable {
fs = FileSystem.get(configuration);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
@@ -232,11 +241,11 @@
            return new byte[0];
        }
-        FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
-        return IOUtils.toByteArray(fsDataInputStream);
+        try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) {
+            return IOUtils.toByteArray(fsDataInputStream);
+        }
    }
/**
* cat file on hdfs
*
@@ -527,7 +536,6 @@ public class HadoopUtils implements Closeable {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
/**
* get hdfs file name
*
@@ -579,7 +587,6 @@ public class HadoopUtils implements Closeable {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
/**
* getAppAddress
*
@@ -610,7 +617,6 @@ public class HadoopUtils implements Closeable {
return start + activeRM + end;
}
@Override
public void close() throws IOException {
if (fs != null) {
@@ -623,7 +629,6 @@ public class HadoopUtils implements Closeable {
}
}
/**
* yarn ha admin utils
*/
@@ -669,7 +674,6 @@ public class HadoopUtils implements Closeable {
return null;
}
/**
* get ResourceManager state
*
@@ -694,4 +698,5 @@
}
}
}
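
The essence of the fix is the try-with-resources block in the hunk above: the FSDataInputStream returned by FileSystem.open() is now closed even if IOUtils.toByteArray() throws. Below is a standalone sketch of the same pattern; the class name and the sample path are hypothetical, only the resource handling mirrors the patched catFile method.

import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical standalone example (not the project's HadoopUtils class):
// demonstrates the try-with-resources pattern introduced by this commit.
public class HdfsCatExample {

    public static byte[] catFile(FileSystem fs, String hdfsFilePath) throws IOException {
        if (hdfsFilePath == null || hdfsFilePath.isEmpty()) {
            return new byte[0];
        }
        // The stream is closed automatically when the block exits,
        // whether toByteArray() succeeds or throws.
        try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
            return IOUtils.toByteArray(in);
        }
    }

    public static void main(String[] args) throws IOException {
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            byte[] content = catFile(fs, "/tmp/example.txt"); // hypothetical path
            System.out.println(new String(content));
        }
    }
}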
