@@ -16,6 +16,9 @@
  */
 package org.apache.dolphinscheduler.common.utils;
 
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
 import org.apache.dolphinscheduler.common.Constants;
 import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
 import org.apache.dolphinscheduler.common.enums.ResUploadType;
@@ -37,6 +40,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -48,24 +52,31 @@ public class HadoopUtils implements Closeable {
 
     private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
 
-    private static HadoopUtils instance = new HadoopUtils();
+    private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
+
+    private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
+            .newBuilder()
+            .expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 7), TimeUnit.DAYS)
+            .build(new CacheLoader<String, HadoopUtils>() {
+                @Override
+                public HadoopUtils load(String key) throws Exception {
+                    return new HadoopUtils();
+                }
+            });
+
     private static Configuration configuration;
     private static FileSystem fs;
-    private String hdfsUser;
+    private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
 
     private HadoopUtils() {
-        hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
         init();
         initHdfsPath();
     }
 
     public static HadoopUtils getInstance() {
-        // if kerberos startup, renew HadoopUtils
-        if (CommonUtils.getKerberosStartupState()) {
-            return new HadoopUtils();
-        }
-        return instance;
+        return cache.getUnchecked(HADOOP_UTILS_KEY);
     }
 
     /**
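The hunk above replaces the eagerly constructed singleton with a Guava LoadingCache keyed by HADOOP_UTILS_KEY, so getInstance() hands back a fresh HadoopUtils, and with it a fresh Kerberos login, once the cached entry expires. Below is a minimal, self-contained sketch of that expiring-singleton pattern; ExpiringSingleton and ExpensiveClient are hypothetical names, and the hard-coded 7-day expiry stands in for the KERBEROS_EXPIRE_TIME property read in the patch.

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

import java.util.concurrent.TimeUnit;

public class ExpiringSingleton {

    /** Stand-in for HadoopUtils: construction is expensive (e.g. a Kerberos login). */
    static class ExpensiveClient {
        ExpensiveClient() {
            // heavyweight initialisation would happen here
        }
    }

    private static final String CLIENT_KEY = "CLIENT_KEY";

    // Entries expire 7 days after they are written; the next lookup rebuilds the client.
    private static final LoadingCache<String, ExpensiveClient> CACHE = CacheBuilder
            .newBuilder()
            .expireAfterWrite(7, TimeUnit.DAYS)
            .build(new CacheLoader<String, ExpensiveClient>() {
                @Override
                public ExpensiveClient load(String key) {
                    // called on the first lookup and again after every expiry
                    return new ExpensiveClient();
                }
            });

    public static ExpensiveClient getInstance() {
        // getUnchecked suffices here because load() declares no checked exception
        return CACHE.getUnchecked(CLIENT_KEY);
    }
}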
@@ -88,10 +99,7 @@ public class HadoopUtils implements Closeable {
 
     /**
      * init hadoop configuration
      */
-    private void init() {
-        if (configuration == null) {
-            synchronized (HadoopUtils.class) {
-                if (configuration == null) {
+    private static void init() {
         try {
             configuration = new Configuration();
@@ -161,10 +169,6 @@
         } catch (Exception e) {
             logger.error(e.getMessage(), e);
         }
-                }
-            }
-        }
-
     }
 
     /**
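These two hunks drop the configuration == null double-checked guard, and its matching closing braces, from init(), so every HadoopUtils the cache loads re-runs the full Hadoop configuration setup, including any Kerberos login, rather than returning early because the static configuration was populated by a previous instance. A stripped-down illustration of the two shapes, with a plain Object standing in for the Hadoop Configuration:

public class InitGuardSketch {

    private static Object configuration; // stand-in for org.apache.hadoop.conf.Configuration

    // Old shape: the guard makes the body run at most once per JVM,
    // so an instance created later never refreshes its credentials.
    private static void guardedInit() {
        if (configuration == null) {
            synchronized (InitGuardSketch.class) {
                if (configuration == null) {
                    configuration = new Object(); // imagine the Kerberos login happening here
                }
            }
        }
    }

    // New shape: every call, i.e. every cache-loaded instance, re-initialises.
    private static void init() {
        configuration = new Object();
    }
}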
@@ -203,7 +207,6 @@
 
     }
 
-
     /**
      * cat file on hdfs
      *
@@ -258,7 +261,7 @@
     /**
      * the src file is on the local disk. Add it to FS at
      * the given dst name.
-
+     *
      * @param srcFile local file
      * @param dstHdfsPath destination hdfs path
      * @param deleteSource whether to delete the src
@@ -307,7 +310,6 @@
     }
 
     /**
-     *
      * delete a file
      *
      * @param hdfsFilePath the path to delete.
@@ -351,10 +353,11 @@
     /**
      * Renames Path src to Path dst. Can take place on local fs
      * or remote DFS.
+     *
      * @param src path to be renamed
      * @param dst new path after rename
-     * @throws IOException on failure
      * @return true if rename is successful
+     * @throws IOException on failure
      */
     public boolean rename(String src, String dst) throws IOException {
         return fs.rename(new Path(src), new Path(dst));
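Because rename(String, String) delegates straight to FileSystem#rename, callers reach it through getInstance(); a minimal usage sketch, where the class name and both paths are placeholders rather than anything taken from the patch:

import java.io.IOException;

import org.apache.dolphinscheduler.common.utils.HadoopUtils;

public class RenameExample {
    public static void main(String[] args) throws IOException {
        // A false return means the underlying FileSystem#rename did not succeed.
        boolean moved = HadoopUtils.getInstance()
                .rename("/dolphinscheduler/tmp/a.txt", "/dolphinscheduler/tmp/b.txt");
        System.out.println("renamed: " + moved);
    }
}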
@@ -400,7 +403,6 @@
     }
 
     /**
-     *
      * @return data hdfs path
      */
     public static String getHdfsDataBasePath() {