diff --git a/plugin.xml b/plugin.xml
index 8d02872..7954572 100644
--- a/plugin.xml
+++ b/plugin.xml
@@ -4,7 +4,7 @@
 yes
 no
-1.3
+1.4
 10.0
 2018-12-27
 rinoux
diff --git a/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java b/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
index 0ad320f..de5a037 100644
--- a/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
+++ b/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
@@ -18,8 +18,11 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.security.AnnotatedSecurityInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
 
 import java.io.IOException;
+import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.net.URI;
 
@@ -86,11 +89,12 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
         String hdfsUrl = HDFS_SCHEMA.concat(host).concat(":").concat(port);
         String principal = config.getPrincipal();
         String krb5Conf = config.getKrbConf();
+        String keyTab = config.getKeyTab();
 
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Host:{}", host);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Port:{}", port);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Principal:{}", principal);
-        FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", config.getKeyTab());
+        FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", keyTab);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] krb5.conf:{}", krb5Conf);
 
@@ -103,14 +107,14 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
             conf.set("hadoop.security.authentication", "kerberos");
 
             //Refresh so that the krb5.conf settings take effect
-            refreshConfig();
+            reset();
 
             //In OSGi-like environments, class loading requires SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
             //refer to https://stackoverflow.com/questions/37608049/how-to-connect-with-hdfs-via-kerberos-from-osgi-bundles
             SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
 
-            //Initialize UserGroupInformation
+            //Initialize UserGroupInformation and perform the keytab login
             UserGroupInformation.setConfiguration(conf);
-            UserGroupInformation.loginUserFromKeytab(principal, config.getKeyTab());
+            UserGroupInformation.loginUserFromKeytab(principal, keyTab);
         } catch (Exception e) {
             FineLoggerFactory.getLogger().error(e.getMessage(), e);
         } finally {
@@ -199,6 +203,10 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
                 && StringUtils.isNotEmpty(config.getKrbConf());
     }
 
+    private void reset() throws Exception {
+        refreshConfig();
+        resetDefaultRealm();
+    }
 
     /**
@@ -219,4 +227,28 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
         Method refreshMethod = configClassRef.getDeclaredMethod("refresh");
         refreshMethod.invoke(kerbConf);
     }
+
+
+    /**
+     * Reset defaultRealm.
+     * Hadoop versions before 2.7 do not provide a resetDefaultRealm() method,
+     * so this field has to be set via reflection, otherwise the login fails.
+     * <p>
+     * KerberosName has a static block that sets defaultRealm to KerberosUtil.getDefaultRealm().
+     * If the krb5 Config has been re-pointed via System.setProperty (see refreshConfig()),
+     * KerberosName must be refreshed as well, otherwise defaultRealm keeps its previous value.
+     * If another Kerberos-authenticated client has already set a different krb5.conf, the cached
+     * defaultRealm would be wrong or null, so it is reset here to guarantee this login.
+     * <p>
+     * Likewise, if Kerberos authentication elsewhere fails with "Login failed", suspect that the
+     * krb5 file was not refreshed and that KerberosName was not reset.
+     *
+     * @throws Exception
+     */
+    private void resetDefaultRealm() throws Exception {
+
+        Field field = KerberosName.class.getDeclaredField("defaultRealm");
+        assert field != null;
+        field.setAccessible(true);
+        field.set(KerberosName.class, KerberosUtil.getDefaultRealm());
+    }
 }
\ No newline at end of file
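For reference, below is a minimal standalone sketch of the login sequence this patch ends up performing: re-read krb5.conf, reset the realm cached by KerberosName, then do the UGI keytab login. It assumes a Hadoop 2.x client and an Oracle/OpenJDK runtime that exposes sun.security.krb5.Config (on JDK 9+ this would additionally need --add-opens); KRB5_CONF, PRINCIPAL and KEYTAB are placeholder values, not taken from the plugin.

import java.lang.reflect.Field;
import java.lang.reflect.Method;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authentication.util.KerberosUtil;

public class KerberosLoginSketch {

    // Placeholder values for illustration only.
    private static final String KRB5_CONF = "/etc/krb5.conf";
    private static final String PRINCIPAL = "hdfs/node1@EXAMPLE.COM";
    private static final String KEYTAB = "/path/to/hdfs.keytab";

    public static void main(String[] args) throws Exception {
        // Point the JVM at the krb5.conf to use for this login.
        System.setProperty("java.security.krb5.conf", KRB5_CONF);

        // sun.security.krb5.Config caches krb5.conf, so force a re-read
        // (the plugin's refreshConfig() does the same via reflection).
        Class<?> krbConfig = Class.forName("sun.security.krb5.Config");
        Method refresh = krbConfig.getDeclaredMethod("refresh");
        refresh.invoke(null); // refresh() is static on recent JDKs

        // Hadoop clients without KerberosName.resetDefaultRealm(): overwrite
        // the realm cached by KerberosName's static initializer, otherwise a
        // stale or null realm makes loginUserFromKeytab() fail.
        Field defaultRealm = KerberosName.class.getDeclaredField("defaultRealm");
        defaultRealm.setAccessible(true);
        defaultRealm.set(null, KerberosUtil.getDefaultRealm());

        // Standard UGI keytab login.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(PRINCIPAL, KEYTAB);
    }
}

In the patch itself the first two steps are split across refreshConfig() and the new resetDefaultRealm(), which the reset() helper runs back to back before UserGroupInformation.loginUserFromKeytab() is called.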