
Merge pull request #9 in PG/plugin-repository-hdfs from ~RINOUX/plugin-repository-hdfs:master to master

* commit 'a13a3d16898a3ec3767877d9216e2c899697d82a':
  method name
  update
  update
  DEC-7216 kerberos login failure issue
release/10.0 · rinoux · 6 years ago
commit 54f360fd60

Changed files:
  1. plugin.xml (2 lines changed)
  2. src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java (40 lines changed)

plugin.xml (2 lines changed)

@@ -4,7 +4,7 @@
 <name><![CDATA[HDFS资源仓库]]></name>
 <active>yes</active>
 <hidden>no</hidden>
-<version>1.3</version>
+<version>1.4</version>
 <env-version>10.0</env-version>
 <jartime>2018-12-27</jartime>
 <vendor>rinoux</vendor>

src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java (40 lines changed)

@@ -18,8 +18,11 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.security.AnnotatedSecurityInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import java.io.IOException;
+import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.net.URI;
@@ -86,11 +89,12 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
         String hdfsUrl = HDFS_SCHEMA.concat(host).concat(":").concat(port);
         String principal = config.getPrincipal();
         String krb5Conf = config.getKrbConf();
+        String keyTab = config.getKeyTab();
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Host:{}", host);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Port:{}", port);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Principal:{}", principal);
-        FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", config.getKeyTab());
+        FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", keyTab);
         FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] krb5.conf:{}", krb5Conf);
@@ -103,14 +107,14 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
             conf.set("hadoop.security.authentication", "kerberos");
             //Refresh so that the krb5.conf settings take effect
-            refreshConfig();
+            reset();
             //Under OSGi-like class loading, SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo()) must be set
             //refer to https://stackoverflow.com/questions/37608049/how-to-connect-with-hdfs-via-kerberos-from-osgi-bundles
             SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
-            //Initialize UserGroupInformation
+            //Initialize UserGroupInformation and perform the login
             UserGroupInformation.setConfiguration(conf);
-            UserGroupInformation.loginUserFromKeytab(principal, config.getKeyTab());
+            UserGroupInformation.loginUserFromKeytab(principal, keyTab);
         } catch (Exception e) {
             FineLoggerFactory.getLogger().error(e.getMessage(), e);
         } finally {
@@ -199,6 +203,10 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
                 && StringUtils.isNotEmpty(config.getKrbConf());
     }
 
+    private void reset() throws Exception {
+        refreshConfig();
+        resetDefaultRealm();
+    }
 
     /**
@@ -219,4 +227,28 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
         Method refreshMethod = configClassRef.getDeclaredMethod("refresh");
         refreshMethod.invoke(kerbConf);
     }
+
+    /**
+     * Reset defaultRealm.
+     * Hadoop versions before 2.7 do not provide a resetDefaultRealm method,
+     * so this field has to be set via reflection, otherwise the login fails.
+     * <p>
+     * KerberosName has a static block that sets defaultRealm to KerberosUtil.getDefaultRealm().
+     * If the Config object has been re-set through System.setProperty (see refreshConfig()),
+     * KerberosName must be refreshed as well, otherwise defaultRealm keeps its previous value.
+     * If some other Kerberos-authenticated client has already set a krb5.conf file, the
+     * defaultRealm obtained at that point is wrong or null, so it has to be reset to make
+     * sure this login succeeds.
+     *
+     *
+     * Likewise, if Kerberos authentication elsewhere fails with "Login failed", suspect that
+     * the krb5 file was not refreshed and that KerberosName was not reset.
+     *
+     * @throws Exception
+     */
+    private void resetDefaultRealm() throws Exception {
+        Field field = KerberosName.class.getDeclaredField("defaultRealm");
+        assert field != null;
+        field.setAccessible(true);
+        field.set(KerberosName.class, KerberosUtil.getDefaultRealm());
+    }
 }
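
One piece the diff only shows in part is refreshConfig(): just its last two statements appear in the final hunk. Based on those lines and on the resetDefaultRealm() Javadoc, it presumably re-points the JVM at the configured krb5.conf and then refreshes the JDK-internal sun.security.krb5.Config via reflection, roughly as sketched below. Everything outside the two visible statements (the krb5ConfPath parameter, the getInstance() call, the class and variable names) is an assumption, not the plugin's actual code.

import java.lang.reflect.Method;

public final class Krb5RefreshSketch {

    // Hypothetical reconstruction of refreshConfig(); only the last two statements are visible in the diff.
    public static void refreshConfig(String krb5ConfPath) throws Exception {
        // Re-point the JVM at the new krb5.conf so the refreshed Config picks it up.
        System.setProperty("java.security.krb5.conf", krb5ConfPath);

        // sun.security.krb5.Config is JDK-internal, so it is loaded and refreshed reflectively.
        Class<?> configClassRef = Class.forName("sun.security.krb5.Config");
        Object kerbConf = configClassRef.getDeclaredMethod("getInstance").invoke(null);
        Method refreshMethod = configClassRef.getDeclaredMethod("refresh");
        refreshMethod.invoke(kerbConf);
    }
}

Two side notes on the added reflection code: field.set(KerberosName.class, ...) works because the object argument is ignored when the field is static, and on Java 9 or later the reflective access to sun.security.krb5.Config may additionally require exporting that package from the java.security.jgss module (for example with --add-exports), so this refresh trick is best viewed as a Java 8 era workaround.
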