diff --git a/build.xml b/build.xml
index 57976b5..36d0be8 100644
--- a/build.xml
+++ b/build.xml
@@ -108,11 +108,15 @@
+
+
+
+
diff --git a/core-site.xml b/core-site.xml
new file mode 100755
index 0000000..6afc04c
--- /dev/null
+++ b/core-site.xml
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+ hadoop.rpc.protection
+ authentication
+
+
diff --git a/demo/demo-kerberos.png b/demo/demo-kerberos.png
new file mode 100644
index 0000000..5672b92
Binary files /dev/null and b/demo/demo-kerberos.png differ
diff --git a/demo.png b/demo/demo.png
similarity index 100%
rename from demo.png
rename to demo/demo.png
diff --git a/hdfs-site.xml b/hdfs-site.xml
new file mode 100755
index 0000000..3dd8d98
--- /dev/null
+++ b/hdfs-site.xml
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+
+ dfs.namenode.kerberos.principal.pattern
+ *
+
+
diff --git a/plugin.xml b/plugin.xml
index 8bd4241..8d02872 100644
--- a/plugin.xml
+++ b/plugin.xml
@@ -4,12 +4,15 @@
yes
no
- 1.2
+ 1.3
10.0
2018-12-27
rinoux
-
+ [2019-04-15]无法加载到krb5.conf文件的问题
+ [2019-04-15]支持额外的hdfs或者hadoop配置文件
+ ]]>
com.fr.plugin
diff --git a/readme.md b/readme.md
index 1a6523b..7755983 100644
--- a/readme.md
+++ b/readme.md
@@ -4,5 +4,13 @@
- 选择HDFS协议,添加主机、端口等配置,如果设置了kerberos认证,还需要设置principal和keyTab文件路径。以及文件服务器在hdfs上的工作路径;
- 测试连接并保存;
+> 路径是dfs的路径而非本地文件系统的路径
+
+> 如果有自定义的hdfs或者hadoop配置,添加到插件安装目录下对应的hdfs-site.xml或者core-site.xml文件中
+
>> 示例
-![demo](demo.png)
+- 不带kerberos认证如图所示
+![demo](demo/demo.png)
+- 带kerberos认证如图所示
+![demo-kerberos](demo/demo-kerberos.png)
+
diff --git a/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java b/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
index 9acfe21..0ad320f 100644
--- a/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
+++ b/src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
@@ -4,8 +4,13 @@ import com.fr.io.base.provider.impl.ConfigRepositoryFactory;
import com.fr.io.context.info.RepositoryProfile;
import com.fr.io.repository.ResourceRepository;
import com.fr.log.FineLoggerFactory;
+import com.fr.plugin.context.PluginContexts;
+import com.fr.plugin.context.PluginMarker;
import com.fr.plugin.hdfs.repository.HDFSFactoryProvider;
+import com.fr.stable.StableUtils;
import com.fr.stable.StringUtils;
+import com.fr.stable.project.ProjectConstants;
+import com.fr.workspace.WorkContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -15,13 +20,14 @@ import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
+import java.lang.reflect.Method;
import java.net.URI;
/**
* Created by rinoux on 2018/8/10.
*/
public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
- public static final String IDENTITY = "HDFS";
+ static final String IDENTITY = "HDFS";
private static final String HDFS_SCHEMA = "hdfs://";
private static final String DEFAULT_HOST = "localhost";
@@ -81,15 +87,24 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
String principal = config.getPrincipal();
String krb5Conf = config.getKrbConf();
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Host:{}", host);
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Port:{}", port);
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Principal:{}", principal);
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", config.getKeyTab());
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] krb5.conf:{}", krb5Conf);
- //开启了kerberos验证
- boolean kerberos = kerberosAuthenticated(config);
- if (kerberos) {
+
+ //是否需要kerberos验证
+ boolean needKrbAuth = needKrbAuth(config);
+
+ if (needKrbAuth) {
try {
System.setProperty("java.security.krb5.conf", krb5Conf);
conf.set("hadoop.security.authentication", "kerberos");
- processConfForPrincipal(conf, principal);
+ //需要重新刷新一下让krb5.conf配置生效
+ refreshConfig();
+
//类似OSGI下,类加载需要设置SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
//refer to https://stackoverflow.com/questions/37608049/how-to-connect-with-hdfs-via-kerberos-from-osgi-bundles
SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
@@ -102,12 +117,12 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
kerberosAuthModeSet = true;
}
} else if (kerberosAuthModeSet) {
+ //如果不需要Kerberos认证,要设置认证方式和校验方式为默认
conf.set("hadoop.security.authorization", "false");
conf.set("hadoop.security.authentication", "simple");
}
try {
-
- if (StringUtils.isNotEmpty(principal) && !kerberos) {
+ if (StringUtils.isNotEmpty(principal) && !needKrbAuth) {
return FileSystem.newInstance(URI.create(hdfsUrl), conf, principal);
} else {
return FileSystem.newInstance(URI.create(hdfsUrl), conf);
@@ -136,34 +151,72 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory {
conf.setClass("fs.hdfs.impl", DistributedFileSystem.class, FileSystem.class);
conf.set("ipc.client.fallback-to-simple-auth-allowed", "true");
+
+ return readXmlProperties(conf);
+ }
+
+
+ /**
+ * 如果明确用代码设置了值不会从hdfs-site.xml,core-site.xml读取属性
+ *
+ * 这里主要是为了读取用户一些特殊的配置
+ * 比如rpc保护模式、principal格式限制之类的
+ *
+ * @param conf 配置
+ * @return 加载了hdfs-site.xml,core-site.xml文件的配置
+ */
+ private Configuration readXmlProperties(Configuration conf) {
+
+ //插件目录
+ PluginMarker marker = PluginContexts.currentContext().getMarker();
+ if (marker != null) {
+ String location = StableUtils.pathJoin(ProjectConstants.PLUGINS_NAME, marker.getHomeName());
+ String coreSiteXml = StableUtils.pathJoin(location, "core-site.xml");
+ String hdfsSiteXml = StableUtils.pathJoin(location, "hdfs-site.xml");
+ if (WorkContext.getWorkResource().exist(coreSiteXml)) {
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] core-site.xml is added to configuration");
+ conf.addResource(WorkContext.getWorkResource().openStream(coreSiteXml));
+ }
+
+ if (WorkContext.getWorkResource().exist(hdfsSiteXml)) {
+ FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] hdfs-site.xml is added to configuration");
+ conf.addResource(WorkContext.getWorkResource().openStream(hdfsSiteXml));
+ }
+ }
return conf;
}
- private boolean kerberosAuthenticated(HDFSConfig config) {
+ /**
+ * 根据是否配置了kerberos的字段决定是否走kerberos认证逻辑
+ *
+ * @param config
+ * @return
+ */
+ private boolean needKrbAuth(HDFSConfig config) {
return StringUtils.isNotEmpty(config.getKeyTab())
&& StringUtils.isNotEmpty(config.getPrincipal())
&& StringUtils.isNotEmpty(config.getKrbConf());
}
+
/**
- * BI-third内置了hadoop2.6的包,插件优先从lib下加载类,
- * 此时kerberos认证会报错"Failed to specify server's Kerberos principal name"
- * 需要设置一下principal的格式
- * @param conf
- * @param principal
+ * 刷新hadoop配置对象,加载设置的jvm参数里配置的krb文件
+ *
+ * @throws Exception
*/
- private void processConfForPrincipal(Configuration conf, String principal) {
- //2.6.2以前的版本hdfs-site.xml没有默认的pricipal格式设置,需要手动加上
- //根据Kerberos V5 principal的格式primary/instance@REALM,确定实际的格式
- String principalPattern;
- int primaryIdx = principal.indexOf("hdfs/");
- int atIdx = principal.indexOf("@");
- if (primaryIdx > -1 && atIdx > primaryIdx) {
- String name = principal.substring(primaryIdx + "hdfs/".length(), atIdx - 1);
- principalPattern = principal.replace(name, "*");
- conf.set("dfs.namenode.kerberos.principal.pattern", principalPattern);
+ private void refreshConfig() throws Exception {
+ Class<?> configClassRef;
+ if (System.getProperty("java.vendor").contains("IBM")) {
+ configClassRef = Class.forName("com.ibm.security.krb5.internal.Config");
+ } else {
+ configClassRef = Class.forName("sun.security.krb5.Config");
}
+
+ Method getInstanceMethod = configClassRef.getMethod("getInstance");
+ Object kerbConf = getInstanceMethod.invoke(configClassRef);
+ Method refreshMethod = configClassRef.getDeclaredMethod("refresh");
+ refreshMethod.invoke(kerbConf);
}
-}
+}
\ No newline at end of file