Browse Source

DEC-7216 HDFS插件krb文件读不到

release/10.0
rinoux 6 years ago
parent
commit
50f454318c
  1. 4
      build.xml
  2. 22
      core-site.xml
  3. BIN
      demo/demo-kerberos.png
  4. 0
      demo/demo.png
  5. 24
      hdfs-site.xml
  6. 7
      plugin.xml
  7. 10
      readme.md
  8. 101
      src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java

4
build.xml

@@ -108,11 +108,15 @@
<fileset dir=".">
<include name="${plugin-jar}"/>
<include name="plugin.xml"/>
<include name="core-site.xml"/>
<include name="hdfs-site.xml"/>
</fileset>
</copy>
<zip destfile="${basedir}/${plugin-folder}.zip" basedir=".">
<include name="${plugin-folder}/*.jar"/>
<include name="${plugin-folder}/plugin.xml"/>
<include name="${plugin-folder}/core-site.xml"/>
<include name="${plugin-folder}/hdfs-site.xml"/>
</zip>
<xmlproperty file="${basedir}/plugin.xml"/>
<move file="${plugin-folder}.zip" todir="${destLoc}/${plugin.name}"/>

22
core-site.xml

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>hadoop.rpc.protection</name>
<value>authentication</value>
</property>
</configuration>

BIN
demo/demo-kerberos.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 131 KiB

0
demo.png → demo/demo.png

Before

Width:  |  Height:  |  Size: 268 KiB

After

Width:  |  Height:  |  Size: 268 KiB

24
hdfs-site.xml

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. See accompanying LICENSE file.
-->
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>dfs.namenode.kerberos.principal.pattern</name>
<value>*</value>
</property>
</configuration>

7
plugin.xml

@@ -4,12 +4,15 @@
<name><![CDATA[HDFS资源仓库]]></name>
<active>yes</active>
<hidden>no</hidden>
<version>1.2</version>
<version>1.3</version>
<env-version>10.0</env-version>
<jartime>2018-12-27</jartime>
<vendor>rinoux</vendor>
<description><![CDATA[支持接入hadoop分布式hdfs文件系统。在单机或web集群场景下,可选择设置文件服务器为HDFS,保证模板等资源文件分布式存储,实现高可用。]]></description>
<change-notes><![CDATA[无]]></change-notes>
<change-notes><![CDATA[
<p>[2019-04-15]无法加载到krb5.conf文件的问题</p>
<p>[2019-04-15]支持额外的hdfs或者hadoop配置文件</p>
]]></change-notes>
<!--主包-->
<main-package>com.fr.plugin</main-package>
<!--功能记录点类-->

10
readme.md

@@ -4,5 +4,13 @@
- 选择HDFS协议,添加主机、端口等配置,如果设置了kerberos认证,还需要设置principal和keyTab文件路径。以及文件服务器在hdfs上的工作路径;
- 测试连接并保存;
> 路径是dfs的路径而非本地文件系统的路径
> 如果有自定义的hdfs或者hadoop配置,添加到插件安装目录下对应的hdfs-site.xml或者core-site.xml文件中
>> 示例
![demo](demo.png)
- 不带kerberos认证如图所示
![demo](demo/demo.png)
- 带kerberos认证如图所示
![demo-kerberos](demo/demo-kerberos.png)

101
src/main/java/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java

@@ -4,8 +4,13 @@ import com.fr.io.base.provider.impl.ConfigRepositoryFactory;
import com.fr.io.context.info.RepositoryProfile;
import com.fr.io.repository.ResourceRepository;
import com.fr.log.FineLoggerFactory;
import com.fr.plugin.context.PluginContexts;
import com.fr.plugin.context.PluginMarker;
import com.fr.plugin.hdfs.repository.HDFSFactoryProvider;
import com.fr.stable.StableUtils;
import com.fr.stable.StringUtils;
import com.fr.stable.project.ProjectConstants;
import com.fr.workspace.WorkContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -15,13 +20,14 @@ import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URI;
/**
* Created by rinoux on 2018/8/10.
*/
public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
public static final String IDENTITY = "HDFS";
static final String IDENTITY = "HDFS";
private static final String HDFS_SCHEMA = "hdfs://";
private static final String DEFAULT_HOST = "localhost";
@@ -81,15 +87,24 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
String principal = config.getPrincipal();
String krb5Conf = config.getKrbConf();
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Host:{}", host);
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Port:{}", port);
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] Principal:{}", principal);
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] KeyTab:{}", config.getKeyTab());
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] krb5.conf:{}", krb5Conf);
//开启了kerberos验证
boolean kerberos = kerberosAuthenticated(config);
if (kerberos) {
//是否需要kerberos验证
boolean needKrbAuth = needKrbAuth(config);
if (needKrbAuth) {
try {
System.setProperty("java.security.krb5.conf", krb5Conf);
conf.set("hadoop.security.authentication", "kerberos");
processConfForPrincipal(conf, principal);
//需要重新刷新一下让krb5.conf配置生效
refreshConfig();
//类似OSGI下,类加载需要设置SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
//refer to https://stackoverflow.com/questions/37608049/how-to-connect-with-hdfs-via-kerberos-from-osgi-bundles
SecurityUtil.setSecurityInfoProviders(new AnnotatedSecurityInfo());
@@ -102,12 +117,12 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
kerberosAuthModeSet = true;
}
} else if (kerberosAuthModeSet) {
//如果不需要Kerberos认证,要设置认证方式和校验方式为默认
conf.set("hadoop.security.authorization", "false");
conf.set("hadoop.security.authentication", "simple");
}
try {
if (StringUtils.isNotEmpty(principal) && !kerberos) {
if (StringUtils.isNotEmpty(principal) && !needKrbAuth) {
return FileSystem.newInstance(URI.create(hdfsUrl), conf, principal);
} else {
return FileSystem.newInstance(URI.create(hdfsUrl), conf);
@@ -136,34 +151,72 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
conf.setClass("fs.hdfs.impl", DistributedFileSystem.class, FileSystem.class);
conf.set("ipc.client.fallback-to-simple-auth-allowed", "true");
return readXmlProperties(conf);
}
/**
* 如果明确用代码设置了值不会从hdfs-site.xml,core-site.xml读取属性
* <p>
* 这里主要是为了读取用户一些特殊的配置
* 比如rpc保护模式principal格式限制之类的
*
* @param conf 配置
* @return 加载了hdfs-site.xml,core-site.xml文件的配置
*/
private Configuration readXmlProperties(Configuration conf) {
//插件目录
PluginMarker marker = PluginContexts.currentContext().getMarker();
if (marker != null) {
String location = StableUtils.pathJoin(ProjectConstants.PLUGINS_NAME, marker.getHomeName());
String coreSiteXml = StableUtils.pathJoin(location, "core-site.xml");
String hdfsSiteXml = StableUtils.pathJoin(location, "hdfs-site.xml");
if (WorkContext.getWorkResource().exist(coreSiteXml)) {
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] core-site.xml is add to configuration");
conf.addResource(WorkContext.getWorkResource().openStream(coreSiteXml));
}
if (WorkContext.getWorkResource().exist(hdfsSiteXml)) {
FineLoggerFactory.getLogger().debug("[HDFS REPOSITORY] hdfs-site.xml is add to configuration");
conf.addResource(WorkContext.getWorkResource().openStream(hdfsSiteXml));
}
}
return conf;
}
private boolean kerberosAuthenticated(HDFSConfig config) {
/**
* 根据是否配置了kerberos的字段决定是否走kerberos认证逻辑
*
* @param config
* @return
*/
private boolean needKrbAuth(HDFSConfig config) {
return StringUtils.isNotEmpty(config.getKeyTab())
&& StringUtils.isNotEmpty(config.getPrincipal())
&& StringUtils.isNotEmpty(config.getKrbConf());
}
/**
* BI-third内置了hadoop2.6的包插件优先从lib下加载类
* 此时kerberos认证会报错"Failed to specify server's Kerberos principal name"
* 需要设置一下principal的格式
* @param conf
* @param principal
* 刷新hadoop配置对象加载设置的jvm参数里配置的krb文件
*
* @throws Exception
*/
private void processConfForPrincipal(Configuration conf, String principal) {
//2.6.2以前的版本hdfs-site.xml没有默认的pricipal格式设置,需要手动加上
//根据Kerberos V5 principal的格式primary/instance@REALM,确定实际的格式
String principalPattern;
int primaryIdx = principal.indexOf("hdfs/");
int atIdx = principal.indexOf("@");
if (primaryIdx > -1 && atIdx > primaryIdx) {
String name = principal.substring(primaryIdx + "hdfs/".length(), atIdx - 1);
principalPattern = principal.replace(name, "*");
conf.set("dfs.namenode.kerberos.principal.pattern", principalPattern);
private void refreshConfig() throws Exception {
Class<?> configClassRef;
if (System.getProperty("java.vendor").contains("IBM")) {
configClassRef = Class.forName("com.ibm.security.krb5.internal.Config");
} else {
configClassRef = Class.forName("sun.security.krb5.Config");
}
Method getInstanceMethod = configClassRef.getMethod("getInstance");
Object kerbConf = getInstanceMethod.invoke(configClassRef);
Method refreshMethod = configClassRef.getDeclaredMethod("refresh");
refreshMethod.invoke(kerbConf);
}
}
}
Loading…
Cancel
Save