Browse Source

hdfs add kerberos authentication

pull/2/head
qiaozhanwei 6 years ago
parent
commit
4e3b655bcc
  1. 40
      escheduler-common/src/main/java/cn/escheduler/common/Constants.java
  2. 15
      escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java
  3. 16
      escheduler-common/src/main/resources/common/common.properties
  4. 15
      install.sh

40
escheduler-common/src/main/java/cn/escheduler/common/Constants.java

@@ -241,7 +241,11 @@ public final class Constants {
*/
public static final String SCHEDULER_TASKS_QUEUE = "tasks_queue";
/**
* escheduler need kill tasks queue
*/
public static final String SCHEDULER_TASKS_KILL = "tasks_kill";
public static final String ZOOKEEPER_SCHEDULER_ROOT = "zookeeper.escheduler.root";
public static final String SCHEDULER_QUEUE_IMPL = "escheduler.queue.impl";
@@ -336,11 +340,6 @@ public final class Constants {
*/
public static final int MAX_TASK_TIMEOUT = 24 * 3600;
/**
* max task timeout
*/
public static final int MAX_PROCESS_TIMEOUT = Integer.MAX_VALUE;
/**
* heartbeat threads number
@@ -830,4 +829,35 @@ public final class Constants {
* preview schedule execute count
*/
public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5;
/**
* java.security.krb5.conf
*/
public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
/**
* java.security.krb5.conf.path
*/
public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
/**
* hadoop.security.authentication.startup.state
*/
public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state";
/**
* loginUserFromKeytab user
*/
public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username";
/**
* loginUserFromKeytab path
*/
public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path";
}

15
escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,9 +41,7 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import static cn.escheduler.common.Constants.*;
import static cn.escheduler.common.utils.PropertyUtils.getInt;
import static cn.escheduler.common.utils.PropertyUtils.getString;
import static cn.escheduler.common.utils.PropertyUtils.getPrefixedProperties;
import static cn.escheduler.common.utils.PropertyUtils.*;
/**
* hadoop utils
@@ -73,6 +72,16 @@ public class HadoopUtils implements Closeable {
if (configuration == null) {
try {
configuration = new Configuration();
if (getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE)){
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION,"kerberos");
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system

16
escheduler-common/src/main/resources/common/common.properties

@@ -14,7 +14,19 @@ process.exec.basepath=/tmp/escheduler/exec
data.store2hdfs.basepath=/escheduler
# whether hdfs starts
hdfs.startup.state=true
hdfs.startup.state=false
# whether kerberos starts
hadoop.security.authentication.startup.state=false
# java.security.krb5.conf path
java.security.krb5.conf.path=/opt/krb5.conf
# loginUserFromKeytab user
# NOTE(review): java.util.Properties takes values literally, quotes included — principal must be unquoted
login.user.keytab.username=hdfs-mycluster@ESZ.COM
# loginUserFromKeytab path
# NOTE(review): unquoted — Properties would otherwise include the quote characters in the path
login.user.keytab.path=/opt/hdfs.headless.keytab
# system env path. self configuration, please make sure the directory and file exists and have read write execute permissions
escheduler.env.path=/opt/.escheduler_env.sh
@@ -23,5 +35,5 @@ escheduler.env.path=/opt/.escheduler_env.sh
resource.view.suffixs=txt,log,sh,conf,cfg,py,java,sql,hql,xml
# is development state? default "false"
development.state=false
development.state=true

15
install.sh

@@ -125,7 +125,7 @@ yarnHaIps="192.168.xx.xx,192.168.xx.xx"
# 如果是单 resourcemanager,只需要配置一个主机名称,如果是resourcemanager HA,则默认配置就好
singleYarnIp="ark1"
# hdfs根路径,根路径的owner必须是部署用户
# hdfs根路径,根路径的owner必须是部署用户。1.1.0之前版本不会自动创建hdfs根目录,需要自行创建
hdfsPath="/escheduler"
# common 配置
@@ -147,6 +147,19 @@ resSuffixs="txt,log,sh,conf,cfg,py,java,sql,hql,xml"
# 开发状态,如果是true,对于SHELL脚本可以在execPath目录下查看封装后的SHELL脚本,如果是false则执行完成直接删除
devState="true"
# kerberos 配置
# kerberos 是否启动
kerberosStartUp="false"
# kdc krb5 配置文件路径
krb5ConfPath="$installPath/conf/krb5.conf"
# keytab 用户名
keytabUserName="hdfs-mycluster@ESZ.COM"
# 用户 keytab路径
keytabPath="$installPath/conf/hdfs.headless.keytab"
# zk 配置
# zk根目录
zkRoot="/escheduler"

Loading…
Cancel
Save