Java连接集成Kerberos的HA HDFS

2018-06-27  本文已影响0人  阿甘骑士
在实施方案前,假设读者已经基本熟悉以下技术 (不细说)
方案实施
  <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.5</version>
    </dependency>
    
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.5</version>
    </dependency>
[libdefaults]
default_realm = W.COM
 dns_lookup_realm = false
 dns_lookup_kdc = false
 ticket_lifetime = 24h
 renew_lifetime = 7d
 forwardable = true

[realms]
  
 W.COM = {
    kdc = node1:88
    admin_server = node1:749
    default_domain = W.COM
    
    kdc = bi-slave1
 }

#这里的kdc做了HA,需要把从kdc也加进realm里面
#默认的端口号也可以去掉
kadmin.local:  xst -norandkey -k deng_yb.keytab deng_yb@W.COM 

拿到core-site.xml文件


(screenshot: core-site-xml.png — contents of the core-site.xml file)
package deng.yb.hdfsUtils;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class HDFSUtils {

    /** Utility class — not instantiable. */
    private HDFSUtils() {
    }

    /**
     * Loads the HDFS client configuration.
     *
     * @return a {@link Configuration} with the classpath core-site.xml added
     *         as an extra resource (it carries the HA nameservice and
     *         security settings)
     */
    public static Configuration initConfiguration() {
        Configuration configuration = new Configuration();
        configuration.addResource(new Path(getPath("core-site.xml")));
        return configuration;
    }

    /**
     * Initializes the Kerberos environment and logs in from the keytab.
     *
     * @param conf Hadoop configuration to apply before the keytab login
     * @throws IllegalStateException if the keytab login fails — the original
     *         code printed the stack trace and continued, leaving the client
     *         unauthenticated; failing fast is safer
     */
    public static void initKerberosENV(Configuration conf) {
        System.setProperty("java.security.krb5.conf", getPath("krb5.conf"));
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        // Uncomment for verbose Kerberos troubleshooting:
        // System.setProperty("sun.security.krb5.debug", "true");
        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab("deng_yb@W.COM",
                    getPath("deng_yb.keytab"));
            System.out.println(UserGroupInformation.getCurrentUser());
        } catch (IOException e) {
            // Preserve the original exception as the cause instead of swallowing it.
            throw new IllegalStateException("Kerberos keytab login failed", e);
        }
    }

    /**
     * Resolves a classpath resource to a filesystem path.
     *
     * @param file resource name, e.g. "krb5.conf"
     * @return the resource's path, or {@code null} if {@code file} is null
     * @throws IllegalArgumentException if the resource is not on the classpath
     *         (previously this surfaced as an uninformative NullPointerException)
     */
    public static String getPath(String file) {
        if (null == file) {
            return null;
        }
        java.net.URL url = Thread.currentThread().getContextClassLoader().getResource(file);
        if (url == null) {
            throw new IllegalArgumentException("Resource not found on classpath: " + file);
        }
        return url.getPath();
    }
}

package deng.yb.hdfsUtils;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * 链接HA,kerberos 集群
 *
 */
public class App 
{
    /**
     * Connects to a Kerberos-secured HA HDFS cluster and prints the paths
     * directly under the filesystem root.
     */
    public static void main( String[] args )
    {
        // Load the HDFS client configuration (core-site.xml from the classpath).
        Configuration configuration = HDFSUtils.initConfiguration();

        // Authenticate against Kerberos before touching the filesystem.
        HDFSUtils.initKerberosENV(configuration);

        // "nameservice1" is the HA nameservice alias from core-site.xml,
        // so no single NameNode host is hard-coded here.
        Path dstPath = new Path("hdfs://nameservice1/");

        // try-with-resources closes the FileSystem; the original leaked it.
        try (FileSystem fileSystem = FileSystem.get(configuration)) {
            FileStatus[] listStatus = fileSystem.listStatus(dstPath);
            for (FileStatus fileStatus : listStatus) {
                System.out.println(fileStatus.getPath());
            }
        } catch (java.io.IOException e) {
            // Narrowed from catch (Exception); still reports and exits normally.
            e.printStackTrace();
        }
    }
}
上一篇下一篇

猜你喜欢

热点阅读