Testing HDFS reads and writes on a Kerberos (KDC) authenticated Hadoop cluster from Java

Use Spring Boot with configurable request parameters to verify, on the fly, that a Java program can access a Kerberos-authenticated Hadoop cluster. This is especially helpful the first time you connect Java to Kerberos, when the principal name, the keytab file, and similar details tend to cause problems.
You need to download the pre-configured keytab file and the krb5.conf file.
On the machine that runs the Java program, add a hosts entry mapping kerberos.example.com to the machine that runs the KDC.
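
Before writing any Java, it helps to confirm the keytab and krb5.conf with the command-line Kerberos tools. A quick sketch (MIT Kerberos client assumed; <kdc-ip> is a placeholder for your KDC's address, and the principal and keytab path mirror the defaults used in the controller below):

# /etc/hosts on the machine running the Java program
<kdc-ip>    kerberos.example.com
# point the tools at the downloaded krb5.conf and try a keytab login
export KRB5_CONFIG=./krb5.conf
kinit -kt /app/cp/ai.keytab ai@TDH
klist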

//pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.ddddd</groupId>
    <artifactId>hdfskeytab</artifactId>
    <version>1.0</version>

    <properties>
        <java.version>1.8</java.version>
        <hadoop.version>2.7.1</hadoop.version>
    </properties>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.2.1.RELEASE</version>
        <relativePath/>
    </parent>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>

</project>
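
Two notes on the build. The slf4j-log4j12 exclusions above avoid a duplicate SLF4J binding clash with Spring Boot's Logback. Also, the curl hint logged inside the controller uses port 8088 while the verification commands at the end use 8080 (Spring Boot's default); to actually serve on 8088, set it explicitly. A minimal application.properties sketch, assuming no other configuration:

# src/main/resources/application.properties
server.port=8088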
//TestController.java
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.concurrent.TimeUnit;

@Slf4j
@RestController
public class TestController {

    @RequestMapping({"","/"})
    public String hi(){
        return "Test#hi";
    }

    // connection parameters cached from the most recent /hdfs call; /rHdfs reuses them
    private String user;
    private String url;
    private String keyTab;


    /**
     * Test the HDFS connection.
     * Note: on a "no rule applied to user" error, first try a keytab login from the
     * command line (kinit -kt), then replace /etc/krb5.conf with the correct file and
     * retry; see also the auth_to_local sketch after this class.
     * @param path   HDFS path to list
     * @param user   Kerberos principal, e.g. ai@TDH
     * @param url    namenode URL, e.g. hdfs://192.168.127.131:9000
     * @param keyTab path to the keytab file
     * @return "done" on success, an error string otherwise
     */
    @RequestMapping("/hdfs")
    public String hdfs(@RequestParam String path,
                       @RequestParam String user,
                       @RequestParam String url,
                       @RequestParam String keyTab){
        log.info("in hdfs");
        log.info("cmd: curl 'http://localhost:8088/hdfs?path=/&user=ai@TDH&url=hdfs://192.168.127.131:9000&keyTab=/app/cp/ai.keytab'");

        System.setProperty("hadoop.home.dir",new File(".").getAbsolutePath());
//        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.security.krb5.conf", "krb5.conf");

        if (StringUtils.isEmpty(path)) path = "/";
        if (StringUtils.isEmpty(user)) user = "ai@TDH";
        if (StringUtils.isEmpty(url)) url = "hdfs://192.168.127.131:9000";
        if (StringUtils.isEmpty(keyTab)) keyTab = "/app/cp/ai.keytab";

        log.info("path:"+path);
        log.info("user:"+user);
        log.info("hdfsUrl:"+url);
        log.info("keytab:"+keyTab);

        this.user = user;
        this.url  = url;
        this.keyTab=keyTab;

        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("fs.defaultFS",url);
        conf.set("hadoop.security.authentication", "kerberos"); // must match the cluster's setting

        try {

            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(user, keyTab);

            FileSystem fs = FileSystem.get(conf);
            FileStatus[] files = fs.listStatus(new Path(path));
            for (FileStatus file : files) {
                log.info(file.getPath().toString());
            }
        } catch (IOException e) {
            log.error("kerberos login or hdfs listing failed", e);
            return "error login hdfs";
        }

        return "done";
    }

    /**
     * Read an HDFS file to verify the configuration end to end.
     * Requires a prior call to /hdfs so the connection parameters are cached.
     * @param filePath path of the file to read, relative to the namenode URL
     * @return "done" on success, an error string otherwise
     */
    @RequestMapping("/rHdfs")
    public String rHdfs(@RequestParam String filePath){
        log.info("in rHdfs:{}",filePath);

        // /hdfs must be called first so that user, url and keyTab are populated
        if (url == null){
            log.error("no hdfs url, call /hdfs first");
            return "error";
        }

        System.setProperty("hadoop.home.dir",new File(".").getAbsolutePath());
//        System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.security.krb5.conf", "krb5.conf");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("fs.defaultFS",url);
        conf.set("hadoop.security.authentication", "kerberos");

        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(user, keyTab);
            FileSystem fs = FileSystem.get(conf);

            String file = url+filePath;
            log.info("to read,url:[{}], filePath:[{}], FSpath:[{}]",url,filePath,file);
            Path path = new Path(file);
            if (!fs.exists(path)){
                log.error("file not found:{}",path.toString());
                return "error";
            }
            FileStatus[] fstatus = fs.listStatus(path);
            if (fstatus != null && fstatus.length > 0){
                log.info("file size:{} Bytes--{} MB",fstatus[0].getLen(),fstatus[0].getLen()/1024/1024);
            }

            log.info("start test read whole file");
            long taskWatchStart = System.currentTimeMillis();
            BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
            String line = null;
            int count = 0;
            while ((line = br.readLine()) != null){
                count++;
                if (count!=0 && count % 10 == 0){
                    log.info("line count:{}",count);
                    log.info("line content:{}",line);
                }
            }
            br.close();
            log.info("task done, take {}s", TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - taskWatchStart));
            log.info("lines:{}",count);

        }catch (Exception e){
            log.error("hdfs read failed", e);
            return "error read hdfs";
        }

        return "done";
    }
}
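
If the keytab login succeeds but listing fails with a "No rules applied to user ..." error, the auth_to_local mapping may not cover the principal's realm. A hedged sketch of the relevant property, using the TDH realm from the example principal ai@TDH (shown as core-site.xml; the same value can also be set on the client via conf.set):

<!-- core-site.xml (sketch): map principals of the TDH realm to short names -->
<property>
    <name>hadoop.security.auth_to_local</name>
    <value>
        RULE:[1:$1@$0](.*@TDH)s/@TDH//
        DEFAULT
    </value>
</property>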

Sometimes directory listing works but reading and writing files still fails, so test all of them together.
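
The controller above only lists and reads. To cover writes as well, a minimal sketch of a hypothetical /wHdfs endpoint (not in the original controller) that reuses the parameters cached by /hdfs could look like this:

    /**
     * Sketch: write a small test file to verify write access as well.
     */
    @RequestMapping("/wHdfs")
    public String wHdfs(@RequestParam String filePath){
        if (url == null){
            log.error("no hdfs url, call /hdfs first");
            return "error";
        }
        System.setProperty("java.security.krb5.conf", "krb5.conf");
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("fs.defaultFS", url);
        conf.set("hadoop.security.authentication", "kerberos");
        try {
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(user, keyTab);
            FileSystem fs = FileSystem.get(conf);
            // create(path, true) overwrites an existing file
            try (org.apache.hadoop.fs.FSDataOutputStream out = fs.create(new Path(url + filePath), true)) {
                out.write("kerberos write test".getBytes(java.nio.charset.StandardCharsets.UTF_8));
            }
            log.info("wrote test file:{}", filePath);
        } catch (IOException e) {
            log.error("hdfs write failed", e);
            return "error write hdfs";
        }
        return "done";
    }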

If native-library warnings appear at startup, point the JVM at the Hadoop native libraries:
-Djava.library.path=/usr/lib/hadoop/lib/native
# verify the login and directory listing (principal and realm are placeholders)
curl 'http://172.24.1.24:8080/hdfs?path=/&user=hdfs/<host>@<REALM>&url=hdfs://192.168.127.131:9000&keyTab=hdfs.keytab'
# verify reading from HDFS
curl 'http://172.24.1.24:8080/rHdfs?filePath=/README.txt'
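
For completeness, the java.library.path flag above is passed when launching the packaged jar. A sketch of a full run command (the jar name follows the pom's artifactId and version):

java -Djava.library.path=/usr/lib/hadoop/lib/native -jar hdfskeytab-1.0.jar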

Reprinted from blog.csdn.net/c5113620/article/details/104051030