Spring Boot 1.5.6 + JDBC: connecting to Hive with Kerberos authentication

1. Dependencies to include in the POM file:

Note 1: Both hadoop-common and hive-jdbc must exclude servlet-api here, otherwise Spring Boot fails to start with errors such as 'Unable to start embedded Tomcat' or 'A child container failed during start'.

Note 2: With other versions of hive-jdbc, running the code in section 2 below may fail to connect with an outdated client-protocol error (a client/server version mismatch).

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0</version>
    <exclusions>
        <exclusion>
            <groupId>javax.servlet</groupId>
            <artifactId>servlet-api</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.1.0</version>
    <exclusions>
        <exclusion>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.eclipse.jetty.orbit</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.eclipse.jetty.aggregate</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>tomcat</groupId>
            <artifactId>*</artifactId>
        </exclusion>
        <exclusion>
            <groupId>javax.servlet</groupId>
            <artifactId>servlet-api</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.mortbay.jetty</groupId>
            <artifactId>*</artifactId>
        </exclusion>
    </exclusions>
</dependency>

2. JDBC connection code

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.sql.*;

public class Test {

    public static void main(String[] args) throws Exception {
        // Point explicitly at krb5.conf to avoid the Kerberos config file not being found when running on Windows
        System.setProperty("java.security.krb5.conf", "C:/kerberos/test/config/krb5.conf");
        // Set hadoop.home.dir (a winutils installation) to avoid the missing HADOOP_HOME or hadoop.home.dir error on Windows
        System.setProperty("hadoop.home.dir", "C:\\Program Files\\winutils-master\\hadoop-2.8.3");
        // Kerberos authentication
        Configuration configuration = new Configuration();
        configuration.set("hadoop.security.authentication", "Kerberos");
        configuration.set("keytab.file", "C:/kerberos/test/config/hadoop.keytab");
        configuration.set("kerberos.principal", "[email protected]");
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab("[email protected]", "C:/kerberos/test/config/hadoop.keytab");
        // Create the Hive connection
        Connection connection = null;
        ResultSet rs = null;
        PreparedStatement ps = null;
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            connection = DriverManager.getConnection("jdbc:hive2://bdpnode1.domain.com:10000/;principal=hive/[email protected]","hadoop","");
            if (null != connection) {
                ps = connection.prepareStatement("SELECT * FROM test");
                rs = ps.executeQuery();
                while (rs.next()) {
                    System.out.println(rs.getInt(1));
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (rs != null) {
                rs.close();
            }
            if (ps != null) {
                ps.close();
            }
            if (connection != null) {
                connection.close();
            }
        }
    }
}
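
To run the same connection inside a Spring Boot 1.5.x application rather than a standalone main method, one option is to expose it as a JdbcTemplate bean. The sketch below is only an illustration, not part of the original example: it reuses the keytab path, principal, and JDBC URL shown above (replace them with your own values), performs the Kerberos login once when the bean is created, and uses SimpleDriverDataSource purely for brevity.

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.jdbc.HiveDriver;
import org.springframework.context.annotation.Bean;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;

// Spring's @Configuration is written fully qualified to avoid a name clash
// with org.apache.hadoop.conf.Configuration
@org.springframework.context.annotation.Configuration
public class HiveJdbcConfig {

    @Bean
    public JdbcTemplate hiveJdbcTemplate() throws Exception {
        // Log in to Kerberos once, before the first connection is opened
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "Kerberos");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("[email protected]", "C:/kerberos/test/config/hadoop.keytab");

        // SimpleDriverDataSource opens a new connection per call; kept here for simplicity
        SimpleDriverDataSource dataSource = new SimpleDriverDataSource();
        dataSource.setDriver(new HiveDriver());
        dataSource.setUrl("jdbc:hive2://bdpnode1.domain.com:10000/;principal=hive/[email protected]");
        return new JdbcTemplate(dataSource);
    }
}

With such a bean in place, the query from the example can be written as hiveJdbcTemplate.queryForList("SELECT * FROM test"). In production a pooled DataSource is preferable, since SimpleDriverDataSource opens a new connection for every call.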
