Hive Java API

Environment

Hadoop 2.8.0
HBase 1.4.9
Hive 2.3.0 (matching the hive-jdbc dependency below)
CentOS 7.2

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>java_Hadoop</groupId>
    <artifactId>java_Hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.0</version>
        </dependency>
    </dependencies>
</project>

Java code

package Hive;

import Hdfs.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Date;

public class Hive {

    // Hadoop configuration; note that fs.defaultFS needs the hdfs:// scheme
    private static Configuration conf = new Configuration();
    static {
        conf.set("fs.defaultFS", "hdfs://172.18.74.236:9000");
    }
    // FileSystem.get can throw IOException, hence the throwing constructor below
    FileSystem fs = FileSystem.get(conf);

    // JDBC driver class name, fixed for HiveServer2
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    // HiveServer2 listens on port 10000 by default; use the Hive server's address
    private static String url = "jdbc:hive2://172.18.74.236:10000/default";
    // Username and password for the Hive connection
    private static String user = "root";
    private static String password = "your-password-here";
    public static String today = new SimpleDateFormat("yyyy-MM-dd").format(new Date());


    // Shared JDBC resources
    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    public Hive() throws IOException {
    }

    // Load the driver and open the connection
    private static void init() throws Exception {
        Class.forName(driverName);
        conn = DriverManager.getConnection(url,user,password);
        stmt = conn.createStatement();
    }

    // Release JDBC resources
    public static void destroy() throws Exception {
        if ( rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }

    // Main method
    public static void main(String[] args) throws Exception {
        // Open the connection
        init();
        // Table name
        String tableName = "Test";
        // Local file and target HDFS directory (one subdirectory per upload date)
        String localFilePath = "E:\\Data.txt";
        String hdfsFilePath = "/Test" + today.substring(0, 7) + "/upload_date=" + today + "/";
        File localFile = new File(localFilePath);

        /**
         * 1. List the directories in HDFS
         * 2. Create the target directory
         * 3. Upload the local file to HDFS
         * 4. Load the HDFS file into the Hive table
         */
//        HdfsTest.getDiretoryFromHdfs("/");
//        HdfsTest.mkdir(hdfsFilePath);
//        HdfsTest.uploadFile(localFilePath, hdfsFilePath);
//        HdfsTest.getDiretoryFromHdfs(hdfsFilePath);

//        createTable(tableName);
//        showTables();
//        loadData(hdfsFilePath + localFile.getName(), tableName);
        descTable(tableName);

//        selectData(tableName);
//        countData(tableName);
//        dropTable(tableName);
        destroy();
    }

    // Create the table, dropping any existing one first
    private static void createTable(String tableName) throws Exception {
        stmt.execute("drop table if exists " + tableName);
        String sql = "create table " + tableName + " (id int, name string)";
        stmt.execute(sql);
    }

    // List all tables
    private static void showTables() throws Exception {
        String sql = "show tables";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    // Print the table schema
    public static void descTable(String tableName) throws Exception {
        String sql = "desc " + tableName;
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }
    }

    // Load an HDFS file into the table; LOAD DATA INPATH moves the file into
    // the table's warehouse directory rather than copying it
    public static void loadData(String filePath, String tableName) throws Exception {
        String sql = "load data inpath '" + filePath + "' into table " + tableName;
        stmt.execute(sql);
    }

    // Query all rows; the table created above has columns (id, name), so read
    // the result set by position
    public static void selectData(String tableName) throws Exception {
        String sql = "select * from " + tableName;
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t\t" + rs.getString(2));
        }
    }



    // Drop the table
    public static void dropTable(String tableName) throws Exception {
        String sql = "drop table if exists " + tableName;
        stmt.execute(sql);
    }
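
    // countData, as called (commented out) from main above; a minimal sketch,
    // assuming a simple row count is what's intended
    public static void countData(String tableName) throws Exception {
        String sql = "select count(1) from " + tableName;
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println("rows: " + rs.getLong(1));
        }
    }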

}
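
Note that the program talks to HiveServer2 over JDBC, so the service must be running on the Hive host (for example via hive --service hiveserver2) and port 10000 must be reachable before running main().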

This example depends on my HDFS helper code; see my HDFS API article for the full implementation.
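
For reference, here is a minimal sketch of that HdfsTest helper, reconstructed from the calls in main() above; the method names (including the getDiretoryFromHdfs spelling) come from those calls, while the bodies are illustrative uses of the standard Hadoop FileSystem API.

package Hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

public class HdfsTest {

    private static Configuration conf = new Configuration();
    static {
        conf.set("fs.defaultFS", "hdfs://172.18.74.236:9000");
    }

    // List everything under an HDFS path
    public static void getDiretoryFromHdfs(String path) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        for (FileStatus status : fs.listStatus(new Path(path))) {
            System.out.println(status.getPath());
        }
    }

    // Create a directory (and any missing parents) in HDFS
    public static void mkdir(String path) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        fs.mkdirs(new Path(path));
    }

    // Copy a local file into an HDFS directory
    public static void uploadFile(String localPath, String hdfsPath) throws IOException {
        FileSystem fs = FileSystem.get(conf);
        fs.copyFromLocalFile(new Path(localPath), new Path(hdfsPath));
    }
}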

Reposted from blog.csdn.net/qq_43442524/article/details/102913420