First Hive Program

  • After connecting to Hive through beeline (for example: beeline -u jdbc:hive2://master:10000 -n wujinlei, matching the JDBC URL used later), create a database:
create database test;
  • Create the table tb_order:
create table tb_order(id int,name string,price double)
row format delimited fields terminated by '\t'
lines terminated by '\n';
  • Check the files on HDFS:
hdfs dfs -ls /home/wujinlei/hive/warehouse/test.db
  • Load data from the local file system
    • Create the file /home/wujinlei/work/tb_order and add the following content:
    1 beizi 12.34
    2 zhentou 23.56
    
    Note: because the table was defined with \t as the field delimiter, the columns in the file above must be separated by actual Tab characters (see the sketch after this list).
    • Run the load command:
    load data local inpath '/home/wujinlei/work/tb_order' overwrite into table test.tb_order;
    
    • Query the data:
    select * from test.tb_order;
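
Since text editors sometimes convert Tab characters to spaces, the following minimal Java sketch (the class name WriteOrderFile is illustrative, not from the original post) generates the sample file with real \t delimiters:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;

public class WriteOrderFile {
    public static void main(String[] args) throws IOException {
        // Join each row's columns with a real '\t', matching the table's field delimiter
        List<String> rows = Arrays.asList(
                String.join("\t", "1", "beizi", "12.34"),
                String.join("\t", "2", "zhentou", "23.56"));
        // Each row goes on its own line (the platform line separator, '\n' on Linux)
        Files.write(Paths.get("/home/wujinlei/work/tb_order"), rows, StandardCharsets.UTF_8);
    }
}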
    
  • Add the Maven dependencies:
<dependencies>
  <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>1.1.0-cdh5.4.7</version>
  </dependency>
  <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.7.1</version>
  </dependency>
</dependencies>
  • Add the Hadoop and Hive configuration files, as well as the log4j configuration, to the resources directory (a connectivity check sketch follows these files):
    • core-site.xml
    <?xml version="1.0" encoding="UTF-8"?>
    <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    <configuration>
      <property>
        <name>fs.defaultFS</name>
        <value>hdfs://master:9000</value>
      </property>
    </configuration>
    
    • hdfs-site.xml
    <configuration>
      <property>  
        <name>dfs.replication</name>  
        <value>2</value>  
      </property>
      <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/home/wujinlei/hadoop/dfs/name</value>
      </property>
      <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/home/wujinlei/hadoop/dfs/data</value>
      </property>
      <property>
        <name>dfs.namenode.http-address</name>
        <value>master:9870</value>
      </property>
      <property>
        <name>dfs.datanode.http.address</name>
        <value>master:9864</value>
      </property>
    </configuration>
    
    • mapred-site.xml
    <configuration>
      <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
      </property>
    </configuration>
    
    • yarn-site.xml
    <configuration>
      <property>
        <name>yarn.resourcemanager.hostname</name>
        <value>master</value>
      </property>
      <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
      </property>
      <property>
        <name>yarn.resourcemanager.webapp.address</name>
        <value>master:8088</value>
      </property>
      <property>
        <name>yarn.application.classpath</name>
        <value>
          /usr/java/hadoop-3.0.0/etc/hadoop,
          /usr/java/hadoop-3.0.0/share/hadoop/common/lib/*,
          /usr/java/hadoop-3.0.0/share/hadoop/common/*,
          /usr/java/hadoop-3.0.0/share/hadoop/hdfs,
          /usr/java/hadoop-3.0.0/share/hadoop/hdfs/lib/*,
          /usr/java/hadoop-3.0.0/share/hadoop/hdfs/*,
          /usr/java/hadoop-3.0.0/share/hadoop/mapreduce/*,
          /usr/java/hadoop-3.0.0/share/hadoop/yarn,
          /usr/java/hadoop-3.0.0/share/hadoop/yarn/lib/*,
          /usr/java/hadoop-3.0.0/share/hadoop/yarn/*,
          /usr/java/jdk1.8.0_45/lib/tools.jar
        </value>
      </property>
    </configuration>
    
    • hive-site.xml
    <configuration>
      <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://master:3306/hive?createDatabaseIfNotExist=true</value>
      </property>
      <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.cj.jdbc.Driver</value>
      </property>  
      <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
      </property>
      <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>wujinlei</value>
      </property>
      <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/home/wujinlei/hive/warehouse</value>
      </property>
    </configuration>
    
    • log4j.properties
    log4j.rootLogger=INFO,Console
    #Console
    log4j.appender.Console=org.apache.log4j.ConsoleAppender
    log4j.appender.Console.layout=org.apache.log4j.PatternLayout
    log4j.appender.Console.layout.ConversionPattern=%d [%t] %-5p %c{2} %X{traceId}- %m%n
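
With the dependencies declared and these configuration files on the classpath, the HDFS connection can be sanity-checked before writing any Hive code. Below is a minimal sketch (the class name HdfsCheck is illustrative) that lists the same warehouse directory as the hdfs dfs -ls step above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsCheck {
    public static void main(String[] args) throws Exception {
        // Configuration picks up core-site.xml and hdfs-site.xml from the resources directory
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
            // Same directory as the earlier "hdfs dfs -ls" command
            for (FileStatus status : fs.listStatus(new Path("/home/wujinlei/hive/warehouse/test.db"))) {
                System.out.println(status.getPath());
            }
        }
    }
}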
    
  • Write the code:
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * Created by JackManWu on 2018/2/26.
 */
public class HiveMain {
    public static void main(String[] args) throws Exception {
        // Register the Hive JDBC driver (required for older driver versions)
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // HiveServer2 address; "test" is the database created earlier
        String url = "jdbc:hive2://master:10000/test";
        String sql = "select * from test.tb_order";
        // try-with-resources closes the connection, statement, and result set
        try (Connection connection = DriverManager.getConnection(url, "wujinlei", "wujinlei");
             PreparedStatement statement = connection.prepareStatement(sql);
             ResultSet resultSet = statement.executeQuery()) {
            while (resultSet.next()) {
                System.out.println(resultSet.getInt(1) + "\t" + resultSet.getString(2)
                        + "\t" + resultSet.getDouble(3));
            }
        }
    }
}
  • Run the code directly; it should print the two rows loaded earlier.
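
Beyond a plain select, the Hive JDBC driver also accepts PreparedStatement parameters. The sketch below (the class name HiveQueryById is illustrative; it reuses the connection settings above) filters by id through a placeholder rather than string concatenation:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class HiveQueryById {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://master:10000/test";
        try (Connection connection = DriverManager.getConnection(url, "wujinlei", "wujinlei");
             PreparedStatement statement =
                     connection.prepareStatement("select * from test.tb_order where id = ?")) {
            statement.setInt(1, 1); // bind the placeholder to id = 1
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    System.out.println(resultSet.getInt(1) + "\t" + resultSet.getString(2)
                            + "\t" + resultSet.getDouble(3));
                }
            }
        }
    }
}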


Reposted from my.oschina.net/u/3163032/blog/1624438