Connecting to Hive from Spring Boot

Copyright notice: Copyright ©2018-2019 凉白开不加冰. All rights reserved. https://blog.csdn.net/qq_21082615/article/details/91374550

Introduction: the Linux article covered how to deploy Hive; this post focuses on how to connect to Hive from Spring Boot.

Step 1: Add the Maven dependencies

<dependency>
   <groupId>org.springframework.boot</groupId>
   <artifactId>spring-boot-starter-web</artifactId>
   <version>2.1.2.RELEASE</version>
</dependency>

<dependency>
   <groupId>com.alibaba</groupId>
   <artifactId>druid-spring-boot-starter</artifactId>
   <version>1.1.10</version>
</dependency>

<dependency>
   <groupId>org.springframework.boot</groupId>
   <artifactId>spring-boot-starter-jdbc</artifactId>
   <version>2.1.2.RELEASE</version>
</dependency>

<dependency>
   <groupId>org.springframework.data</groupId>
   <artifactId>spring-data-hadoop</artifactId>
   <version>2.5.0.RELEASE</version>
   <exclusions>
      <exclusion>
         <groupId>jdk.tools</groupId>
         <artifactId>jdk.tools</artifactId>
      </exclusion>
   </exclusions>
</dependency>

<dependency>
   <groupId>org.apache.hive</groupId>
   <artifactId>hive-jdbc</artifactId>
   <version>2.3.3</version>
   <exclusions>
      <exclusion>
         <groupId>org.eclipse.jetty.aggregate</groupId>
         <artifactId>*</artifactId>
      </exclusion>
      <exclusion>
         <groupId>jdk.tools</groupId>
         <artifactId>jdk.tools</artifactId>
      </exclusion>
   </exclusions>
</dependency>

<dependency>
   <groupId>org.apache.tomcat</groupId>
   <artifactId>tomcat-jdbc</artifactId>
   <version>9.0.11</version>
</dependency>
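
Before wiring anything into Spring, it can help to confirm that the hive-jdbc driver is on the classpath and that HiveServer2 is reachable. Below is a minimal plain-JDBC sketch; the host, user, and password match the configuration in the next step and should be adjusted to your environment, and the HiveConnectionCheck class name is only illustrative.

package com.example;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveConnectionCheck {

    public static void main(String[] args) throws Exception {
        // Fails fast if the hive-jdbc jar is missing from the classpath
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        String url = "jdbc:hive2://192.168.0.130:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "root", "123456");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1)); // print each table name
            }
        }
    }
}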

Step 2: Configure application.yml

hive:
  url: jdbc:hive2://192.168.0.130:10000/default # "default" is the name of Hive's default database
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  user: root
  password: 123456
  initialSize: 1
  minIdle: 3
  maxActive: 20
  maxWait: 60000
  timeBetweenEvictionRunsMillis: 60000
  minEvictableIdleTimeMillis: 30000
  validationQuery: select 1
  testWhileIdle: true
  testOnBorrow: false
  testOnReturn: false
  poolPreparedStatements: true
  maxPoolPreparedStatementPerConnectionSize: 20
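
As an aside, the hive.* block can also be bound to a single properties object with @ConfigurationProperties instead of reading each key with @Value as the next step does. A minimal sketch, assuming a hypothetical HiveProperties class; Spring Boot's relaxed binding maps driver-class-name to driverClassName.

package com.example.config;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

/**
 * Binds the hive.* section of application.yml to one object.
 */
@Component
@ConfigurationProperties(prefix = "hive")
public class HiveProperties {

    private String url;
    private String driverClassName; // bound from hive.driver-class-name
    private String user;
    private String password;

    public String getUrl() { return url; }
    public void setUrl(String url) { this.url = url; }

    public String getDriverClassName() { return driverClassName; }
    public void setDriverClassName(String driverClassName) { this.driverClassName = driverClassName; }

    public String getUser() { return user; }
    public void setUser(String user) { this.user = user; }

    public String getPassword() { return password; }
    public void setPassword(String password) { this.password = password; }
}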

Step 3: Configure the data source

package com.example.config;

import org.apache.tomcat.jdbc.pool.DataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

/**
 * @Author: 凉白开不加冰
 * @Version: 0.0.1V
 * @Date: 2019/1/18
 * @Description: Hive data source configuration
 **/
@Configuration
public class HiveJdbcConfig {

    @Value("${hive.url}")
    private String url;

    @Value("${hive.driver-class-name}")
    private String driver;

    @Value("${hive.user}")
    private String user;

    @Value("${hive.password}")
    private String password;

    @Bean
    public DataSource dataSource(){
        DataSource dataSource = new DataSource();
        dataSource.setUrl(url);
        dataSource.setDriverClassName(driver);
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        return dataSource;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource){
        return new JdbcTemplate(dataSource);
    }
}
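
Note that the pool settings in application.yml (initialSize, maxActive, validationQuery, and so on) are Druid parameters, while the bean above is a Tomcat JDBC DataSource that only receives the url, driver, user, and password, so those pool settings are never applied. If you want them honored, a sketch of the same two beans built on DruidDataSource (already on the classpath via druid-spring-boot-starter) could replace HiveJdbcConfig; don't register both classes, or the bean names will clash. Only a few of the pool properties are shown here, the rest can be wired the same way.

package com.example.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;

/**
 * Druid-based replacement for HiveJdbcConfig so the pool settings
 * declared under hive.* in application.yml actually take effect.
 */
@Configuration
public class HiveDruidConfig {

    @Value("${hive.url}")
    private String url;

    @Value("${hive.driver-class-name}")
    private String driver;

    @Value("${hive.user}")
    private String user;

    @Value("${hive.password}")
    private String password;

    @Value("${hive.initialSize}")
    private int initialSize;

    @Value("${hive.maxActive}")
    private int maxActive;

    @Value("${hive.validationQuery}")
    private String validationQuery;

    @Bean
    public DataSource dataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl(url);
        dataSource.setDriverClassName(driver);
        dataSource.setUsername(user);
        dataSource.setPassword(password);
        // Pool settings read from application.yml
        dataSource.setInitialSize(initialSize);
        dataSource.setMaxActive(maxActive);
        dataSource.setValidationQuery(validationQuery);
        return dataSource;
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}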

Step 4: Write a simple query

package com.example.controller;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;
import java.util.Map;

/**
 * @Author: 凉白开不加冰
 * @Version: 0.0.1V
 * @Date: 2019/1/18
 * @Description: Using JdbcTemplate
 **/
@RestController
@RequestMapping("/hive2")
public class HiveController {

    @Autowired
    @Qualifier("jdbcTemplate")
    private JdbcTemplate jdbcTemplate;

    @RequestMapping("/list")
    public List<Map<String, Object>> list() {
        String sql = "select * from t2";
        List<Map<String, Object>> list = jdbcTemplate.queryForList(sql);
        return list;
    }
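
    // (Illustrative addition) Parameterized queries also work through
    // JdbcTemplate's placeholder binding; the "name" column and the
    // /byName endpoint below are assumptions made for this example.
    @RequestMapping("/byName")
    public List<Map<String, Object>> byName(String name) {
        String sql = "select * from t2 where name = ?";
        return jdbcTemplate.queryForList(sql, name);
    }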

}

Step 5: Start the project

package com.example;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class HiveApplication {

   public static void main(String[] args) {
      SpringApplication.run(HiveApplication.class, args);
   }

}
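
Optionally, to verify the Hive connection as soon as the application starts, a CommandLineRunner can run a trivial statement through the same JdbcTemplate. A minimal sketch; the HiveStartupCheck class name is illustrative.

package com.example;

import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

/**
 * Optional startup check: lists the tables in the default database
 * so a broken Hive connection shows up immediately in the log.
 */
@Configuration
public class HiveStartupCheck {

    @Bean
    public CommandLineRunner hiveCheck(JdbcTemplate jdbcTemplate) {
        return args -> jdbcTemplate.queryForList("show tables")
                .forEach(row -> System.out.println("hive table: " + row));
    }
}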

Step 6: Test by sending a request to localhost:8080/hive2/list

A successful request returns the data:
[
     {
         "t2.id": 1,
         "t2.name": "zhangsan"
     },
     {
         "t2.id": 2,
         "t2.name": "lisi"
     }
 ]

If the request fails with the following error:
The dir: /tmp/hive on HDFS should be writable. Current permissions are: rwxr-xr-x

you need to grant read/write permission on /tmp/hive. Run the following from the bin directory on the Hadoop server:
./hdfs dfs -chmod -R 777 /tmp
