HBase 相关增删改查操作示例

HBase 1.3.1 运行于 Hadoop 2.7.1 之上，以下为相关基本操作的代码示例。


package com.xiva.cloud.study.hbase;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;

/**
 * Sample CRUD operations against HBase 1.3.1 (on Hadoop 2.7.1).
 *
 * <p>Requires an {@code hbase-site.xml} on the classpath (or explicit
 * ZooKeeper settings in the {@link Configuration}) so the client can locate
 * the cluster.
 *
 * <p>Fixes over the original version: a single shared {@link Connection}
 * instead of one leaked connection per call, try-with-resources for
 * {@link Admin}/{@link Table} so they are closed on exceptions too, and
 * explicit UTF-8 for all String/byte[] conversions (the platform default
 * charset would corrupt the Chinese cell values on non-UTF-8 JVMs).
 */
public class HbaseUtils
{
    /** Default table used by the put/get/delete examples. */
    private static final String TABLE_NAME = "AJ_RECORD";

    /** Column families created by the table-creation examples. */
    private static final byte[] CF_BZTAG = bytes("bztag");
    private static final byte[] CF_BASEINFO = bytes("baseinfo");

    /** Row key shared by the put/get/delete examples. */
    private static final byte[] ROW_KEY = bytes("r000001");

    /**
     * Process-wide shared connection. HBase connections are heavyweight
     * (ZooKeeper session + region-location cache) and are designed to be
     * shared; creating one per call leaks resources.
     */
    private static volatile Connection connection;

    public static void main( String[] args )
    {
        try
        {
//            createTable();
//            putData();
//            getData();
//            createNamespace();
//            createTableUseNamespace();
            deleteData();
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }

    /** Creates the {@code ajbzns} namespace. */
    public static void createNamespace() throws Exception
    {
        Connection conn = getConnection();

        // try-with-resources: Admin is closed even if createNamespace throws
        try (Admin admin = conn.getAdmin())
        {
            NamespaceDescriptor descriptor = NamespaceDescriptor.create("ajbzns").build();
            admin.createNamespace(descriptor);
        }
    }

    /** Deletes the whole example row from {@code AJ_RECORD}. */
    public static void deleteData() throws Exception
    {
        try (Table table = getTable(TABLE_NAME))
        {
            table.delete(new Delete(ROW_KEY));
        }
    }

    /** Reads the example row and prints every qualifier:value pair. */
    public static void getData() throws Exception
    {
        try (Table table = getTable(TABLE_NAME))
        {
            Result result = table.get(new Get(ROW_KEY));

            for (Cell cell : result.rawCells())
            {
                // decode explicitly as UTF-8; new String(byte[]) would use
                // the platform default charset
                System.out.print(toString(CellUtil.cloneQualifier(cell)) + ":");
                System.out.println(toString(CellUtil.cloneValue(cell)));
            }
        }
    }

    /** Writes the example row (two column families, four columns). */
    public static void putData() throws Exception
    {
        try (Table table = getTable(TABLE_NAME))
        {
            Put put = new Put(ROW_KEY);
            put.addColumn(CF_BZTAG, bytes("afdz"), bytes("深圳市南山区桃园地铁站"));
            put.addColumn(CF_BZTAG, bytes("dqwp"), bytes("苹果手机"));

            put.addColumn(CF_BASEINFO, bytes("ajbh"), bytes("A4403053400002017080500003"));
            put.addColumn(CF_BASEINFO, bytes("ajmc"), bytes("苹果手机***"));

            table.put(put);
        }
    }

    /**
     * Returns a {@link Table} handle for {@code tableName}.
     *
     * <p>The caller is responsible for closing the returned table
     * (table handles are lightweight; the underlying connection is shared).
     */
    public static Table getTable(String tableName) throws Exception
    {
        return getConnection().getTable(TableName.valueOf(tableName));
    }

    /**
     * Returns the shared, lazily created {@link Connection}.
     *
     * <p>Double-checked locking on the volatile field so concurrent callers
     * create at most one connection; a closed connection is replaced.
     */
    public static Connection getConnection() throws IOException
    {
        Connection conn = connection;
        if (conn == null || conn.isClosed())
        {
            synchronized (HbaseUtils.class)
            {
                conn = connection;
                if (conn == null || conn.isClosed())
                {
                    Configuration config = HBaseConfiguration.create();
                    // cluster/ZK location comes from hbase-site.xml on the classpath
                    config.addResource("hbase-site.xml");
                    conn = ConnectionFactory.createConnection(config);
                    connection = conn;
                }
            }
        }
        return conn;
    }

    /** (Re)creates the default {@code AJ_RECORD} table. */
    public static void createTable() throws Exception
    {
        try (Admin admin = getConnection().getAdmin())
        {
            recreateTable(admin, TableName.valueOf(TABLE_NAME));
        }
    }

    /** (Re)creates {@code ajbzns:AJ_RECORD_BANK} inside the namespace. */
    public static void createTableUseNamespace() throws Exception
    {
        try (Admin admin = getConnection().getAdmin())
        {
            recreateTable(admin, TableName.valueOf("ajbzns", "AJ_RECORD_BANK"));
        }
    }

    /**
     * Drops {@code tableName} if it exists, then creates it with the two
     * example column families. Shared by both table-creation entry points.
     */
    private static void recreateTable(Admin admin, TableName tableName) throws IOException
    {
        if (admin.tableExists(tableName))
        {
            // an existing table must be disabled before it can be deleted
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        }

        HTableDescriptor desc = new HTableDescriptor(tableName);
        desc.addFamily(new HColumnDescriptor(CF_BZTAG));
        desc.addFamily(new HColumnDescriptor(CF_BASEINFO));

        admin.createTable(desc);
    }

    /** UTF-8 encode, independent of the platform default charset. */
    private static byte[] bytes(String s)
    {
        return s.getBytes(StandardCharsets.UTF_8);
    }

    /** UTF-8 decode, independent of the platform default charset. */
    private static String toString(byte[] b)
    {
        return new String(b, StandardCharsets.UTF_8);
    }
}


除了使用下面的 POM 依赖配置之外，还需要在工程的 classpath 中放入 hbase-site.xml 文件；否则必须在 config 变量中显式指定 ZooKeeper 地址，或通过环境变量指定。

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.xiva.cloud.study</groupId>
  <artifactId>hbase</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>

  <name>hbase</name>
  <url>http://maven.apache.org</url>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>2.7.1</hadoop.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-client</artifactId>
      <version>1.3.1</version>
    </dependency>
    
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-common</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-auth</artifactId>
      <version>${hadoop.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
    </dependency>

    <dependency>
      <groupId>jdk.tools</groupId>
      <artifactId>jdk.tools</artifactId>
      <version>1.7</version>
      <scope>system</scope>
      <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
</project>

转载自：xiva.iteye.com/blog/2388423