Connecting Spring Boot to an HBase Cluster

Server environment: Hadoop 2.7.3 cluster, ZooKeeper 3.4.10 cluster, HBase 1.2.5 cluster

Test environment: Windows 10, Spring Tool Suite (STS)

I. Preparation before testing

1. Because we connect to HBase from Windows, and HBase depends on Hadoop, we need the Hadoop binary package for Windows; here I use version 2.7.3. Download: https://pan.baidu.com/s/1eL6fEU9AFMfZ871xnlJ0-g (extraction code: vwel)

Download the Hadoop binary package and extract it to a folder on your disk; the path will be needed in the configuration later.

2. This test uses Maven to manage JAR dependencies. If you have not installed Maven, see: Creating a Maven project (Eclipse), packaging and running it (Windows cmd).

3. Open the hosts file at C:\Windows\System32\drivers\etc\hosts and append the cluster's IP-to-hostname mappings at the bottom of the file, for example:
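A minimal sketch, assuming the nodes are named master, slave01 and slave02 as in the HBase configuration later in this article (the IP addresses below are placeholders; substitute your cluster's real ones):

192.168.1.10 master
192.168.1.11 slave01
192.168.1.12 slave02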

II. Add dependencies

Add the following dependencies inside the <dependencies></dependencies> element of the pom.xml file:


		<!-- HBase dependencies -->
		<dependency>
			<groupId>org.apache.hbase</groupId>
			<artifactId>hbase-client</artifactId>
			<version>1.2.5</version>
			<exclusions>
				<exclusion>
					<groupId>org.slf4j</groupId>
					<artifactId>slf4j-log4j12</artifactId>
				</exclusion>
			</exclusions>
		</dependency>
		<dependency>
			<groupId>org.springframework.data</groupId>
			<artifactId>spring-data-hadoop</artifactId>
			<version>2.5.0.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hadoop</groupId>
			<artifactId>hadoop-hdfs</artifactId>
			<version>2.7.3</version>
		</dependency>
		<dependency>
			<groupId>org.springframework.data</groupId>
			<artifactId>spring-data-hadoop-core</artifactId>
			<version>2.4.0.RELEASE</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hbase</groupId>
			<artifactId>hbase</artifactId>
			<version>1.2.5</version>
			<type>pom</type>
		</dependency>
		<!-- end HBase dependencies -->
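The Test controller shown later uses Spring MVC annotations (@Controller, @RequestMapping), so the project also needs a web starter on the classpath. If it is not already present, a typical Spring Boot entry (with the version managed by the Spring Boot parent POM) looks like this:

		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
		</dependency>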

III. Create the HBase utility class

Create a new utility class named HBaseUtils, as follows:

HBaseUtils.java:

package com.qcxy.helper.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * HBase utility class. Author: zzq
 */
public class HBaseUtils {
	private static Connection connection;
	private static Configuration configuration;
	private static HBaseUtils hBaseUtils;
	private static Properties properties;

	/**
	 * Create the connection and initialize the environment configuration
	 */
	public void init() {
		properties = System.getProperties();
		// Instantiate the HBase configuration class
		if (configuration == null) {
			configuration = HBaseConfiguration.create();
		}
		try {
			// Point to the local Hadoop binary package; change this to the path you extracted to
			properties.setProperty("hadoop.home.dir", "E:\\springboot\\hadoop-common-2.7.3-bin-master");
			// Hostnames of the ZooKeeper quorum
			configuration.set("hbase.zookeeper.quorum", "master,slave01,slave02");
			// HBase master address (16000 is the default master port in HBase 1.x; adjust to your cluster)
			configuration.set("hbase.master", "master:16000");
			// Port the client uses to connect to ZooKeeper
			configuration.set("hbase.zookeeper.property.clientPort", "2181");
			// HBase RPC timeout; the default is 60s (60000)
			configuration.setInt("hbase.rpc.timeout", 20000);
			// Maximum number of client retries; the default is 35
			configuration.setInt("hbase.client.retries.number", 10);
			// Total timeout for one client data operation, which may span several RPC calls; the default is 2 min
			configuration.setInt("hbase.client.operation.timeout", 30000);
			// Total timeout for a single scan RPC call, from request to response
			configuration.setInt("hbase.client.scanner.timeout.period", 200000);
			// Get the HBase connection object
			if (connection == null || connection.isClosed()) {
				connection = ConnectionFactory.createConnection(configuration);
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Close the connection
	 */
	public static void close() {
		try {
			if (connection != null)
				connection.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Private no-arg constructor
	 */
	private HBaseUtils() {
	}

	/**
	 * Thread-safe singleton accessor; guarantees a single shared connection
	 * 
	 * @return the singleton instance
	 */
	public static HBaseUtils getInstance() {
		if (hBaseUtils == null) {
			synchronized (HBaseUtils.class) {
				if (hBaseUtils == null) {
					hBaseUtils = new HBaseUtils();
					hBaseUtils.init();
				}
			}
		} else if (connection == null || connection.isClosed()) {
			// Reopen the connection if a previous caller closed it
			hBaseUtils.init();
		}
		return hBaseUtils;
	}

	/**
	 * Insert a single row
	 * 
	 * @param tablename
	 * @param rowkey
	 * @param family
	 * @param columns column names and values
	 * @throws IOException
	 */
	public static void put(String tablename, String rowkey, String family, Map<String, String> columns)
			throws IOException {
		Table table = null;
		try {
			table = connection.getTable(TableName.valueOf(tablename));
			Put put = new Put(rowkey.getBytes());
			for (Map.Entry<String, String> entry : columns.entrySet()) {
				put.addColumn(family.getBytes(), entry.getKey().getBytes(), entry.getValue().getBytes());
			}
			table.put(put);
		} finally {
			// Let IOExceptions propagate to the caller; closing the shared
			// connection is the caller's responsibility
			if (table != null)
				table.close();
		}
	}

	/**
	 * Get a single row
	 * 
	 * @param tablename
	 * @param row
	 * @return the matching Result
	 * @throws IOException
	 */
	public static Result getRow(String tablename, String row) throws IOException {
		Table table = null;
		Result result = null;
		try {
			table = connection.getTable(TableName.valueOf(tablename));
			Get get = new Get(row.getBytes());
			result = table.get(get);
		} finally {
			if (table != null)
				table.close();
		}
		return result;
	}

	/**
	 * Query multiple rows
	 * 
	 * @param tablename
	 * @param rows
	 * @return one Result per requested row
	 * @throws IOException
	 */
	public static Result[] getRows(String tablename, List<byte[]> rows) throws IOException {
		Table table = null;
		List<Get> gets = null;
		Result[] results = null;
		try {
			table = connection.getTable(TableName.valueOf(tablename));
			gets = new ArrayList<Get>();
			for (byte[] row : rows) {
				if (row != null) {
					gets.add(new Get(row));
				}
			}
			if (gets.size() > 0) {
				results = table.get(gets);
			}
		} finally {
			if (table != null)
				table.close();
		}
		return results;
	}

	/**
	 * Scan the whole table
	 * 
	 * @param tablename
	 * @return a scanner over the table; the caller must close it
	 */
	public static ResultScanner get(String tablename) throws IOException {
		Table table = null;
		ResultScanner results = null;
		try {
			table = connection.getTable(TableName.valueOf(tablename));
			Scan scan = new Scan();
			// Fetch up to 1000 rows per RPC to cut down round trips
			scan.setCaching(1000);
			results = table.getScanner(scan);
		} finally {
			if (table != null)
				table.close();
		}
		return results;
	}

	/**
	 * Delete data
	 * 
	 * @param tablename
	 * @param family
	 * @param column
	 * @param row
	 * @throws IOException
	 */
	public static void delete(String tablename, String family, String column, String row) throws IOException {
		Table table = null;
		try {
			table = connection.getTable(TableName.valueOf(tablename));
			Delete del = new Delete(row.getBytes());
			// Delete all versions of the given column
			del.addColumns(family.getBytes(), column.getBytes());
			table.delete(del);
		} finally {
			if (table != null)
				table.close();
		}
	}

}

IV. Operating HBase

1. Create the table

On the server, change to the bin directory of the HBase installation and run the following command to enter the HBase shell:

$ hbase shell

In the HBase shell, execute the following command to create a table. A table must be created with at least one column family; the command below creates a table named person with a single column family named info.

create 'person','info'

If the command succeeds, the shell prints a confirmation.

We can also use the list command to verify that the table exists. The resulting table structure is simply a row key plus the single info column family.
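A session might look roughly like this (prompt numbers and timings will differ; output abbreviated):

hbase(main):001:0> list
TABLE
person
1 row(s)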

2. Insert data

public void putdata() {
	// Get the singleton instance (initializes the connection)
	HBaseUtils.getInstance();
	try {
		Map<String, String> columns = new HashMap<String, String>();
		columns.put("name", "zzq"); // column name and value
		columns.put("age", "22"); // column name and value
		// Add name and age columns with values to the info family of row "1"
		HBaseUtils.put("person", "1", "info", columns);
		System.out.println("Insert succeeded");
	} catch (IOException e) {
		e.printStackTrace();
		System.out.println("Insert failed");
	} finally {
		HBaseUtils.close();
	}
}

After running this method, the table contains the row we inserted.
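Scanning the table from the HBase shell should now show the new row; abbreviated, with timestamps elided, the output looks roughly like:

hbase(main):002:0> scan 'person'
ROW                   COLUMN+CELL
 1                    column=info:age, timestamp=..., value=22
 1                    column=info:name, timestamp=..., value=zzq
1 row(s)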

3. Update data

Updating data works the same way as inserting it: running the code above with a different value for an existing row key and column overwrites the original value.
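For illustration, a minimal sketch (a hypothetical updateData method, using the same HBaseUtils helper and the same imports as the putdata method above) that changes the age value of row 1:

	public void updateData() throws IOException {
		HBaseUtils.getInstance();
		Map<String, String> columns = new HashMap<String, String>();
		// Re-putting an existing row key/column stores a new version;
		// reads return the newest value, so this effectively overwrites "22"
		columns.put("age", "23");
		HBaseUtils.put("person", "1", "info", columns);
		HBaseUtils.close();
	}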

4. Retrieve data

public void getTableAllData() {
	HBaseUtils.getInstance();
	ResultScanner results = null;
	try {
		results = HBaseUtils.get("person"); // table name

		for (Result result : results) {
			// Nested map of family -> column -> timestamp -> value
			NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> navigableMap = result.getMap();
			for (byte[] family : navigableMap.keySet()) {
				System.out.println("Column family: " + new String(family));
				for (byte[] column : navigableMap.get(family).keySet()) {
					System.out.println("Column: " + new String(column));
					for (Long t : navigableMap.get(family).get(column).keySet()) {
						System.out.println("Value: " + new String(navigableMap.get(family).get(column).get(t)));
					}
				}
			}
		}
	} catch (IOException e) {
		e.printStackTrace();
	} finally {
		if (results != null)
			results.close();
		HBaseUtils.close();
	}
}

After successful execution it prints output like the following:
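With the single row inserted earlier, the console output should look roughly like this (columns come back in lexicographic order):

Column family: info
Column: age
Value: 22
Column: name
Value: zzq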

5. Delete data

	public void delete() {
		HBaseUtils.getInstance();
		try {
			// Delete the value of the name column in the info family of row "1" of the person table
			HBaseUtils.delete("person", "info", "name", "1");
			System.out.println("Delete succeeded");
		} catch (IOException e) {
			System.out.println("Delete failed");
			e.printStackTrace();
		}
	}

After the delete succeeds, looking the table up again shows that the name column is gone.
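A shell scan confirms it; only the age cell remains in row 1 (output abbreviated, timestamp elided):

hbase(main):003:0> scan 'person'
ROW                   COLUMN+CELL
 1                    column=info:age, timestamp=..., value=22
1 row(s)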

Integrating the above operations into a single controller:

Test.java:

package com.qcxy.helper.Controller;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import java.io.IOException;
import java.util.*;
import com.qcxy.helper.hbase.HBaseUtils;

@Controller
public class Test {

	@ResponseBody
	@RequestMapping("/hello")
	public String hello() {
		return "hello";
	}

	/**
	 * Insert data
	 */
	@ResponseBody
	@RequestMapping("/putdata")
	public void putdata() {
		HBaseUtils.getInstance();
		try {
			Map<String, String> columns = new HashMap<String, String>();
			columns.put("name", "zzq");
			columns.put("age", "22");
			HBaseUtils.put("person", "1", "info", columns);
			System.out.println("Insert succeeded");
		} catch (IOException e) {
			e.printStackTrace();
			System.out.println("Insert failed");
		} finally {
			HBaseUtils.close();
		}
	}

	/**
	 * Scan the whole table
	 */
	@ResponseBody
	@RequestMapping("/getTableAllData")
	public void getTableAllData() {
		HBaseUtils.getInstance();
		ResultScanner results = null;
		try {
			results = HBaseUtils.get("person");

			for (Result result : results) {
				NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> navigableMap = result.getMap();
				for (byte[] family : navigableMap.keySet()) {
					System.out.println("Column family: " + new String(family));
					for (byte[] column : navigableMap.get(family).keySet()) {
						System.out.println("Column: " + new String(column));
						for (Long t : navigableMap.get(family).get(column).keySet()) {
							System.out.println("Value: " + new String(navigableMap.get(family).get(column).get(t)));
						}
					}
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			if (results != null)
				results.close();
			HBaseUtils.close();
		}
	}

	/**
	 * Delete data
	 */
	@ResponseBody
	@RequestMapping("/deletedata")
	public void delete() {
		HBaseUtils.getInstance();
		try {
			HBaseUtils.delete("person", "info", "name", "1");
			System.out.println("Delete succeeded");
		} catch (IOException e) {
			System.out.println("Delete failed");
			e.printStackTrace();
		}
	}

}
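With the application running (Spring Boot serves on port 8080 by default), the three endpoints can be exercised with curl; the success and failure messages are printed to the application console:

curl http://localhost:8080/putdata
curl http://localhost:8080/getTableAllData
curl http://localhost:8080/deletedata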

This completes the test.

Source: blog.csdn.net/zzqaaasss/article/details/89449807