HBase Java API development (create a table, insert data, query data, filter queries, delete data by rowkey, drop a table)

Step 1: Create a Maven project and import the dependencies

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>
    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0-mr1-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.2.0-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>1.2.0-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
            <version>6.14.3</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>RELEASE</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                    <!-- <verbose>true</verbose> -->
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.2</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

Step 2: Use the Java API to operate on HBase table data

1. Create a table, insert data, and query data

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class HbaseAPI {
    /**
     * Create the table "myuser"
     *
     * @throws IOException
     */
    public static void createTable() throws IOException {
        //create a configuration object and point it at the ZooKeeper quorum
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        //get a connection
        Connection connection = ConnectionFactory.createConnection(conf);
        //get the admin client
        Admin admin = connection.getAdmin();
        //describe the table
        HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf("myuser"));
        //describe the column families
        HColumnDescriptor hColumnDescriptor1 = new HColumnDescriptor("f1");
        HColumnDescriptor hColumnDescriptor2 = new HColumnDescriptor("f2");
        //attach the column families to the table descriptor
        hTableDescriptor.addFamily(hColumnDescriptor1);
        hTableDescriptor.addFamily(hColumnDescriptor2);
        //create the table
        admin.createTable(hTableDescriptor);
        admin.close();
        //close the connection
        connection.close();
    }

    /**
     * Insert a row into the table
     *
     * @throws IOException
     */
    public static void putData() throws IOException {

        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);

        //get the table
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        //create a Put object with the rowkey
        Put put = new Put("001".getBytes());
        put.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes("1"));
        put.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("zhangsan"));
        put.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes("18"));
        //insert the data
        myuser.put(put);
        //close the table and the connection
        myuser.close();
        connection.close();
    }

    /**
     * Load a batch of rows into HBase for the query examples below
     *
     * @throws IOException
     */
    public static void putDatas() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);

        //get the table
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        //create one Put object per rowkey
        //note: this rowkey has only 3 characters, so it sorts after the 4-character "000x" rowkeys below
        Put put = new Put("001".getBytes());

        put.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(1));
        put.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("曹操"));
        put.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(30));
        put.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("沛国谯县"));
        put.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("16888888888"));
        put.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("helloworld"));

        Put put2 = new Put("0003".getBytes());
        put2.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(2));
        put2.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("刘备"));
        put2.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(32));
        put2.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put2.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("幽州涿郡涿县"));
        put2.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("17888888888"));
        put2.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("talk is cheap , show me the code"));

        Put put3 = new Put("0004".getBytes());
        put3.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(3));
        put3.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("孙权"));
        put3.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(35));
        put3.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put3.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("下邳"));
        put3.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("12888888888"));
        put3.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("what are you 弄啥嘞!"));

        Put put4 = new Put("0005".getBytes());
        put4.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(4));
        put4.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("诸葛亮"));
        put4.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(28));
        put4.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put4.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("四川隆中"));
        put4.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("14888888888"));
        put4.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("出师表你背了嘛"));

        //note: this reuses rowkey 0005, so these cells overwrite put4's cells for the same columns
        Put put5 = new Put("0005".getBytes());
        put5.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(5));
        put5.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("司马懿"));
        put5.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(27));
        put5.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put5.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("哪里人有待考究"));
        put5.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("15888888888"));
        put5.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("跟诸葛亮死掐"));

        Put put6 = new Put("0006".getBytes());
        put6.addColumn("f1".getBytes(), "id".getBytes(), Bytes.toBytes(5));
        put6.addColumn("f1".getBytes(), "name".getBytes(), Bytes.toBytes("xiaobubu—吕布"));
        put6.addColumn("f1".getBytes(), "age".getBytes(), Bytes.toBytes(28));
        put6.addColumn("f2".getBytes(), "sex".getBytes(), Bytes.toBytes("1"));
        put6.addColumn("f2".getBytes(), "address".getBytes(), Bytes.toBytes("内蒙人"));
        put6.addColumn("f2".getBytes(), "phone".getBytes(), Bytes.toBytes("15788888888"));
        put6.addColumn("f2".getBytes(), "say".getBytes(), Bytes.toBytes("貂蝉去哪了"));

        List<Put> datas = new ArrayList<>();
        Collections.addAll(datas, put, put2, put3, put4, put5, put6);
        //insert the whole batch
        myuser.put(datas);
        //close the table and the connection
        myuser.close();
        connection.close();
    }

    /**
     * Query all columns of a single row by rowkey
     * (look up the row whose rowkey is 0004)
     *
     * @throws IOException
     */
    public static void getData() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));

        //set the row to read (rowkey 0004)
        Get get = new Get("0004".getBytes());

        //restrict the get to one column family
//        get.addFamily("f1".getBytes());

        //restrict the get to one column of one column family
        get.addColumn("f1".getBytes(), "name".getBytes());

        //read the data; one Result is one row
        Result result = myuser.get(get);
        Cell[] cells = result.rawCells();
        //print the column family, column name, rowkey and value of every cell
        for (Cell cell : cells) {
            System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)));
            System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)));
            System.out.println(Bytes.toString(CellUtil.cloneRow(cell)));
            System.out.println(Bytes.toString(CellUtil.cloneValue(cell)));
        }
        myuser.close();
        connection.close();
    }

    /**
     * Scan a range of rows between a start rowkey and a stop rowkey
     *
     * @throws IOException
     */
    public static void scanRowKey() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));

        Scan scan = new Scan();
        scan.setStartRow("0001".getBytes());
        //the stop row is exclusive
        scan.setStopRow("0006".getBytes());

        //read the data; one Result is one row
        ResultScanner scanner = myuser.getScanner(scan);
        for (Result result : scanner) {
            Cell[] cells = result.rawCells();
            for (Cell cell : cells) {
                System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)) + "   " + Bytes.toString(CellUtil.cloneQualifier(cell)));
            }
            //age was written with Bytes.toBytes(int) in putDatas, so decode it with Bytes.toInt
            System.out.println(Bytes.toInt(result.getValue("f1".getBytes(), "age".getBytes())));
        }
        myuser.close();
        connection.close();
    }

    /**
     * Full table scan with Scan
     *
     * @throws IOException
     */
    public static void scanAllData() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));

        Scan scan = new Scan();
        ResultScanner scanner = myuser.getScanner(scan);
        for (Result result : scanner) {
            //note: rows written by putDatas store id and age as int, so those values only decode cleanly with Bytes.toInt
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(), "id".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(), "name".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(), "age".getBytes())));
        }
        myuser.close();
        connection.close();
    }

    public static void main(String[] args) throws IOException {
//        createTable();
//        putData();
//        putDatas();
//        getData();
//        scanRowKey();
        scanAllData();
    }

}

2. Filter queries

HBase provides many kinds of filters, but they fall into two broad categories: comparison filters and dedicated (special) filters.

A filter evaluates its condition on the server side, so only the data that satisfies the condition is sent back to the client.

HBase filter comparison operators:

LESS  <

LESS_OR_EQUAL <=

EQUAL =

NOT_EQUAL <>

GREATER_OR_EQUAL >=

GREATER >

NO_OP  excludes everything

HBase comparators (they specify how the comparison is performed):

BinaryComparator compares against the given byte array, byte by byte, using Bytes.compareTo(byte[])

BinaryPrefixComparator does the same, but only compares the left-hand (prefix) bytes of the data

NullComparator checks whether the given value is null

BitComparator performs a bitwise comparison

RegexStringComparator matches the value against a regular expression, and only supports EQUAL and NOT_EQUAL

SubstringComparator checks whether the given substring occurs in the value
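
To make the pairing of a comparison operator and a comparator concrete, here is a minimal sketch (not from the original post) that scans the same myuser table used throughout this article and keeps only rows whose f2:say value matches a regular expression; the regex "hello.*" is just an illustrative choice.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorSketch {
    public static void main(String[] args) throws Exception {
        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table myuser = connection.getTable(TableName.valueOf("myuser"))) {
            //EQUAL + RegexStringComparator: keep rows whose f2:say value matches the regex
            SingleColumnValueFilter filter = new SingleColumnValueFilter(
                    "f2".getBytes(), "say".getBytes(),
                    CompareFilter.CompareOp.EQUAL, new RegexStringComparator("hello.*"));
            Scan scan = new Scan();
            scan.setFilter(filter);
            try (ResultScanner scanner = myuser.getScanner(scan)) {
                for (Result result : scanner) {
                    //print the rowkey of every matching row
                    System.out.println(Bytes.toString(result.getRow()));
                }
            }
        }
    }
}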

2.1. Comparison filters

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HbaseAPI2 {

    /**
     * Comparison filters
     *
     * @throws IOException
     */
    public static void comparisonFilter() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        Scan scan = new Scan();

        //1. comparison filters
//        //1.1 rowkey filter RowFilter (query the row whose rowkey = 0003)
//        RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("0003")));
//        scan.setFilter(rowFilter);

//        //1.2 column family filter FamilyFilter (query the data of every column family that sorts less than or equal to "f2")
//        FamilyFilter familyFilter = new FamilyFilter(CompareFilter.CompareOp.LESS_OR_EQUAL, new SubstringComparator("f2"));
//        scan.setFilter(familyFilter);

//        //1.3 column (qualifier) filter QualifierFilter (only return the name column)
//        QualifierFilter qualifierFilter = new QualifierFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator("name"));
//        scan.setFilter(qualifierFilter);

        //1.4 value filter ValueFilter (return every cell whose value contains "i")
        ValueFilter valueFilter = new ValueFilter(CompareFilter.CompareOp.EQUAL, new SubstringComparator("i"));
        scan.setFilter(valueFilter);

        ResultScanner scanner = myuser.getScanner(scan);
        for (Result result : scanner) {
            //print the rowkey
            System.out.println(Bytes.toString(result.getRow()));
            //print the values of the given column family and columns
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"id".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"name".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"age".getBytes())));
        }
        myuser.close();
        connection.close();
    }

    public static void main(String[] args) throws IOException {
        comparisonFilter();
    }

}

2.2. Dedicated (special) filters

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HbaseAPI3 {

    /**
     * Dedicated (special) filters
     *
     * @throws IOException
     */
    public static void specialFilter() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        Scan scan = new Scan();

//        //2.1 single column value filter SingleColumnValueFilter (returns every whole row that satisfies the condition)
//        SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter("f1".getBytes(), "name".getBytes(), CompareFilter.CompareOp.EQUAL, "刘备".getBytes());
//        scan.setFilter(singleColumnValueFilter);

//        //2.2 single column value exclude filter SingleColumnValueExcludeFilter (the opposite of SingleColumnValueFilter: the matched column itself is excluded, all other columns are returned)
//        SingleColumnValueExcludeFilter singleColumnValueExcludeFilter = new SingleColumnValueExcludeFilter("f1".getBytes(), "name".getBytes(), CompareFilter.CompareOp.EQUAL, "孙权".getBytes());
//        scan.setFilter(singleColumnValueExcludeFilter);

//        //2.3 rowkey prefix filter PrefixFilter (query all rowkeys that start with 001)
//        PrefixFilter prefixFilter = new PrefixFilter("001".getBytes());
//        scan.setFilter(prefixFilter);

        //2.4 paging filter PageFilter (the pageSize argument is the number of rowkeys to return)
        PageFilter pageFilter = new PageFilter(3);
        scan.setFilter(pageFilter);

        ResultScanner scanner = myuser.getScanner(scan);
        for (Result result : scanner) {
            //print the rowkey
            System.out.println(Bytes.toString(result.getRow()));
            //print the values of the given column family and columns
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"id".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"name".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(),"age".getBytes())));
        }
        myuser.close();
        connection.close();
    }

    public static void main(String[] args) throws IOException {
        specialFilter();
    }

}

Paging filter PageFilter

The code below pages through the table: for page 1 it simply scans pageSize rows from the start of the table; for page N it first scans (pageNum - 1) * pageSize + 1 rows to find the rowkey where page N begins, then runs a second scan of pageSize rows starting from that rowkey.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HbaseAPI4 {

    /**
     * Paging with PageFilter
     * @throws IOException
     */
    public static void pageFilter() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum","node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(conf);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        int pageNum = 4;
        int pageSize = 2;
        Scan scan = new Scan();
        if (pageNum == 1) {
            //page 1: scan pageSize rows from the very beginning of the table
            //(note: setMaxResultSize limits the result size in bytes, not rows; the row count is enforced by PageFilter)
            PageFilter filter = new PageFilter(pageSize);
            scan.setStartRow(Bytes.toBytes(""));
            scan.setFilter(filter);
            scan.setMaxResultSize(pageSize);
            ResultScanner scanner = myuser.getScanner(scan);
            for (Result result : scanner) {
                //print the rowkey
                System.out.println(Bytes.toString(result.getRow()));
                //print the values of selected columns
//            System.out.println(Bytes.toInt(result.getValue("f1".getBytes(), "id".getBytes())));
                System.out.println(Bytes.toString(result.getValue("f1".getBytes(), "name".getBytes())));
                //System.out.println(Bytes.toString(result.getValue("f2".getBytes(), "phone".getBytes())));
            }

        } else {
            String startRowKey = "";
            //first scan: read (pageNum - 1) * pageSize + 1 rows; the last rowkey seen is where the requested page starts
            PageFilter filter = new PageFilter((pageNum - 1) * pageSize + 1);
            scan.setStartRow(startRowKey.getBytes());
            scan.setMaxResultSize((pageNum - 1) * pageSize + 1);
            scan.setFilter(filter);
            ResultScanner scanner = myuser.getScanner(scan);
            for (Result result : scanner) {
                byte[] row = result.getRow();
                startRowKey = new String(row);
            }
            //second scan: read pageSize rows starting from the rowkey found above
            Scan scan2 = new Scan();
            scan2.setStartRow(startRowKey.getBytes());
            scan2.setMaxResultSize(Long.valueOf(pageSize));
            PageFilter filter2 = new PageFilter(pageSize);
            scan2.setFilter(filter2);

            ResultScanner scanner1 = myuser.getScanner(scan2);
            for (Result result : scanner1) {
                byte[] row = result.getRow();
                System.out.println(new String(row));
            }
        }
        myuser.close();
        connection.close();
    }

    public static void main(String[] args) throws IOException {
        pageFilter();
    }

}

2.3. Combining multiple filters with FilterList

Requirement: use a SingleColumnValueFilter to select the rows whose f1:name column is 刘备 (Liu Bei), and at the same time require the rowkey to start with the prefix 00 (PrefixFilter). A note on FilterList's AND/OR semantics follows the code.

/**
     * Combining multiple filters
     */
    @Test
    public void manyFilter() throws IOException {
        //get a connection
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        Scan scan = new Scan();
        //the default FilterList operator is MUST_PASS_ALL, i.e. every filter must match (logical AND)
        FilterList filterList = new FilterList();

        SingleColumnValueFilter singleColumnValueFilter = new SingleColumnValueFilter("f1".getBytes(), "name".getBytes(), CompareFilter.CompareOp.EQUAL, "刘备".getBytes());
        PrefixFilter prefixFilter = new PrefixFilter("00".getBytes());
        filterList.addFilter(singleColumnValueFilter);
        filterList.addFilter(prefixFilter);
        scan.setFilter(filterList);
        ResultScanner scanner = myuser.getScanner(scan);
        for (Result result : scanner) {
            //print the rowkey
            System.out.println(Bytes.toString(result.getRow()));
            //print the values of selected columns
//            System.out.println(Bytes.toInt(result.getValue("f1".getBytes(), "id".getBytes())));
            System.out.println(Bytes.toString(result.getValue("f1".getBytes(), "name".getBytes())));
            //System.out.println(Bytes.toString(result.getValue("f2".getBytes(), "phone".getBytes())));
        }
        myuser.close();

    }
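
As noted in the code, FilterList defaults to MUST_PASS_ALL, so the two filters above are combined with a logical AND. If you want OR semantics instead, pass an operator to the constructor; a minimal sketch (a hypothetical drop-in replacement for the FilterList construction in manyFilter above):

        //MUST_PASS_ONE keeps rows that match at least one of the filters (logical OR)
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        filterList.addFilter(singleColumnValueFilter);
        filterList.addFilter(prefixFilter);
        scan.setFilter(filterList);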

3. Delete data by rowkey

/**
     * Delete a whole row by rowkey
     */
    @Test
    public void deleteByRowKey() throws IOException {
        //get a connection
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        Table myuser = connection.getTable(TableName.valueOf("myuser"));
        //a Delete built with only a rowkey removes every cell of that row
        Delete delete = new Delete("0001".getBytes());
        myuser.delete(delete);
        myuser.close();
        connection.close();
    }
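
A Delete does not have to remove the whole row. As a minimal sketch (same myuser table as above; the rowkey and column choices are just examples), you can restrict it to a single column or a whole column family before calling delete:

        Delete delete = new Delete("0003".getBytes());
        //remove only the latest version of the f1:age cell of row 0003
        delete.addColumn("f1".getBytes(), "age".getBytes());
        //remove every cell of row 0003 in the f2 column family
        delete.addFamily("f2".getBytes());
        myuser.delete(delete);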

4. Drop the table

@Test
    public void deleteTable() throws IOException {
        //get a connection
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum","node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        Admin admin = connection.getAdmin();
        //a table must be disabled before it can be deleted
        admin.disableTable(TableName.valueOf("myuser"));
        admin.deleteTable(TableName.valueOf("myuser"));
        admin.close();
        connection.close();
    }
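
To make the drop idempotent you can guard it with an existence check; a minimal sketch using the same Admin instance as above (tableExists and isTableEnabled are part of the Admin API):

        TableName tableName = TableName.valueOf("myuser");
        if (admin.tableExists(tableName)) {
            //disable first if the table is still enabled, then delete it
            if (admin.isTableEnabled(tableName)) {
                admin.disableTable(tableName);
            }
            admin.deleteTable(tableName);
        }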

 


Source: blog.csdn.net/qq_44065303/article/details/103633878