HBase Java API, Including Row Counting with a Coprocessor

This post walks through the basic HBase Java client API (creating tables, puts, gets, scans, and deletes) and shows how to count the rows of a table with the AggregateImplementation coprocessor.

package com.zy;
import java.io.IOException;

import org.apache.commons.lang.time.StopWatch;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class HbaseTable {

    // Static configuration shared by all methods (reads hbase-site.xml from the classpath)
    private static Configuration conf = HBaseConfiguration.create();

    // Create a table (tableName: table name; familys: list of column families)
    public static void createTable(String tableName, String[] familys) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tableName)) {
            System.out.println(tableName + " already exists!");
        } else {
            HTableDescriptor descr = new HTableDescriptor(TableName.valueOf(tableName));
            for (String family : familys) {
                descr.addFamily(new HColumnDescriptor(family)); // add a column family
            }
            admin.createTable(descr); // create the table
            System.out.println(tableName + " created successfully!");
        }
    }
    // Insert data (tableName: table name; rowKey: row key; familyName: column family; columnName: qualifier; value: cell value)
    public static void addData(String tableName, String rowKey, String familyName,
            String columnName, String value) throws IOException {
        HTable table = new HTable(conf, Bytes.toBytes(tableName)); // HTable handles record-level operations such as put/get/scan/delete
        Put put = new Put(Bytes.toBytes(rowKey)); // set the row key
        put.add(Bytes.toBytes(familyName), Bytes.toBytes(columnName), Bytes.toBytes(value));
        table.put(put);
        System.out.println("Added data successfully! rowKey: " + rowKey + ", column: " + familyName + ":" + columnName + ", cell: " + value);
    }
    // Scan the whole table (tableName: table name)
    public static void getResultScann(String tableName) throws IOException {
        Scan scan = new Scan();
        ResultScanner rs = null;
        HTable table = new HTable(conf, Bytes.toBytes(tableName));
        try {
            rs = table.getScanner(scan);
            for (Result r : rs) {
                for (KeyValue kv : r.list()) {
                    System.out.println("row:" + Bytes.toString(kv.getRow()));
                    System.out.println("family:" + Bytes.toString(kv.getFamily()));
                    System.out.println("qualifier:" + Bytes.toString(kv.getQualifier()));
                    System.out.println("value:" + Bytes.toString(kv.getValue()));
                    System.out.println("timestamp:" + kv.getTimestamp());
                    System.out.println("-------------------------------------------");
                }
            }
        } finally {
            if (rs != null) {
                rs.close();
            }
        }
    }
    // Read a single column of a row (tableName: table name; rowKey: row key; familyName: column family; columnName: qualifier)
    public static void getResultByColumn(String tableName, String rowKey, String familyName,
            String columnName) throws IOException {
        HTable table = new HTable(conf, Bytes.toBytes(tableName));
        Get get = new Get(Bytes.toBytes(rowKey));
        get.addColumn(Bytes.toBytes(familyName), Bytes.toBytes(columnName)); // fetch only the given family and qualifier
        Result result = table.get(get);
        for (KeyValue kv : result.list()) {
            System.out.println("family:" + Bytes.toString(kv.getFamily()));
            System.out.println("qualifier:" + Bytes.toString(kv.getQualifier()));
            System.out.println("value:" + Bytes.toString(kv.getValue()));
            System.out.println("timestamp:" + kv.getTimestamp());
            System.out.println("-------------------------------------------");
        }
    }
    // Update a single column (tableName: table name; rowKey: row key; familyName: column family; columnName: qualifier; value: new value)
    public static void updateTable(String tableName, String rowKey,
            String familyName, String columnName, String value) throws IOException {
        HTable table = new HTable(conf, Bytes.toBytes(tableName));
        Put put = new Put(Bytes.toBytes(rowKey));
        put.add(Bytes.toBytes(familyName), Bytes.toBytes(columnName), Bytes.toBytes(value));
        table.put(put);
        System.out.println("Updated table successfully!");
    }
    // Delete a specific cell (all versions of the given column in the given row)
    public static void deleteColumn(String tableName, String rowKey,
            String familyName, String columnName) throws IOException {
        HTable table = new HTable(conf, Bytes.toBytes(tableName));
        Delete deleteColumn = new Delete(Bytes.toBytes(rowKey));
        deleteColumn.deleteColumns(Bytes.toBytes(familyName), Bytes.toBytes(columnName));
        table.delete(deleteColumn);
        System.out.println("rowkey:" + rowKey + ", column:" + familyName + ":" + columnName + " deleted!");
    }
    // Delete an entire row (tableName: table name; rowKey: row key)
    public static void deleteAllColumn(String tableName, String rowKey) throws IOException {
        HTable table = new HTable(conf, Bytes.toBytes(tableName));
        Delete deleteAll = new Delete(Bytes.toBytes(rowKey));
        table.delete(deleteAll);
        System.out.println("rowkey:" + rowKey + " is deleted!");
    }
    // Delete a table (tableName: table name)
    public static void deleteTable(String tableName) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        admin.disableTable(tableName);
        admin.deleteTable(tableName);
        System.out.println(tableName + " is deleted!");
    }


    // Count rows with the AggregateImplementation coprocessor (tablename: table name)
    public static void rowCount(String tablename) throws Throwable {
        // conf has already been created above
        HBaseAdmin admin = new HBaseAdmin(conf);
        TableName name = TableName.valueOf(tablename);
        // Disable the table, register the coprocessor, then re-enable the table
        admin.disableTable(name);
        HTableDescriptor descriptor = admin.getTableDescriptor(name);
        String coprocessorClass = "org.apache.hadoop.hbase.coprocessor.AggregateImplementation";
        if (!descriptor.hasCoprocessor(coprocessorClass)) {
            descriptor.addCoprocessor(coprocessorClass);
        }
        admin.modifyTable(name, descriptor);
        admin.enableTable(name);

        // Time the count
        StopWatch stopWatch = new StopWatch();
        stopWatch.start();

        // Raise the RPC timeout; counting a large table can take a while
        conf.setLong("hbase.rpc.timeout", 600000);
        // Increase the scanner caching so fewer RPCs are needed
        conf.setLong("hbase.client.scanner.caching", 1000);
        Configuration configuration = HBaseConfiguration.create(conf);
        AggregationClient aggregationClient = new AggregationClient(configuration);
        Scan scan = new Scan();
        long rowCount = aggregationClient.rowCount(name, new LongColumnInterpreter(), scan);
        System.out.println("rowcount is " + rowCount);
        System.out.println("elapsed time (ms): " + stopWatch.getTime());
    }
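    // Alternative to the per-table registration above: AggregateImplementation can also be loaded
    // on every region server by adding its class name to the hbase.coprocessor.region.classes
    // property in hbase-site.xml, which avoids the disable/modify/enable cycle.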

    public static void main(String[] args) throws Exception {
        // Create the table
        String tableName = "test";
        String[] family = { "f1", "f2" };
        createTable(tableName, family);
        // Insert data into the table
        String[] rowKey = { "r1", "r2" };
        String[] columnName = { "c1", "c2", "c3" };
        String[] value = { "value1", "value2", "value3", "value4", "value5", "value6" };
        addData(tableName, rowKey[0], family[0], columnName[0], value[0]);
        addData(tableName, rowKey[0], family[0], columnName[1], value[1]);
        addData(tableName, rowKey[0], family[1], columnName[2], value[2]);
        addData(tableName, rowKey[1], family[0], columnName[0], value[3]);
        addData(tableName, rowKey[1], family[0], columnName[1], value[4]);
        addData(tableName, rowKey[1], family[1], columnName[2], value[5]);
        // Scan the whole table
        getResultScann(tableName);
        // Update a specific cell
        updateTable(tableName, rowKey[0], family[0], columnName[0], "update value");
        // Read back the column that was just updated
        getResultByColumn(tableName, rowKey[0], family[0], columnName[0]);
        // Delete one column
        deleteColumn(tableName, rowKey[0], family[0], columnName[1]);
        // Scan the whole table again
        getResultScann(tableName);
        // Delete an entire row
        deleteAllColumn(tableName, rowKey[0]);
        // Scan the whole table again
        getResultScann(tableName);
        // Delete the table
        deleteTable(tableName);
    }

}
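The demo above uses the pre-1.0 client classes (HBaseAdmin, HTable, KeyValue), which later HBase releases deprecate in favor of Connection, Table and Cell. For comparison, a table can also be counted without any coprocessor by scanning it with a FirstKeyOnlyFilter, so that only the first cell of every row is sent back to the client. The following is a minimal sketch of that approach using the Connection-based API; it assumes an hbase-site.xml on the classpath and reuses the table name test from the demo.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

public class ScanRowCount {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create(); // reads hbase-site.xml from the classpath
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("test"))) {
            Scan scan = new Scan();
            scan.setFilter(new FirstKeyOnlyFilter()); // return only the first cell of each row
            scan.setCaching(1000);                    // fetch 1000 rows per RPC
            long count = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result ignored : scanner) {
                    count++;
                }
            }
            System.out.println("rowcount is " + count);
        }
    }
}

A plain scan like this still pulls every row through a single client, so on large tables the coprocessor above (or the MapReduce-based RowCounter tool shipped with HBase) is usually the better choice.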
To run the API demo above locally, the following conditions must be met:
1. Create a project
This section uses IntelliJ IDEA as the HBase development environment. After installing the IDE, create a Maven project named hbase-test and, under the project's src/main/java/ directory, create an HbaseTable.java file containing the API demo above.
2. Import the jar packages
Download the jar packages obtained in the previous chapter to your local machine and add them as dependencies of the hbase-test project created in the previous step, i.e. put them on the project's classpath so that the API can find the jars and configuration files at runtime.
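Because hbase-test is a Maven project, the client jars can also be declared as Maven dependencies instead of being placed on the classpath by hand. A minimal sketch; the version shown is an assumption and should match the cluster's HBase release, and in newer HBase versions the aggregation coprocessor classes ship in a separate hbase-endpoint artifact:

<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <!-- assumed version; use the same release as the cluster -->
    <version>1.2.6</version>
</dependency>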
3. Import the configuration file
To develop locally you also need the hbase-site.xml file; place it in the project's resources directory. The file can be found in the /etc/hbase/conf/ directory on any server in the cluster. The local hbase-site.xml must sit on the classpath that was set up for the hbase-test project in the previous step.
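To verify that the configuration is being picked up, the client can either rely on the hbase-site.xml found on the classpath or set the ZooKeeper quorum in code. A minimal sketch; the host names below are placeholders, not values from this article:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ConfCheck {
    public static void main(String[] args) {
        // create() reads hbase-site.xml from the classpath (e.g. src/main/resources)
        Configuration conf = HBaseConfiguration.create();
        // Setting the quorum explicitly overrides whatever hbase-site.xml provides.
        // zk1,zk2,zk3 are placeholder host names; use your cluster's ZooKeeper ensemble.
        conf.set("hbase.zookeeper.quorum", "zk1,zk2,zk3");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        System.out.println("hbase.zookeeper.quorum = " + conf.get("hbase.zookeeper.quorum"));
    }
}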
Once these conditions are met, you can run the API demo above.
