Connecting to HBase from IDEA

Create a new Maven project

Open pom.xml and add the dependencies HBase needs:

<dependency>
  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-client</artifactId>
  <version>2.3.5</version>
</dependency>

<dependency>
  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-server</artifactId>
  <version>2.3.5</version>
</dependency>

Create a class that connects to HBase and writes a single row:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class HBase {
    public static void main(String[] args) {

        // Configure HBase and connect to the database
        Configuration conf = HBaseConfiguration.create();
        conf.set(HConstants.HBASE_DIR, "hdfs://192.168.153.146:9000/hbase");
        // Add the ZooKeeper settings to the configuration
        conf.set(HConstants.ZOOKEEPER_QUORUM, "192.168.153.146");
        conf.set(HConstants.CLIENT_PORT_STR, "2181");

        try {
            // Obtain a connection
            Connection conn = ConnectionFactory.createConnection(conf);
            System.out.println(conn);
            Table stuTB = conn.getTable(TableName.valueOf("bigdata:student"));
            Put put = new Put(Bytes.toBytes("rowkey11"));
            put.addColumn("baseinfo".getBytes(), "name".getBytes(), "guo".getBytes());
            put.addColumn("baseinfo".getBytes(), "age".getBytes(), "18".getBytes());
            put.addColumn("baseinfo".getBytes(), "birthday".getBytes(), "1994-10-06".getBytes());
            put.addColumn("schoolinfo".getBytes(), "name".getBytes(), "西华一高".getBytes());
            put.addColumn("schoolinfo".getBytes(), "address".getBytes(), "西华".getBytes());
            stuTB.put(put);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
A JUnit test class, AppTest, exercises the same connection along with namespace, table, and data operations:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Unit tests for basic HBase operations.
 */
public class AppTest {
    static Configuration config = null;
    private Connection conn = null;
    private Admin admin;

    @Before // runs before every test
    public void init() throws IOException {
        System.out.println("running init()");
        config = HBaseConfiguration.create();
        config.set(HConstants.HBASE_DIR, "hdfs://192.168.153.147:9000/hbase");
        config.set(HConstants.ZOOKEEPER_QUORUM, "192.168.153.147");
        config.set(HConstants.CLIENT_PORT_STR, "2181");
        conn = ConnectionFactory.createConnection(config);
        admin = conn.getAdmin();
    }

    @Test
    public void test1() {
        System.out.println(conn);
        System.out.println("running test1()");
    }

    /**
     * Create a namespace
     */
    @Test
    public void createNameSpace() throws IOException {
        NamespaceDescriptor kb21 = NamespaceDescriptor.create("kb21").build();

        try {
            admin.createNamespace(kb21);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Create a table
     */
    @Test
    public void createTable() throws IOException {
        // Table descriptor
        TableName tableName = TableName.valueOf("bigdata:car2");
        HTableDescriptor desc = new HTableDescriptor(tableName);
        // Column-family descriptor
        HColumnDescriptor family1 = new HColumnDescriptor("info");

        desc.addFamily(family1);

        admin.createTable(desc);
    }

    /**
     * Delete a table
     *
     * @throws IOException
     */
    @Test
    public void deleteTable() throws IOException {
        admin.disableTable(TableName.valueOf("bigdata:car"));
        admin.deleteTable(TableName.valueOf("bigdata:car"));
    }

    @Test
    public void getAllNamespace() throws IOException {
        String[] nps = admin.listNamespaces();
        String s = Arrays.toString(nps);
        System.out.println(s);
    }

    /**
     * List the table descriptors in a namespace
     */
    @Test
    public void getAllNamespace2() throws IOException {
        List<TableDescriptor> tableDesc = admin.listTableDescriptorsByNamespace("kb21".getBytes());
        System.out.println(tableDesc.toString());
    }

    @After
    public void close() throws IOException {
        System.out.println("running close()");
        if (admin != null) {
            admin.close();
        }
        if (conn != null) {
            conn.close();
        }
    }

    @Test
    public void insertData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("bigdata:car2"));

        Put put2 = new Put(Bytes.toBytes("model3"));
        put2.addColumn("info".getBytes(), "brand".getBytes(), "TSLA".getBytes());
        put2.addColumn("info".getBytes(), "country".getBytes(), "美国".getBytes());
        put2.addColumn("info".getBytes(), "model".getBytes(), "轿车".getBytes());
        put2.addColumn("info".getBytes(), "price".getBytes(), "23万".getBytes());
        put2.addColumn("info".getBytes(), "data".getBytes(), "1994-10-01".getBytes());

        Put put3 = new Put(Bytes.toBytes("modely"));
        put3.addColumn("info".getBytes(), "brand".getBytes(), "TSLA".getBytes());
        put3.addColumn("info".getBytes(), "country".getBytes(), "美国".getBytes());
        put3.addColumn("info".getBytes(), "model".getBytes(), "suv".getBytes());
        put3.addColumn("info".getBytes(), "price".getBytes(), "40万".getBytes());
        put3.addColumn("info".getBytes(), "data".getBytes(), "1998-10-01".getBytes());

        // Insert several rows at once with a list of Puts
        ArrayList<Put> list = new ArrayList<>();
        list.add(put2);
        list.add(put3);
        table.put(list);
    }

    /**
     * Point query with Get
     */
    @Test
    public void queryData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("kb21:student"));
        Get get = new Get(Bytes.toBytes("student1"));
        Result result = table.get(get);
        byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
        System.out.println("name: " + Bytes.toString(value));
        value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
        System.out.println("school: " + Bytes.toString(value));
    }

    @Test
    public void scanData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("kb21:student"));
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
            System.out.println("name: " + Bytes.toString(value));
            value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
            System.out.println("school: " + Bytes.toString(value));
            System.out.println(Bytes.toString(result.getRow()));
        }
    }
}

Connecting Hive to HBase

Edit hive-site.xml under /opt/soft/hive312/conf:

<property>
<name>hive.zookeeper.quorum</name>
<value>192.168.153.147</value>
</property>

<property>
<name>hbase.zookeeper.quorum</name>
<value>192.168.153.147</value>
</property>

<property>
<name>hive.aux.jars.path</name>
<value>file:///opt/soft/hive312/lib/hive-hbase-handler-3.1.2.jar,file:///opt/soft/hive312/lib/zookeeper-3.4.6.jar,file:///opt/soft/hive312/lib/hbase-client-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5-tests.jar,file:///opt/soft/hive312/lib/hbase-server-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-protocol-2.3.5.jar,file:///opt/soft/hive312/lib/htrace-core-3.2.0-incubating.jar</value>
</property>

Copy the HBase jar files into Hive's lib directory:

[root@guo147 conf]# cp /opt/soft/hbase235/lib/* /opt/soft/hive312/lib/

(Some files already exist; answer n so they are not overwritten.)

Delete Hive's guava-11.0.2.jar:

[root@guo147 conf]# find ../lib/guava*
../lib/guava-11.0.2.jar
../lib/guava-27.0-jre.jar

[root@guo147 conf]# rm -rf ../lib/guava-11.0.2.jar 

Delete HBase's guava-11.0.2.jar:

[root@guo147 lib]# pwd
/opt/soft/hbase235/lib

[root@guo147 lib]# rm -rf guava-11.0.2.jar

Copy Hive's guava-27.0-jre.jar into HBase's lib directory:

[root@guo147 lib]# cp /opt/soft/hive312/lib/guava-27.0-jre.jar ./

Operating the HBase database from IDEA and mapping it into Hive

Prerequisites: working Hadoop, Hive, ZooKeeper, and HBase environments.

Mapping: every column in the Hive table must exist in HBase, but the Hive table does not have to include every column that exists in HBase. The HBase RowKey is mapped by choosing one Hive column and binding it to :key; the remaining Hive columns map to HBase columns, each addressed by column family and qualifier (cf:cq), as illustrated below.
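As a concrete illustration (the same mapping is used in the external-table DDL at the end of this article), the Hive column list is paired position by position with the entries in hbase.columns.mapping:

-- Hive columns:   id      name          school
-- HBase side:     :key    info1:name    info2:school
"hbase.columns.mapping" = ":key,info1:name,info2:school"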


Setting up the mapping environment

Step 1: stop all services

[root@siwen ~]# stop-hbase.sh -----stop HBase

[root@siwen ~]# zkServer.sh stop -----stop ZooKeeper

[root@siwen ~]# stop-all.sh -----stop Hadoop

Step 2: configuration files

1. Edit the hosts file:

On the Windows machine running IDEA, open the hosts file under C:\Windows\System32\drivers\etc and add this server's IP address and hostname (see the sample entry below).
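A sample hosts entry; the IP and hostname here are the illustrative values used elsewhere in this article, so substitute your own:

192.168.255.159  siwen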

2. Edit hive-site.xml

[root@siwen ~]# cd /opt/soft/hive312/conf/

[root@siwen conf]# vim ./hive-site.xml

Add the following lines:

  <property>
    <name>hive.zookeeper.quorum</name>
    <value>192.168.255.159</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <value>192.168.255.159</value>
  </property>
  <property>
    <name>hive.aux.jars.path</name>
    <value>file:///opt/soft/hive312/lib/hive-hbase-handler-3.1.2.jar,file:///opt/soft/hive312/lib/zookeeper-3.4.6.jar,file:///opt/soft/hive312/lib/hbase-client-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5-tests.jar,file:///opt/soft/hive312/lib/hbase-server-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-protocol-2.3.5.jar,file:///opt/soft/hive312/lib/htrace-core-3.2.0-incubating.jar</value>
  </property>

3. Copy the jar files

(1) Copy all jars from hbase235/lib into Hive's lib directory:

[root@siwen conf]# cp /opt/soft/hbase235/lib/* /opt/soft/hive312/lib/

When asked whether to overwrite existing files, you can enter n to skip them; overwriting them is also fine.

(2) Unify the guava jars

[root@siwen lib]# find ../lib/guava* -------list all the guava jars

[root@siwen lib]# rm -rf ../lib/guava-11.0.2.jar -------delete the version-11 jar

[root@siwen conf]# cd /opt/soft/hbase235/lib/
[root@siwen lib]# pwd
/opt/soft/hbase235/lib

[root@siwen lib]# cp /opt/soft/hive312/lib/guava-27.0-jre.jar ./ -----copy Hive's guava jar into HBase

Step 3: start the services

# Start Hadoop
[root@siwen lib]# start-all.sh
# Start ZooKeeper
[root@siwen lib]# zkServer.sh start
# Start HBase
[root@siwen lib]# start-hbase.sh
# Start the Hive services
[root@siwen lib]# nohup hive --service metastore &
[root@siwen lib]# nohup hive --service hiveserver2 &

Create the Maven project in IDEA

Add the dependencies to pom.xml:

<dependency>
  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-client</artifactId>
  <version>2.3.5</version>
</dependency>

<dependency>
  <groupId>org.apache.hbase</groupId>
  <artifactId>hbase-server</artifactId>
  <version>2.3.5</version>
</dependency>

1. Write the initialization method: configure HBase and connect to the database

    // Configuration object
    static Configuration config = null;
    // Connection handle
    private Connection conn = null;
    Admin admin = null;

    @Before
    public void init() throws IOException {
        // Configure HBase and connect to the database
        config = HBaseConfiguration.create();
        config.set(HConstants.HBASE_DIR, "hdfs://192.168.255.159:9000/hbase");
        config.set(HConstants.ZOOKEEPER_QUORUM, "192.168.255.159");
        config.set(HConstants.CLIENT_PORT_STR, "2181");
        // HBase connection factory
        conn = ConnectionFactory.createConnection(config);
        // Get the Admin instance
        admin = conn.getAdmin();
    }

2. Write the close method

    @After
    public void close() throws IOException {
        System.out.println("running close()");
        if (admin != null)
            admin.close();
        if (conn != null)
            conn.close();
    }

3. Write a method that creates a namespace

    @Test
    public void createNameSpace() throws IOException {
        NamespaceDescriptor bigdata = NamespaceDescriptor.create("bigdata").build();
        // Create the namespace
        admin.createNamespace(bigdata);
    }

4. Write a method that creates a table

    @Test
    public void createTable() throws IOException {
        // Table descriptor
        TableName tableName = TableName.valueOf("bigdata:student");
        HTableDescriptor desc = new HTableDescriptor(tableName);

        // Column-family descriptors; add them to the table
        HColumnDescriptor family1 = new HColumnDescriptor("info1");
        HColumnDescriptor family2 = new HColumnDescriptor("info2");
        desc.addFamily(family1);
        desc.addFamily(family2);

        admin.createTable(desc);
    }

5. Write a method that lists the table descriptors in a namespace

    @Test
    public void getAllNamespace() throws IOException {
        List<TableDescriptor> tableDesc = admin.listTableDescriptorsByNamespace("bigdata".getBytes());
        System.out.println(tableDesc.toString());
    }

6. Write a method that inserts data

    @Test
    public void insertData() throws IOException {
        // Get a reference to the table
        Table table = conn.getTable(TableName.valueOf("bigdata:student"));
        // Set the row key
        Put put = new Put(Bytes.toBytes("student1"));
        // Set the column qualifiers and values
        put.addColumn("info1".getBytes(), "name".getBytes(), "zs".getBytes());
        put.addColumn("info2".getBytes(), "school".getBytes(), "xwxx".getBytes());
        // Execute the put
        table.put(put);

        // Insert several rows at once with a list of Puts
        Put put2 = new Put(Bytes.toBytes("student2"));
        put2.addColumn("info1".getBytes(), "name".getBytes(), "zss".getBytes());
        put2.addColumn("info2".getBytes(), "school".getBytes(), "xwxx".getBytes());
        Put put3 = new Put(Bytes.toBytes("student3"));
        put3.addColumn("info1".getBytes(), "name".getBytes(), "zsr".getBytes());
        put3.addColumn("info2".getBytes(), "school".getBytes(), "xwxx".getBytes());
        List<Put> list = new ArrayList<>();
        list.add(put2);
        list.add(put3);
        table.put(list);
    }

7. Write a method that queries a specific row

    // Query student1's row
    @Test
    public void queryData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("bigdata:student"));
        Get get = new Get(Bytes.toBytes("student1"));
        Result result = table.get(get);
        byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
        System.out.println("name: " + Bytes.toString(value));
        value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
        System.out.println("school: " + Bytes.toString(value));
    }

8. Write a method that scans all rows

    @Test
    public void scanData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("bigdata:student"));
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
            System.out.println("name: " + Bytes.toString(value));
            value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
            System.out.println("school: " + Bytes.toString(value));
            System.out.println(Bytes.toString(result.getRow()));
        }
    }

9. Write a method that deletes a table

    @Test
    public void deleteTable() throws IOException {
        // Disable the table first
        admin.disableTable(TableName.valueOf("bigdata:student"));
        // Then delete it
        admin.deleteTable(TableName.valueOf("bigdata:student"));
    }

Create the external table in Hive

Note: the external table's columns must form a mapping with the columns in HBase.

create external table student(
    id string,
    name string,
    school string
)
stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
with serdeproperties ("hbase.columns.mapping" = ":key,info1:name,info2:school")
tblproperties ("hbase.table.name" = "bigdata:student");

select * from student;
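As a quick sanity check (a sketch, assuming the bigdata:student table exists and the insertData test above has been run from IDEA), the same rows should be visible from both sides; scan the underlying table in the HBase shell and compare it with the Hive query result:

hbase shell
scan 'bigdata:student'

Rows written through the Java client should appear in both outputs, with the HBase row key showing up as the Hive id column.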
