HBase Batch Insert API

Posted by yutingliuyl

1. Data format (a.txt). Each line is one tab-separated flow record (the tabs may render as spaces below): report time (a millisecond timestamp), phone number (msisdn), AP MAC, AC IP, host, site type, upstream/downstream packet counts, upstream/downstream payload bytes, and HTTP status. Note that some lines omit the host and/or site-type fields:

1363157985066 13726230503 00-FD-07-A4-72-B8:CMCC 120.196.100.82 i02.c.aliimg.com 24 27 2481 24681 200
1363157995052 13826544101 5C-0E-8B-C7-F1-E0:CMCC 120.197.40.4 4 0 264 0 200
1363157991076 13926435656 20-10-7A-28-CC-0A:CMCC 120.196.100.99 2 4 132 1512 200
1363154400022 13926251106 5C-0E-8B-8B-B1-50:CMCC 120.197.40.4 4 0 240 0 200
1363157993044 18211575961 94-71-AC-CD-E6-18:CMCC-EASY 120.196.100.99 iface.qiyi.com 视频站点 15 12 1527 2106 200
1363157995074 84138413 5C-0E-8B-8C-E8-20:7DaysInn 120.197.40.4 122.72.52.12 20 16 4116 1432 200
1363157993055 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 18 15 1116 954 200
1363157995033 15920133257 5C-0E-8B-C7-BA-20:CMCC 120.197.40.4 sug.so.360.cn 信息安全 20 20 3156 2936 200
1363157983019 13719199419 68-A1-B7-03-07-B1:CMCC-EASY 120.196.100.82 4 0 240 0 200
1363157984041 13660577991 5C-0E-8B-92-5C-20:CMCC-EASY 120.197.40.4 s19.cnzz.com 网站统计 24 9 6960 690 200
1363157973098 15013685858 5C-0E-8B-C7-F7-90:CMCC 120.197.40.4 rank.ie.sogou.com 搜索引擎 28 27 3659 3538 200
1363157986029 15989002119 E8-99-C4-4E-93-E0:CMCC-EASY 120.196.100.99 www.umeng.com 网站统计 3 3 1938 180 200
1363157992093 13560439658 C4-17-FE-BA-DE-D9:CMCC 120.196.100.99 15 9 918 4938 200
1363157986041 13480253104 5C-0E-8B-C7-FC-80:CMCC-EASY 120.197.40.4 3 3 180 180 200
1363157984040 13602846565 5C-0E-8B-8B-B6-00:CMCC 120.197.40.4 2052.flash2-http.qq.com 综合门户 15 12 1938 2910 200
1363157995093 13922314466 00-FD-07-A2-EC-BA:CMCC 120.196.100.82 img.qfc.cn 12 12 3008 3720 200
1363157982040 13502468823 5C-0A-5B-6A-0B-D4:CMCC-EASY 120.196.100.99 y0.ifengimg.com 综合门户 57 102 7335 110349 200
1363157986072 18320173382 84-25-DB-4F-10-1A:CMCC-EASY 120.196.100.99 input.shouji.sogou.com 搜索引擎 21 18 9531 2412 200
1363157990043 13925057413 00-1F-64-E1-E6-9A:CMCC 120.196.100.55 t3.baidu.com 搜索引擎 69 63 11058 48243 200
1363157988072 13760778710 00-FD-07-A4-7B-08:CMCC 120.196.100.82 2 2 120 120 200
1363157985079 13823070001 20-7C-8F-70-68-1F:CMCC 120.196.100.99 6 3 360 180 200
1363157985069 13600217502 00-1F-64-E2-E8-B1:CMCC 120.196.100.55 18 138 1080 186852 200


2. Create the HBase table in the shell (the name must match the TABLE_NAME constant in the code below): create 'wlan_log','cf'
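
The same table can also be created programmatically. Below is a minimal sketch (not from the original post) using the same pre-1.0 client API that the import code relies on; the class name is made up for illustration, and the shell command above achieves the same thing.

package com.utils;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

// Hypothetical helper: creates the target table through the client API.
public class CreateWlanTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.80.20,192.168.80.21,192.168.80.22");
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (!admin.tableExists("wlan_log")) {
            HTableDescriptor desc = new HTableDescriptor("wlan_log");
            desc.addFamily(new HColumnDescriptor("cf")); // single column family, as in the shell command
            admin.createTable(desc);
        }
        admin.close();
    }
}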


3. Code

package com.utils;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class HBaseImport {

    // Mapper: prepends a row key (msisdn_yyyyMMddHHmmss) to each input line.
    static class BatchMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, LongWritable, Text>.Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            String[] splited = line.split("\t");
            // Field 0 is a millisecond timestamp; format it for the row key.
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
            String format = simpleDateFormat.format(new Date(Long.parseLong(splited[0].trim())));
            // Row key: phone number + "_" + report time, so rows for the same
            // number sort together and prefix scans by number are cheap.
            String rowKey = splited[1] + "_" + format;
            Text v2s = new Text();
            v2s.set(rowKey + "\t" + line);
            context.write(key, v2s);
        }
    }

    // Reducer: turns each line into a Put against the "cf" column family.
    static class BatchReducer extends TableReducer<LongWritable, Text, NullWritable> {
        private String family = "cf"; // column family

        @Override
        protected void reduce(LongWritable key, Iterable<Text> v2s,
                Reducer<LongWritable, Text, NullWritable, Mutation>.Context context)
                throws IOException, InterruptedException {
            for (Text v2 : v2s) {
                String[] splited = v2.toString().split("\t");
                String rowKey = splited[0];
                Put put = new Put(rowKey.getBytes());
                // Note: this assumes every line carries all 11 fields; lines that
                // omit host/siteType (see the sample data) would need guarding
                // against ArrayIndexOutOfBoundsException.
                put.add(family.getBytes(), "raw".getBytes(), v2.toString().getBytes());
                put.add(family.getBytes(), "reportTime".getBytes(), splited[1].getBytes());
                put.add(family.getBytes(), "msisdn".getBytes(), splited[2].getBytes());
                put.add(family.getBytes(), "apmac".getBytes(), splited[3].getBytes());
                put.add(family.getBytes(), "acmac".getBytes(), splited[4].getBytes());
                put.add(family.getBytes(), "host".getBytes(), splited[5].getBytes());
                put.add(family.getBytes(), "siteType".getBytes(), splited[6].getBytes());
                put.add(family.getBytes(), "upPackNum".getBytes(), splited[7].getBytes());
                put.add(family.getBytes(), "downPackNum".getBytes(), splited[8].getBytes());
                put.add(family.getBytes(), "upPayLoad".getBytes(), splited[9].getBytes());
                put.add(family.getBytes(), "downPayLoad".getBytes(), splited[10].getBytes());
                put.add(family.getBytes(), "httpStatus".getBytes(), splited[11].getBytes());
                context.write(NullWritable.get(), put);
            }
        }
    }

    private static final String TABLE_NAME = "wlan_log";

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.80.20,192.168.80.21,192.168.80.22");
        //conf.set("hbase.rootdir", "hdfs://cluster/hbase");
        conf.set("hbase.rootdir", "hdfs://192.168.80.20:9000/hbase");
        conf.set(TableOutputFormat.OUTPUT_TABLE, TABLE_NAME);

        Job job = new Job(conf, HBaseImport.class.getSimpleName());
        // Ship the HBase jars with the job so the cluster can load them.
        TableMapReduceUtil.addDependencyJars(job);
        job.setJarByClass(HBaseImport.class);

        job.setMapperClass(BatchMapper.class);
        job.setReducerClass(BatchReducer.class);

        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(Text.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TableOutputFormat.class);

        FileInputFormat.setInputPaths(job, "hdfs://192.168.80.20:9000/data");
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
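
To run the job, package the class into a jar and submit it with hadoop jar. As a side note, TableMapReduceUtil.initTableReducerJob("wlan_log", BatchReducer.class, job) can wire the reducer, output format, and target table in a single call.

MapReduce with TableOutputFormat suits large files already on HDFS, but for smaller batches the plain client API also supports bulk inserts. Below is a minimal sketch (not from the original post, same pre-1.0 client API as above); the class name, row keys, and values are made up for illustration.

package com.utils;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;

// Hypothetical example: client-side batch insert without MapReduce.
public class ClientBatchPut {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.80.20,192.168.80.21,192.168.80.22");
        HTable table = new HTable(conf, "wlan_log");
        table.setAutoFlush(false); // buffer writes client-side instead of one RPC per Put
        List<Put> puts = new ArrayList<Put>();
        for (int i = 0; i < 10000; i++) {
            Put put = new Put(("13800000000_" + i).getBytes()); // made-up row key
            put.add("cf".getBytes(), "raw".getBytes(), ("value_" + i).getBytes());
            puts.add(put);
            if (puts.size() >= 1000) { // hand off in batches of 1000
                table.put(puts);
                puts.clear();
            }
        }
        table.put(puts);      // remaining puts
        table.flushCommits(); // drain the client-side write buffer
        table.close();
    }
}

Batching amortizes RPC overhead: with auto-flush off, the client groups Puts by region server and ships them together, which is usually far faster than one-row-at-a-time inserts.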