Hbase 用mr-hdfs hdfs-mr
Posted jbli
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了Hbase 用mr-hdfs hdfs-mr相关的知识,希望对你有一定的参考价值。
//将从 HBase 数据库中用 MR 读取的数据放入到 HDFS 中
注:引入 jar 包
//使用mr 将hbase数据库中的单词计算出来
创建表 wordcount 放入4条数据
在 Eclipse 中
package com.bw.hbase; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil; import org.apache.hadoop.hbase.mapreduce.TableMapper; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; //使用mr 计算 hbase数据库中单词的数量 public class HBaseWC { // 表中的数据是: // hello jack // hello world // hello tom // hello lmc // Text 单词 IntWritable是总数 public static class HMapper extends TableMapper<Text, IntWritable> { // map端的输出的值是将单词 拆分 (hello 1) (hello 1)(jack 1) ........... IntWritable outval = new IntWritable(1);// 输出的每个单词数量都是1 @Override protected void map(ImmutableBytesWritable key, Result value, Mapper<ImmutableBytesWritable, Result, Text, IntWritable>.Context context) throws IOException, InterruptedException { // 1.不需要key值 因为key是row_key 表中自带的 不变的那个值 byte[] val = value.getValue("info".getBytes(), "word".getBytes()); String word = new String(val); String[] split = word.split(" "); for (String str : split) { context.write(new Text(str), outval); } } } public static class HReducer extends Reducer<Text, IntWritable, Text, IntWritable> { // 重写reduce方法 @Override protected void reduce(Text arg0, Iterable<IntWritable> arg1, Reducer<Text, IntWritable, Text, IntWritable>.Context arg2) throws IOException, InterruptedException { int count = 0; for (IntWritable i : arg1) { count++; } arg2.write(arg0, new IntWritable(count)); } } public static void main(String[] args) throws Exception { Configuration conf = HBaseConfiguration.create(); conf.set("hbase.zookeeper.quorum", "linux04:2181"); 
Job job = Job.getInstance(conf); job.setJarByClass(HBaseWC.class); Scan scan = new Scan(); TableMapReduceUtil.initTableMapperJob("wordcount", scan, HMapper.class, Text.class, IntWritable.class, job); job.setReducerClass(HReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); FileOutputFormat.setOutputPath(job, new Path("hbasewc")); job.waitForCompletion(true); } }
//将数据放入本地中
以上是关于Hbase 用mr-hdfs hdfs-mr的主要内容,如果未能解决你的问题,请参考以下文章