
How to move data from HBase to HDFS in Hadoop
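The MapReduce job below uses a TableMapper to read the HBase table mingxing, counts rows by the value of the sex column, and writes the totals to /hbase_hdfs121 on HDFS.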

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HbaseToHdfs {

// Mapper stage: for each HBase row, pull out the "sex" qualifier and emit (sex, 1)
static class MyMapper extends TableMapper<Text, IntWritable> {
	Text k = new Text();

	@Override
	protected void map(ImmutableBytesWritable key, Result value,
			Mapper<ImmutableBytesWritable, Result, Text, IntWritable>.Context context)
			throws IOException, InterruptedException {
		List<KeyValue> list = value.list();
		String sex = "";
		for (KeyValue kv : list) {
			String qualifier = new String(kv.getQualifier());
			if ("sex".equals(qualifier)) {
				sex = new String(kv.getValue());
			}
		}
		if (!"".equals(sex)) {
			k.set(sex);
			context.write(k, new IntWritable(1));
		}
	}
}

// Reducer stage: count how many rows were emitted for each "sex" value
static class MyReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
	IntWritable v = new IntWritable();

	@Override
	protected void reduce(Text key, Iterable<IntWritable> values,
			Reducer<Text, IntWritable, Text, IntWritable>.Context context)
			throws IOException, InterruptedException {
		int count = 0;
		for (IntWritable n : values) {
			count++;
		}
		v.set(count);
		context.write(key, v);
	}
}
public static void main(String[] args) {
	Configuration conf=new Configuration();
	conf.set("fs.defaultFS", "hdfs://bd1803");
	conf.set("hbase.zookeeper.quorum", "centos01:2181,centos02:2181,centos03:2181");
	System.setProperty("HADOOP_USER_NAME", "centos");
	try {
		FileSystem fs=FileSystem.get(conf);
		Job job=Job.getInstance(conf);
		job.setJarByClass(HbaseToHdfs.class);
		Scan scan=new Scan();
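		// the trailing false argument is addDependencyJars: the HBase jars are assumed to be on the cluster classpath already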
		TableMapReduceUtil.initTableMapperJob("mingxing", scan,
				MyMapper.class, 
				Text.class, 
				IntWritable.class, 
				job,false);	
		job.setReducerClass(MyReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(IntWritable.class);	
		Path path=new Path("/hbase_hdfs121");
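		// delete any leftover output directory so FileOutputFormat does not fail on an existing path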
		if(fs.exists(path)) {
			fs.delete(path,true);
		}
		FileOutputFormat.setOutputPath(job, path);			
		job.waitForCompletion(true);		
	} catch (Exception e) {
		e.printStackTrace();
	}
}

}
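
For a quick end-to-end test, the mingxing table needs a few rows that carry a sex qualifier. The sketch below is one way to load sample data with the same pre-1.0 HBase client API that the job uses; the column family name "info", the row keys, and the values are assumptions for illustration, since the mapper filters only on the qualifier and ignores the family.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutTestData {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "centos01:2181,centos02:2181,centos03:2181");
		// assumes the table was created with a column family named "info",
		// e.g. create 'mingxing','info' in the HBase shell
		HTable table = new HTable(conf, "mingxing");
		String[][] rows = { { "0001", "male" }, { "0002", "female" }, { "0003", "male" } };
		for (String[] r : rows) {
			Put put = new Put(Bytes.toBytes(r[0]));
			// write the "sex" qualifier the mapper looks for
			put.add(Bytes.toBytes("info"), Bytes.toBytes("sex"), Bytes.toBytes(r[1]));
			table.put(put);
		}
		table.close();
	}
}

With data like this, running HbaseToHdfs and reading /hbase_hdfs121/part-r-00000 should show one line per distinct sex value with its count, e.g. female 1 and male 2.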