程式人生 > MapReduce——單元測試

MapReduce——單元測試

新增依賴

    <dependency>
      <groupId>org.apache.mrunit</groupId>
      <artifactId>mrunit</artifactId>
      <version>1.1.0</version>
      <scope>test</scope>
      <classifier>hadoop2</classifier>
    </dependency>

MapTest

package Hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
/**
 * MRUnit test for {@code WordCount.TokenizerMapper}.
 * Feeds one input line to the mapper and verifies it emits a
 * (word, 1) pair for every token, in the order the words appear.
 */
public class MapTest {
    // NOTE(review): assumes TokenizerMapper extends
    // Mapper<LongWritable, Text, Text, IntWritable> — the withInput key below
    // is a LongWritable; adjust the key type parameter if WordCount declares Object.
    private Mapper<LongWritable, Text, Text, IntWritable> map;
    private MapDriver<LongWritable, Text, Text, IntWritable> driver;

    @Before
    public void init() {
        // Point Hadoop at a local install directory (needed for winutils on Windows).
        System.setProperty("hadoop.home.dir", "E:\\hadoop2.6");
        map = new WordCount.TokenizerMapper();
        driver = new MapDriver<>(map);
    }

    @Test
    public void test() throws IOException {
        String text = "hello world hello hadoop";
        // Expected outputs must be declared in emission order,
        // including the duplicate "hello".
        driver.withInput(new LongWritable(0), new Text(text))
                .withOutput(new Text("hello"), new IntWritable(1))
                .withOutput(new Text("world"), new IntWritable(1))
                .withOutput(new Text("hello"), new IntWritable(1))
                .withOutput(new Text("hadoop"), new IntWritable(1))
                .runTest();  // run the test
    }
}

ReduceTest

package Hadoop;

import org.apache.hadoop.io.IntWritable;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;

/**
 * MRUnit test for {@code WordCount.IntSumReducer}.
 * Gives the reducer one key with two counts and verifies it
 * emits the key with the summed count.
 */
public class ReduceTest {
    // Parameterized to match IntSumReducer:
    // Reducer<Text, IntWritable, Text, IntWritable>.
    private Reducer<Text, IntWritable, Text, IntWritable> reduce;
    private ReduceDriver<Text, IntWritable, Text, IntWritable> driver;

    @Before
    public void init() {
        // Point Hadoop at a local install directory (needed for winutils on Windows).
        System.setProperty("hadoop.home.dir", "E:\\hadoop2.6");
        reduce = new WordCount.IntSumReducer();
        driver = new ReduceDriver<>(reduce);
    }

    @Test
    public void test() throws IOException {
        // Two occurrences of "hello" should reduce to a count of 2.
        ArrayList<IntWritable> values = new ArrayList<>();
        values.add(new IntWritable(1));
        values.add(new IntWritable(1));
        driver.withInput(new Text("hello"), values)
                .withOutput(new Text("hello"), new IntWritable(2))
                .runTest();  // run the test
    }
}

MapReduceTest

package Hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;

/**
 * End-to-end MRUnit test chaining {@code WordCount.TokenizerMapper}
 * and {@code WordCount.IntSumReducer} through a {@code MapReduceDriver}.
 */
public class MapReduceTest {
    // NOTE(review): assumes TokenizerMapper's input key is LongWritable
    // (the withInput keys below are LongWritable); adjust if WordCount
    // declares Object as the key type.
    private Mapper<LongWritable, Text, Text, IntWritable> map;
    private Reducer<Text, IntWritable, Text, IntWritable> reduce;
    private MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> driver;

    @Before
    public void init() {
        // Point Hadoop at a local install directory (needed for winutils on Windows).
        System.setProperty("hadoop.home.dir", "E:\\hadoop2.6");
        map = new WordCount.TokenizerMapper();
        reduce = new WordCount.IntSumReducer();
        driver = new MapReduceDriver<>(map, reduce);
    }

    @Test
    public void test() throws IOException {
        // Expected outputs must be listed in the shuffle's sorted key order
        // (hadoop < hello < world), otherwise the test fails.
        driver.withInput(new LongWritable(0), new Text("hello world"))
                .withInput(new LongWritable(12), new Text("hello hadoop"))
                .withOutput(new Text("hadoop"), new IntWritable(1))
                .withOutput(new Text("hello"), new IntWritable(2))
                .withOutput(new Text("world"), new IntWritable(1))
                .runTest();  // run the test
    }
}