Using MapReduce to compute student grade statistics (pass rate, excellence rate, average score)
By 阿新 · Published 2019-02-08
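The job below is a standard single-stage MapReduce program. The mapper reads tab-separated score records and emits the class name (field 0) as the key and the Chinese score (field 3) as an IntWritable value. The reducer then walks each class's scores once, counting how many are at least 60 (pass) and at least 80 (excellent) while summing them, and writes one line per class holding the pass rate, excellence rate, and average score.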
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Weibo1 {

    public static class weiboMap extends Mapper<Object, Text, Text, IntWritable> {
        @Override
        protected void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            // Each input line is expected to hold 6 tab-separated fields:
            // field 0 is the class name, field 3 is the Chinese score.
            String[] strings = value.toString().split("\t");
            if (strings.length == 6) {
                String classname = strings[0];
                int chinese = Integer.parseInt(strings[3]);
                context.write(new Text(classname), new IntWritable(chinese));
            }
        }
    }

    public static class weiboReducer extends Reducer<Text, IntWritable, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            double jigeCount = 0;    // number of passing scores (>= 60)
            double youxiuCount = 0;  // number of excellent scores (>= 80)
            double total = 0;        // sum of all scores in the class
            int sum = 0;             // number of students in the class
            for (IntWritable value : values) {
                // Read the IntWritable directly instead of parsing its string form.
                int score = value.get();
                total += score;
                sum++;
                if (score >= 60) {
                    jigeCount++;
                }
                if (score >= 80) {
                    youxiuCount++;
                }
            }
            double jigelv = jigeCount / sum;      // pass rate
            double youxiulv = youxiuCount / sum;  // excellence rate
            double avg = total / sum;             // average score
            context.write(key, new Text(jigelv + "\t" + youxiulv + "\t" + avg));
        }
    }

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        Job job = Job.getInstance(new Configuration());
        job.setJarByClass(Weibo1.class);
        job.setMapperClass(weiboMap.class);
        job.setReducerClass(weiboReducer.class);
        // The mapper emits (Text, IntWritable); the reducer emits (Text, Text),
        // so map and final output types must be declared separately.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
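The post does not include sample data, but the mapper's length check and field indices imply a 6-column tab-separated input with the class name in column 0 and the Chinese score in column 3. In the minimal sketch below, the student id, name, and the two trailing scores are illustrative guesses; only columns 0 and 3 are actually read by the job.

class1  20190001  zhangsan  85  90  78
class1  20190002  lisi      59  72  80
class2  20190003  wangwu    66  58  91

For that input the reducer would emit one line per class with the pass rate, excellence rate, and average score:

class1  0.5  0.5  72.0
class2  1.0  0.0  66.0

Packaged into a jar (the jar name here is hypothetical), the job runs in the usual way, with the input and output paths passed as args[0] and args[1]:

hadoop jar weibo1.jar Weibo1 /input/scores /output/scores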