// reduceByKeyAndWindow: real-time hot-search-word statistics over a sliding window (Java version)
// Source: blog post published 2019-02-10
package gh.spark.SparkStreaming;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;
/**
 * Real-time hot-search-word statistics over a sliding window.
 * Every 5 seconds, count the search-word frequencies over the last 20 seconds
 * and print the top 3 search words with their occurrence counts.
 *
 * @author Administrator
 */
public class WindowDemo {
public static void main(String[] args) throws Exception {
SparkConf conf=new SparkConf()
.setAppName("WindowDemo").setMaster("local[2]");
JavaStreamingContext jsc=new JavaStreamingContext(conf,Durations.seconds(5));
//從nc服務中讀取輸入的資料
JavaReceiverInputDStream<String> socketTextStream =
jsc.socketTextStream("tgmaster", 9999);
/**
* 搜尋的日誌格式:name words,比如:張三 hello
* 我們通過map運算元將搜尋詞取出
*/
JavaDStream<String> mapDStream = socketTextStream.map(new Function<String, String>() {
private static final long serialVersionUID = 1L;
public String call(String log) throws Exception {
// TODO Auto-generated method stub
return log.split(" ")[1];
}
});
// 將搜尋詞對映為(searchWord, 1)的tuple格式
JavaPairDStream<String, Integer> mapToPairDStream = mapDStream.mapToPair(new PairFunction<String, String, Integer>() {
private static final long serialVersionUID = 1L;
public Tuple2<String, Integer> call(String searchWord) throws Exception {
// TODO Auto-generated method stub
return new Tuple2<String, Integer>(searchWord,1);
}
});
/**
* 對滑動視窗進行reduceByKeyAndWindow操作
* 其中,視窗長度是20秒,滑動時間間隔是5秒
*/
JavaPairDStream<String, Integer> reduceByKeyAndWindowDStream = mapToPairDStream.reduceByKeyAndWindow(new Function2<Integer, Integer, Integer>() {
public Integer call(Integer v1, Integer v2) throws Exception {
// TODO Auto-generated method stub
return v1+v2;
}
}, Durations.seconds(20), Durations.seconds(5));
/**
* 獲取前3名的搜尋詞
*/
JavaPairDStream<String, Integer> resultDStream = reduceByKeyAndWindowDStream.transformToPair(new Function<JavaPairRDD<String,Integer>, JavaPairRDD<String,Integer>>() {
private static final long serialVersionUID = 1L;
public JavaPairRDD<String, Integer> call(
JavaPairRDD<String, Integer> wordsRDD) throws Exception {
//通過mapToPair運算元,將key與value互換位置
JavaPairRDD<Integer, String> mapToPairRDD = wordsRDD.mapToPair(new PairFunction<Tuple2<String,Integer>, Integer, String>() {
private static final long serialVersionUID = 1L;
public Tuple2<Integer, String> call(
Tuple2<String, Integer> tuple) throws Exception {
//將key與value互換位置
return new Tuple2<Integer, String>(tuple._2,tuple._1);
}
});
//根據key值進行降序排列
JavaPairRDD<Integer, String> sortByKeyRDD = mapToPairRDD.sortByKey(false);
// 然後再次執行反轉,變成(searchWord, count)的這種格式
JavaPairRDD<String, Integer> wordcountRDD = sortByKeyRDD.mapToPair(new PairFunction<Tuple2<Integer,String>, String, Integer>() {
private static final long serialVersionUID = 1L;
public Tuple2<String, Integer> call(
Tuple2<Integer, String> tuple) throws Exception {
return new Tuple2<String, Integer>(tuple._2,tuple._1);
}
});
//獲取降序排列之後的前3名
List<Tuple2<String, Integer>> result = wordcountRDD.take(3);
//遍歷輸出結果
for (Tuple2<String, Integer> info : result) {
System.out.println(info._1+" "+info._2);
}
return wordsRDD;
}
});
resultDStream.print();
jsc.start();
jsc.awaitTermination();
jsc.close();
}
}
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;
/**
 * Real-time hot-search-word statistics over a sliding window.
 * Every 5 seconds, count the search-word frequencies over the last 20 seconds
 * and print the top 3 search words with their occurrence counts.
 *
 * NOTE(review): this is a byte-for-byte duplicate of the WindowDemo class
 * above (a copy/paste or scrape artifact). A Java source file cannot contain
 * two public classes with the same name — one copy should be removed.
 *
 * @author Administrator
 */
public class WindowDemo {
public static void main(String[] args) throws Exception {
SparkConf conf=new SparkConf()
.setAppName("WindowDemo").setMaster("local[2]");
JavaStreamingContext jsc=new JavaStreamingContext(conf,Durations.seconds(5));
// Read input lines from an nc (netcat) server.
JavaReceiverInputDStream<String> socketTextStream =
jsc.socketTextStream("tgmaster", 9999);
/**
 * Search-log line format: "name word", e.g. "Zhang San hello".
 * Extract the search word (the second token) via the map operator.
 * NOTE(review): split(" ")[1] throws ArrayIndexOutOfBoundsException on a
 * line without a space — malformed input will kill the task.
 */
JavaDStream<String> mapDStream = socketTextStream.map(new Function<String, String>() {
private static final long serialVersionUID = 1L;
public String call(String log) throws Exception {
// Return the second whitespace-separated token of the log line.
return log.split(" ")[1];
}
});
// Map each search word to a (searchWord, 1) tuple.
JavaPairDStream<String, Integer> mapToPairDStream = mapDStream.mapToPair(new PairFunction<String, String, Integer>() {
private static final long serialVersionUID = 1L;
public Tuple2<String, Integer> call(String searchWord) throws Exception {
// Pair each word with an initial count of 1.
return new Tuple2<String, Integer>(searchWord,1);
}
});
/**
 * reduceByKeyAndWindow over the sliding window:
 * window length 20 seconds, slide interval 5 seconds.
 */
JavaPairDStream<String, Integer> reduceByKeyAndWindowDStream = mapToPairDStream.reduceByKeyAndWindow(new Function2<Integer, Integer, Integer>() {
public Integer call(Integer v1, Integer v2) throws Exception {
// Sum the per-word counts within the window.
return v1+v2;
}
}, Durations.seconds(20), Durations.seconds(5));
/**
 * For each window, print the top-3 search words (driver side).
 */
JavaPairDStream<String, Integer> resultDStream = reduceByKeyAndWindowDStream.transformToPair(new Function<JavaPairRDD<String,Integer>, JavaPairRDD<String,Integer>>() {
private static final long serialVersionUID = 1L;
public JavaPairRDD<String, Integer> call(
JavaPairRDD<String, Integer> wordsRDD) throws Exception {
// Swap (word, count) -> (count, word) so we can sort by count.
JavaPairRDD<Integer, String> mapToPairRDD = wordsRDD.mapToPair(new PairFunction<Tuple2<String,Integer>, Integer, String>() {
private static final long serialVersionUID = 1L;
public Tuple2<Integer, String> call(
Tuple2<String, Integer> tuple) throws Exception {
// Swap key and value.
return new Tuple2<Integer, String>(tuple._2,tuple._1);
}
});
// Sort by count, descending.
JavaPairRDD<Integer, String> sortByKeyRDD = mapToPairRDD.sortByKey(false);
// Swap back to the (searchWord, count) format.
JavaPairRDD<String, Integer> wordcountRDD = sortByKeyRDD.mapToPair(new PairFunction<Tuple2<Integer,String>, String, Integer>() {
private static final long serialVersionUID = 1L;
public Tuple2<String, Integer> call(
Tuple2<Integer, String> tuple) throws Exception {
return new Tuple2<String, Integer>(tuple._2,tuple._1);
}
});
// Pull the top 3 to the driver and print them.
List<Tuple2<String, Integer>> result = wordcountRDD.take(3);
// Print each (word, count) pair.
for (Tuple2<String, Integer> info : result) {
System.out.println(info._1+" "+info._2);
}
// Returned DStream carries the full, unsorted window counts.
return wordsRDD;
}
});
resultDStream.print();
jsc.start();
jsc.awaitTermination();
jsc.close();
}
}