spark streaming的入門案例
阿新 • 發佈:2018-12-20
1, spark streaming: tcp 源
maven依賴:
<dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming_2.11</artifactId> <version>2.1.0</version> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming-kafka-0-10_2.11</artifactId> <version>2.1.0</version> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_2.11</artifactId> <version>2.1.0</version> </dependency>
程式:
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Word-count over a TCP text source.
 *
 * Reads lines from localhost:9999 (e.g. fed by `nc -lk 9999`), splits each
 * line into words, and prints per-word counts for every 2-second micro-batch.
 */
object wc {
  def main(args: Array[String]): Unit = {
    // Spark configuration: local mode with 3 threads (at least 2 are required
    // so the socket receiver and the processing tasks can run concurrently).
    val conf = new SparkConf().setMaster("local[3]").setAppName("wc")
    // Streaming context with a 2-second batch interval.
    val ssc = new StreamingContext(conf, Seconds(2))
    // Input source: lines of text arriving on a local TCP socket.
    val lines = ssc.socketTextStream("localhost", 9999)
    // FIX: split each line into words before pairing with 1. The original
    // mapped whole lines to (line, 1), which counts duplicate lines rather
    // than words — wrong for a program named "wc" (word count).
    val counts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    counts.print()
    // Start the streaming computation and block until it is terminated.
    ssc.start()
    ssc.awaitTermination()
  }
}
2, spark streaming : kafka資料來源
1.5.2 版本依賴如下(注意:下方範例程式碼使用的是 kafka010 API(`LocationStrategies`、`ConsumerStrategies`),需要 Spark 2.x 與 spark-streaming-kafka-0-10 依賴(見上方第一組依賴);下列 1.5.2 / spark-streaming-kafka_2.10 依賴與該程式碼並不相容,僅供參考):
<dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_2.10</artifactId> <version>1.5.2</version> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming_2.10</artifactId> <version>1.5.2</version> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming-kafka_2.10</artifactId> <version>1.5.2</version> </dependency>
程式碼如下:
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark._
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, LocationStrategies, KafkaUtils}
/**
 * Kafka direct-stream example (kafka010 API).
 *
 * Subscribes to topic "t1" on a local broker and prints the (key, value)
 * pair of every record in each 2-second micro-batch.
 */
object wc_kafka {

  def main(args: Array[String]): Unit = {
    // Local-mode Spark configuration using all available cores.
    val sparkConf = new SparkConf().setAppName("kafka").setMaster("local[*]")
    // Streaming context with a 2-second batch interval.
    val streamingContext = new StreamingContext(sparkConf, Seconds(2))

    // Kafka consumer settings: string keys/values, start from the latest
    // offset, and leave offset auto-commit disabled.
    val consumerConfig: Map[String, Object] = Map(
      "bootstrap.servers" -> "localhost:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "g1",
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Direct (receiver-less) stream subscribed to topic "t1"; executors are
    // assigned partitions evenly via PreferConsistent.
    val records = KafkaUtils.createDirectStream[String, String](
      streamingContext,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array("t1"), consumerConfig)
    )

    // Print each batch as (key, value) tuples.
    records.map(record => (record.key, record.value)).print()

    // Start the job and block until termination.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}