1. 程式人生 > Spark連線mysql

Spark連線mysql

Spark 連線 MySQL 時,需要在 pom 檔案中新增 MySQL 驅動的相依設定,否則會找不到 JDBC driver。

版本根據自己版本對應新增。

<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>5.1.22</version>
</dependency>
這是spark連線mysql檔案:
package SqlSpark

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Example: read rows from a MySQL table through Spark's JDBC data source
 * and print the first 20 to the terminal.
 *
 * Runs locally (`local[4]`) against `jdbc:mysql://localhost:3306/student`.
 */
object sqlSpark {

  // NOTE(review): this case class is never used below — the SQL selects
  // id/region/city/company/name, which do not match these fields. Kept only
  // for source compatibility; confirm whether it can be removed.
  case class zbh_test(day_id: String, prvnce_id: String, pv_cnts: Int)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("mysql").setMaster("local[4]")
    val sc = new SparkContext(conf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // Query to run on MySQL; wrapped below as a derived table ("dbtable")
    // so the database executes the projection, not Spark.
    val sql = "select id,region,city,company,name from tb_user_imei"

    // JDBC connection options.
    // SECURITY: user/password are hard-coded in plain text — move them to
    // configuration or environment variables in any real deployment.
    val jdbcDF = sqlContext.read.format("jdbc").options(
      Map(
        "url"     -> "jdbc:mysql://localhost:3306/student",
        "dbtable" -> s"(${sql}) as table01",
        "driver"  -> "com.mysql.jdbc.Driver",
        "user"    -> "root",
        // "partitionColumn" -> "day_id",
        // lowerBound/upperBound only take effect together with
        // partitionColumn and numPartitions (both commented out above/below).
        "lowerBound" -> "0",
        "upperBound" -> "1000",
        // "numPartitions" -> "2",
        "fetchSize" -> "100",
        "password"  -> "root"
      )
    ).load()

    // jdbcDF.show()
    // Was `collect().take(20)`, which materializes the WHOLE table on the
    // driver before keeping 20 rows; `take(20)` fetches only what is needed
    // and returns the same rows. Print them to the terminal.
    jdbcDF.take(20).foreach(println)
  }
}