
Writing Data to a Hive Dynamic Partition Table with SparkSQL
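The helper below wraps the whole flow: it builds a string-typed schema from a space-separated column list, registers the input RDD as a temporary table, creates an external Hive table partitioned by dt (stored as SequenceFile with '|'-delimited fields), enables non-strict dynamic partition mode, and finally runs an INSERT OVERWRITE so that the last column of each row lands in the matching dt partition.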

import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object HiveTableHelper extends Logging {

  def hiveTableInit(sc: SparkContext): HiveContext = {
    new HiveContext(sc)
  }

  def writePartitionTable(HCtx: HiveContext, inputRdd: RDD[Row], tabName: String, colNames: String): Unit = {
    // Treat every column, including the partition column dt, as a nullable string.
    val schema = StructType(
      colNames.split(" ").map(fieldName => StructField(fieldName, StringType, true))
    )

    // Column definitions for CREATE TABLE. The partition column dt is declared
    // in PARTITIONED BY, so it must not appear in the regular column list.
    val table = colNames.replace(" dt", "").split(" ").map(name => name + " String").mkString(", ")

    val df = HCtx.createDataFrame(inputRdd, schema)
    // df.show(5)
    // logInfo("begin write table")

    // Register the DataFrame as a temporary table to select from.
    val temptb = "temp" + tabName
    // HCtx.sql("drop table if exists " + tabName)
    df.registerTempTable(temptb)

    HCtx.sql("CREATE EXTERNAL TABLE if not exists " + tabName + " (" + table + ")" +
      " PARTITIONED BY (`dt` string)" +
      " ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'" +
      " STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'" +
      " OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'" +
      " location '/spdbccc/data/dest/SPSJNEW/" + tabName + "'")

    // Allow dynamic partitioning without any static partition column.
    HCtx.sql("set hive.exec.dynamic.partition.mode = nonstrict")

    // The last column of the select (dt) supplies the dynamic partition value.
    HCtx.sql("insert overwrite table " + tabName + " partition(`dt`) select * from " + temptb)
  }
}
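For completeness, here is a minimal sketch of how the helper might be driven. The application name, input path, table name, and column list ("id name dt") are illustrative assumptions, not part of the original post; the only real constraints are that each row's arity matches the column list and that the last field of each row carries the dt partition value.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.Row

object HiveTableHelperDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical application name; any Spark 1.x configuration will do.
    val sc = new SparkContext(new SparkConf().setAppName("HiveTableHelperDemo"))
    val hctx = HiveTableHelper.hiveTableInit(sc)

    // Hypothetical input: '|'-delimited text, one record per line, whose last
    // field is the partition value dt (must match the last name in colNames).
    val rows = sc.textFile("/spdbccc/data/src/SPSJNEW/demo")
      .map(line => Row.fromSeq(line.split("\\|", -1)))

    HiveTableHelper.writePartitionTable(hctx, rows, "demo_table", "id name dt")
    sc.stop()
  }
}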