
Operating Hudi with Spark

The pom.xml is as follows:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.hj</groupId>
    <artifactId>hudi-test</artifactId>
    <version>1.0-SNAPSHOT</version>
    <name>hudi-test</name>
    <url>http://www.example.com</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <scala.version>2.11.8</scala.version>
        <scala.compat.version>2.11.8</scala.compat.version>
        <scala.binary.version>2.11</scala.binary.version>
        <spark.version>2.4.4</spark.version>
        <hoodie.version>0.5.3-SNAPSHOT</hoodie.version>
        <scalikejdbc.version>2.5.0</scalikejdbc.version>
        <hadoop.version>2.7.3</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>${scala.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-10_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-avro_2.11</artifactId>
            <version>2.4.4</version>
        </dependency>
        <!--
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>
        -->
        <dependency>
            <groupId>org.apache.hudi</groupId>
            <artifactId>hudi-spark-bundle_2.11</artifactId>
            <version>0.5.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hudi</groupId>
            <artifactId>hudi-common</artifactId>
            <version>0.5.3</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.29</version>
        </dependency>
        <dependency>
            <groupId>com.typesafe</groupId>
            <artifactId>config</artifactId>
            <version>1.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.scalikejdbc</groupId>
            <artifactId>scalikejdbc_${scala.binary.version}</artifactId>
            <version>${scalikejdbc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.scalikejdbc</groupId>
            <artifactId>scalikejdbc-core_${scala.binary.version}</artifactId>
            <version>${scalikejdbc.version}</version>
        </dependency>
        <dependency>
            <groupId>org.scalikejdbc</groupId>
            <artifactId>scalikejdbc-config_${scala.binary.version}</artifactId>
            <version>${scalikejdbc.version}</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.47</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.httpcomponents</groupId>
                    <artifactId>httpclient</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.httpcomponents</groupId>
                    <artifactId>httpclient</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-api</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>xml-apis</groupId>
                    <artifactId>xml-apis</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.parquet</groupId>
            <artifactId>parquet-avro</artifactId>
            <version>1.10.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.1</version>
            <scope>provided</scope>
            <exclusions>
                <exclusion>
                    <groupId>javax.mail</groupId>
                    <artifactId>mail</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>2.3.1</version>
            <scope>provided</scope>
            <exclusions>
                <exclusion>
                    <groupId>javax.mail</groupId>
                    <artifactId>mail</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.eclipse.jetty.aggregate</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>com.facebook.presto</groupId>
            <artifactId>presto-jdbc</artifactId>
            <version>0.217</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>${project.build.sourceEncoding}</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <version>2.15.2</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

Copy core-site.xml, hdfs-site.xml, hive-site.xml and yarn-site.xml into the resources directory.
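
All of the examples below read their input through a small helper, Util.readFromTxtByLineToDf, which turns a line-oriented text file into a DataFrame. The helper itself is not shown in this post; a minimal sketch, assuming a comma-separated file whose first line names the columns (rowkey, lastupdatedttm, dt, ...), could look like this:

package org.hj.hudi

import org.apache.spark.sql.{DataFrame, SparkSession}

object Util {
    // Read a delimited text file into a DataFrame.
    // Assumes comma-separated values with a header line such as
    // "rowkey,lastupdatedttm,dt,..."; adjust the separator and header
    // handling to match the real test data files.
    def readFromTxtByLineToDf(spark: SparkSession, path: String): DataFrame = {
        spark.read
            .option("header", "true")
            .option("sep", ",")
            .csv(path)
    }
}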

Inserting data

package com.hudi

import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.config.HoodieWriteConfig
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.hj.hudi.Util

object HudiInsert {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder.appName("hudi insert").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[3]").getOrCreate()
        // Read a text file into a DataFrame
        val insertData = Util.readFromTxtByLineToDf(spark, "E:\\Demo\\hudi-test-master\\src\\main\\resources\\test_insert_data.txt")
        insertData.write.format("org.apache.hudi")
            // Record key column (primary key)
            .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
            // Precombine column: the record's last-update timestamp
            .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
            // Shuffle parallelism for writes
            .option("hoodie.insert.shuffle.parallelism", "2")
            .option("hoodie.upsert.shuffle.parallelism", "2")
            // Table name
            .option(HoodieWriteConfig.TABLE_NAME, "test")
            .mode(SaveMode.Overwrite)
            // Base path to write to
            .save("/tmp/hudi")
    }
}
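
To check that the insert landed, the table can be read straight back from the base path; Hudi adds metadata columns such as _hoodie_record_key and _hoodie_commit_time to every row. A quick sketch, reusing the SparkSession above (the glob depends on the table's partition depth):

val written = spark.read
    .format("org.apache.hudi")
    .load("/tmp/hudi/*")
// Every row carries Hudi metadata columns alongside the data columns
written.select("_hoodie_record_key", "_hoodie_commit_time").show(false)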

Inserting partitioned data

package com.hudi

import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.config.{HoodieIndexConfig, HoodieWriteConfig}
import org.apache.hudi.index.HoodieIndex
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.hj.hudi.Util

object HudiInsertBy {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder.appName("hudi insert").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[*]").getOrCreate()
        // Read a text file into a DataFrame
        val insertData = Util.readFromTxtByLineToDf(spark, "E:\\Demo\\hudi-test-master\\src\\main\\resources\\test_insert_data.txt")
        insertData.write.format("org.apache.hudi")
            // Record key column (primary key)
            .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
            // Precombine column: the record's last-update timestamp
            .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
            // Partition column
            .option(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, "dt")
            // Whether a record is moved to the new partition directory when its partition value changes
            .option(HoodieIndexConfig.BLOOM_INDEX_UPDATE_PARTITION_PATH, "true")
            // Index type: HBASE, INMEMORY, BLOOM and GLOBAL_BLOOM are available.
            // GLOBAL_BLOOM is required so a record can still be found after its partition changes.
            .option(HoodieIndexConfig.INDEX_TYPE_PROP, HoodieIndex.IndexType.GLOBAL_BLOOM.name())
            // Shuffle parallelism for writes
            .option("hoodie.insert.shuffle.parallelism", "2")
            .option("hoodie.upsert.shuffle.parallelism", "2")
            .option(HoodieWriteConfig.TABLE_NAME, "test_partition")
            .mode(SaveMode.Overwrite)
            .save("/tmp/hudi")
    }
}
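
The effect of BLOOM_INDEX_UPDATE_PARTITION_PATH can be seen by upserting a record whose dt value has changed: with a GLOBAL_BLOOM index the record is located in its old partition and rewritten under the new one. A minimal sketch, reusing the SparkSession and imports above (the rowkey and dt values are made up, and the three columns stand in for the table's real schema):

import spark.implicits._

// "row_1" previously lived under dt=2020-08-01; writing it with
// dt=2020-08-02 moves it to the new partition directory.
val moved = Seq(("row_1", "2020-08-02 10:00:00", "2020-08-02"))
    .toDF("rowkey", "lastupdatedttm", "dt")
moved.write.format("org.apache.hudi")
    .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
    .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
    .option(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, "dt")
    .option(HoodieIndexConfig.BLOOM_INDEX_UPDATE_PARTITION_PATH, "true")
    .option(HoodieIndexConfig.INDEX_TYPE_PROP, HoodieIndex.IndexType.GLOBAL_BLOOM.name())
    .option(HoodieWriteConfig.TABLE_NAME, "test_partition")
    .mode(SaveMode.Append)
    .save("/tmp/hudi")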

Upserting data (existing records are updated; new records are inserted)

package com.hudi

import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.config.HoodieWriteConfig
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.hj.hudi.Util

object HudiUpsert {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder.appName("hudi upsert").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[3]").getOrCreate()
        val upsertData = Util.readFromTxtByLineToDf(spark, "E:\\Demo\\hudi-test-master\\src\\main\\resources\\test_update_data.txt")

        upsertData.write.format("org.apache.hudi")
            // Record key column (primary key)
            .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
            // Precombine column: the record's last-update timestamp
            .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
            // Table name
            .option(HoodieWriteConfig.TABLE_NAME, "test")
            // Shuffle parallelism for writes
            .option("hoodie.insert.shuffle.parallelism", "2")
            .option("hoodie.upsert.shuffle.parallelism", "2")
            // Append so the existing table is upserted rather than replaced
            .mode(SaveMode.Append)
            // Base path to write to
            .save("/tmp/hudi")
    }
}
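
When a single batch contains several rows with the same rowkey, the precombine column decides which one survives: Hudi keeps the row with the largest lastupdatedttm before writing. A small sketch, reusing the SparkSession and imports above (the values are made up for illustration):

import spark.implicits._

// Two updates for the same key in one batch: after precombining on
// lastupdatedttm, only the 11:00 version reaches the table.
val dupes = Seq(
    ("row_1", "2020-08-01 10:00:00"),
    ("row_1", "2020-08-01 11:00:00")
).toDF("rowkey", "lastupdatedttm")
dupes.write.format("org.apache.hudi")
    .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
    .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
    .option(HoodieWriteConfig.TABLE_NAME, "test")
    .mode(SaveMode.Append)
    .save("/tmp/hudi")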

Upserting partitioned data

package com.hudi

import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.config.{HoodieIndexConfig, HoodieWriteConfig}
import org.apache.hudi.index.HoodieIndex
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.hj.hudi.Util

object HudiUpsertBy {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder.appName("upsert partition").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[3]").getOrCreate()
        val upsertData = Util.readFromTxtByLineToDf(spark, "E:\\Demo\\hudi-test-master\\src\\main\\resources\\test_partition_update_data.txt")

        upsertData.write.format("org.apache.hudi")
            // Record key column (primary key)
            .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
            // Precombine column: the record's last-update timestamp
            .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
            // Partition column
            .option(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, "dt")
            .option(HoodieWriteConfig.TABLE_NAME, "test_partition")
            // A global index, so records are found even after their partition changed
            .option(HoodieIndexConfig.INDEX_TYPE_PROP, HoodieIndex.IndexType.GLOBAL_BLOOM.name())
            .option("hoodie.insert.shuffle.parallelism", "2")
            .option("hoodie.upsert.shuffle.parallelism", "2")
            .mode(SaveMode.Append)
            .save("/tmp/hudi")
    }
}

Deleting data (works like an upsert: records that exist in the table are deleted)

package com.hudi

import org.apache.hudi.DataSourceWriteOptions
import org.apache.hudi.config.HoodieWriteConfig
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.hj.hudi.Util

object HudiDelete {
    def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder.appName("hudi delete").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[3]").getOrCreate()
        val deleteData = Util.readFromTxtByLineToDf(spark, "E:\\Demo\\hudi-test-master\\src\\main\\resources\\test_partition_delete_data.txt")
        deleteData.write.format("org.apache.hudi")
            // Record key column (primary key)
            .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "rowkey")
            // Precombine column: the record's last-update timestamp
            .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "lastupdatedttm")
            // Table name
            .option(HoodieWriteConfig.TABLE_NAME, "test")
            // Hard delete: an empty payload replaces (removes) each matched record
            .option(DataSourceWriteOptions.PAYLOAD_CLASS_OPT_KEY, "org.apache.hudi.EmptyHoodieRecordPayload")
            // Append mode so the delete is applied to the existing table
            .mode(SaveMode.Append)
            .save("/tmp/hudi")
    }
}
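
A quick way to confirm the hard delete, reusing the SparkSession above: read the table back and check that a deleted key no longer appears ("row_1" below stands in for a key from test_partition_delete_data.txt).

import org.apache.spark.sql.functions.col

val remaining = spark.read
    .format("org.apache.hudi")
    .load("/tmp/hudi/*")
// After a hard delete the count for a deleted key should be zero
println(remaining.filter(col("_hoodie_record_key") === "row_1").count())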

Querying data

package com.hudi

import org.apache.spark.sql.SparkSession

object HudiQuery {
    def main(args: Array[String]): Unit = {
        val basePath = "/tmp/hudi"
        val spark = SparkSession.builder.appName("hudi query").config("spark.serializer", "org.apache.spark.serializer.KryoSerializer").master("local[3]").getOrCreate()
        // Snapshot query: the glob must cover the partition depth,
        // here one partition level (dt) plus the data files under it
        val tripsSnapshotDF = spark.read
            .format("org.apache.hudi")
            .load(basePath + "/*/*")

        tripsSnapshotDF.show()
    }
}