Integrating Spark 2.1.0 with CarbonData 1.1.0

1. Create a new project

Create a new Maven project with the following pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.lcc</groupId>
    <artifactId>SomeTest</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <lombok.version>1.12.6</lombok.version>
        <dubhe.common.version>1.0.4</dubhe.common.version>
        <junit.version>4.12</junit.version>
    </properties>


    <dependencies>


        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.8</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.11</artifactId>
            <version>2.1.0</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.28</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.2</version>
        </dependency>
       
        <!-- Note: spark-sql_2.10 must be excluded here, otherwise it causes conflicts with the Spark 2.11 artifacts -->
        <dependency>
            <groupId>org.apache.carbondata</groupId>
            <artifactId>carbondata-core</artifactId>
            <version>1.1.0</version>
            <exclusions>
                <exclusion>
                    <artifactId>spark-sql_2.10</artifactId>
                    <groupId>org.apache.spark</groupId>
                </exclusion>
            </exclusions>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.carbondata/carbondata-spark2 -->
        <dependency>
            <groupId>org.apache.carbondata</groupId>
            <artifactId>carbondata-spark2</artifactId>
            <version>1.1.0</version>
        </dependency>


    </dependencies>



    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>
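
The pom above only configures the Java compiler, but the test class in the next step is written in Scala. If the project is built with Maven, a Scala compiler plugin also needs to be added under <build><plugins>. A minimal sketch using scala-maven-plugin (the plugin version here is an assumption, adjust it to your environment):

            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.2.2</version>
                <executions>
                    <execution>
                        <goals>
                            <!-- compile Scala sources under src/main/scala and src/test/scala -->
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>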

2. Test class

package com.dtwave.megrez


// CarbonSession._ provides the implicit getOrCreateCarbonSession method on SparkSession.Builder
import org.apache.spark.sql.CarbonSession._
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

object MegrezApp {

  def main(args: Array[String]): Unit = {

    // The job expects exactly two command-line arguments (a task type and a parameter string);
    // they are not used further in this minimal example
    val Array(taskType, param) = args

    // Run locally for this test; in production the master would normally come from spark-submit
    val conf = new SparkConf()
    conf.setAppName("lcc")
    conf.setMaster("local")
    val sc = new SparkContext(conf)

    // HDFS location where CarbonData keeps its table data and metadata
    val storePath = "hdfs://co1:9000/Opt/CarbonStore"
    // getOrCreateCarbonSession returns a CarbonSession, which is a regular SparkSession,
    // so from here on Spark can be used as usual
    val spark: SparkSession = SparkSession.builder().config(sc.getConf).getOrCreateCarbonSession(storePath)
  }
}
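
Once the CarbonSession is available, the usual CarbonData SQL flow can be exercised through it. A minimal sketch, to be placed inside main after the session is created; the table name and the CSV path are assumptions for illustration only:

    // Create a CarbonData table (STORED BY 'carbondata' marks it as a Carbon table)
    spark.sql("DROP TABLE IF EXISTS test_table")
    spark.sql(
      """
        | CREATE TABLE IF NOT EXISTS test_table(
        |   id string,
        |   name string,
        |   city string,
        |   age int)
        | STORED BY 'carbondata'
      """.stripMargin)

    // Load a CSV file from HDFS into the table (sample.csv is a hypothetical file
    // whose columns match the schema above)
    spark.sql("LOAD DATA INPATH 'hdfs://co1:9000/tmp/sample.csv' INTO TABLE test_table")

    // Query back through standard Spark SQL
    spark.sql("SELECT city, avg(age), sum(age) FROM test_table GROUP BY city").show()

The table's data files end up under the storePath configured when the CarbonSession was created.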