
spark-submit job submission fails with java.lang.ClassNotFoundException: Demo02

Case: submit a Spark SQL program and run it on Spark in single-machine (local) mode.

package demo01

import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.SparkConf

object Demo02 {
  def main(args: Array[String]): Unit = {
    // create the configuration
    val conf = new SparkConf()
    conf.setAppName("Demo02")   // set the application name
    conf.setMaster("local")
    // conf.set("spark.shuffle.manager", "hash")

    // build the SparkContext from the configuration
    val sc = new SparkContext(conf)

    // create the SQLContext
    val sqlContext = new SQLContext(sc)

    import sqlContext.implicits._
    val rdd = sc.makeRDD(List((1, "zhang"), (2, "li"), (3, "wang")))
    val df = rdd.toDF("id", "name")

    // df.registerTempTable("tabx")
    df.createTempView("tabx")

    val df2 = sqlContext.sql("select * from tabx order by name")

    val rdd2 = df2.toJavaRDD.repartition(1)
    rdd2.saveAsTextFile("file:///home/hadoop/df.txt")
    // rdd2.saveAsTextFile("d:/df")

    sc.stop()
  }
}

Package the program into a jar --> sql.jar
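For reference, a minimal sbt build that could produce such a jar is sketched below; the project name, Scala version, and Spark version are assumptions and should be matched to your environment.

// build.sbt -- minimal sketch; the Scala and Spark versions here are assumptions
name := "sql"
version := "1.0"
scalaVersion := "2.11.8"

// marked "provided" because spark-submit supplies the Spark jars at run time
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "2.1.0" % "provided",
  "org.apache.spark" %% "spark-sql"  % "2.1.0" % "provided"
)

Running sbt package then emits a jar under target/scala-2.11/, which can be renamed to sql.jar.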

Submit it to the cluster:  ./spark-submit --class Demo02 ./sql.jar

java.lang.ClassNotFoundException: Demo02
        at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.spark.util.Utils$.classForName(Utils.scala:230)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:732)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Solution: pass the fully qualified class name (package + class) to --class:  ./spark-submit --class demo01.Demo02 ./sql.jar
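The class is declared in package demo01, so inside the jar it is stored as demo01/Demo02.class; --class therefore needs demo01.Demo02, not the bare class name Demo02. If unsure, listing the jar contents shows the exact entry (the expected output below assumes the jar was built from the code above):

jar tf sql.jar | grep Demo02    # expect demo01/Demo02.class and demo01/Demo02$.class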