2016-05-09 124 views
0

這是我用來 join 兩個 DataFrame 的代碼,運行時拋出了 Exception in thread "main" scala.ScalaReflectionException:

package org.test.rddjoins 
import org.apache.spark.SparkConf 
import org.apache.spark.SparkConf 
import org.apache.spark._ 
import org.apache.spark.rdd.RDD 


/** Minimal Spark SQL job: joins two comma-separated text files
  * ("name,score" and "name,age") on the name column and prints the result.
  *
  * NOTE(review): this uses the Spark 1.0.x API — `registerAsTable` and the
  * implicit RDD-to-SchemaRDD conversion pulled in by `import sqlContext._`.
  * The ScalaReflectionException reported alongside this code is a classpath /
  * Scala-version mismatch (Spark jars missing or built for a different Scala
  * version than 2.11), not a defect in this logic — confirm the build setup.
  */
object rdd {

  // Record schemas for the two input files; Spark derives the SQL table
  // schema from these case classes via reflection.
  case class Score(name: String, score: Int)
  case class Age(name: String, age: Int)

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf()
      .setAppName("rdd")
      .setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // Brings createSchemaRDD into scope: the implicit conversion that lets a
    // plain RDD of case classes be registered as a SQL table below.
    import sqlContext._

    // BUG FIX: the original never called sc.stop(), leaking the SparkContext
    // (threads, UI port, temp dirs) on both the success and failure paths.
    try {
      // Each input line is "name,value"; trim tolerates spaces after the comma.
      val scores = sc.textFile("scores.txt").map(_.split(",")).map(s => Score(s(0), s(1).trim.toInt))
      val ages = sc.textFile("ages.txt").map(_.split(",")).map(s => Age(s(0), s(1).trim.toInt))

      scores.registerAsTable("scores")
      ages.registerAsTable("ages")

      // Inner join on the shared name column.
      val joined = sqlContext.sql("""
SELECT a.name, a.age, s.score
FROM ages a JOIN scores s
ON a.name = s.name""")
      joined.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }
}

運行它的時候,我收到以下錯誤:

Exception in thread "main" scala.ScalaReflectionException: class org.apache.spark.sql.catalyst.ScalaReflection in JavaMirror with primordial classloader with boot classpath [C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-library_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Users\Owner\Downloads\Compressed\eclipse\plugins\org.scala-lang.scala-reflect_2.11.8.v20160304-115712-1706a37eb8.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\resources.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\rt.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\sunrsasign.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_77\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_77\jre\classes] not found. 
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:123) 
    at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:22) 
    at org.apache.spark.sql.catalyst.ScalaReflection$$typecreator1$1.apply(ScalaReflection.scala:115) 
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232) 
    at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232) 
    at scala.reflect.api.TypeTags$class.typeOf(TypeTags.scala:341) 
    at scala.reflect.api.Universe.typeOf(Universe.scala:61) 
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:115) 
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33) 
    at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:100) 
    at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:33) 
    at org.apache.spark.sql.catalyst.ScalaReflection$class.attributesFor(ScalaReflection.scala:94) 
    at org.apache.spark.sql.catalyst.ScalaReflection$.attributesFor(ScalaReflection.scala:33) 
    at org.apache.spark.sql.SQLContext.createSchemaRDD(SQLContext.scala:111) 
    at org.test.rddjoins.rdd$.main(rdd.scala:27) 

幫助!

回答

0

在類路徑中缺少Apache Spark庫。

該異常說明在 classpath 中找不到某個 Spark 相關的類。您應該修改您的 classpath,加入所需的 Spark jar(並確認其 Scala 版本與項目使用的 Scala 版本一致)。

相關問題