I am getting a "Module not found" error in Scala. I am trying to open a JDBC connection to Oracle, join two tables, and then print the result. (Scala, Oracle, JDBC)

My Scala file:
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
object sparkJDBC {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Simple Application")
      .setMaster("local[2]")
      .set("spark.executor.memory", "1g")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Load the two Oracle tables over JDBC
    val chrttype = sqlContext.load("jdbc",
      Map("url" -> "jdbc:oracle:thin:gductv1/[email protected]//localhost:1521/XE",
          "dbtable" -> "chrt_typ"))
    val clntlvl1 = sqlContext.load("jdbc",
      Map("url" -> "jdbc:oracle:thin:gductv1/[email protected]//localhost:1521/XE",
          "dbtable" -> "clnt_lvl1"))

    // Join on the key columns, then print the rows and the schema
    val join2 = chrttype.join(clntlvl1, chrttype.col("chrt_typ_key") === clntlvl1("lvl1_key"))
    join2.foreach(println)
    join2.printSchema()
  }
}
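As an aside, SQLContext.load(source, options) is deprecated since Spark 1.4. A minimal sketch of the same read through the DataFrameReader API, assuming the Oracle JDBC driver jar is on the classpath and using placeholder credentials (not the original connection string):

import org.apache.spark.sql.{DataFrame, SQLContext}

// Sketch only: same JDBC read via the non-deprecated DataFrameReader API.
// The URL/credentials below are placeholders; the ojdbc driver jar must be on the classpath.
def loadOracleTable(sqlContext: SQLContext, table: String): DataFrame =
  sqlContext.read
    .format("jdbc")
    .options(Map(
      "url"     -> "jdbc:oracle:thin:user/password@//localhost:1521/XE",
      "driver"  -> "oracle.jdbc.OracleDriver",
      "dbtable" -> table))
    .load()

// e.g. val chrttype = loadOracleTable(sqlContext, "chrt_typ")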
My build.sbt file:
name := "sparkJDBC"
version := "0.1"
scalaVersion := "2.11.7"
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.5.1"
libraryDependencies += "org.apache.tika" % "tika-core" % "1.11"
libraryDependencies += "org.apache.tika" % "tika-parsers" % "1.11"
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "2.7.1"
libraryDependencies += "org.apache.spark" % "spark-sql" % "1.0.0"
The error output is:
[warn] module not found: org.apache.spark#spark-sql;1.0.0
[warn] ==== local: tried
[warn] C:\Users\.ivy2\local\org.apache.spark\spark-sql\1.0.0\ivys\ivy.xml
[warn] ==== public: tried
[warn] https://repo1.maven.org/maven2/org/apache/spark/spark-sql/1.0.0/spark-sql-1.0.0.pom
[info] Resolving jline#jline;2.12.1 ...
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: UNRESOLVED DEPENDENCIES ::
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: org.apache.spark#spark-sql;1.0.0: not found
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[error] (*:update) sbt.ResolveException: unresolved dependency: org.apache.spark#spark-sql;1.0.0: not found
Please help me figure out what is causing this.
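For reference, the artifact the resolver reports as missing is org.apache.spark#spark-sql;1.0.0: the spark-sql line in build.sbt uses a single % (so no _2.11 suffix is appended) and a version that does not match spark-core. A sketch of dependency lines that should resolve, assuming Spark 1.5.1 is the intended version for both Spark artifacts:

libraryDependencies += "org.apache.spark" %% "spark-core" % "1.5.1"
libraryDependencies += "org.apache.spark" %% "spark-sql" % "1.5.1"
libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "2.7.1"
libraryDependencies += "org.apache.tika" % "tika-core" % "1.11"
libraryDependencies += "org.apache.tika" % "tika-parsers" % "1.11"

The Oracle JDBC driver itself (ojdbc) is not published to Maven Central, so it typically has to be added as an unmanaged jar under lib/ or installed into a local repository.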
Comment: Doesn't the current answer resolve what you asked? –