
When Spark runs during my sbt tests, I get this exception (Apache Spark wrong akka-remote netty version):

18:58:49.049 [sparkDriver-akka.actor.default-dispatcher-2] ERROR akka.actor.ActorSystemImpl - Uncaught fatal error from thread [sparkDriver-akka.remote.default-remote-dispatcher-5] shutting down ActorSystem [sparkDriver] 
java.lang.VerifyError: (class: org/jboss/netty/channel/socket/nio/NioWorkerPool, method: createWorker signature: (Ljava/util/concurrent/Executor;)Lorg/jboss/netty/channel/socket/nio/AbstractNioWorker;) Wrong return type in function 
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:283) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.transport.netty.NettyTransport.<init>(NettyTransport.scala:240) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_45] 
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_45] 
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_45] 
at java.lang.reflect.Constructor.newInstance(Constructor.java:422) ~[na:1.8.0_45] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:78) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at scala.util.Try$.apply(Try.scala:192) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:73) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at scala.util.Success.flatMap(Try.scala:231) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:84) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:692) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$9.apply(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:728) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.Iterator$class.foreach(Iterator.scala:742) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.AbstractIterator.foreach(Iterator.scala:1194) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.AbstractIterable.foreach(Iterable.scala:54) ~[scala-library-2.11.7.jar:0.13.8] 
at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:727) ~[scala-library-2.11.7.jar:0.13.8] 
at akka.remote.EndpointManager.akka$remote$EndpointManager$$listens(Remoting.scala:684) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager$$anonfun$receive$2.applyOrElse(Remoting.scala:492) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.actor.Actor$class.aroundReceive(Actor.scala:465) ~[akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.remote.EndpointManager.aroundReceive(Remoting.scala:395) ~[akka-remote_2.11-2.3.4-spark.jar:na] 
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.actor.ActorCell.invoke(ActorCell.scala:487) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:238) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.Mailbox.run(Mailbox.scala:220) [akka-actor_2.11-2.3.4-spark.jar:na] 
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:393) [akka-actor_2.11-2.3.4-spark.jar:na] 
... 

My first thought was that one of my sub-libraries imports a wrong netty version.

After looking at my dependency graph, I found four different versions of netty: 3.6.6, 3.8.0, 3.9.3 and 4.0.23.
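
For reference, a quick sketch of how such a graph can be inspected with the sbt-dependency-graph plugin (the plugin choice is my assumption; any equivalent tool works):

    // project/plugins.sbt -- hypothetical plugin setup for inspecting the graph
    addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")

    // then, from the sbt shell:
    //   dependencyTree                             prints the full tree
    //   whatDependsOn io.netty netty 3.8.0.Final   shows who pulls in a given netty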

These versions are mostly imported by Spark itself: 4.0.23 is imported directly by Spark, and 3.8.0 by its sub-dependency akka-remote.

I tried to exclude the netty dependency from all my sub-dependencies with excludeAll(ExclusionRule(organization = "io.netty")) and to add each netty version back one by one, but it did not solve the problem. I also tried excluding all com.typesafe.akka deps and using akka-remote 2.4.0; I get the same problem.

The netty version needed by akka-remote is 3.8.0; even forcing this one does not work. I have also forced my project to use the Spark-specific Akka build, akka-remote 2.3.4-spark, and it does not change anything.
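
For completeness, this is what I mean by "forcing" a version, as a sketch using sbt 0.13's dependencyOverrides (the exact coordinates are my assumption):

    // pin netty and the Spark-flavoured akka fork explicitly
    dependencyOverrides += "io.netty" % "netty" % "3.8.0.Final"
    dependencyOverrides += "org.spark-project.akka" % "akka-remote_2.11" % "2.3.4-spark"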

Here is my build.sbt:

libraryDependencies ++= Seq(
    "com.datastax.cassandra" % "cassandra-driver-core"    % "2.1.7.1" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.datastax.spark"  %% "spark-cassandra-connector"   % "1.4.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.datastax.spark"  %% "spark-cassandra-connector-embedded" % "1.4.0" % Test excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "xxx.xxxxx"    %% "shed"        % "0.10.0-MOK-1848-DEBUG-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "com.twitter"    %% "util-collection"      % "6.27.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-core"       % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-generic"      % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.circe"    %% "circe-jawn"       % "0.2.0-SNAPSHOT" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "io.netty"    % "netty"        % "3.6.6.Final", 
    //"com.typesafe.akka"  % "akka-remote_2.11"     % "2.3.4", 
    "org.apache.cassandra" % "cassandra-all"      % "2.1.5" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.cassandra" % "cassandra-thrift"     % "2.0.5" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-streaming-kafka"    % "1.4.0" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-streaming"      % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-core"       % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.apache.spark"  %% "spark-sql"       % sparkVersion % "provided" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.scalaz.stream"  % "scalaz-stream_2.11"     % "0.7.3" % Test excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")), 
    "org.specs2"    %% "specs2-core"       % "3.6.1-scalaz-7.0.6" % "test" excludeAll(ExclusionRule(organization = "io.netty"), ExclusionRule(organization = "com.typesafe.akka")) 
) 

As you can see, I tried to exclude as much as I could.
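
One way to double-check which jar actually wins on the test classpath is to ask the classloader where the failing class comes from (a quick sketch, e.g. run from the sbt console):

    // prints the jar that provides the class failing verification
    val cls = Class.forName("org.jboss.netty.channel.socket.nio.NioWorkerPool")
    println(cls.getProtectionDomain.getCodeSource.getLocation)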

And the sub-project shed contains:

"com.github.scopt"   %% "scopt"    % "3.3.0" , 
    "com.typesafe.akka"   %% "akka-testkit"   % "2.3.8"    % "test", 
    "com.typesafe.play"   %% "play-test"   % "2.3.8"    % "test", 
    "com.tinkerpop.gremlin"  % "gremlin-java"   % gremlinVersion, 
    "com.tinkerpop"    % "pipes"    % gremlinVersion, 
    "com.thinkaurelius.titan" % "titan-core"   % titanVersion, 
    "com.thinkaurelius.titan" % "titan-cassandra"  % titanVersion, 
    "com.thinkaurelius.titan" % "titan-berkeleyje"  % titanVersion, 
    "com.netaporter"    %% "scala-uri"   % "0.4.8", 
    "com.github.nscala-time"  %% "nscala-time"   % "1.8.0", 
    "com.mandubian"    %% "play-json-zipper"  % "1.2", 
    "com.michaelpollmeier"  %% "gremlin-scala"  % "2.6.1", 
    "com.ansvia.graph"   %% "blueprints-scala"  % "0.1.61-20150416-SNAPSHOT", 
    "io.kanaka"     %% "play-monadic-actions" % "1.0.1" exclude("com.typesafe.play", "play_2.11"), 
    "org.scalaz"     %% "scalaz-concurrent" % "7.0.6", 
    "com.chuusai"    %% "shapeless"   % "2.3.0-SNAPSHOT", 
    ("io.github.jto"    %% "validation-json"  % "1.0").exclude("org.tpolecat", "tut-core_2.11"), 
    "org.parboiled"    %% "parboiled"   % "2.1.0", 
    "com.typesafe.scala-logging" %% "scala-logging"  % "3.1.0", 
    "ch.qos.logback"    % "logback-classic"  % "1.1.2", 
    "xxx.xxxxxxxxxx"    %% "chuck"    % "0.9.0-SNAPSHOT", 
    "xxx.xxxxxxxxxx"    %% "shed-graph"   % "0.9.0-MOK-1848-SNAPSHOT" exclude("com.thinkaurelius.titan", "titan-core"), 
    "io.circe"     %% "circe-core"   % "0.2.0-SNAPSHOT", 
    "io.circe"     %% "circe-generic"  % "0.2.0-SNAPSHOT", 
    "io.circe"     %% "circe-jawn"   % "0.2.0-SNAPSHOT" 

Can you share the dependencies section of your sbt build? – eliasah


Yes, I just added the build.sbt deps. – crak

Answer


It was a trap!

Netty's organization changed at some point, from org.jboss.netty to io.netty, but both contain the same packages.

exclude("org.jboss.netty", "netty") solved my problem.
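
Applied to a build like the one above, that means excluding the old coordinates as well as io.netty, for example (a sketch in the same excludeAll style; the same rule goes on every dependency that drags netty in):

    "com.datastax.cassandra" % "cassandra-driver-core" % "2.1.7.1" excludeAll(
      ExclusionRule(organization = "io.netty"),
      ExclusionRule(organization = "org.jboss.netty")  // the old netty 3 coordinates
    )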


I have the same problem. Which version of Spark do you use? And can you share which file you added this line to, and in which sbt build function? Thanks! – Djvu


Use exclude("org.jboss.netty", "netty") like this: "com.thinkaurelius.titan" % "titan-cassandra" % titanVersion exclude("org.jboss.netty", "netty"). Of course you may have another dependency that includes netty. – crak


Thanks for your answer. I use Spark 1.5, which manages its dependencies with pom.xml, so I did not know how to change it. I later set up an Artifactory proxy, cleared everything and recompiled, and it worked. It is strange, because I had compiled many times before and it always compiled without errors; I do not know what happened. Perhaps something changed recently in some dependency jars? – Djvu