2016-07-26 122 views
5

我在 Apache Spark 的 HiveContext 上執行下面的 DROP TABLE 查詢時,拋出了 ClassCastException:

this.queryExecutor.executeQuery("Drop table user") 

和我收到以下異常:

java.lang.LinkageError: ClassCastException: attempting to cast jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/javax/ws/rs/ext/RuntimeDelegate.class to jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/javax/ws/rs/ext/RuntimeDelegate.class 
at javax.ws.rs.ext.RuntimeDelegate.findDelegate(RuntimeDelegate.java:116) 
    at javax.ws.rs.ext.RuntimeDelegate.getInstance(RuntimeDelegate.java:91) 
    at javax.ws.rs.core.MediaType.<clinit>(MediaType.java:44) 
    at com.sun.jersey.core.header.MediaTypes.<clinit>(MediaTypes.java:64) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:182) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:175) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.init(MessageBodyFactory.java:162) 
    at com.sun.jersey.api.client.Client.init(Client.java:342) 
    at com.sun.jersey.api.client.Client.access$000(Client.java:118) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:191) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:187) 
    at com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:187) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:170) 
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceInit(TimelineClientImpl.java:340) 
    at org.apache.hadoop.service.AbstractService.init(AbstractService.java:163) 
    at org.apache.hadoop.hive.ql.hooks.ATSHook.<init>(ATSHook.java:67) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at java.lang.Class.newInstance(Class.java:442) 
    at org.apache.hadoop.hive.ql.hooks.HookUtils.getHooks(HookUtils.java:60) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1309) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1293) 
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1347) 
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:495) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:484) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:290) 
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:237) 
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:236) 
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:279) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:484) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:474) 
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:613) 
    at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:89) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56) 
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130) 
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150) 
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130) 
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52) 
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817) 
    at com.accenture.aa.dmah.spark.core.QueryExecutor.executeQuery(QueryExecutor.scala:35) 
    at com.accenture.aa.dmah.attribution.transformer.MulltipleUserJourneyTransformer.transform(MulltipleUserJourneyTransformer.scala:32) 
    at com.accenture.aa.dmah.attribution.userjourney.UserJourneyBuilder$$anonfun$buildUserJourney$1.apply$mcVI$sp(UserJourneyBuilder.scala:31) 
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141) 
    at com.accenture.aa.dmah.attribution.userjourney.UserJourneyBuilder.buildUserJourney(UserJourneyBuilder.scala:29) 
    at com.accenture.aa.dmah.attribution.core.AttributionHub.executeAttribution(AttributionHub.scala:47) 
    at com.accenture.aa.dmah.attribution.jobs.AttributionJob.process(AttributionJob.scala:33) 
    at com.accenture.aa.dmah.core.DMAHJob.processJob(DMAHJob.scala:73) 
    at com.accenture.aa.dmah.core.DMAHJob.execute(DMAHJob.scala:27) 
    at com.accenture.aa.dmah.core.JobRunner.<init>(JobRunner.scala:17) 
    at com.accenture.aa.dmah.core.ApplicationInstance.initilize(ApplicationInstance.scala:48) 
    at com.accenture.aa.dmah.core.Bootstrap.boot(Bootstrap.scala:112) 
    at com.accenture.aa.dmah.core.BootstrapObj$.main(Bootstrap.scala:134) 
    at com.accenture.aa.dmah.core.BootstrapObj.main(Bootstrap.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:498) 
    at scala.tools.nsc.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:71) 
    at scala.tools.nsc.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31) 
    at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:139) 
    at scala.tools.nsc.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:71) 
    at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:139) 
    at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:28) 
    at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:45) 
    at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:35) 
    at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:45) 
    at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:74) 
    at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:96) 
    at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:105) 
    at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala) 

我看到也出現過類似的帖子(見 here 和 here),但至今沒有任何迴應。也看了 here,但不認爲那裏的做法適用於我的情況。

有趣的是,這個問題只在我們嘗試執行 drop table(或 drop table if exists)查詢時纔會出現。

希望能找到這個問題的解決方案。

+0

你對這個問題有什麼答案嗎? –

+0

你解決了這個問題嗎? – Edge7

+0

@ Edge7嗨,不...我們無法爲此解決問題。事實證明,刪除該表的要求已被逐步淘汰,因此無法對此進行更多的研究。 – hbabbar

回答

0

據我所知,上述錯誤可能是因爲類路徑中存在多個具有相同包結構的同名類,即:'javax.ws.rs.ext.RuntimeDelegate' 在不同的 JAR 中各出現了一次。該類的對象在運行時被創建並進行類型轉換(cast)。因此,很有可能負責觸發 DROP 語法的那段代碼在類路徑中多次找到了這個類,從而導致轉換失敗。

我在 CDH5 上嘗試了 DROP 和 DROP IF EXISTS,都沒有遇到問題。下面是我運行時的詳細信息:

第一次運行 — Hadoop 版本 2.6、Hive 1.1.0、Spark 1.3.1(Spark 的 lib 中包含 Hive 庫);第二次運行 — Hadoop 版本 2.6、Hive 1.1.0、Spark 1.6.1;運行模式 — CLI。

scala> sqlContext.sql("DROP TABLE SAMPLE"); 
16/08/04 11:31:39 INFO parse.ParseDriver: Parsing command: DROP TABLE SAMPLE 
16/08/04 11:31:39 INFO parse.ParseDriver: Parse Completed 
...... 
scala>sqlContext.sql("DROP TABLE IF EXISTS SAMPLE"); 
16/08/04 11:40:34 INFO parse.ParseDriver: Parsing command: DROP TABLE IF EXISTS SAMPLE 
16/08/04 11:40:35 INFO parse.ParseDriver: Parse Completed 
..... 

如果可能的話,請嘗試使用不同版本的 spark lib 執行 DROP 命令,以便縮小問題範圍。

與此同時,我正在分析相關的 jar,以找出兩個同名 RuntimeDelegate 類之間的關聯;之後我會回來確認:移除其中某個 jar 是否能解決問題,以及重新加入該 jar 是否會讓同樣的問題復現。

+0

另外,如果可能的話,請列出您環境中 Jersey 和 javax.ws.rs-api 相關 jar 的引用情況,以便進一步排查。 – Aditya