
I am trying to connect to HBase from Java (in Eclipse) in the Cloudera VM, but I am getting an error. I am able to connect when I package the program as a JAR and run it from the command line inside the Cloudera VM, but not from Java in Eclipse.

My Java program:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.*;
    import org.apache.hadoop.hbase.util.Bytes;
    //import org.apache.hadoop.mapred.MapTask;
    import java.io.FileWriter;
    import java.io.IOException;

    public class HbaseConnection {

        public static void main(String[] args) throws IOException {
            // Create an HBase configuration and add the cluster's hbase-site.xml as a resource.
            Configuration config = HBaseConfiguration.create();
            config.addResource("/usr/lib/hbase/conf/hbase-site.xml");

            // Insert one test row with row key "testkey" into test_table.
            HTable table = new HTable(config, "test_table");
            byte[] columnFamily = Bytes.toBytes("colf");
            byte[] idColumnName = Bytes.toBytes("id");
            byte[] groupIdColumnName = Bytes.toBytes("g_id");

            Put put = new Put(Bytes.toBytes("testkey"));
            put.add(columnFamily, idColumnName, Bytes.toBytes("test id"));
            put.add(columnFamily, groupIdColumnName, Bytes.toBytes("test group id"));
            table.put(put);
            table.close();
        }
    }

I have also kept hbase-site.xml in the source folder in Eclipse, so it is on the classpath; the same hbase-site.xml works when I run the program from the command line (a quick way to check which configuration is actually picked up is sketched after the XML below):

    <property>
      <name>hbase.rest.port</name>
      <value>8070</value>
      <description>The port for the HBase REST server.</description>
    </property>

    <property>
      <name>hbase.cluster.distributed</name>
      <value>true</value>
    </property>

    <property>
      <name>hbase.rootdir</name>
      <value>hdfs://quickstart.cloudera:8020/hbase</value>
    </property>

    <property>
      <name>hbase.regionserver.ipc.address</name>
      <value>0.0.0.0</value>
    </property>

    <property>
      <name>hbase.master.ipc.address</name>
      <value>0.0.0.0</value>
    </property>

    <property>
      <name>hbase.thrift.info.bindAddress</name>
      <value>0.0.0.0</value>
    </property>
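
To double-check that this is the configuration the program actually sees, one can print a couple of the keys the file sets right after creating the configuration (a minimal sketch; the class name is just for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class ConfigCheck {
        public static void main(String[] args) {
            // hbase-site.xml on the classpath is loaded automatically by create().
            Configuration config = HBaseConfiguration.create();
            // If these print default values (e.g. a local rootdir) instead of the
            // values from the file above, the file was not found on the classpath.
            System.out.println("hbase.rootdir = " + config.get("hbase.rootdir"));
            System.out.println("hbase.cluster.distributed = " + config.get("hbase.cluster.distributed"));
        }
    }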

And this is the error I get when running the program in Eclipse:

log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory). 
log4j:WARN Please initialize the log4j system properly. 
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info. 
Exception in thread "main" java.io.IOException: java.lang.reflect.InvocationTargetException 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:389) 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:366) 
    at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:247) 
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:188) 
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:150) 
    at com.aig.gds.hadoop.platform.idgen.hbase.HBaseTest.main(HBaseTest.java:34) 
Caused by: java.lang.reflect.InvocationTargetException 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:387) 
    ... 5 more 
Caused by: java.util.ServiceConfigurationError: org.apache.hadoop.fs.FileSystem: Provider org.apache.hadoop.hdfs.DistributedFileSystem could not be instantiated 
    at java.util.ServiceLoader.fail(ServiceLoader.java:224) 
    at java.util.ServiceLoader.access$100(ServiceLoader.java:181) 
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:377) 
    at java.util.ServiceLoader$1.next(ServiceLoader.java:445) 
    at org.apache.hadoop.fs.FileSystem.loadFileSystems(FileSystem.java:2400) 
    at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2411) 
    at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2428) 
    at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88) 
    at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467) 
    at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449) 
    at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367) 
    at org.apache.hadoop.fs.Path.getFileSystem(Path.java:287) 
    at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) 
    at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:197) 
    at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) 
    at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) 
    at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:801) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:633) 
    ... 10 more 
Caused by: java.lang.NoSuchMethodError: org.apache.hadoop.conf.Configuration.addDeprecations([Lorg/apache/hadoop/conf/Configuration$DeprecationDelta;)V 
    at org.apache.hadoop.hdfs.HdfsConfiguration.addDeprecatedKeys(HdfsConfiguration.java:66) 
    at org.apache.hadoop.hdfs.HdfsConfiguration.<clinit>(HdfsConfiguration.java:31) 
    at org.apache.hadoop.hdfs.DistributedFileSystem.<clinit>(DistributedFileSystem.java:114) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at java.lang.Class.newInstance(Class.java:374) 
    at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:373) 
    ... 26 more 

Thanks in advance.

Answer


The root cause of your problem is in the stack trace:

NoSuchMethodError: org.apache.hadoop.conf.Configuration.addDeprecations 

This means that the version of your hadoop-common-* jar is out of sync with the version of your hadoop-hdfs-* jar, or that you have a mix of different versions on your classpath.
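
A quick way to see which jar each of the conflicting classes is actually loaded from is to print their code sources (a rough diagnostic sketch; the class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DistributedFileSystem;

    public class ClasspathCheck {
        public static void main(String[] args) {
            // Prints the jar each class was loaded from; the two hadoop versions should match.
            System.out.println(Configuration.class
                    .getProtectionDomain().getCodeSource().getLocation());
            System.out.println(DistributedFileSystem.class
                    .getProtectionDomain().getCodeSource().getLocation());
        }
    }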

Note that addDeprecations exists in Hadoop 2.3.0 and later: https://hadoop.apache.org/docs/r2.3.0/api/org/apache/hadoop/conf/Configuration.html

but is missing in 2.2.0 and earlier: https://hadoop.apache.org/docs/r2.2.0/api/org/apache/hadoop/conf/Configuration.html
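
To confirm at runtime which side of that line your hadoop-common jar falls on, a small reflective check works without compiling against the newer API (again just a sketch, class name is illustrative):

    import java.lang.reflect.Method;
    import org.apache.hadoop.conf.Configuration;

    public class HadoopVersionCheck {
        public static void main(String[] args) {
            boolean found = false;
            // addDeprecations(DeprecationDelta[]) was added to Configuration in hadoop-common 2.3.0.
            for (Method m : Configuration.class.getMethods()) {
                if (m.getName().equals("addDeprecations")) {
                    found = true;
                    break;
                }
            }
            System.out.println(found
                    ? "hadoop-common is 2.3.0 or newer (addDeprecations present)"
                    : "hadoop-common is 2.2.0 or older (addDeprecations missing)");
        }
    }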