2012-09-06

I am using Spring Data + HBase to write some values into an HBase database. Unfortunately, after the first call the HbaseTemplate seems to close the connection, and HBase fails on the second insert.

I am new to Spring and HBase/Hadoop, so I don't know whether this is a Spring/HBase configuration problem or something else silly.

The test class:

package org.springframework.data.hadoop.samples; 

import org.apache.hadoop.hbase.client.HTable; 
import org.apache.hadoop.hbase.client.Put; 
import org.apache.hadoop.hbase.util.Bytes; 
import org.junit.Test; 
import org.junit.runner.RunWith; 
import org.springframework.beans.factory.annotation.Autowired; 
import org.springframework.context.ApplicationContext; 
import org.springframework.data.hadoop.hbase.HbaseTemplate; 
import org.springframework.data.hadoop.hbase.TableCallback; 
import org.springframework.test.context.ContextConfiguration; 
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; 

@RunWith(SpringJUnit4ClassRunner.class) 
@ContextConfiguration("/hbase-context.xml") 
public class WordCountWorkflowTest { 

    @Autowired 
    private ApplicationContext ctx; 

    @Autowired 
    private HbaseTemplate hbaseTemplate; 

    @Test 
    public void testWorkflowNS() throws Exception { 
     if (hbaseTemplate == null) { 
      throw new NullPointerException("template null!"); 
     } 
     // Write to HBase 
     InnerTableCallback itc = new InnerTableCallback("JustaString", 42); 
     hbaseTemplate.execute("Wordcount", itc); 
     itc = new InnerTableCallback("Anotherstring", 23); 
     // Here the HBase insert fails 
     hbaseTemplate.execute("Wordcount", itc); 
    } 

    @Test 
    public void testWorkflowNSSucess() throws Exception { 
     System.out.println("done"); 
    } 

    /**
     * Inner class providing access to the HBase table that stores the
     * counted words and the number of matches.
     */
    class InnerTableCallback implements TableCallback<Object> { 

     String foundStr; 

     int no; 

     /**
      * The constructor just saves the given foundStr/no tuple in instance
      * variables.
      *
      * @param foundStr
      *   string found in the text
      * @param no
      *   number of found matches
      */
     public InnerTableCallback(String foundStr, int no) { 
      this.foundStr = foundStr; 
      this.no = no; 
     } 

     /**
      * Puts the given string and the number of found matches into the HBase
      * table. The column family is "cf1", the column is "matches", and the
      * row key is the found string.
      */
     @Override 
     public Object doInTable(HTable table) throws Throwable { 
      Put p = new Put(Bytes.toBytes(foundStr)); 
      // Put operation on hbase shell: 
      // hbase(main):005:0> put 'testtable', 'myrow-2', 'colfam1:q2', 
      // 'value-2' 
      // add(byte[] family, byte[] qualifier, byte[] value) 
      p.add(Bytes.toBytes("cf1"), Bytes.toBytes("matches"), 
        Bytes.toBytes(new Integer(no).toString())); 
      table.put(p); 
      return null; 
     } 
    } 
} 

hbase-context.xml:

<?xml version="1.0" encoding="UTF-8"?> 
<beans xmlns="http://www.springframework.org/schema/beans" 
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
    xmlns:context="http://www.springframework.org/schema/context" 
    xmlns:p="http://www.springframework.org/schema/p" 
    xmlns:hdp="http://www.springframework.org/schema/hadoop" 
    xsi:schemaLocation=" 
     http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd 
     http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd 
      http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd"> 

    <context:property-placeholder location="classpath:batch.properties,classpath:hadoop.properties" 
      ignore-resource-not-found="true" ignore-unresolvable="true" /> 

    <context:component-scan base-package="org.springframework.data.hadoop.samples" /> 

    <bean class="org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor"/> 

    <bean id="hbaseTemplate" class="org.springframework.data.hadoop.hbase.HbaseTemplate" p:configuration-ref="hbaseConfiguration"/> 

    <hdp:hbase-configuration />
</beans> 
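
The <hdp:hbase-configuration> element above relies on its defaults. As a side note, later spring-hadoop releases document optional stop-proxy and delete-connection attributes on this element that control how the underlying HBase connection is torn down when the context shuts down; whether the version used here supports them is an assumption, but the element would look roughly like this:

<!-- assumed attributes from a newer spring-hadoop XSD; verify against your version -->
<hdp:hbase-configuration stop-proxy="false" delete-connection="false" />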

Stack trace:

org.springframework.data.hadoop.hbase.HbaseSystemException: org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@61bb0cc0 closed; nested exception is java.io.IOException: org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@61bb0cc0 closed 
    at org.springframework.data.hadoop.hbase.HbaseUtils.convertHbaseException(HbaseUtils.java:42) 
    at org.springframework.data.hadoop.hbase.HbaseTemplate.convertHbaseAccessException(HbaseTemplate.java:111) 
    at org.springframework.data.hadoop.hbase.HbaseTemplate.execute(HbaseTemplate.java:82) 
    at org.springframework.data.hadoop.samples.WordCountWorkflowTest.testWorkflowNS(WordCountWorkflowTest.java:35) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:601) 
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44) 
    .... 
Caused by: java.io.IOException: org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@61bb0cc0 closed 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:822) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.locateRegion(HConnectionManager.java:810) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatchCallback(HConnectionManager.java:1492) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatch(HConnectionManager.java:1377) 
    at org.apache.hadoop.hbase.client.HTable.flushCommits(HTable.java:916) 
    at org.apache.hadoop.hbase.client.HTable.doPut(HTable.java:772) 
    at org.apache.hadoop.hbase.client.HTable.put(HTable.java:747) 
    at org.springframework.data.hadoop.samples.WordCountWorkflowTest$InnerTableCallback.doInTable(WordCountWorkflowTest.java:83) 
    at org.springframework.data.hadoop.hbase.HbaseTemplate.execute(HbaseTemplate.java:72) 

Cheers, R

Answer


Solved!

I was using an older, deprecated version of the spring-data-hadoop package (a Milestone release) in my Maven pom.xml. Switching to the Snapshot repository fixed the error handling for HBase tables.
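
For reference, the relevant part of the pom.xml after the switch looked roughly like the sketch below; the repository URL and the BUILD-SNAPSHOT version are assumptions from memory, so check the current Spring repositories before copying:

<repositories> 
    <repository> 
        <id>spring-snapshot</id> 
        <url>http://repo.springsource.org/libs-snapshot</url> 
    </repository> 
</repositories> 

<dependencies> 
    <dependency> 
        <groupId>org.springframework.data</groupId> 
        <artifactId>spring-data-hadoop</artifactId> 
        <!-- assumed snapshot version; use whatever the snapshot repository currently publishes --> 
        <version>1.0.0.BUILD-SNAPSHOT</version> 
    </dependency> 
</dependencies> 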

If you are using spring-batch: with the new version I had to change the Hadoop <tasklet> definition in my *context.xml to <job-tasklet>.

The error message from the XML parser was:

The matching wildcard is strict, but no declaration can be found for element 'tasklet' 
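
The change itself is essentially a rename of the element. A rough sketch, assuming the MapReduce job is defined elsewhere as an <hdp:job> bean named mr-job (the bean name and attributes are assumptions based on the spring-hadoop docs):

<!-- old milestone style, rejected by the newer XSD: -->
<!-- <hdp:tasklet id="hadoop-tasklet" job-ref="mr-job"/> -->

<!-- new style -->
<hdp:job-tasklet id="hadoop-tasklet" job-ref="mr-job"/>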

Hope this helps someone :-)