I'm invoking a MapReduce job from my Java program. When I set the job's input format to LzoTextInputFormat, the job fails, apparently because it needs the Hadoop native libraries. The full traces are below.
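
For reference, the driver is roughly of this shape (a minimal sketch; the actual company.Validation source isn't shown in the question, so the job wiring below is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import com.hadoop.mapreduce.LzoTextInputFormat;

public class Validation extends Configured implements Tool {
    public int run(String[] args) throws Exception {
        Job job = new Job(getConf(), "validation");
        job.setJarByClass(Validation.class);
        // Switching from the default TextInputFormat to LzoTextInputFormat is
        // what triggers the native-library loading seen in the traces below.
        job.setInputFormatClass(LzoTextInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new Validation(), args));
    }
}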

Could not load native gpl library 
java.lang.UnsatisfiedLinkError: no gplcompression in java.library.path 
at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1738) 
at java.lang.Runtime.loadLibrary0(Runtime.java:823) 
at java.lang.System.loadLibrary(System.java:1028) 
at com.hadoop.compression.lzo.GPLNativeCodeLoader.<clinit>(GPLNativeCodeLoader.java:32) 
at com.hadoop.compression.lzo.LzoCodec.<clinit>(LzoCodec.java:67) 
at com.hadoop.mapreduce.LzoTextInputFormat.listStatus(LzoTextInputFormat.java:58) 
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:241) 
at com.hadoop.mapreduce.LzoTextInputFormat.getSplits(LzoTextInputFormat.java:85) 
at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:885) 
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:779) 
at org.apache.hadoop.mapreduce.Job.submit(Job.java:432) 
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:447) 
at company.Validation.run(Validation.java:99) 
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65) 
at company.mapreduceTest.main(mapreduceTest.java:18) 
Apr 5, 2012 4:40:29 PM com.hadoop.compression.lzo.LzoCodec <clinit> 
SEVERE: Cannot load native-lzo without native-hadoop 
java.lang.IllegalArgumentException: Wrong FS: hdfs://D-SJC-00535164:9000/local/usecases/gbase014/outbound/seed_2012-03-12_06-34-39/1_1.lzo.index, expected: file:/// 
at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:310) 
at org.apache.hadoop.fs.RawLocalFileSystem.pathToFile(RawLocalFileSystem.java:47) 
at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:357) 
at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:245) 
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:648) 
at com.hadoop.compression.lzo.LzoIndex.readIndex(LzoIndex.java:169) 
at com.hadoop.mapreduce.LzoTextInputFormat.listStatus(LzoTextInputFormat.java:69) 
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:241) 
at com.hadoop.mapreduce.LzoTextInputFormat.getSplits(LzoTextInputFormat.java:85) 
at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:885) 
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:779) 
at org.apache.hadoop.mapreduce.Job.submit(Job.java:432) 
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:447) 
at company.Validation.run(Validation.java:99) 
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65) 
at company.stopTransfer.mapreduceTest.main(mapreduceTest.java:18) 
Apr 5, 2012 4:40:29 PM company.Validation run 
SEVERE: LinkExtractor: java.lang.IllegalArgumentException: Wrong FS: hdfs://D-SJC-00535164:9000/local/usecases/gbase014/outbound/seed_2012-03-12_06-34-39/1_1.lzo.index, expected: file:/// 
at org.apache.hadoop.fs.FileSystem.checkPath(FileSystem.java:310) 
at org.apache.hadoop.fs.RawLocalFileSystem.pathToFile(RawLocalFileSystem.java:47) 
at org.apache.hadoop.fs.RawLocalFileSystem.getFileStatus(RawLocalFileSystem.java:357) 
at org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:245) 
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:648) 
at com.hadoop.compression.lzo.LzoIndex.readIndex(LzoIndex.java:169) 
at com.hadoop.mapreduce.LzoTextInputFormat.listStatus(LzoTextInputFormat.java:69) 
at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:241) 
at com.hadoop.mapreduce.LzoTextInputFormat.getSplits(LzoTextInputFormat.java:85) 
at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:885) 
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:779) 
at org.apache.hadoop.mapreduce.Job.submit(Job.java:432) 
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:447) 
at company.Validation.run(Validation.java:99) 
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65) 
at company.stopTransfer.mapreduceTest.main(mapreduceTest.java:18) 

But there are some files in lib/native (unpacked there), so... I tried setting them in my path environment variable, but it still doesn't work.

Can anyone please give me some advice!

Thanks a lot!

What environment are you running on? Which version of Hadoop? Do you have the Hadoop native libraries installed? – 2012-04-06 00:32:22

I'm using hadoop-0.20.0 and I think I have hadoop-native. I can run the MapReduce job from the command line: hadoop jar Validation... – 2012-04-06 16:49:07

Answer

Your error is caused by the actual LZO shared library not being present in the Hadoop native library folder.

The code in GPLNativeCodeLoader is looking for a shared library called gplcompression. Java actually looks for a file named libgplcompression.so. If that file doesn't exist in your lib/native/${arch} folder, you'll see this error.
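
This is visible in the first trace (GPLNativeCodeLoader.&lt;clinit&gt; calling System.loadLibrary). A simplified sketch of that lookup, not the library's exact source, which you can run standalone to test whether the JVM can find the library:

public class GplLoadCheck {
    public static void main(String[] args) {
        try {
            // Same call GPLNativeCodeLoader makes; on Linux the JVM maps the
            // name "gplcompression" to libgplcompression.so and searches the
            // directories listed in java.library.path.
            System.loadLibrary("gplcompression");
            System.out.println("Loaded native gpl library");
        } catch (UnsatisfiedLinkError e) {
            System.out.println("Could not load native gpl library: " + e.getMessage());
        }
    }
}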

In a terminal, navigate to the Hadoop base directory and run the following to dump the native libraries installed on the machine, then post the output back in your original question:

uname -a 
find lib/native 
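
On a typical hadoop-0.20-era install you would expect the find output to include something like lib/native/Linux-amd64-64/libgplcompression.so (or Linux-i386-32 on a 32-bit JVM); the architecture folder has to match the JVM that launches the job.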

Thanks for the suggestion! I do have the libgplcompression files in the folder, but it still doesn't work. Because I call ToolRunner from Java, I run it as a Java application. I set conf.set("java.library.path", "absolute path of the native folder"), but it still doesn't work! – 2012-04-06 16:51:50

Do you have the libgplcompression.so file on every node in your cluster? – 2012-04-06 17:56:18

Sorry, I meant I set java.library.path in System. – 2012-04-06 17:56:35
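
A closing note on that last exchange: conf.set("java.library.path", ...) only writes a Hadoop configuration entry, and the JVM normally captures java.library.path once at startup, so changing it after launch (whether via conf.set or System.setProperty) usually has no effect on System.loadLibrary. The dependable route is a JVM launch flag. A minimal check, with an illustrative path that is not taken from this thread:

public class LibraryPathCheck {
    public static void main(String[] args) {
        // Launch as, e.g. (path illustrative):
        //   java -Djava.library.path=/opt/hadoop/lib/native/Linux-amd64-64 LibraryPathCheck
        // and confirm the Hadoop native folder shows up in the output.
        System.out.println("java.library.path = " + System.getProperty("java.library.path"));
    }
}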