2010-07-06 · 32 views · 評分：-2
import java.awt.image.BufferedImage; 
import java.awt.image.DataBufferByte; 
import java.awt.image.Raster; 
import java.io.ByteArrayInputStream; 
import java.io.IOException; 
import javax.imageio.ImageIO; 
import javax.xml.soap.Text; 


import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.io.*; 
import org.apache.hadoop.mapred.*; 

public class blur { 
public static class BlurMapper extends MapReduceBase implements Mapper<Text, BytesWritable, LongWritable, BytesWritable> 
{ 
    OutputCollector<LongWritable, BytesWritable> goutput; 

    int IMAGE_HEIGHT = 240; 
    int IMAGE_WIDTH = 320; 
    public BytesWritable Gmiu; 
    public BytesWritable Gsigma; 
    public BytesWritable w; 
    byte[] bytes = new byte[IMAGE_HEIGHT*IMAGE_WIDTH*3]; 
    public BytesWritable emit = new BytesWritable(bytes); 
    int count = 0; 
    int initVar = 125; 
    public LongWritable l = new LongWritable(1); 

    public void map(Text key, BytesWritable file,OutputCollector<LongWritable, BytesWritable> output, Reporter reporter) throws IOException { 
//Read Current Image from File. 
        goutput = output; 
        //System.out.println("akhil langer"); 
        BufferedImage img = ImageIO.read(new ByteArrayInputStream(file.getBytes())); 
       // BufferedImage dest = null; 

        //Apply Blur on Filter Operation - External JAR 
        // BoxBlurFilter BlurOp = new BoxBlurFilter(10,10,2); 
        Raster ras=img.getData(); 
        DataBufferByte db= (DataBufferByte)ras.getDataBuffer(); 
        byte[] data = db.getData(); 
        byte[] byte1 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH]; 
        byte[] byte2 = new byte[IMAGE_HEIGHT*IMAGE_WIDTH]; 
        for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++) 
        { 
        byte1[i]=20; 
        byte2[i]=125; 
        } 
        byte [] oldmiu; 
        oldmiu = new byte[IMAGE_HEIGHT*IMAGE_WIDTH] ; 
        byte [] oldsigma; 
        oldsigma = new byte[IMAGE_HEIGHT*IMAGE_WIDTH] ; 
        if(count==0){ 
          Gmiu = new BytesWritable(data); 
          Gsigma = new BytesWritable(byte1); 
          w = new BytesWritable(byte2); 
          count++; 

        oldmiu= Gmiu.getBytes(); 


        oldsigma= Gmiu.getBytes(); 
        } 

         else{ 
          for(int i=0;i<IMAGE_HEIGHT*IMAGE_WIDTH;i++) 
          { 
           byte pixel = data[i]; 
           Double tempmiu=new Double(0.0); 
           Double tempsig=new Double(0.0); 
           Double weight = new Double(0.0); 
           double temp1=0; double alpha = 0.05; 
           tempmiu = (1-alpha)*oldmiu[i] + alpha*pixel; 
           temp1=temp1+(pixel-oldmiu[i])*(pixel-oldmiu[i]); 
           tempsig=(1-alpha)*oldsigma[i]+ alpha*temp1; 

           byte1[i] = tempmiu.byteValue(); 
           byte2[i]= tempsig.byteValue(); 
           Gmiu.set(byte1,i,1); 
           Gsigma.set(byte2,i,1); 
           byte1 = w.getBytes(); 
           Double w1=new Double((1-alpha)*byte1[i]+alpha*100); 
           byte2[i] = w1.byteValue(); 
           w.set(byte2,i,1); 
          } 

         } 
         byte1 = Gsigma.getBytes();      
         emit.set(byte1,0,IMAGE_HEIGHT*IMAGE_WIDTH); 
         byte1 = Gsigma.getBytes(); 
         emit.set(byte1,IMAGE_HEIGHT*IMAGE_WIDTH,IMAGE_HEIGHT*IMAGE_WIDTH); 
         byte1 = w.getBytes(); 
         emit.set(byte1,2*IMAGE_HEIGHT*IMAGE_WIDTH,IMAGE_HEIGHT*IMAGE_WIDTH); 
     } 

     @Override 
     public void close(){ 
      try{ 
       goutput.collect(l, emit); 
      } 
      catch(Exception e){ 
       e.printStackTrace(); 
       System.exit(-1); 
      } 

     } 

} 

     public static void main(String[] args) { 

       if(args.length!=2) { 

         System.err.println("Usage: blurvideo input output"); 
         System.exit(-1); 

       } 
       JobClient client = new JobClient(); 
       JobConf conf = new JobConf(blur.class); 


       conf.setOutputValueClass(BytesWritable.class); 
       conf.setInputFormat(SequenceFileInputFormat.class); 
       //conf.setNumMapTasks(n) 

       SequenceFileInputFormat.addInputPath(conf, new Path(args[0])); 
       SequenceFileOutputFormat.setOutputPath(conf, new Path(args[1])); 
       conf.setMapperClass(BlurMapper.class); 
       conf.setNumReduceTasks(0); 
       //conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class); 

       client.setConf(conf); 
       try { 
         JobClient.runJob(conf); 
       } catch (Exception e) { 
         e.printStackTrace(); 
       } 
     } 
} 

錯誤:Hadoop的空指針異常

java.lang.NullPointerException at blur$BlurMapper.close(blur.java:99) at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:57) at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342) at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:138)

請回復!

+4

當這麼多努力進入一個問題時,我們該如何抗拒? – skaffman 2010-07-06 09:53:28

回答

1

該死......我的抵抗力已經崩潰。

很明顯，是 `goutput.collect(l, emit);` 這一行拋出了 NPE。這意味着在調用 `close()` 時，`goutput` 為 `null`。

這意味着要麼 `map` 從未被調用過，要麼調用時傳入的 `output` 參數本身就是 `null`。

我不知道Hadoop,但我懷疑根本問題是你的map方法不是覆蓋基類中的map方法,因爲簽名是不同的。淨結果是你的map方法沒有被調用,並且.... NPE。

+0

是不是重寫很容易測試：OP 可以給他的 map 方法加上 `@Override` 註解 —— 如果產生編譯錯誤 —— 問題就找到了。 – 2010-07-06 10:55:42

+0

@Andreas_D - 沒錯。事實上,我懷疑他刪除了「Override」是爲了編譯而做出的錯誤嘗試。 – 2010-07-06 11:13:03

+0

有同樣的想法:「該死,這個@Override是一個編譯錯誤..啊,很容易,我只是刪除它,然後編譯完美。」 :-)) – 2010-07-06 11:21:51