
Plotting an audio signal (amplitude vs. time) with JFreeChart

I have inherited a code snippet that draws the audio waveform of a given file. The waveform, however, is a plain image built with Java vector graphics, with no labels, no axis information, and so on. I would like to port it to JFreeChart to increase its informational value. My problem is that the code is cryptic, to say the least.

import java.awt.BasicStroke; 
import java.awt.Color; 
import java.awt.Font; 
import java.awt.Graphics2D; 
import java.awt.font.FontRenderContext; 
import java.awt.font.LineBreakMeasurer; 
import java.awt.font.TextAttribute; 
import java.awt.font.TextLayout; 
import java.awt.geom.Line2D; 
import java.awt.image.BufferedImage; 
import java.io.ByteArrayInputStream; 
import java.io.ByteArrayOutputStream; 
import java.io.File; 
import java.io.IOException; 
import java.net.URL; 
import java.text.AttributedCharacterIterator; 
import java.text.AttributedString; 
import java.util.Vector; 
import javax.imageio.ImageIO; 
import javax.sound.sampled.AudioFormat; 
import javax.sound.sampled.AudioInputStream; 
import javax.sound.sampled.AudioSystem; 
import javax.sound.sampled.DataLine; 
import javax.sound.sampled.LineUnavailableException; 
import javax.sound.sampled.TargetDataLine; 
import javax.swing.ImageIcon; 
import javax.swing.JLabel; 
import javax.swing.JOptionPane; 

public class Plotter { 
AudioInputStream audioInputStream; 
Vector<Line2D.Double> lines = new Vector<Line2D.Double>(); 
String errStr; 
Capture capture = new Capture(); 
double duration, seconds; 
//File file; 
String fileName = "out.png"; 
SamplingGraph samplingGraph; 
String waveformFilename; 
Color imageBackgroundColor = new Color(20,20,20); 

public Plotter(URL url, String waveformFilename) throws Exception { 
    if (url != null) { 
     try { 
      errStr = null; 
      this.fileName = waveformFilename; 
      audioInputStream = AudioSystem.getAudioInputStream(url); 
      long milliseconds = (long)((audioInputStream.getFrameLength() * 1000)/audioInputStream.getFormat().getFrameRate()); 
      duration = milliseconds/1000.0; 
      samplingGraph = new SamplingGraph(); 
      samplingGraph.createWaveForm(null);  

     } catch (Exception ex) { 
      reportStatus(ex.toString()); 
      throw ex; 
     } 
    } else { 
     reportStatus("Audio file required."); 
    } 
} 

/** 
* Not in the excerpt as posted: a minimal status reporter is assumed here 
* so the reportStatus(...) calls compile (the original Java Sound demo 
* logs the message the same way). 
*/ 
void reportStatus(String message) { 
    if ((errStr = message) != null) { 
        System.err.println(errStr); 
    } 
} 
/** 
* Render a WaveForm. 
*/ 
class SamplingGraph implements Runnable { 

    private Thread thread; 
    private Font font10 = new Font("serif", Font.PLAIN, 10); 
    private Font font12 = new Font("serif", Font.PLAIN, 12); 
    Color jfcBlue = new Color(000, 000, 255); 
    Color pink = new Color(255, 175, 175); 


    public SamplingGraph() { 
    } 


    public void createWaveForm(byte[] audioBytes) { 

     lines.removeAllElements(); // clear the old vector 

     AudioFormat format = audioInputStream.getFormat(); 
     if (audioBytes == null) { 
      try { 
       audioBytes = new byte[ 
        (int) (audioInputStream.getFrameLength() 
        * format.getFrameSize())]; 
       audioInputStream.read(audioBytes); 
      } catch (Exception ex) { 
       reportStatus(ex.getMessage()); 
       return; 
      } 
     } 
     int w = 500; 
     int h = 200; 
     int[] audioData = null; 
     if (format.getSampleSizeInBits() == 16) { 
      int nlengthInSamples = audioBytes.length/2; 
      audioData = new int[nlengthInSamples]; 
      if (format.isBigEndian()) { 
       for (int i = 0; i < nlengthInSamples; i++) { 
        /* First byte is MSB (high order) */ 
        int MSB = (int) audioBytes[2*i]; 
        /* Second byte is LSB (low order) */ 
        int LSB = (int) audioBytes[2*i+1]; 
        audioData[i] = MSB << 8 | (255 & LSB); 
       } 
      } else { 
       for (int i = 0; i < nlengthInSamples; i++) { 
        /* First byte is LSB (low order) */ 
        int LSB = (int) audioBytes[2*i]; 
        /* Second byte is MSB (high order) */ 
        int MSB = (int) audioBytes[2*i+1]; 
        audioData[i] = MSB << 8 | (255 & LSB); 
       } 
      } 
     } else if (format.getSampleSizeInBits() == 8) { 
      int nlengthInSamples = audioBytes.length; 
      audioData = new int[nlengthInSamples]; 
      if (format.getEncoding().toString().startsWith("PCM_SIGN")) { 
       for (int i = 0; i < audioBytes.length; i++) { 
        audioData[i] = audioBytes[i]; 
       } 
      } else { 
       for (int i = 0; i < audioBytes.length; i++) { 
        // the bytes hold unsigned PCM; mask to 0..255 before centering 
        audioData[i] = (audioBytes[i] & 0xFF) - 128; 
       } 
      } 
     } 

     int frames_per_pixel = audioBytes.length/format.getFrameSize()/w; 
     byte my_byte = 0; 
     double y_last = 0; 
     int numChannels = format.getChannels(); 
     for (double x = 0; x < w && audioData != null; x++) { 
      int idx = (int) (frames_per_pixel * numChannels * x); 
      if (format.getSampleSizeInBits() == 8) { 
       my_byte = (byte) audioData[idx]; 
      } else { 
       my_byte = (byte) (128 * audioData[idx]/32768); 
      } 
      double y_new = (double) (h * (128 - my_byte)/256); 
      lines.add(new Line2D.Double(x, y_last, x, y_new)); 
      y_last = y_new; 
     } 
     saveToFile(); 
    } 


    public void saveToFile() {    
     int w = 500; 
     int h = 200; 
     int INFOPAD = 15; 

     BufferedImage bufferedImage = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB); 
     Graphics2D g2 = bufferedImage.createGraphics(); 

     createSampleOnGraphicsContext(w, h, INFOPAD, g2);    
     g2.dispose(); 
     // Write generated image to a file 
     try { 
      // Save as PNG 
      File file = new File(fileName); 
      System.out.println(file.getAbsolutePath()); 
      ImageIO.write(bufferedImage, "png", file); 
      JOptionPane.showMessageDialog(null, 
        new JLabel(new ImageIcon(fileName))); 
     } catch (IOException e) { 
      e.printStackTrace(); // don't swallow the write failure silently 
     } 
    } 


    private void createSampleOnGraphicsContext(int w, int h, int INFOPAD, Graphics2D g2) {    
     g2.setBackground(imageBackgroundColor); 
     g2.clearRect(0, 0, w, h); 
     g2.setColor(Color.white); 
     g2.fillRect(0, h-INFOPAD, w, INFOPAD); 

     if (errStr != null) { 
      g2.setColor(jfcBlue); 
      g2.setFont(new Font("serif", Font.BOLD, 18)); 
      g2.drawString("ERROR", 5, 20); 
      AttributedString as = new AttributedString(errStr); 
      as.addAttribute(TextAttribute.FONT, font12, 0, errStr.length()); 
      AttributedCharacterIterator aci = as.getIterator(); 
      FontRenderContext frc = g2.getFontRenderContext(); 
      LineBreakMeasurer lbm = new LineBreakMeasurer(aci, frc); 
      float x = 5, y = 25; 
      lbm.setPosition(0); 
      while (lbm.getPosition() < errStr.length()) { 
       TextLayout tl = lbm.nextLayout(w-x-5); 
       if (!tl.isLeftToRight()) { 
        x = w - tl.getAdvance(); 
       } 
       tl.draw(g2, x, y += tl.getAscent()); 
       y += tl.getDescent() + tl.getLeading(); 
      } 
     } else if (capture.thread != null) { 
      g2.setColor(Color.black); 
      g2.setFont(font12); 
      //g2.drawString("Length: " + String.valueOf(seconds), 3, h-4); 
     } else { 
      g2.setColor(Color.black); 
      g2.setFont(font12); 
      //g2.drawString("File: " + fileName + " Length: " + String.valueOf(duration) + " Position: " + String.valueOf(seconds), 3, h-4); 

      if (audioInputStream != null) { 
       // .. render sampling graph .. 
       g2.setColor(jfcBlue); 
       for (int i = 1; i < lines.size(); i++) { 
        g2.draw((Line2D) lines.get(i)); 
       } 

       // .. draw current position .. 
       if (seconds != 0) { 
        double loc = seconds/duration*w; 
        g2.setColor(pink); 
        g2.setStroke(new BasicStroke(3)); 
        g2.draw(new Line2D.Double(loc, 0, loc, h-INFOPAD-2)); 
       } 
      } 
     } 
    } 

    public void start() { 
     thread = new Thread(this); 
     thread.setName("SamplingGraph"); 
     thread.start(); 
     seconds = 0; 
    } 

    public void stop() { 
     if (thread != null) { 
      thread.interrupt(); 
     } 
     thread = null; 
    } 

    public void run() { 
     seconds = 0; 
     while (thread != null) { 
      if ((capture.line != null) && (capture.line.isActive())) { 
       long milliseconds = (long)(capture.line.getMicrosecondPosition()/1000); 
       seconds = milliseconds/1000.0; 
      } 
      try { Thread.sleep(100); } catch (Exception e) { break; } 
      while ((capture.line != null && !capture.line.isActive())) 
      { 
       try { Thread.sleep(10); } catch (Exception e) { break; } 
      } 
     } 
     seconds = 0; 
    } 
} // End class SamplingGraph 

/** 
* Reads data from the input channel and writes to the output stream 
*/ 
class Capture implements Runnable { 

    TargetDataLine line; 
    Thread thread; 

    public void start() { 
     errStr = null; 
     thread = new Thread(this); 
     thread.setName("Capture"); 
     thread.start(); 
    } 

    public void stop() { 
     thread = null; 
    } 

    private void shutDown(String message) { 
     if ((errStr = message) != null && thread != null) { 
      thread = null; 
      samplingGraph.stop();     
      System.err.println(errStr); 
     } 
    } 

    public void run() { 

     duration = 0; 

     // define the required attributes for our line, 
     // and make sure a compatible line is supported. 
     // (The format must be read *before* the stream is discarded; the 
     // inherited code nulled audioInputStream and then called getFormat() 
     // on it, which would throw a NullPointerException.) 
     AudioFormat format = audioInputStream.getFormat(); 
     audioInputStream = null; 

     DataLine.Info info = new DataLine.Info(TargetDataLine.class, 
      format); 

     if (!AudioSystem.isLineSupported(info)) { 
      shutDown("Line matching " + info + " not supported."); 
      return; 
     } 

     // get and open the target data line for capture. 

     try { 
      line = (TargetDataLine) AudioSystem.getLine(info); 
      line.open(format, line.getBufferSize()); 
     } catch (LineUnavailableException ex) { 
      shutDown("Unable to open the line: " + ex); 
      return; 
     } catch (SecurityException ex) { 
      shutDown(ex.toString()); 
      //JavaSound.showInfoDialog(); 
      return; 
     } catch (Exception ex) { 
      shutDown(ex.toString()); 
      return; 
     } 

     // play back the captured audio data 
     ByteArrayOutputStream out = new ByteArrayOutputStream(); 
     int frameSizeInBytes = format.getFrameSize(); 
     int bufferLengthInFrames = line.getBufferSize()/8; 
     int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes; 
     byte[] data = new byte[bufferLengthInBytes]; 
     int numBytesRead; 

     line.start(); 

     while (thread != null) { 
      if((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) { 
       break; 
      } 
      out.write(data, 0, numBytesRead); 
     } 

     // we reached the end of the stream. stop and close the line. 
     line.stop(); 
     line.close(); 
     line = null; 

     // stop and close the output stream 
     try { 
      out.flush(); 
      out.close(); 
     } catch (IOException ex) { 
      ex.printStackTrace(); 
     } 

     // load bytes into the audio input stream for playback 

     byte audioBytes[] = out.toByteArray(); 
     ByteArrayInputStream bais = new ByteArrayInputStream(audioBytes); 
     audioInputStream = new AudioInputStream(bais, format, audioBytes.length/frameSizeInBytes); 

     long milliseconds = (long)((audioInputStream.getFrameLength() * 1000)/format.getFrameRate()); 
     duration = milliseconds/1000.0; 

     try { 
      audioInputStream.reset(); 
     } catch (Exception ex) { 
      ex.printStackTrace(); 
      return; 
     } 

     samplingGraph.createWaveForm(audioBytes); 
    } 
} // End class Capture  

}

I have gone through it several times and know that the section below is the one that computes the audio values, but my problem is that I don't see how to retrieve the time information at that point, i.e. which time interval a given value belongs to.

int frames_per_pixel = audioBytes.length/format.getFrameSize()/w; 
byte my_byte = 0; 
double y_last = 0; 
int numChannels = format.getChannels(); 
for (double x = 0; x < w && audioData != null; x++) { 
 int idx = (int) (frames_per_pixel * numChannels * x); 
 if (format.getSampleSizeInBits() == 8) { 
  my_byte = (byte) audioData[idx]; 
 } else { 
  my_byte = (byte) (128 * audioData[idx]/32768); 
 } 
 double y_new = (double) (h * (128 - my_byte)/256); 
 lines.add(new Line2D.Double(x, y_last, x, y_new)); 
 y_last = y_new; 
} 

I would like to plot this with JFreeChart's XYSeries, but I am having trouble computing the required values of x (the time) and y (the amplitude, which is y_new in this code).

I realize this is probably quite easy, but I am new to this whole audio business; I understand the theory behind audio files, yet this seems to be a simple problem with a hard solution.


Answer


The key thing to realize is that, in the provided code, the plot is expected to be at a much lower resolution than the actual audio data. For example, consider the following waveform:

[figure: a sample waveform]

The plotting code then represents the data as the blue boxes in the following graph:

[figure: the same waveform overlaid with blue boxes, one per pixel column]

When a box is 1 pixel wide, it corresponds to a line with endpoints (x, y_last) and (x, y_new). As you can see, when the waveform is sufficiently smooth, the amplitude range from y_last to y_new is a fairly close approximation of the samples that fall within the box.

Now, this representation is convenient when trying to render the waveform in a pixel-by-pixel fashion (a raster display). However, for XYPlot graphs (such as the one available in JFreeChart), you only need to specify a sequence of points and the XYPlot takes care of drawing line segments between those points. This corresponds to the green line in the following graph:

[figure: the same waveform approximated by a green polyline through the selected sample points]

In theory, you could just feed every single sample as-is to the XYPlot. However, unless you have very few samples, this tends to be quite heavy to render. So, one would typically downsample the data first. If the waveform is sufficiently smooth, the downsampling process reduces to a decimation (i.e. taking 1 out of every N samples), and the decimation factor N then controls the tradeoff between rendering performance and waveform approximation accuracy. Note that if the decimation factor frames_per_pixel used in the provided code produces a good raster display (that is, one where the waveform features you want to see are not hidden by the blocky pixel look, and which does not show aliasing artifacts), the same factor should still be sufficient for the XY plot (in fact, you may be able to downsample even further). As far as mapping the samples to the time/amplitude axes goes, I would not use the x and y parameters as they are defined in the provided plotting code: they are merely pixel indices applicable to a raster-type display (like the blue-box representation above).
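
A minimal sketch of such a decimation (the variable names are my own; N is an arbitrary factor, and audioData is assumed to be the array built in createWaveForm above):

int N = 64; // arbitrary decimation factor 
int[] decimated = new int[(audioData.length + N - 1)/N]; 
for (int i = 0; i < decimated.length; i++) { 
    decimated[i] = audioData[i * N]; // keep 1 out of every N samples 
} 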

Instead, I would map the sample index (idx in the provided code) directly to the time axis by dividing it by the sampling rate (which you can obtain from format.getFrameRate()). Similarly, I would map the full-scale sample values to the [-1, +1] range, dividing the audioData[idx] samples by 128 for 8-bit-per-sample data and by 32768 for 16-bit-per-sample data.
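
For illustration only, here is a rough sketch (not part of the original answer) of how those mappings could feed a JFreeChart dataset. It assumes 16-bit audio, that audioData and format come from the code above, and JFreeChart 1.0.x on the classpath (where ChartUtilities provides the PNG export); the class and method names are made up:

import java.io.File; 
import javax.sound.sampled.AudioFormat; 
import org.jfree.chart.ChartFactory; 
import org.jfree.chart.ChartUtilities; 
import org.jfree.chart.JFreeChart; 
import org.jfree.chart.plot.PlotOrientation; 
import org.jfree.data.xy.XYSeries; 
import org.jfree.data.xy.XYSeriesCollection; 

public class WaveformChart { 

    /** Sketch only: plot decimated 16-bit samples as amplitude vs. time. */ 
    static void plotWaveform(int[] audioData, AudioFormat format) throws Exception { 
        int N = 64;                                    // arbitrary decimation factor 
        int numChannels = format.getChannels(); 
        double frameRate = format.getFrameRate();      // frames per second 
        long totalFrames = audioData.length / numChannels; 

        XYSeries series = new XYSeries("waveform"); 
        for (long frame = 0; frame < totalFrames; frame += N) { 
            int idx = (int) (frame * numChannels);     // first channel of this frame 
            double time = frame / frameRate;           // seconds 
            double amplitude = audioData[idx] / 32768.0; // 16-bit -> [-1, +1] 
            series.add(time, amplitude); 
        } 

        JFreeChart chart = ChartFactory.createXYLineChart( 
            "Audio waveform", "Time (s)", "Amplitude", 
            new XYSeriesCollection(series), 
            PlotOrientation.VERTICAL, false, false, false); 
        ChartUtilities.saveChartAsPNG(new File("waveform.png"), chart, 500, 200); 
    } 
} 

Note that with this mapping the data no longer depends on w and h; they only reappear as the pixel size passed to saveChartAsPNG.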

The main purpose of the w and h parameters would then remain to configure the plot area size, but they would no longer be directly required to compute the XYPlot inputs (the XYPlot itself takes care of mapping time/amplitude values to pixel coordinates). On the other hand, the w parameter also served the additional purpose of determining the number of points to plot. You may now want to control the number of points based on how much decimation the waveform can sustain without showing too much distortion, or you may keep it as-is to display the waveform at the maximum available plot resolution (at some performance cost). Note, however, that you may have to convert frames_per_pixel to a floating-point value if you expect to display the waveform with fewer than w samples, as sketched below.
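
Again just a sketch with made-up names: a floating-point step avoids frames_per_pixel truncating to zero when the file holds fewer frames than plotted points (numPoints here plays the role of w):

int numPoints = 500;                                 // desired plot resolution 
long totalFrames = audioData.length / numChannels; 
double framesPerPoint = totalFrames / (double) numPoints; // may be < 1 
for (int p = 0; p < numPoints; p++) { 
    long frame = (long) (p * framesPerPoint); 
    if (frame >= totalFrames) break;                 // guard for very short files 
    int idx = (int) (frame * numChannels); 
    double time = frame / (double) format.getFrameRate(); 
    double amplitude = audioData[idx] / 32768.0; 
    // series.add(time, amplitude);                  // as in the previous sketch 
} 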


Hey, thanks a lot for the reply. With that I was able to plot it in JFreeChart, but in the code the values of x and y are also affected by the width and height (the variables w & h). Do those affect the audio information? And if so, how do I eliminate them? Because the main loop that computes the y values actually depends on the width – Sudh


@Sudh: see the update regarding the width and height parameters. – SleuthEye