
I am new to Android, and I am trying to write a program that captures audio, then analyzes and visualizes the frequencies present in it. I found an example that draws the graphic-equalizer part of the display. The example uses an AudioRecord object to capture the audio. The technique used to break the audio signal into its component frequencies is a mathematical transform called the Discrete Fourier Transform (DFT), and the DFT is computed with a Fast Fourier Transform (FFT). The example uses a package that implements the FFT; the package is available at www.netlib.org/fftpack/jfftpack.tgz. The problem is that when I run the example and press the Start button, the graphic equalizer never appears on the display.
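If I understand the FFT part correctly, with the 8000 Hz sample rate and 256-sample block used in the code below, each FFT bin should span 8000 / 256 = 31.25 Hz, and nothing above the Nyquist limit of 4000 Hz can show up in the display. Roughly (same frequency and blockSize values as in the code):

    int frequency = 8000;   // sample rate in Hz, as in the code below
    int blockSize = 256;    // samples per FFT block
    double binWidthHz = frequency / (double) blockSize; // 31.25 Hz per FFT bin
    double nyquistHz  = frequency / 2.0;                // 4000 Hz, highest representable frequency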

Here is the source code of the Activity class:

package com.audio.processing; 
import android.app.Activity; 
import android.graphics.Bitmap; 
import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.Paint; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.AsyncTask; 
import android.os.Bundle; 
import android.util.Log; 
import android.view.View; 
import android.view.View.OnClickListener; 
import android.widget.Button; 
import android.widget.ImageView; 

import ca.uol.aig.fftpack.RealDoubleFFT; 

public class AudioProcessing extends Activity implements OnClickListener{ 
    int frequency = 8000; 
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; 
    int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; 


    private RealDoubleFFT transformer; 
    int blockSize = 256; 
    Button startStopButton; 
    boolean started = false; 

    RecordAudio recordTask; 

    ImageView imageView; 
    Bitmap bitmap; 
    Canvas canvas; 
    Paint paint; 
    /** Called when the activity is first created. */ 
    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.main); 
     startStopButton = (Button) this.findViewById(R.id.StartStopButton); 
     startStopButton.setOnClickListener(this); 

     transformer = new RealDoubleFFT(blockSize); 

     imageView = (ImageView) this.findViewById(R.id.ImageView01); 
     bitmap = Bitmap.createBitmap((int)256,(int)100,Bitmap.Config.ARGB_8888); 
     canvas = new Canvas(bitmap); 
     paint = new Paint(); 
     paint.setColor(Color.GREEN); 
     imageView.setImageBitmap(bitmap); 
    } 

    private class RecordAudio extends AsyncTask<Void, double[], Void> { 
     @Override 
     protected Void doInBackground(Void... params) { 
      try { 
       int bufferSize = AudioRecord.getMinBufferSize(frequency, 
         channelConfiguration, audioEncoding); 
       AudioRecord audioRecord = new AudioRecord( 
         MediaRecorder.AudioSource.DEFAULT, frequency, 
         channelConfiguration, audioEncoding, bufferSize); 

       short[] buffer = new short[blockSize]; 
       double[] toTransform = new double[blockSize]; 
       audioRecord.startRecording(); 
       while (started) { 
        int bufferReadResult = audioRecord.read(buffer, 0, blockSize); 

        for (int i = 0; i < blockSize && i < bufferReadResult; i++) { 
         toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit 
        } 

        transformer.ft(toTransform); 
        publishProgress(toTransform); 
       } 
       audioRecord.stop(); 
      } catch (Throwable t) { 
       Log.e("AudioRecord", "Recording Failed"); 
      } 
      return null; 
     } 
    } 

    protected void onProgressUpdate(double[]... toTransform) { 
     canvas.drawColor(Color.BLACK); 
     for (int i = 0; i < toTransform[0].length; i++) { 
      int x = i; 
      int downy = (int) (100 - (toTransform[0][i] * 10)); 
      int upy = 100; 
      canvas.drawLine(x, downy, x, upy, paint); 
     } 
     imageView.invalidate(); 
    } 

    public void onClick(View v) { 
     if (started) { 
      started = false; 
      startStopButton.setText("Start"); 
      recordTask.cancel(true); 
     } else { 
      started = true; 
      startStopButton.setText("Stop"); 
      recordTask = new RecordAudio(); 
      recordTask.execute(); 
     } 
    } 
} 

Here is main.xml:

<?xml version="1.0" encoding="utf-8"?> 
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" 
    android:orientation="vertical" 
    android:layout_width="fill_parent" 
    android:layout_height="fill_parent"> 
    <TextView 
        android:layout_width="fill_parent" 
        android:layout_height="wrap_content" 
        android:text="@string/hello" /> 
    <ImageView 
        android:id="@+id/ImageView01" 
        android:layout_width="wrap_content" 
        android:layout_height="wrap_content" /> 
    <Button 
        android:id="@+id/StartStopButton" 
        android:text="Start" 
        android:layout_width="wrap_content" 
        android:layout_height="wrap_content" /> 
</LinearLayout> 

I have set the RECORD_AUDIO permission in AndroidManifest.xml. Thanks in advance!


Where is the ImageView tag in the XML? – codeScriber 2011-04-01 09:50:03

Answers


Yes, I had a project like this too and ran into the same error; after adding the permission everything works now. Most likely you did not add it to the correct place in AndroidManifest.xml: it should go outside the application tag.

<uses-permission android:name="android.permission.RECORD_AUDIO" /> 
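
For completeness, a minimal sketch of the placement, reusing the package and activity name from your question (everything apart from the uses-permission line is just placeholder):

    <?xml version="1.0" encoding="utf-8"?> 
    <manifest xmlns:android="http://schemas.android.com/apk/res/android" 
        package="com.audio.processing"> 

        <!-- Declare the permission as a direct child of <manifest>, outside <application> --> 
        <uses-permission android:name="android.permission.RECORD_AUDIO" /> 

        <application android:label="@string/app_name"> 
            <activity android:name=".AudioProcessing"> 
                <intent-filter> 
                    <action android:name="android.intent.action.MAIN" /> 
                    <category android:name="android.intent.category.LAUNCHER" /> 
                </intent-filter> 
            </activity> 
        </application> 
    </manifest> 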

The onProgressUpdate method should belong to RecordAudio, but in your code it belongs to AudioProcessing. Check your braces; with that fixed it should work.
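
In other words, the nesting should look roughly like this (method bodies omitted, same names as in your code):

    public class AudioProcessing extends Activity implements OnClickListener { 

        private class RecordAudio extends AsyncTask<Void, double[], Void> { 
            @Override 
            protected Void doInBackground(Void... params) { 
                // record audio and call publishProgress(toTransform) in the loop 
                return null; 
            } 

            @Override 
            protected void onProgressUpdate(double[]... toTransform) { 
                // draw onto the bitmap here; this runs on the UI thread 
            } 
        } 

        public void onClick(View v) { 
            // start or stop the RecordAudio task 
        } 
    } 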


Here is the working code. I tried it myself and it works fine.

package com.example.frequencytest; 

import android.app.Activity; 
import android.graphics.Bitmap; 
import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.Paint; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.AsyncTask; 
import android.os.Bundle; 
import android.util.Log; 
import android.view.Menu; 
import android.view.View; 
import android.view.View.OnClickListener; 
import android.widget.Button; 
import android.widget.ImageView; 
import ca.uol.aig.fftpack.RealDoubleFFT; 

public class MainActivity extends Activity implements OnClickListener { 

    int frequency = 8000; 
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; 
    int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; 
    private RealDoubleFFT transformer; 
    int blockSize = 256; 

    Button startStopButton; 
    boolean started = false; 

    RecordAudio recordTask; 

    ImageView imageView; 
    Bitmap bitmap; 
    Canvas canvas; 
    Paint paint; 

    //AudioRecord audioRecord; 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.activity_main); 

     startStopButton = (Button) this.findViewById(R.id.start_stop_btn); 
     startStopButton.setOnClickListener(this); 

     transformer = new RealDoubleFFT(blockSize); 

     imageView = (ImageView) this.findViewById(R.id.imageView1); 
     bitmap = Bitmap.createBitmap((int) 256, (int) 100, 
       Bitmap.Config.ARGB_8888); 
     canvas = new Canvas(bitmap); 
     paint = new Paint(); 
     paint.setColor(Color.GREEN); 
     imageView.setImageBitmap(bitmap); 

    } 

    public class RecordAudio extends AsyncTask<Void, double[], Void> { 

     @Override 
     protected Void doInBackground(Void... arg0) { 

      try { 
       // int bufferSize = AudioRecord.getMinBufferSize(frequency, 
       // AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); 
       int bufferSize = AudioRecord.getMinBufferSize(frequency, 
         channelConfiguration, audioEncoding); 

       AudioRecord audioRecord = new AudioRecord( 
         MediaRecorder.AudioSource.MIC, frequency, 
         channelConfiguration, audioEncoding, bufferSize); 

       short[] buffer = new short[blockSize]; 
       double[] toTransform = new double[blockSize]; 

       audioRecord.startRecording(); 

       // started = true; // 'started' should already be true before 
       // the while loop below is entered 

       while (started) { 
        int bufferReadResult = audioRecord.read(buffer, 0, 
          blockSize); 

        for (int i = 0; i < blockSize && i < bufferReadResult; i++) { 
         toTransform[i] = (double) buffer[i] / 32768.0; // signed 16 bit 
        } 

        transformer.ft(toTransform); 
        publishProgress(toTransform); 
       } 

       audioRecord.stop(); 

      } catch (Throwable t) { 
       t.printStackTrace(); 
       Log.e("AudioRecord", "Recording Failed"); 
      } 
      return null; 
     } 

     @Override 
     protected void onProgressUpdate(double[]... toTransform) { 

      canvas.drawColor(Color.BLACK); 

      for (int i = 0; i < toTransform[0].length; i++) { 
       int x = i; 
       int downy = (int) (100 - (toTransform[0][i] * 10)); 
       int upy = 100; 

       canvas.drawLine(x, downy, x, upy, paint); 
      } 

      imageView.invalidate(); 

     } 

    } 

    @Override 
    public boolean onCreateOptionsMenu(Menu menu) { 
     getMenuInflater().inflate(R.menu.activity_main, menu); 
     return true; 
    } 

    public void onClick(View arg0) { 
     if (started) { 
      started = false; 
      startStopButton.setText("Start"); 
      recordTask.cancel(true); 
     } else { 
      started = true; 
      startStopButton.setText("Stop"); 
      recordTask = new RecordAudio(); 
      recordTask.execute(); 
     } 
    } 
} 

Thank you very much, I was looking for exactly this! – Larphoid 2013-03-01 20:58:10


After some experimenting in the main activity... (this is just to make it easier to follow what is going on, using HEIGHT wherever 100 was used): static final int HEIGHT = 100; static final int MIDDLE = HEIGHT / 2; static final int PEAK = MIDDLE / 10; // is HEIGHT / 10. Then in onProgressUpdate: int x = 0; for (int i = 0; i … – Larphoid 2013-03-01 22:52:06