
I want to make an application for my project. It is like an Android calling application that can send voice to another Android phone by IP address within the local network. My problem is that I cannot receive audio; the audio recording side works fine. I am implementing AoIP (Audio over IP) using AudioRecord and AudioTrack.

Here is my code:

MainActivity.java

package com.example.call; 

import java.net.DatagramSocket; 
import java.util.ArrayList; 

import android.app.Activity; 
import android.os.Bundle; 
import android.view.Menu; 
import android.view.View; 
import android.view.View.OnClickListener; 
import android.widget.Button; 

import com.example.voip.Microphone; 
import com.example.voip.Speaker; 
public class MainActivity extends Activity { 
    DatagramSocket sock = null; 
    ArrayList<String> on = null; 
    Button btnStart; 
    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.activity_main); 
     android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); 
     btnStart = (Button) findViewById(R.id.btnStart); 
     btnStart.setOnClickListener(new OnClickListener() { 

      @Override 
      public void onClick(View arg0) { 
       disp(); 
      } 
     }); 
    } 

    @Override 
    public boolean onCreateOptionsMenu(Menu menu) { 
     // Inflate the menu; this adds items to the action bar if it is present. 
     getMenuInflater().inflate(R.menu.activity_main, menu); 
     return true; 
    } 

    public void disp(){ 
     on = new ArrayList<String>(); 
      on.add("192.168.1.103"); 
      try{ 
       sock = new DatagramSocket(4003); 
      }catch(Exception e){ 
       e.printStackTrace(); 
      } 
      Thread mic = new Thread(new Microphone(sock, 4003, on)); 
      mic.start(); 
    } 
} 

Microphone.java

package com.example.voip; 

import java.net.DatagramPacket; 
import java.net.DatagramSocket; 
import java.net.InetAddress; 
import java.util.ArrayList; 

import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 




public class Microphone implements Runnable{ 

    private DatagramSocket sock = null; 
    private int port = 0; 
    int buffsize = 0; 
    AudioRecord ar; 
    private ArrayList<String> on = null; 

    public Microphone(DatagramSocket d,int recPort,ArrayList<String> joi) { 
     this.sock = d; 
     this.on = joi; 
     this.port =recPort; 
    } 

    @Override 
    public void run(){ 
     try { 


      buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); 
      ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize); 
      ar.startRecording(); 

      byte[] soundData = new byte[1]; 
      int bytesRead = 0; 
      while (bytesRead != -1) { 
       bytesRead = ar.read(soundData, 0, soundData.length); 

       if (bytesRead >= 0) { 
        for (int y = 0; y < on.size(); y++) { 
         DatagramPacket pac = new DatagramPacket(soundData,soundData.length,InetAddress.getByName(on.get(y).toString()),port); 
         sock.send(pac); 
        } 
       } 
       Thread.sleep(0); 
      } 

     }catch(Exception e){ 
      e.printStackTrace(); 
     } 
    } 

    public void setSock(DatagramSocket sock) { 
     this.sock = sock; 
    } 

    /** 
    * @param port the port to set 
    */ 
    public void setPort(int port) { 
     this.port = port; 
    } 

    /** 
    * @param on the on to set 
    */ 
    public void setOn(ArrayList<String> on) { 
     this.on = on; 
    } 
} 

Speaker.java

package com.example.voip; 

import java.io.IOException; 
import java.net.DatagramPacket; 
import java.net.DatagramSocket; 

import android.media.AudioFormat; 
import android.media.AudioManager; 
import android.media.AudioRecord; 
import android.media.AudioTrack; 
import android.media.MediaRecorder; 

public class Speaker implements Runnable { 
     private int freq = 44100 ; 

      private Thread Rthread = null; 

      private AudioManager audioManager = null; 
      private AudioTrack inSpeaker = null; 
      byte[] buffer = new byte[freq]; 
     private DatagramSocket soc = null; 
     DatagramPacket pack = null; 
     // SourceDataLine inSpeaker = null; 




     public Speaker(DatagramSocket s) { 

      try { 
       this.soc = s; 

       int bufferSize = AudioRecord.getMinBufferSize(freq, 
         AudioFormat.CHANNEL_CONFIGURATION_MONO, 
         AudioFormat.ENCODING_PCM_16BIT); 
       inSpeaker = new AudioTrack(AudioManager.ROUTE_HEADSET, freq, 
         AudioFormat.CHANNEL_CONFIGURATION_MONO, 
         MediaRecorder.AudioEncoder.AMR_NB, bufferSize, 
         AudioTrack.MODE_STREAM); 



       System.out.println("Speak"); 

      } catch (Exception e) { 
       System.out.println(e.getMessage()); 
      } 
     } 




     public void run() { 
      inSpeaker.setPlaybackRate(freq); 
      int bytesRead = 1; 
      byte[] inSound = new byte[1]; 

      inSpeaker.play(); 

      while (bytesRead != -1) { 
       pack = new DatagramPacket(inSound, inSound.length); 

       try { 
        soc.receive(pack); 
        if (bytesRead >= 0) { 
        inSpeaker.write(inSound, 0, bytesRead); 

        } 

       } catch (IOException ex) { 
        System.out.println(ex.getMessage()); 
       } 
      } 
     } 

     public void setSoc(DatagramSocket soc) { 
      this.soc = soc; 
     } 



    } 

Receiver.java

package com.example.call; 

import java.net.DatagramSocket; 

import android.content.BroadcastReceiver; 
import android.content.Context; 
import android.content.Intent; 

import com.example.voip.Speaker; 

public class Receiver extends BroadcastReceiver{ 
DatagramSocket sock; 
    @Override 
    public void onReceive(Context arg0, Intent arg1) { 
     try{ 
      sock = new DatagramSocket(4003); 
     }catch(Exception e){ 
      e.printStackTrace(); 
     } 
     Thread speak = new Thread(new Speaker(sock)); 
     speak.start(); 
    } 

} 

AndroidManifest.xml

<?xml version="1.0" encoding="utf-8"?> 
<manifest xmlns:android="http://schemas.android.com/apk/res/android" 
    package="com.example.call" 
    android:versionCode="1" 
    android:versionName="1.0" > 

    <uses-sdk 
     android:minSdkVersion="16" 
     android:targetSdkVersion="16" /> 
    <uses-permission android:name="android.permission.USE_SIP" /> 
    <uses-permission android:name="android.permission.INTERNET" /> 
    <uses-permission android:name="android.permission.VIBRATE" /> 
    <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" /> 
    <uses-permission android:name="android.permission.WAKE_LOCK" /> 
    <uses-permission android:name="android.permission.RECORD_AUDIO" /> 
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"></uses-permission> 

    <uses-feature android:name="android.hardware.microphone" android:required="true"/> 
    <uses-feature android:name="android.hardware.wifi" android:required="true" /> 


    <application 
     android:allowBackup="true" 
     android:icon="@drawable/ic_launcher" 
     android:label="@string/app_name" 
     android:theme="@style/AppTheme" > 
     <activity 
      android:name="com.example.call.MainActivity" 
      android:label="@string/app_name" > 
      <intent-filter> 
       <action android:name="android.intent.action.MAIN" /> 

       <category android:name="android.intent.category.LAUNCHER" /> 
      </intent-filter> 
     </activity> 
     <receiver android:name="com.example.call.Receiver"></receiver> 
    </application> 

</manifest> 

activity_main.xml

<?xml version="1.0" encoding="utf-8"?> 
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" 
    android:orientation="vertical" 
    android:layout_width="fill_parent" 
    android:layout_height="fill_parent" 
    android:padding="20dip"> 

    <ImageView 
     android:layout_width="fill_parent" 
     android:layout_height="wrap_content" 
     android:src="@drawable/ic_launcher" 
     android:scaleType="fitCenter"/> 

     <TextView 
     android:layout_width="fill_parent" 
     android:layout_height="wrap_content" 
     android:text="Info" 
     android:layout_weight="1.0" 
     android:textSize="20dip"/> 

    <LinearLayout 
     android:orientation="horizontal" 
     android:layout_width="fill_parent" 
     android:layout_height="wrap_content"> 

     <Button 
       android:layout_width="wrap_content" 
       android:layout_height="wrap_content" 
       android:id="@+id/btnStart" 
       android:text="Start Recording" 
       android:layout_weight="1.0"/> 

       <Button 
       android:layout_width="wrap_content" 
       android:layout_height="wrap_content" 
       android:id="@+id/btnStop" 
       android:text="Stop Recording" 
       android:layout_weight="1.0"/> 
    </LinearLayout> 
</LinearLayout> 

There are no errors at all, but I cannot receive any sound. Please help =(

Thanks in advance.

Answer


This code in Speaker.java is incorrect. For playback the minimum buffer size should come from AudioTrack.getMinBufferSize(), and the AudioTrack constructor expects an AudioManager stream type and a PCM encoding constant, not AudioManager.ROUTE_HEADSET and a MediaRecorder encoder value:

int bufferSize = AudioRecord.getMinBufferSize(freq, 
      AudioFormat.CHANNEL_CONFIGURATION_MONO, 
      AudioFormat.ENCODING_PCM_16BIT); 
    inSpeaker = new AudioTrack(AudioManager.ROUTE_HEADSET, freq, 
       AudioFormat.CHANNEL_CONFIGURATION_MONO, 
       MediaRecorder.AudioEncoder.AMR_NB, bufferSize, 
       AudioTrack.MODE_STREAM); 

It should be:

int bufferSize = AudioTrack.getMinBufferSize(freq, 
      AudioFormat.CHANNEL_OUT_MONO, 
      AudioFormat.ENCODING_PCM_16BIT); 
    inSpeaker = new AudioTrack(AudioManager.STREAM_MUSIC, freq, 
       AudioFormat.CHANNEL_OUT_MONO, 
       AudioFormat.ENCODING_PCM_16BIT, bufferSize, 
       AudioTrack.MODE_STREAM); 
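
For completeness, here is a minimal sketch of how the corrected AudioTrack could be driven inside Speaker's run() loop. It assumes the 44100 Hz, mono, 16-bit PCM stream from the question; the buffer sizing, the use of the received packet length as the write count, and the setLength() reset between receives are illustrative additions, not part of the original code:

    // Sketch only: corrected playback path for Speaker.run() 
    int freq = 44100; 
    int bufferSize = AudioTrack.getMinBufferSize(freq, 
      AudioFormat.CHANNEL_OUT_MONO, 
      AudioFormat.ENCODING_PCM_16BIT); 
    AudioTrack inSpeaker = new AudioTrack(AudioManager.STREAM_MUSIC, freq, 
      AudioFormat.CHANNEL_OUT_MONO, 
      AudioFormat.ENCODING_PCM_16BIT, bufferSize, 
      AudioTrack.MODE_STREAM); 
    inSpeaker.play(); 

    byte[] inSound = new byte[bufferSize]; 
    DatagramPacket pack = new DatagramPacket(inSound, inSound.length); 
    while (true) { 
     try { 
      soc.receive(pack);                    // blocks until a UDP packet arrives 
      int length = pack.getLength();        // actual number of bytes received 
      if (length > 0) { 
       inSpeaker.write(inSound, 0, length); // feed the received PCM to the speaker 
      } 
      pack.setLength(inSound.length);       // reset so the next receive can fill the whole buffer 
     } catch (IOException ex) { 
      System.out.println(ex.getMessage()); 
     } 
    } 

Writing the packet's actual length instead of a fixed bytesRead avoids pushing stale or truncated data into the AudioTrack, and reusing one DatagramPacket requires resetting its length before each receive.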