I can't find where the audio stream gets added to the 'speaker'. Is it possible for me to modify the stream and add it myself later? My impression is that libjingle is handling the stream and adding it for me. Where does libjingle add the audio stream?

Here is the libjingle-related part of my code:

import AVFoundation 
import UIKit 

let TAG = "ViewController" 
let AUDIO_TRACK_ID = TAG + "AUDIO" 
let LOCAL_MEDIA_STREAM_ID = TAG + "STREAM" 

class ViewController: UIViewController, RTCSessionDescriptionDelegate, RTCPeerConnectionDelegate { 

    var mediaStream: RTCMediaStream! 
    var localAudioTrack: RTCAudioTrack! 
    var remoteAudioTrack: RTCAudioTrack! 
    var renderer: RTCEAGLVideoView! 
    var renderer_sub: RTCEAGLVideoView! 
    var roomName: String!  

    override func viewDidLoad() { 
     super.viewDidLoad() 
     // Do any additional setup after loading the view, typically from a nib. 

     initWebRTC(); 
     sigConnect(wsUrl: "http://192.168.1.59:3000"); 

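     // Create the local audio track and bundle it into the media stream that gets attached to the peer connection.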
     localAudioTrack = peerConnectionFactory.audioTrack(withID: AUDIO_TRACK_ID) 
     mediaStream = peerConnectionFactory.mediaStream(withLabel: LOCAL_MEDIA_STREAM_ID) 
     mediaStream.addAudioTrack(localAudioTrack) 
    } 

    var peerConnectionFactory: RTCPeerConnectionFactory! = nil 
    var peerConnection: RTCPeerConnection! = nil 
    var pcConstraints: RTCMediaConstraints! = nil 
    var audioConstraints: RTCMediaConstraints! = nil 
    var mediaConstraints: RTCMediaConstraints! = nil 

    var wsServerUrl: String! = nil 
    var peerStarted: Bool = false 

    func initWebRTC() { 
     RTCPeerConnectionFactory.initializeSSL() 
     peerConnectionFactory = RTCPeerConnectionFactory() 

     pcConstraints = RTCMediaConstraints() 
     audioConstraints = RTCMediaConstraints() 
     mediaConstraints = RTCMediaConstraints(
      mandatoryConstraints: [ 
       RTCPair(key: "OfferToReceiveAudio", value: "true"), 
      ], 
      optionalConstraints: nil) 
    } 

    func prepareNewConnection() -> RTCPeerConnection { 
     var icsServers: [RTCICEServer] = [] 

     icsServers.append(RTCICEServer(uri: NSURL(string: "stun:stun.l.google.com:19302") as URL!, username: "", 
     password: "")) 

     let rtcConfig: RTCConfiguration = RTCConfiguration() 
     rtcConfig.tcpCandidatePolicy = RTCTcpCandidatePolicy.disabled 
     rtcConfig.bundlePolicy = RTCBundlePolicy.maxBundle 
     rtcConfig.rtcpMuxPolicy = RTCRtcpMuxPolicy.require 

     peerConnection = peerConnectionFactory.peerConnection(withICEServers: icsServers, constraints: pcConstraints, delegate: self) 
     peerConnection.add(mediaStream); 
     return peerConnection; 
    } 


    func peerConnection(_ peerConnection: RTCPeerConnection!, signalingStateChanged stateChanged: RTCSignalingState) { 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, iceConnectionChanged newState: RTCICEConnectionState) { 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, iceGatheringChanged newState: RTCICEGatheringState) { 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, gotICECandidate candidate: RTCICECandidate!) { 
     if (candidate != nil) { 
      print("iceCandidate: " + candidate.description) 
      let json:[String: AnyObject] = [ 
       "type" : "candidate" as AnyObject, 
       "sdpMLineIndex" : candidate.sdpMLineIndex as AnyObject, 
       "sdpMid" : candidate.sdpMid as AnyObject, 
       "candidate" : candidate.sdp as AnyObject 
      ] 
      sigSend(msg: json as NSDictionary) 
     } else { 
      print("End of candidates. -------------------") 
     } 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) { 
     if (peerConnection == nil) { 
      return 
     } 

     if (stream.audioTracks.count > 1) { 
      print("Weird-looking stream: " + stream.description) 
      return 
     } 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, removedStream stream: RTCMediaStream!) { 
    } 

    func peerConnection(_ peerConnection: RTCPeerConnection!, didOpen dataChannel: RTCDataChannel!) { 
    } 

    func peerConnection(onRenegotiationNeeded peerConnection: RTCPeerConnection!) { 

    } 
} 

My idea is that I can catch the audio stream in the delegate callback below. Is that correct? And could I then manually add the audio stream to the speaker?

func peerConnection(_ peerConnection: RTCPeerConnection!, addedStream stream: RTCMediaStream!) {
    if (peerConnection == nil) {
        return
    }

    if (stream.audioTracks.count > 1) {
        print("Weird-looking stream: " + stream.description)
        return
    }
}
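For example (a rough sketch; it assumes the bridged audioTracks array exposes RTCAudioTrack objects), I imagine keeping a reference to the remote track inside that callback:

// Sketch: inside the addedStream callback, hold on to the first remote
// audio track so it can be inspected or muted later.
if let track = stream.audioTracks.first as? RTCAudioTrack {
    remoteAudioTrack = track
}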

Answer


When a WebRTC call is connected, the WebRTC stack uses the platform APIs to play and record the audio. You can only control things such as:

  1. Muting or unmuting the audio stream (see the sketch after this list)
  2. Using the system APIs to raise or lower the volume, or to change the audio configuration
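Point 1 amounts to toggling the enabled flag on the audio track. A minimal sketch, assuming the old libjingle binding where the method is setEnabled(_:) (newer WebRTC pods expose an isEnabled property instead):

// Sketch: mute or unmute the local microphone by toggling the
// enabled flag on the local audio track.
func setMicrophoneMuted(_ muted: Bool) {
    localAudioTrack.setEnabled(!muted)
}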

You cannot add the stream to the speaker manually, but you can change the default audio output to the loudspeaker or the earpiece/headset so that the WebRTC audio is routed to the right output. This can be done with the AVFoundation APIs (AVAudioSession).
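For example, routing the call audio to the loudspeaker could look roughly like this (a sketch; the category and options shown are assumptions about what your app needs, and it should be called once the audio session is in use):

import AVFoundation

// Sketch: route the call audio to the loudspeaker instead of the earpiece.
func routeAudioToSpeaker() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                with: [.defaultToSpeaker, .allowBluetooth])
        try session.overrideOutputAudioPort(.speaker)
        try session.setActive(true)
    } catch {
        print("Audio session error: \(error)")
    }
}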