2015-12-03 95 views
1

在我的代碼中,我創建了兩個聲音,sound1和sound2。每個聲音包含許多樣本,可以同時播放相同的聲音。問題是,如果我創建的AVAudioPlayerNode(每個都連接一個AVAudioUnitTimePitch)看起來多於6到8個,音頻就會完全混亂。當我把樣本數量增加太多時,我甚至無法播放單個聲音。我不確定是我的代碼有錯,還是AVAudioEngine對節點數量有限制。AVAudioEngine可以創建的節點數是否有上限?

/// Owns a single AVAudioEngine and a small set of pre-built sounds.
/// Each `Sound` keeps a pool of `Sample`s (player + time-pitch node pairs)
/// so the same buffer can overlap with itself a bounded number of times,
/// instead of attaching a new node per playback.
class AudioManager{
    var audioEngine:AVAudioEngine!;
    var mixer:AVAudioMixerNode!;
    var sound1:Sound!;
    var sound2:Sound!;

    init(){
        audioEngine = AVAudioEngine();
        // Accessing mainMixerNode lazily creates the mixer and wires it
        // to the output node.
        mixer = audioEngine.mainMixerNode;

        // Build the whole node graph BEFORE starting the engine.
        // Attaching/connecting player nodes to a running engine is a common
        // source of broken audio on older devices (see the comment thread:
        // reducing node count / graph churn fixed playback there).
        sound1 = Sound(aManager: self, path: "assets/sounds/waterRefill", ofType: "mp3", numOfSamples: 7);
        sound2 = Sound(aManager: self, path: "assets/sounds/balloonCreate", ofType: "mp3", numOfSamples: 2);

        do{
            try audioEngine.start();
        }catch let e as NSError{
            print("Error Starting AudioEngine \(e)");
        }
    }

    /// Plays sound1 at normal volume and pitch.
    func playSound(){
        sound1.play(1.0, pitch: 1.0);
    }

    /// Plays sound2 at normal volume and pitch.
    func playSound2(){
        sound2.play(1.0, pitch: 1.0);
    }

    /// One decoded audio file plus a fixed pool of `numSamples` players,
    /// cycled round-robin so overlapping triggers reuse idle nodes.
    class Sound {
        var audioManager:AudioManager!;
        var audioFileBuffer:AVAudioPCMBuffer!;
        var numSamples:Int = 1;         // pool size, clamped to >= 1
        var audioIndex:Int = 0;         // round-robin cursor into sampleList
        var sampleList:[Sample] = [Sample]();

        init(aManager:AudioManager, path:String, ofType:String, numOfSamples:Int){
            audioManager = aManager;
            numSamples = max(1, numOfSamples);
            audioFileBuffer = createAudioBuffer(path, ofType: ofType);
            for _ in 0..<numSamples {
                sampleList.append(Sample(sound: self));
            }
        }

        /// Reads the whole file at `path`.`ofType` from the main bundle into
        /// a PCM buffer in the file's processing format.
        /// - Returns: the filled buffer, or nil if the file could not be read.
        /// - Note: force-unwraps the bundle path — a missing resource is a
        ///   packaging bug and should crash loudly in development.
        func createAudioBuffer(path:String, ofType:String)-> AVAudioPCMBuffer?{
            let filePath: String = NSBundle.mainBundle().pathForResource(path, ofType: ofType)!
            let fileURL: NSURL = NSURL(fileURLWithPath: filePath)
            do{
                let audioFile = try AVAudioFile(forReading: fileURL)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = UInt32(audioFile.length)
                let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
                do{
                    try audioFile.readIntoBuffer(audioFileBuffer)
                    return audioFileBuffer;
                }catch let e as NSError{
                    print("Error loading Audio Into Buffer: \(e)");
                }
            }catch let e as NSError{
                print("Error loading Audio File: \(e)");
            }
            return nil;
        }

        /// Advances the round-robin cursor, wrapping at the pool size.
        private func runIndex(){
            audioIndex = (audioIndex + 1) % numSamples;
        }

        /// Triggers playback on the first idle player in the pool.
        /// Scans at most `numSamples` entries; if every player is busy the
        /// trigger is dropped (same policy as before — bounded voice count).
        func play(volume:Float, pitch:Float){
            var count:Int = 0;
            while(count < numSamples){
                if(numSamples > 1){
                    runIndex();
                }
                let sample = sampleList[audioIndex];
                if (!sample.pitchPlayer.playing) {
                    sample.volume = volume;
                    sample.pitch = pitch;
                    sample.playSample();
                    break;
                }
                count += 1;
            }
        }

        /// One voice: an AVAudioPlayerNode feeding an AVAudioUnitTimePitch,
        /// attached once at construction and connected into the shared mixer.
        class Sample{
            var parentSound:Sound!
            var pitchPlayer:AVAudioPlayerNode!;
            var timePitch:AVAudioUnitTimePitch!;
            var volume:Float = 1.0
            var pitch:Float = 1.0

            init(sound:Sound){
                parentSound = sound;
                pitchPlayer = AVAudioPlayerNode();
                timePitch = AVAudioUnitTimePitch();

                parentSound.audioManager.audioEngine.attachNode(pitchPlayer);
                parentSound.audioManager.audioEngine.attachNode(timePitch);

                // player -> timePitch -> main mixer, all in the buffer's format.
                parentSound.audioManager.audioEngine.connect(pitchPlayer, to: timePitch, format: parentSound.audioFileBuffer.format);
                parentSound.audioManager.audioEngine.connect(timePitch, to: parentSound.audioManager.mixer, format: parentSound.audioFileBuffer.format);
            }

            func playSample(){
                pitchPlayer.volume = volume;
                timePitch.pitch = pitch;

                // Schedule first, then start the player (canonical order).
                // The completion handler fires on the engine's internal thread,
                // possibly while the engine is still using the node — hop to
                // the main queue before calling stop(), and capture self weakly
                // since the handler may outlive this Sample.
                pitchPlayer.scheduleBuffer(parentSound.audioFileBuffer, atTime: nil, options:.Interrupts, completionHandler: { [weak self] in
                    dispatch_async(dispatch_get_main_queue()) {
                        self?.pitchPlayer.stop();
                    }
                });
                pitchPlayer.play();
            }
        }
    }
}
+0

我的使用AVAAudioEngine和AVAudioPlayerNodes播放文件的應用程序不是在某些設備上播放聲音(雖然在iPhone 6s Plus等新設備上工作正常)。當我減少引擎上的節點數量時,它可以在較舊的設備上運行。我正在研究如何調整我的代碼以允許它在這些較舊的設備上工作,因爲節點數似乎是問題所在。 – vikzilla

回答

0

我從來沒有聽說過AVAudioEngine圖中節點數量有任何硬性限制,但我確實見過在添加了幾百個節點之後出現非常糟糕的性能。我找到的解決方案是在節點播放完畢後把它們從引擎中移除。

scheduleBuffer的完成處理程序是執行此操作的好地方,但我會把移除節點的調用包在dispatch_async到主隊列的閉包裡,因為音頻引擎在調用完成處理程序時可能仍在使用該節點。

另一種選擇是在播放完樣本後重新使用播放器節點,而不是爲下一個樣本創建一個新節點,但這種方法可能稍微複雜一點。

相關問題