2017-07-25 63 views
0

我想在swift 3中編寫一個應用程序,以便播放排隊的音頻文件時,從一個傳遞到另一個時沒有任何間隙,裂縫或噪音。Mac - Swift 3 - 排隊音頻文件和播放

我的第一次嘗試是使用AVAudioPlayer和AVAudioPlayerDelegate（AVAudioPlayer using array to queue audio files - Swift），但我不知道如何預加載下一首歌以避免出現間隙。即使我知道如何去做，我也不確定這是實現我的目標的最好方法。 AVQueuePlayer似乎是一個更適合這項工作的候選方案，它正是爲此目的而設計的，但我沒有找到任何示例來幫助我。 也許這只是一個預加載或緩衝的問題？我有點迷失在這各種可能性的海洋中。

歡迎任何建議。

+0

除非你對音頻文件做平滑的淡入淡出，否則總是會有爆音，因爲波形不會無縫銜接，而波形上的「跳變」會被感知爲頻譜中的高頻聲音。當然，如果這些音頻文件本來就是同一段不間斷聲音切出的片段，則應該沒有問題... –

+0

它們是將MIDI合成轉換爲音頻文件的樣本。它們都以「0」開始,並以「0」結束。通過擴大搜索範圍,我找到了一個完美的解決方案。它基於這篇文章:https://stackoverflow.com/questions/30479403/concatenate-two-audio-files-in-swift-and-play-them?rq=1。我很快就會發布代碼。 – Fredo

回答

0

它並不是十分完美，特別是如果你想執行兩次或更多次（會出現「文件已存在」錯誤），但它可以作爲一個基礎。

它所做的是取兩個文件（我的是4秒的樣本），將它們編碼到一個文件中並播放結果文件。如果你有成百上千個文件，無論是否隨機選取，都會變得非常有趣。

mergeAudioFiles函數的所有功勞歸於@Peyman和@Pigeon_39。 Concatenate two audio files in Swift and play them

斯威夫特3

import Cocoa 
import AVFoundation 

var action = AVAudioPlayer() 
let path = Bundle.main.path(forResource: "audiofile1.aif", ofType:nil)! 
let url = URL(fileURLWithPath: path) 
let path2 = Bundle.main.path(forResource: "audiofile2.aif", ofType:nil)! 
let url2 = URL(fileURLWithPath: path2) 
let array1 = NSMutableArray(array: [url, url2]) 


class ViewController: NSViewController, AVAudioPlayerDelegate 
{ 

    @IBOutlet weak var LanceStop: NSButton! 

    override func viewDidLoad() 
    { 
     super.viewDidLoad() 
    } 
    override var representedObject: Any? 
    { 
     didSet 
     { 
     // Update the view, if already loaded. 
     } 
    } 

    @IBAction func Lancer(_ sender: NSButton) 
    { 
     mergeAudioFiles(audioFileUrls: array1) 
     let url3 = NSURL(string: "/Users/ADDUSERNAMEHERE/Documents/FinalAudio.m4a") 

     do 
     { 
      action = try AVAudioPlayer(contentsOf: url3 as! URL) 
      action.delegate = self 
      action.numberOfLoops = 0 
      action.prepareToPlay() 
      action.volume = 1 
      action.play() 
     } 
     catch{print("error")} 

    } 


    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) 
    { 
     if flag == true 
     { 

     } 
    } 

    var mergeAudioURL = NSURL() 

    func mergeAudioFiles(audioFileUrls: NSArray) { 
     //audioFileUrls.adding(url) 
     //audioFileUrls.adding(url2) 
     let composition = AVMutableComposition() 

     for i in 0 ..< audioFileUrls.count { 

      let compositionAudioTrack :AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID()) 

      let asset = AVURLAsset(url: (audioFileUrls[i] as! NSURL) as URL) 

      let track = asset.tracks(withMediaType: AVMediaTypeAudio)[0] 

      let timeRange = CMTimeRange(start: CMTimeMake(0, 600), duration: track.timeRange.duration) 

      try! compositionAudioTrack.insertTimeRange(timeRange, of: track, at: composition.duration) 
     } 

     let documentDirectoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! as NSURL 
     self.mergeAudioURL = documentDirectoryURL.appendingPathComponent("FinalAudio.m4a")! as URL as NSURL 

     let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) 
     assetExport?.outputFileType = AVFileTypeAppleM4A 
     assetExport?.outputURL = mergeAudioURL as URL 
     assetExport?.exportAsynchronously(completionHandler: 
      { 
       switch assetExport!.status 
       { 
       case AVAssetExportSessionStatus.failed: 
        print("failed \(assetExport?.error)") 
       case AVAssetExportSessionStatus.cancelled: 
        print("cancelled \(assetExport?.error)") 
       case AVAssetExportSessionStatus.unknown: 
        print("unknown\(assetExport?.error)") 
       case AVAssetExportSessionStatus.waiting: 
        print("waiting\(assetExport?.error)") 
       case AVAssetExportSessionStatus.exporting: 
        print("exporting\(assetExport?.error)") 
       default: 
        print("Audio Concatenation Complete") 
       } 
     }) 
    } 
}