2017-09-19 71 views
1

一旦用戶停止說話,我怎樣才能停止錄音? 像Siri。一旦你說「嗨Siri」它會迴應你的聲音。意味着Siri應用程序正在收聽音頻,直到您停止說話。也就是要在AVAudioRecorder會話中偵測靜音。

我正在嘗試做同樣的事情。如果我說,獲取天氣詳情一旦我停止了我的聲音。我想觸發一種方法或者用錄製的音頻調用API直到停止。

我的要求是應用程序應該不斷地聽用戶找到語音結束事件發送數據到服務器或者只是觸發一個方法。

代碼:

import UIKit 
import CoreAudio 
import CoreAudioKit 
import AVFoundation 
import Foundation 
import AVKit 



/// Listens continuously via the microphone, (re)creating an `AVAudioRecorder`
/// on a 5-second poll until a loud-enough signal is detected, at which point
/// recording starts. Recording is torn down and polling resumes when the
/// recorder reports it has finished.
class ViewController: UIViewController, AVAudioRecorderDelegate {

    private var recorder: AVAudioRecorder? = nil
    private var isRecording: Bool = false
    private var timer: Timer? = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        permissionWasGranted { (isValid) in
            // Bug fix: the original printed the literal string "isValied"
            // instead of the permission result, and started the poll timer
            // even when microphone permission had been denied.
            print("permission granted: \(isValid)")
            guard isValid else { return }
            self.isRecording = false
            self.initiateTimer()
        }
    }

    /// Arms the repeating 5-second poll that drives `updateTimer()`.
    @objc func initiateTimer() {
        self.timer = Timer.scheduledTimer(timeInterval: 5,
                                          target: self,
                                          selector: #selector(self.updateTimer),
                                          userInfo: nil,
                                          repeats: true)
    }

    /// Poll tick: while not recording, rebuild the recorder and probe the
    /// input level; once recording has started this is a no-op log.
    @objc func updateTimer() {
        if !isRecording {
            self.initRecorder()
            print("Recording initiated")
        } else {
            print("Recording started")
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Returns the fixed output URL ("recording.m4a") inside the app's
    /// Documents directory. Note: every session overwrites the same file.
    func getDocumentsDirectory() -> URL {
        let urls = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return urls.first!.appendingPathComponent("recording.m4a")
    }

    // MARK: - AVAudioRecorderDelegate

    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        recorder.stop()
        recorder.deleteRecording()
        recorder.prepareToRecord()
        isRecording = false
        // Bug fix: the repeating timer was invalidated when recording began,
        // so the original's single call to updateTimer() here polled exactly
        // once and then monitoring stopped forever. Re-arm the repeating
        // timer so the listen/record cycle resumes.
        self.initiateTimer()
    }

    /// Resolves the record permission and invokes `result` EXACTLY once.
    ///
    /// Bug fix: the original fell through to an unconditional `result(false)`
    /// at the end of the switch, so for `.undetermined` the callback could
    /// fire twice (false immediately, then true after the async request), and
    /// a denial inside the async request never reported at all.
    func permissionWasGranted(result: @escaping (_: Bool) -> ()) {
        switch AVAudioSession.sharedInstance().recordPermission() {
        case AVAudioSessionRecordPermission.granted:
            print("Permission granted")
            result(true)
        case AVAudioSessionRecordPermission.denied:
            print("Permission denied")
            result(false)
        case AVAudioSessionRecordPermission.undetermined:
            print("Request permission here")
            AVAudioSession.sharedInstance().requestRecordPermission({ granted in
                // Report whichever answer the user gave — once.
                result(granted)
            })
        }
    }

    /// Builds a fresh AAC recorder on the shared audio session and, if the
    /// metered level is in the voice range, stops polling and starts recording.
    func initRecorder() {
        let settings = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000,
            AVNumberOfChannelsKey: 1,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        do {
            let session = AVAudioSession.sharedInstance()
            try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try session.overrideOutputAudioPort(AVAudioSessionPortOverride.speaker)
            try session.setActive(true)

            recorder = AVAudioRecorder(url: getDocumentsDirectory(), settings: settings)
            recorder!.delegate = self
            recorder!.isMeteringEnabled = true

            if !recorder!.prepareToRecord() {
                print("Error: AVAudioRecorder prepareToRecord failed")
            }

            // NOTE(review): averagePower(forChannel:) only returns meaningful
            // values while the recorder is actually recording and after
            // updateMeters() has been called; probing it here, before
            // record(), cannot reliably detect voice. Consider calling
            // record() first and metering from a short repeating timer.
            let decibels = self.getDispersyPercent()
            if decibels > -120 && decibels < -20 {
                self.timer?.invalidate()
                isRecording = true
                self.start()
            }
        } catch {
            print("Error: AVAudioRecorder creation failed")
        }
    }

    /// Begins capture and primes the level meters.
    func start() {
        recorder?.record()
        recorder?.updateMeters()
    }

    /// Refreshes the recorder's metering values, if a recorder exists.
    func update() {
        recorder?.updateMeters()
    }

    /// Average power (dBFS) of channel 0, or 0 when no recorder exists.
    /// 0 dB is full scale; approximately -160 dB is silence.
    func getDispersyPercent() -> Float {
        guard let recorder = recorder else { return 0 }
        return recorder.averagePower(forChannel: 0)
    }

}
+0

您需要跟蹤記錄 –

+0

的分貝(dB)這個答案可以幫助你https://stackoverflow.com/a/43429136/468724 。請記住,您需要觸發定時器以在特定時間間隔後獲取值 –

+0

您可以使用iOS sdk中的語音合成器。它會立即將您的聲音轉換爲文字。對於演示https://github.com/Gagan5278/SpeechSynthesizer –

回答

0

您可以定期監聽器添加到記錄器(開始錄音),並檢查recorder.averagePower(forChannel: 0)你可以處理的最低水平停止錄製。

+0

我已經嘗試過,但沒有工作 – Damodar

+0

你必須要小心這一點,我不知道你是否知道。 0值是最大值 **返回值 記錄聲音的當前平均功率,以分貝爲單位。 0dB的返回值表示滿量程或最大功率;返回值-160 dB表示最小功率(即接近無聲)。 如果提供給音頻記錄器的信號超過±滿量程,則返回值可能會超過0(也就是說,它可能會進入正範圍)。** [鏈接](https://developer.apple.com/documentation/avfoundation/avaudiorecorder/1387176-averagepower) – Alexkater

0

在這裏,我創建了我的功能,將實際檢測沉默5秒,如果條件滿足,你可以停止錄製當時

- 我用錄像管理NSObject類,所以你可以從下面得到代碼功能和管理使用它在你的

代碼

// Starts a new recording session and stops it after 5 seconds of silence.
     /// Starts recording 16-bit/16 kHz mono PCM to a uniquely-named .wav file
     /// and arms a 0.1 s repeating meter timer. A smoothed (low-pass) peak
     /// level is tracked; once the level stays below the voice threshold for
     /// 5 consecutive seconds the recording is stopped.
     ///
     /// Relies on the enclosing manager class for shared state:
     /// `Manager.recorder`, `Manager.meterTimer`, `Manager.recorderApc0`,
     /// `Manager.recorderPeak0` and `RecordingManager.lowPassResults`.
     func newSessionIfSilence(){

      // Destination file for this session.
      let audioFileURL = getDocumentsDirectory().appendingPathComponent("\(getUniqueName()).wav")
      // Seconds of continuous silence observed so far.
      var silentSeconds = Float()
      // Linear PCM settings (WAV container).
      let settings: [String: Any] = [
       AVFormatIDKey: Int(kAudioFormatLinearPCM),
       AVSampleRateKey: 16000,
       AVNumberOfChannelsKey: 1,
       AVLinearPCMBitDepthKey: 16,
       AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
       AVLinearPCMIsBigEndianKey: false,
       AVLinearPCMIsFloatKey: false,
       ]
      do {
       Manager.recorder = try AVAudioRecorder(url: audioFileURL, settings: settings)
       Manager.recorder?.delegate = self
       Manager.recorder?.isMeteringEnabled = true
       Manager.recorder?.prepareToRecord()
       Manager.recorder?.record()

       // Poll the meters every 0.1 s.
       Manager.meterTimer = Timer.scheduledTimer(withTimeInterval: 0.10, repeats: true, block: { (timer: Timer) in

        if let recorder = Manager.recorder
        {
         recorder.updateMeters()

         Manager.recorderApc0 = recorder.averagePower(forChannel: 0)
         Manager.recorderPeak0 = recorder.peakPower(forChannel: 0)

         // Convert the peak from dBFS to a linear 0...1 scale
         // (0 = silence, 1 = full scale) and smooth it with an
         // exponential low-pass filter.
         let ALPHA: Double = 0.05
         let peakPowerForChannel = pow(Double(10), (0.05 * Double(Manager.recorderPeak0)))
         RecordingManager.lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * RecordingManager.lowPassResults

         // Bug fix: the original compared against 0, but the linear
         // level is strictly positive (pow(10, x) > 0), so after the
         // very first tick the accumulator stayed > 0 forever, the
         // silence branch never ran, and the recording never stopped.
         // Compare against a real voice threshold instead.
         // TODO(review): 0.35 suits normal speech on this 0...1 scale;
         // tune for your environment/microphone.
         let voiceThreshold: Double = 0.35
         if RecordingManager.lowPassResults > voiceThreshold {
          print("Voice detected")
          // Sound present: reset the silence clock.
          silentSeconds = 0.0
         }
         else
         {
          // Timer fires every 0.1 s, so accumulate in 0.1 s steps.
          silentSeconds += 0.1

          // 5 seconds of uninterrupted silence: stop this session.
          if silentSeconds > 5.0 {
           silentSeconds = 0.0

           // Bug fix: the original left the meter timer running
           // after stop(), polling a dead recorder forever.
           timer.invalidate()
           recorder.stop()
          }
         }
        }
       })

      } catch {
       // Creation/start failed — report and finish unsuccessfully.
       print("Error Handling: \(error.localizedDescription)")
       self.finishRecording(success: false)
      }

     }