2014-01-25 101 views
3

我需要保存麥克風輸入，以便稍後在 AudioElement 中使用（原標題：如何保存麥克風音頻輸入？）。我是這樣獲取麥克風輸入的：

// Request microphone access; the returned Future completes with a live
// MediaStream once the user grants permission.
window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
    // what should go here?  (note: Dart line comments use //, not #)
});

我應該怎麼做才能保存音頻?

回答

4

網上有很多相當無意義的示例，只是讓你在當前瀏覽器窗口中回放正在進行的錄音——我看不出這樣做有什麼實際用例。對於視頻我還可以想像：比如構建一個類似 Skype 的應用時，需要一個預覽窗口來確認自己的畫面是否合適；但對音頻來說……

不過我找到了一篇不錯的帖子：From microphone to .WAV with: getUserMedia and Web Audio

我已經在鏈接的文章中移植了一部分代碼,展示瞭如何獲取數據。

import 'dart:html'; 
import 'dart:async'; 
import 'dart:web_audio'; 

void main() { 
    // Ask for both camera and microphone; the resulting MediaStream carries
    // the live input tracks.
    window.navigator.getUserMedia(video: true, audio: true).then((MediaStream stream) { 
    var context = new AudioContext(); 
    // Gain node used purely as a routing point between the mic source and
    // the recorder node.
    GainNode volume = context.createGain(); 
    // Wrap the MediaStream as a Web Audio source node so it can be wired
    // into the processing graph.
    MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream); 
    audioInput.connectNode(volume); 

    int bufferSize = 2048; 
    // ScriptProcessorNode with 2 input / 2 output channels; fires
    // onAudioProcess every `bufferSize` sample frames.
    ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2); 

    recorder.onAudioProcess.listen((AudioProcessingEvent e) { 
    print('recording'); 
    // Raw Float32 PCM samples for the left/right channels of this buffer.
    var left = e.inputBuffer.getChannelData(0); 
    var right = e.inputBuffer.getChannelData(1); 
    print(left); 
    // process Data 
    }); 

    // mic -> volume -> recorder -> destination; the node must be connected
    // to a destination for onAudioProcess to fire.
    volume.connectNode(recorder); 
    recorder.connectNode(context.destination); 


/** 
* [How to get a file or blob from an object URL?](http://stackoverflow.com/questions/11876175) 
* [Convert blob URL to normal URL](http://stackoverflow.com/questions/14952052/convert-blob-url-to-normal-url) 
* Doesn't work as it seems blob urls are not supported in Dart 
*/ 
// String url = Url.createObjectUrlFromStream(stream); 
// var xhr = new HttpRequest(); 
// xhr.responseType = 'blob'; 
// xhr.onLoad.listen((ProgressEvent e) { 
//  print(xhr.response); 
//  var recoveredBlog = xhr.response; 
//  var reader = new FileReader(); 
// 
//  reader.onLoad.listen((e) { 
//  var blobAsDataUrl = reader.result; 
//  reader.readAsDataUrl(blobAsDataUrl); 
//  }); 
// }); 
// xhr.open('GET', url); 
// xhr.send(); 


/** 
* only for testing purposes 
**/ 
// var audio = document.querySelector('audio') as AudioElement; 
// audio.controls = true; 
// audio.src = url; 
    }); 
} 
+1

感謝您分享上述鏈接和部分解決方案。完整的解決方案需要處理原始數據並生成類似於鏈接頁面的實際聲音文件。我正在將JS解決方案翻譯成Dart。我會盡快發佈有關結果的更新。 –

+1

[This](https://github.com/nawafnaim/dart_examples/blob/master/save_mic_sound_to_file.dart)是我在 Dart 中重寫該 JS 解決方案的最佳嘗試。它能從原始數據生成一個 wav 文件，但文件開頭有兩段格式錯誤的數據塊，導致文件損壞（就我通過與正常的 wav 文件對比所能看出的而言）。 –

+0

我已經成功設法重寫了您引用的JS解決方案。查看我的答案獲取完整的代碼。謝謝。 –

1

感謝GünterZöchbauer指出this JS solution。我已經重寫了Dart中的代碼並且它可以工作。

import 'dart:html'; 
import 'dart:async'; 
import 'dart:web_audio'; 
import 'dart:typed_data'; 

bool recording;        // true while onAudioProcess callbacks should capture samples
List leftchannel;      // accumulated Float32List chunks from channel 0
List rightchannel;     // accumulated Float32List chunks from channel 1
int recordingLength;   // total sample frames captured per channel so far
int sampleRate;        // assumed capture rate in Hz — TODO confirm against context.sampleRate

/// Records microphone input via a ScriptProcessorNode, and on "Stop" packs
/// the captured samples into a 16-bit stereo PCM WAV file offered as a
/// download link.
void main() {

    leftchannel = [];
    rightchannel = [];
    recordingLength = 0;
    // NOTE(review): hard-coded rate; presumably should match
    // context.sampleRate — confirm on the target browser.
    sampleRate = 44100;
    recording = true;

    // "Stop" button: stops capture and assembles the WAV file.
    ButtonElement stopBtn = new ButtonElement()
    ..text = 'Stop'
    ..onClick.listen((_) {

     // stop recording
     recording = false;

     // Flatten the per-callback chunks of each channel into one buffer.
     var leftBuffer = mergeBuffers (leftchannel, recordingLength);
     var rightBuffer = mergeBuffers (rightchannel, recordingLength);
     // Interleave both channels as L0, R0, L1, R1, ...
     var interleaved = interleave(leftBuffer, rightBuffer);

     // 44-byte canonical WAV header + 2 bytes per 16-bit sample.
     var buffer = new Uint8List(44 + interleaved.length * 2);
     ByteData view = new ByteData.view(buffer);

     // RIFF chunk descriptor.
     writeUTFBytes(view, 0, 'RIFF');
     // BUG FIX: the RIFF chunk size is the file length minus the 8 bytes of
     // the 'RIFF' tag and this field itself, i.e. 36 + data size — the
     // original wrote 44 + data size, overstating the file by 8 bytes and
     // producing a malformed header.
     view.setUint32(4, 36 + interleaved.length * 2, Endianness.LITTLE_ENDIAN);
     writeUTFBytes(view, 8, 'WAVE');

     // 'fmt ' sub-chunk: 16-byte PCM format block, audio format 1 (PCM).
     writeUTFBytes(view, 12, 'fmt ');
     view.setUint32(16, 16, Endianness.LITTLE_ENDIAN);
     view.setUint16(20, 1, Endianness.LITTLE_ENDIAN);

     // stereo (2 channels)
     view.setUint16(22, 2, Endianness.LITTLE_ENDIAN);
     view.setUint32(24, sampleRate, Endianness.LITTLE_ENDIAN);
     // byte rate = sampleRate * channels(2) * bytesPerSample(2)
     view.setUint32(28, sampleRate * 4, Endianness.LITTLE_ENDIAN);
     // block align = channels(2) * bytesPerSample(2)
     view.setUint16(32, 4, Endianness.LITTLE_ENDIAN);
     // bits per sample
     view.setUint16(34, 16, Endianness.LITTLE_ENDIAN);

     // 'data' sub-chunk: size of the PCM payload in bytes.
     writeUTFBytes(view, 36, 'data');
     view.setUint32(40, interleaved.length * 2, Endianness.LITTLE_ENDIAN);

     // Scale each float sample (-1.0..1.0) to a signed 16-bit integer.
     var lng = interleaved.length;
     var index = 44;
     var volume = 1;
     for (var i = 0; i < lng; i++){
     view.setInt16(index, (interleaved[i] * (0x7FFF * volume)).truncate(), Endianness.LITTLE_ENDIAN);
     index += 2;
     }

     // Final binary blob. NOTE(review): passing the ByteData view relies on
     // the browser accepting an ArrayBufferView blob part — confirm, or pass
     // `buffer` directly.
     var blob = new Blob ([ view ] , 'audio/wav' );

     // Offer the WAV for download through an object URL.
     String url = Url.createObjectUrlFromBlob(blob);
     AnchorElement link = new AnchorElement()
     ..href = url
     ..text = 'download'
     ..download = 'output.wav';
     document.body.append(link);

    });

    document.body.append(stopBtn);

    window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
    var context = new AudioContext();
    GainNode volume = context.createGain();
    MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream);
    audioInput.connectNode(volume);

    int bufferSize = 2048;
    ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2);

    // Copy each callback's channel data; the engine reuses the underlying
    // buffers, so a snapshot (fromList) is required.
    recorder.onAudioProcess.listen((AudioProcessingEvent e) {
     if (!recording) return;
     print('recording');
     var left = e.inputBuffer.getChannelData(0);
     var right = e.inputBuffer.getChannelData(1);
     print(left);

     // process Data
     leftchannel.add(new Float32List.fromList(left));
     rightchannel.add(new Float32List.fromList(right));
     recordingLength += bufferSize;

    });

    // The recorder must be connected to a destination for onAudioProcess to fire.
    volume.connectNode(recorder);
    recorder.connectNode(context.destination);

    });

}

/// Writes the code units of [string] into [view] starting at byte [offset],
/// one byte per character. Intended for ASCII chunk tags such as 'RIFF',
/// 'WAVE', 'fmt ' and 'data'.
void writeUTFBytes(ByteData view, offset, String string){
    var pos = offset;
    for (final unit in string.codeUnits) {
      view.setUint8(pos, unit);
      pos++;
    }
}

/// Interleaves two equal-length channel buffers into a single [Float32List]
/// laid out as L0, R0, L1, R1, ... (standard PCM frame order).
Float32List interleave(leftChannel, rightChannel){
    final total = leftChannel.length + rightChannel.length;
    final interleaved = new Float32List(total);

    var write = 0;
    var frame = 0;
    while (write < total) {
      interleaved[write++] = leftChannel[frame];
      interleaved[write++] = rightChannel[frame];
      frame++;
    }
    return interleaved;
}

/// Flattens [channelBuffer] — a list of per-callback sample buffers — into
/// one contiguous sample list.
///
/// [recordingLength] is unused (the flattened length already equals it) but
/// is kept so existing call sites remain valid. Also removes the original's
/// dead local `offset` and the deprecated `new List()` constructor.
List mergeBuffers(channelBuffer, recordingLength){
    final List result = [];
    for (final buffer in channelBuffer) {
      result.addAll(buffer);
    }
    return result;
}

您可以從github here中提取代碼。