2014-01-17 114 views
1

我有以下代碼（來自 http://forestmist.org/blog/web-audio-api-loops/ ），它運作良好。但我需要錄音功能：點擊按鈕後開始錄製，並能把錄下的音頻保存下來。請問如何為 Web Audio API 添加錄音功能？

<!doctype html> 
<html lang="en"> 
<head> 
    <meta charset="utf-8"> 
    <title>Web Audio API Loops Demo</title> 
</head> 
<body> 

    <form> 
     <button id="button-loop-1" type="button" value="1">Loop 1</button> 
     <button id="button-loop-2" type="button" value="2">Loop 2</button> 
    </form> 

    <script> 
    //-------------- 
    // Audio Object 
    //-------------- 
    // Central state holder for the demo: decoded buffers, feature-detection
    // results, the file list, and per-loop source nodes.
    var audio = {};
    audio.buffer = {};         // decoded AudioBuffers keyed by loop number
    audio.compatibility = {};  // resolved start/stop method names (standard vs. legacy)
    audio.files = ['synth.wav', 'beat.wav'];
    audio.proceed = true;      // flipped to false when the Web Audio API is unavailable
    audio.source_loop = {};    // looping source nodes keyed by loop number
    audio.source_once = {};    // one-shot grain nodes used by the noteOn fallback

    //----------------- 
    // Audio Functions 
    //----------------- 
    /**
     * Compute the playback offset (in seconds) that keeps loop `n` in sync
     * with whichever loop started playing earliest.
     * @param {string|number} n - loop number (key into audio.buffer)
     * @returns {number} seconds into the buffer, wrapped to its duration
     */
    audio.findSync = function(n) {
     var earliest = 0;

     // Find the smallest non-zero start time among all tracked loop sources.
     for (var key in audio.source_loop) {
      var started = audio.source_loop[key]._startTime;
      if (started > 0 && (earliest === 0 || started < earliest)) {
       earliest = started;
      }
     }

     // How far into the loop cycle we currently are, modulo the buffer length.
     if (audio.context.currentTime > earliest) {
      return (audio.context.currentTime - earliest) % audio.buffer[n].duration;
     }

     return 0;
    };

    /**
     * Toggle playback of loop `n`: stop it if it is already playing,
     * otherwise build a fresh looping source, sync it against the earliest
     * running loop, and start it (via the legacy noteOn path when required).
     * @param {string|number} n - loop number
     */
    audio.play = function(n) {
     if (audio.source_loop[n]._playing) {
      audio.stop(n);
      return;
     }

     // Source nodes are single-use, so a new one is created on every play.
     var loopSrc = audio.context.createBufferSource();
     loopSrc.buffer = audio.buffer[n];
     loopSrc.loop = true;
     loopSrc.connect(audio.context.destination);
     audio.source_loop[n] = loopSrc;

     var offset = audio.findSync(n);
     loopSrc._startTime = audio.context.currentTime;

     if (audio.compatibility.start === 'noteOn') {
      /*
      The deprecated noteOn() function does not support offsets.
      Compensate by playing the remainder of the buffer once with
      noteGrainOn(), then scheduling noteOn() to take over the looping.
      */
      var onceSrc = audio.context.createBufferSource();
      onceSrc.buffer = audio.buffer[n];
      onceSrc.connect(audio.context.destination);
      // Args: when, grain offset, grain duration. Caveat: if offset plus
      // duration exceeds the buffer length, playback starts from the
      // beginning of the buffer instead of the offset.
      onceSrc.noteGrainOn(0, offset, audio.buffer[n].duration - offset);
      audio.source_once[n] = onceSrc;

      // Queue the looping source to begin the instant the grain finishes.
      loopSrc[audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
     } else {
      loopSrc[audio.compatibility.start](0, offset);
     }

     loopSrc._playing = true;
    };

    /**
     * Stop loop `n` if it is playing and reset its bookkeeping so it can be
     * restarted in sync later.
     * @param {string|number} n - loop number
     */
    audio.stop = function(n) {
     var src = audio.source_loop[n];
     if (!src._playing) {
      return;
     }
     src[audio.compatibility.stop](0);
     src._playing = false;
     src._startTime = 0;
     // The noteOn fallback also spawned a one-shot grain source; stop it too.
     if (audio.compatibility.start === 'noteOn') {
      audio.source_once[n][audio.compatibility.stop](0);
     }
    };

    //----------------------------- 
    // Check Web Audio API Support 
    //----------------------------- 
    // Feature-detect the Web Audio API (prefixed on older WebKit browsers).
    // More info at http://caniuse.com/#feat=audio-api
    try {
     if (!window.AudioContext) {
      window.AudioContext = window.webkitAudioContext;
     }
     audio.context = new window.AudioContext();
    } catch (e) {
     // No usable implementation: skip the rest of the setup and inform the user.
     audio.proceed = false;
     alert('Web Audio API not supported in this browser.');
    }

    if (audio.proceed) {
     //---------------
     // Compatibility
     //---------------
     // Older WebKit exposed noteOn/noteGrainOn/noteOff instead of the
     // standard start()/stop(). Probe a throwaway source node once and
     // remember which method names to use everywhere else.
     (function() {
      var probe = audio.context.createBufferSource();
      audio.compatibility.start = typeof probe.start === 'function' ? 'start' : 'noteOn';
      audio.compatibility.stop = typeof probe.stop === 'function' ? 'stop' : 'noteOff';
     })();

     //-------------------------------
     // Setup Audio Files and Buttons
     //-------------------------------
     // Plain indexed loop: `for...in` over an array is unreliable (it walks
     // inherited enumerable keys and yields string indices). The IIFE pins
     // `i` for each async onload callback.
     for (var a = 0; a < audio.files.length; a++) {
      (function(i) {
       // Buffers/buttons are keyed 1..n while the files array is 0-based.
       var req = new XMLHttpRequest();
       req.open('GET', audio.files[i - 1], true);
       req.responseType = 'arraybuffer';
       req.onload = function() {
        audio.context.decodeAudioData(
         req.response,
         function(buffer) {
          audio.buffer[i] = buffer;
          audio.source_loop[i] = {};
          var button = document.getElementById('button-loop-' + i);
          button.addEventListener('click', function(e) {
           e.preventDefault();
           audio.play(this.value);
          });
         },
         function() {
          console.log('Error decoding audio "' + audio.files[i - 1] + '".');
         }
        );
       };
       // Surface network failures instead of failing silently.
       req.onerror = function() {
        console.log('Error loading audio "' + audio.files[i - 1] + '".');
       };
       req.send();
      })(a + 1);
     }
    }
    </script> 

</body> 
</html> 

回答

1

查看 Recorder.js（https://github.com/mattdiamond/Recorderjs）。它應該能幫助你。

+0

看看我如何將 Recorder.js 集成到我的 AudioRecorder 演示（http://github.com/cwilso/AudioRecorder）中。你並沒有確切地說你想要做什麼，但我想你可以將 ScriptProcessor 節點插入到你的音頻圖（audio graph）中，並從那裏開始錄製。 – cwilso