
I want to record the audio output of a simple drum sequencer and export it as a WAV file for download. I have a live link to my current attempt at this implementation: recorderJS record/download audio buffer WEB AUDIO API.

The sequencer's summed output is routed into the variable finalMixNode, but setting that as the input to recorder.js does not work. I suspect it is a problem with the audio context, but I can't figure it out. I did manage to create an oscillator and record its output, but I haven't been able to extend that to the sequencer.
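For reference, the oscillator test that did record looked roughly like the sketch below (reconstructed for illustration only; the variable names are placeholders, not the exact code):

// Sketch of the pattern that recorded an oscillator successfully:
// Recorder.js is given a node that has an output (a GainNode here), not context.destination.
var ctx = new (window.AudioContext || window.webkitAudioContext)();
var osc = ctx.createOscillator();
var gain = ctx.createGain();
osc.connect(gain);
gain.connect(ctx.destination);

var rec = new Recorder(gain); // Recorder.js taps this node internally via a ScriptProcessorNode
osc.start();
rec.record();
// later: rec.stop(); rec.exportWAV(function(blob) { /* build a download link from the blob */ });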

Here is the main JS code where I try to record the output. Hopefully someone can spot what I'm missing.

//audio node variables 
var context; 
var convolver; 
var compressor; 
var masterGainNode; 
var effectLevelNode; 
var lowPassFilterNode; 

var noteTime; 
var startTime; 
var lastDrawTime = -1; 
var LOOP_LENGTH = 16; 
var rhythmIndex = 0; 
var timeoutId; 
var testBuffer = null; 

var currentKit = null; 
var reverbImpulseResponse = null; 

var tempo = 120; 
var TEMPO_MAX = 200; 
var TEMPO_MIN = 40; 
var TEMPO_STEP = 4; 

if (window.hasOwnProperty('AudioContext') &&  !window.hasOwnProperty('webkitAudioContext')) { 
window.webkitAudioContext = AudioContext; 
} 

$(function() { 
init(); 
toggleSelectedListener(); 
playPauseListener(); 
lowPassFilterListener(); 
reverbListener(); 
createLowPassFilterSliders(); 
initializeTempo(); 
changeTempoListener(); 
}); 

function createLowPassFilterSliders() { 
$("#freq-slider").slider({ 
value: 1, 
min: 0, 
max: 1, 
step: 0.01, 
disabled: true, 
slide: changeFrequency 
}); 
$("#quality-slider").slider({ 
value: 0, 
min: 0, 
max: 1, 
step: 0.01, 
disabled: true, 
slide: changeQuality 
}); 
} 

function lowPassFilterListener() { 
$('#lpf').click(function() { 
$(this).toggleClass("active"); 
$(this).blur(); 
if ($(this).hasClass("btn-default")) {
    $(this).removeClass("btn-default");
    $(this).addClass("btn-warning");
    lowPassFilterNode.active = true;
    $("#freq-slider,#quality-slider").slider("option", "disabled", false);
}
else {
    $(this).addClass("btn-default");
    $(this).removeClass("btn-warning");
    lowPassFilterNode.active = false;
    $("#freq-slider,#quality-slider").slider("option", "disabled", true);
}
});
}

function reverbListener() {
$("#reverb").click(function() {
$(this).toggleClass("active");
$(this).blur();
if ($(this).hasClass("btn-default")) {
    $(this).removeClass("btn-default");
    $(this).addClass("btn-warning");
    convolver.active = true;
}
else {
    $(this).addClass("btn-default");
    $(this).removeClass("btn-warning");
    convolver.active = false;
}
});
}

function changeFrequency(event, ui) { 
var minValue = 40; 
var maxValue = context.sampleRate/2; 
var numberOfOctaves = Math.log(maxValue/minValue)/Math.LN2; 
var multiplier = Math.pow(2, numberOfOctaves * (ui.value - 1.0)); 
lowPassFilterNode.frequency.value = maxValue * multiplier; 
} 

function changeQuality(event, ui) { 
//30 is the quality multiplier, for now. 
lowPassFilterNode.Q.value = ui.value * 30; 
} 

function playPauseListener() { 
$('#play-pause').click(function() { 
var $span = $(this).children("span"); 
if($span.hasClass('glyphicon-play')) { 
    $span.removeClass('glyphicon-play'); 
    $span.addClass('glyphicon-pause'); 
    handlePlay(); 
} 
else { 
    $span.addClass('glyphicon-play'); 
    $span.removeClass('glyphicon-pause'); 
    handleStop(); 
    } 
}); 
} 

function toggleSelectedListener() { 
$('.pad').click(function() { 
$(this).toggleClass("selected"); 
}); 
} 

function init() { 
initializeAudioNodes(); 
loadKits(); 
loadImpulseResponses(); 
} 

function initializeAudioNodes() { 
context = new webkitAudioContext(); 
var finalMixNode; 
if (context.createDynamicsCompressor) { 
    // Create a dynamics compressor to sweeten the overall mix. 
    compressor = context.createDynamicsCompressor(); 
    compressor.connect(context.destination); 
    finalMixNode = compressor; 
    } else { 
    // No compressor available in this implementation. 
    finalMixNode = context.destination; 
    } 


// Create master volume. 
// for now, the master volume is static, but in the future there will be a slider 
masterGainNode = context.createGain(); 
masterGainNode.gain.value = 0.7; // reduce overall volume to avoid clipping 
masterGainNode.connect(finalMixNode); 

//connect all sounds to masterGainNode to play them 

//don't need this for now, no wet dry mix for effects 
// // Create effect volume. 
// effectLevelNode = context.createGain(); 
// effectLevelNode.gain.value = 1.0; // effect level slider controls this 
// effectLevelNode.connect(masterGainNode); 

// Create convolver for effect 
convolver = context.createConvolver(); 
convolver.active = false; 
// convolver.connect(effectLevelNode); 

//Create Low Pass Filter 
lowPassFilterNode = context.createBiquadFilter(); 
//this is for backwards compatibility, the type used to be an integer 
lowPassFilterNode.type = (typeof lowPassFilterNode.type === 'string') ?  'lowpass' : 0; // LOWPASS 
//default value is max cutoff, or passing all frequencies 
lowPassFilterNode.frequency.value = context.sampleRate/2; 
lowPassFilterNode.connect(masterGainNode); 
lowPassFilterNode.active = false; 
} 

function loadKits() { 
//name must be same as path 
var kit = new Kit("TR808"); 
kit.load(); 

//TODO: figure out how to test if a kit is loaded 
currentKit = kit; 
} 

function loadImpulseResponses() { 
reverbImpulseResponse = new ImpulseResponse("sounds/impulse-responses/matrix-reverb2.wav"); 
reverbImpulseResponse.load(); 
} 


//TODO delete this 
function loadTestBuffer() { 
var request = new XMLHttpRequest(); 
var url = "http://www.freesound.org/data/previews/102/102130_1721044-lq.mp3"; 
request.open("GET", url, true); 
request.responseType = "arraybuffer"; 

request.onload = function() { 
context.decodeAudioData(
    request.response, 
    function(buffer) { 
    testBuffer = buffer; 
    }, 
    function(buffer) { 
    console.log("Error decoding drum samples!"); 
    } 
); 
} 
request.send(); 
} 

//TODO delete this 
function sequencePads() { 
$('.pad.selected').each(function() { 
$('.pad').removeClass("selected"); 
$(this).addClass("selected"); 
}); 
} 

function playNote(buffer, noteTime) { 
var voice = context.createBufferSource(); 
voice.buffer = buffer; 

var currentLastNode = masterGainNode; 
if (lowPassFilterNode.active) { 
lowPassFilterNode.connect(currentLastNode); 
currentLastNode = lowPassFilterNode; 
} 
if (convolver.active) { 
convolver.buffer = reverbImpulseResponse.buffer; 
convolver.connect(currentLastNode); 
currentLastNode = convolver; 
} 

voice.connect(currentLastNode); 
voice.start(noteTime); 
} 

function schedule() { 
var currentTime = context.currentTime; 

// The sequence starts at startTime, so normalize currentTime so that it's 0 at the start of the sequence. 
currentTime -= startTime; 

while (noteTime < currentTime + 0.200) { 
    var contextPlayTime = noteTime + startTime; 
    var $currentPads = $(".column_" + rhythmIndex); 
    $currentPads.each(function() { 
    if ($(this).hasClass("selected")) { 
     var instrumentName = $(this).parents().data("instrument"); 
     switch (instrumentName) { 
     case "kick": 
     playNote(currentKit.kickBuffer, contextPlayTime); 
     break; 
     case "snare": 
     playNote(currentKit.snareBuffer, contextPlayTime); 
     break; 
     case "hihat": 
     playNote(currentKit.hihatBuffer, contextPlayTime); 
     break; 
     case "tomhi": 
     playNote(currentKit.tomhiBuffer, contextPlayTime); 
     break; 
     case "tommid": 
     playNote(currentKit.tommidBuffer, contextPlayTime); 
     break; 
     case "tomlow": 
     playNote(currentKit.tomlowBuffer, contextPlayTime); 
     break; 
     case "cl": 
     playNote(currentKit.clBuffer, contextPlayTime); 
     break; 
     case "cb": 
     playNote(currentKit.cbBuffer, contextPlayTime); 
     break; 
    case "cp": 
     playNote(currentKit.cpBuffer, contextPlayTime); 
     break; 
    case "cy": 
     playNote(currentKit.cyBuffer, contextPlayTime); 
     break; 
    case "rs": 
     playNote(currentKit.rsBuffer, contextPlayTime); 
     break; 

    } 
     //play the buffer 
     //store a data element in the row that tells you what instrument 
    } 
    }); 
    if (noteTime != lastDrawTime) {
        lastDrawTime = noteTime;
        drawPlayhead(rhythmIndex);
    }
    advanceNote();
}

timeoutId = requestAnimationFrame(schedule);
}

function drawPlayhead(xindex) {
var lastIndex = (xindex + LOOP_LENGTH - 1) % LOOP_LENGTH;

//can change this to class selector to select a column
var $newRows = $('.column_' + xindex);
var $oldRows = $('.column_' + lastIndex);

$newRows.addClass("playing");
$oldRows.removeClass("playing");
}

function advanceNote() {
// Advance time by a 16th note...
// var secondsPerBeat = 60.0/theBeat.tempo;
//TODO CHANGE TEMPO HERE, convert to float 
tempo = Number($("#tempo-input").val()); 
var secondsPerBeat = 60.0/tempo; 
rhythmIndex++; 
if (rhythmIndex == LOOP_LENGTH) { 
    rhythmIndex = 0; 
} 

//0.25 because each square is a 16th note 
noteTime += 0.25 * secondsPerBeat 
// if (rhythmIndex % 2) { 
//  noteTime += (0.25 + kMaxSwing * theBeat.swingFactor) * secondsPerBeat; 
// } else { 
//  noteTime += (0.25 - kMaxSwing * theBeat.swingFactor) * secondsPerBeat; 
// } 

} 

function handlePlay(event) { 
rhythmIndex = 0; 
noteTime = 0.0; 
startTime = context.currentTime + 0.005; 
schedule(); 
} 

function handleStop(event) { 
cancelAnimationFrame(timeoutId); 
$(".pad").removeClass("playing"); 
} 

function initializeTempo() { 
$("#tempo-input").val(tempo); 
} 

function changeTempoListener() { 
$("#increase-tempo").click(function() { 
if (tempo < TEMPO_MAX) { 
    tempo += TEMPO_STEP; 
    $("#tempo-input").val(tempo); 
} 
}); 

$("#decrease-tempo").click(function() { 
if (tempo > TEMPO_MIN) { 
    tempo -= TEMPO_STEP; 
    $("#tempo-input").val(tempo); 
} 
}); 
} 



function __log(e, data) { 
log.innerHTML += "\n" + e + " " + (data || ''); 
} 

var audio_context; 
var recorder; 

function startUserMedia() { 
var input = finalMixNode; 
__log('Media stream created.'); 
input.start(); 
__log('Input connected to audio context destination.'); 

recorder = new Recorder(input); 
__log('Recorder initialised.'); 
} 

function startRecording(button) { 
recorder && recorder.record(); 
button.disabled = true; 
button.nextElementSibling.disabled = false; 
__log('Recording...'); 
} 

function stopRecording(button) {
recorder && recorder.stop();
button.disabled = true;
button.previousElementSibling.disabled = false;
__log('Stopped recording.');

// create WAV download link using audio data blob
createDownloadLink();

recorder.clear();
}

function createDownloadLink() { 
recorder && recorder.exportWAV(function(blob) { 
    var url = URL.createObjectURL(blob); 
    var li = document.createElement('li'); 
    var au = document.createElement('audio'); 
    var hf = document.createElement('a'); 

    au.controls = true; 
    au.src = url; 
    hf.href = url; 
    hf.download = new Date().toISOString() + '.wav'; 
    hf.innerHTML = hf.download; 
    li.appendChild(au); 
    li.appendChild(hf); 
    recordingslist.appendChild(li); 
}); 
} 

window.onload = function init() { 
try { 
    // webkit shim 
window.AudioContext = window.AudioContext || window.webkitAudioContext; 
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia; 
    window.URL = window.URL || window.webkitURL; 

// audio_context = new AudioContext; 
    __log('Audio context set up.'); 
} catch (e) { 
    alert('No web audio support in this browser!'); 
} 

startUserMedia(); 

}; 

Answer


finalMixNode is scoped to the initializeAudioNodes() function, so it is undefined when you reference it from startUserMedia().

Also, this variable ends up being either a DynamicsCompressorNode or the AudioContext's destination. Recorder.js needs a node that has an output (which an AudioDestinationNode currently does not have), so make sure to pass it the final compressor node (or the final gain node).

In my test, constructing your recorder like this from your page's console does work:
var recorder = new Recorder(compressor);

Thanks to @padenot for pointing out that this is being discussed here.
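To make this work from the code itself rather than the console, a minimal sketch could look like the following (assuming the usual Recorder.js API: new Recorder(node), record(), stop(), exportWAV(callback)):

// Sketch only: keep finalMixNode in a scope the recording code can see,
// and make sure it is a node with an output (the compressor or masterGainNode),
// never context.destination.
var finalMixNode; // moved out of initializeAudioNodes()

function initializeAudioNodes() {
    context = new webkitAudioContext();
    compressor = context.createDynamicsCompressor();
    compressor.connect(context.destination);
    finalMixNode = compressor; // a node Recorder.js can tap
    // ... rest of the node setup unchanged ...
}

function startUserMedia() {
    recorder = new Recorder(finalMixNode); // or new Recorder(masterGainNode)
    __log('Recorder initialised.');
}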


Note that AudioDestinationNode may well get an output in the future, so I would be a bit more flexible here. – padenot


@padenot Good to know, thanks. Do you have a link to the spec draft? – Kaiido


Absolutely: https://github.com/WebAudio/web-audio-api/issues/349 – padenot