I am developing a web application in which I want to use the recorder.js file to implement audio recording. I have the code below, but I cannot get it to work inside the web2py framework. Can you explain in detail how to use it? There is a .bower.json file that gets hidden when the files are uploaded into web2py. How do we use it and link all the files so that recording works? In short: how to implement audio recording with recorder.js in web2py.
This is my .bower.json file:
{
  "name": "Recorderjs",
  "homepage": "https://github.com/mattdiamond/Recorderjs",
  "_release": "f814ac7b3f",
  "_resolution": {
    "type": "branch",
    "branch": "master",
    "commit": "f814ac7b3f4ed4f62729860d5a02720f167480b3"
  },
  "_source": "git://github.com/mattdiamond/Recorderjs.git",
  "_target": "*",
  "_originalSource": "Recorderjs",
  "_direct": true
}
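As far as I understand, .bower.json is only Bower's install metadata, so it is not needed at runtime and can simply be ignored; web2py serves client files that live under an application's static/ folder. One possible layout (the application name myapp is only an example) would be to copy the two library files and app.js there:

applications/myapp/static/recorderjs/recorder.js
applications/myapp/static/recorderjs/recorderWorker.js
applications/myapp/static/js/app.js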
This is my HTML code:
<body>
  <script src="bower_components/Recorderjs/recorder.js"></script>
  <script src="app.js"></script>
  <button onclick="record()">Record</button>
  <button onclick="stop()">Stop</button>
</body>
These are all my JavaScript files: /app.js
var navigator = window.navigator;
var Context = window.AudioContext || window.webkitAudioContext;
var context = new Context();

// audio
var mediaStream;
var rec;

// prefixed getUserMedia fallbacks for older browsers
navigator.getUserMedia = (
  navigator.getUserMedia ||
  navigator.webkitGetUserMedia ||
  navigator.mozGetUserMedia ||
  navigator.msGetUserMedia
);

function record() {
  navigator.getUserMedia({audio: true}, function(localMediaStream){
    mediaStream = localMediaStream;
    // wrap the microphone stream in a Web Audio source node and hand it to Recorder.js
    var mediaStreamSource = context.createMediaStreamSource(localMediaStream);
    rec = new Recorder(mediaStreamSource, {
      workerPath: '/bower_components/Recorderjs/recorderWorker.js'
    });
    rec.record();
  }, function(err){
    console.log('Not supported');
  });
}

function stop() {
  // legacy MediaStream.stop(); newer browsers stop the individual tracks instead
  mediaStream.stop();
  rec.stop();
  // build a WAV blob from the recorded buffers and trigger a client-side download
  rec.exportWAV(function(e){
    rec.clear();
    Recorder.forceDownload(e, "test.wav");
  });
}
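Note that the browser resolves the Worker URL relative to the page, not relative to recorder.js, so under web2py the workerPath in app.js would also have to point at the static copy. A sketch, assuming the example layout above and an application named myapp:

rec = new Recorder(mediaStreamSource, {
  // absolute URL served by web2py's static file handler (the app name is an assumption)
  workerPath: '/myapp/static/recorderjs/recorderWorker.js'
});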
/recorder.js
(function(window){

  var WORKER_PATH = 'recorderWorker.js';

  var Recorder = function(source, cfg){
    var config = cfg || {};
    var bufferLen = config.bufferLen || 4096;
    this.context = source.context;
    this.node = (this.context.createScriptProcessor ||
                 this.context.createJavaScriptNode).call(this.context,
                 bufferLen, 2, 2);
    var worker = new Worker(config.workerPath || WORKER_PATH);
    worker.postMessage({
      command: 'init',
      config: {
        sampleRate: this.context.sampleRate
      }
    });
    var recording = false,
        currCallback;

    this.node.onaudioprocess = function(e){
      if (!recording) return;
      worker.postMessage({
        command: 'record',
        buffer: [
          e.inputBuffer.getChannelData(0),
          e.inputBuffer.getChannelData(1)
        ]
      });
    }

    this.configure = function(cfg){
      for (var prop in cfg){
        if (cfg.hasOwnProperty(prop)){
          config[prop] = cfg[prop];
        }
      }
    }

    this.record = function(){
      recording = true;
    }

    this.stop = function(){
      recording = false;
    }

    this.clear = function(){
      worker.postMessage({ command: 'clear' });
    }

    this.getBuffer = function(cb) {
      currCallback = cb || config.callback;
      worker.postMessage({ command: 'getBuffer' })
    }

    // The listing above was cut off here; the remaining methods below are
    // restored from the upstream Recorderjs source so that exportWAV and
    // forceDownload, which app.js relies on, are present.
    this.exportWAV = function(cb, type){
      currCallback = cb || config.callback;
      type = type || config.type || 'audio/wav';
      if (!currCallback) throw new Error('Callback not set');
      worker.postMessage({
        command: 'exportWAV',
        type: type
      });
    }

    worker.onmessage = function(e){
      var blob = e.data;
      currCallback(blob);
    }

    source.connect(this.node);
    this.node.connect(this.context.destination);
  };

  Recorder.forceDownload = function(blob, filename){
    var url = (window.URL || window.webkitURL).createObjectURL(blob);
    var link = window.document.createElement('a');
    link.href = url;
    link.download = filename || 'output.wav';
    var click = document.createEvent("Event");
    click.initEvent("click", true, true);
    link.dispatchEvent(click);
  }

  window.Recorder = Recorder;

})(window);
recorderWorker.js:
var recLength = 0,
    recBuffersL = [],
    recBuffersR = [],
    sampleRate;

this.onmessage = function(e){
  switch(e.data.command){
    case 'init':
      init(e.data.config);
      break;
    case 'record':
      record(e.data.buffer);
      break;
    case 'exportWAV':
      exportWAV(e.data.type);
      break;
    case 'getBuffer':
      getBuffer();
      break;
    case 'clear':
      clear();
      break;
  }
};

function init(config){
  sampleRate = config.sampleRate;
}

function record(inputBuffer){
  recBuffersL.push(inputBuffer[0]);
  recBuffersR.push(inputBuffer[1]);
  recLength += inputBuffer[0].length;
}

function exportWAV(type){
  var bufferL = mergeBuffers(recBuffersL, recLength);
  var bufferR = mergeBuffers(recBuffersR, recLength);
  var interleaved = interleave(bufferL, bufferR);
  var dataview = encodeWAV(interleaved);
  var audioBlob = new Blob([dataview], { type: type });
  this.postMessage(audioBlob);
}

function getBuffer() {
  var buffers = [];
  buffers.push(mergeBuffers(recBuffersL, recLength));
  buffers.push(mergeBuffers(recBuffersR, recLength));
  this.postMessage(buffers);
}

function clear(){
  recLength = 0;
  recBuffersL = [];
  recBuffersR = [];
}

function mergeBuffers(recBuffers, recLength){
  var result = new Float32Array(recLength);
  var offset = 0;
  for (var i = 0; i < recBuffers.length; i++){
    result.set(recBuffers[i], offset);
    offset += recBuffers[i].length;
  }
  return result;
}

function interleave(inputL, inputR){
  var length = inputL.length + inputR.length;
  var result = new Float32Array(length);
  var index = 0,
      inputIndex = 0;
  while (index < length){
    result[index++] = inputL[inputIndex];
    result[index++] = inputR[inputIndex];
    inputIndex++;
  }
  return result;
}

function floatTo16BitPCM(output, offset, input){
  for (var i = 0; i < input.length; i++, offset+=2){
    var s = Math.max(-1, Math.min(1, input[i]));
    output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
  }
}

function writeString(view, offset, string){
  for (var i = 0; i < string.length; i++){
    view.setUint8(offset + i, string.charCodeAt(i));
  }
}

function encodeWAV(samples){
  var buffer = new ArrayBuffer(44 + samples.length * 2);
  var view = new DataView(buffer);
  /* RIFF identifier */
  writeString(view, 0, 'RIFF');
  /* file length */
  view.setUint32(4, 32 + samples.length * 2, true);
  /* RIFF type */
  writeString(view, 8, 'WAVE');
  /* format chunk identifier */
  writeString(view, 12, 'fmt ');
  /* format chunk length */
  view.setUint32(16, 16, true);
  /* sample format (raw) */
  view.setUint16(20, 1, true);
  /* channel count */
  view.setUint16(22, 2, true);
  /* sample rate */
  view.setUint32(24, sampleRate, true);
  /* byte rate (sample rate * block align) */
  view.setUint32(28, sampleRate * 4, true);
  /* block align (channel count * bytes per sample) */
  view.setUint16(32, 4, true);
  /* bits per sample */
  view.setUint16(34, 16, true);
  /* data chunk identifier */
  writeString(view, 36, 'data');
  /* data chunk length */
  view.setUint32(40, samples.length * 2, true);
  floatTo16BitPCM(view, 44, samples);
  return view;
}
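(As a quick sanity check on the header values: at the 44100 Hz sample rate typically reported by the AudioContext, the byte rate written above is 44100 * 4 = 176400 and the block align is 4, i.e. 2 channels * 2 bytes per 16-bit sample.)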
Do I have to change it to this in order to upload it in web2py? { "name": "Recorderjs", "homepage": "https://github.com/web2py/Recorderjs", "_release": "f814ac7b3f", "_resolution": { "type": "branch", "branch": "master", "commit": "f814ac7b3f4ed4f62729860d5a02720f167480b3" }, "_source": "git://github.com/mattdiamond/Recorderjs.git", "_target": "*", "_originalSource": "Recorderjs", "_direct": true } – Akriti 2014-11-10 12:51:51
I'm not sure. Give it a try and see what happens. In general, any JS library should work with web2py - you just have to make sure your script 'src' references the correct URL and that any other internal URLs are correct. – Anthony 2014-11-10 18:20:16
We cannot figure out the correct internal URLs in the scripts (app.js, the worker path in recorder.js) or what to do with the .bower.json file. – Akriti 2014-11-11 05:13:45