
I'm having a problem where I transcode an audio file and send the audio buffer over socket.io to the client to be played through Web Audio. My connections die off quickly, and the crash in my implementation happens in Web Audio at the line that sets the channel data:

source.buffer.getChannelData(0).set(audio); 

I'm assuming this isn't a Socket.IO problem, and that I'm only seeing Socket.IO errors as a side effect of the real problem. On the server side, I pipe the audio file into ffmpeg's stdin and watch ffmpeg's stderr to determine when it is safe to send the buffer. The client receives the buffer and everything works up until the line shown above. Below is some sample test code that reproduces the issue.

Server side:

var express = require('express'); 
var http = require('http'); 
var spawn = require('child_process').spawn; 

var app  = express(); 
var webServer = http.createServer(app); 
var io  = require('socket.io').listen(webServer, {log: false}); 

app.use(express.static(__dirname + '/public')); 

app.get('/', function(req, res){ 
    res.send(
    "<script src='/socket.io/socket.io.js'></script>\n"+ 
    "<script>var socket=io.connect('http://127.0.0.1:3000');</script>\n"+ 
    "<script src='/webaudio_file_cli.js'></script>" 
    ); 
}); 
webServer.listen(3000); 

io.sockets.on('connection', function(webSocket) {

    var disconnect = '0';
    var count = 0;
    var audBuf = new Buffer([]);

    if (disconnect == '0') {
        console.log('new connection...');

        var inputStream = spawn('wget', ['-O','-','http://www.noiseaddicts.com/samples/4353.mp3']);

        var ffmpeg = spawn('ffmpeg', [
            '-i', 'pipe:0',         // Input on stdin
            '-acodec', 'pcm_s16le', // PCM 16 bits, little-endian
            '-ar', '24000',         // Sampling rate
            '-ac', 1,               // Mono
            '-f', 'wav',
            'pipe:1'                // Output on stdout
        ], {stdio: ['pipe','pipe','pipe']});

        inputStream.stdout.pipe(ffmpeg.stdin);

        ffmpeg.stdout.on('data', function(data) {
            audBuf = Buffer.concat([audBuf, data]);
        });

        ffmpeg.stderr.on('data', function(data) {
            var _line = data.toString('utf8');
            if (_line.substring(0,5) == 'size=' && _line.indexOf('headers:') > -1) {
                console.log('completed...');
                webSocket.emit('audio', audBuf);
            }
        });
    }

    webSocket.on('disconnect', function() {
        console.log('disconnecting...');
        disconnect = 1;
    });
});
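Detecting completion by parsing ffmpeg's stderr progress line is admittedly fragile, since the output format varies between builds. A more conventional signal would be the child process's close event, sketched below with the same variables as above (this is not what the repro uses):

// Alternative completion signal: fires once ffmpeg's stdio streams
// have closed and the process has exited.
ffmpeg.on('close', function(code) {
    if (code === 0) {
        console.log('completed...');
        webSocket.emit('audio', audBuf);
    }
});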

Client side (webaudio_file_cli.js):

window.AudioContext = window.AudioContext || window.webkitAudioContext; 
var context = new AudioContext(); 
var source = context.createBufferSource(); 

var audioStack = [], audio = []; 

socket.on('audio', function(data) { 
    playAudio(data); 
}); 

function playAudio(data) { 
    // playback starting... 
    audioStack = Int16Array(data); 
    for (var i = 0; i < audioStack.length; i++) { 
     audio[i] = (audioStack[i]>0)?audioStack[i]/32767:audioStack[i]/32768; // convert buffer to within the range -1.0 -> +1.0 
    } 

    var audioBuffer = context.createBuffer(1, audio.length, 24000); 
    source.buffer.getChannelData(0).set(audio); 
    source.buffer = audioBuffer; 
    source.connect(context.destination); 
    source.start(0); 
} 

Answer


In the example, you're accessing source.buffer.getChannelData before you've set source.buffer = audioBuffer. Flip the order of those two lines, maybe?
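A minimal sketch of that reordering, keeping the question's variable names (untested against the asker's setup):

source.buffer = audioBuffer;                // assign the buffer first...
source.buffer.getChannelData(0).set(audio); // ...then write the samples into it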


It actually ended up being a two-part problem: the one you mention, plus audioStack = Int16Array(data); needed to be audioStack = new Int16Array(data);. It's odd, because I already had both of these set that way with FF29 and it worked fine. – 2014-11-06 17:37:37
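Putting both fixes together, playAudio would look roughly like this (a sketch using the question's variable names and sample rate, not a verified drop-in):

function playAudio(data) {
    // In most engines, typed-array constructors like Int16Array must be
    // invoked with `new`; calling them as plain functions throws.
    audioStack = new Int16Array(data);
    for (var i = 0; i < audioStack.length; i++) {
        // Scale 16-bit PCM samples into the -1.0 .. +1.0 range Web Audio expects.
        audio[i] = (audioStack[i] > 0) ? audioStack[i]/32767 : audioStack[i]/32768;
    }

    var audioBuffer = context.createBuffer(1, audio.length, 24000);
    source.buffer = audioBuffer;                // assign the buffer before...
    source.buffer.getChannelData(0).set(audio); // ...writing into its channel data
    source.connect(context.destination);
    source.start(0);
}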