1

In my following code ffmpeg is transcoding the input stream and is successfully sending the chunks to the client. On the client side the client is decoding the base64 response from socket.io and is converting the response to an array buffer. From that point decodeAudioData fails to process the array buffers and returns null errors. Does anyone know why decodeAudioData isn't working?

./webaudio_svr.js:

var express = require('/usr/local/lib/node_modules/express');
var http = require('http');
var spawn = require('child_process').spawn;
var util = require('util');
var fs = require('fs');

var app       = express();
var webServer = http.createServer(app);
var audServer = http.createServer(app);
var io        = require('/usr/local/lib/node_modules/socket.io').listen(webServer, {log: false, });

app.use(express.static(__dirname + '/public'));

app.get('/', function(req, res){
    res.send(
    "<script src='/socket.io/socket.io.js'></script>\n"+
    "<script>var socket=io.connect('http://127.0.0.1:3000');</script>\n"+
    "<script src='/webaudio_cli.js'></script>"
    );
});
webServer.listen(3000);

var inputStream = spawn('/usr/bin/wget', ['-O','-','http://nprdmp.ic.llnwd.net/stream/nprdmp_live01_mp3']);

var ffmpeg = spawn('ffmpeg', [
    '-i', 'pipe:0', // Input on stdin
    '-ar', '44100', // Sampling rate
    '-ac', 2, // Stereo
    '-f', 'mp3',
    'pipe:1' // Output on stdout
]);

io.sockets.on('connection', function(webSocket) {
    var disconnect = '0';

    if (disconnect == '0') {
        inputStream.stdout.pipe(ffmpeg.stdin);
        ffmpeg.stdout.on('data', function(data) {
            var data64 = data.toString('base64');
            webSocket.emit('stream',data64);
        });
    }

    webSocket.on('disconnect', function() {
        disconnect=1;
    });
});

./public/webaudio_cli.js:

function str2ab(str) {
    var buf = new ArrayBuffer(str.length*2); // 2 bytes for each char
    var bufView = new Uint16Array(buf);
    for (var i=0, strLen=str.length; i<strLen; i++) {
        bufView[i] = str.charCodeAt(i);
    }
    return buf;
}

window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var source = context.createBufferSource();

socket.on('stream', function(data) {
        var data=str2ab(atob(data));
        context.decodeAudioData(data, function(buffer) {
            source.connect(context.destination);
            source.buffer = buffer;
            source.start(0);
        }, function(err) {
            console.log("err(decodeAudioData): "+err);
        });
});
Brad.Smith
  • 1,071
  • 3
  • 14
  • 28
  • If your audio is just raw 16-bit PCM, I'm pretty sure `decodeAudioData` doesn't know how to deal with it. – Kevin Ennis Dec 03 '13 at 20:49
  • It's not raw PCM, ffmpeg is transcoding it to mp3. I actually want to use a speex encoded ogg stream from ffmpeg but all the posts I see on this subject deal with mp3s so I figured I would do the same to increase the chances of getting an answer. – Brad.Smith Dec 03 '13 at 21:03
  • var `source` is declared inside the `load` callback function. How does `decodeAudioData` have access to it? – levi Dec 03 '13 at 21:23
  • Ah, sorry. Missed the `-f` flag in your ffmpeg call. – Kevin Ennis Dec 03 '13 at 21:23
  • Sorry about that @levi. I updated the code for the client. – Brad.Smith Dec 04 '13 at 02:11
  • 1
    Just to narrow this down a bit, have you tried dumping your base64 data, decoding it, and playing it back with VLC or something? Let's narrow this down to a client-side issue, and then whether or not there is a codec support problem. – Brad Dec 04 '13 at 02:25
  • Have you seen - http://stackoverflow.com/questions/10365335/decodeaudiodata-returning-a-null-error – levi Dec 04 '13 at 03:16
  • I took a look at that @levi and tried adapting it to my code but I'm at a point now where I'm lost. I've added additional lines for debugging and it seems like there's something going on with the client converting the response to different data types. From what I can gather when str2ab() is ran it isn't storing the correct values to the array buffer. I'm assuming it has something to do with Uint16Array but I'm not sure. I uploaded the code to github. https://github.com/grkblood13/web-audio-stream – Brad.Smith Dec 04 '13 at 17:49
  • @Brad, not yet. I'll probably jump on that if this method levi suggested doesnt work out. – Brad.Smith Dec 04 '13 at 17:59
  • You might want to check out the `base64DecToArr` function on this MDN page: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Base64_encoding_and_decoding#Appendix.3A_Decode_a_Base64_string_to_Uint8Array_or_ArrayBuffer Looks like it's not as simple as just reading the `charCode` and dumping it into a typed array. – Kevin Ennis Dec 05 '13 at 15:57
  • Thanks @KevinEnnis but apparently that function isn't built into chrome so it wouldn't work for me. I have an update though I'll post in my next comment. – Brad.Smith Dec 05 '13 at 18:53
  • Ok, so good news and bad news. The good news is that I got audio! The bad news is that I ended up scrapping socket.io for binaryjs to get it to work and the audio is choppy as hell. This might be getting out of the realm of the original question because I'm not getting audio but does anyone know what is wrong with my client side code that's causing such the choppy playback? Here's the code: https://github.com/grkblood13/web-audio-stream/tree/master/1.0.2 – Brad.Smith Dec 05 '13 at 18:55
  • 1
    @Brad.Smith, it's not a built-in DOM method -- they actually have the function written out on the page. – Kevin Ennis Dec 05 '13 at 19:34
  • Your problem now is just that `setTimeout` isn't nearly precise enough for scheduling seamless audio playback. You might want to check out Chris Wilson's excellent article here: http://www.html5rocks.com/en/tutorials/audio/scheduling/ – Kevin Ennis Dec 05 '13 at 19:37
  • I took your advice and looked at that @Kevin but its a little over my head atm and I'm not sure if it really pertains to realtime audio. How to you look ahead to something that hasn't even happened yet? Like I said though it's a bit over my head so I'm probably missing a lot of stuff. Regardless, I went back and restructured my playback device without setTimeout. What I've written seems pretty straightforward yet it still sounds like crap. Any reason why? http://www.fpaste.org/59524/38633522/ – Brad.Smith Dec 06 '13 at 13:10

1 Answers1

0

If you read the notes section of the original post you'll see that I ended up getting this working with binaryjs but with the help of Kevin I was able to get this to work with socket.io. Note there this is still a HUGE issue with choppy playback. If someone could lend some assistance to cleaning the audio up please do. This solution is really pointless unless the audio works as expected so I need to figure that out.

The issue has to do with how the browser encodes/decodes your base64 string. Until this is changed you must supply your own functions from https://developer.mozilla.org/en-US/docs/Web/JavaScript/Base64_encoding_and_decoding.

webaudio_svr.js:

var express = require('/usr/local/lib/node_modules/express');
var http = require('http');
var spawn = require('child_process').spawn;
var util = require('util');
var fs = require('fs');

var app       = express();
var webServer = http.createServer(app);
var io        = require('/usr/local/lib/node_modules/socket.io').listen(webServer, {log: false, });

app.use(express.static(__dirname + '/public'));

app.get('/', function(req, res){
    res.send(
    "<script src='/socket.io/socket.io.js'></script>\n"+
    "<script>var socket=io.connect('http://127.0.0.1:3000');</script>\n"+
    "<script src='/base64.js'></script>\n"+
    "<script src='/webaudio_cli.js'></script>"
    );
});
webServer.listen(3000);

var inputStream = spawn('/usr/bin/wget', ['-O','-','http://nprdmp.ic.llnwd.net/stream/nprdmp_live01_mp3']);

var ffmpeg = spawn('ffmpeg', [
    '-i', 'pipe:0', // Input on stdin
    '-ar', '44100', // Sampling rate
    '-ac', 2, // Stereo
    '-f', 'mp3',
    'pipe:1' // Output on stdout
]);

io.sockets.on('connection', function(webSocket) {
    var disconnect = '0';

    if (disconnect == '0') {
        inputStream.stdout.pipe(ffmpeg.stdin);
        ffmpeg.stdout.on('data', function(data) {
            var data64 = data.toString('base64');
            webSocket.emit('stream',data64);
        });
    }

    webSocket.on('disconnect', function() {
        disconnect=1;
    });
});

public/webaudio_cli.js:

window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var startTime = context.currentTime;

// buffer to arraybuffer
function toArrayBuffer(buffer) {
    var ab = new ArrayBuffer(buffer.length);
    var view = new Uint8Array(ab);
    for (var i = 0; i < buffer.length; ++i) {
    view[i] = buffer[i];
    }
    return ab;
}
socket.on('stream', function(data) {
        var data=toArrayBuffer(base64DecToArr(data));
        context.decodeAudioData(data, function(buffer) {
            playBuffer(buffer);
        }, function(err) {
            console.log("decodeAudioData err: "+err);
        });
});

function playBuffer(buf) {
    var source    = context.createBufferSource();
    source.buffer = buf;
    source.connect(context.destination);
    source.start(startTime);
    startTime = startTime+source.buffer.duration;
}

public/base64.js:

Brad.Smith
  • 1,071
  • 3
  • 14
  • 28
  • A few things: first, don't assign a value to `startTime` until you're ready to play the first buffer. As is, `startTime` will be in the past by the time you actually get your first chunk - which means all your timing will be off, and things will start to play immediately when they shouldn't. Second, you might want to store the buffers in an array and not start trying to play them until you have a few of them queued up. That'll give you some protection against network issues. – Kevin Ennis Dec 06 '13 at 15:27