3

I need to save microphone input to use later in an AudioElement. I do this to get microphone input:

window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
  // what should go here?
});

What should I do to save the audio?

Günter Zöchbauer
  • 623,577
  • 216
  • 2,003
  • 1,567
Nawaf Alsulami
  • 699
  • 1
  • 8
  • 15

2 Answers2

4

There are many horrible, stupid examples out there where you are only able to play the current audio recording in the current browser window. Is there ever a use case for this? For video I can imagine that one wants to build a Skype-like application and have a preview window to see if you look stupid on the video, but audio ...

I found one good post though: From microphone to .WAV with: getUserMedia and Web Audio

I have ported a part of the code in the linked article that shows how to get hold of the data.

import 'dart:html';
import 'dart:async';
import 'dart:web_audio';

/// Demonstrates how to tap microphone (and camera) input through the Web
/// Audio API so raw PCM sample frames become available to Dart code.
void main() {
  window.navigator.getUserMedia(video: true, audio: true).then((MediaStream stream) {
  // Build the audio graph:
  // stream -> source node -> gain node -> script processor -> destination.
  var context = new AudioContext();
  GainNode volume = context.createGain();
  MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream);
  audioInput.connectNode(volume);

  // Number of sample frames delivered per onAudioProcess callback,
  // with 2 input and 2 output channels.
  int bufferSize = 2048;
  ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2);

  // Fired once per bufferSize frames while the node stays connected.
  recorder.onAudioProcess.listen((AudioProcessingEvent e) {
    print('recording');
    // Raw Float32 sample data for channel 0 (left) and channel 1 (right).
    var left = e.inputBuffer.getChannelData(0);
    var right = e.inputBuffer.getChannelData(1);
    print(left);
    // process Data
  });

  // The processor only receives audio while it is wired into a chain that
  // ends at the context destination.
  volume.connectNode(recorder);
  recorder.connectNode(context.destination);


/**
 * [How to get a file or blob from an object URL?](http://stackoverflow.com/questions/11876175)
 * [Convert blob URL to normal URL](http://stackoverflow.com/questions/14952052/convert-blob-url-to-normal-url)
 *  Doesn't work as it seems blob urls are not supported in Dart
 */
//    String url = Url.createObjectUrlFromStream(stream);
//    var xhr = new HttpRequest();
//    xhr.responseType = 'blob';
//    xhr.onLoad.listen((ProgressEvent e) {
//      print(xhr.response);
//      var recoveredBlog = xhr.response;
//      var reader = new FileReader();
//
//      reader.onLoad.listen((e) {
//        var blobAsDataUrl = reader.result;
//        reader.readAsDataUrl(blobAsDataUrl);
//      });
//    });
//    xhr.open('GET', url);
//    xhr.send();


/**
 * only for testing purposes
 **/
//    var audio = document.querySelector('audio') as AudioElement;
//    audio.controls = true;
//    audio.src = url;
  });
}
Günter Zöchbauer
  • 623,577
  • 216
  • 2,003
  • 1,567
  • 1
    Thank you for sharing the above link and part of the solution. The complete solution needs to take care of processing raw data and producing an actual sound file similar to the linked page. I'm working on translating the JS solution into Dart. I'll post updates about the outcome soon. – Nawaf Alsulami Jan 25 '14 at 21:39
  • 1
    [This](https://github.com/nawafnaim/dart_examples/blob/master/save_mic_sound_to_file.dart) is my best attempt at rewriting the JS solution in Dart. It produces a wav file from raw data but the beginning of the file has two malformed lines corrupting the file (as far as I can tell from comparing it with a working wav file). – Nawaf Alsulami Jan 25 '14 at 23:19
  • I've successfully managed to rewrite the JS solution you referenced. See my answer for the complete code. Thanks. – Nawaf Alsulami Jan 26 '14 at 13:40
  • @NawafAlsulami I have the impression you think my answer is not good enough to be accepted? – Günter Zöchbauer Jan 26 '14 at 14:05
  • I've just marked your answer as accepted. I give you credit for pointing me to the right direction and helping me get unstuck. Thank you. – Nawaf Alsulami Jan 26 '14 at 14:34
  • @NawafAlsulami Thanks. Your question is way too broad for StackOverflow. (BTW you didn't even say how you would like the audio data to be saved). Usually such questions are not answered but just closed as 'too broad'. StackOverflow is no code generation site. I appreciate it that you posted the full example anyway. Some references: http://meta.stackexchange.com/questions/143352 (look for "Can you give me some code"), http://meta.stackexchange.com/questions/52101 – Günter Zöchbauer Jan 26 '14 at 14:54
1

Thanks to Günter Zöchbauer for pointing to this JS solution. I have rewritten the code in Dart and it works.

import 'dart:html';
import 'dart:async';
import 'dart:web_audio';
import 'dart:typed_data';

// Whether the ScriptProcessorNode callback should keep capturing samples.
bool recording;
// Captured left-channel chunks, one Float32List per audio callback.
List leftchannel;
// Captured right-channel chunks, one Float32List per audio callback.
List rightchannel;
// Total number of sample frames captured per channel so far.
int recordingLength;
// Sample rate (Hz) written into the WAV header — assumed to be 44100;
// TODO(review): confirm it matches AudioContext.sampleRate on the machine.
int sampleRate;

/// Records microphone input through a ScriptProcessorNode and, when the
/// user clicks "Stop", assembles the captured samples into a 16-bit
/// stereo PCM WAV file exposed as a download link.
void main() {

  leftchannel = [];
  rightchannel = [];
  recordingLength = 0;
  sampleRate = 44100;
  recording = true;

  // add stop button
  ButtonElement stopBtn = new ButtonElement()
    ..text = 'Stop'
    ..onClick.listen((_) { 

      // stop recording
      recording = false;

      // we flat the left and right channels down
      var leftBuffer = mergeBuffers ( leftchannel, recordingLength );
      var rightBuffer = mergeBuffers ( rightchannel, recordingLength );
      // we interleave both channels together
      var interleaved = interleave( leftBuffer, rightBuffer );

      // we create our wav file
      // 44 bytes of RIFF/WAVE header followed by 2 bytes per 16-bit sample.
      var buffer = new Uint8List(44 + interleaved.length * 2);
      ByteData view = new ByteData.view(buffer);

      // RIFF chunk descriptor
      writeUTFBytes(view, 0, 'RIFF');
      view.setUint32(4, 44 + interleaved.length * 2, Endianness.LITTLE_ENDIAN);
      writeUTFBytes(view, 8, 'WAVE');

      // FMT sub-chunk
      writeUTFBytes(view, 12, 'fmt ');
      // Sub-chunk size (16) and audio format 1 = uncompressed PCM.
      view.setUint32(16, 16, Endianness.LITTLE_ENDIAN);
      view.setUint16(20, 1, Endianness.LITTLE_ENDIAN);

      // stereo (2 channels)
      view.setUint16(22, 2, Endianness.LITTLE_ENDIAN);
      view.setUint32(24, sampleRate, Endianness.LITTLE_ENDIAN);
      // Byte rate = sampleRate * block align (4 = 2 channels * 2 bytes).
      view.setUint32(28, sampleRate * 4, Endianness.LITTLE_ENDIAN);
      view.setUint16(32, 4, Endianness.LITTLE_ENDIAN);
      // Bits per sample.
      view.setUint16(34, 16, Endianness.LITTLE_ENDIAN);

      // data sub-chunk
      writeUTFBytes(view, 36, 'data');
      view.setUint32(40, interleaved.length * 2, Endianness.LITTLE_ENDIAN);

      // write the PCM samples
      // Scale each float sample (nominally -1.0..1.0) to signed 16-bit PCM.
      var lng = interleaved.length;
      var index = 44;
      var volume = 1;
      for (var i = 0; i < lng; i++){
        view.setInt16(index, (interleaved[i] * (0x7FFF * volume)).truncate(), Endianness.LITTLE_ENDIAN);
        index += 2;
      }

      // our final binary blob
      // NOTE(review): the JS original passes the raw buffer to the Blob
      // constructor; passing the ByteData view here is presumably
      // equivalent in dart:html — verify if the file ever comes out
      // corrupted (see the comment thread about malformed header bytes).
      var blob = new Blob ( [ view ] , 'audio/wav'  );

      // let's save it locally
      String url = Url.createObjectUrlFromBlob(blob);
      AnchorElement link = new AnchorElement()
      ..href = url
      ..text = 'download'
      ..download = 'output.wav';
      document.body.append(link);

    });

  document.body.append(stopBtn);

  window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
    // Audio graph: stream -> source -> gain -> script processor -> output.
    var context = new AudioContext();
    GainNode volume = context.createGain();
    MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream);
    audioInput.connectNode(volume);

    // Sample frames delivered per callback (2 in / 2 out channels).
    int bufferSize = 2048;
    ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2);

    recorder.onAudioProcess.listen((AudioProcessingEvent e) {
      if (!recording) return;
      print('recording');
      var left = e.inputBuffer.getChannelData(0);
      var right = e.inputBuffer.getChannelData(1);
      print(left);

      // process Data
      // Copy the channel data: the engine reuses the underlying buffers
      // between callbacks, so the lists must be snapshotted here.
      leftchannel.add(new Float32List.fromList(left));
      rightchannel.add(new Float32List.fromList(right));
      recordingLength += bufferSize;

    });

    // The processor only fires while connected through to the destination.
    volume.connectNode(recorder);
    recorder.connectNode(context.destination);

  });

}

/// Writes the code units of [string] into [view] as single bytes,
/// starting at byte [offset].
///
/// Used to emit the ASCII chunk tags ('RIFF', 'WAVE', 'fmt ', 'data')
/// of the WAV header.
void writeUTFBytes(ByteData view, offset, String string){ 
  var position = 0;
  for (var unit in string.codeUnits) {
    view.setUint8(offset + position, unit);
    position++;
  }
}

/// Interleaves [leftChannel] and [rightChannel] into a single
/// [Float32List] laid out as L0, R0, L1, R1, ...
///
/// Both channels are expected to have the same length; the result is
/// twice as long as either input.
Float32List interleave(leftChannel, rightChannel){
  var total = leftChannel.length + rightChannel.length;
  var interleaved = new Float32List(total);

  var writeIndex = 0;
  var readIndex = 0;
  while (writeIndex < total) {
    interleaved[writeIndex] = leftChannel[readIndex];
    interleaved[writeIndex + 1] = rightChannel[readIndex];
    writeIndex += 2;
    readIndex++;
  }
  return interleaved;
}

/// Flattens [channelBuffer] — a list of per-callback sample chunks —
/// into one flat list of samples.
///
/// [recordingLength] is the total sample count accumulated while
/// recording. It is retained for signature compatibility with the
/// ported JS code but is not needed here, because [List.addAll] grows
/// the result dynamically (the JS original preallocated a
/// Float32Array of that length).
List mergeBuffers(channelBuffer, recordingLength){
  // Fixed: dropped the unused `offset` local and the manual index loop
  // in favor of an idiomatic for-in over the chunks.
  List result = [];
  for (var buffer in channelBuffer) {
    result.addAll(buffer);
  }
  return result;
}

You can pull the code from github here.

Nawaf Alsulami
  • 699
  • 1
  • 8
  • 15