I have this JavaScript code that I am using to capture audio input from the user when they click a microphone button. The code works in Mozilla Firefox, but in Google Chrome it does not work and shows this warning/error in the console: "The AudioContext was not allowed to start. It must be resumed (or created) after a user gesture on the page."

var r = function() {
            var e = {}
              , t = void 0
              , n = getBotConfig()
              , r = new Audio("data:audio/wav;base64,")
              , o = !1;
            if (!n.isIE()) {
                window.AudioContext = window.AudioContext || window.webkitAudioContext;
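                // the AudioContext is created here, at page load, before any user gesture -- this is the line Chrome warns about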
                var i = new AudioContext;
                e.toggleRecording = function(e, t, n, r, s, a, c) {
                    e.classList.contains("recording") ? (e.classList.remove("recording"),
                    o = !1,
                    t.emit("end-recording", {
                        session_id: a,
                        bot_id: c
                    }),
                    document.getElementById("btnToggle").setAttribute("style", "background-color:transparent"),
                    document.getElementsByClassName("fa-microphone")[0] && document.getElementsByClassName("fa-microphone")[0].setAttribute("style", "color:" + s)) : (e.classList.add("recording"),
                    o = !0,
                    t.emit("start-recording", {
                        numChannels: 1,
                        bps: 16,
                        fps: parseInt(i.sampleRate),
                        session_id: a,
                        bot_id: c
                    }),
                    document.getElementById("btnToggle").setAttribute("style", "background-color:" + n),
                    document.getElementsByClassName("fa-microphone")[0] && document.getElementsByClassName("fa-microphone")[0].setAttribute("style", "color:" + r))
                }
                ,
                e.onAudioTTS = function(e) {
                    try {
                        r.pause(),
                        c(e)
                    } catch (t) {
                        c(e)
                    }
                }
                ,
                e.initAudio = function(e, n, r) {
                    console.log("audio initiated"),
                    t = e,
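                    // fall back to vendor-prefixed versions of the legacy getUserMedia / animation-frame APIs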
                    navigator.getUserMedia || (navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia),
                    navigator.cancelAnimationFrame || (navigator.cancelAnimationFrame = navigator.webkitCancelAnimationFrame || navigator.mozCancelAnimationFrame),
                    navigator.requestAnimationFrame || (navigator.requestAnimationFrame = navigator.webkitRequestAnimationFrame || navigator.mozRequestAnimationFrame),
                    navigator.getUserMedia({
                        audio: !0
                    }, a, function(e) {
                        alert("Error getting audio"),
                        console.log(e)
                    })
                }
                ;
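                // s: routes channel 0 of the source to both channels of a merger (mono in, stereo out)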
                var s = function(e) {
                    var t = i.createChannelSplitter(2)
                      , n = i.createChannelMerger(2);
                    return e.connect(t),
                    t.connect(n, 0, 0),
                    t.connect(n, 0, 1),
                    n
                }
                  , a = function(e) {
                    var n = i.createGain()
                      , r = i.createMediaStreamSource(e)
                      , a = r;
                    a = s(a),
                    a.connect(n);
                    var c = (i.createScriptProcessor || i.createJavaScriptNode).call(i, 1024, 1, 1);
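                    // while recording, clamp each sample to [-1, 1], convert to little-endian 16-bit PCM and emit it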
                    c.onaudioprocess = function(e) {
                        if (o) {
                            for (var n = e.inputBuffer.getChannelData(0), r = new ArrayBuffer(2 * n.length), i = new DataView(r), s = 0, a = 0; s < n.length; s++,
                            a += 2) {
                                var c = Math.max(-1, Math.min(1, n[s]));
                                i.setInt16(a, c < 0 ? 32768 * c : 32767 * c, !0)
                            }
                            t.emit("write-audio", r)
                        }
                    }
                    ,
                    n.connect(c),
                    c.connect(i.destination);
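                    // also route the gain node through a muted (zero-gain) node to the destination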
                    var u = i.createGain();
                    u.gain.value = 0,
                    n.connect(u),
                    u.connect(i.destination)
                }
                  , c = function(e) {
                    r.src = "data:audio/wav;base64," + e,
                    r.play()
                };
                return e
            }
        };

The warning/error points at the line var i = new AudioContext;. It used to work in Google Chrome as well, but now it doesn't. The description on the Google Developers page says resume() must be used, but I am not sure how and where I should do this.

user2966197

4 Answers

You should be able to call resume() somewhere right before you call play(). The important thing is to call it within a user action/event, like the click on the microphone button.

Key Point: If an AudioContext is created prior to the document receiving a user gesture, it will be created in the "suspended" state, and you will need to call resume() after a user gesture is received.

from https://developers.google.com/web/updates/2017/09/autoplay-policy-changes

It used to work in Google Chrome as well, but now it doesn't.

The new policy was recently enforced in a Chrome update.
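
Applied to the code in the question, that means resuming the context from inside the click handler of the microphone button, before recording starts or play() is called. A minimal sketch (assuming ctx is the AudioContext created at load time and btnToggle is the button from the question):

var ctx = new AudioContext(); // created before any gesture, so Chrome starts it "suspended"

document.getElementById("btnToggle").addEventListener("click", function() {
    // resume() must run inside a user gesture handler; it returns a Promise
    if (ctx.state === "suspended") {
        ctx.resume().then(function() {
            console.log("AudioContext state:", ctx.state); // now "running"
            // safe to start recording or play audio from here
        });
    }
});

resume() resolves immediately on a context that is already running, so it is safe to call it on every click.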

st-h
  • Is this specific to just chrome? Will calling `resume()` have side effects for other browser engines? – kano Aug 12 '22 at 12:48

You should call getAudioContext().resume(); somewhere.

If your question is related to p5.js and accessing the microphone, do this in setup():

function setup() {
  mic = new p5.AudioIn();
  mic.start();
  getAudioContext().resume();
}

Or add a touchStarted function to the document. You have to click on the web page to trigger this function.

function touchStarted() {
  getAudioContext().resume();
}
Nishan

Came across this much later and just wanted to add something for posterity. The answers above are correct about using getAudioContext().resume(), but this error can also be avoided if the AudioContext is created inside the getUserMedia success handler, like so:

navigator.getUserMedia(constraints)
.then((stream) => {
    var audioContext = new AudioContext();
    // ... rest of code
})

In this way, the AudioContext will not be put into the suspended state.

jojeyh

Similar to @jojeyh, but their code threw an error for me. After reading the docs for getUserMedia(), I added .mediaDevices and set the constraints separately:

var constraints = { audio: true } // add video constraints if required

navigator.mediaDevices.getUserMedia(constraints)
.then((stream) => {
    var audioContext = new AudioContext();
    // ... rest of code
})
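
Note that navigator.mediaDevices.getUserMedia is only available in secure contexts (HTTPS or localhost), which is worth checking if it comes back undefined.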

This worked for me.

Robert Yeomans