I have to send the audio stream from the microphone over a WebSocket in WAV format. While converting the audio buffer array to WAV, I get this error:
DOMException: Failed to execute 'decodeAudioData' on 'BaseAudioContext': Unable to decode audio data
I tried a ScriptProcessorNode for this, but it is deprecated, and I think an AudioWorklet is too much for a task as simple as sending a converted stream over a WebSocket.
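For context, once I have a WAV (or even raw PCM) chunk as an ArrayBuffer, the sending side I have in mind is roughly the sketch below; the URL is only a placeholder, not my real endpoint.

const socket = new WebSocket("wss://example.com/audio"); // placeholder URL
socket.binaryType = "arraybuffer";

function sendChunk(arrayBuffer) {
  // A real implementation would queue chunks until the socket is open
  if (socket.readyState === WebSocket.OPEN) {
    socket.send(arrayBuffer); // WebSocket.send() accepts an ArrayBuffer directly
  }
}

The page where the error happens is below: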
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Document</title>
  </head>
  <body id="body">
    <button id="button">test</button>
    <script defer>
      const btn = document.getElementById("button");

      btn.addEventListener("click", () => {
        navigator.mediaDevices
          .getUserMedia({ audio: true })
          .then((stream) => {
            // Record the microphone stream and emit a chunk every 10 ms
            const recorder = new MediaRecorder(stream);
            recorder.start(10);

            recorder.addEventListener("dataavailable", (event) => {
              const streamBlob = new Blob([event.data]);
              streamBlob
                .arrayBuffer()
                .then((arrayBuffer) => {
                  // Decode the chunk so it can be converted to WAV later
                  const audioCtx = new AudioContext({ sampleRate: 16000 });
                  audioCtx
                    .decodeAudioData(arrayBuffer) // <-- this is where the DOMException is thrown
                    .then((buffer) => {
                      console.log(" .then wavBlob", buffer);
                    })
                    .catch((error) => {
                      console.log(" . error1", error);
                    });
                })
                .catch((error) => {
                  console.log(" . error2", error);
                });
            });
          })
          .catch((e) => {
            alert("Error capturing audio.");
          });
      });
    </script>
  </body>
</html>
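For clarity, by "convert to WAV" I mean eventually writing the decoded samples out as 16-bit mono PCM with a RIFF header, roughly like the untested sketch below (the helper name audioBufferToWav is mine):

// Rough sketch of the AudioBuffer -> WAV step I am aiming for (mono, 16-bit PCM).
function audioBufferToWav(audioBuffer) {
  const samples = audioBuffer.getChannelData(0); // Float32Array, first channel only
  const sampleRate = audioBuffer.sampleRate;
  const buffer = new ArrayBuffer(44 + samples.length * 2);
  const view = new DataView(buffer);

  const writeString = (offset, str) => {
    for (let i = 0; i < str.length; i++) view.setUint8(offset + i, str.charCodeAt(i));
  };

  // RIFF/WAVE header for 16-bit mono PCM
  writeString(0, "RIFF");
  view.setUint32(4, 36 + samples.length * 2, true); // RIFF chunk size
  writeString(8, "WAVE");
  writeString(12, "fmt ");
  view.setUint32(16, 16, true);             // fmt chunk size
  view.setUint16(20, 1, true);              // PCM format
  view.setUint16(22, 1, true);              // mono
  view.setUint32(24, sampleRate, true);     // sample rate
  view.setUint32(28, sampleRate * 2, true); // byte rate
  view.setUint16(32, 2, true);              // block align
  view.setUint16(34, 16, true);             // bits per sample
  writeString(36, "data");
  view.setUint32(40, samples.length * 2, true); // data chunk size

  // Convert float samples in [-1, 1] to 16-bit signed integers
  let offset = 44;
  for (let i = 0; i < samples.length; i++) {
    const s = Math.max(-1, Math.min(1, samples[i]));
    view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
    offset += 2;
  }
  return buffer;
}

I never get that far, though, because decodeAudioData rejects first.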