Uncaught (in promise) DOMException: Unable to decode audio data - javascript

I'm having an issue with the decodeAudioData method of the Web Audio API when playing audio back in Chrome (it works fine in Firefox).
I record the audio with MediaRecorder, send it to the server over a WebSocket, and the server sends the recorded buffer back to the client.
Server side
var WebSocketServer = require('ws').Server; // "ws" package
var wss = new WebSocketServer({server: server});
wss.on('connection', function connection(ws) {
    ws.binaryType = "arraybuffer";
    ws.on('message', function incoming(message) {
        if (typeof message === 'string') {
            console.log("string message: ", message);
        } else {
            console.log("not string: ", message);
            ws.send(message); // echo the binary audio data back to the client
        }
    });
});
Client side
window.AudioContext = window.AudioContext||window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia);
var context = new AudioContext();
var mediaRecorder;
var chunks = [];
var startTime = 0;
ws = new WebSocket(url);
ws.binaryType = "arraybuffer";
ws.onmessage = function(message) {
if (message.data instanceof ArrayBuffer) {
context.decodeAudioData(message.data, function(soundBuffer){
playBuffer(soundBuffer);
},function(x) {
console.log("decoding failed", x)
});
} else {
console.log("not arrayBuffer", message.data);
}
};
createMediaRecorder();
function createMediaRecorder() {
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
var constraints = {
"audio": true
};
var onSuccess = function(stream) {
var options = {
audioBitsPerSecond : 128000,
mimeType : 'audio/webm;codecs=opus'
};
mediaRecorder = new MediaRecorder(stream, options);
};
var onError = function(err) {
console.log('The following error occured: ' + err);
};
navigator.getUserMedia(constraints, onSuccess, onError);
} else {
alert('getUserMedia not supported on your browser!');
}
}
function playBuffer(buf) {
var source = context.createBufferSource();
source.buffer = buf;
source.connect(context.destination);
if (startTime == 0)
startTime = context.currentTime + 0.1; // add 100 ms of latency to work well across systems
source.start(startTime);
startTime = startTime + source.buffer.duration;
}
function startRecording() {
mediaRecorder.start();
getRecordedData();
}
function getRecordedData() {
mediaRecorder.ondataavailable = function(e) {
console.log('ondataavailable: ', e.data);
chunks.push(e.data);
};
}
function sendRecordedData() {
var superBuffer = new Blob(chunks, {type: 'audio/ogg'});
ws.send(superBuffer);
}
function stopRecording() {
mediaRecorder.stop();
mediaRecorder.onstop = function(e) {
sendRecordedData();
chunks = [];
};
}
While testing with Firefox everything works fine, but Chrome generates the following error:
Uncaught (in promise) DOMException: Unable to decode audio data
Any suggestions would be helpful, thanks in advance.

I encountered the same error. Updating Chrome did not fix it. However, debugging in Firefox instead gave me a much more descriptive error:
The buffer passed to decodeAudioData contains an unknown content type.
Uncaught (in promise) DOMException: MediaDecodeAudioDataUnknownContentType
Uncaught (in promise) DOMException: The given encoding is not supported.
That error, by the way, was occurring because the .mp3 file I wanted to load wasn't found. So I was making a web request, receiving the 404 HTML page, and trying to load that as an mp3, which failed as an 'unsupported audio format'.
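Based on that, a minimal sanity check before decoding can save a lot of debugging. Here is a sketch using fetch and the promise form of decodeAudioData (the URL and the AudioContext variable are placeholders, not from the question): it verifies the HTTP status and content type so a 404 HTML page never reaches the decoder.
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();

async function loadSound(url) {
    const response = await fetch(url);
    // A 404 (or any error page) would otherwise be handed to the decoder as "audio".
    if (!response.ok) {
        throw new Error('HTTP ' + response.status + ' while fetching ' + url);
    }
    const type = response.headers.get('Content-Type') || '';
    if (type.includes('text/html')) {
        throw new Error('Got HTML instead of audio for ' + url);
    }
    const arrayBuffer = await response.arrayBuffer();
    return audioCtx.decodeAudioData(arrayBuffer); // promise form: errors surface in catch
}

loadSound('sound.mp3') // placeholder path
    .then(buffer => console.log('decoded', buffer.duration, 'seconds of audio'))
    .catch(err => console.error('loading/decoding failed:', err));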

I encountered the same issue. Upgrading Chrome to the latest release, e.g. 85.0.4183, resolved the issue for me.

I had the same error with createjs (which I used to load the files).
It was an integration problem with Internet Download Manager (IDM); I solved it by disabling IDM!

Related

Why is "WebRTC Screen sharing" not streaming when using different computers?

I am developing a web application, like an online classroom, in which I want to share the screen of a browser on one computer and view it on another. I get the error "Uncaught (in promise) DOMException: Error" in Chrome when I try to share the screen from one computer and view it on another.
I am using WebRTC for this and WebSocket for signaling. At the backend, Java Jersey does the searching and forwarding of the requests to the appropriate clients. I can share the screen from one browser window and view it in another on the same machine, but when I use different computers I get this error. When I debug the RTCPeerConnection object, it shows the property connectionState as "failed" and iceConnectionState as "disconnected".
Javascript: Request to share the screen
var urlS = [];
var config = {iceServers: urlS};
var $source = $('#monitor-src');
$scope.context.peerConnection = new RTCPeerConnection();
$scope.context.peerConnection.onicecandidate = function(event)
{
console.log(event)
if (event.candidate)
{
var json =
{
type:event.type,
label:event.candidate.sdpMLineIndex,
id:event.candidate.sdpMid,
candidate:event.candidate
}
WebSocket.send({desc:json,usrId:$scope.context.me.id},$scope)
}
else
{
console.error("Failed to create ice candidate")
}
};
try
{
$scope.context.peerConnection.createOffer({offerToReceiveAudio: true,offerToReceiveVideo: true}).then(function(offer)
{
return $scope.context.peerConnection.setLocalDescription(offer);
}).then(function()
{
WebSocket.send({desc:$scope.context.peerConnection.localDescription,callee:mentee.id,caller:$scope.context.me.id,usrId:$scope.context.me.id},$scope)
});
}
catch(error)
{
console.error("onnegotiationneeded-"+error)
}
$scope.context.peerConnection.onnegotiationneeded = function()
{
console.error("onnegotiationneeded")
};
try
{
console.log($scope.context.peerConnection);
$scope.context.peerConnection.ontrack = function(event)
{
console.log("ontrack:"+event.streams.length)
$source.parent()[0].srcObject= event.streams[0];
};
$scope.context.peerConnection.onaddstream = function(event)
{
console.log("onaddstream:"+event.stream)
$source.parent()[0].srcObject = event.stream;
};
}
catch(error)
{
console.error(error)
}
Javascript: WebSocket handling of the request and sending the response
$rootScope.socket.onMessage(function(message)
{
data = angular.fromJson(message.data);
if(data.type == 'offer')
{
var stream = null;
//var urlS = [{urls: 'stun:192.168.1.16:8443'}];
var urlS = [];
var config = {iceServers: urlS};
scope.context.peerConnection = new RTCPeerConnection();
scope.context.peerConnection.setRemoteDescription(data).then(function(){
return navigator.mediaDevices.getDisplayMedia({video:true,audio: true});
}).then(function(stream){
console.log(scope.context.peerConnection)
scope.context.peerConnection.addStream(stream);
stream.getTracks().forEach(function(track)
{
//var rtpSender =
});
scope.context.peerConnection.createAnswer().then(function(answer){
return scope.context.peerConnection.setLocalDescription(answer)
}).then(function()
{
send({desc:scope.context.peerConnection.localDescription,usrId:scope.context.me.id},scope)
}).catch(function(error){
console.error(error)});
})
}
else if(data.type == 'answer')
{
scope.context.peerConnection.setRemoteDescription(data);
}
else if(data.type == 'icecandidate')
{
console.log("icecandidate:"+angular.toJson(data))
var promise = scope.context.peerConnection.addIceCandidate(data.candidate).then(function success(){
console.log("ice success")
}).catch(function error(err)
{
console.log(err);
});
}
});
};
The ontrack and onaddstream events are triggered when a stream is added to the peer connection. But I get nothing on the requested peer.
I am not a JavaScript expert, but your urlS array seems to be empty. Try adding both TURN and STUN servers to your urlS.
For more info take a look here
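For illustration, a minimal sketch of what passing ICE servers could look like (the TURN URL and credentials below are placeholders; the Google STUN server is just a commonly used public example):
var config = {
    iceServers: [
        { urls: 'stun:stun.l.google.com:19302' }, // public STUN server (example)
        { urls: 'turn:turn.example.com:3478',     // placeholder TURN server
          username: 'user', credential: 'pass' }
    ]
};
// The config also has to be passed to the constructor;
// in the code above it is built but new RTCPeerConnection() is called without it.
$scope.context.peerConnection = new RTCPeerConnection(config);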

Audio recording plays back at inconsistent rate

I am using the code below to record audio in the browser, then using socket.io to send the resulting blob to a Node.js server. The data is then saved to a MySQL database or as an .ogg file.
The playback from either source is strangely garbled at times: at certain moments the sound will suddenly accelerate and then return to normal. I've not been able to find anything online about this issue.
UPDATE: Further testing leads me to believe it is a hardware/software compatibility issue with the Asus D1 sound card I'm using. Recording from other devices does not result in the same behavior.
var mediaRecorder;
var startRecord = function(recordData){
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
if (navigator.getUserMedia) {
navigator.getUserMedia({audio:true}, success, function(stream) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
function success(stream) {
var chunks = [];
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.start();
mediaRecorder.onstop = function(e) {
stream.getAudioTracks()[0].stop()
var blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' })
var audioURL = URL.createObjectURL(blob);
if (recordData.id) {
socket.emit('audio', {
'id':recordData.id,
'audio':blob,
'option':recordData.type
});
}
mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
}
};
var stopRecord = function(recordData){
mediaRecorder.stop();
mediaRecorder.state = 'recording'
};

Loading audio files through URLs from remote locations (Opera)?

Maybe someone knows why CodePen does not load audio files from URLs? (I do not have CodePen Pro, so I can't upload files directly to the pen.)
I have this Audio "loader" implementation in my program:
// Audio loader implementation.
window.onload = init;
let context;
let bufferLoader;
let greenBuffer = null;
let redBuffer = null;
let blueBuffer = null;
let yellowBuffer = null;
let dohBuffer = null;
let woohooBuffer = null;
let excellentBuffer = null;
let superDohBuffer = null;
// Buffer loader class taken from https://www.html5rocks.com/en/tutorials/webaudio/intro/
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
let request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
let loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
}
BufferLoader.prototype.load = function() {
for (let i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
}
function init() {
try {
// Fix up for prefixing
window.AudioContext = window.AudioContext||window.webkitAudioContext;
context = new AudioContext();
}
catch(e) {
alert('Web Audio API is not supported in this browser');
}
bufferLoader = new BufferLoader(
context,
[
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound1.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound2.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound3.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound4.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doh.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/woohoo.mp3',
'https://cors-anywhere.herokuapp.com/http://springfieldfiles.com/sounds/burns/excellnt.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doheth.mp3',
],
setBuffers
);
bufferLoader.load();
}
function setBuffers(bufferList){
greenBuffer = bufferList[0];
redBuffer = bufferList[1];
blueBuffer = bufferList[2];
yellowBuffer = bufferList[3];
dohBuffer = bufferList[4];
woohooBuffer = bufferList[5];
excellentBuffer = bufferList[6];
superDohBuffer = bufferList[7];
}
If I use this code locally (not on CodePen), it works fine: it loads those files and later I can play them however I want. But if I run it on CodePen, it throws this (note that I also prepended https://cors-anywhere.herokuapp.com/ to the URLs to bypass CORS):
console_runner-079c09a….js:1 decodeAudioData error DOMException: Unable to decode audio data
(anonymous) # console_runner-079c09a….js:1
(anonymous) # pen.js:80
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
Full pen can be checked here: https://codepen.io/andriusl/pen/proxKj
Update: It seems this is related to the browser. AudioContext does not work properly in Opera, so this question is more about the browser than about CodePen itself.
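A quick way to narrow that down (just a diagnostic sketch, not from the post) is to point a plain Audio element at one of the same proxied URLs: if the element can play it, the browser can decode the format and the problem is in the download path; if it errors, audio.error.code distinguishes network failures from decode failures.
function probeAudioUrl(url) {
    var audio = new Audio();
    audio.crossOrigin = 'anonymous'; // request with CORS, like the XHR path above
    audio.addEventListener('canplaythrough', function() {
        console.log('Browser can decode and play:', url);
    });
    audio.addEventListener('error', function() {
        console.error('Audio element failed for', url,
                      'MediaError code:', audio.error && audio.error.code);
    });
    audio.src = url;
    audio.load();
}

probeAudioUrl('https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound1.mp3');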

Decoding audio data of MediaRecorder fails on Chrome

I am using MediaRecorder to record the microphone. The default format of MediaRecorder in Chrome is video/webm. Here is a short example:
navigator.mediaDevices.getUserMedia({audio: true,video: false})
.then(function(stream) {
var recordedChunks = [];
var recorder = new MediaRecorder(stream);
recorder.start(10);
recorder.ondataavailable = function (event) {
if (event.data.size > 0) {
recordedChunks.push(event.data);
} else {
// ...
}
}
setTimeout(function(){
recorder.stop();
var blob = new Blob(recordedChunks, {
"type": recordedChunks[0].type
});
var blobUrl = URL.createObjectURL(blob);
var context = new AudioContext();
var request = new XMLHttpRequest();
request.open("GET", blobUrl, true);
request.responseType = "arraybuffer";
request.onload = function () {
context.decodeAudioData(
request.response,
function (buffer) {
if (!buffer) {
alert("buffer is empty!");
}
var dataArray = buffer.getChannelData(0);
//process channel data...
context.close();
},
function (error) {
alert(error);
}
);
};
request.send();
}, 3000);
})
.catch(function(error) {
console.log('error: ' + error);
});
This code throws the error "Uncaught (in promise) DOMException: Unable to decode audio data" from context.decodeAudioData, in Chrome only.
What is wrong here, and how can I fix it in Chrome?
Here is a working example in Plunker: plunker
OK... there is no fix for the issue. This is a Chrome bug, and you can see it here.
The fix for decoding audio from MediaRecorder will be available in Chrome version 58. I already tested it on the 58 beta and it works.

QuotaExceededError using MediaStream API

My application receives video chunks from a server-sent event (SSE) and, using the MediaSource API, should append them to a buffer and display them in an HTML5 video tag.
The problem is that the MediaSource API stops working when the program tries to append a chunk to the SourceBuffer.
The error appears when the program tries to append the first chunk.
This is the client-side code:
window.MediaSource = window.MediaSource || window.WebKitMediaSource;
window.URL = window.URL || window.webkitURL;
if (!!!window.MediaSource) {alert('MediaSource API is not available');}
var video = document.getElementById("video");
var mediaSource = new MediaSource();
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function(e) {
var source = new EventSource('http://localhost:5000/video');
// this is the line that catches the error
var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
source.addEventListener('chunkStreamed', function(e){
var chunk = new Blob(JSON.parse(e.data));
console.log("chunk: ", chunk);
var reader = new FileReader();
reader.onload = function(e) {
// error is caused by this line
sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
};
reader.readAsArrayBuffer(chunk);
});
source.addEventListener('endStreaming', function(e){
console.log(e.data);
// mediaSource.endOfStream();
// endOfStream() not here: sourceBuffer.appendBuffer would still run after this command and cause an InvalidStateError
});
source.onopen = function(e) {
console.log("EventSource open");
};
source.onerror = function(e) {
console.log("error", e);
source.close();
};
}, false);
and this is the complete error log:
Uncaught QuotaExceededError: An attempt was made to add something to storage that exceeded the quota.
The problem comes out when the app tries to do sourceBuffer.appendBuffer(new Uint8Array(e.target.result));.
I really can't understand why this error appears. The code is really similar to the code of this example.
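One thing worth knowing about this API (a general sketch, not necessarily the fix for the quota error): appendBuffer must not be called while the SourceBuffer is still updating, so appends are usually queued and drained on the 'updateend' event. Assuming the sourceBuffer from the code above, that pattern looks roughly like this:
var pendingChunks = []; // ArrayBuffers waiting to be appended

function enqueueChunk(arrayBuffer) {
    pendingChunks.push(arrayBuffer);
    appendNextChunk();
}

function appendNextChunk() {
    // Appending while a previous append is still processing throws an exception.
    if (sourceBuffer.updating || pendingChunks.length === 0) return;
    sourceBuffer.appendBuffer(pendingChunks.shift());
}

// Drain the queue each time the SourceBuffer finishes an append.
sourceBuffer.addEventListener('updateend', appendNextChunk);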
