Web Audio API Unable to decode audio data - javascript

I got the following error while trying to load an MP3 audio file in Google Chrome: "Uncaught (in promise) DOMException: Unable to decode audio data". Here's my code:
loadSong(url) {
    var AudioCtx = new AudioContext();
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = "arraybuffer";
    request.onload = function () {
        console.log(request.response);
        AudioCtx.decodeAudioData(
            request.response,
            function (buffer) {
                var currentSong = AudioCtx.createBufferSource();
                currentSong.buffer = buffer;
                currentSong.connect(AudioCtx.destination);
                currentSong.start(0);
            },
            function (e) {
                alert("error: " + e.err);
            }
        );
    };
    request.send();
}
Here's the console.log output for request.response:
ArrayBuffer(153) {}
byteLength:0
__proto__:ArrayBuffer
byteLength:0
constructor:ƒ ArrayBuffer()
slice:ƒ slice()
Symbol(Symbol.toStringTag):"ArrayBuffer"
get byteLength:ƒ byteLength()
__proto__:Object
In Firefox I got the error "The buffer passed to decodeAudioData contains an unknown type of content." I get the same error with OGG files.
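One thing worth knowing when reading that console output: decodeAudioData() detaches the ArrayBuffer passed to it, and Chrome's console expands objects lazily, so ArrayBuffer(153) {} with byteLength: 0 can simply mean the buffer had already been detached by the time it was expanded. Also, 153 bytes is far too small for an MP3, which suggests the server returned an error page rather than audio. A minimal check (my suggestion, not from the original thread, assuming the same url) is to log the size as a plain number before decoding:

request.onload = function () {
    // A number is logged eagerly, so this shows the real size at the
    // moment of the call, before decodeAudioData() detaches the buffer.
    console.log('received bytes:', request.response.byteLength);
    // If this prints something tiny (like 153), inspect the response as
    // text: the server probably sent an error page instead of audio.
};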

Related

javascript audio api play blob url

I worked out a test function for the Web Audio API that plays a blob URL:
// trigger takes a sound-playing function
function loadSound(url, trigger) {
    let context = new (window.AudioContext || window.webkitAudioContext)();
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    // Decode asynchronously
    request.onload = function() {
        context.decodeAudioData(request.response, function(buffer) {
            trigger(() => {
                // play sound
                var source = context.createBufferSource(); // creates a sound source
                source.buffer = buffer;                    // tell the source which sound to play
                source.connect(context.destination);       // connect the source to the speakers
                source.start();
            });
        }, e => {
            console.log(e);
        });
    };
    request.send();
}
loadSound(url, fc => {
    window.addEventListener('click', fc);
});
This is just for testing. What I actually need is a function I can call to play a sound from a URL directly, stopping whatever is currently playing:
let ac;
function playSound(url) {
    if (ac) { ac.suspend(); }
    let context = new (window.AudioContext || window.webkitAudioContext)();
    let request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    // Decode asynchronously
    request.onload = function() {
        context.decodeAudioData(request.response, function(buffer) {
            // play sound
            let source = context.createBufferSource(); // creates a sound source
            source.buffer = buffer;                     // tell the source which sound to play
            source.connect(context.destination);        // connect the source to the speakers
            // source.noteOn(0); // play the source now (deprecated API)
            ac = context;
            source.start();
        }, e => {
            console.log(e);
        });
    };
    request.send();
}
window.addEventListener('click', () => {
    playSound(url);
});
I did not change much, yet while the second version's trigger fires fine, it never produces any sound.
I suspect it may be a variable scope issue; I would be very glad if you could help me debug it.
Since the blob URL is too long, I put the two versions on CodePen:
working version
not working version
Instead of calling suspend() on the stored AudioContext, save a reference to the AudioBufferSourceNode that is currently playing. Then check whether that reference exists and call stop() on it whenever you play a new sound.
const context = new AudioContext();
let bufferSource = null;

function playSound(url) {
    if (bufferSource !== null) {
        bufferSource.stop();
        bufferSource = null;
    }
    let request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        context.decodeAudioData(request.response, (buffer) => {
            const source = context.createBufferSource();
            source.buffer = buffer;
            source.connect(context.destination);
            source.start();
            source.addEventListener('ended', () => {
                // Only clear the shared reference if it still points at
                // this source; otherwise a sound that just ended could
                // null out the reference to a newer, still-playing one.
                if (bufferSource === source) {
                    bufferSource = null;
                }
            });
            bufferSource = source;
        }, (error) => {
            console.log(error);
        });
    };
    request.send();
}
window.addEventListener('click', () => {
    playSound(url);
});

Trim an audio file using javascript (first 3 seconds)

Can I trim an audio file that was recorded via JavaScript? I want to trim off the first 3 seconds. I recorded the audio with p5.js, merged the recording with a karaoke track using AudioContext(), and I want to trim it because of an unpleasant sound at the start.
You will probably need to read the audio into an AudioBuffer using something like AudioContext.decodeAudioData(), plug the AudioBuffer into an AudioBufferSourceNode, skip the first 3 seconds using the offset parameter of AudioBufferSourceNode.start(), and record the resulting output stream.
Example code:
var audioCtx = new AudioContext();
var source = audioCtx.createBufferSource();
var dest = audioCtx.createMediaStreamDestination();
var mediaRecorder = new MediaRecorder(dest.stream);
var request = new XMLHttpRequest();
request.open('GET', 'your.ogg', true);
request.responseType = 'arraybuffer';
request.onload = function() {
    var audioData = request.response;
    audioCtx.decodeAudioData(
        audioData,
        function(buffer) {
            source.buffer = buffer;
            source.connect(dest);
            mediaRecorder.start();
            // Start now, but begin reading 3 seconds into the buffer.
            source.start(audioCtx.currentTime, 3);
            // etc...
        },
        function(e) {
            console.log("Error with decoding audio data: " + e.message);
        }
    );
};
request.send();
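One hedged way to finish the "// etc..." part (my sketch, continuing the example above and using its mediaRecorder and source variables): collect the recorder's chunks, and stop recording once the offset source has played to its end.

// Continuation of the example above (assumes its mediaRecorder and source).
var chunks = [];
mediaRecorder.ondataavailable = function(event) {
    chunks.push(event.data);
};
mediaRecorder.onstop = function() {
    // The trimmed clip, re-encoded by MediaRecorder.
    var trimmedBlob = new Blob(chunks, { type: mediaRecorder.mimeType });
    var trimmedUrl = URL.createObjectURL(trimmedBlob);
    console.log('trimmed audio available at', trimmedUrl);
};
// Stop recording once the source (started at offset 3) finishes.
source.onended = function() {
    mediaRecorder.stop();
};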

Loading audio files through URLs from remote locations (Opera)?

Maybe someone knows why CodePen does not load audio files from URLs? (I don't have CodePen Pro, so I can't upload files directly to a pen.)
I have this Audio "loader" implementation in my program:
// Audio loader implementation.
window.onload = init;
let context;
let bufferLoader;
let greenBuffer = null;
let redBuffer = null;
let blueBuffer = null;
let yellowBuffer = null;
let dohBuffer = null;
let woohooBuffer = null;
let excellentBuffer = null;
let superDohBuffer = null;

// Buffer loader class taken from https://www.html5rocks.com/en/tutorials/webaudio/intro/
function BufferLoader(context, urlList, callback) {
    this.context = context;
    this.urlList = urlList;
    this.onload = callback;
    this.bufferList = new Array();
    this.loadCount = 0;
}

BufferLoader.prototype.loadBuffer = function(url, index) {
    // Load buffer asynchronously
    let request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";
    let loader = this;
    request.onload = function() {
        // Asynchronously decode the audio file data in request.response
        loader.context.decodeAudioData(
            request.response,
            function(buffer) {
                if (!buffer) {
                    alert('error decoding file data: ' + url);
                    return;
                }
                loader.bufferList[index] = buffer;
                if (++loader.loadCount == loader.urlList.length)
                    loader.onload(loader.bufferList);
            },
            function(error) {
                console.error('decodeAudioData error', error);
            }
        );
    };
    request.onerror = function() {
        alert('BufferLoader: XHR error');
    };
    request.send();
};

BufferLoader.prototype.load = function() {
    for (let i = 0; i < this.urlList.length; ++i)
        this.loadBuffer(this.urlList[i], i);
};

function init() {
    try {
        // Fix up for prefixing
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        context = new AudioContext();
    }
    catch (e) {
        alert('Web Audio API is not supported in this browser');
    }
    bufferLoader = new BufferLoader(
        context,
        [
            'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound1.mp3',
            'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound2.mp3',
            'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound3.mp3',
            'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound4.mp3',
            'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doh.mp3',
            'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/woohoo.mp3',
            'https://cors-anywhere.herokuapp.com/http://springfieldfiles.com/sounds/burns/excellnt.mp3',
            'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doheth.mp3',
        ],
        setBuffers
    );
    bufferLoader.load();
}

function setBuffers(bufferList) {
    greenBuffer = bufferList[0];
    redBuffer = bufferList[1];
    blueBuffer = bufferList[2];
    yellowBuffer = bufferList[3];
    dohBuffer = bufferList[4];
    woohooBuffer = bufferList[5];
    excellentBuffer = bufferList[6];
    superDohBuffer = bufferList[7];
}
If I use this code locally (not on CodePen), it works fine: it loads the files and I can later play them however I want. But if I run it on CodePen, it throws the following (note I also prepended https://cors-anywhere.herokuapp.com/ to the URLs to bypass CORS):
console_runner-079c09a….js:1 decodeAudioData error DOMException: Unable to decode audio data
(anonymous) @ console_runner-079c09a….js:1
(anonymous) @ pen.js:80
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
Full pen can be checked here: https://codepen.io/andriusl/pen/proxKj
Update:
It seems this is browser-related. AudioContext does not work properly in Opera, so this question is more about the browser than about CodePen itself.
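A debugging step worth trying here (my suggestion, not from the original post): when a proxy like cors-anywhere fails, it often returns an HTML error page, and decodeAudioData then reports "Unable to decode audio data" even though the real problem was the request. Checking the status and size inside request.onload, before decoding, narrows this down:

request.onload = function() {
    if (request.status < 200 || request.status >= 300) {
        console.error('HTTP error ' + request.status + ' for ' + url);
        return;
    }
    console.log(url + ' returned ' + request.response.byteLength + ' bytes');
    // ...then hand request.response to decodeAudioData as above.
};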

Decoding audio data from MediaRecorder fails on Chrome

I am using MediaRecorder to record the microphone. The default format of MediaRecorder in Chrome is video/webm. Here is a short example:
navigator.mediaDevices.getUserMedia({ audio: true, video: false })
    .then(function(stream) {
        var recordedChunks = [];
        var recorder = new MediaRecorder(stream);
        recorder.start(10);
        recorder.ondataavailable = function(event) {
            if (event.data.size > 0) {
                recordedChunks.push(event.data);
            } else {
                // ...
            }
        };
        setTimeout(function() {
            recorder.stop();
            var blob = new Blob(recordedChunks, {
                "type": recordedChunks[0].type
            });
            var blobUrl = URL.createObjectURL(blob);
            var context = new AudioContext();
            var request = new XMLHttpRequest();
            request.open("GET", blobUrl, true);
            request.responseType = "arraybuffer";
            request.onload = function() {
                context.decodeAudioData(
                    request.response,
                    function(buffer) {
                        if (!buffer) {
                            alert("buffer is empty!");
                        }
                        var dataArray = buffer.getChannelData(0);
                        // process channel data...
                        context.close();
                    },
                    function(error) {
                        alert(error);
                    }
                );
            };
            request.send();
        }, 3000);
    })
    .catch(function(error) {
        console.log('error: ' + error);
    });
This code throws "Uncaught (in promise) DOMException: Unable to decode audio data" from context.decodeAudioData, in Chrome only.
What is wrong here, and how can I fix it in Chrome?
Here is a working example on Plunker: plunker
Ok... there is no fix for the issue. This is a Chrome bug, and you can see it here.
The fix for decoding MediaRecorder output shipped in Chrome version 58. I already tested it on the 58 beta and it works.
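For anyone stuck on an older Chrome, one common workaround pattern (an assumption on my part, not from the original answer) was to request an explicit audio container up front via MediaRecorder.isTypeSupported(), instead of relying on the video/webm default:

// Pick an audio MIME type the browser claims to support.
// (Uses the stream from the getUserMedia example above.)
var mimeType = '';
if (window.MediaRecorder && MediaRecorder.isTypeSupported) {
    if (MediaRecorder.isTypeSupported('audio/webm')) {
        mimeType = 'audio/webm';
    } else if (MediaRecorder.isTypeSupported('audio/ogg')) {
        mimeType = 'audio/ogg';
    }
}
var recorder = mimeType
    ? new MediaRecorder(stream, { mimeType: mimeType })
    : new MediaRecorder(stream);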

JavaScript Web Audio: cannot properly decode audio data?

I'm trying to use the Web Audio API in JavaScript to load a sound into a buffer and play it. Unfortunately it doesn't work and I get the following error:
Uncaught TypeError: Failed to set the 'buffer' property on 'AudioBufferSourceNode':
The provided value is not of type 'AudioBuffer'.
I can pinpoint which line is giving me the error, but I don't know why. Here is the relevant code if it helps:
var audioContext;
var playSoundBuffer;

function init() {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    audioContext = new AudioContext();
    loadNote();
}

function loadNote() {
    var request = new XMLHttpRequest();
    request.open("GET", "./sounds/topE.wav", true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        audioContext.decodeAudioData(request.response, function(buffer) {
            playSoundBuffer = buffer;
        }, function(error) {
            console.error("decodeAudioData error", error);
        });
    };
    request.send();
    playSound();
}

function playSound() {
    var source = audioContext.createBufferSource();
    source.buffer = playSoundBuffer; // This is the line that generates the error
    source.connect(audioContext.destination);
    source.start(0);
}
I believe the decodeAudioData method passes an AudioBuffer to its first callback function (its second parameter). I tried to save this AudioBuffer in playSoundBuffer and then play it, but I get that error and I'm not sure why. Any help would be greatly appreciated.
The reason you get that error is that you are ignoring the asynchronous nature of your code and treating it as if it were synchronous. If you log the contents of all relevant variables as the first step in debugging, you will see that at the time you try to use your buffer it is undefined, not an AudioBuffer at all. Tip: console.log everything until you know exactly how the code behaves at every point.
function loadNote() {
    var request = new XMLHttpRequest();
    request.open("GET", "./sounds/topE.wav", true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        audioContext.decodeAudioData(request.response, function(buffer) {
            playSoundBuffer = buffer;
            playSound(); // don't start processing it before the response is there!
        }, function(error) {
            console.error("decodeAudioData error", error);
        });
    };
    request.send(); // start doing something async
}
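Equivalently, in browsers where decodeAudioData() returns a promise, the required ordering falls out naturally (a sketch of mine, assuming the same file path and the audioContext/playSound from the question):

function loadNote() {
    fetch("./sounds/topE.wav")
        .then(function(response) { return response.arrayBuffer(); })
        .then(function(data) { return audioContext.decodeAudioData(data); })
        .then(function(buffer) {
            playSoundBuffer = buffer;
            playSound(); // runs only once the buffer actually exists
        })
        .catch(function(error) {
            console.error("decodeAudioData error", error);
        });
}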
