Does anyone know why CodePen does not load audio files from URLs? (I do not have CodePen Pro, so I can't upload files directly to a pen.)
I have this Audio "loader" implementation in my program:
// Audio loader implementation: page-level state shared by init()/setBuffers().
let context;       // AudioContext, created in init()
let bufferLoader;  // BufferLoader instance that fetches every clip
// Decoded AudioBuffers, one per sound; populated by setBuffers().
let greenBuffer = null;
let redBuffer = null;
let blueBuffer = null;
let yellowBuffer = null;
let dohBuffer = null;
let woohooBuffer = null;
let excellentBuffer = null;
let superDohBuffer = null;
// Kick everything off once the page has loaded (init is hoisted).
window.onload = init;
// Buffer loader class taken from https://www.html5rocks.com/en/tutorials/webaudio/intro/
/**
 * Fetches a list of audio URLs and decodes each into an AudioBuffer.
 * @param {AudioContext} context - Web Audio context used for decoding.
 * @param {string[]} urlList - URLs of the audio files to load.
 * @param {function(AudioBuffer[])} callback - Invoked with the buffer list
 *   once every file has been decoded.
 */
function BufferLoader(context, urlList, callback) {
  this.context = context;
  this.urlList = urlList;
  this.onload = callback;
  this.bufferList = []; // decoded buffers, indexed to match urlList
  this.loadCount = 0;   // how many files have finished decoding
}
/**
 * Fetch one URL as an ArrayBuffer and decode it into this.bufferList[index].
 * Calls this.onload(bufferList) once every URL in urlList has decoded.
 * @param {string} url - Location of the audio file.
 * @param {number} index - Slot in bufferList for the decoded result.
 */
BufferLoader.prototype.loadBuffer = function(url, index) {
  // Load buffer asynchronously
  let request = new XMLHttpRequest();
  request.open("GET", url, true);
  request.responseType = "arraybuffer";

  let loader = this;

  request.onload = function() {
    // onload fires even for 404/500 responses; decoding an HTML error page
    // produces the misleading "Unable to decode audio data" DOMException,
    // so verify the HTTP status before decoding.
    if (request.status < 200 || request.status >= 300) {
      alert('BufferLoader: HTTP ' + request.status + ' for ' + url);
      return;
    }
    // Asynchronously decode the audio file data in request.response
    loader.context.decodeAudioData(
      request.response,
      function(buffer) {
        if (!buffer) {
          alert('error decoding file data: ' + url);
          return;
        }
        loader.bufferList[index] = buffer;
        // Fire the completion callback after the final file decodes.
        if (++loader.loadCount === loader.urlList.length) {
          loader.onload(loader.bufferList);
        }
      },
      function(error) {
        console.error('decodeAudioData error', error);
      }
    );
  };

  request.onerror = function() {
    alert('BufferLoader: XHR error');
  };

  request.send();
};
/**
 * Start loading every URL in urlList; each result lands in bufferList
 * at the same index it had in the URL list.
 */
BufferLoader.prototype.load = function() {
  this.urlList.forEach((url, index) => this.loadBuffer(url, index));
};
/**
 * Entry point (window.onload): create the AudioContext and begin fetching
 * all eight sound files through a BufferLoader; setBuffers receives them.
 */
function init() {
  try {
    // Fix up for prefixing (older WebKit exposes webkitAudioContext only).
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    context = new AudioContext();
  }
  catch(e) {
    alert('Web Audio API is not supported in this browser');
    // Without a context there is nothing to decode with — the original
    // fell through and constructed a BufferLoader with undefined context.
    return;
  }

  bufferLoader = new BufferLoader(
    context,
    [
      'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound1.mp3',
      'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound2.mp3',
      'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound3.mp3',
      'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound4.mp3',
      'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doh.mp3',
      'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/woohoo.mp3',
      'https://cors-anywhere.herokuapp.com/http://springfieldfiles.com/sounds/burns/excellnt.mp3',
      'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doheth.mp3',
    ],
    setBuffers
  );

  bufferLoader.load();
}
/**
 * BufferLoader completion callback: distribute the decoded buffers, in
 * request order, into the page-level buffer variables.
 * @param {AudioBuffer[]} bufferList - Decoded buffers, same order as the URL list.
 */
function setBuffers(bufferList) {
  [
    greenBuffer,
    redBuffer,
    blueBuffer,
    yellowBuffer,
    dohBuffer,
    woohooBuffer,
    excellentBuffer,
    superDohBuffer,
  ] = bufferList;
}
If I use this code locally (not on codepen), it works fine. It loads those files and later I can play those audio files how I want. But if I run it on codepen, it throws this (note I also prepended https://cors-anywhere.herokuapp.com/ to URLs to bypass CORS):
console_runner-079c09a….js:1 decodeAudioData error DOMException: Unable to decode audio data
(anonymous) # console_runner-079c09a….js:1
(anonymous) # pen.js:80
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
Full pen can be checked here: https://codepen.io/andriusl/pen/proxKj
Update.
It seems this is related with browsers. AudioContext does not properly work with Opera browser, so this question is more oriented to browser than codepen itself.
Related
I am using MediaRecorder to record from the microphone. The default format of MediaRecorder in Chrome is video/webm. Here is a short example:
// Record ~3 s of microphone audio with MediaRecorder, then fetch the
// resulting Blob back through XHR as an ArrayBuffer and decode it with
// the Web Audio API.
navigator.mediaDevices.getUserMedia({audio: true,video: false})
.then(function(stream) {
var recordedChunks = [];
var recorder = new MediaRecorder(stream);
// start(10) requests a 10 ms timeslice, so ondataavailable fires repeatedly.
recorder.start(10);
recorder.ondataavailable = function (event) {
if (event.data.size > 0) {
recordedChunks.push(event.data);
} else {
// ...
}
}
setTimeout(function(){
recorder.stop();
// NOTE(review): the Blob is built synchronously right after stop(), but the
// final dataavailable event is delivered asynchronously — the last chunk may
// be missing from the Blob. Consider building it in recorder.onstop instead.
var blob = new Blob(recordedChunks, {
"type": recordedChunks[0].type
});
var blobUrl = URL.createObjectURL(blob);
var context = new AudioContext();
// Round-trip through an object URL + XHR to turn the Blob into an ArrayBuffer.
var request = new XMLHttpRequest();
request.open("GET", blobUrl, true);
request.responseType = "arraybuffer";
request.onload = function () {
context.decodeAudioData(
request.response,
function (buffer) {
if (!buffer) {
alert("buffer is empty!");
}
// First channel's raw samples.
var dataArray = buffer.getChannelData(0);
//process channel data...
context.close();
},
function (error) {
alert(error);
}
);
};
request.send();
}, 3000);
})
.catch(function(error) {
console.log('error: ' + error);
});
This code throws the error "Uncaught (in promise) DOMException: Unable to decode audio data" at context.decodeAudioData, in Chrome only.
What is wrong here, and how can I fix it in Chrome?
Here is working example in plunker: plunker
Ok... There is no fix for the issue. This is a chrome bug and you can see it here
The fix of decoding audio from MediaRecorder will be available on chrome version 58. I already tested it on 58-beta and it works.
Using below service to play audio file (wav/mp3) that comes in byte array format.
// Play an audio clip (wav/mp3) delivered by the service as a byte array.
myAudioService.getAudioTone(userid).then(function (data) {
    var context; // Audio context
    var buf;     // Decoded audio buffer
    // Keep the standard constructor when it exists and fall back to the
    // WebKit-prefixed one. (The original unconditionally overwrote
    // AudioContext with the prefix-only version, leaving it undefined on
    // browsers without the prefix.)
    $window.AudioContext = $window.AudioContext || $window.webkitAudioContext;
    context = new AudioContext();
    $timeout(function () {
        $scope.playByteArray = function () {
            // Copy the byte values into a fresh ArrayBuffer for decoding.
            var arrayBuffer = new ArrayBuffer(data.length);
            var bufferView = new Uint8Array(arrayBuffer);
            for (var i = 0; i < data.length; i++) { // was an implicit global `i`
                bufferView[i] = data[i];
            }
            context.decodeAudioData(arrayBuffer, function (buffer) {
                buf = buffer;
                // The original called the undefined global play(); the
                // function is defined on $scope.
                $scope.play();
            }, function (err) {
                // Surface decode failures instead of an unhandled rejection.
                console.error('decodeAudioData failed', err);
            });
        };
        $scope.play = function () {
            // Create a source node from the buffer
            var source = context.createBufferSource();
            source.buffer = buf;
            // Connect to the final output node (the speakers)
            source.connect(context.destination);
            // Play immediately
            source.start(0);
        };
        // Only attempt playback when bytes were actually received.
        // (The original `data.length !== '' || data !== ''` was always true.)
        if (data && data.length) {
            $scope.playByteArray();
        }
    }, 3000);
});
The functions are called but it throws below exception.
Uncaught (in promise) DOMException: Unable to decode audio data
How do I run it in Chrome, FF and IE ?
P.S. $window and $timeout are already defined in controller.
Based on the error message arrayBuffer doesn't contain what you think it contains. You should verify that the bytes in the array are the same as the encoded wav/mp3 file.
I'm having an issue with decodeAudioData method using Web Audio API to playback in Chrome (it works fine in Firefox)-
I am sending the audio buffer recorded by media recorder back from the server.
Server side
// WebSocket echo server: binary frames (recorded audio) are sent straight
// back to the sender; text frames are only logged.
wss = new WebSocketServer({server: server}, function () {});

wss.on('connection', function connection(socket) {
  socket.binaryType = "arraybuffer";
  socket.on('message', function incoming(message) {
    if (typeof message === 'string') {
      console.log("string message: ", message);
      return;
    }
    console.log("not string: ", message);
    socket.send(message); // echo the binary payload back
  });
});
Client side
// Client-side shared state: prefixed-API shims, audio context, recorder
// plumbing, and the binary WebSocket back to the server.
window.AudioContext = window.AudioContext||window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia);

var context = new AudioContext();
var mediaRecorder;
var chunks = [];    // recorded Blob pieces from MediaRecorder
var startTime = 0;  // playback scheduling cursor (AudioContext time)

ws = new WebSocket(url);
ws.binaryType = "arraybuffer";

// Echoed audio arrives as ArrayBuffers; anything else is just logged.
ws.onmessage = function (message) {
  if (!(message.data instanceof ArrayBuffer)) {
    console.log("not arrayBuffer", message.data);
    return;
  }
  context.decodeAudioData(message.data, function (soundBuffer) {
    playBuffer(soundBuffer);
  }, function (x) {
    console.log("decoding failed", x);
  });
};

createMediaRecorder();
/**
 * Request microphone access and build the MediaRecorder (Opus in WebM at
 * 128 kbit/s). The recorder is stored in the shared mediaRecorder variable.
 * NOTE(review): navigator.getUserMedia is the legacy callback API —
 * navigator.mediaDevices.getUserMedia is the modern replacement; confirm
 * target-browser support before migrating.
 */
function createMediaRecorder() {
  if (!navigator.getUserMedia) {
    alert('getUserMedia not supported on your browser!');
    return;
  }
  console.log('getUserMedia supported.');
  var constraints = {
    "audio": true
  };
  var onSuccess = function(stream) {
    var options = {
      audioBitsPerSecond : 128000,
      // The original wrote 'audio/webm\;codecs=opus'; '\;' is just an
      // unnecessary escape for ';' — the string value is unchanged.
      mimeType : 'audio/webm;codecs=opus'
    };
    mediaRecorder = new MediaRecorder(stream, options);
  };
  var onError = function(err) {
    console.log('The following error occurred: ' + err); // fixed "occured" typo
  };
  navigator.getUserMedia(constraints, onSuccess, onError);
}
/**
 * Schedule a decoded buffer for gapless playback: the first buffer starts
 * 100 ms from now; each subsequent buffer starts when the previous ends.
 * @param {AudioBuffer} buf - Decoded audio to play.
 */
function playBuffer(buf) {
  var source = context.createBufferSource();
  source.buffer = buf;
  source.connect(context.destination);
  if (startTime === 0) {
    // 100 ms of latency so the first chunk starts cleanly across systems
    // (the original comment claimed 50 ms while the code adds 0.1 s).
    startTime = context.currentTime + 0.1;
  }
  source.start(startTime);
  startTime = startTime + source.buffer.duration;
}
/**
 * Begin recording. The dataavailable handler is installed before start()
 * so no early chunk can be delivered without a listener in place (the
 * original attached the handler after starting).
 */
function startRecording() {
  getRecordedData();
  mediaRecorder.start();
}
/**
 * Install the MediaRecorder dataavailable handler; every delivered chunk
 * is logged and appended to the shared chunks array.
 */
function getRecordedData() {
  mediaRecorder.ondataavailable = (e) => {
    console.log('ondataavailable: ', e.data);
    chunks.push(e.data);
  };
}
/**
 * Merge every recorded chunk into a single Blob and ship it to the server
 * over the WebSocket.
 */
function sendRecordedData() {
  const superBuffer = new Blob(chunks, { type: 'audio/ogg' });
  ws.send(superBuffer);
}
/**
 * Stop the recorder; once the stop event fires (after the final data has
 * been flushed), send the whole recording and reset the chunk list.
 */
function stopRecording() {
  mediaRecorder.onstop = () => {
    sendRecordedData();
    chunks = [];
  };
  mediaRecorder.stop();
}
While testing with firefox working fine but with chrome generate the following error:
Uncaught (in promise) DOMException: Unable to decode audio data
Any suggestion will be helpful, thanks in advance.
I encountered the same error. Updating Chrome did not fix it. However, debugging in Firefox instead gave me a much more descriptive error:
The buffer passed to decodeAudioData contains an unknown content type.
Uncaught (in promise) DOMException: MediaDecodeAudioDataUnknownContentType
Uncaught (in promise) DOMException: The given encoding is not supported.
Which by the way was occurring because the .mp3 file I wanted to load wasn't found. So I was making a web request, receiving the 404 HTML page, and trying to load that as an mp3, which failed as an 'unsupported audio format'.
I encountered the same issue. Upgrading Chrome to the latest release, eg 85.0.4183, resolved the issue for me.
I Had the same error with createjs (I used to load up the files).
It was integration problem with Internet Download Manager (IDM) ... I've solved it by disabling IDM !
I'm currently editing my mp3 file with multiple effects like so
// Route the playing <audio> element's source node through a chain of
// filter nodes before it reaches the speakers.
var mainVerse = document.getElementById('audio1');
// `s` walks the chain; it always points at the last node connected so far.
var s = source;
// Detach the dry signal so only the filtered path is audible.
source.disconnect(audioCtx.destination);
// Chain: source -> filters1[0] -> filters1[1] -> ... -> last filter.
// NOTE(review): for...in suggests filters1 may be a plain object; if it is
// an Array, for...of (or an index loop) would be the safer idiom — confirm
// its shape before changing.
for (var i in filters1) {
s.connect(filters1[i]);
s = filters1[i];
}
// The last filter in the chain feeds the speakers.
s.connect(audioCtx.destination);
The mp3 plays accordingly on the web with the filters on it. Is it possible to create and download a new mp3 file with these new effects, using web audio api or any writing to mp3 container javascript library ? If not whats the best to solve this on the web ?
UPDATE - Using OfflineAudioContext
Using the sample code from https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext/oncomplete
I've tried using the offline node like so;
// Render an mp3 through an OfflineAudioContext, play the rendered buffer,
// and attempt to capture it as a downloadable WAV with Recorder.js.
var audioCtx = new AudioContext();
// Offline graph: 2 channels, 40 s of frames at 44.1 kHz.
var offlineCtx = new OfflineAudioContext(2,44100*40,44100);
osource = offlineCtx.createBufferSource(); // NOTE(review): implicit global — `var` missing
// Fetch and decode the mp3, render it offline, then play + record the result.
function getData() {
request = new XMLHttpRequest(); // NOTE(review): implicit global — `var` missing
request.open('GET', 'Song1.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
// Decode with the online context; the buffer feeds the offline graph.
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer; // NOTE(review): another implicit global
osource.buffer = myBuffer;
osource.connect(offlineCtx.destination);
osource.start();
//source.loop = true;
offlineCtx.startRendering().then(function(renderedBuffer) {
console.log('Rendering completed successfully');
// Play the rendered buffer through a fresh (online) context.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = renderedBuffer;
song.connect(audioCtx.destination);
song.start();
// NOTE(review): exportWAV is called immediately after creating the
// Recorder, before any audio has been captured — presumably why the
// downloaded WAV is empty. The suggested fix records `osource` while
// rendering instead.
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "filename.wav");
});
}).catch(function(err) {
console.log('Rendering failed: ' + err);
// Note: The promise should reject when startRendering is called a second time on an OfflineAudioContext
});
});
}
request.send();
}
// Run getData to start the process off
getData();
The recorder still downloads an empty file. I'm using the `song` source as the input for the recorder. The song plays and everything works with this code, but the recorder doesn't capture it for download.
Use https://github.com/mattdiamond/Recorderjs to record a .wav file. Then use https://github.com/akrennmair/libmp3lame-js to encode it to .mp3.
There's a nifty guide here, if you need a hand: http://audior.ec/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
UPDATE
Try moving
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
so that it is located above the call to start rendering, and connect it to osource instead, like so:
rec = new Recorder(osource, {
workerPath: 'Recorderjs/recorderWorker.js'
});
osource.connect(offlineCtx.destination);
osource.start();
offlineCtx.startRendering().then(function(renderedBuffer) {
.....
I have 2 functions here: load receives a path to an mp3, and count is the number of tracks being loaded. When the for loop reaches the last track, arraybufferList contains an 'arraybuffer' for each of the songs. This is then passed on to the loadData function, which uses the Web Audio API function 'decodeAudioData' to decode the data and put everything inside "currentBuffer". However, outside of the closure "currentBuffer" seems to vanish (in my Google Chrome debugger).
I'm trying to use the wavesurfer.js library and extend it to multiple tracks.
load: function (src,count) {
var my = this;
var xhr = new XMLHttpRequest();
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
console.log("loading mp3 & putting inside arraybufferList");
arraybufferList.push(xhr.response);
if (arraybufferList.length==count){
console.log(arraybufferList);
for (var i=0; i<arraybufferList.length; i++){
my.backend.loadData(
arraybufferList[i],
playList[i].draw.bind(playList[i])
);
console.log(playList);
//playList.push(my);
}
console.log(playList);
}
};
xhr.open('GET', src, true);
xhr.send();
},
loadData: function (audioData, cb) {
var my = this;
this.pause();
this.ac.decodeAudioData(
audioData,
function (buffer) {
my.currentBuffer = buffer;
my.lastStart = 0;
my.lastPause = 0;
my.startTime = null;
cb(buffer);
},
Error
);
},
At the beginning for each audio file an object is created where the information from loadData should go into
var WaveSurfer = {
// Per-track initialisation for the multi-track WaveSurfer extension:
// choose a backend, create the drawer, wire playback/seek callbacks, and
// register the instance in the shared playList.
init: function (params) {
var my = this;
// Pre-drawn tracks use the plain Audio backend; otherwise use Web Audio.
var backend = WaveSurfer.Audio;
if (!params.predrawn) {
backend = WaveSurfer.WebAudio;
}
this.backend = Object.create(backend);
this.backend.init(params);
this.drawer = Object.create(WaveSurfer.Drawer);
this.drawer.init(params);
// Redraw progress as the audio plays.
this.backend.bindUpdate(function () {
my.onAudioProcess();
});
// Clicking the canvas seeks to the clicked position (as a percentage).
this.bindClick(params.canvas, function (percents) {
my.playAt(percents);
});
// Register this instance so load()/loadData() can index into playList.
playList.push(my);
},
some essential code: ac: new (window.AudioContext || window.webkitAudioCont