javascript audio api play blob url - javascript

I worked out a testing function for Web Audio API to play url blob:
// trigger takes a sound play function
// Fetches `url` as an ArrayBuffer, decodes it via the Web Audio API, and
// hands a zero-argument "play" closure to `trigger`, which decides when the
// sound actually starts (below: on every click). A fresh
// AudioBufferSourceNode is created per play, since source nodes are one-shot.
function loadSound(url, trigger) {
let context = new (window.AudioContext || window.webkitAudioContext)();
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(buffer) {
trigger(()=>{
// play sound
var source = context.createBufferSource(); // creates a sound source
source.buffer = buffer; // tell the source which sound to play
source.connect(context.destination); // connect the source to the context's destination (the speakers)
source.start();
});
}, e=>{
console.log(e);
});
}
request.send();
}
// Wire the decoded sound up so that each click replays it from the start.
loadSound(url, fc=>{
window.addEventListener('click', fc);
});
This is just for testing. What I actually need is a function I can call to play the sound from a URL directly, and if anything is currently playing, stop it first.
// Holds the AudioContext of the most recent playback so the next call can
// suspend it before starting a new one.
let ac;
// Fetches `url`, decodes it, and plays it on a brand-new AudioContext,
// suspending whatever context played last.
// NOTE(review): this version reportedly produces no sound. Suspects worth
// checking: a new AudioContext is created on every call and the old one is
// only suspend()ed, never close()d — browsers cap the number of live
// contexts; also `ac` is only assigned after the async decode completes, so
// rapid clicks may suspend the wrong (or no) context. TODO confirm.
function playSound(url) {
if(ac){ac.suspend()}
let context = new (window.AudioContext || window.webkitAudioContext)();
let request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(buffer) {
// play sound
let source = context.createBufferSource(); // creates a sound source
source.buffer = buffer; // tell the source which sound to play
source.connect(context.destination); // connect the source to the context's destination (the speakers)
// source.noteOn(0); // play the source now
ac = context;
source.start();
}, e=>{
console.log(e);
});
}
request.send();
}
// Each click (re)plays the sound, stopping the previous one first.
window.addEventListener('click',()=>{
playSound(url);
});
I did not make many modifications; however, in the second version the trigger works fine but it always produces no sound.
I suspect it may be a variable scope issue, and I would be very glad if you could help me debug it.
since the blob url is too long, I put two versions in code pen.
working version
not working version

Instead of calling suspend on the stored AudioContext, save a reference to the AudioBufferSourceNode that is currently playing. Then check if the reference exists and call stop() whenever you play a new sound.
// One shared AudioContext for all playback (creating a context per play
// leaks contexts — browsers limit how many can be alive at once).
const context = new AudioContext();
// The AudioBufferSourceNode that is currently playing, or null when idle.
let bufferSource = null;

/**
 * Fetches `url` as an ArrayBuffer, decodes it, and plays it through the
 * shared AudioContext. Any sound that is still playing is stopped first.
 * @param {string} url - audio resource URL (regular or blob: URL).
 */
function playSound(url) {
  // Stop the previous sound, if any, before starting the new one.
  if (bufferSource !== null) {
    bufferSource.stop();
    bufferSource = null;
  }
  const request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  request.onload = function() {
    context.decodeAudioData(request.response, (buffer) => {
      const source = context.createBufferSource();
      source.buffer = buffer;
      source.connect(context.destination);
      source.start();
      bufferSource = source;
      source.addEventListener('ended', () => {
        // Only clear the reference if this source is still the current one.
        // A stale 'ended' event from an already-replaced source must not
        // null out the new source, or the next playSound() call would be
        // unable to stop it.
        if (bufferSource === source) {
          bufferSource = null;
        }
      });
    }, (error) => {
      console.log(error);
    });
  };
  // Surface network failures instead of silently doing nothing.
  request.onerror = (e) => {
    console.log('HTTP error', e);
  };
  request.send();
}
window.addEventListener('click', () => {
  playSound(url);
});

Related

How to fix “cracking” audio when using Audio Worklets?

I’m trying to understand how AudioWorklet is working and made some tests.
So far, I have a huge “cracking” problem when I let the browser play the sound in the background and do something else (e.g. opening a CPU-heavy application like Photoshop or VSCode and move the window around).
At first I thought it was a hardware problem. I upgraded to Catalina, removed any system audio extension I found, but it’s the same on Android, and some other friends’ computers (Mac, PC).
I’m using Version 1.0.1 Chromium: 78.0.3904.108 (Official Build) (64-bit) myself.
This YouTube video demonstrates the cracking audio issue.
I made two CodePen demos you can test here:
Web Audio Cracks (Vanilla + no Worklet):
// Minimal repro: fetch and decode an MP3 once, then play/stop it via the
// two buttons below.
const ctx = new(window.AudioContext || window.webkitAudioContext)();
const request = new XMLHttpRequest();
const gainNode = ctx.createGain();
// NOTE(review): a single AudioBufferSourceNode is created up front and
// reused. Source nodes are one-shot — after stop(), calling start() again
// on the same node throws, so Play works at most once. TODO confirm this
// is intentional for the repro.
const sourceNode = ctx.createBufferSource();
request.open('GET', 'https://poppublic.s3.amazonaws.com/other/2.mp3', true);
request.responseType = 'arraybuffer';
request.onload = () => {
ctx.decodeAudioData(request.response, buffer => {
sourceNode.buffer = buffer;
console.log(sourceNode.buffer.sampleRate);
});
};
request.onerror = function(e) {
console.log('HTTP error', e);
};
request.send();
// Connect source -> gain -> speakers, then start playback from offset 0.
play = () => {
sourceNode.connect(gainNode);
gainNode.connect(ctx.destination);
sourceNode.start(0);
}
stop = () => {
sourceNode.stop(0);
}
<button onClick="play()">Play</button>
<button onClick="stop()">Stop</button>
Web Audio Cracks (Vanilla + Worklet):
// Worklet variant: load an AudioWorklet module, then fetch and decode the
// same MP3. Play/Stop build a fresh source+gain chain each time.
const ctx = new(window.AudioContext || window.webkitAudioContext)();
const request = new XMLHttpRequest();
let gainNode = null;
let sourceNode = null;
let buffer = null;
let worklet = null;
try {
const script = 'https://poppublic.s3.amazonaws.com/other/worklet/processor.js';
ctx.audioWorklet.addModule(script).then(() => {
// NOTE(review): the worklet node is created but never connected into the
// audio graph below — the play() chain is source -> gain -> destination
// only. Verify whether the worklet is meant to sit in that chain.
worklet = new AudioWorkletNode(ctx, 'popscord-processor')
request.open('GET', 'https://poppublic.s3.amazonaws.com/other/2.mp3', true);
request.responseType = 'arraybuffer';
request.onload = () => {
ctx.decodeAudioData(request.response, buff => {
buffer = buff;
console.log(buff.sampleRate);
});
};
request.onerror = function(e) {
console.log('HTTP error', e);
};
request.send();
});
} catch (e) {
// NOTE(review): `this.setState` looks like a leftover from a React
// component — at top level of a plain script `this` has no setState and
// this handler would itself throw. TODO confirm.
this.setState({
moduleLoaded: false
});
console.log('Failed to load module', e);
}
// Unlike the non-worklet demo, a new source node is built per play, so
// Play can be pressed repeatedly.
play = () => {
stop();
gainNode = ctx.createGain();
sourceNode = ctx.createBufferSource();
sourceNode.buffer = buffer;
sourceNode.connect(gainNode);
gainNode.connect(ctx.destination);
sourceNode.start(0);
}
// Best-effort teardown: disconnect and stop, swallowing the
// InvalidStateError thrown when nothing is playing yet.
stop = () => {
try {
sourceNode.disconnect();
gainNode.disconnect();
sourceNode.stop(0);
} catch (e) {
console.log(e.message)
}
}
<button onClick="play()">Play</button>
<button onClick="stop()">Stop</button>
The piano MP3 you’ll hear is a 48000Hz / 32bits / 320kb audio recorded in studio.
Before filing any bugs, I need to make sure my code is correct. Maybe I’m not chaining the things the way it should.
When using a worklet, the default priority of the audio thread is normal. This is not good for audio as you've seen by running webaudio and then moving a window around.
What you can do is go to chrome://flags, search for worklet and enable the flag named "Use realtime priority thread for Audio Worklet". This should help on mac and windows. I don't know if it will make a difference on Android.
If you are hearing cracks with WebAudio without a worklet, as you do in your first codepen example, then that's unexpected and you really should file an issue on that.

Trim an audio file using javascript (first 3 seconds)

I have a question: can I trim an audio file that was recorded via JavaScript? I want to trim the first 3 seconds. I recorded the audio file using p5.js and merged the recorded file with karaoke audio using AudioContext(), and I want to trim it because of an unpleasant sound at the start.
You will probably need to read the audio into an AudioBuffer using something like AudioContext.decodeAudioData(), plug the AudioBuffer into a AudioBufferSourceNode. Then you can skip the first 3 seconds using the offset parameter of AudioBufferSourceNode.start() and record the resulting output stream.
Example code:
// Skips the first 3 seconds of "your.ogg": decode it into an AudioBuffer,
// start the AudioBufferSourceNode with a 3-second offset, and capture the
// resulting output stream with a MediaRecorder.
const source = audioCtx.createBufferSource();
const dest = audioCtx.createMediaStreamDestination();
const mediaRecorder = new MediaRecorder(dest.stream);
const request = new XMLHttpRequest();
request.open('GET', 'your.ogg', true);
request.responseType = 'arraybuffer';
request.onload = () => {
  const onDecoded = (buffer) => {
    source.buffer = buffer;
    source.connect(dest);
    mediaRecorder.start();
    // Second argument (offset) = 3: playback begins past the unwanted
    // intro, so only the trimmed audio reaches the recorder.
    source.start(audioCtx.currentTime, 3);
    // etc...
  };
  const onDecodeError = (e) => {
    console.log("Error with decoding audio data" + e.err);
  };
  audioCtx.decodeAudioData(request.response, onDecoded, onDecodeError);
};
request.send();

Decoding audio data of Mediarecorder is failed on chrome

I using MediaRecorder to record microphone. The default format of MediaRecorder in chrome is video/webm. Here is short example:
// Records ~3 seconds of microphone audio with MediaRecorder, assembles the
// chunks into a Blob, serves it back via a blob: URL, and decodes it with
// the Web Audio API to get at the raw channel samples.
navigator.mediaDevices.getUserMedia({audio: true,video: false})
.then(function(stream) {
var recordedChunks = [];
var recorder = new MediaRecorder(stream);
// 10 ms timeslice: dataavailable fires frequently with small chunks.
recorder.start(10);
recorder.ondataavailable = function (event) {
if (event.data.size > 0) {
recordedChunks.push(event.data);
} else {
// ...
}
}
// After 3 seconds, stop recording and decode what was captured.
setTimeout(function(){
recorder.stop();
var blob = new Blob(recordedChunks, {
"type": recordedChunks[0].type
});
var blobUrl = URL.createObjectURL(blob);
var context = new AudioContext();
// Fetch the blob back as an ArrayBuffer so decodeAudioData can parse it.
var request = new XMLHttpRequest();
request.open("GET", blobUrl, true);
request.responseType = "arraybuffer";
request.onload = function () {
context.decodeAudioData(
request.response,
function (buffer) {
if (!buffer) {
alert("buffer is empty!");
}
// Raw PCM samples of the first channel.
var dataArray = buffer.getChannelData(0);
//process channel data...
context.close();
},
function (error) {
alert(error);
}
);
};
request.send();
}, 3000);
})
.catch(function(error) {
console.log('error: ' + error);
});
This code throws the error "Uncaught (in promise) DOMException: Unable to decode audio data" at context.decodeAudioData, in Chrome only.
What is wrong here, and how can I fix it in Chrome?
Here is working example in plunker: plunker
Ok... There is no fix for the issue. This is a chrome bug and you can see it here
The fix of decoding audio from MediaRecorder will be available on chrome version 58. I already tested it on 58-beta and it works.

Web Audio Api - Download edited MP3

I'm currently editing my mp3 file with multiple effects like so
var mainVerse = document.getElementById('audio1');
// Rebuild the graph as source -> filters1[0] -> ... -> filters1[n] ->
// destination: detach the direct source->speakers link, then chain each
// filter onto the end, tracking the tail node in `s`.
var s = source;
source.disconnect(audioCtx.destination);
for (var i in filters1) {
s.connect(filters1[i]);
s = filters1[i];
}
// Reconnect the last node in the chain to the speakers.
s.connect(audioCtx.destination);
The mp3 plays accordingly on the web with the filters on it. Is it possible to create and download a new mp3 file with these new effects, using web audio api or any writing to mp3 container javascript library ? If not whats the best to solve this on the web ?
UPDATE - Using OfflineAudioContext
Using the sample code from https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext/oncomplete
I've tried using the offline node like so;
var audioCtx = new AudioContext();
// Offline context: 2 channels, 40 s at 44.1 kHz, rendered faster than
// real time into a buffer.
var offlineCtx = new OfflineAudioContext(2,44100*40,44100);
osource = offlineCtx.createBufferSource();
// Downloads Song1.mp3, decodes it, renders it through the offline context,
// then plays the rendered buffer live and tries to capture it with
// Recorder.js for download as WAV.
function getData() {
request = new XMLHttpRequest();
request.open('GET', 'Song1.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer;
osource.buffer = myBuffer;
osource.connect(offlineCtx.destination);
osource.start();
//source.loop = true;
offlineCtx.startRendering().then(function(renderedBuffer) {
console.log('Rendering completed successfully');
// Play the rendered result on a fresh live context.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = renderedBuffer;
song.connect(audioCtx.destination);
song.start();
// NOTE(review): the recorder is attached to `song` and exportWAV is
// called immediately — per the answer below, it should instead be
// attached to `osource` before startRendering, which is why the
// downloaded file comes out empty. TODO confirm.
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "filename.wav");
});
}).catch(function(err) {
console.log('Rendering failed: ' + err);
// Note: The promise should reject when startRendering is called a second time on an OfflineAudioContext
});
});
}
request.send();
}
// Run getData to start the process off
getData();
I'm still getting the recorder to download an empty file; I'm using the song source as the source for the recorder. The song plays fine with this code, but the recorder doesn't download it.
Use https://github.com/mattdiamond/Recorderjs to record a .wav file. Then use https://github.com/akrennmair/libmp3lame-js to encode it to .mp3.
There's a nifty guide here, if you need a hand: http://audior.ec/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
UPDATE
Try moving
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
so that it is located above the call to start rendering, and connect it to osource instead, like so:
rec = new Recorder(osource, {
workerPath: 'Recorderjs/recorderWorker.js'
});
osource.connect(offlineCtx.destination);
osource.start();
offlineCtx.startRendering().then(function(renderedBuffer) {
.....

JavaScript Web Audio: cannot properly decode audio data?

I'm trying to use the Web Audio API in JavaScript to load a sound into a buffer and play it. Unfortunately it doesn't work and I get the following error:
Uncaught TypeError: Failed to set the 'buffer' property on 'AudioBufferSourceNode':
The provided value is not of type 'AudioBuffer'.
I can pinpoint which line is giving me the error, but I don't know why. Here is the relevant code if it helps:
var audioContext;
// Decoded AudioBuffer for the note; filled in asynchronously by loadNote().
var playSoundBuffer;
// Sets up the (possibly prefixed) AudioContext and kicks off loading.
function init() {
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext();
loadNote();
}
// Fetches and decodes ./sounds/topE.wav into playSoundBuffer.
function loadNote() {
var request = new XMLHttpRequest();
request.open("GET", "./sounds/topE.wav", true);
request.responseType = "arraybuffer";
request.onload = function() {
audioContext.decodeAudioData(request.response, function(buffer) {
playSoundBuffer = buffer;
}, function(error) {
console.error("decodeAudioData error", error);
});
};
request.send();
// NOTE(review): this runs immediately after send(), before the async
// onload/decode callbacks fire, so playSoundBuffer is still undefined
// here — this is the source of the TypeError described above.
playSound();
}
// Plays whatever playSoundBuffer currently holds.
function playSound() {
var source = audioContext.createBufferSource();
source.buffer = playSoundBuffer; // This is the line that generates the error
source.connect(audioContext.destination);
source.start(0);
}
I believe the decodeAudioData method returns an AudioBuffer to its first callback function (its second parameter). I tried to save this AudioBuffer to the "playSoundBuffer" and then play it, but I get that error and I'm not sure why. Any help would be greatly appreciated.
The reason you get that error is because you are ignoring the asynchronous nature of your code and treat it as if it were synchronous. If you always log the contents of all relevant parts as the first step in debugging you will realize that at the time you try to process your buffer it's undefined and not an AudioBuffer at all. Tip: Always console.log everything until you know exactly how it behaves at any point.
// Loads ./sounds/topE.wav, decodes it into playSoundBuffer, and only then
// triggers playback — playback must wait for the async decode to finish.
function loadNote() {
  const xhr = new XMLHttpRequest();
  xhr.open("GET", "./sounds/topE.wav", true);
  xhr.responseType = "arraybuffer";
  xhr.onload = () => {
    const onDecoded = (buffer) => {
      playSoundBuffer = buffer;
      // The buffer is guaranteed to exist at this point, so it is now
      // safe to play it.
      playSound();
    };
    const onDecodeError = (error) => {
      console.error("decodeAudioData error", error);
    };
    audioContext.decodeAudioData(xhr.response, onDecoded, onDecodeError);
  };
  // Fire off the asynchronous request.
  xhr.send();
}

Categories

Resources