Getting audio data as array from microphone - javascript

I want to log audio data that i get from microphone:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var analyser = context.createAnalyser();
navigator.webkitGetUserMedia({ audio: true }, function (stream) {
var source = context.createMediaStreamSource(stream);
source.connect(analyser);
analyser.connect(context.destination);
setInterval(function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
console.log(array);
}, 1000);
}, function () { });
I'm talking in microphone but logged array contains only 0 values every time. Can you tell me what i'm doing wrong? Thanks

Tried in chrome canary and it works! Browser issue, hope they'll fix it soon

Related

(Javascript) Microphone and audio from mediastream are out of sync

I wrote a recorder that records microphone from getUsermedia and audio which is from local using Howler JS.
I created mediastream destination, and
connected each sources (mic, audio) to the destination.
audio seems fine, but microphone is delayed about 2seconds.
I can't figure out the problem.
could you help me guys?
var recorder;
const stop = document.getElementsByClassName("stop");
const record = document.getElementsByClassName("record");
let mediaDest = Howler.ctx.createMediaStreamDestination();
Howler.masterGain.connect(mediaDest);
function onRecordingReady(e) {
// 'e' has 'blob event'
//var audio = document.getElementById("audio");
audioBlob = e.data; // e.data has blob.
//audio.src = URL.createObjectURL(e.data);
}
let audioBlob;
let audioURL = "";
navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
let userMic = Howler.ctx.createMediaStreamSource(stream);
userMic.connect(mediaDest);
Howler.masterGain.connect(mediaDest);
recorder = new MediaRecorder(mediaDest.stream);
recorder.addEventListener("dataavailable", onRecordingReady);
recorder.addEventListener("stop", function () {
W3Module.convertWebmToMP3(audioBlob).then((mp3blob) => {
const downloadLink = document.createElement("a");
downloadLink.href = URL.createObjectURL(mp3blob);
downloadLink.setAttribute("download", "audio");
//downloadLink.click();
var audio = document.getElementById("audio");
audio.src = URL.createObjectURL(mp3blob);
console.log(mp3blob);
});
});
});
record[0].addEventListener("click", function () {
recorder.start();
});
stop[0].addEventListener("click", function () {
recorder.stop();
});
I figured out the solution.
I didn't know I could connect MediaStreamAudioSourceNode to GainNode.
If someone is suffering this issue, just connect one Node to another Node rather than connect each node to the destination.
I connected the sourceNode to the GainNode, and connected GainNode to the destination.
=========================
It was not the solution...
GainNode playback in realtime whenever input is present...so, even if i can remove the latency, annoying playback occurs.

How to visualize recorded audio from Blob with AudioContext?

I have successfully created an audio wave visualizer based on the mdn example here. I now want to add visualization for recorded audio as well. I record the audio using MediaRecorder and save the result as a Blob. However I cannot find a way to connect my AudioContext to the Blob.
This is the relevant code part so far:
var audioContext = new (window.AudioContext || window.webkitAudioContext)();
var analyser = audioContext.createAnalyser();
var dataArray = new Uint8Array(analyser.frequencyBinCount);
if (mediaStream instanceof Blob)
// Recorded audio - does not work
var stream = URL.createObjectURL(mediaStream);
else
// Stream from the microphone - works
stream = mediaStream;
var source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
mediaStream comes from either:
navigator.mediaDevices.getUserMedia ({
audio: this.audioConstraints,
video: this.videoConstraints,
})
.then( stream => {
mediaStream = stream;
}
or as a result of the recorded data:
mediaRecorder.addEventListener('dataavailable', event => {
mediaChunks.push(event.data);
});
...
mediaStream = new Blob(mediaChunks, { 'type' : 'video/webm' });
How do I connect the AudioContext to the recorded audio? Is it possible with a Blob? Do I need something else? What am I missing?
I've created a fiddle. The relevant part starts at line 118.
Thanks for help and suggestions.
EDIT:
Thanks to Johannes Klauß, I've found a solution.
See the updated fiddle.
You can use the Response API to create an ArrayBuffer and decode that with the audio context to create an AudioBuffer which you can connect to the analyser:
mediaRecorder.addEventListener('dataavailable', event => {
mediaChunks.push(event.data);
});
...
const arrayBuffer = await new Response(new Blob(mediaChunks, { 'type' : 'video/webm' })).arrayBuffer();
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
const source = audioContext.createBufferSource();
source.buffer = audioBuffer;
source.connect(analyser);

How to get audio stream from cordova-plugin-audioinput for realtime visualizer

I am using cordova-plugin-audioinput for recording audio in my cordova based app.
The documentation can be found here : https://www.npmjs.com/package/cordova-plugin-audioinput
I was previously using the MediaRecorder function of the browser to record audio but I switched to the plugin due to audio quality issues.
My problem is that I have a realtime visualizer of the volume during the record, my function used to work using an input stream from the media recorder
function wave(stream) {
audioContext = new AudioContext();
analyser = audioContext.createAnalyser();
microphone = audioContext.createMediaStreamSource(stream);
javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.8;
analyser.fftSize = 1024;
microphone.connect(analyser);
analyser.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
javascriptNode.onaudioprocess = function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var values = 0;
var length = array.length;
for (var i = 0; i < length; i++) {
values += (array[i]);
}
var average = values / length;
// use average for visualization
}
}
Now that I use the cordova-plugin-audioinput, I can't find a way to retrieve the stream from the microphone even though the documentation mention a "streamToWebAudio" parameter, I can't find a way to make it work.
Any insight on this ?
Thanks you in advance !
I believe you have to connect the analyser instead, such as
function wave(stream) {
var audioContext = new AudioContext();
var analyser = audioContext.createAnalyser();
analyser.connect(audioContext.destination);
audioinput.start({streamToWebAudio: true});
var dest = audioinput.getAudioContext().createMediaStreamDestination();
audioinput.connect(dest);
var stream = dest.stream;
var input = audioContext.createMediaStreamSource(stream);
input.connect(analyser);
analyser.onaudioprocess = function(){
...
}
}
As someone who stumbled upon this a few years later and wondered why there was an extra destination being made in the other answer, i now realise it's because Eric needed to get the input stream into the same AudioContext as the analyser.
Now, ignoring the fact that the spec for analyser has changed since the answer, and just focusing on getting the input stream into something useful. You could just pass the audiocontext into the audioinput config like so and save yourself a few steps
function wave(stream) {
var audioContext = new AudioContext();
var analyser = audioContext.createAnalyser();
analyser.connect(audioContext.destination);
audioinput.start({
streamToWebAudio: true,
audioContext: audioContext
});
audioinput.connect(analyser);
analyser.onaudioprocess = function(){
...
}
}

Why doesn't audio context work correctly on firefox?

A while ago I made this, which uses the browser's AudioContext to create an Analyser and give me audio data so that I can animate the music being played.
When I finished this at the time it worked perfectly in both Chrome and Firefox. When I tried it recently it still worked in Chrome but didn't work correctly in Firefox.
Whenever I try to skip through the song or change the source in Firefox the audio stops playing and I stop receiving Analyser data.
Initialization
//Creating audio
this.audio = document.createElement("audio");
this.audio.setAttribute("loop", true);
this.audio.volume=1;
//Important part
var audioctx = new AudioContext();
var audioSrc = audioctx.createMediaElementSource(this.audio);
var analyser = audioctx.createAnalyser();
var gainNode = audioctx.createGain();
var fakeGainNode = audioctx.createGain();
analyser.smoothingTimeConstant=0.5;
analyser.minDecibels=-100;
analyser.maxDecibels=0;
analyser.fftSize=2048;
//Connecting audioSrc to gainNode
audioSrc.connect(fakeGainNode);
audioSrc.connect(gainNode);
//Connecting gainNode to analyser
gainNode.connect(audioctx.destination);
//Connecting analyser to destination
//analyser.connect(gainNode);
//connect fake gain to analyser
fakeGainNode.connect(analyser);
gainNode.gain.value=0.3;
fakeGainNode.gain.value=0.05;
var size=analyser.frequencyBinCount;
var frequencyData = new Uint8Array(size);
Changing source
this.next = function(){
var next = ml.next();
this.songName.innerHTML=next.name;
this.changeSrc(next.url);
if(playing)
this.play();
}
this.prev = function(){
var prev = ml.prev();
this.songName.innerHTML=prev.name;
this.changeSrc(prev.url);
if(playing)
this.play();
}
this.changeSrc = function(src){
this.audio.src=src;
}
So the question is. Can I do anything to fix this on Firefox or is this just a bug and I should just stick to Chrome?
Edit: I have put console logs throughout the functions that were problematic but all the logs come through and no errors are thrown

AudioContext Live Streaming

What is the correct way to play live stream with use of WebAudio API.
I am trying with following code, however all I see is that MP3 is being downloaded, but not played; probably MediaElementSource expect a file, not continuous stream.
window.AudioContext = window.AudioContext||window.webkitAudioContext;
context = new AudioContext();
var audio = new Audio();
var source = context.createMediaElementSource(audio);
source.connect(context.destination);
audio.src = '<live mp3 stream>';
audio.play();
try
audio.addEventListener('canplaythrough', function() {
audio.play();
}, false);
You maybe miss audio.crossOrigin = "anonymous" for hosted live stream with CORS enabled.
This is my whole working solution, MP3 as well:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
const context = new AudioContext();
const audio = new Audio();
audio.crossOrigin = 'anonymous'; // Useful to play hosted live stream with CORS enabled
const sourceAudio = context.createMediaElementSource(audio);
sourceAudio.connect(context.destination);
const playHandler = () => {
audio.play();
audio.removeEventListener('canplaythrough', playHandler);
};
const errorHandler = e => {
console.error('Error', e);
audio.removeEventListener('error', errorHandler);
};
audio.addEventListener('canplaythrough', playHandler, false);
audio.addEventListener('error', errorHandler);
audio.src = '<live mp3 stream>';

Categories

Resources