Microphone audio capture in Chrome - javascript

I'm trying to get an audio stream using navigator.mediaDevices.getUserMedia.
This works fine in Firefox, but unfortunately not in Chrome.
I have done all of this on a secure page (thus HTTPS), but Chrome never calls the onAudioProcess function.
In the console, Firefox shows the audio chunks captured by the microphone, but Chrome does not show anything.
Does anybody have an idea? That would be great.
A working JSFiddle example is here:
https://jsfiddle.net/aminekassir/3fjxq7wr/2/
<script src="https://code.jquery.com/jquery-1.12.4.min.js"></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<input type="button" id="btn_001" value="Click" />
```html
```javascript
console.log(adapter.browserDetails.browser);

$(function () {
    $('#btn_001').click(function () {
        console.log('start recording');
        startRecording();
    });
});

const mediaConstraints = window.constraints = { audio: true, video: false };
const micBufferSize = 512;
var audioCtx;

function startRecording() {
    if (typeof AudioContext === 'function') {
        console.log('AudioContext defined');
        audioCtx = new AudioContext();
    } else if (typeof webkitAudioContext === 'function') {
        console.log('webkitAudioContext defined');
        audioCtx = new webkitAudioContext();
    } else if (typeof mozAudioContext === 'function') {
        console.log('mozAudioContext defined');
        audioCtx = new mozAudioContext();
    } else {
        console.error('Web Audio not supported!');
    }
    console.log('audioCtx', audioCtx);
    audioCtx.resume();
    navigator.mediaDevices.getUserMedia(mediaConstraints).then(onMicrophoneStream).catch(onMicrophoneStreamError);
    console.log('hasOwnProperty("createScriptProcessor")', window.AudioContext.prototype.hasOwnProperty('createScriptProcessor'));

    function onMicrophoneStream(stream) {
        console.log('onMicrophoneStream', stream);
        let micStream = audioCtx.createMediaStreamSource(stream);
        var scriptProcessorNode = audioCtx.createScriptProcessor(micBufferSize, 1, 1);
        scriptProcessorNode.onaudioprocess = onAudioProcess;
        micStream.connect(scriptProcessorNode);
    }

    function onMicrophoneStreamError(e) {
        console.log('onMicrophoneStreamError', e);
    }

    function onAudioProcess(e) {
        //console.log('onAudioProcess');
        if (audioCtx.state === 'suspended') {
            audioCtx.resume();
        }
        var micOutBuff = e.inputBuffer.getChannelData(0); // incoming microphone stream is Float32
        console.log(micOutBuff);
    }
}
```
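A likely cause, hedged since it cannot be confirmed from the snippet alone: Chrome historically only pulls audio through a ScriptProcessorNode when the node is connected onward to the context's destination, whereas Firefox fires onaudioprocess regardless. A minimal sketch of that change inside onMicrophoneStream:

```javascript
// Sketch (assumption, not from the original post): complete the audio graph
// so Chrome actually drives the processor.
function onMicrophoneStream(stream) {
    console.log('onMicrophoneStream', stream);
    let micStream = audioCtx.createMediaStreamSource(stream);
    var scriptProcessorNode = audioCtx.createScriptProcessor(micBufferSize, 1, 1);
    scriptProcessorNode.onaudioprocess = onAudioProcess;
    micStream.connect(scriptProcessorNode);
    scriptProcessorNode.connect(audioCtx.destination); // added: Chrome needs the graph to reach the destination
}
```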

Related

Why doesn't my audio buffer play any sound? [Web Audio API]

My audio buffer doesn't seem to work and I don't know why. I have already tried opening it in Chrome and in Safari, but nothing happens. I also checked that everything is OK with my audio file "Audio2.mp3".
"use strict"
//Create the Audio Context, compatible with older Firefox and Chrome browsers
function audioContextCheck(){
if (typeof AudioContext !== "undefined"){
return new AudioContext();
}
else if (typeof webkitAudioContext !== "undefined") {
return new webkitAudioContext();
}
else if (typeof mozAudioContext !== "undefined") {
return new mozAudioContext();
}
else {
throw new Error('AudioContext not supported');
}
}
var audioContext = audioContextCheck();
//Create audio buffer to the audio file with the XMLHttpRequest
var audioBuffer;
var getSound = new XMLHttpRequest();
getSound.open("get", "Audio2.mp3", true);
getSound.responseType = "arraybuffer";
getSound.onload = function(){
audioContext.decodeAudioData(getSound.response, function(buffer) {
audioBuffer = buffer;
});
};
getSound.send();
//EventListener
window.addEventListener("load", playback);
//Now create the function necessary to play back the audio buffer
function playback(){
var playSound = audioContext.createBufferSource();
playSound.buffer = audioBuffer;
playSound.connect(audioContext.destination);
playSound.start(audioContext.currentTime);
}
Because you trigger playback() before audioBuffer has been defined.
Wait until the audio XHR has fully loaded and audioBuffer has been assigned, then call playback(), and it will work as expected.
E.g.:
```javascript
// Create the Audio Context, compatible with older Firefox and Chrome browsers
function audioContextCheck() {
    if (typeof AudioContext !== "undefined") {
        return new AudioContext();
    } else if (typeof webkitAudioContext !== "undefined") {
        return new webkitAudioContext();
    } else if (typeof mozAudioContext !== "undefined") {
        return new mozAudioContext();
    } else {
        throw new Error('AudioContext not supported');
    }
}
var audioContext = audioContextCheck();

// Create the audio buffer for the audio file with XMLHttpRequest
var audioBuffer;
var getSound = new XMLHttpRequest();
getSound.open("get", "https://cdn.rawgit.com/devildrey33/devildrey33/ddb01d71/Ejemplos/BannerTest/Canciones/LevenRain_-_ActionMan_Versus_The_CyberParasites.mp3", true);
getSound.responseType = "arraybuffer";
getSound.onload = function () {
    document.getElementById("xhrStatus").textContent = "Loaded";
    audioContext.decodeAudioData(getSound.response, function (buffer) {
        audioBuffer = buffer;
        playback(); // <--- Start the playback after `audioBuffer` is defined.
    });
};
getSound.send();

// EventListener
// window.addEventListener("load", playback);

// Now create the function necessary to play back the audio buffer
function playback() {
    var playSound = audioContext.createBufferSource();
    playSound.buffer = audioBuffer;
    playSound.connect(audioContext.destination);
    playSound.start(audioContext.currentTime);
}
```

```html
<p id="xhrStatus"> Loading the audio.. </p>
```

MediaRecorder API playback through the Web Audio API (not an audio element)

I'm having an issue getting a captured blob from the MediaRecorder API to play back in Chrome (it works in Firefox). I'm not sure if it's a bug in Chrome.
The error it reports:
```
undefined:1 Uncaught (in promise) DOMException: Unable to decode audio data
```
```javascript
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia);

var context = new AudioContext();
var record = document.querySelector('#record');
var stop = document.querySelector('#stop');

if (navigator.getUserMedia) {
    console.log('getUserMedia supported.');
    var constraints = {
        audio: true
    };
    var chunks = [];
    var onSuccess = function (stream) {
        var mediaRecorder = new MediaRecorder(stream);
        record.onclick = function () {
            mediaRecorder.start();
            console.log(mediaRecorder.state);
            console.log("recorder started");
            record.style.background = "red";
            stop.disabled = false;
            record.disabled = true;
        }
        stop.onclick = function () {
            mediaRecorder.stop();
            console.log(mediaRecorder.state);
            console.log("recorder stopped");
            record.style.background = "";
            record.style.color = "";
            stop.disabled = true;
            record.disabled = false;
        }
        mediaRecorder.onstop = function (e) {
            console.log("onstop() called.", e);
            var blob = new Blob(chunks, {
                'type': 'audio/wav'
            });
            chunks = [];
            var reader = new FileReader();
            reader.addEventListener("loadend", function () {
                context.decodeAudioData(reader.result, function (buffer) {
                    playsound(buffer);
                },
                function (e) {
                    console.log("error ", e)
                });
            });
            reader.readAsArrayBuffer(blob);
        }
        mediaRecorder.ondataavailable = function (e) {
            chunks.push(e.data);
        }
    }
    var onError = function (err) {
        console.log('The following error occurred: ' + err);
    }
    navigator.getUserMedia(constraints, onSuccess, onError);
} else {
    console.log('getUserMedia not supported on your browser!');
}

function playsound(thisbuffer) {
    var source = context.createBufferSource();
    source.buffer = thisbuffer;
    source.connect(context.destination);
    source.start(0);
}
```
<button id="record">record</button>
<button id="stop">stop</button>
I have used your code exactly as it is, and everything works fine in the Chrome browser.
This issue was fixed when the bug https://codereview.chromium.org/1579693006/ was closed and added to the Chrome pipeline. It is no longer an issue.
To close the loop on this, I suspect this was due to the Chrome bug documented in a comment above. It appears this bug was fixed several years ago and should no longer be a problem, as Web Audio now uses ffmpeg for decoding.
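A side note, hedged since the thread above indicates the decoder bug itself is fixed: mismatched container labels can still trip decodeAudioData on some builds. Chrome's MediaRecorder produces WebM/Opus rather than WAV, so requesting a concrete mimeType and labeling the Blob with it keeps the data and its declared type consistent. A minimal sketch, reusing names from the question's code:

```javascript
// Assumption: the browser supports WebM/Opus recording.
var mimeType = 'audio/webm;codecs=opus';
if (MediaRecorder.isTypeSupported(mimeType)) {
    var mediaRecorder = new MediaRecorder(stream, { mimeType: mimeType });
    mediaRecorder.onstop = function () {
        // Label the Blob with the type that was actually recorded.
        var blob = new Blob(chunks, { type: mimeType });
        chunks = [];
        // ... hand the blob to FileReader/decodeAudioData as before
    };
}
```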

WebRTC file transfer and syntax querySelector

I have never used WebRTC, so I don't have any understanding of what its syntax looks like.
I came across a couple of snippets that I don't think are jQuery. Would anyone mind telling me whether this is WebRTC-specific code?
```javascript
document.querySelector('#stop-recording').onclick = function () {
    this.disabled = true;
    mediaRecorder.stop();
    mediaRecorder.stream.stop();
    document.querySelector('#pause-recording').disabled = true;
    document.querySelector('#start-recording').disabled = false;
};
```
What is querySelector?
I got the code from this URL: https://github.com/streamproc/MediaStreamRecorder/blob/master/demos/video-recorder.html
I'm looking for a bit of info. Thanks.
You can refer to the following code:
```javascript
var audio_context;
var recorder;

$(function () {
    try {
        // Audio Recording
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        navigator.getUserMedia = (navigator.getUserMedia ||
            navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia ||
            navigator.msGetUserMedia);
        window.URL = window.URL || window.webkitURL;
        var recorderObject;
        var VM_IDForAudio = "";
        var audio_context = new AudioContext;
        var localMediaStreamForAudio;
        var audioStream;

        // Audio-Video Recording (Firefox)
        var videoFile = !!navigator.mozGetUserMedia ? 'video.gif' : 'video.webm';
        var inner = document.querySelector('.inner');
        var videoElement = document.getElementById('webcamVideo');
        var VM_IDForAudioVideo = "";
        var localMediaStreamForAudioVideo;

        // Disable Live Webcam Button
        $("#btnShowWebcam").prop("disabled", true);
    } catch (e) {
        //alert('No web audio support in this browser!');
        console.log("No web audio support in this browser!");
    }

    // Audio Recording
    $("[id$='btnAudioRecord']").click(function () {
        //VM_IDForAudio = $("[id$='hdVMID']").val();
        VM_IDForAudio = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
            patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
        $this = $(this);
        $recorder = $this.parent();
        if ($("[id$='btnAudioRecord']").val() == "Record Audio") {
            if (VM_IDForAudio != "") {
                $this.attr("value", "Stop Record");
                navigator.getUserMedia({ audio: true }, function (stream) {
                    if (window.IsChrome) stream = new window.MediaStream(stream.getAudioTracks());
                    audioStream = stream;
                    recorder = window.RecordRTC(stream, {
                        type: 'audio'
                    });
                    recorder.startRecording();
                }, function () { });
            } else {
                // Select Patient
            }
        } else {
            $this.attr("value", "Record Audio");
            if (recorder)
                recorder.stopRecording(function (url) {
                    var blob = recorder.getBlob(); // fixed: `blob` was undefined in the original snippet
                    var reader = new window.FileReader();
                    reader.readAsDataURL(blob);
                    reader.onloadend = function () {
                        base64data = reader.result;
                        PageMethods.SaveAudioRecording(base64data, VM_IDForAudio);
                        audioStream.stop();
                    }
                });
        }
    });

    // Audio-Video Recording
    $("[id$='btnAudioVideoRecord']").click(function () {
        //VM_IDForAudioVideo = $("[id$='hdVMID']").val();
        VM_IDForAudioVideo = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
            patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
        $this = $(this);
        if ($("[id$='btnAudioVideoRecord']").val() == "Record Aud/Vid") {
            if (VM_IDForAudioVideo != "") {
                $this.attr("value", "Stop Record");
                captureUserMedia(function (stream) {
                    window.audioVideoRecorder = window.RecordRTC(stream, {
                        type: 'video', // don't forget this; otherwise you'll get video/webm instead of audio/ogg
                        canvas: {
                            width: 320,
                            height: 240
                        }
                    });
                    localMediaStreamForAudioVideo = stream;
                    $("#btnShowWebcam").prop("disabled", false);
                    window.audioVideoRecorder.startRecording();
                });
            } else {
                // Select Patient
            }
        } else {
            $this.attr("value", "Record Aud/Vid");
            $("#btnShowWebcam").prop("disabled", true);
            window.audioVideoRecorder.stopRecording(function (url) {
                convertStreams(audioVideoRecorder.getBlob(), videoFile, VM_IDForAudioVideo);
            });
            localMediaStreamForAudioVideo.stop();
        }
    });
});
```
And use the RecordRTC JavaScript library.
For more, go through http://recordrtc.org/RecordRTC.html;
for a live demo, see https://www.webrtc-experiment.com/RecordRTC/AudioVideo-on-Firefox.html
You should check the webtorrent GitHub repository; there is a detailed description of WebRTC and how it is implemented. Also check out the official webtorrent website.
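As for the querySelector part of the question: document.querySelector is a standard DOM method, not anything WebRTC-specific. It returns the first element in the document matching a CSS selector, or null. A minimal sketch of the rough jQuery equivalence, assuming jQuery is loaded:

```javascript
// Standard DOM: first element matching the CSS selector, or null.
var stopBtn = document.querySelector('#stop-recording');

// Roughly the same lookup via jQuery, which also uses CSS selectors.
var $stopBtn = $('#stop-recording'); // jQuery wrapper around matched elements
var sameBtn = $stopBtn[0];           // the underlying DOM element
```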

How MediaSource readyState changes its state

After going through the online documentation, I wrote the code below. Whenever the file is small it works well, but with bigger files I get the error: Uncaught InvalidStateError: An attempt was made to use an object that is not, or is no longer, usable. This is because mediaSource.readyState has changed to "closed". I am not able to figure out which statement changes readyState from "closed" to "open" and then back to "closed". I logged the readyState value at all places; it changes to "open" inside the mediaCallback method. What triggers this change?
```javascript
WT = (function () {
    var mediaSource = new MediaSource();

    function fileSelected(evt) {
        var video = document.getElementById("vid");
        var file = evt.target.files[0];
        if (file) {
            var NUM_CHUNKS = 10000, i = 0;
            var chunkSize = Math.ceil(file.size / NUM_CHUNKS);
            console.log(mediaSource.readyState);
            video.src = window.URL.createObjectURL(mediaSource);
            console.log(mediaSource.readyState);
            var queue = [];
            var mediaCallback = function (e) {
                console.log(mediaSource.readyState);
                var sourceBuffer = mediaSource.addSourceBuffer('video/mp4; codecs="avc1.42E01E, mp4a.40.2"');
                sourceBuffer.addEventListener('updateend', function () {
                    console.log("update end");
                    if (queue.length) {
                        sourceBuffer.appendBuffer(queue.shift());
                    }
                }, false);
                (function readChunk(i) {
                    var reader = new FileReader();
                    reader.onload = function (et) {
                        var buff = new Uint8Array(et.target.result);
                        if (!sourceBuffer.updating && queue.length === 0) {
                            sourceBuffer.appendBuffer(buff);
                        } else {
                            queue.push(buff);
                        }
                        if (i == NUM_CHUNKS - 1) {
                            console.log("end of stream");
                            mediaSource.endOfStream();
                        } else {
                            if (video.paused) {
                                console.log("video paused");
                                video.play(); // Start playing after 1st chunk is appended.
                            }
                            readChunk(++i);
                        }
                    };
                    var startByte = chunkSize * i;
                    var chunk = file.slice(startByte, startByte + chunkSize);
                    reader.readAsArrayBuffer(chunk);
                })(i);
            };
            mediaSource.addEventListener('sourceopen', mediaCallback, false);
            mediaSource.addEventListener('sourceended', function (e) {
                console.log('ended: mediaSource readyState: ' + this.readyState);
            }, false);
            mediaSource.addEventListener('sourceclose', function (e) {
                console.log('closed: mediaSource readyState: ' + this.readyState);
            }, false);
        } else {
            console.log("NO FILE SELECTED");
        }
    }

    return {
        init: function () {
            console.log("Onclick");
            if (window.File && window.FileReader && window.FileList && window.Blob) {
                $("document").ready(function () {
                    $("#fileToUpload").on("change", fileSelected);
                });
            } else {
                alert('The File APIs are not fully supported in this browser.');
            }
        }
    };
})();
WT.init();
```
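On the direct question: a MediaSource starts out "closed"; attaching the object URL created from it to the video element is what kicks off the "closed" to "open" transition, and the browser signals it by firing 'sourceopen' (which is why the callback sees "open"). It goes back to "ended"/"closed" after endOfStream(), when the element detaches, or after a fatal error while appending. A minimal sketch of that lifecycle, assuming a video element with id "vid":

```javascript
var mediaSource = new MediaSource();
var video = document.getElementById('vid');

console.log(mediaSource.readyState); // "closed" -- nothing attached yet

mediaSource.addEventListener('sourceopen', function () {
    // Fired asynchronously once the element has attached the source.
    console.log(mediaSource.readyState); // "open"
});

// This attachment is the statement that triggers closed -> open.
video.src = window.URL.createObjectURL(mediaSource);
```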

Disable back button or set back button to close app on Android

Hi, I am making a program that uses Google Chrome to open my Android camera. I need to know how to close Chrome completely when the user presses the back button, or else disable the back button while in Chrome. This is my code. I would like to do it with JavaScript.
```html
<body style="overflow: hidden">
    <video muted autoplay id="myvideo" style="width: 100%; height: auto"></video>
    <div class='select'>
        <select style="visibility: hidden" id='videoSource'>
        </select>
    </div>
</body>
<script type="text/javascript">
    // Camera
    var videoElement = document.querySelector("video");
    var videoSelect = document.querySelector("select#videoSource");
    navigator.getUserMedia = navigator.getUserMedia ||
        navigator.webkitGetUserMedia || navigator.mozGetUserMedia;

    // Checks for the sources available of cameras
    function gotSources(sourceInfos) {
        for (var i = 0; i != sourceInfos.length; ++i) {
            var sourceInfo = sourceInfos[i];
            var option = document.createElement("option");
            option.value = sourceInfo.id;
            if (sourceInfo.kind === 'video') {
                option.text = sourceInfo.label || 'camera ' + (videoSelect.length + 1);
                videoSelect.appendChild(option);
                if (i == sourceInfos.length - 1) {
                    option.selected = true;
                    start();
                }
            } else {
                console.log('Some other kind of source: ', sourceInfo);
            }
        }
    }

    if (typeof MediaStreamTrack === 'undefined') {
        alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
    } else {
        MediaStreamTrack.getSources(gotSources);
    }

    // Checks if everything is successful
    function successCallback(stream) {
        window.stream = stream; // make stream available to console
        videoElement.src = window.URL.createObjectURL(stream);
        videoElement.play();
    }

    // Checks if an error occurred
    function errorCallback(error) {
        console.log("navigator.getUserMedia error: ", error);
    }

    // Starts the streaming of the camera
    function start() {
        if (!!window.stream) {
            videoElement.src = null;
            window.stream.stop();
        }
        var videoSource = videoSelect.value;
        var constraints = {
            video: {
                optional: [{ sourceId: videoSource }]
            }
        };
        navigator.getUserMedia(constraints, successCallback, errorCallback);
    }
    videoSelect.onchange = start;
</script>
```
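A note on the question itself, hedged because no web API can close the browser or truly disable the hardware back button: the usual JavaScript-only workaround is to push an extra history entry and intercept popstate when the user presses back. A minimal sketch, assuming it runs on page load:

```javascript
// Push a sentinel entry so the first back press stays on this page.
history.pushState(null, '', location.href);

window.addEventListener('popstate', function () {
    // The user pressed back; re-push to keep them on the page,
    // or run cleanup (e.g. stop the camera stream) before leaving.
    history.pushState(null, '', location.href);
});
```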
