I am using the code below to record audio in the browser, then use socket.io to send the resulting blob to a node.js server. The data is then saved to a mysql database or as an .ogg file.
The playback from either is strangely garbled at times - in different moments the sound will suddenly accelerate then return to normal. I've not been able to find anything online about this issue.
UPDATE: Further testing leads me to believe it is a hardware/software/compatibility with the Asus D1 soundcard I'm using. recording off of other devices does not result in the same behavior.
// Module-level handle so stopRecord() can reach the active recorder.
var mediaRecorder;

// Ask for microphone access and start recording. `recordData` carries the
// id/type that are forwarded to the server when recording stops.
var startRecord = function (recordData) {
  // Shim the prefixed, callback-style getUserMedia implementations.
  if (!navigator.getUserMedia)
    navigator.getUserMedia = navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia || navigator.msGetUserMedia;

  if (navigator.getUserMedia) {
    navigator.getUserMedia({ audio: true }, success, function (err) {
      alert('Error capturing audio.');
    });
  } else alert('getUserMedia not supported in this browser.');

  function success(stream) {
    var chunks = [];
    mediaRecorder = new MediaRecorder(stream);

    // BUG FIX: ondataavailable must be registered BEFORE recording ends.
    // The original assigned it inside onstop — i.e. after the final
    // dataavailable event had already fired — so `chunks` was always empty.
    mediaRecorder.ondataavailable = function (e) {
      chunks.push(e.data);
    };

    mediaRecorder.onstop = function (e) {
      stream.getAudioTracks()[0].stop();
      // NOTE(review): Chrome actually encodes audio/webm; labelling the blob
      // "audio/ogg" here does not transcode it (see the answers below).
      var blob = new Blob(chunks, { 'type': 'audio/ogg; codecs=opus' });
      if (recordData.id) {
        socket.emit('audio', {
          'id': recordData.id,
          'audio': blob,
          'option': recordData.type
        });
      }
    };

    mediaRecorder.start();
  }
};
// Stop the active recording, if any.
var stopRecord = function (recordData) {
  // BUG FIX: MediaRecorder.state is a read-only attribute — the original
  // `mediaRecorder.state = 'recording'` was a silent no-op (and a TypeError
  // in strict mode). Guard instead, so calling stop twice (or before any
  // recording started) cannot throw InvalidStateError.
  if (mediaRecorder && mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop();
  }
};
Related
I have been trying to record audio in OGG format on Chrome and send it back to the server, but it always arrives in video/ogg format. Here is what I have:
Capturing audio:
let chunks = [];
let recording = null;
let mediaRecorder = new MediaRecorder(stream);

// Collect every chunk the recorder produces...
mediaRecorder.ondataavailable = (e) => {
  chunks.push(e.data);
};

// ...and assemble them into a single blob once recording stops.
mediaRecorder.onstop = () => {
  recording = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
};

mediaRecorder.start();
Sending it to the server:
// Package the recorded blob for upload. NOTE(review): the Content-Type the
// server sees for this part is the Blob's own `type`, fixed when the Blob was
// constructed — not something FormData or the AJAX call decides.
let data = new FormData();
data.append('audio', recording);
jQuery.ajax(...);
The blob gets to the backend, but always in video/ogg!
I ended up using kbumsik/opus-media-recorder, solved the issue for me. A drop-in replacement for MediaRecorder.
You need to remove the VideoTrack from your MediaStream:
const input = document.querySelector("video");
const stop_btn = document.querySelector("button");

// Run once, on the first `playing` event of the source <video>.
input.onplaying = (evt) => {
  input.onplaying = null;
  console.clear();

  const stream = input.captureStream
    ? input.captureStream()
    : input.mozCaptureStream();

  // Drop every video track so the recorder only ever sees audio.
  for (const track of stream.getVideoTracks()) {
    track.stop();              // stop it, so the browser doesn't feed it for nothing
    stream.removeTrack(track); // and detach it from the MediaStream
  }

  const recordedChunks = [];
  const recorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
  recorder.ondataavailable = (evt) => recordedChunks.push(evt.data);
  recorder.onstop = (evt) => exportFile(new Blob(recordedChunks));
  stop_btn.onclick = (evt) => recorder.stop();
  stop_btn.disabled = false;
  recorder.start();
};
console.log( "play the video to start recording" );
// Swap the recording into the <video> element and retire the stop button.
function exportFile( blob ) {
  stop_btn.remove();
  input.src = URL.createObjectURL( blob );
  // BUG FIX: user-facing message typo — "recoded" → "recorded".
  console.log( "video element now playing recorded file" );
}
video { max-height: 150px; }
<video src="https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm" controls crossorigin></video>
<button disabled>stop recording</button>
And since StackOverflow's null origined iframes don't allow for safe download links, here is a fiddle with a download link.
You need to set the mimeType of the MediaRecorder. Otherwise the browser will pick whatever format it likes best to encode the media.
let mediaRecorder = new MediaRecorder(stream, { mimeType: 'my/mimetype' });
To be sure that the browser can actually encode the format you want you could use isTypeSupported().
console.log(MediaRecorder.isTypeSupported('my/mimetype'));
Chrome for example doesn't support "audio/ogg; codecs=opus" but supports "audio/webm; codecs=opus". Firefox supports both. Safari none of them.
Once you've configured the MediaRecorder you can use its mimeType when creating the blob.
recording = new Blob(chunks, { 'type' : mediaRecorder.mimeType });
I'm having an issue with decodeAudioData method using Web Audio API to playback in Chrome (it works fine in Firefox)-
I am sending the audio buffer recorded by media recorder back from the server.
Server side
wss = new WebSocketServer({server: server}, function () {});
wss.on('connection', function connection(ws) {
ws.binaryType = "arraybuffer";
ws.on('message', function incoming(message) {
if ((typeof message) == 'string') {
console.log("string message: ", message);
} else {
console.log("not string: ", message);
ws.send(message);
}
});
});
Client side
// Shim prefixed implementations.
window.AudioContext = window.AudioContext||window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia);

var context = new AudioContext();
var mediaRecorder;
var chunks = [];    // recorded chunks, reset after each send
var startTime = 0;  // scheduled-playback cursor for gapless chunk playback

// BUG FIX: `ws` was assigned without a declaration, creating an implicit
// global (and a ReferenceError in strict mode / ES modules).
var ws = new WebSocket(url);
ws.binaryType = "arraybuffer";
ws.onmessage = function(message) {
  if (message.data instanceof ArrayBuffer) {
    // NOTE(review): decodeAudioData expects a complete, self-contained
    // container; chunked/mislabelled data is the usual cause of the
    // "Unable to decode audio data" failure described below.
    context.decodeAudioData(message.data, function(soundBuffer){
      playBuffer(soundBuffer);
    },function(x) {
      console.log("decoding failed", x)
    });
  } else {
    console.log("not arrayBuffer", message.data);
  }
};

createMediaRecorder();
// Request microphone access and build the module-level MediaRecorder.
function createMediaRecorder() {
  if (!navigator.getUserMedia) {
    alert('getUserMedia not supported on your browser!');
    return;
  }
  console.log('getUserMedia supported.');
  var constraints = { "audio": true };
  navigator.getUserMedia(
    constraints,
    function (stream) {
      // Opus in a WebM container at 128 kbit/s. (The original spelled the
      // mimeType 'audio/webm\;codecs=opus' — "\;" is just ";", so the
      // runtime string is identical.)
      mediaRecorder = new MediaRecorder(stream, {
        audioBitsPerSecond: 128000,
        mimeType: 'audio/webm;codecs=opus'
      });
    },
    function (err) {
      console.log('The following error occured: ' + err);
    }
  );
}
// Schedule `buf` to start exactly when the previously queued buffer ends,
// so consecutive chunks play back gapless.
function playBuffer(buf) {
  var source = context.createBufferSource();
  source.buffer = buf;
  source.connect(context.destination);
  // First chunk: give the context 100 ms of headroom before starting.
  // (The original comment said "50ms", but 0.1 s is 100 ms.)
  if (startTime == 0) {
    startTime = context.currentTime + 0.1;
  }
  source.start(startTime);
  startTime += source.buffer.duration;
}
// Attach the chunk collector and begin capturing.
function startRecording() {
  getRecordedData();     // wires up ondataavailable (fires once stop() is called)
  mediaRecorder.start();
}
// Wire up chunk collection: each dataavailable event appends its payload.
function getRecordedData() {
  mediaRecorder.ondataavailable = (e) => {
    console.log('ondataavailable: ', e.data);
    chunks.push(e.data);
  };
}
// Assemble the recorded chunks into one blob and ship it over the socket.
function sendRecordedData() {
  // BUG FIX: the recorder produces audio/webm on Chrome, but the blob was
  // labelled 'audio/ogg' — that mislabelling is what makes the round-tripped
  // buffer fail in Chrome's decodeAudioData. Use the recorder's real
  // mimeType, falling back to the old label if it is unavailable.
  var mimeType = (mediaRecorder && mediaRecorder.mimeType) || 'audio/ogg';
  var superBuffer = new Blob(chunks, {type: mimeType});
  ws.send(superBuffer);
}
// Stop the recorder, then send the recording and reset the chunk buffer.
function stopRecording() {
  // BUG FIX: the handler must be in place before stop() is called. The
  // original assigned onstop after stop(), so a synchronously dispatched
  // stop event could be missed entirely.
  mediaRecorder.onstop = function(e) {
    sendRecordedData();
    chunks = [];
  };
  mediaRecorder.stop();
}
While testing with firefox working fine but with chrome generate the following error:
Uncaught (in promise) DOMException: Unable to decode audio data
Any suggestion will be helpful, thanks in advance.
I encountered the same error. Updating Chrome did not fix it. However, debugging in Firefox instead gave me a much more descriptive error:
The buffer passed to decodeAudioData contains an unknown content type.
Uncaught (in promise) DOMException: MediaDecodeAudioDataUnknownContentType
Uncaught (in promise) DOMException: The given encoding is not supported.
Which by the way was occurring because the .mp3 file I wanted to load wasn't found. So I was making a web request, receiving the 404 HTML page, and trying to load that as an mp3, which failed as an 'unsupported audio format'.
I encountered the same issue. Upgrading Chrome to the latest release, eg 85.0.4183, resolved the issue for me.
I Had the same error with createjs (I used to load up the files).
It was integration problem with Internet Download Manager (IDM) ... I've solved it by disabling IDM !
I've been playing around with MediaRecorder to save a MediaStream created by getUserMedia. I'm really happy with the results; however, I need something similar with better cross-browser support.
Here is some sample code for how I'm using MediaRecorder (just to give you some context):
var mediaRec;
navigator.getUserMedia({
audio:true
},function(stream){
mediaRec=new MediaRecorder(stream);
mediaRec.start(10000);
mediaRec.ondataavailable=function(e){
};
},function(err){});
It seems that MediaRecorder only works in the Firefox browser and the Firefox OS.
However MediaRecorder is part of the W3C Specification, and Google Chrome has stated it intends to support it in a future release, but what options do I have in the meantime?
I know that plug-ins such as Flash and Silverlight can achieve the same thing that MediaRecorder does, but what I need is a JavaScript solution.
Can anyone help?
All the other options that are available will be utilizing high-level APIs and implemented at the Browser/JavaScript level. Thus, none will really be comparable to the MediaRecorder API provided by Firefox as it is integrated into the browser and has the benefit of being "lower level" in the browser's implementation.
One option I know that works for sure(though utilizes the Web Audio API) is Matt Diamond's Recorderjs.
And Example using Recorderjs taken from Matt's github.
// Shared AudioContext, created in the window.onload init below.
var audio_context;
// Recorderjs instance, created once getUserMedia succeeds.
var recorder;
// Route the microphone stream into the Web Audio graph and hand the source
// node to Recorderjs.
function startUserMedia(stream) {
  var sourceNode = audio_context.createMediaStreamSource(stream);
  // Live monitoring: mic audio goes straight to the speakers (can feed back).
  sourceNode.connect(audio_context.destination);
  // Recorderjs taps whichever node it is given.
  recorder = new Recorder(sourceNode);
}
// Begin buffering microphone audio via Recorderjs.
function startRecording() {
recorder.record();
}
// Finish the take: stop capturing, publish a download link, then clear the
// buffered audio so the next take starts clean. (clear() must come after the
// export, or there would be nothing left to export.)
function stopRecording(button) {
recorder.stop();
createDownloadLink();
recorder.clear();
}
// Export the current take as WAV and append an <audio> player plus a
// timestamped download link to the recordings list.
function createDownloadLink() {
  recorder.exportWAV(function (blob) {
    var url = URL.createObjectURL(blob);

    var player = document.createElement('audio');
    player.controls = true;
    player.src = url;

    var link = document.createElement('a');
    link.href = url;
    link.download = new Date().toISOString() + '.wav';
    link.innerHTML = link.download;

    var item = document.createElement('li');
    item.appendChild(player);
    item.appendChild(link);
    recordingslist.appendChild(item);
  });
}
window.onload = function init() {
try {
// webkit shim
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
window.URL = window.URL || window.webkitURL;
audio_context = new AudioContext;
navigator.getUserMedia({audio: true}, startUserMedia, function(e) {
console.log('No live audio input: ' + e);
});
I have started doing some tests with RTCPeerConnection. I'm a beginner with this technology, and I want to know whether the following is normal:
In the console I print the ICE candidate whenever the onicecandidate handler is called, but I don't know if it is normal to see this many RTCIceCandidate objects appearing in the console.
here the output console
// Chrome detection via its prefixed getUserMedia.
var isChrome = !!navigator.webkitGetUserMedia;
// NOTE(review): modern RTCIceServer objects use `urls` (plural); the
// singular `url` field here is a legacy Chrome-era form.
var STUN = {
url: isChrome
? 'stun:stun.l.google.com:19302'
: 'stun:23.21.150.121'
};
// Public demo TURN server; the user#host form embeds the username in the URL.
var TURN = {
url: 'turn:homeo#turn.bistri.com:80',
credential: 'homeo'
};
var iceServers = {
iceServers: [STUN, TURN]
};
// Legacy (pre-standard) constraint syntax: ask the offer to receive both
// audio and video.
var sdpConstraints = {
optional: [],
mandatory: {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
}
};
var video = document.getElementById('thevideo');
var button = document.getElementById('thebutton');

navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// BUG FIX: the original read the bare identifiers webkitRTCPeerConnection /
// mozRTCPeerConnection, which throws a ReferenceError in any browser that
// defines neither. Read them off `window` instead (and prefer the
// unprefixed standard constructor when present).
RTCPeerConnection = window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection;

var local_stream;
navigator.getUserMedia({video:true, audio:false}, function(stream){
  local_stream = stream;
  // BUG FIX: URL.createObjectURL(MediaStream) was removed from modern
  // browsers; use srcObject where available and keep the old path as a
  // fallback for ancient ones.
  if ('srcObject' in video) {
    video.srcObject = stream;
  } else {
    video.src = URL.createObjectURL(stream);
  }
  // NOTE(review): without an autoplay attribute (or video.play()) the
  // preview may stay black — see the answer below.
  start();
}, function(err){
  console.log("The Following error ocurred:"+ err);
});
// Build the PeerConnection, log ICE candidates as they arrive, and kick off
// an offer. Many onicecandidate events are expected — one per candidate.
function start()
{
  pc = new RTCPeerConnection(iceServers);

  pc.onicecandidate = function (evt) {
    console.log(evt.candidate);
  };

  // Legacy callback-style createOffer(success, failure, constraints).
  pc.createOffer(
    function (desc) {
      pc.setLocalDescription(desc);
      console.log(desc);
    },
    function (err) {
      console.log("The Following error ocurred:"+ err);
    },
    sdpConstraints
  );
}
Yes -- that many ICE candidates is normal. (You'll get a similar result from apprtc.appspot.com.)
Note that to display the video you'll need an autoplay attribute on the video element, or video.play() in the gUM success handler.
I am trying out the sample code for peer-to-peer webcam communication in http://www.html5rocks.com/en/tutorials/webrtc/basics/ where both clients are implemented in the same page.
The 'local' webcam stream is displayed correctly. However, nothing shows up on the 'remote' stream and I'm not sure why.
Below is my code. I am currently testing it out on a hosted server. Thanks!
// Holds the local camera/mic stream once granted; consumed by call().
var localStream;
// Shim the prefixed getUserMedia and URL implementations.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;
// Request both audio and video; the success callback wires up the preview.
navigator.getUserMedia({'audio':true, 'video':true}, onMediaSuccess, onMediaFail);
// Show the local webcam preview and stash the stream for call().
function onMediaSuccess(stream) {
  localStream = stream;
  var localVideo = document.getElementById("localVideo");
  var url = window.URL.createObjectURL(stream);
  localVideo.autoplay = true;
  localVideo.src = url;
  console.log('Local stream established: ' + url);
}
// getUserMedia failure path: just tell the user.
function onMediaFail() {
alert('Could not connect stream');
}
// ICE candidate callbacks for the two in-page PeerConnections. Candidates are
// ignored because both ends live in the same page — no signalling is needed.
function iceCallback1(){}
function iceCallback2(){}
// Attach the incoming remote stream to the "remote" <video> element.
function gotRemoteStream(e) {
  var remoteVideo = document.getElementById("remoteVideo");
  var url = window.URL.createObjectURL(e.stream);
  remoteVideo.autoplay = true;
  remoteVideo.src = url;
  console.log('Remote stream received: ' + url);
}
// Loopback call using the long-dead, pre-standard webkitPeerConnection00 API
// (circa-2012 Chrome): both ends live in this page, so "signalling" is just
// passing the offer/answer objects between local variables.
// NOTE(review): modern browsers ship RTCPeerConnection with promise-based
// createOffer/createAnswer; this exact sequence only runs on ancient Chrome.
function call(){
pc1 = new webkitPeerConnection00(null, iceCallback1); // create the 'sending' PeerConnection
pc2 = new webkitPeerConnection00(null, iceCallback2); // create the 'receiving' PeerConnection
pc2.onaddstream = gotRemoteStream; // set the callback for the receiving PeerConnection to display video
console.log("Adding local stream to pc1");
pc1.addStream(localStream); // add the local stream for the sending PeerConnection
console.log("Creating offer");
var offer = pc1.createOffer({audio:true, video:true}); // create an offer, with the local stream
console.log("Setting local description for pc1");
pc1.setLocalDescription(pc1.SDP_OFFER, offer); // set the offer for the sending and receiving PeerConnection
console.log("Start pc1 ICE");
pc1.startIce();
console.log("Setting remote description for pc2");
pc2.setRemoteDescription(pc2.SDP_OFFER, offer);
// gotRemoteStream fires here: setting the remote offer surfaces the stream
console.log("Creating answer"); // create an answer
var answer = pc2.createAnswer(offer.toSdp(), {has_audio:true, has_video:true});
console.log("Setting local description for pc2");
pc2.setLocalDescription(pc2.SDP_ANSWER, answer); // set it on the sending and receiving PeerConnection
console.log("Setting remote description for pc1");
pc1.setRemoteDescription(pc1.SDP_ANSWER, answer);
console.log("Start pc2 ICE");
pc2.startIce(); // start the connection process
console.log("script done");
}
Try this:
simpl.info RTCPeerConnection by Vikas Marwaha and Justin Uberti
http://www.simpl.info/rtcpeerconnection/
It's working well for me and it's very clean and simple.