How to make a voice recording in a browser? - javascript

I use WebRTC in JavaScript:
function start() {
  var constraints = {
    audio: true,
    video: true
  };
  navigator.mediaDevices.getUserMedia(constraints)
    .then(function (mediaStream) {
      console.log(window.URL.createObjectURL(mediaStream));
      var video = document.querySelector('#my-video');
      video.src = window.URL.createObjectURL(mediaStream);
      /*video.srcObject = mediaStream;
      video.onloadedmetadata = function (e) {
        video.play();
      };*/
    })
    .catch(function (err) {
      console.log(err.name + ": " + err.message);
    });
}
HTML:
<video id="my-video" autoplay="true" muted="true"></video>
<br />
<input id="start" type="button" value="Start" onclick="start()" />
Please tell me what I need to do to record the sound and send it to the server (ASP.NET Core).

Recording requires the RecorderJS library.
HTML:
<h4>Recording audio</h4>
<input type="button" onclick="startRecording(this);" value="Record" />
<input type="button" onclick="stopRecording(this);" value="Stop" />
<h4>Record:</h4>
<div class="newRecord"></div>
JS:
window.onload = function () {
  init();
};
var audio_context;
var recorder;
function init() {
  try {
    // webkit shim
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
    window.URL = window.URL || window.webkitURL;
    audio_context = new AudioContext();
  } catch (e) {
    alert('No web audio support in this browser!');
    console.log(e.name + ": " + e.message);
  }
  queryToUseMicrophone();
}
function queryToUseMicrophone() {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(function (mediaStream) {
      var input = audio_context.createMediaStreamSource(mediaStream);
      recorder = new Recorder(input);
    }).catch(function (err) {
      console.log(err.name + ": " + err.message);
    });
}
function startRecording(button) {
  recorder && recorder.record();
  button.disabled = true;
  button.nextElementSibling.disabled = false;
}
function stopRecording(button) {
  recorder && recorder.stop();
  button.disabled = true;
  button.previousElementSibling.disabled = false;
  createDownloadLink();
  recorder.clear();
}
function createDownloadLink() {
  recorder && recorder.exportWAV(function (blob) {
    var url = URL.createObjectURL(blob);
    var audio = document.createElement('audio');
    var a = document.createElement('a');
    audio.controls = true;
    audio.src = url;
    a.href = url;
    a.download = new Date().toISOString() + '.wav';
    a.innerHTML = a.download;
    document.querySelector(".newRecord").appendChild(audio);
    document.querySelector(".newRecord").appendChild(a);
  });
}
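To get the recording to the ASP.NET Core backend, one option is to POST the exported WAV blob with FormData. This is only a sketch: the /api/audio route and the "file" field name are assumptions, not part of the code above.
// Sketch only: the /api/audio endpoint and the "file" field name are hypothetical.
function uploadWav(blob) {
  var formData = new FormData();
  formData.append('file', blob, new Date().toISOString() + '.wav');
  fetch('/api/audio', { method: 'POST', body: formData })
    .then(function (response) {
      if (!response.ok) throw new Error('Upload failed: ' + response.status);
      console.log('Upload OK');
    })
    .catch(function (err) {
      console.log(err);
    });
}
It could be called from the exportWAV callback, e.g. recorder.exportWAV(uploadWav), so that stopping the recording both shows the link and uploads the file.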

Related

How to record webcam in browser and store video on same page?

I have a simple program where a user records themselves (via their webcam) and that video is stored on the same HTML page. Can anyone let me know how I can go about adding to what I have below to record and save the video?
HTML:
<button id="speak_button"></button> <!-- click to start video -->
<div id="circle"><p id="submit_button">Submit</p></div> <!-- click to end video -->
<video id="video" width="180" height="140" autoplay muted></video> <!-- where the webcam is -->
<video id="second_video" width="180" height="140" controls></video> <!-- where I want the recorded video to be -->
JavaScript:
<script>
let video = document.getElementById("video");
let mediaRecorder;
navigator.mediaDevices.getUserMedia({video: true, audio: true})
  .then(function(stream){
    video.srcObject = stream;
    video.play();
    mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm"});
    mediaRecorder.ondataavailable = function(event) {
      if (event.data.size > 0) {
        const reader = new window.FileReader();
        reader.readAsDataURL(event.data);
        reader.onloadend = function() {
          let base64 = reader.result.split('base64,')[1];
          let recording = document.getElementById('second_video');
          recording.src = "data:video/webm;base64," + base64;
          recording.type = "video/webm";
          recording.play();
        };
      }
    };
  });
document.getElementById("speak_button").addEventListener("click", function() {
  mediaRecorder.start();
  stream.record();
});
document.getElementById("submit_button").addEventListener("click", function() {
  mediaRecorder.stop();
  stream.stop();
  var superBuffer = new Blob(recordedChunks);
  document.getElementById("second_video") = window.URL.createObjectURL(superBuffer);
});
</script>
Thanks!
HTML code:
<main id="container">
  <video id="gum" playsinline autoplay muted></video>
  <video id="recorded" playsinline loop></video>
  <section>
    <button id="start">Start camera</button>
    <button id="record" disabled>Record</button>
    <button id="play" disabled>Play</button>
  </section>
  <span id="errorMsg"></span> <!-- referenced by main.js -->
</main>
<script src="main.js"></script>
main.js code:
let mediaRecorder;
let recordedBlobs;
const errorMsgElement = document.querySelector("span#errorMsg");
const recordedVideo = document.querySelector("video#recorded");
const recordButton = document.querySelector("button#record");
const playButton = document.querySelector("button#play");
recordButton.addEventListener("click", () => {
  if (recordButton.textContent === "Record") return startRecording();
  stopRecording();
  recordButton.textContent = "Record";
  playButton.disabled = false;
});
playButton.addEventListener("click", () => {
  const superBuffer = new Blob(recordedBlobs, { type: "video/webm" });
  recordedVideo.src = null;
  recordedVideo.srcObject = null;
  recordedVideo.src = window.URL.createObjectURL(superBuffer);
  recordedVideo.controls = true;
  recordedVideo.play();
});
function handleDataAvailable(event) {
  if (event.data && event.data.size > 0) recordedBlobs.push(event.data);
}
function startRecording() {
  recordedBlobs = [];
  let options = { mimeType: "video/webm;codecs=vp9,opus" };
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e) {
    console.error("Exception while creating MediaRecorder:", e);
    errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
    return;
  }
  recordButton.textContent = "Stop Recording";
  playButton.disabled = true;
  mediaRecorder.onstop = (event) => console.log(event);
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start();
  console.log("MediaRecorder started", mediaRecorder);
}
function stopRecording() {
  mediaRecorder.stop();
}
document.querySelector("button#start").addEventListener("click", async () => {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
    recordButton.disabled = false;
    window.stream = stream;
    const gumVideo = document.querySelector("video#gum");
    gumVideo.srcObject = stream;
  } catch (e) {
    console.error("navigator.getUserMedia error:", e);
    errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
  }
});
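If the recording also needs to be saved to disk (the question asks about saving the video), one small addition, not part of the original answer, is to turn the collected blobs into a download. A sketch that reuses the recordedBlobs array above:
// Sketch: save the recorded chunks as a .webm file via a temporary link.
function downloadRecording() {
  const blob = new Blob(recordedBlobs, { type: "video/webm" });
  const url = window.URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.href = url;
  a.download = "recording.webm";
  document.body.appendChild(a);
  a.click();
  setTimeout(() => {
    document.body.removeChild(a);
    window.URL.revokeObjectURL(url);
  }, 100);
}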

How to save video using web cam?

I have a plug-and-play webcam (an iBall model) and I want to record video by clicking Start and Stop buttons. If there is any simple script, please provide it; I would like simple PHP code.
<p><video id="video" autoplay="autoplay"></video></p>
<p><input type="button" id="buttonSnap" value="Take screenshot" disabled="disabled" onclick="snapshot()" /></p>
<p>
<input type="button" id="buttonStart" value="Start" disabled="disabled" onclick="start()" />
<input type="button" id="buttonStop" value="Stop" disabled="disabled" onclick="stop()" />
</p>
<p><canvas id="canvas"></canvas></p>
<script type="text/javascript">
  "use strict";
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var videoStream = null;
  var preLog = document.getElementById('preLog');

  function log(text)
  {
    if (preLog) preLog.textContent += ('\n' + text);
    else alert(text);
  }

  function snapshot()
  {
    canvas.width = video.videoWidth;
    canvas.height = video.videoHeight;
    canvas.getContext('2d').drawImage(video, 0, 0);
  }

  function noStream()
  {
    log('Access to camera was denied!');
  }

  function stop()
  {
    var myButton = document.getElementById('buttonStop');
    if (myButton) myButton.disabled = true;
    myButton = document.getElementById('buttonSnap');
    if (myButton) myButton.disabled = true;
    if (videoStream)
    {
      if (videoStream.stop) videoStream.stop();
      else if (videoStream.msStop) videoStream.msStop();
      videoStream.onended = null;
      videoStream = null;
    }
    if (video)
    {
      video.onerror = null;
      video.pause();
      if (video.mozSrcObject)
        video.mozSrcObject = null;
      video.src = "";
    }
    myButton = document.getElementById('buttonStart');
    if (myButton) myButton.disabled = false;
  }

  function gotStream(stream)
  {
    var myButton = document.getElementById('buttonStart');
    if (myButton) myButton.disabled = true;
    videoStream = stream;
    log('Got stream.');
    video.onerror = function ()
    {
      log('video.onerror');
      if (video) stop();
    };
    stream.onended = noStream;
    if (window.webkitURL) video.src = window.webkitURL.createObjectURL(stream);
    else if (video.mozSrcObject !== undefined)
    {//FF18a
      video.mozSrcObject = stream;
      video.play();
    }
    else if (navigator.mozGetUserMedia)
    {//FF16a, 17a
      video.src = stream;
      video.play();
    }
    else if (window.URL) video.src = window.URL.createObjectURL(stream);
    else video.src = stream;
    myButton = document.getElementById('buttonSnap');
    if (myButton) myButton.disabled = false;
    myButton = document.getElementById('buttonStop');
    if (myButton) myButton.disabled = false;
  }

  function start()
  {
    if ((typeof window === 'undefined') || (typeof navigator === 'undefined')) log('This page needs a Web browser with the objects window.* and navigator.*!');
    else if (!(video && canvas)) log('HTML context error!');
    else
    {
      log('Get user media…');
      if (navigator.getUserMedia) navigator.getUserMedia({video:true}, gotStream, noStream);
      else if (navigator.oGetUserMedia) navigator.oGetUserMedia({video:true}, gotStream, noStream);
      else if (navigator.mozGetUserMedia) navigator.mozGetUserMedia({video:true}, gotStream, noStream);
      else if (navigator.webkitGetUserMedia) navigator.webkitGetUserMedia({video:true}, gotStream, noStream);
      else if (navigator.msGetUserMedia) navigator.msGetUserMedia({video:true, audio:false}, gotStream, noStream);
      else log('getUserMedia() not available from your Web browser!');
    }
  }

  start();
</script>
I got this code from https://codepen.io/blaberus/pen/raGdBG. Using this code I can see the video, but how can I record it? Can anyone help?
<!DOCTYPE html>
<html>
<head>
  <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
  <meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
  <meta name="mobile-web-app-capable" content="yes">
  <meta id="theme-color" name="theme-color" content="#fff">
  <base target="_blank">
  <title>Media Recorder API Demo</title>
  <link rel="stylesheet" href="css/main.css" />
  <style>
    a#downloadLink {
      display: block;
      margin: 0 0 1em 0;
      min-height: 1.2em;
    }
    p#data {
      min-height: 6em;
    }
  </style>
</head>
<body>
  <div id="container">
    <div style="text-align:center;">
      <h1>Media Recorder API Demo</h1>
      <!-- <h2>Record a 640x480 video using the media recorder API implemented in Firefox and Chrome</h2> -->
      <video controls autoplay></video><br>
      <button id="rec" onclick="onBtnRecordClicked()">Record</button>
      <button id="pauseRes" onclick="onPauseResumeClicked()" disabled>Pause</button>
      <button id="stop" onclick="onBtnStopClicked()" disabled>Stop</button>
    </div>
    <a id="downloadLink" download="mediarecorder.webm" name="mediarecorder.webm" href></a>
    <p id="data"></p>
    <script src="js/main.js"></script>
  </div>
</body>
</html>
<script>
  'use strict';
  /* globals MediaRecorder */
  // Spec is at http://dvcs.w3.org/hg/dap/raw-file/tip/media-stream-capture/RecordingProposal.html
  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;

  if (getBrowser() == "Chrome") {
    var constraints = {"audio": true, "video": { "mandatory": { "minWidth": 640, "maxWidth": 640, "minHeight": 480, "maxHeight": 480 }, "optional": [] } }; // Chrome
  } else if (getBrowser() == "Firefox") {
    var constraints = {audio: true, video: { width: { min: 640, ideal: 640, max: 640 }, height: { min: 480, ideal: 480, max: 480 } } }; // Firefox
  }

  var recBtn = document.querySelector('button#rec');
  var pauseResBtn = document.querySelector('button#pauseRes');
  var stopBtn = document.querySelector('button#stop');
  var videoElement = document.querySelector('video');
  var dataElement = document.querySelector('#data');
  var downloadLink = document.querySelector('a#downloadLink');
  videoElement.controls = false;

  function errorCallback(error) {
    console.log('navigator.getUserMedia error: ', error);
  }

  /*
  var mediaSource = new MediaSource();
  mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
  var sourceBuffer;
  */

  var mediaRecorder;
  var chunks = [];
  var count = 0;

  function startRecording(stream) {
    log('Start recording...');
    if (typeof MediaRecorder.isTypeSupported == 'function') {
      /*
        MediaRecorder.isTypeSupported is a function announced in https://developers.google.com/web/updates/2016/01/mediarecorder and later introduced in the MediaRecorder API spec http://www.w3.org/TR/mediastream-recording/
      */
      if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
        var options = {mimeType: 'video/webm;codecs=vp9'};
      } else if (MediaRecorder.isTypeSupported('video/webm;codecs=h264')) {
        var options = {mimeType: 'video/webm;codecs=h264'};
      } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
        var options = {mimeType: 'video/webm;codecs=vp8'};
      }
      log('Using ' + options.mimeType);
      mediaRecorder = new MediaRecorder(stream, options);
    } else {
      log('Using default codecs for browser');
      mediaRecorder = new MediaRecorder(stream);
    }

    pauseResBtn.textContent = "Pause";
    mediaRecorder.start(10);

    var url = window.URL || window.webkitURL;
    videoElement.src = url ? url.createObjectURL(stream) : stream;
    videoElement.play();

    mediaRecorder.ondataavailable = function(e) {
      //log('Data available...');
      //console.log(e.data);
      //console.log(e.data.type);
      //console.log(e);
      chunks.push(e.data);
    };

    mediaRecorder.onerror = function(e) {
      log('Error: ' + e);
      console.log('Error: ', e);
    };

    mediaRecorder.onstart = function() {
      log('Started & state = ' + mediaRecorder.state);
    };

    mediaRecorder.onstop = function() {
      log('Stopped & state = ' + mediaRecorder.state);
      var blob = new Blob(chunks, {type: "video/mp4"});
      chunks = [];
      var videoURL = window.URL.createObjectURL(blob);
      downloadLink.href = videoURL;
      videoElement.src = videoURL;
      downloadLink.innerHTML = 'Download video file';
      var rand = Math.floor((Math.random() * 10000000));
      var name = "video_" + rand + ".mp4";
      downloadLink.setAttribute("download", name);
      downloadLink.setAttribute("name", name);
    };

    mediaRecorder.onpause = function() {
      log('Paused & state = ' + mediaRecorder.state);
    }

    mediaRecorder.onresume = function() {
      log('Resumed & state = ' + mediaRecorder.state);
    }

    mediaRecorder.onwarning = function(e) {
      log('Warning: ' + e);
    };
  }

  //function handleSourceOpen(event) {
  //  console.log('MediaSource opened');
  //  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp9"');
  //  console.log('Source buffer: ', sourceBuffer);
  //}

  function onBtnRecordClicked() {
    if (typeof MediaRecorder === 'undefined' || !navigator.getUserMedia) {
      alert('MediaRecorder not supported on your browser, use Firefox 30 or Chrome 49 instead.');
    } else {
      navigator.getUserMedia(constraints, startRecording, errorCallback);
      recBtn.disabled = true;
      pauseResBtn.disabled = false;
      stopBtn.disabled = false;
    }
  }

  function onBtnStopClicked() {
    mediaRecorder.stop();
    videoElement.controls = true;
    recBtn.disabled = false;
    pauseResBtn.disabled = true;
    stopBtn.disabled = true;
  }

  function onPauseResumeClicked() {
    if (pauseResBtn.textContent === "Pause") {
      console.log("pause");
      pauseResBtn.textContent = "Resume";
      mediaRecorder.pause();
      stopBtn.disabled = true;
    } else {
      console.log("resume");
      pauseResBtn.textContent = "Pause";
      mediaRecorder.resume();
      stopBtn.disabled = false;
    }
    recBtn.disabled = true;
    pauseResBtn.disabled = false;
  }

  function log(message) {
    dataElement.innerHTML = dataElement.innerHTML + '<br>' + message;
  }

  //browser ID
  function getBrowser() {
    var nVer = navigator.appVersion;
    var nAgt = navigator.userAgent;
    var browserName = navigator.appName;
    var fullVersion = '' + parseFloat(navigator.appVersion);
    var majorVersion = parseInt(navigator.appVersion, 10);
    var nameOffset, verOffset, ix;

    // In Opera, the true version is after "Opera" or after "Version"
    if ((verOffset = nAgt.indexOf("Opera")) != -1) {
      browserName = "Opera";
      fullVersion = nAgt.substring(verOffset + 6);
      if ((verOffset = nAgt.indexOf("Version")) != -1)
        fullVersion = nAgt.substring(verOffset + 8);
    }
    // In MSIE, the true version is after "MSIE" in userAgent
    else if ((verOffset = nAgt.indexOf("MSIE")) != -1) {
      browserName = "Microsoft Internet Explorer";
      fullVersion = nAgt.substring(verOffset + 5);
    }
    // In Chrome, the true version is after "Chrome"
    else if ((verOffset = nAgt.indexOf("Chrome")) != -1) {
      browserName = "Chrome";
      fullVersion = nAgt.substring(verOffset + 7);
    }
    // In Safari, the true version is after "Safari" or after "Version"
    else if ((verOffset = nAgt.indexOf("Safari")) != -1) {
      browserName = "Safari";
      fullVersion = nAgt.substring(verOffset + 7);
      if ((verOffset = nAgt.indexOf("Version")) != -1)
        fullVersion = nAgt.substring(verOffset + 8);
    }
    // In Firefox, the true version is after "Firefox"
    else if ((verOffset = nAgt.indexOf("Firefox")) != -1) {
      browserName = "Firefox";
      fullVersion = nAgt.substring(verOffset + 8);
    }
    // In most other browsers, "name/version" is at the end of userAgent
    else if ((nameOffset = nAgt.lastIndexOf(' ') + 1) < (verOffset = nAgt.lastIndexOf('/'))) {
      browserName = nAgt.substring(nameOffset, verOffset);
      fullVersion = nAgt.substring(verOffset + 1);
      if (browserName.toLowerCase() == browserName.toUpperCase()) {
        browserName = navigator.appName;
      }
    }
    // trim the fullVersion string at semicolon/space if present
    if ((ix = fullVersion.indexOf(";")) != -1)
      fullVersion = fullVersion.substring(0, ix);
    if ((ix = fullVersion.indexOf(" ")) != -1)
      fullVersion = fullVersion.substring(0, ix);
    majorVersion = parseInt('' + fullVersion, 10);
    if (isNaN(majorVersion)) {
      fullVersion = '' + parseFloat(navigator.appVersion);
      majorVersion = parseInt(navigator.appVersion, 10);
    }
    return browserName;
  }
</script>
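The demo above only offers the file as a local download. To store it server-side (the question asks for PHP), one option is to POST the blob from the onstop handler; a sketch, where "upload.php" and the multipart field name "video" are assumptions rather than part of the demo:
// Sketch only: the upload.php endpoint and the "video" field name are hypothetical.
function uploadRecording(blob, name) {
  var formData = new FormData();
  formData.append('video', blob, name);
  var xhr = new XMLHttpRequest();
  xhr.open('POST', 'upload.php', true);
  xhr.onload = function () {
    log('Upload finished with status ' + xhr.status);
  };
  xhr.send(formData);
}
It could be called at the end of mediaRecorder.onstop above, right after the download link is set up.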

How to send a blob obtained from getUserMedia() to socket.io, then send it back to the client and put it in a video element

This is the code:
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;
var socket = io();
var recordedVideo = document.querySelector('video#recorded');
var gumVideo = document.querySelector('video#gum');
var translateButton = document.querySelector('button#record');
translateButton.onclick = startTranslate;
recordedVideo.src = window.URL.createObjectURL(mediaSource);

socket.on('video', function (data) {
  sourceBuffer.appendBuffer(data);
});

navigator.mediaDevices.getUserMedia(constraints)
  .then(handleSuccess).catch(handleError);

function handleSourceOpen(event) {
  console.log('MediaSource opened');
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
  sourceBuffer.onupdate = function () {
    console.log("updating");
  };
  console.log('Source buffer: ', sourceBuffer);
}

function handleSuccess(stream) {
  console.log('getUserMedia() got stream: ', stream);
  window.stream = stream;
  if (window.URL) {
    gumVideo.src = window.URL.createObjectURL(stream);
  } else {
    gumVideo.src = stream;
  }
}

function startTranslate() {
  recordedBlobs = [];
  var options = {mimeType: 'video/webm;codecs=vp9'};
  if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    console.log(options.mimeType + ' is not Supported');
    options = {mimeType: 'video/webm;codecs=vp8'};
    if (!MediaRecorder.isTypeSupported(options.mimeType)) {
      console.log(options.mimeType + ' is not Supported');
      options = {mimeType: 'video/webm'};
      if (!MediaRecorder.isTypeSupported(options.mimeType)) {
        console.log(options.mimeType + ' is not Supported');
        options = {mimeType: ''};
      }
    }
  }
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e) {
    console.error('Exception while creating MediaRecorder: ' + e);
    alert('Exception while creating MediaRecorder: ' + e + '.mimeType: ' + options.mimeType);
    return;
  }
  console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
  recordButton.textContent = 'Stop Recording';
  playButton.disabled = true;
  downloadButton.disabled = true;
  mediaRecorder.onstop = handleStop;
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start(10); // collect 10ms of data
  console.log('MediaRecorder started', mediaRecorder);
}

function handleDataAvailable(event) {
  if (event.data && event.data.size > 0) {
    socket.emit('video', event.data);
  }
}
When I click the translate button, after 2-3 seconds I get an error like:
Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer
has been removed from the parent media source
What does this error mean? Or is it a bug in WebRTC?

Play MediaRecorder chunks in MediaSource HTML5 -- video frozen

I have this simple code to get chunks of a video stream and play them in a MediaSource. I see video, but sometimes it stops. It may work for a few seconds or for a few minutes, but it finally stops at some moment. chrome://media-internals/ shows no errors.
What is wrong here?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
  "audio": true,
  "video": {
    "mandatory": {
      "minWidth": 320, "maxWidth": 320,
      "minHeight": 240, "maxHeight": 240
    }, "optional": []
  }
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
  console.log("sourceopen");
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
  window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
  console.log("error", e)
}, false);
var stack = [];
video.play();
navigator.getUserMedia(constraints, function (stream) {
  console.log("stream", stream);
  mediaRecorder = new MediaRecorder(stream);
  mediaRecorder.ondataavailable = function (e) {
    var reader = new FileReader();
    reader.addEventListener("loadend", function () {
      var arr = new Uint8Array(reader.result);
      sourceBuffer.appendBuffer(arr);
    });
    reader.readAsArrayBuffer(e.data);
  };
  mediaRecorder.start(100);
}, function (e) {
  console.log(e)
});
Here is a JSFiddle that tries to do it:
https://jsfiddle.net/stivyakovenko/fkt89cLu/6/
I am using Chrome as my main target.
Looks like this is a bug in Chrome:
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
MediaRecorder gives you part of a whole webm file in the ondataavailable callback. It looks like this kind of output does not work with MediaSource; it does not work at all in my Chrome 66.
Here is a way to get something like "video chat" or "live streaming" with MediaRecorder, without ffmpeg:
You can send the data part by part to your server with Ajax. The server can return "the whole webm file" to your Chrome browser in one long-running response, and the server can append more data to that response as soon as it receives data from the client.
This kind of workaround also works with HTML only:
You can use a blob list to collect all the blobs that come from ondataavailable, then set video.src again and again.
Here is a jsfiddle that works:
const constraints = {video: true};
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');
var blobList = [];
var gCurrentTime = 0;

function playNew() {
  gCurrentTime = video2.currentTime;
  var thisBlob = new Blob(blobList, {type: "video/webm"});
  var url = URL.createObjectURL(thisBlob);
  video2.src = url;
  video2.currentTime = gCurrentTime;
  video2.play();
}

video2.onended = playNew;
var isFirst = true;

function handleSuccess(stream) {
  video1.srcObject = stream;
  var mediaRecorder = new MediaRecorder(stream, {mimeType: "video/webm"});
  mediaRecorder.ondataavailable = function (e) {
    blobList.push(e.data);
    if (isFirst) {
      playNew();
      isFirst = false;
    }
  }
  mediaRecorder.start(1000);
}

function handleError(error) {
  console.error('Reeeejected!', error);
}

navigator.mediaDevices.getUserMedia(constraints).
  then(handleSuccess).catch(handleError);

<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
The HTML-only solution (the second one) will blink again and again and has a huge delay. The server long-push solution (the first one) does not blink and has about a five-second delay.
Based on my experience of working with MediaRecorder and MediaSource, most of the errors related to the video freezing may be due to the chunks being received out of order. I believe that webm (and maybe other media types too) needs the chunks to be received in increasing order of their timecodes. Recording, sending and then receiving the chunks asynchronously may not preserve this increasing order of timecodes.
So, after the above analysis of my own experience of video freezing with MediaRecorder/MediaSource, I changed my code to send the recorded chunks synchronously (in order), not asynchronously. A sketch of that idea follows.
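This is only an illustrative sketch of keeping the uploads in order, not the answerer's code; the /chunk endpoint is an assumption.
// Sketch: record and upload MediaRecorder chunks strictly in order.
async function streamInOrder() {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  const recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
  let queue = Promise.resolve();
  recorder.ondataavailable = (e) => {
    if (e.data && e.data.size > 0) {
      // Chain the uploads so chunk N+1 is only sent after chunk N has been accepted.
      queue = queue.then(() => fetch('/chunk', { method: 'POST', body: e.data }));
    }
  };
  recorder.start(1000); // emit a chunk roughly every second
}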
I am trying to do this as well; however, I do not get any video at all. Your jsfiddle does not work for me on Chrome or Firefox (tested on Ubuntu 14.04 and Windows 7).
After a bit of research (mainly streaming back the file after it has been recorded), I've found out that the file is not properly fragmented to be played by MSE.
@Steve: I'd be interested to find out how you've done the fragmenting with ffmpeg.
As a sidenote, I also have a similar question here: Display getUserMediaStream live video with media stream extensions (MSE), with an error description from chrome://media-internals.
A working example in Chrome, but it freezes in Firefox:
const main = async(function* main(){
const logging = true;
let tasks = Promise.resolve(void 0);
const devices = yield navigator.mediaDevices.enumerateDevices();
console.table(devices);
const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
if(logging){
stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
}
const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
if(logging){
rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
}
const ms = new MediaSource();
if(logging){
ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
}
const video = document.createElement("video");
if(logging){
video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
video.addEventListener('play', (ev)=>{ console.log(ev.type); });
video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
}
//video.srcObject = ms;
video.src = URL.createObjectURL(ms);
video.volume = 0;
video.controls = true;
video.autoplay = true;
document.body.appendChild(video);
yield new Promise((resolve, reject)=>{
ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
});
const sb = ms.addSourceBuffer(rec.mimeType);
if(logging){
sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
}
const stop = async(function* stop(){
console.info("stopping");
if(sb.updating){ sb.abort(); }
if(ms.readyState === "open"){ ms.endOfStream(); }
rec.stop();
stream.getTracks().map((track)=>{ track.stop(); });
yield video.pause();
console.info("end");
});
const button = document.createElement("button");
button.innerHTML = "stop";
button.addEventListener("click", ()=>{
document.body.removeChild(button);
tasks = tasks.then(stop);
}, {once: true});
document.body.appendChild(button);
let i = 0;
rec.ondataavailable = ({data})=>{
tasks = tasks.then(async(function*(){
console.group(""+i);
try{
if(logging){ console.log("dataavailable", "size:", data.size); }
if(data.size === 0){
console.warn("empty recorder data");
throw new Error("empty recorder data");
}
const buf = yield readAsArrayBuffer(data);
sb.appendBuffer(buf);
yield new Promise((resolve, reject)=>{
sb.addEventListener('updateend', ()=> resolve(), {once: true});
sb.addEventListener("error", (err)=> reject(err), {once: true});
});
if(logging){
console.log("timestampOffset", sb.timestampOffset);
console.log("appendWindowStart", sb.appendWindowStart);
console.log("appendWindowEnd", sb.appendWindowEnd);
for(let i=0; i<sb.buffered.length; i++){
console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
}
for(let i=0; i<video.seekable.length; i++){
console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
}
console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
}
if (video.buffered.length > 1) {
console.warn("MSE buffered has a gap!");
throw new Error("MSE buffered has a gap!");
}
}catch(err){
console.error(err);
yield stop();
console.groupEnd(""+i); i++;
return Promise.reject(err);
}
console.groupEnd(""+i);
i++;
}));
};
rec.start(1000);
console.info("start");
});
function sleep(ms){
return new Promise(resolve =>
setTimeout((()=>resolve(ms)), ms));
}
function readAsArrayBuffer(blob) {
return new Promise((resolve, reject)=>{
const reader = new FileReader();
reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
reader.addEventListener("error", (err)=> reject(err.error), {once: true});
reader.readAsArrayBuffer(blob);
});
}
function async(generatorFunc){
return function (arg) {
const generator = generatorFunc(arg);
return next(null);
function next(arg) {
const result = generator.next(arg);
if(result.done){ return result.value; }
else if(result.value instanceof Promise){ return result.value.then(next); }
else{ return Promise.resolve(result.value); }
}
}
}
console.clear();
main().catch(console.error);
https://jsfiddle.net/nthyfgvs/
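On current browsers the custom async()/generator helper above can be expressed with native async/await while keeping the same append-in-order idea. A compressed sketch of that pattern (the <video id="live"> element id and the opus/vp8 mime type are assumptions; browser support varies):
// Sketch: serialize MediaRecorder chunks into a SourceBuffer with async/await.
async function recordToMediaSource() {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  const mimeType = 'video/webm; codecs="opus,vp8"';
  const ms = new MediaSource();
  const video = document.querySelector('#live');
  video.src = URL.createObjectURL(ms);
  await new Promise((resolve) => ms.addEventListener('sourceopen', resolve, { once: true }));
  const sb = ms.addSourceBuffer(mimeType);
  const rec = new MediaRecorder(stream, { mimeType });
  let tasks = Promise.resolve();
  rec.ondataavailable = ({ data }) => {
    // Chain appends so each buffer waits for the previous updateend.
    tasks = tasks.then(async () => {
      const buf = await data.arrayBuffer();
      sb.appendBuffer(buf);
      await new Promise((resolve) => sb.addEventListener('updateend', resolve, { once: true }));
    });
  };
  rec.start(1000);
}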
UPDATE!
This is version 2, which I also created; it works in Firefox and Chrome with no freezing. Please note that I am using the same two server-side Python 3 programs from my previous answer for writing and reading the webcam data as it is created.
Browser client, version 2:
<html>
<head>
  <script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
  <video id="video1" width="300" height="300" autoplay controls></video>
  <video id="video2" width="300" height="300" controls></video>
  <script>
    var offsetA = 0;

    function b64toBlob(dataURI) {
      var byteString = atob(dataURI.split(',')[1]);
      var ab = new ArrayBuffer(byteString.length);
      var ia = new Uint8Array(ab);
      for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
      }
      return new Blob([ab], { type: 'video/webm; codecs=vp8;' });
    }

    // 1. Create a `MediaSource`
    var mediaSource2 = new MediaSource();
    // 2. Create an object URL from the `MediaSource`
    var url = URL.createObjectURL(mediaSource2);
    // 3. Set the video's `src` to the object URL
    var video = document.getElementById("video2");
    video.src = url;
    // 4. On the `sourceopen` event, create a `SourceBuffer`
    var sourceBuffer2 = null;

    const constraints = {video: true};
    const video1 = document.querySelector('#video1');
    const video2 = document.querySelector('#video2');
    //var blobList = [];

    function handleSuccess(stream) {
      video1.srcObject = stream;
      var mediaRecorder = new MediaRecorder(stream, {type: "video/webm; codecs=vp8;"});
      mediaRecorder.ondataavailable = function(e) {
        //blobList.push(e.data);
        var res;
        var pos;
        var b = "base64,";
        var fr = new FileReader();
        fr.onload = function() {
          res = this.result;
          pos = res.search(b);
          pos = pos + b.length;
          res = res.substring(pos);
          $.ajax({
            type: 'POST',
            url: 'post_data_webcam.py',
            dataType: "html",
            async: false,
            data: { chunk: res },
            success: function(data) {
              //alert(data + ' yes');
            },
            error: function(XMLHttpRequest, textStatus, errorThrown) {
              alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
            }
          });
        }
        fr.readAsDataURL(e.data);
      }
      mediaRecorder.start(1000);

      var i = setInterval(function() {
        if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false) {
          if (sourceBuffer2.duration > 2) {
            sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
          }
          $.ajax({
            type: 'POST',
            url: 'get_data_webcam.py',
            dataType: "html",
            async: false,
            data: { offset: offsetA },
            success: function(data) {
              data = data.trim();
              if (data != 'base64,') {
                var reader = new FileReader();
                reader.addEventListener("loadend", function () {
                  sourceBuffer2.appendBuffer( reader.result );
                });
                reader.readAsArrayBuffer( b64toBlob(data) );
                offsetA = offsetA + 1;
              }
            },
            error: function(XMLHttpRequest, textStatus, errorThrown) {
              alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
            }
          });
        }
      }, 1000);

      video.play();
    }

    function handleError(error) {
      console.error('error', error);
    }

    mediaSource2.addEventListener("sourceopen", function() {
      // NOTE: Browsers are VERY picky about the codec being EXACTLY
      // right here. Make sure you know which codecs you're using!
      sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
      sourceBuffer2.mode = 'sequence';
      // Make sure to only append one chunk at a time to the SourceBuffer
      navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
    });
  </script>
</body>
</html>
This solution works great in Firefox, with no freezing. The browser client requires jQuery and is served with CGI Python 3. There are also two server-side Python 3 programs for writing and reading the webcam data as it is created.
Browser client:
<html>
<head>
  <script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
  <video id="video" width="300" height="300" controls></video>
  <video id="video2" width="300" height="300" controls></video>
  <script>
    var offsetA = 0;
    var res;
    var pos;
    var b = "base64,";
    var fr = new FileReader();

    function b64toBlob(dataURI) {
      var byteString = atob(dataURI.split(',')[1]);
      var ab = new ArrayBuffer(byteString.length);
      var ia = new Uint8Array(ab);
      for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
      }
      return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
    }

    // 1. Create a `MediaSource`
    var mediaSource2 = new MediaSource();
    // 2. Create an object URL from the `MediaSource`
    var url = URL.createObjectURL(mediaSource2);
    // 3. Set the video's `src` to the object URL
    var video = document.getElementById("video2");
    video.src = url;
    // 4. On the `sourceopen` event, create a `SourceBuffer`
    var sourceBuffer2 = null;

    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
    var mediaSource = new MediaSource();
    var constraints = {
      "audio": true,
      "video": {
        "mandatory": {
          "minWidth": 320, "maxWidth": 320,
          "minHeight": 240, "maxHeight": 240
        }, "optional": []
      }
    };
    window.mediaSource = mediaSource;
    var sourceBuffer;
    var video = document.querySelector('#video');
    window.video = video;
    video.src = window.URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', function (e) {
      console.log("sourceopen");
      sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
      window.sourceBuffer = sourceBuffer;
    }, false);
    mediaSource.addEventListener('error', function (e) {
      console.log("error", e)
    }, false);
    var stack = [];
    video.play();

    mediaSource2.addEventListener("sourceopen", function() {
      // NOTE: Browsers are VERY picky about the codec being EXACTLY
      // right here. Make sure you know which codecs you're using!
      sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
      sourceBuffer2.mode = 'sequence';
      // Make sure to only append one chunk at a time to the SourceBuffer
      navigator.getUserMedia(constraints, function (stream) {
        console.log("stream", stream);
        mediaRecorder = new MediaRecorder(stream);
        mediaRecorder.ondataavailable = function (e) {
          fr.onload = function() {
            res = this.result;
            pos = res.search(b);
            pos = pos + b.length;
            res = res.substring(pos);
            $.ajax({
              type: 'POST',
              url: 'post_data_webcam.py',
              dataType: "html",
              data: { chunk: res },
              success: function(data) {
                //alert(data + ' yes');
              },
              error: function(XMLHttpRequest, textStatus, errorThrown) {
                alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
              }
            });
          }
          fr.readAsDataURL(e.data);
          var reader = new FileReader();
          reader.addEventListener("loadend", function () {
            var arr = new Uint8Array(reader.result);
            sourceBuffer.appendBuffer(arr);
          });
          reader.readAsArrayBuffer(e.data);
        };
        mediaRecorder.start(1000);
      }, function (e) {
        console.log(e)
      });

      var i = setInterval(function() {
        if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false) {
          if (sourceBuffer.duration > 2) {
            sourceBuffer.remove(0, sourceBuffer.duration - 2);
          }
          if (sourceBuffer2.duration > 2) {
            sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
          }
          $.ajax({
            type: 'POST',
            url: 'get_data_webcam.py',
            dataType: "html",
            async: false,
            data: { offset: offsetA },
            success: function(data) {
              data = data.trim();
              if (data != 'base64,') {
                var reader = new FileReader();
                reader.addEventListener("loadend", function () {
                  var arr = new Uint8Array(reader.result);
                  sourceBuffer2.appendBuffer(arr);
                });
                reader.readAsArrayBuffer( b64toBlob(data) );
                offsetA = offsetA + 1;
              }
            },
            error: function(XMLHttpRequest, textStatus, errorThrown) {
              alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
            }
          });
        }
      }, 1000);
    });
  </script>
</body>
</html>
Server-side Python3 webcam video writer: post_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64

include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""")  # include_path is OUTDIR
sys.path.insert(0, include_path)

def enc_print(string='', encoding='utf8'):
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')

from html import escape

args = cgi.FieldStorage()
chunk = '' if not args.getvalue("chunk") else escape(args.getvalue("chunk"))

mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'

with open(mp4, 'ab') as f:
    f.write(base64.b64decode(chunk))

with open(mp4_text, 'a') as f:
    f.write(str(len(chunk)) + ',' + chunk + '\n')

html = 'success'

enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 webcam video reader: get_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64

include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""")  # include_path is OUTDIR
sys.path.insert(0, include_path)

def enc_print(string='', encoding='utf8'):
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')

from html import escape

args = cgi.FieldStorage()
offset = '' if not args.getvalue("offset") else escape(args.getvalue("offset"))

mp4_text = 'webcam_text.txt'
data = ''
try:
    with open(mp4_text, 'r') as f:
        line = f.readlines()[int(offset)]
        data = line.split(',')[1].strip()
except:
    pass

enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)

WebRTC file transfer and syntax querySelector

I have never used WebRTC, so I don't have any understanding of what its syntax looks like.
I came across a couple of pieces of syntax which I think are not jQuery, so would anyone mind telling me whether this is WebRTC-specific code?
document.querySelector('#stop-recording').onclick = function() {
  this.disabled = true;
  mediaRecorder.stop();
  mediaRecorder.stream.stop();
  document.querySelector('#pause-recording').disabled = true;
  document.querySelector('#start-recording').disabled = false;
};
What is querySelector?
I got the code from this URL: https://github.com/streamproc/MediaStreamRecorder/blob/master/demos/video-recorder.html
Looking for a bit of info. Thanks.
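For reference, querySelector is a standard DOM method (document.querySelector), not jQuery and not WebRTC-specific: it returns the first element in the document that matches a CSS selector. A minimal example:
// Standard DOM API: find the first element matching a CSS selector and disable it.
var stopButton = document.querySelector('#stop-recording');
stopButton.disabled = true;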
You can refer to the following code:
var audio_context;
var recorder;
$(function () {
  try {
    //Audio Recording
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    navigator.getUserMedia = (navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia);
    window.URL = window.URL || window.webkitURL;
    var recorderObject;
    var VM_IDForAudio = "";
    var audio_context = new AudioContext;
    var localMediaStreamForAudio;
    var audioStream;
    //Audio-Video Recording (Firefox)
    var videoFile = !!navigator.mozGetUserMedia ? 'video.gif' : 'video.webm';
    var inner = document.querySelector('.inner');
    var videoElement = document.getElementById('webcamVideo');
    var VM_IDForAudioVideo = "";
    var localMediaStreamForAudioVideo;
    //Disable Live Webcam Button
    $("#btnShowWebcam").prop("disabled", true);
  } catch (e) {
    //alert('No web audio support in this browser!');
    console.log("No web audio support in this browser!");
  }

  //Audio Recording
  $("[id$='btnAudioRecord']").click(function () {
    //VM_IDForAudio = $("[id$='hdVMID']").val();
    VM_IDForAudio = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
      patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
    $this = $(this);
    $recorder = $this.parent();
    if ($("[id$='btnAudioRecord']").val() == "Record Audio") {
      if (VM_IDForAudio != "") {
        $this.attr("value", "Stop Record");
        navigator.getUserMedia({ audio: true }, function (stream) {
          if (window.IsChrome) stream = new window.MediaStream(stream.getAudioTracks());
          audioStream = stream;
          recorder = window.RecordRTC(stream, {
            type: 'audio'
          });
          recorder.startRecording();
        }, function () { });
      }
      else {
        //Select Patient
      }
    } else {
      $this.attr("value", "Record Audio");
      if (recorder)
        recorder.stopRecording(function (url) {
          var reader = new window.FileReader();
          reader.readAsDataURL(recorder.getBlob()); // read the recorded blob from RecordRTC
          reader.onloadend = function () {
            base64data = reader.result;
            PageMethods.SaveAudioRecording(base64data, VM_IDForAudio);
            audioStream.stop();
          }
        });
    }
  });

  //Audio-Video Recording
  $("[id$='btnAudioVideoRecord']").click(function () {
    //VM_IDForAudioVideo = $("[id$='hdVMID']").val();
    VM_IDForAudioVideo = $("[id$='hdPRN']").val() + "_" + $("[id$='hdVMID']").val() + "_" +
      patientDet.visitType + "_" + replateDateString(patientDet.visitDate);
    $this = $(this);
    if ($("[id$='btnAudioVideoRecord']").val() == "Record Aud/Vid") {
      if (VM_IDForAudioVideo != "") {
        $this.attr("value", "Stop Record");
        captureUserMedia(function (stream) {
          window.audioVideoRecorder = window.RecordRTC(stream, {
            type: 'video', // don't forget this; otherwise you'll get video/webm instead of audio/ogg
            canvas: {
              width: 320,
              height: 240
            }
          });
          localMediaStreamForAudioVideo = stream;
          $("#btnShowWebcam").prop("disabled", false);
          window.audioVideoRecorder.startRecording();
        });
      }
      else {
        //Select Patient
      }
    } else {
      $this.attr("value", "Record Aud/Vid");
      $("#btnShowWebcam").prop("disabled", true);
      window.audioVideoRecorder.stopRecording(function (url) {
        convertStreams(audioVideoRecorder.getBlob(), videoFile, VM_IDForAudioVideo);
      });
      localMediaStreamForAudioVideo.stop();
    }
  });
});
and use the RecordRTC JavaScript library.
For more, go through this: http://recordrtc.org/RecordRTC.html
For a live demo: https://www.webrtc-experiment.com/RecordRTC/AudioVideo-on-Firefox.html
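Stripped of the page-specific jQuery handlers, the core RecordRTC flow is roughly the following sketch; the five-second timeout stands in for a real Stop button, and what you do with the blob afterwards is up to you.
// Minimal RecordRTC audio flow: start, stop, then read the blob.
navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
  var recorder = RecordRTC(stream, { type: 'audio' });
  recorder.startRecording();
  // ...later, e.g. when a Stop button is clicked:
  setTimeout(function () {
    recorder.stopRecording(function () {
      var blob = recorder.getBlob();        // the recorded audio
      var url = URL.createObjectURL(blob);  // e.g. set as an <audio> src or upload it
      console.log(url);
    });
  }, 5000);
}).catch(function (err) {
  console.log(err);
});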
You should also check the WebTorrent GitHub repository; there is a detailed description of WebRTC and how it is implemented. Also check out the official WebTorrent website.
