I have a plug-and-play webcam (iBall brand), and I want to record video by clicking Start and Stop buttons. If there is a simple script for this, please provide it — ideally simple PHP code.
<p><video id="video" autoplay="autoplay"></video></p>
<!-- Snapshot/Stop start disabled; gotStream() enables them once the camera is live. -->
<p><input type="button" id="buttonSnap" value="Take screenshot" disabled="disabled" onclick="snapshot()" /></p>
<p>
<input type="button" id="buttonStart" value="Start" disabled="disabled" onclick="start()" />
<input type="button" id="buttonStop" value="Stop" disabled="disabled" onclick="stop()" />
</p>
<p><canvas id="canvas"></canvas></p>
<script type="text/javascript">
"use strict";
// Preview <video> and snapshot <canvas> elements from the markup above.
var video = document.getElementById('video');
var canvas = document.getElementById('canvas');
// Currently-active camera MediaStream, or null while the camera is off.
var videoStream = null;
// NOTE(review): no element with id="preLog" appears in the markup shown here,
// so log() would always fall back to alert() — confirm against the full page.
var preLog = document.getElementById('preLog');
// Append one line to the on-page log, or alert() when the log element is absent.
function log(text)
{
if (preLog) preLog.textContent += ('\n' + text);
else alert(text);
}
function snapshot()
{
    // Size the canvas to the camera's native resolution, then paint the
    // current video frame onto it.
    var width = video.videoWidth;
    var height = video.videoHeight;
    canvas.width = width;
    canvas.height = height;
    var context = canvas.getContext('2d');
    context.drawImage(video, 0, 0);
}
function noStream()
{
    // getUserMedia error callback: the user refused access or no camera exists.
    var message = 'Access to camera was denied!';
    log(message);
}
function stop()
{
    // Flip the buttons back to the idle state (only Start remains usable).
    var stopBtn = document.getElementById('buttonStop');
    var snapBtn = document.getElementById('buttonSnap');
    if (stopBtn) stopBtn.disabled = true;
    if (snapBtn) snapBtn.disabled = true;
    // Shut down the camera stream (legacy MediaStream.stop, or IE's msStop).
    if (videoStream)
    {
        if (videoStream.stop) videoStream.stop();
        else if (videoStream.msStop) videoStream.msStop();
        videoStream.onended = null;
        videoStream = null;
    }
    // Detach the stream from the <video> element (Firefox used mozSrcObject).
    if (video)
    {
        video.onerror = null;
        video.pause();
        if (video.mozSrcObject)
            video.mozSrcObject = null;
        video.src = "";
    }
    var startBtn = document.getElementById('buttonStart');
    if (startBtn) startBtn.disabled = false;
}
function gotStream(stream)
{
// Success callback for getUserMedia: remember the stream and attach it to
// the <video> element using whichever mechanism this browser supports.
var myButton = document.getElementById('buttonStart');
if (myButton) myButton.disabled = true;
videoStream = stream;
log('Got stream.');
video.onerror = function ()
{
log('video.onerror');
if (video) stop();
};
stream.onended = noStream;
// Probe order matters: webkit object URL first, then Firefox 18+'s
// mozSrcObject, then FF16/17 direct assignment, then the standard URL API,
// then a last-resort direct assignment.
if (window.webkitURL) video.src = window.webkitURL.createObjectURL(stream);
else if (video.mozSrcObject !== undefined)
{//FF18a
video.mozSrcObject = stream;
video.play();
}
else if (navigator.mozGetUserMedia)
{//FF16a, 17a
video.src = stream;
video.play();
}
else if (window.URL) video.src = window.URL.createObjectURL(stream);
else video.src = stream;
// Frames are flowing now: enable the snapshot and stop buttons.
myButton = document.getElementById('buttonSnap');
if (myButton) myButton.disabled = false;
myButton = document.getElementById('buttonStop');
if (myButton) myButton.disabled = false;
}
function start()
{
    // Guard clauses: bail out early when the environment is unusable.
    if ((typeof window === 'undefined') || (typeof navigator === 'undefined'))
    {
        log('This page needs a Web browser with the objects window.* and navigator.*!');
        return;
    }
    if (!(video && canvas))
    {
        log('HTML context error!');
        return;
    }
    log('Get user media…');
    // Probe the prefixed getUserMedia variants in the original priority order;
    // the ms variant takes an explicit audio:false.
    var gum = navigator.getUserMedia || navigator.oGetUserMedia ||
              navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    if (gum) gum.call(navigator, {video:true}, gotStream, noStream);
    else if (navigator.msGetUserMedia) navigator.msGetUserMedia({video:true, audio:false}, gotStream, noStream);
    else log('getUserMedia() not available from your Web browser!');
}
// Ask for camera access as soon as the script loads.
start();
</script>
I got this code from https://codepen.io/blaberus/pen/raGdBG . Using this code I can see the video, but how can I record it?
Can anyone help?
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<meta name="mobile-web-app-capable" content="yes">
<meta id="theme-color" name="theme-color" content="#fff">
<base target="_blank">
<title>Media Recorder API Demo</title>
<link rel="stylesheet" href="css/main.css" />
<style>
a#downloadLink {
display: block;
margin: 0 0 1em 0;
min-height: 1.2em;
}
p#data {
min-height: 6em;
}
</style>
</head>
<body>
<div id="container">
<div style = "text-align:center;">
<h1>Media Recorder API Demo </h1>
<!-- <h2>Record a 640x480 video using the media recorder API implemented in Firefox and Chrome</h2> -->
<!-- Click handlers (onBtnRecordClicked etc.) are defined in js/main.js. -->
<video controls autoplay></video><br>
<button id="rec" onclick="onBtnRecordClicked()">Record</button>
<button id="pauseRes" onclick="onPauseResumeClicked()" disabled>Pause</button>
<button id="stop" onclick="onBtnStopClicked()" disabled>Stop</button>
</div>
<!-- Populated by mediaRecorder.onstop with the finished recording. -->
<a id="downloadLink" download="mediarecorder.webm" name="mediarecorder.webm" href></a>
<p id="data"></p>
<script src="js/main.js"></script>
</div>
</body>
</html>
<script>
'use strict';
/* globals MediaRecorder */
// Spec is at http://dvcs.w3.org/hg/dap/raw-file/tip/media-stream-capture/RecordingProposal.html
// Normalise the prefixed getUserMedia variants onto navigator.getUserMedia.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// 640x480 capture constraints. Older Chrome only understood the legacy
// "mandatory"/"optional" syntax; Firefox uses the standard min/ideal/max form.
var constraints;
if (getBrowser() == "Chrome") {
  constraints = {"audio": true, "video": { "mandatory": { "minWidth": 640, "maxWidth": 640, "minHeight": 480,"maxHeight": 480 }, "optional": [] } };//Chrome
} else if (getBrowser() == "Firefox") {
  constraints = {audio: true,video: { width: { min: 640, ideal: 640, max: 640 }, height: { min: 480, ideal: 480, max: 480 }}}; //Firefox
} else {
  // Fix: `constraints` was previously left undefined on every other browser,
  // so the later navigator.getUserMedia(constraints, ...) call could not work.
  constraints = {audio: true, video: true};
}
// UI elements the recorder script manipulates.
var recBtn = document.querySelector('button#rec');
var pauseResBtn = document.querySelector('button#pauseRes');
var stopBtn = document.querySelector('button#stop');
var videoElement = document.querySelector('video');
var dataElement = document.querySelector('#data');
var downloadLink = document.querySelector('a#downloadLink');
// Hide native playback controls while previewing the live stream.
videoElement.controls = false;
function errorCallback(error){
  // getUserMedia failure handler: surface the error on the console.
  var prefix = 'navigator.getUserMedia error: ';
  console.log(prefix, error);
}
/*
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var sourceBuffer;
*/
// Active MediaRecorder instance (created in startRecording).
var mediaRecorder;
// Recorded data chunks, filled by ondataavailable, consumed by onstop.
var chunks = [];
var count = 0;
function startRecording(stream) {
  log('Start recording...');
  // Pick the best supported WebM codec, preferring VP9 > H.264 > VP8.
  // MediaRecorder.isTypeSupported was announced in
  // https://developers.google.com/web/updates/2016/01/mediarecorder and later
  // introduced in the spec http://www.w3.org/TR/mediastream-recording/
  if (typeof MediaRecorder.isTypeSupported == 'function'){
    var options = null;
    if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
      options = {mimeType: 'video/webm;codecs=vp9'};
    } else if (MediaRecorder.isTypeSupported('video/webm;codecs=h264')) {
      options = {mimeType: 'video/webm;codecs=h264'};
    } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
      options = {mimeType: 'video/webm;codecs=vp8'};
    }
    // Fix: `options` used to be dereferenced unconditionally, which threw a
    // TypeError whenever none of the three codecs above was supported.
    if (options) {
      log('Using '+options.mimeType);
      mediaRecorder = new MediaRecorder(stream, options);
    } else {
      log('Using default codecs for browser');
      mediaRecorder = new MediaRecorder(stream);
    }
  }else{
    log('Using default codecs for browser');
    mediaRecorder = new MediaRecorder(stream);
  }
  pauseResBtn.textContent = "Pause";
  // Emit a dataavailable event every 10 ms so chunks[] fills continuously.
  mediaRecorder.start(10);
  // Show the live stream while recording (createObjectURL(stream) is the
  // legacy attachment API; modern browsers use srcObject instead).
  var url = window.URL || window.webkitURL;
  videoElement.src = url ? url.createObjectURL(stream) : stream;
  videoElement.play();
  mediaRecorder.ondataavailable = function(e) {
    chunks.push(e.data);
  };
  mediaRecorder.onerror = function(e){
    log('Error: ' + e);
    console.log('Error: ', e);
  };
  mediaRecorder.onstart = function(){
    log('Started & state = ' + mediaRecorder.state);
  };
  mediaRecorder.onstop = function(){
    log('Stopped & state = ' + mediaRecorder.state);
    // NOTE(review): the recorded bytes are WebM but the blob and file name
    // say mp4 — most players cope, but video/webm + ".webm" would be the
    // accurate labelling.
    var blob = new Blob(chunks, {type: "video/mp4"});
    chunks = [];
    // Expose the recording for playback and download.
    var videoURL = window.URL.createObjectURL(blob);
    downloadLink.href = videoURL;
    videoElement.src = videoURL;
    downloadLink.innerHTML = 'Download video file';
    var rand = Math.floor((Math.random() * 10000000));
    var name = "video_"+rand+".mp4" ;
    downloadLink.setAttribute( "download", name);
    downloadLink.setAttribute( "name", name);
  };
  mediaRecorder.onpause = function(){
    log('Paused & state = ' + mediaRecorder.state);
  };
  mediaRecorder.onresume = function(){
    log('Resumed & state = ' + mediaRecorder.state);
  };
  mediaRecorder.onwarning = function(e){
    log('Warning: ' + e);
  };
}
//function handleSourceOpen(event) {
// console.log('MediaSource opened');
// sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp9"');
// console.log('Source buffer: ', sourceBuffer);
//}
function onBtnRecordClicked (){
  // Recording needs MediaRecorder plus some getUserMedia implementation.
  var unsupported = (typeof MediaRecorder === 'undefined') || !navigator.getUserMedia;
  if (unsupported) {
    alert('MediaRecorder not supported on your browser, use Firefox 30 or Chrome 49 instead.');
    return;
  }
  navigator.getUserMedia(constraints, startRecording, errorCallback);
  recBtn.disabled = true;
  pauseResBtn.disabled = false;
  stopBtn.disabled = false;
}
function onBtnStopClicked(){
  // Finish the recording (fires mediaRecorder.onstop) and restore the UI.
  mediaRecorder.stop();
  videoElement.controls = true;
  stopBtn.disabled = true;
  pauseResBtn.disabled = true;
  recBtn.disabled = false;
}
function onPauseResumeClicked(){
  // The button label doubles as the state flag: "Pause" means recording.
  var isRecording = (pauseResBtn.textContent === "Pause");
  if (isRecording) {
    console.log("pause");
    pauseResBtn.textContent = "Resume";
    mediaRecorder.pause();
    stopBtn.disabled = true;
  } else {
    console.log("resume");
    pauseResBtn.textContent = "Pause";
    mediaRecorder.resume();
    stopBtn.disabled = false;
  }
  recBtn.disabled = true;
  pauseResBtn.disabled = false;
}
function log(message){
  // Append one entry (on its own line) to the on-page log element.
  var current = dataElement.innerHTML;
  dataElement.innerHTML = current + '<br>' + message;
}
//browser ID
// Identify the browser family from the user-agent string. Probe order
// matters: Chrome's UA also contains "Safari", so "Chrome" is checked first.
// (The version-parsing locals of the original were dead code — only the
// browser name was ever returned — so they are omitted here.)
function getBrowser(){
  var ua = navigator.userAgent;
  if (ua.indexOf("Opera") != -1) return "Opera";
  if (ua.indexOf("MSIE") != -1) return "Microsoft Internet Explorer";
  if (ua.indexOf("Chrome") != -1) return "Chrome";
  if (ua.indexOf("Safari") != -1) return "Safari";
  if (ua.indexOf("Firefox") != -1) return "Firefox";
  // Unknown browser: most UAs end with "Name/Version" — use that name when
  // it contains at least one letter, otherwise fall back to appName.
  var nameOffset = ua.lastIndexOf(' ') + 1;
  var verOffset = ua.lastIndexOf('/');
  if (nameOffset < verOffset) {
    var name = ua.substring(nameOffset, verOffset);
    if (name.toLowerCase() != name.toUpperCase()) return name;
  }
  return navigator.appName;
}
</script>
Related
I'm trying to do eye-blink detection using the MediaRecorder API with HTML and JavaScript. I have captured video and downloaded it to my browser's default download path, and I pass that video to my blink-detection code. My problem is that I want the captured videos downloaded into a directory inside my project instead. Can anyone help? Thanks in advance. :)
here is my html index.html and main.js:
'use strict';
/* globals MediaRecorder */
// Spec is at http://dvcs.w3.org/hg/dap/raw-file/tip/media-stream-capture/RecordingProposal.html
// Normalise prefixed getUserMedia implementations onto a single entry point.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
// 640x480 capture constraints, per-browser syntax (Chrome did not support the
// new constraints spec until 59 for video and 60 for audio).
var constraints;
if (getBrowser() == "Chrome") {
    constraints = {
        "audio": true,
        "video": {
            "mandatory": {
                "minWidth": 640,
                "maxWidth": 640,
                "minHeight": 480,
                "maxHeight": 480
            },
            "optional": []
        }
    };
} else if (getBrowser() == "Firefox") {
    constraints = {
        audio: true,
        video: {
            width: { min: 640, ideal: 640, max: 640 },
            height: { min: 480, ideal: 480, max: 480 }
        }
    }; //Firefox
} else {
    // Fix: `constraints` was previously left undefined on every other
    // browser, making the later getUserMedia(constraints, ...) call unusable.
    constraints = { audio: true, video: true };
}
// UI elements driven by the recorder script.
var recBtn = document.querySelector('button#rec');
var pauseResBtn = document.querySelector('button#pauseRes');
var stopBtn = document.querySelector('button#stop');
var videoElement = document.querySelector('video');
var dataElement = document.querySelector('#data');
var downloadLink = document.querySelector('a#downloadLink');
// Hide native playback controls while previewing the live stream.
videoElement.controls = false;
function errorCallback(error) {
    // Report getUserMedia failures on the console.
    var prefix = 'navigator.getUserMedia error: ';
    console.log(prefix, error);
}
/*
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var sourceBuffer;
*/
// Active MediaRecorder (created in startRecording) and its recorded chunks.
var mediaRecorder;
var chunks = [];
var count = 0;
function startRecording(stream) {
    log('Start recording...');
    // Pick the best supported WebM codec, preferring VP9 > H.264 > VP8.
    // MediaRecorder.isTypeSupported was announced in
    // https://developers.google.com/web/updates/2016/01/mediarecorder and
    // later introduced in the spec http://www.w3.org/TR/mediastream-recording/
    if (typeof MediaRecorder.isTypeSupported == 'function') {
        var options = null;
        if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
            options = { mimeType: 'video/webm;codecs=vp9' };
        } else if (MediaRecorder.isTypeSupported('video/webm;codecs=h264')) {
            options = { mimeType: 'video/webm;codecs=h264' };
        } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
            options = { mimeType: 'video/webm;codecs=vp8' };
        }
        // Fix: `options` used to be dereferenced even when no codec matched,
        // throwing a TypeError instead of falling back to browser defaults.
        if (options) {
            log('Using ' + options.mimeType);
            mediaRecorder = new MediaRecorder(stream, options);
        } else {
            log('isTypeSupported is not supported, using default codecs for browser');
            mediaRecorder = new MediaRecorder(stream);
        }
    } else {
        log('isTypeSupported is not supported, using default codecs for browser');
        mediaRecorder = new MediaRecorder(stream);
    }
    pauseResBtn.textContent = "Pause";
    // Request a dataavailable event every 10 ms so chunks[] fills steadily.
    mediaRecorder.start(10);
    // Show the live stream while recording (legacy object-URL attachment).
    var url = window.URL || window.webkitURL;
    videoElement.src = url ? url.createObjectURL(stream) : stream;
    videoElement.play();
    mediaRecorder.ondataavailable = function(e) {
        chunks.push(e.data);
    };
    mediaRecorder.onerror = function(e) {
        log('Error: ' + e);
        console.log('Error: ', e);
    };
    mediaRecorder.onstart = function() {
        log('Started & state = ' + mediaRecorder.state);
    };
    mediaRecorder.onstop = function() {
        // Timestamp used in the generated file name (d.m.yyyy_h.min).
        var dt = new Date();
        var day = dt.getDate();
        var month = dt.getMonth() + 1;
        var year = dt.getFullYear();
        var hour = dt.getHours();
        var mins = dt.getMinutes();
        var postfix = day + "." + month + "." + year + "_" + hour + "." + mins;
        log('Stopped & state = ' + mediaRecorder.state);
        // NOTE(review): the recorded bytes are WebM but are labelled mp4;
        // video/webm + ".webm" would be the accurate labelling.
        var blob = new Blob(chunks, {
            type: "video/mp4"
        });
        chunks = [];
        // Expose the recording for playback and download. A browser cannot
        // choose the download directory from JS — the "download" attribute
        // only sets the file NAME; the target folder is a user setting.
        var videoURL = window.URL.createObjectURL(blob);
        downloadLink.href = videoURL;
        videoElement.src = videoURL;
        downloadLink.innerHTML = 'Download video file';
        var rand = Math.floor((Math.random() * 10000000));
        var name = ("video" + "_" + rand + "_" + postfix + ".mp4");
        downloadLink.setAttribute("download", name);
        downloadLink.setAttribute("name", name);
    };
    mediaRecorder.onpause = function() {
        log('Paused & state = ' + mediaRecorder.state);
    };
    mediaRecorder.onresume = function() {
        log('Resumed & state = ' + mediaRecorder.state);
    };
    mediaRecorder.onwarning = function(e) {
        log('Warning: ' + e);
    };
}
//function handleSourceOpen(event) {
// console.log('MediaSource opened');
// sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp9"');
// console.log('Source buffer: ', sourceBuffer);
//}
function onBtnRecordClicked() {
    // Recording needs MediaRecorder plus some getUserMedia implementation.
    var supported = (typeof MediaRecorder !== 'undefined') && !!navigator.getUserMedia;
    if (!supported) {
        alert('MediaRecorder not supported on your browser, use Firefox 30 or Chrome 49 instead.');
        return;
    }
    navigator.getUserMedia(constraints, startRecording, errorCallback);
    recBtn.disabled = true;
    pauseResBtn.disabled = false;
    stopBtn.disabled = false;
}
function onBtnStopClicked() {
    // End the recording (fires mediaRecorder.onstop) and reset the buttons.
    mediaRecorder.stop();
    videoElement.controls = true;
    stopBtn.disabled = true;
    pauseResBtn.disabled = true;
    recBtn.disabled = false;
}
function onPauseResumeClicked() {
    // The button label doubles as the state flag: "Pause" while recording.
    var isRecording = (pauseResBtn.textContent === "Pause");
    if (isRecording) {
        console.log("pause");
        pauseResBtn.textContent = "Resume";
        mediaRecorder.pause();
        stopBtn.disabled = true;
    } else {
        console.log("resume");
        pauseResBtn.textContent = "Pause";
        mediaRecorder.resume();
        stopBtn.disabled = false;
    }
    recBtn.disabled = true;
    pauseResBtn.disabled = false;
}
function log(message) {
    // Append one line to the on-page log element.
    var existing = dataElement.innerHTML;
    dataElement.innerHTML = existing + '<br>' + message;
}
//browser ID
// Identify the browser family from the user-agent string. Probe order
// matters: Chrome's UA also contains "Safari", so "Chrome" is checked first.
// (The version-parsing locals of the original were dead code — only the
// browser name was ever returned — so they are omitted here.)
function getBrowser() {
    var ua = navigator.userAgent;
    if (ua.indexOf("Opera") != -1) return "Opera";
    if (ua.indexOf("MSIE") != -1) return "Microsoft Internet Explorer";
    if (ua.indexOf("Chrome") != -1) return "Chrome";
    if (ua.indexOf("Safari") != -1) return "Safari";
    if (ua.indexOf("Firefox") != -1) return "Firefox";
    // Unknown browser: most UAs end with "Name/Version" — use that name when
    // it contains at least one letter, otherwise fall back to appName.
    var nameOffset = ua.lastIndexOf(' ') + 1;
    var verOffset = ua.lastIndexOf('/');
    if (nameOffset < verOffset) {
        var name = ua.substring(nameOffset, verOffset);
        if (name.toLowerCase() != name.toUpperCase()) return name;
    }
    return navigator.appName;
}
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<meta name="mobile-web-app-capable" content="yes">
<meta id="theme-color" name="theme-color" content="#fff">
<base target="_blank">
<title>Media Recorder API Demo</title>
<link rel="stylesheet" href="../css/main.css" />
<style>
a#downloadLink {
display: block;
margin: 0 0 1em 0;
min-height: 1.2em;
}
p#data {
min-height: 6em;
}
</style>
</head>
<body>
<div id="container">
<div style="text-align:center;">
<h1>Media Recorder API Demo </h1>
<!-- Click handlers (onBtnRecordClicked etc.) are defined in ../js/main.js. -->
<video controls autoplay></video><br>
<button id="rec" onclick="onBtnRecordClicked()">Record</button>
<button id="pauseRes" onclick="onPauseResumeClicked()" disabled>Pause</button>
<button id="stop" onclick="onBtnStopClicked()" disabled>Stop</button>
</div>
<!-- Populated by mediaRecorder.onstop with the finished recording. -->
<a id="downloadLink" name="mediarecorder.mp4" download=""></a>
<p id="data"></p>
<script src="../js/main.js"></script>
</div>
</body>
</html>
I use webrtc in javascript:
function start() {
    // Ask for camera + microphone and show the live stream in #my-video.
    var constraints = {
        audio: true,
        video: true
    };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(function (mediaStream) {
            var video = document.querySelector('#my-video');
            // Fix: URL.createObjectURL(MediaStream) was deprecated and then
            // removed from browsers; attach the stream via srcObject and
            // start playback once metadata is available.
            if ('srcObject' in video) {
                video.srcObject = mediaStream;
            } else {
                // Legacy fallback for browsers without srcObject support.
                video.src = window.URL.createObjectURL(mediaStream);
            }
            video.onloadedmetadata = function (e) {
                video.play();
            };
        })
        .catch(function (err) {
            console.log(err.name + ": " + err.message);
        });
}
html:
<!-- Muted autoplay preview; Start triggers the getUserMedia permission prompt. -->
<video id="my-video" autoplay="true" muted="true"></video>
<br />
<input id="start" type="button" value="Start" onclick="start()" />
Please tell me what I need to do to record sound and send it to the server (ASP.NET Core).
Recording requires the RecorderJS library.
HTML:
<h4>Recording audio</h4>
<!-- Record/Stop toggle each other via sibling navigation in the JS below. -->
<input type="button" onclick="startRecording(this);" value="Record" />
<input type="button" onclick="stopRecording(this);" value="Stop" />
<h4>Record:</h4>
<div class="newRecord"></div>
JS:
window.onload = function () {
init();
};
// Shared WebAudio context and the RecorderJS instance (set up in init()).
var audio_context;
var recorder;
function init() {
    // Install vendor-prefixed WebAudio/getUserMedia aliases, create the
    // shared AudioContext, then ask for microphone access.
    try {
        // webkit shim
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia;
        window.URL = window.URL || window.webkitURL;
        audio_context = new AudioContext;
    } catch (e) {
        alert('No web audio support in this browser!');
        // Fix: this previously logged `err`, which does not exist in this
        // scope and itself threw a ReferenceError inside the catch block.
        console.log(e.name + ": " + e.message);
    }
    queryToUseMicrophone();
};
function queryToUseMicrophone() {
    // Ask for the microphone and wire the stream into a RecorderJS instance.
    navigator.mediaDevices.getUserMedia({ audio: true })
        .then(function (mediaStream) {
            var source = audio_context.createMediaStreamSource(mediaStream);
            recorder = new Recorder(source);
        })
        .catch(function (err) {
            console.log(err.name + ": " + err.message);
        });
}
function startRecording(button) {
    // Begin capturing, then swap the enabled state of Record/Stop.
    if (recorder) recorder.record();
    button.disabled = true;
    button.nextElementSibling.disabled = false;
}
function stopRecording(button) {
    // Stop capturing, re-enable Record, then export and reset the buffer.
    if (recorder) recorder.stop();
    button.disabled = true;
    button.previousElementSibling.disabled = false;
    createDownloadLink();
    recorder.clear();
}
function createDownloadLink() {
    if (!recorder) return;
    // Export the captured audio as WAV, then append a player and a
    // timestamped download link to the .newRecord container.
    recorder.exportWAV(function (blob) {
        var url = URL.createObjectURL(blob);
        var audio = document.createElement('audio');
        audio.controls = true;
        audio.src = url;
        var link = document.createElement('a');
        link.href = url;
        link.download = new Date().toISOString() + '.wav';
        link.innerHTML = link.download;
        document.querySelector(".newRecord").appendChild(audio);
        document.querySelector(".newRecord").appendChild(link);
    });
}
I am new to WebRTC. I have two HTML pages that are supposed to capture and show a webcam, respectively. My expectation is that the "webcamerasrc.html" page captures the webcam and the "WEBCAMERAVew.html" page displays it. The first page captures video, but the second page does not show it — I think the WebRTC handshake is not completing. Any suggestions on making this work, or on understanding the WebRTC handshake / SDP exchange between two different pages, would be appreciated.
Here are the snippets.
webcamerasrc.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>WEB CAMERA SOURCE</title>
<!-- Firebase + RTCPeerConnection shim used by the signalling script below. -->
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="https://webrtcexperiment-webrtc.netdna-ssl.com/RTCPeerConnection-v1.5.js"> </script>
</head>
<body>
<h1>WEB CAMERA SOURCE</h1>
<!-- localVideo previews the captured camera; rVideo is unused on this page. -->
<div id="container">
<video autoplay="true" id="localVideo">
</video>
<video autoplay="true" id="rVideo">
</video>
</div>
<script>
// Offerer side: capture the local camera, send an SDP offer plus ICE
// candidates over the WebSocket, and apply the remote answer/candidates.
var socket = new WebSocket('ws://localhost:8080/IntegrateIntoWebTest/websocket');
var mediaConstraints = {
    optional: [],
    mandatory: {
        OfferToReceiveAudio: true,
        OfferToReceiveVideo: true
    }
};
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121' };
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN] };
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};
var video = document.getElementById('localVideo');
var offerer = new RTCPeerConnection(iceServers);
if (navigator.getUserMedia) {
    navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
}
function VideoSuccess(stream) {
    video.src = window.URL.createObjectURL(stream);
    offerer.addStream(stream);
    offerer.onicecandidate = function (event) {
        if (!event || !event.candidate) return;
        // Fix: this handler used to reassign socket.onmessage, which
        // overwrote the answer-SDP handler below and broke the handshake.
        // Now it only forwards the local candidate to the answerer.
        var o_icecandidate = event.candidate;
        socket.send(JSON.stringify({o_icecandidate}));
    };
    offerer.onaddstream = function (stream) {
        video.src = window.URL.createObjectURL(stream.stream);
    };
}
offerer.createOffer(function (offerSdp) {
    offerer.setLocalDescription(offerSdp);
    socket.send(JSON.stringify({offerSdp}));
}, function(e) {console.log(e);}, mediaConstraints);
// Single signalling handler: applies the remote answer SDP and any ICE
// candidates sent back by the viewer page.
socket.onmessage = function(event)
{
    var actualData = JSON.parse(event.data);
    console.log(actualData);
    // Fix: the original condition used `=` (assignment) instead of `===`,
    // and also crashed on candidate-only messages with no answerSdp field.
    if (actualData.answerSdp && actualData.answerSdp.type === 'answer')
    {
        var sd1 = new RTCSessionDescription(actualData.answerSdp);
        offerer.setRemoteDescription(sd1);
    }
    else if (actualData.a_icecandidate)
    {
        offerer.addIceCandidate(new RTCIceCandidate(actualData.a_icecandidate));
    }
}
function VideoError(e) {
    console.log(e);
}
</script>
</body>
</html>
WEBCAMERAVew.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>REMOTE WEB CAMERA VIEW</title>
</head>
<body>
<h1>REMOTE WEB CAMERA VIEW</h1>
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="//cdn.webrtc-experiment.com/RTCPeerConnection-v1.5.js"> </script>
<div id="container">
<video autoplay="true" id="localVideo">
</video>
<video autoplay="true" id="rVideo">
</video>
</div>
<script>
// Answerer side: wait for the offer SDP over the WebSocket, capture the
// local camera, send back an answer, and exchange ICE candidates.
var myDataRef = new WebSocket('ws://localhost:8080/wsTest/websocket');
myDataRef.onmessage = function(event) {
    var actualData = JSON.parse(event.data);
    // Fix: the original condition used `=` (assignment) instead of `===`,
    // and crashed on candidate-only messages that carry no offerSdp field.
    // Incoming offerer ICE candidates are now actually added (previously
    // they were only logged by a nested onmessage override).
    if (actualData.offerSdp && actualData.offerSdp.type === 'offer') {
        answererPeer(event);
    } else if (actualData.o_icecandidate) {
        answerer.addIceCandidate(new RTCIceCandidate(actualData.o_icecandidate));
    }
};
var mediaConstraints = {
    optional: [],
    mandatory: {
        OfferToReceiveAudio: true,
        OfferToReceiveVideo: true
    }
};
window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121' };
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN] };
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};
var answerer = new RTCPeerConnection(iceServers);
var video = document.getElementById('localVideo');
var remoteVideo = document.getElementById('rVideo');
function answererPeer(event) {
    var sd = JSON.parse(event.data);
    var rd = new RTCSessionDescription(sd.offerSdp);
    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
    }
    function VideoSuccess(mediaStream)
    {
        answerer.addStream(mediaStream);
        answerer.setRemoteDescription(rd);
        answerer.createAnswer(function (answerSdp) {
            answerer.setLocalDescription(answerSdp);
            myDataRef.send(JSON.stringify({answerSdp}));
        }, function() {}, mediaConstraints);
        answerer.onicecandidate = function (event) {
            if (!event || !event.candidate) return;
            // Fix: this used to reassign myDataRef.onmessage (losing the
            // offer handler) and merely log incoming candidates. Incoming
            // messages are handled by the top-level onmessage above.
            var a_icecandidate = event.candidate;
            myDataRef.send(JSON.stringify({a_icecandidate}));
        };
        answerer.onaddstream = function (mediaStream) {
            // Fix: onaddstream receives an event; the stream itself is on
            // its .stream property (the old code passed the event object).
            remoteVideo.src = URL.createObjectURL(mediaStream.stream);
            remoteVideo.play();
        };
    }
    function VideoError(e) {
        console.log(e);
    }
}
</script>
</body>
</html>
I am building WebRTC video chat.
We need to send an image instead of video; someone said an image can be converted into a MediaStream.
I tried converting the image to base64 and calling addStream, but it failed. How can this be done?
// NOTE: image loading is asynchronous, so the data URL cannot be returned
// synchronously — this variable was always undefined in the original; the
// optional callback below delivers the actual result.
var imagestream = getBase64FromImageUrl('./unown.png');
// Draws the image at `URL` onto an offscreen canvas and produces its PNG
// data URL once the image has loaded.
// Fix: the computed data URL was previously discarded (only alert()ed), so
// callers could never obtain the conversion result. A backward-compatible
// optional `onReady(dataURL)` callback now hands it back; without a
// callback the base64 body is alert()ed as before.
function getBase64FromImageUrl(URL, onReady) {
    var img = new Image();
    img.src = URL;
    img.onload = function () {
        var canvas = document.createElement("canvas");
        canvas.width = this.width;
        canvas.height = this.height;
        var ctx = canvas.getContext("2d");
        ctx.drawImage(this, 0, 0);
        var dataURL = canvas.toDataURL("image/png");
        if (onReady) onReady(dataURL);
        else alert( dataURL.replace(/^data:image\/(png|jpg);base64,/, ""));
    };
}
Try Whammy.js : A Real Time Javascript WebM Encoder
Try Recorder.js : This is for Audio (if you need) ;)
JS(script.js):
/*Adapting for different vendors: normalise URL, requestAnimationFrame,
getUserMedia and AudioContext onto their unprefixed names.*/
window.URL =
window.URL ||
window.webkitURL ||
window.mozURL ||
window.msURL;
window.requestAnimationFrame =
window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.msRequestAnimationFrame ||
window.oRequestAnimationFrame;
window.cancelAnimationFrame =
window.cancelAnimationFrame ||
window.webkitCancelAnimationFrame ||
window.mozCancelAnimationFrame ||
window.msCancelAnimationFrame ||
window.oCancelAnimationFrame;
navigator.getUserMedia =
navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
window.AudioContext =
window.AudioContext ||
window.webkitAudioContext;
/*Global stuff*/
// 320x240 preview <video> plus an offscreen canvas used for frame grabs.
var video = get('video');
video.width = 320;
video.height = 240;
var canvas = document.createElement('canvas');
// requestAnimationFrame handle, captured WebP frames, audio recording state.
var rafId = null;
var frames = [];
var audioContext = new AudioContext;
var audioRecorder;
/* Shorthand for document.querySelector (null when nothing matches). */
function get(selector) {
    var element = document.querySelector(selector);
    return element || null;
}
/* Toggle the Record button between its idle and recording states. */
function recordIt() {
    var record = get('#record');
    var wasDisabled = record.disabled;
    record.textContent = wasDisabled ? 'Record' : 'Recording...';
    record.classList.toggle('recording');
    record.disabled = !wasDisabled;
}
/*Get Media (Video and Audio) from user*/
function getMedia(event) {
// Swap button states: camera is acquired once, then recording is possible.
event.target.disabled = true;
get('#record').disabled = false;
video.controls = false;
// NOTE(review): this local setVideo shadows the global setVideo() defined
// below; it only resizes the elements after a 1 s delay.
var setVideo = function() {
setTimeout(function() {
video.width = 320;
video.height = 240;
canvas.width = video.width;
canvas.height = video.height;
}, 1000);
};
if (navigator.getUserMedia) {
navigator.getUserMedia({video: true, audio: true}, function(stream) {
// Firefox attaches via mozSrcObject; others via the legacy object URL.
if (video.mozSrcObject !== undefined) {
video.mozSrcObject = stream;
} else {
video.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
}
// Route the audio through the WebAudio graph and into RecorderJS.
var audioInput = audioContext.createMediaStreamSource(stream);
audioInput.connect(audioContext.destination);
audioRecorder = new Recorder(audioInput);
setVideo();
}, function(e) {
alert('Error'+e);
console.log(e)
});
} else {
console.log('getUserMedia() not supported in this browser.');
}
};
/* Record: grab a frame from the video on every animation tick and store it
   as a WebP data URL; also start the RecorderJS audio capture. */
function record() {
    var context = canvas.getContext('2d');
    var height = canvas.height;
    var width = canvas.width;
    frames = [];
    recordIt();
    get('#stop').disabled = false;
    function draw(time) {
        rafId = requestAnimationFrame(draw);
        context.drawImage(video, 0, 0, width, height);
        frames.push(canvas.toDataURL('image/webp', 1));
    }
    rafId = requestAnimationFrame(draw);
    //Audio stuff
    audioRecorder.clear();
    audioRecorder.record();
}
/*Stop Recording: freeze capture, then hand off to the assemblers*/
function stop() {
  cancelAnimationFrame(rafId);    // halt the frame-capture loop
  get('#stop').disabled = true;   // guard against a double-stop
  recordIt();                     // flip the Record button back to idle
  setVideo();                     // build the webm from the captured frames
  // Audio side: end capture and export/attach the WAV.
  audioRecorder.stop();
  setAudio();
}
/*Call Whammy for creating video: builds (or reuses) the playback <video>
  and points it at either the supplied URL or a fresh webm built from
  the captured frames.*/
function setVideo(vidUrl) {
  var url = vidUrl || null;
  var player = get('#recordedDiv video') || null;
  if (player) {
    // Reusing an existing player: release its previous object URL first.
    window.URL.revokeObjectURL(player.src);
  } else {
    player = document.createElement('video');
    player.autoplay = true;
    player.controls = true;
    player.style.width = canvas.width + 'px';
    player.style.height = canvas.height + 'px';
    get('#recordedDiv').appendChild(player);
  }
  if (!url) {
    // No URL given: encode the frame list at a nominal 60fps.
    var webmBlob = Whammy.fromImageArray(frames, 1000 / 60);
    url = window.URL.createObjectURL(webmBlob);
  }
  player.src = url;
}
/*Export the recorded audio as a WAV blob and attach it to an <audio>
  player inside #recordedDiv, reusing the player on subsequent takes.
  Fixes: the original redeclared `var audio` inside the if (shadowing
  lint hazard) and leaked the previous recording's object URL when
  replacing an existing player.*/
function setAudio() {
  audioRecorder.exportWAV(function(blob) {
    var audio = get('#recordedDiv audio') || null;
    var url = URL.createObjectURL(blob);
    if (!audio) {
      // First take: build the player and attach it.
      audio = document.createElement('audio');
      audio.autoplay = true;
      audio.controls = true;
      audio.src = url;
      get('#recordedDiv').appendChild(audio);
    } else {
      // Later takes: release the stale object URL before replacing it.
      URL.revokeObjectURL(audio.src);
      audio.src = url;
    }
  });
}
/*Fingers Crossed: wire each control button to its handler*/
function init() {
  var wiring = [
    ['#camera', getMedia],
    ['#record', record],
    ['#stop', stop]
  ];
  wiring.forEach(function(pair) {
    get(pair[0]).addEventListener('click', pair[1]);
  });
}
// Kick everything off; the script is loaded at the end of <body>, so the
// elements it queries already exist.
init();
HTML
<html><head>
<meta charset="utf-8">
<title>Record and Play Simple Messages</title>
<link rel="stylesheet" type="text/css" href="./css/style.css">
<style type="text/css"></style></head>
<body>
Records WebM video and audio using the Web Audio API, whammy.js and recorder.js.
WebP images are not supported in Firefox, hence it fails there. It works on Chrome, though.
<section>
<div>
<video autoplay="" width="320" height="240"></video><br>
<button id="camera">GetUserMedia</button>
</div>
<div id="recordedDiv">
<button id="record" disabled="">Record</button>
<button id="stop" disabled="">Stop</button><br>
</div>
</section>
<script type="text/javascript" src="./js/whammy.min.js"></script>
<script type="text/javascript" src="./js/recorder.js"></script>
<script type="text/javascript" src="./js/script.js"></script>
</body></html>
DEMO
I know I am answering a bit late, and this is only applicable to Firefox (41 and above): you can try to create a MediaStream from the canvas using CanvasCaptureMediaStream.
Edit: they are implementing this media capture option in Chrome as well; you can follow the issue here.
Hi, I am making a program that uses Google Chrome to open my Android camera. I need to know how to close Chrome completely when the user presses the back button, or else disable the use of the back button while in Chrome. This is my code. I would like to do it with JavaScript.
<body style="overflow: hidden">
<video muted autoplay id="myvideo" style="width: 100%; height: auto"></video>
<div class='select'>
<select style="visibility: hidden" id='videoSource'>
</select>
</div>
</body>
<script type="text/javascript">
//Camera
// Live camera feed element (the first <video> on the page).
var videoElement = document.querySelector("video");
// Hidden <select> listing the available camera sources.
var videoSelect = document.querySelector("select#videoSource");
// Legacy vendor-prefixed getUserMedia shim (pre-mediaDevices API).
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
//Checks for the sources available of cameras.
// Populates the camera <select> from the enumerated media sources and
// auto-selects/starts the last video source found. Non-video sources are
// only logged. Fixes: loose !=/== comparisons replaced with strict ones;
// <option> is no longer created (and discarded) for non-video sources.
function gotSources(sourceInfos) {
  for (var i = 0; i !== sourceInfos.length; ++i) {
    var sourceInfo = sourceInfos[i];
    if (sourceInfo.kind === 'video') {
      var option = document.createElement("option");
      option.value = sourceInfo.id;
      option.text = sourceInfo.label || 'camera ' + (videoSelect.length + 1);
      videoSelect.appendChild(option);
      // Default to the last enumerated camera and start streaming.
      if (i === sourceInfos.length - 1) {
        option.selected = true;
        start();
      }
    } else {
      console.log('Some other kind of source: ', sourceInfo);
    }
  }
}
// NOTE(review): MediaStreamTrack.getSources() was an early API that has since
// been removed (replaced by navigator.mediaDevices.enumerateDevices()); this
// branch only succeeds on older Chrome builds — verify target browsers.
if (typeof MediaStreamTrack === 'undefined') {
alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
} else {
MediaStreamTrack.getSources(gotSources);
}
//Checks if everything is successful.
// getUserMedia success callback: publishes the stream globally (console
// debugging / reuse by start()) and attaches it to the <video> element.
// Fix: modern browsers removed URL.createObjectURL(MediaStream), so prefer
// srcObject and keep the object-URL path only as a legacy fallback.
function successCallback(stream) {
  window.stream = stream; // make stream available to console
  if ('srcObject' in videoElement) {
    videoElement.srcObject = stream;
  } else {
    videoElement.src = window.URL.createObjectURL(stream);
  }
  videoElement.play();
}
//Checks if an error occurred
// getUserMedia failure callback: records the error for debugging only;
// no user-visible recovery is attempted.
function errorCallback(error) {
console.log("navigator.getUserMedia error: ", error);
}
// starts the streaming of the camera.
// Tears down any previous stream, then requests the camera currently chosen
// in the <select>. Fix: MediaStream.stop() was removed from the spec; stop
// each track via getTracks() and keep stream.stop() only as a legacy fallback.
function start() {
  if (window.stream) {
    videoElement.src = null;
    if (window.stream.getTracks) {
      window.stream.getTracks().forEach(function(track) { track.stop(); });
    } else {
      window.stream.stop();
    }
  }
  var videoSource = videoSelect.value;
  var constraints = {
    video: {
      optional: [{ sourceId: videoSource }]
    }
  };
  navigator.getUserMedia(constraints, successCallback, errorCallback);
}
// Restart the stream whenever the user picks a different camera source.
videoSelect.onchange = start;