WebRTC: is it possible to convert an image to a MediaStream? - javascript

I'm building a WebRTC video chat.
We need to send an image instead of video, and someone said an image can be converted to a MediaStream.
I tried converting the image to base64 and calling addStream, but it failed. How can I do that?
var imagestream = getBase64FromImageUrl('./unown.png'); // undefined: the function returns nothing

function getBase64FromImageUrl(URL) {
    var img = new Image();
    img.src = URL;
    img.onload = function () {
        var canvas = document.createElement("canvas");
        canvas.width = this.width;
        canvas.height = this.height;
        var ctx = canvas.getContext("2d");
        ctx.drawImage(this, 0, 0);
        // This produces a base64 string, not a MediaStream
        var dataURL = canvas.toDataURL("image/png");
        alert(dataURL.replace(/^data:image\/(png|jpg);base64,/, ""));
    };
}

Try Whammy.js: a real-time JavaScript WebM encoder.
Try Recorder.js: this one is for audio (if you need it) ;)
JS (script.js):
/* Adapting for different vendors */
window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
window.requestAnimationFrame = window.requestAnimationFrame ||
    window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame ||
    window.msRequestAnimationFrame || window.oRequestAnimationFrame;
window.cancelAnimationFrame = window.cancelAnimationFrame ||
    window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame ||
    window.msCancelAnimationFrame || window.oCancelAnimationFrame;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.AudioContext = window.AudioContext || window.webkitAudioContext;

/* Global stuff */
var video = get('video');
video.width = 320;
video.height = 240;

var canvas = document.createElement('canvas');
var rafId = null;
var frames = [];
var audioContext = new AudioContext();
var audioRecorder;

/* Save typing :) */
function get(selector) {
    return document.querySelector(selector) || null;
}

/* Wrapper for recording */
function recordIt() {
    var record = get('#record');
    record.textContent = record.disabled ? 'Record' : 'Recording...';
    record.classList.toggle('recording');
    record.disabled = !record.disabled;
}

/* Get media (video and audio) from the user */
function getMedia(event) {
    event.target.disabled = true;
    get('#record').disabled = false;
    video.controls = false;

    // renamed from setVideo to avoid shadowing the global setVideo() below
    var setVideoSize = function() {
        setTimeout(function() {
            video.width = 320;
            video.height = 240;
            canvas.width = video.width;
            canvas.height = video.height;
        }, 1000);
    };

    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: true, audio: true}, function(stream) {
            if (video.mozSrcObject !== undefined) {
                video.mozSrcObject = stream;
            } else {
                video.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
            }
            var audioInput = audioContext.createMediaStreamSource(stream);
            audioInput.connect(audioContext.destination);
            audioRecorder = new Recorder(audioInput);
            setVideoSize();
        }, function(e) {
            alert('Error: ' + e);
            console.log(e);
        });
    } else {
        console.log('getUserMedia() not supported in this browser.');
    }
}

/* Record function: draws frames and pushes them to an array */
function record() {
    var context = canvas.getContext('2d');
    var CANVAS_HEIGHT = canvas.height;
    var CANVAS_WIDTH = canvas.width;

    frames = [];
    recordIt();
    get('#stop').disabled = false;

    function draw() {
        rafId = requestAnimationFrame(draw);
        context.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
        // Each frame is stored as a WebP data URL for Whammy
        frames.push(canvas.toDataURL('image/webp', 1));
    }
    rafId = requestAnimationFrame(draw);

    // Audio stuff
    audioRecorder.clear();
    audioRecorder.record();
}

/* Stop recording */
function stop() {
    cancelAnimationFrame(rafId);
    get('#stop').disabled = true;
    recordIt();
    setVideo();
    // Audio stuff
    audioRecorder.stop();
    setAudio();
}

/* Call Whammy to create the video */
function setVideo(vidUrl) {
    var url = vidUrl || null;
    var video = get('#recordedDiv video') || null;
    if (!video) {
        video = document.createElement('video');
        video.autoplay = true;
        video.controls = true;
        video.style.width = canvas.width + 'px';
        video.style.height = canvas.height + 'px';
        get('#recordedDiv').appendChild(video);
    } else {
        window.URL.revokeObjectURL(video.src);
    }
    if (!url) {
        var webmBlob = Whammy.fromImageArray(frames, 1000 / 60);
        url = window.URL.createObjectURL(webmBlob);
    }
    video.src = url;
}

function setAudio() {
    audioRecorder.exportWAV(function(blob) {
        var audio = get('#recordedDiv audio') || null;
        var url = URL.createObjectURL(blob);
        if (!audio) {
            audio = document.createElement('audio');
            audio.autoplay = true;
            audio.controls = true;
            audio.src = url;
            get('#recordedDiv').appendChild(audio);
        } else {
            audio.src = url;
        }
    });
}

/* Fingers crossed */
function init() {
    get('#camera').addEventListener('click', getMedia);
    get('#record').addEventListener('click', record);
    get('#stop').addEventListener('click', stop);
}
init();
HTML:
<html>
<head>
    <meta charset="utf-8">
    <title>Record and Play Simple Messages</title>
    <link rel="stylesheet" type="text/css" href="./css/style.css">
</head>
<body>
    Records WebM video and audio using the Web Audio API, whammy.js and recorder.js.
    WebP images are not supported in Firefox, hence it fails there; it works on Chrome, though.
    <section>
        <div>
            <video autoplay width="320" height="240"></video><br>
            <button id="camera">GetUserMedia</button>
        </div>
        <div id="recordedDiv">
            <button id="record" disabled>Record</button>
            <button id="stop" disabled>Stop</button><br>
        </div>
    </section>
    <script type="text/javascript" src="./js/whammy.min.js"></script>
    <script type="text/javascript" src="./js/recorder.js"></script>
    <script type="text/javascript" src="./js/script.js"></script>
</body>
</html>

I know I am answering a bit late, and this is only applicable to Firefox (41 and above), but you can create a MediaStream from a canvas using CanvasCaptureMediaStream.
Edit: they are implementing this media-capture option in Chrome as well; you can follow the issue here.
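For illustration, here is a minimal sketch of that approach, assuming a modern browser (Firefox 43+, Chrome 51+) and an already-created RTCPeerConnection named pc (an assumption, not part of the question's code): draw the image onto a canvas, capture the canvas as a MediaStream, and add its tracks to the connection.
const img = new Image();
img.src = './unown.png'; // the image from the question
img.onload = () => {
    const canvas = document.createElement('canvas');
    canvas.width = img.width;
    canvas.height = img.height;
    const ctx = canvas.getContext('2d');
    ctx.drawImage(img, 0, 0);
    // Capture the canvas as a 10 fps video MediaStream
    const stream = canvas.captureStream(10);
    // Keep repainting: some browsers only emit frames when the canvas changes
    setInterval(() => ctx.drawImage(img, 0, 0), 100);
    // Hand the track(s) to WebRTC instead of a camera stream
    stream.getTracks().forEach(track => pc.addTrack(track, stream)); // pc is assumed
};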

Related

How to save canvas animation as gif or webm?

I have written this function to capture each frame for the GIF, but the output is very laggy and it crashes as the amount of data increases. Any suggestions?
Code:
function createGifFromPng(list, framerate, fileName, gifScale) {
    // wWidth and wHeight are globals defined elsewhere in the asker's code
    gifshot.createGIF({
        'images': list,
        'gifWidth': wWidth * gifScale,
        'gifHeight': wHeight * gifScale,
        'interval': 1 / framerate,
    }, function(obj) {
        if (!obj.error) {
            var image = obj.image;
            var a = document.createElement('a');
            document.body.append(a);
            a.download = fileName;
            a.href = image;
            a.click();
            a.remove();
        }
    });
}

/////////////////////////////////////////////////////////////////////////

function getGifFromCanvas(renderer, sprite, fileName, gifScale, framesCount, framerate) {
    var listImgs = [];
    var saving = false;
    var interval = setInterval(function() {
        renderer.extract.canvas(sprite).toBlob(function(b) {
            if (listImgs.length >= framesCount) {
                clearInterval(interval);
                if (!saving) {
                    createGifFromPng(listImgs, framerate, fileName, gifScale);
                    saving = true;
                }
            } else {
                listImgs.push(URL.createObjectURL(b));
            }
        }, 'image/gif');
    }, 1000 / framerate);
}
In modern browsers you can combine the MediaRecorder API with the HTMLCanvasElement.captureStream method.
The MediaRecorder API can encode a MediaStream into a video or audio file on the fly, which needs far less memory than grabbing still images.
const ctx = canvas.getContext('2d');
var x = 0;

anim();
startRecording();

function startRecording() {
    const chunks = []; // here we will store our recorded media chunks (Blobs)
    const stream = canvas.captureStream(); // grab our canvas MediaStream
    const rec = new MediaRecorder(stream); // init the recorder
    // every time the recorder has new data, we will store it in our array
    rec.ondataavailable = e => chunks.push(e.data);
    // only when the recorder stops, we construct a complete Blob from all the chunks
    rec.onstop = e => exportVid(new Blob(chunks, {type: 'video/webm'}));
    rec.start();
    setTimeout(() => rec.stop(), 3000); // stop recording in 3s
}

function exportVid(blob) {
    const vid = document.createElement('video');
    vid.src = URL.createObjectURL(blob);
    vid.controls = true;
    document.body.appendChild(vid);
    const a = document.createElement('a');
    a.download = 'myvid.webm';
    a.href = vid.src;
    a.textContent = 'download the video';
    document.body.appendChild(a);
}

function anim() {
    x = (x + 1) % canvas.width;
    ctx.fillStyle = 'white';
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = 'black';
    ctx.fillRect(x - 20, 0, 40, 40);
    requestAnimationFrame(anim);
}
<canvas id="canvas"></canvas>
You can also use https://github.com/spite/ccapture.js/ to capture to gif or video.

MediaRecorder API - Autorecord

I am currently trying to implement audio auto-recording: the user speaks, and after they stop, the audio should be submitted to the backend.
I already have a sample script that submits audio with start and stop click handlers.
I'm trying to get some value such as amplitude, volume, or maybe a threshold, but I'm not sure whether MediaRecorder supports this or whether I need to look at the Web Audio API or other solutions.
Can I achieve this with MediaRecorder?
Regarding the audio analysis of the mic input, the following example shows how to take the audio captured by the mic, create an analyser with the AudioContext's createAnalyser method, connect the stream to it, and compute an FFT of the specified size in order to estimate pitch and display the output waveform.
window.AudioContext = window.AudioContext || window.webkitAudioContext;

var audioContext = null;
var isPlaying = false;
var sourceNode = null;
var analyser = null;
var theBuffer = null;
var audioCtx = null; // despite the name, this holds the <canvas> element, not an AudioContext
var mediaStreamSource = null;
var rafID = null;
var j = 0;
var waveCanvas = null;

window.onload = function() {
    audioContext = new AudioContext();
    audioCtx = document.getElementById("waveform");
    canvasCtx = audioCtx.getContext("2d");
};

function getUserMedia(dictionary, callback) {
    try {
        navigator.getUserMedia =
            navigator.getUserMedia ||
            navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia;
        navigator.getUserMedia(dictionary, callback, error);
    } catch (e) {
        alert('getUserMedia threw exception: ' + e);
    }
}

function gotStream(stream) {
    // Create an AudioNode from the stream.
    mediaStreamSource = audioContext.createMediaStreamSource(stream);
    // Connect it to the analyser.
    analyser = audioContext.createAnalyser();
    analyser.fftSize = 1024;
    mediaStreamSource.connect(analyser);
    updatePitch();
}

function toggleLiveInput() {
    canvasCtx.clearRect(0, 0, audioCtx.width, audioCtx.height);
    canvasCtx.beginPath();
    j = 0;
    // buf, buflen (and prex/prey below) are implicit globals shared with updatePitch
    buflen = 1024;
    buf = new Float32Array(buflen);
    document.getElementById('toggleLiveInput').disabled = true;
    document.getElementById('toggleLiveInputStop').disabled = false;
    if (isPlaying) {
        // stop playing and return
        sourceNode.stop(0);
        sourceNode = null;
        //analyser = null;
        isPlaying = false;
        if (!window.cancelAnimationFrame)
            window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
        window.cancelAnimationFrame(rafID);
    }
    getUserMedia({
        "audio": {
            "mandatory": {
                "googEchoCancellation": "false",
                "googAutoGainControl": "false",
                "googNoiseSuppression": "false",
                "googHighpassFilter": "false"
            },
            "optional": []
        },
    }, gotStream);
}

function stop() {
    document.getElementById('toggleLiveInput').disabled = false;
    document.getElementById('toggleLiveInputStop').disabled = true;
    //waveCanvas.closePath();
    if (!window.cancelAnimationFrame)
        window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
    window.cancelAnimationFrame(rafID);
    return "start";
}

function updatePitch() {
    analyser.fftSize = 1024;
    analyser.getFloatTimeDomainData(buf);
    canvasCtx.strokeStyle = "red";

    for (var i = 0; i < 2; i += 2) {
        x = j * 5;
        if (audioCtx.width < x) {
            // Scroll the waveform left by 5px once it reaches the right edge
            x = audioCtx.width - 5;
            previousImage = canvasCtx.getImageData(5, 0, audioCtx.width, audioCtx.height);
            canvasCtx.putImageData(previousImage, 0, 0);
            canvasCtx.beginPath();
            canvasCtx.lineWidth = 2;
            canvasCtx.strokeStyle = "red";
            prex = prex - 5;
            canvasCtx.lineTo(prex, prey);
            prex = x;
            prey = 128 + (buf[i] * 128);
            canvasCtx.lineTo(x, 128 + (buf[i] * 128));
            canvasCtx.stroke();
        } else {
            prex = x;
            prey = 128 + (buf[i] * 128);
            canvasCtx.lineWidth = 2;
            canvasCtx.lineTo(x, 128 + (buf[i] * 128));
            canvasCtx.stroke();
        }
        j++;
    }
    if (!window.requestAnimationFrame)
        window.requestAnimationFrame = window.webkitRequestAnimationFrame;
    rafID = window.requestAnimationFrame(updatePitch);
}

function error() {
    console.error(new Error('error while generating audio'));
}
Try the demo here.
Example adapted from pitch-liveinput.
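To go from drawing the waveform to actual auto-recording, here is a minimal sketch of the threshold idea (assumptions: a modern browser with navigator.mediaDevices, and hand-tuned values for the 0.01 RMS threshold and the 1500 ms silence window) that stops a MediaRecorder once the mic stays quiet:
async function autoRecord() {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const audioContext = new (window.AudioContext || window.webkitAudioContext)();
    const analyser = audioContext.createAnalyser();
    analyser.fftSize = 1024;
    audioContext.createMediaStreamSource(stream).connect(analyser);

    const chunks = [];
    const recorder = new MediaRecorder(stream);
    recorder.ondataavailable = e => chunks.push(e.data);
    recorder.onstop = () => {
        const blob = new Blob(chunks, { type: recorder.mimeType });
        // submit `blob` to the backend here (e.g. via fetch/FormData)
    };
    recorder.start();

    const buf = new Float32Array(analyser.fftSize);
    let silentSince = performance.now();
    (function check() {
        analyser.getFloatTimeDomainData(buf);
        // Root-mean-square amplitude of the current frame as a volume estimate
        const rms = Math.sqrt(buf.reduce((sum, v) => sum + v * v, 0) / buf.length);
        const now = performance.now();
        if (rms > 0.01) silentSince = now;             // still speaking
        if (now - silentSince > 1500) recorder.stop(); // ~1.5 s of silence: submit
        else requestAnimationFrame(check);
    })();
}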

Access multiple cameras on Firefox with javascript/HTML5?

I've managed to access two cameras simultaneously on Chrome with JavaScript and HTML5, but not on Firefox. Is there any way to make it work, or does Firefox still not support multiple cameras?
I've attached my code below. Please see if anything has to be reworked.
** You have to replace the device IDs with your own to make this code work. **
<script>
document.addEventListener('DOMContentLoaded', function() {
    var video = document.getElementById('cam1');
    var video2 = document.getElementById('cam2');
    var audio, audioType;
    var canvas = document.querySelector('canvas');
    var context = canvas.getContext('2d');
    var sHeight = video.height / canvas.height;
    var sWidth = video.width / canvas.width;

    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia;
    window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;

    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: {deviceId: "8b6cf59198c32c9b3544d9252d96c0d26938780787f0fc04cb162ba978aecf4c"}, audio: false}, onSuccessCallback, onErrorCallback);
        navigator.getUserMedia({video: {deviceId: "024ad3a357f5dd716e658ba749ac0bc53a4de31f00aba58c35da2736141f51c1"}, audio: false}, onSuccessCallback2, onErrorCallback);

        function onSuccessCallback(stream) {
            video.src = window.URL.createObjectURL(stream) || stream;
            video.play();
        }

        function onSuccessCallback2(stream) {
            video2.src = window.URL.createObjectURL(stream) || stream;
            video2.play();
        }

        // Display an error
        function onErrorCallback(e) {
            var expl = 'An error occurred: [Reason: ' + e.code + ']';
            console.error(expl);
            alert(expl);
            return;
        }
    }
}, false);
</script>
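On current browsers (including Firefox), a sketch using the unprefixed navigator.mediaDevices API and srcObject is more likely to work; device IDs are discovered via enumerateDevices() instead of being hard-coded. The function below is an illustration under those assumptions, not code from the question:
async function openTwoCameras(video1, video2) {
    // Device IDs and labels may be empty until the user has granted
    // camera permission at least once
    const devices = await navigator.mediaDevices.enumerateDevices();
    const cams = devices.filter(d => d.kind === 'videoinput');
    if (cams.length < 2) throw new Error('Fewer than two cameras found');
    for (const [video, cam] of [[video1, cams[0]], [video2, cams[1]]]) {
        // Request each camera explicitly by its device ID
        video.srcObject = await navigator.mediaDevices.getUserMedia({
            video: { deviceId: { exact: cam.deviceId } },
            audio: false
        });
        await video.play();
    }
}
openTwoCameras(document.getElementById('cam1'), document.getElementById('cam2'))
    .catch(e => console.error(e));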

Sending video captured with the computer camera from a website to a server

I am trying to have a computer or an Android device record video from the camera and send it to a server that will store it. The system has to be web-based and needs to send clips of video continuously.
Here is the JavaScript code:
var camera = (function() {
    var options;
    var video, canvas, context;
    var renderTimer;

    function initVideoStream() {
        video = document.createElement("video");
        video.setAttribute('width', options.width);
        video.setAttribute('height', options.height);

        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia || navigator.msGetUserMedia;
        window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;

        if (navigator.getUserMedia) {
            navigator.getUserMedia({
                video: true
            }, function(stream) {
                options.onSuccess();
                if (video.mozSrcObject !== undefined) { // hack for Firefox < 19
                    video.mozSrcObject = stream;
                } else {
                    video.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
                }
                initCanvas();
            }, options.onError);
        } else {
            options.onNotSupported();
        }
    }

    function initCanvas() {
        canvas = document.getElementById("canvas"); // options.targetCanvas || document.createElement("canvas");
        canvas.setAttribute('width', options.width);
        canvas.setAttribute('height', options.height);
        context = canvas.getContext('2d');

        // mirror video
        if (options.mirror) {
            context.translate(canvas.width, 0);
            context.scale(-1, 1);
        }
        startCapture();
    }

    function startCapture() {
        video.play();
        renderTimer = setInterval(function() {
            try {
                context.drawImage(video, 0, 0, video.width, video.height);
                options.onFrame(canvas);
            } catch (e) {
                // TODO
            }
        }, Math.round(1000 / options.fps));
    }

    function stopCapture() {
        pauseCapture();
        if (video.mozSrcObject !== undefined) {
            video.mozSrcObject = null;
        } else {
            video.src = "";
        }
    }

    function pauseCapture() {
        if (renderTimer) clearInterval(renderTimer);
        video.pause();
    }

    return {
        init: function(captureOptions) {
            var doNothing = function() {};
            options = captureOptions || {};
            options.fps = options.fps || 30;
            options.width = options.width || 640;
            options.height = options.height || 480;
            options.mirror = options.mirror || false;
            options.targetCanvas = options.targetCanvas || null; // TODO: is the element actually a <canvas>?
            initVideoStream();
        },
        start: startCapture,
        pause: pauseCapture,
        stop: stopCapture
    };
})();
var imgSender = (function() {
    function imgsFromCanvas(canvas, options) {
        var context = canvas.getContext("2d");
        var canvasWidth = canvas.width;   // # pixels horizontally
        var canvasHeight = canvas.height; // # pixels vertically
        var imageData = context.getImageData(0, 0, canvasWidth, canvasHeight); // vector of all pixels
        options.callback(canvasWidth, canvasHeight, imageData.data);
    }

    return {
        fromCanvas: function(canvas, options) {
            options = options || {};
            // the original fell back to an undefined doNothing here (a ReferenceError)
            options.callback = options.callback || function() {};
            return imgsFromCanvas(canvas, options);
        }
    };
})();
var FPS = 30; // fps
var minVideoTime = 5; // seconds of video in every clip sent
var arrayImgs = []; // all the images that will be sent to the server
var videoTime = 0; // number of seconds of video currently stored

(function() {
    var capturing = false;

    camera.init({
        width: 160,
        height: 120,
        fps: FPS,
        mirror: true,
        onFrame: function(canvas) {
            imgSender.fromCanvas(canvas, {
                callback: function(w, h, image) {
                    arrayImgs.push(image);
                    videoTime += 1 / FPS;
                    if (minVideoTime <= videoTime) {
                        sendToPhp(w, h, videoTime * FPS);
                        arrayImgs = [];
                        videoTime = 0;
                    }

                    function sendToPhp(width, height, numFrames) {
                        $.ajax({
                            type: "POST",
                            url: "sendToServer.php",
                            data: {
                                arrayImgs: arrayImgs,
                                width: w,
                                height: h,
                                nframes: numFrames
                            },
                            async: true, // if set to non-async, browser shows page as "Loading..."
                            cache: false,
                            timeout: 500, // timeout in ms (0.5 s)
                            success: function(answer) {
                                console.log("SUCCESS: ", answer);
                            },
                            error: function(XMLHttpRequest, textStatus, errorThrown) {
                                console.log("ERROR: ", textStatus, " , ", errorThrown);
                            }
                        });
                    }
                }
            });
        },
        onSuccess: function() {
            document.getElementById("info").style.display = "none";
            capturing = true;
            document.getElementById("pause").style.display = "block";
            document.getElementById("pause").onclick = function() {
                if (capturing) {
                    camera.pause();
                } else {
                    camera.start();
                }
                capturing = !capturing;
            };
        },
        onError: function(error) {
            // TODO: log error
        },
        onNotSupported: function() {
            document.getElementById("info").style.display = "none";
            document.getElementById("notSupported").style.display = "block";
        }
    });
})();
As you can see, I am using getUserMedia to get the video, and I am buffering frames in an array until I have 5 seconds of video; then I send them to a PHP file that forwards them to the server.
My problem: there is A LOT of lag in all the AJAX requests, and I also have size problems; the requests don't accept that much data at once, so I have to reduce the fps and the length of the clips sent.
Is there any better way to do this? Right now I lose more than 50% of the video! Maybe through WebRTC? Help please.
Thank you!
For your use case, you should use the MediaRecorder API. It will let you record the video as WebM in chunks you can upload to the server in the background. WebM/VP8 (or MP4) will do a far better job of compressing the video than the ad-hoc MJPEG you're producing now.
See also RecordRTC, which includes compatibility layers for browsers that don't implement this yet, etc.
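A minimal sketch of that chunked-upload approach (the /upload endpoint and the 5-second timeslice are assumptions for illustration):
async function streamToServer() {
    const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
    const recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });
    recorder.ondataavailable = async (e) => {
        if (e.data.size === 0) return;
        const form = new FormData();
        form.append('chunk', e.data, 'clip.webm');
        await fetch('/upload', { method: 'POST', body: form }); // hypothetical endpoint
    };
    recorder.start(5000); // timeslice: emit a chunk every 5000 ms
}
Note that with a timeslice, only the concatenation of all chunks forms a valid WebM file, so the server should append each chunk to the same file.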

WEBRTC - Webcamera Source and View

I am new to WebRTC. I have two HTML pages that are supposed to capture and show a webcam, respectively. My expectation is that the "webcamerasrc.html" page captures the webcam, and that through the "WEBCAMERAVew.html" page one can view the camera. The first page captures the video, but the second page does not show it. I think the WebRTC handshake is not being completed. Any suggestion on making this work, or on understanding the WebRTC handshake or SDP exchange between two different pages, would be appreciated.
Here are the snippets.
webcamerasrc.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>WEB CAMERA SOURCE</title>
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="https://webrtcexperiment-webrtc.netdna-ssl.com/RTCPeerConnection-v1.5.js"></script>
</head>
<body>
<h1>WEB CAMERA SOURCE</h1>
<div id="container">
    <video autoplay="true" id="localVideo"></video>
    <video autoplay="true" id="rVideo"></video>
</div>
<script>
var socket = new WebSocket('ws://localhost:8080/IntegrateIntoWebTest/websocket');
var mediaConstraints = {
    optional: [],
    mandatory: {
        OfferToReceiveAudio: true,
        OfferToReceiveVideo: true
    }
};

navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;

var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121'};
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN]};
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};

var video = document.getElementById('localVideo');
var offerer = new RTCPeerConnection(iceServers);

if (navigator.getUserMedia) {
    navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
}

function VideoSuccess(stream) {
    video.src = window.URL.createObjectURL(stream);
    offerer.addStream(stream);
    offerer.onicecandidate = function (event) {
        if (!event || !event.candidate) return;
        var o_icecandidate = event.candidate;
        socket.send(JSON.stringify({o_icecandidate}));
        // NOTE: this overwrites the socket.onmessage handler installed below
        socket.onmessage = function(event) {
            var iceData = JSON.parse(event.data);
            console.log(iceData);
            // NOTE: should wrap the candidate, e.g.
            // offerer.addIceCandidate(new RTCIceCandidate(iceData.a_icecandidate))
            offerer.addIceCandidate(iceData);
        };
    };
    offerer.onaddstream = function (stream) {
        video.src = window.URL.createObjectURL(stream.stream);
    };
}

// NOTE: the offer is created immediately, possibly before getUserMedia has
// resolved and the stream has been added to the connection
offerer.createOffer(function (offerSdp) {
    offerer.setLocalDescription(offerSdp);
    socket.send(JSON.stringify({offerSdp}));
}, function(e) { console.log(e); }, mediaConstraints);

socket.onmessage = function(event) {
    var actualData = JSON.parse(event.data);
    console.log(actualData);
    // NOTE: '=' assigns instead of comparing; should be '==='
    if (actualData.answerSdp.type = 'answer') {
        var sd = JSON.parse(event.data);
        var sd1 = new RTCSessionDescription(sd.answerSdp);
        offerer.setRemoteDescription(sd1);
    }
};

function VideoError(e) {
    console.log(e);
}
</script>
</body>
</html>
WEBCAMERAVew.html
<!DOCTYPE html>
<html>
<head>
<meta charset="ISO-8859-1">
<title>REMOTE WEB CAMERA VIEW</title>
</head>
<body>
<h1>REMOTE WEB CAMERA VIEW</h1>
<script src='https://cdn.firebase.com/js/client/2.2.1/firebase.js'></script>
<script src="//cdn.webrtc-experiment.com/RTCPeerConnection-v1.5.js"></script>
<div id="container">
    <video autoplay="true" id="localVideo"></video>
    <video autoplay="true" id="rVideo"></video>
</div>
<script>
// NOTE: this endpoint differs from the source page's
// ('ws://localhost:8080/IntegrateIntoWebTest/websocket'), so the two pages
// are not even talking to the same signaling channel
var myDataRef = new WebSocket('ws://localhost:8080/wsTest/websocket');

myDataRef.onmessage = function(event) {
    var actualData = JSON.parse(event.data);
    // NOTE: '=' assigns instead of comparing; should be '==='
    if (actualData.offerSdp.type = 'offer') {
        answererPeer(event);
    }
};

var mediaConstraints = {
    optional: [],
    mandatory: {
        OfferToReceiveAudio: true,
        OfferToReceiveVideo: true
    }
};

window.RTCPeerConnection = window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCSessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription;
window.RTCIceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;

var isChrome = !!navigator.webkitGetUserMedia;
var STUN = {url: isChrome ? 'stun:stun.l.google.com:19302' : 'stun:23.21.150.121'};
var TURN = {url: 'turn:homeo#turn.bistri.com:80', credential: 'homeo'};
var iceServers = {iceServers: [STUN, TURN]};
var DtlsSrtpKeyAgreement = {DtlsSrtpKeyAgreement: true};
var optional = {optional: [DtlsSrtpKeyAgreement]};

var answerer = new RTCPeerConnection(iceServers);
var video = document.getElementById('localVideo');
var remoteVideo = document.getElementById('rVideo');

function answererPeer(event) {
    var sd = JSON.parse(event.data);
    var rd = new RTCSessionDescription(sd.offerSdp);

    if (navigator.getUserMedia) {
        navigator.getUserMedia({video: true}, VideoSuccess, VideoError);
    }

    function VideoSuccess(mediaStream) {
        answerer.addStream(mediaStream);
        // NOTE: setRemoteDescription is asynchronous; createAnswer should only
        // run after it succeeds (e.g. in its success callback)
        answerer.setRemoteDescription(rd);
        answerer.createAnswer(function (answerSdp) {
            answerer.setLocalDescription(answerSdp);
            myDataRef.send(JSON.stringify({answerSdp}));
        }, function() {}, mediaConstraints);

        answerer.onicecandidate = function (event) {
            if (!event || !event.candidate) return;
            var a_icecandidate = event.candidate;
            myDataRef.send(JSON.stringify({a_icecandidate}));
            // NOTE: this replaces the page's original onmessage handler, and the
            // received candidates are only logged, never passed to addIceCandidate
            myDataRef.onmessage = function(event) {
                var iceData = JSON.parse(event.data);
                console.log('over here: ' + iceData);
            };
        };

        answerer.onaddstream = function (mediaStream) {
            // NOTE: the handler receives an event; the stream is in mediaStream.stream
            remoteVideo.src = URL.createObjectURL(mediaStream);
            remoteVideo.play();
        };
    }

    function VideoError(e) {
        console.log(e);
    }
}
</script>
</body>
</html>
