WebRTC Peer to Peer only displays local stream twice - javascript

I'm trying to learn how to use this cool new WebRTC API.
I'm following this tutorial, https://simpl.info/rtcpeerconnection/ but I don't understand how to get the second stream from my Raspberry Pi 3 running the UV4L server https://www.linux-projects.org/webrtc-signalling/
I have tested the functionality with the UV4L built-in WebRTC page using WebSockets and it works.
Here is my code so far, but it only displays my local stream twice, in both the local and remote video tags.
HTML:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta content="width=device-width, user-scalable=yes, initial-scale=1, maximum-scale=1" name="viewport">
<title>Peer connection</title>
</head>
<body>
<div id="container">
<video autoplay="" id="localVideo"></video> <video autoplay="" id="remoteVideo"></video>
<div>
<button id="startButton">Start</button> <button id="callButton">Call</button>
<button id="hangupButton">Hang Up</button>
</div>
</div>
<script src="js/main.js">
</script>
</body>
</html>
JavaScript:
var startButton = document.getElementById('startButton');
var callButton = document.getElementById('callButton');
var hangupButton = document.getElementById('hangupButton');
callButton.disabled = true;
hangupButton.disabled = true;
startButton.onclick = start;
callButton.onclick = call;
hangupButton.onclick = hangup;
var startTime;
var localVideo = document.getElementById('localVideo');
var remoteVideo = document.getElementById('remoteVideo');
var localStream;
var pc1;
var pc2;
var offerOptions = {
offerToReceiveAudio: 1,
offerToReceiveVideo: 1,
};
function getName(pc) {
return pc === pc1 ? 'pc1' : 'pc2';
}
function getOtherPc(pc) {
return pc === pc1 ? pc2 : pc1;
}
function gotStream(stream) {
localVideo.srcObject = stream;
localStream = stream;
callButton.disabled = false;
}
function start() {
startButton.disabled = true;
navigator.mediaDevices
.getUserMedia({
audio: true,
video: true,
})
.then(gotStream)
.catch(function(e) {
alert('getUserMedia() error: ' + e.name);
});
}
function call() {
callButton.disabled = true;
hangupButton.disabled = false;
startTime = window.performance.now();
var videoTracks = localStream.getVideoTracks();
var audioTracks = localStream.getAudioTracks();
// MY UV4L stun server
var servers = {
iceServers: [ { urls: [ 'stun:' + '192.84.178.59' + ':3478' ] } ],
};
pc1 = new RTCPeerConnection(servers);
pc1.onicecandidate = function(e) {
onIceCandidate(pc1, e);
};
console.log(servers);
pc2 = new RTCPeerConnection(servers);
pc2.onicecandidate = function(e) {
onIceCandidate(pc2, e);
};
pc1.oniceconnectionstatechange = function(e) {
onIceStateChange(pc1, e);
};
pc2.oniceconnectionstatechange = function(e) {
onIceStateChange(pc2, e);
};
pc2.ontrack = gotRemoteStream;
localStream.getTracks().forEach(function(track) {
pc1.addTrack(track, localStream);
});
pc1.createOffer(offerOptions).then(onCreateOfferSuccess, onCreateSessionDescriptionError);
}
function onCreateSessionDescriptionError(error) {
console.log(error.toString());
}
function onCreateOfferSuccess(desc) {
pc1.setLocalDescription(desc).then(function() {
onSetLocalSuccess(pc1);
}, onSetSessionDescriptionError);
pc2.setRemoteDescription(desc).then(function() {
onSetRemoteSuccess(pc2);
}, onSetSessionDescriptionError);
pc2.createAnswer().then(onCreateAnswerSuccess, onCreateSessionDescriptionError);
}
function onSetLocalSuccess(pc) {
console.log(getName(pc) + ' setLocalDescription complete');
}
function onSetRemoteSuccess(pc) {
console.log(getName(pc) + ' setRemoteDescription complete');
}
function onSetSessionDescriptionError(error) {
console.log('Failed to set session description: ' + error.toString());
}
function gotRemoteStream(e) {
if (remoteVideo.srcObject !== e.streams[0]) {
console.log(e.streams[0]);
remoteVideo.srcObject = e.streams[0];
}
}
function onCreateAnswerSuccess(desc) {
pc2.setLocalDescription(desc).then(function() {
onSetLocalSuccess(pc2);
}, onSetSessionDescriptionError);
pc1.setRemoteDescription(desc).then(function() {
onSetRemoteSuccess(pc1);
}, onSetSessionDescriptionError);
}
function onIceCandidate(pc, event) {
getOtherPc(pc).addIceCandidate(event.candidate).then(
function() {
onAddIceCandidateSuccess(pc);
},
function(err) {
onAddIceCandidateError(pc, err);
},
);
}
function onAddIceCandidateSuccess(pc) {
console.log(getName(pc) + ' addIceCandidate success');
}
function onAddIceCandidateError(pc, error) {
console.log(getName(pc) + ' failed to add ICE Candidate: ' + error.toString());
}
function onIceStateChange(pc, event) {
if (pc) {
console.log(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
console.log('ICE state change event: ', event);
}
}
function hangup() {
pc1.close();
pc2.close();
pc1 = null;
pc2 = null;
hangupButton.disabled = true;
callButton.disabled = false;
}
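The simpl.info demo creates both pc1 and pc2 in the same page, so the "remote" video is just the local stream looped straight back, which is why the local stream shows up twice. To receive the Pi's stream, the offer/answer and ICE candidates have to be exchanged with the UV4L server over its WebSocket instead. Below is a rough, untested sketch of a receive-only connection driven by WebSocket signalling; the {what, data} message envelope and the endpoint URL are assumptions, since UV4L defines its own protocol on its demo pages.
// Sketch only -- not the code above and not the exact UV4L protocol.
var signalling = new WebSocket('ws://raspberrypi:8080/stream/webrtc'); // hypothetical endpoint
var pc = new RTCPeerConnection({
  iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
});
// The Pi's stream (the "second" stream) arrives here.
pc.ontrack = function(e) {
  remoteVideo.srcObject = e.streams[0];
};
// Trickle local ICE candidates to the server.
pc.onicecandidate = function(e) {
  if (e.candidate) {
    signalling.send(JSON.stringify({ what: 'addIceCandidate', data: JSON.stringify(e.candidate) }));
  }
};
signalling.onopen = function() {
  // Ask the server to start a call (message name is an assumption).
  signalling.send(JSON.stringify({ what: 'call', options: {} }));
};
signalling.onmessage = function(msg) {
  var message = JSON.parse(msg.data);
  if (message.what === 'offer') {
    // The server sends the offer; answer it and send the answer back.
    pc.setRemoteDescription(new RTCSessionDescription(JSON.parse(message.data)))
      .then(function() { return pc.createAnswer(); })
      .then(function(answer) { return pc.setLocalDescription(answer); })
      .then(function() {
        signalling.send(JSON.stringify({ what: 'answer', data: JSON.stringify(pc.localDescription) }));
      });
  } else if (message.what === 'iceCandidate' && message.data) {
    pc.addIceCandidate(new RTCIceCandidate(JSON.parse(message.data)));
  }
};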

Related

Is window.localAudio a thing?

I found Mozilla's Getting browser microphone permission guide. It defines a function to request permission and listen to the client's microphone, like so:
function getLocalStream() {
navigator.mediaDevices.getUserMedia({video: false, audio: true}).then( stream => {
window.localStream = stream; // A
window.localAudio.srcObject = stream; // B
window.localAudio.autoplay = true; // C
}).catch( err => {
console.log("u got an error:" + err)
});
}
I checked in Chrome, Firefox and Safari - all of them throw an error about window.localAudio being undefined. Where did this tutorial get it from? Was window.localAudio ever a thing? What was it supposed to do?
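For context: browsers expose every element that has an id as a property of window (named access on the Window object), so window.localAudio is only defined if the page happens to contain an element with id="localAudio"; the tutorial presumably assumed such an element in its HTML. A minimal page where the snippet works as written:
<!DOCTYPE html>
<html>
<body>
<!-- Because this element has id="localAudio", browsers also expose it as window.localAudio -->
<audio id="localAudio" autoplay controls></audio>
<script>
console.log(window.localAudio); // logs the <audio> element instead of being undefined
</script>
</body>
</html>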
I will try to give you something more useful than what you asked about.
The function below will create the element if it is not present, and there are a few options available. In the example I'm adding the newly created audio element to the body, but it will work even if it is not added - it's a matter of choice.
<html>
<head>
<script>
var el;
function attachStream(stream, el, options) {
var item;
var URL = window.URL;
var element = el;
var opts = {
autoplay: true,
mirror: false,
muted: false,
audio: false,
disableContextMenu: false
};
if (options) {
for (item in options) {
opts[item] = options[item];
}
}
if (!element) {
element = document.createElement(opts.audio ? 'audio' : 'video');
} else if (element.tagName.toLowerCase() === 'audio') {
opts.audio = true;
}
if (opts.autoplay) element.autoplay = 'autoplay';
if (opts.muted) element.muted = true;
if (!opts.audio && opts.mirror) {
['', 'moz', 'webkit', 'o', 'ms'].forEach(function(prefix) {
var styleName = prefix ? prefix + 'Transform' : 'transform';
element.style[styleName] = 'scaleX(-1)';
});
}
element.srcObject = stream;
return element;
};
function getLocalStream() {
navigator.mediaDevices.getUserMedia({
video: false,
audio: true
}).then(
stream => {
var doesnotexist = !el;
el = attachStream(stream, el, {
audio: true,
autoplay: true
});
if (doesnotexist) document.body.appendChild(el);
}
).catch(err => {
console.log("u got an error:" + err)
});
}
window.addEventListener('DOMContentLoaded', (event) => {
getLocalStream();
});
</script>
</head>
<body>
</body>
</html>

WebRTC fails to play in Chrome & Edge but plays in Firefox

I have some very simple code for video calling using WebRTC. The system behaves differently in different browsers.
Capture Browser | Player Browser | Working
Chrome          | Firefox        | ✔
Chrome          | Chrome         | X
Firefox         | Chrome         | X
Firefox         | Firefox        | ✔
The capture code is
JS:
(function () {
var localVideo, localConnection;
const signaling = new WebSocket('wss://crs4kx11s1/websockets');
signaling.onmessage = function (message) {
var data = JSON.parse(message.data);
if (data.sdp) {
var answerSDP = data.sdp;
if (answerSDP.type == "answer") {
localConnection.setRemoteDescription(answerSDP);
}
}
if (data.candidate && data.candidateType == "answerClient") {
localConnection.addIceCandidate(data.candidate);
}
}
localConnection = new RTCPeerConnection({
iceServers: [{
urls: 'turn:127.0.0.1:8043?transport=tcp',
credential: 'jupiter',
username: 'simpleshare'
}]
});
document.addEventListener("DOMContentLoaded", function (event) {
$("#share").click(function (event) {
navigator.mediaDevices.getUserMedia({ video: true })
.then(function (stream) {
stream.getTracks().forEach(
function (track) {
localConnection.addTrack(
track,
stream
);
}
);
localVideo = document.getElementById('local');
localVideo.srcObject = stream;
localConnection.onnegotiationneeded = function () {
localConnection.createOffer()
.then(offer => {
localConnection.setLocalDescription(offer)
.then(() => {
signaling.send(JSON.stringify({ sdp: offer }));
})
});
}
localConnection.onicecandidate = function (e) {
if (e.candidate) {
signaling.send(JSON.stringify({
candidateType: 'offerClient',
candidate: e.candidate.toJSON()
}));
}
console.log('offerClient is on icecandidate');
};
});
});
});
})();
HTML
<div>
<button id="share">Share</button>
<video id="local" autoplay></video>
</div>
Now the player code
JS
(function () {
var localVideo, localConnection;
const signaling = new WebSocket('wss://crs4kx11s1/websockets');
signaling.onmessage = function (message) {
const data = JSON.parse(message.data);
// const content = data.content;
try {
if (data.sdp) {
let offerSDP = data.sdp;
if (offerSDP.type == "offer") {
console.log("Accepting the offer.")
localConnection.setRemoteDescription(offerSDP);
localConnection.createAnswer().then(function (answer) {
console.log("Answer created!")
localConnection.setLocalDescription(answer);
signaling.send(JSON.stringify({ sdp: answer }));
});
}
}
if (data.candidate && data.candidateType == "offerClient") {
console.log("ICE candidate added!");
localConnection.addIceCandidate(data.candidate);
}
} catch (err) {
console.error(err);
}
};
document.addEventListener("DOMContentLoaded", function (event) {
startConnection();
localVideo = document.getElementById('self-view');
});
function startConnection() {
console.info("Starting connection");
localConnection = new RTCPeerConnection({iceServers: [{
urls: 'turn:127.0.0.1:8043?transport=tcp',
credential: 'jupiter',
username: 'simpleshare'
}]
});
//startCapture();
localConnection.onicecandidate = function (e) {
console.info("onicecandidate", e);
if (e.candidate) {
signaling.send(JSON.stringify({
candidateType: 'answerClient',
candidate: e.candidate.toJSON()
}));
}
console.log('answerClient is on icecandidate');
};
localConnection.onconnectionstatechange = function (e) {
console.log("Current state", localConnection.connectionState);
}
localConnection.ontrack = function (e) {
localVideo.srcObject = e.streams[0];
}
}
})();
HTML
<div id="chat-room">
<div id="videos">
<video id="self-view" autoplay></video>
</div>
</div>
Apart from these, there is a WebSocket server which relays the SDP offers and candidates (a minimal sketch of such a relay is shown below).
Please note that I have used our own TURN server.
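Not part of the question, but for reference: a minimal sketch of such a relay, assuming the Node.js ws package, that simply broadcasts every signalling message to the other connected clients:
const WebSocket = require('ws');

const wss = new WebSocket.Server({ port: 8043 }); // hypothetical port; put this behind TLS for wss://
wss.on('connection', function (ws) {
  ws.on('message', function (message) {
    // Relay the SDP/candidate message to every other connected peer.
    wss.clients.forEach(function (client) {
      if (client !== ws && client.readyState === WebSocket.OPEN) {
        client.send(message.toString());
      }
    });
  });
});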
Got it working. It was because of the new autoplay policy in Chrome. Just adding localVideo.play(); made it work.
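For reference, that change presumably goes in the player's ontrack handler; a sketch (the exact placement is my assumption):
localConnection.ontrack = function (e) {
    localVideo.srcObject = e.streams[0];
    // Chrome's autoplay policy can block playback until play() is called
    // (or the element is muted); play() returns a promise, so log a rejection.
    localVideo.play().catch(function (err) {
        console.warn('play() was blocked:', err);
    });
};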

Remote video stream not displaying

The RTCPeerConnection gets established and receives the client's reply, which is the 'answer', but it does not show the remote video stream. The console log is shown below.
(console log screenshot)
From the log, the offer is sent to the peer, and the peer sends back an answer which is logged to confirm that it did actually send the answer back. I would greatly appreciate it if you could take a look at my code pasted below and advise me on how to rectify it.
'use strict';
var localStream;
var remoteStream;
var isInitiator;
var configuration = {
iceServers: [
{
urls: 'stun:stun.l.google.com:19302'
}
]
};
var pc = new RTCPeerConnection(configuration);
// Define action buttons.
const callButton = document.getElementById('callButton');
const hangupButton = document.getElementById('hangupButton');
/////////////////////////////////////////////
window.room = prompt('Enter room name:');
var socket = io.connect();
if (room !== '') {
console.log('Message from client: Asking to join room ' + room);
socket.emit('create or join', room);
}
socket.on('created', function(room) {
console.log('Created room ' + room);
isInitiator = true;
startVideo();
});
socket.on('joined', function(room) {
console.log('joined: ' + room);
startVideo();
});
socket.on('log', function(array) {
console.log.apply(console, array);
});
////////////////////////////////////////////////
function sendMessage(message) {
socket.emit('message', message);
}
// This client receives a message
socket.on('message', function(message) {
if (message.type === 'offer') {
pc.setRemoteDescription(message);
console.log('Sending answer to peer.');
pc.createAnswer().then(
setLocalAndSendMessage,
onCreateSessionDescriptionError
);
} else if (message.type === 'answer') {
console.log('This is to check if answer was returned');
remoteStream = event.stream;
remoteVideo.srcObject = remoteStream;
pc.setRemoteDescription(message);
} else if (message.type === 'candidate') {
pc.addIceCandidate(candidate);
}
});
////////////////////////////////////////////////////
const localVideo = document.querySelector('#localVideo');
const remoteVideo = document.querySelector('#remoteVideo');
// Set up initial action buttons status: disable call and hangup.
callButton.disabled = true;
hangupButton.disabled = true;
// Add click event handlers for buttons.
callButton.addEventListener('click', callStart);
hangupButton.addEventListener('click', hangupCall);
function startVideo() {
navigator.mediaDevices
.getUserMedia({
audio: true,
video: true
})
.then(gotStream)
.catch(function(e) {
alert('getUserMedia() error: ' + e.name);
});
}
function gotStream(stream) {
localVideo.srcObject = stream;
localStream = stream;
callButton.disabled = false;
}
function callStart() {
createPeerConnection();
callButton.disabled = true;
hangupButton.disabled = false;
if (isInitiator) {
console.log('Sending offer to peer');
pc.createOffer(setLocalAndSendMessage, handleCreateOfferError);
}
}
/////////////////////////////////////////////////////////
function createPeerConnection() {
try {
pc.onicecandidate = ({ candidate }) => sendMessage({ candidate });
pc.ontrack = event => {
if (remoteVideo.srcObject) return;
remoteVideo.srcObject = event.stream;
};
console.log('Created RTCPeerConnnection');
} catch (e) {
console.log('Failed to create PeerConnection, exception: ' + e.message);
alert('Cannot create RTCPeerConnection object.');
return;
}
}
function handleCreateOfferError(event) {
console.log('createOffer() error: ', event);
}
function setLocalAndSendMessage(sessionDescription) {
console.log('setLocalAndSendMessage sending message', sessionDescription);
pc.setLocalDescription(sessionDescription);
sendMessage(sessionDescription);
}
function onCreateSessionDescriptionError(error) {
console.log('Failed to create session description: ' + error.toString());
}
function hangupCall() {
pc.close();
pc = null;
}
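For comparison (not from the thread): the standard pattern attaches the local tracks with addTrack before creating the offer/answer and reads event.streams[0] (rather than event.stream) in ontrack. A minimal sketch using the same names as above:
function createPeerConnection() {
  pc.onicecandidate = ({ candidate }) => sendMessage({ type: 'candidate', candidate });
  pc.ontrack = event => {
    // ontrack fires per received track; streams[0] is the associated MediaStream
    if (!remoteVideo.srcObject) remoteVideo.srcObject = event.streams[0];
  };
  // Local tracks must be attached before createOffer/createAnswer,
  // otherwise no media is negotiated and the remote side never fires ontrack.
  localStream.getTracks().forEach(track => pc.addTrack(track, localStream));
}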

Recorded video can't be forwarded/rewound, using the MediaRecorder API in a Chrome extension

I am implementing a Chrome extension which records the screen together with the microphone. After recording it generates the recorded video perfectly, but the problem is that the recorded video cannot be forwarded/rewound or started from an arbitrary point the first time it is played.
Once it has played through the first time it works perfectly, but when I download the video the problem is the same. I took help from Muaz Khan's plugin.
I am using this code to start recording:
function gotStream(stream) {
if (cameraStream && cameraStream.getAudioTracks().length) {
cameraStream.getAudioTracks().forEach(function(track) {
// cameraStream.removeTrack(track);
stream.addTrack(track);
});
}
if (typeof MediaRecorder.isTypeSupported == 'function') {
/*
MediaRecorder.isTypeSupported is a function announced in https://developers.google.com/web/updates/2016/01/mediarecorder and later introduced in the MediaRecorder API spec http://www.w3.org/TR/mediastream-recording/
*/
if (MediaRecorder.isTypeSupported('video/mp4;codecs=h264')) {
var options = {
type: 'video',
mimeType: 'video/mp4;codecs=h264'
};
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
var options = {
type: 'video',
mimeType: 'video/webm;codecs=vp9'
};
} else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
var options = {
mimeType: 'video/webm;codecs=vp8'
};
}
console.log('Using ' + options.mimeType);
recorder = new RecordRTC(stream, options);
} else {
console.log('Using default codecs for browser');
recorder = new MediaRecorder(stream);
}
recorder.streams = [stream];
recorder.start(10);
recorder.ondataavailable = function(e) {
chunks.push(e.data);
};
recorder.onerror = function(e) {
console.log('Error: ', e);
};
recorder.onstart = function() {
isRecording = true;
onRecording();
console.log('Started & state = ' + recorder.state);
};
recorder.onpause = function() {
console.log('Paused & state = ' + recorder.state);
}
recorder.onresume = function() {
console.log('Resumed & state = ' + recorder.state);
}
recorder.onwarning = function(e) {
console.log('Warning: ' + e);
};
recorder.onstop = function() {
stopScreenRecording();
}
stream.onended = function() {
if (stream) {
stream.onended = null;
}
recorder.stop();
};
if (stream.getVideoTracks().length) {
stream.getVideoTracks().forEach(function(track) {
track.onended = function() {
if (!recorder) return;
if (!stream || typeof stream.onended !== 'function') return;
stream.onended();
};
});
}
}
and this code to stop recording:
function stopScreenRecording(blob) {
isRecording = false;
var blob = new Blob(chunks, {
type: "video/mp4"
});
chunks = [];
var file = new File([blob ? blob : ''], getFileName(fileExtension), {
type: mimeType
});
DiskStorage.Store({
key: 'latest-file',
value: file
}, function(success) {
if (success) {
chrome.browserAction.setPopup({
popup: "popup.html"
});
chrome.tabs.create({
url: 'preview.html'
});
}
});
setTimeout(function() {
setDefaults();
// chrome.runtime.reload();
}, 1000);
try {
videoPlayers.forEach(function(player) {
player.src = null;
});
videoPlayers = [];
} catch (e) {}
// for dropdown.js
chrome.storage.sync.set({
isRecording: 'false' // FALSE
});
if (timer) {
clearTimeout(timer);
}
setBadgeText('');
}
I saw another screen recorder whose video blob is like:
filesystem:chrome-extension://mmeijimgabbpbgpdklnllpncmdofkcpn/persistent/e6ad7ba1-6afe-4d45-y6f5-47e08a87e036.webm
and our video blob is like:
blob:chrome-extension://hgpenkfjeddjngnojmcmgbclkoakihhg/af3dcfa6-b990-464b-9726-e8b6022762a2
How can I get this type of blob?
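The filesystem: URL shown above comes from Chrome's non-standard (and now deprecated) FileSystem API, whereas blob: URLs come from URL.createObjectURL. A rough sketch of writing the recorded blob into the sandboxed filesystem and getting such a URL, assuming webkitRequestFileSystem is available (Chrome only); note this changes only the URL scheme, not the recording itself:
// Sketch only: webkitRequestFileSystem is Chrome-specific and deprecated.
function saveToFileSystem(blob, fileName, callback) {
  window.webkitRequestFileSystem(window.TEMPORARY, blob.size, function(fs) {
    fs.root.getFile(fileName, { create: true }, function(fileEntry) {
      fileEntry.createWriter(function(writer) {
        writer.onwriteend = function() {
          // toURL() yields a filesystem:chrome-extension://.../<fileName> URL
          callback(fileEntry.toURL());
        };
        writer.write(blob);
      });
    });
  });
}
// Hypothetical usage with the blob built in stopScreenRecording():
// saveToFileSystem(blob, getFileName(fileExtension), function(url) { console.log(url); });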

WebRTC remote video stops streaming after 2 seconds

I am facing very weird behavior with this WebRTC peer-to-peer app. The app streams audio from one peer to the other, but when it comes to streaming video, it actually streams video for only the first 2 seconds; after that it stops streaming video, while the audio continues to stream. Here is some of the code that handles the remote video:
var webrtc_capable = true;
var rtc_peer_connection = null;
var rtc_session_description = null;
var get_user_media = null;
var connect_stream_to_src = null;
var stun_server = null;
if (navigator.getUserMedia) {
rtc_peer_connection = RTCPeerConnection;
rtc_session_description = RTCSessionDescription;
get_user_media = navigator.getUserMedia.bind(navigator);
connect_stream_to_src = function(media_stream, media_element) {
media_element.srcObject = window.URL.createObjectURL(media_stream);
media_element.play();
};
} else if (navigator.mozGetUserMedia) {
rtc_peer_connection = mozRTCPeerConnection;
rtc_session_description = mozRTCSessionDescription;
get_user_media = navigator.mozGetUserMedia.bind(navigator);
connect_stream_to_src = function(media_stream, media_element) {
media_element.srcObject = window.URL.createObjectURL(media_stream);
media_element.play();
};
stun_server = null;
} else if (navigator.webkitGetUserMedia) {
rtc_peer_connection = webkitRTCPeerConnection;
rtc_session_description = RTCSessionDescription;
get_user_media = navigator.webkitGetUserMedia.bind(navigator);
connect_stream_to_src = function(media_stream, media_element) {
media_element.src = webkitURL.createObjectURL(media_stream);
};
} else {
alert("This browser does not support WebRTC - visit WebRTC.org for more info");
webrtc_capable = false;
}
</script>
<script>
var call_token;
var signaling_server;
var peer_connection;
function start() {
// create the WebRTC peer connection object
peer_connection = new rtc_peer_connection({
"iceServers": [ // information about ice servers
{ "url": "stun:"+stun_server },
]
});
// generic handler that sends any ice candidates to the other peer
peer_connection.onicecandidate = function (ice_event) {
console.log(ice_event.candidate);
if (ice_event.candidate){
console.log("true");
}
if (ice_event.candidate) {
signaling_server.send(
JSON.stringify({
token:call_token,
type: "new_ice_candidate",
candidate: ice_event.candidate ,
})
);
}
};
peer_connection.onaddstream = function (event) {
var video = document.querySelector("#remote_video");
video.src = webkitURL.createObjectURL(event.stream);
document.getElementById("loading_state").style.display = "none";
document.getElementById("open_call_state").style.display = "block";
};
setup_video();
signaling_server = new WebSocket("ws://localhost:1234");
if (document.location.hash === "" || document.location.hash === undefined) {
var token = Date.now()+"-"+Math.round(Math.random()*10000);
call_token = "#"+token;
document.location.hash = token;
signaling_server.onopen = function() {
signaling_server.onmessage = caller_signal_handler;
signaling_server.send(
JSON.stringify({
token:call_token,
type:"join",
})
);
}
document.title = "You are the Caller";
document.getElementById("loading_state").innerHTML = "Ready for a call...ask your friend to visit:<br/><br/>"+document.location;
} else { // you have a hash fragment so you must be the Callee
// get the unique token for this call from location.hash
call_token = document.location.hash;
signaling_server.onopen = function() {
// setup caller signal handler
signaling_server.onmessage = callee_signal_handler;
// tell the signaling server you have joined the call
signaling_server.send(
JSON.stringify({
token:call_token,
type:"join",
})
);
// let the caller know you have arrived so they can start the call
signaling_server.send(
JSON.stringify({
token:call_token,
type:"callee_arrived",
})
);
}
document.title = "You are the Callee";
document.getElementById("loading_state").innerHTML = "One moment please...connecting your call...";
}
// setup message bar handlers
document.getElementById("message_input").onkeydown = send_chat_message;
document.getElementById("message_input").onfocus = function() { this.value = ""; }
}
// handler to process new descriptions
function new_description_created(description) {
peer_connection.setLocalDescription(
description,
function () {
signaling_server.send(
JSON.stringify({
token:call_token,
type:"new_description",
sdp:description
})
);
},
log_error
);
}
// handle signals as a caller
function caller_signal_handler(event) {
var signal = JSON.parse(event.data);
if (signal.type === "callee_arrived") {
peer_connection.createOffer(
new_description_created,
log_error
);
} else if (signal.type === "new_ice_candidate") {
peer_connection.addIceCandidate(new RTCIceCandidate(signal.candidate));
} else if (signal.type === "new_description") {
peer_connection.setRemoteDescription(
new rtc_session_description(signal.sdp),
function () {
if (peer_connection.remoteDescription.type == "answer") {
peer_connection.createOffer(new_description_created, log_error);
}
},
log_error
);
} else if (signal.type === "new_chat_message") {
add_chat_message(signal);
} else {
// extend with your own signal types here
}
}
// handle signals as a callee
function callee_signal_handler(event) {
var signal = JSON.parse(event.data);
if (signal.type === "new_ice_candidate") {
peer_connection.addIceCandidate(
new RTCIceCandidate(signal.candidate)
);
} else if (signal.type === "new_description") {
peer_connection.setRemoteDescription(
new rtc_session_description(signal.sdp),
function () {
if (peer_connection.remoteDescription.type == "offer") {
peer_connection.createAnswer(new_description_created, log_error);
}
},
log_error
);
} else if (signal.type === "new_chat_message") {
add_chat_message(signal);
} else {
// extend with your own signal types here
}
}
// add new chat message to messages list
function add_chat_message(signal) {
var messages = document.getElementById("messages");
var user = signal.user || "them";
messages.innerHTML = user+": "+signal.message+"<br/>\n"+messages.innerHTML;
}
// send new chat message to the other browser
function send_chat_message(e) {
if (e.keyCode == 13) {
var new_message = this.value;
this.value = "";
signaling_server.send(
JSON.stringify({
token:call_token,
type: "new_chat_message",
message: new_message
})
);
add_chat_message({ user: "you", message: new_message });
}
}
// setup stream from the local camera
function setup_video() {
get_user_media(
{
"audio": true, // request access to local microphone
"video": true // request access to local camera
},
function (local_stream) {
// display the local stream in the local <video> MediaElement
connect_stream_to_src(local_stream, document.getElementById("local_video"));
peer_connection.addStream(local_stream);
},
log_error
);
}
function log_error(error) {
console.log(error);
}
</script>
Additional Info:
Windows 8 x64
Opera version 27.0.1689.76
Local video plays fine
This call is between two peers on the same network (no NAT traversal)
