WebRTC: Determine which TURN server is used in PeerConnection - javascript

Scenario: You would like to know whether a TURN server is being used for a particular call, and which one from the array of TURN servers you provided during PeerConnection creation is in use. Right now there are two options:
Wireshark: but when you are behind a corporate proxy and the TURN server sits outside it, Wireshark shows the proxy IP as the destination (not to mention the inconvenience of running it in the background).
Going through the stats page and finding out: chrome://webrtc-internals in Chrome and about:webrtc in Firefox.
I would like an alternative to the above two: determining this programmatically, so I do not have to leave my application page.

Update: I've updated the example to follow the latest spec, with maplike getStats.
The following approach follows the specification and currently only works in Firefox, because Chrome implements getStats() incorrectly at the moment. Hopefully, a version of the adapter.js polyfill should be available soon that will make this work in Chrome as well.
When you run this fiddle in Firefox, you'll see:
checking
connected
Does not use TURN
This is because the example provides both a STUN and a TURN server. But when I modify the config to use TURN only with iceTransportPolicy: "relay", I see:
checking
connected
Uses TURN server: 10.252.73.50
Note that the TURN server I use is behind a VPN, so it won't work for you, but feel free to modify the fiddle with your own server (just don't save it unless you want the info to become public!)
While I haven't tested with more than one TURN server, the IP address shown matches the configured TURN server, so this approach should let you tell which server is in use.
// Turn server is on Mozilla's VPN.
var cfg = {
  iceTransportPolicy: "all", // set to "relay" to force TURN.
  iceServers: [
    { urls: "stun:stun.l.google.com:19302" },
    { urls: "turn:10.252.73.50", username: "webrtc", credential: "firefox" }
  ]
};

var pc1 = new RTCPeerConnection(cfg), pc2 = new RTCPeerConnection(cfg);

pc1.onicecandidate = e => pc2.addIceCandidate(e.candidate);
pc2.onicecandidate = e => pc1.addIceCandidate(e.candidate);
pc2.oniceconnectionstatechange = () => log(pc2.iceConnectionState);
pc2.onaddstream = e => v2.srcObject = e.stream;

var findSelected = stats =>
  [...stats.values()].find(s => s.type == "candidate-pair" && s.selected);

var start = () => navigator.mediaDevices.getUserMedia({ video: true })
  .then(stream => pc1.addStream(v1.srcObject = stream))
  .then(() => pc1.createOffer()).then(d => pc1.setLocalDescription(d))
  .then(() => pc2.setRemoteDescription(pc1.localDescription))
  .then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
  .then(() => pc1.setRemoteDescription(pc2.localDescription))
  .then(() => waitUntil(() => pc1.getStats().then(s => findSelected(s))))
  .then(() => pc1.getStats())
  .then(stats => {
    var candidate = stats.get(findSelected(stats).localCandidateId);
    if (candidate.candidateType == "relayed") {
      log("Uses TURN server: " + candidate.ipAddress);
    } else {
      log("Does not use TURN (uses " + candidate.candidateType + ").");
    }
  })
  .catch(log);

var waitUntil = f => Promise.resolve(f())
  .then(done => done || wait(200).then(() => waitUntil(f)));

var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
var log = msg => div.innerHTML += msg + "<br>";
var failed = e => log(e + ", line " + e.lineNumber);
<video id="v1" width="108" height="81" autoplay></video>
<video id="v2" width="108" height="81" autoplay></video><br>
<button onclick="start()">Start!</button><br><div id="div"></div>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>

I wrote and tested the piece of code below; it works in the latest versions of both Firefox and Chrome. getConnectionDetails returns a promise which resolves to the connection details:
function getConnectionDetails(peerConnection) {
  var connectionDetails = {};   // the final result object
  if (window.chrome) {          // checking if Chrome
    var reqFields = [
      'googLocalAddress',
      'googLocalCandidateType',
      'googRemoteAddress',
      'googRemoteCandidateType'
    ];
    return new Promise(function(resolve, reject) {
      peerConnection.getStats(function(stats) {
        var filtered = stats.result().filter(function(e) {
          return e.id.indexOf('Conn-audio') == 0 && e.stat('googActiveConnection') == 'true';
        })[0];
        if (!filtered) return reject('Something is wrong...');
        reqFields.forEach(function(e) {
          connectionDetails[e.replace('goog', '')] = filtered.stat(e);
        });
        resolve(connectionDetails);
      });
    });
  } else {                      // assuming it is Firefox
    return peerConnection.getStats(null).then(function(stats) {
      var selectedCandidatePair = stats[Object.keys(stats).filter(function(key) {
            return stats[key].selected;
          })[0]],
          localICE = stats[selectedCandidatePair.localCandidateId],
          remoteICE = stats[selectedCandidatePair.remoteCandidateId];
      connectionDetails.LocalAddress = [localICE.ipAddress, localICE.portNumber].join(':');
      connectionDetails.RemoteAddress = [remoteICE.ipAddress, remoteICE.portNumber].join(':');
      connectionDetails.LocalCandidateType = localICE.candidateType;
      connectionDetails.RemoteCandidateType = remoteICE.candidateType;
      return connectionDetails;
    });
  }
}
I would like to point out one thing: all three of these methods fail in one scenario, namely two TURN servers running on the same machine on different ports. The only reliable way I found to tell them apart was looking at the TURN server logs.
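For reference, current browsers expose enough of the standardized stats to do the same check cross-browser. The sketch below is my own addition (not part of either answer above) and assumes the browser reports either a transport stat with selectedCandidatePairId or a nominated, succeeded candidate-pair:
// Resolves to { usesTurn, relayAddress } once a candidate pair has been selected.
async function checkTurnUsage(pc) {
  const stats = await pc.getStats();
  const values = [...stats.values()];
  // Prefer the transport's selected pair; fall back to a nominated, succeeded pair.
  const transport = values.find(s => s.type === "transport" && s.selectedCandidatePairId);
  const pair = transport
    ? stats.get(transport.selectedCandidatePairId)
    : values.find(s => s.type === "candidate-pair" && s.nominated && s.state === "succeeded");
  if (!pair) return null; // no pair selected yet
  const local = stats.get(pair.localCandidateId);
  return {
    usesTurn: local.candidateType === "relay",
    // "address" is the standard field name; some versions expose "ip" instead.
    relayAddress: local.address || local.ip
  };
}
Call it once the connection has reached the "connected" state, e.g. checkTurnUsage(pc1).then(console.log).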

Related

Google cast v3 CAF Receiver application with DRM url

I am trying to use the v3 CAF receiver app with DRM to cast videos from my iOS app. If I use the basic v3 CAF receiver app (default receiver) it works fine, but when I use a DRM URL (dash/.mpd and licenseUrl) it throws the error below:
Error
[ 20.844s] [Error] [INFO] {"type":"LOAD_CANCELLED","requestId":0}
See the below code.
const playerManager = context.getPlayerManager();
const playbackConfig = new cast.framework.PlaybackConfig();

/** Debug Logger **/
const castDebugLogger = cast.debug.CastDebugLogger.getInstance();

var manifestUri = 'https://example.domain.video/prod/drm/1/7e942940-d705-4417-b552-796e8fd25460/Media_1_20_d2aaec7102dc42c09dd54e4f00cbea412019062801270383196000/dash/manifest.mpd';
var licenseServer = 'https://wv.example.domain.com/hms/wv/rights/?ExpressPlayToken=BQALuGDeKZcAJDE2YzAwYTRkLTYwZWYtNGJiZC1hZmEzLTdhMmZhYTY2NzM5OQAAAHCZzHVjRyfs3AEgxFuwPvZsrqMndjiBPzLQ5_VUx6rJOEDD5noQmXJoVP-Va1gQzxfp9eHux15_pEr6g0RxXNZIjlsN6b7SIfpHPyS9iuPQqgvEgq5I_tV9k1lhQvKuqgpBN0Z5BtxCLwHc8xrnLbuUK6fiThcLMR4He_x38reAsumjFYg';

// setting licenseUrl manually from here
playbackConfig.licenseUrl = licenseServer;

playbackConfig.manifestRequestHandler = requestInfo => {
  requestInfo.withCredentials = true;
};

playbackConfig.licenseRequestHandler = requestInfo => {
  requestInfo.withCredentials = true;
  requestInfo.headers = {
    // 'Content-type': 'application/dash+xml', // trying this also
    'Content-type': 'application/octet-stream'
  };
  playbackConfig.licenseUrl = requestInfo.media.customData.licenseUrl;
  return playbackConfig;
};

// MessageInterceptor
playerManager.setMessageInterceptor(
  cast.framework.messages.MessageType.LOAD,
  request => {
    const error = new cast.framework.messages.ErrorData(cast.framework.messages.ErrorType.LOAD_CANCELLED);
    castDebugLogger.info('Error', error);
    if (!request.media) {
      error.reason = cast.framework.messages.ErrorReason.INVALID_PARAM;
      castDebugLogger.info('reason', error.reason);
      return error;
    }
    if (request.media && request.media.entity) {
      request.media.contentId = request.media.entity;
    }
    return new Promise((resolve, reject) => {
      if (!request.media) {
        castDebugLogger.error('MyAPP.LOG', 'Content not found');
        reject();
      } else {
        // I have passed the data (license URL, content ID etc.) manually from here for testing purposes
        const item = new cast.framework.messages.QueueItem();
        item.media = new cast.framework.messages.MediaInformation();
        item.media.contentId = manifestUri;
        item.media.streamType = cast.framework.messages.StreamType.BUFFERED;
        // Trying all options of contentType
        item.media.contentType = "application/octet-stream";
        //request.media.contentType = 'application/x-mpegurl';
        //item.media.contentType = "video/mp4";
        //request.media.contentType = 'video/mp4';
        //request.media.contentType = 'application/dash+xml';
        item.media.metadata = new cast.framework.messages.MovieMediaMetadata();
        item.media.metadata.title = "Example title";
        item.media.metadata.subtitle = "Example subtitle";
        item.media.metadata.images = [new cast.framework.messages.Image("https://commondatastorage.googleapis.com/gtv-videos-bucket/sample/images/BigBuckBunny.jpg")];
        request.media = item.media;
        playbackConfig.protectionSystem = cast.framework.ContentProtection.WIDEVINE;
        resolve(request);
      }
    });
  });

// start
context.start({
  playbackConfig: playbackConfig,
  touchScreenOptimizedApp: true
});
The LA_URL and .mpd URL work fine with another online Shaka player.
Did you check in the remote web inspector whether the network request is actually sent to the license server when the load request is issued for the encrypted DASH stream? Most probably this will help to find where the problem is.
Possibly you will have to add some intelligence to your licenseRequestHandler to add a token of some sort. Or possibly there's a CORS issue.
Note: before you post code to Stack Overflow, it might be wise to clean it up a bit: remove dead code, remove confusing commented-out code, and provide proper indentation. You're wasting the brain cycles of everybody reading your code and trying to process what you shared with the world!
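For what it's worth, here is a minimal sketch of how the DRM pieces are usually wired together in a CAF receiver. This is my own illustration, not taken from the question: the customData.licenseUrl field is an assumed shape for whatever the sender passes along, and the handlers simply mutate the request objects in place rather than returning playbackConfig.
const context = cast.framework.CastReceiverContext.getInstance();
const playerManager = context.getPlayerManager();
const playbackConfig = new cast.framework.PlaybackConfig();

// Tell CAF which DRM system the stream uses.
playbackConfig.protectionSystem = cast.framework.ContentProtection.WIDEVINE;

// Pick the license URL out of the LOAD request (customData shape is assumed).
playerManager.setMessageInterceptor(
  cast.framework.messages.MessageType.LOAD,
  request => {
    if (request.media && request.media.customData && request.media.customData.licenseUrl) {
      playbackConfig.licenseUrl = request.media.customData.licenseUrl;
    }
    return request;
  });

// Attach credentials to the outgoing license request; mutate requestInfo in place.
playbackConfig.licenseRequestHandler = requestInfo => {
  requestInfo.withCredentials = true;
};

context.start({ playbackConfig: playbackConfig, touchScreenOptimizedApp: true });
If the license request never shows up in the network panel at all, the problem is more likely in the LOAD interceptor or the manifest fetch than in the DRM configuration itself.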

Change playout delay in WebRTC stream

I'm trying to cast a live MediaStream (eventually from the camera) from peerA to peerB, and I want peerB to receive the live stream in real time and then replay it with an added delay. Unfortunately it isn't possible to simply pause the stream and resume with play, since that jumps forward to the live moment.
So I have figured out that I can use MediaRecorder + SourceBuffer to rewatch the live stream: record the stream, append the buffers to MSE (SourceBuffer) and play it 5 seconds later.
This works great on the local device (stream). But when I try to use MediaRecorder on the receiver's MediaStream (from pc.onaddstream), it looks like it gets some data and is able to append the buffer to the sourceBuffer; however it does not replay. Sometimes I get just one frame.
const [pc1, pc2] = localPeerConnectionLoop()
const canvasStream = canvas.captureStream(200)

videoA.srcObject = canvasStream
videoA.play()

// Note: using two MediaRecorders at the same time seems problematic
// But this one works
// stream2mediaSorce(canvasStream, videoB)
// setTimeout(videoB.play.bind(videoB), 5000)

pc1.addTransceiver(canvasStream.getTracks()[0], {
  streams: [ canvasStream ]
})

pc2.onaddstream = (evt) => {
  videoC.srcObject = evt.stream
  videoC.play()

  // Note: using two MediaRecorders at the same time seems problematic
  // THIS DOES NOT WORK
  stream2mediaSorce(evt.stream, videoD)
  setTimeout(() => videoD.play(), 2000)
}

/**
 * Turn a MediaStream into a SourceBuffer
 *
 * @param {MediaStream} stream Live stream to record
 * @param {HTMLVideoElement} videoElm Video element to play the recorded video in
 * @return {undefined}
 */
function stream2mediaSorce (stream, videoElm) {
  const RECORDER_MIME_TYPE = 'video/webm;codecs=vp9'
  const recorder = new MediaRecorder(stream, { mimeType: RECORDER_MIME_TYPE })

  const mediaSource = new MediaSource()
  videoElm.src = URL.createObjectURL(mediaSource)
  mediaSource.onsourceopen = (e) => {
    sourceBuffer = mediaSource.addSourceBuffer(RECORDER_MIME_TYPE);

    const fr = new FileReader()
    fr.onerror = console.log
    fr.onload = ({ target }) => {
      console.log(target.result)
      sourceBuffer.appendBuffer(target.result)
    }
    recorder.ondataavailable = ({ data }) => {
      console.log(data)
      fr.readAsArrayBuffer(data)
    }
    setInterval(recorder.requestData.bind(recorder), 1000)
  }

  console.log('Recorder created')
  recorder.start()
}
Do you know why it won't play the video?
I have created a fiddle with all the necessary code to try it out; the JavaScript tab is the same code as above (the HTML is mostly irrelevant and does not need to be changed).
Others try to reduce the latency, but I actually want to increase it to ~10 seconds, to rewatch something you did wrong in a golf swing for example, and if possible avoid MediaRecorder altogether.
EDIT:
I found something called "playout-delay" in some RTC extension
that allows the sender to control the minimum and maximum latency from capture to render time
https://webrtc.org/experiments/rtp-hdrext/playout-delay/
How can I use it?
Will it be of any help to me?
Update: there is a new feature that will enable this, called playoutDelayHint.
We want to provide means for JavaScript applications to set their preferences on how fast they want to render audio or video data. As fast as possible might be beneficial for applications which concentrate on real-time experience. For others, additional data buffering may provide a smoother experience in case of network issues.
Refs:
https://discourse.wicg.io/t/hint-attribute-in-webrtc-to-influence-underlying-audio-video-buffering/4038
https://bugs.chromium.org/p/webrtc/issues/detail?id=10287
Demo: https://jsfiddle.net/rvekxns5/
Though I was only able to set a max of 10s in my browser, it's more up to the UA vendor to do the best it can with the resources available.
import('https://jimmy.warting.se/packages/dummycontent/canvas-clock.js')
.then(({AnalogClock}) => {
  const {canvas} = new AnalogClock(100)
  document.querySelector('canvas').replaceWith(canvas)

  const [pc1, pc2] = localPeerConnectionLoop()
  const canvasStream = canvas.captureStream(200)

  videoA.srcObject = canvasStream
  videoA.play()

  pc1.addTransceiver(canvasStream.getTracks()[0], {
    streams: [ canvasStream ]
  })

  pc2.onaddstream = (evt) => {
    videoC.srcObject = evt.stream
    videoC.play()
  }

  $dur.onchange = () => {
    pc2.getReceivers()[0].playoutDelayHint = $dur.valueAsNumber
  }
})
<!-- all the irrelevant part, that you don't need to know anything about -->
<h3 style="border-bottom: 1px solid">Original canvas</h3>
<canvas id="canvas" width="100" height="100"></canvas>
<script>
function localPeerConnectionLoop(cfg = {sdpSemantics: 'unified-plan'}) {
  const setD = (d, a, b) => Promise.all([a.setLocalDescription(d), b.setRemoteDescription(d)]);
  return [0, 1].map(() => new RTCPeerConnection(cfg)).map((pc, i, pcs) => Object.assign(pc, {
    onicecandidate: e => e.candidate && pcs[i ^ 1].addIceCandidate(e.candidate),
    onnegotiationneeded: async e => {
      try {
        await setD(await pc.createOffer(), pc, pcs[i ^ 1]);
        await setD(await pcs[i ^ 1].createAnswer(), pcs[i ^ 1], pc);
      } catch (e) {
        console.log(e);
      }
    }
  }));
}
</script>
<h3 style="border-bottom: 1px solid">Local peer (PC1)</h3>
<video id="videoA" muted width="100" height="100"></video>
<h3 style="border-bottom: 1px solid">Remote peer (PC2)</h3>
<video id="videoC" muted width="100" height="100"></video>
<label> Change playoutDelayHint
  <input type="number" value="1" id="$dur">
</label>
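Since the hint is still non-standard, it is worth feature-detecting it. Here is a small sketch of my own (not part of the answer above) that applies a delay to every receiver and falls back between playoutDelayHint (seconds) and the later standardized jitterBufferTarget (milliseconds), depending on what the browser exposes:
function setPlayoutDelay(pc, seconds) {
  for (const receiver of pc.getReceivers()) {
    if ('jitterBufferTarget' in receiver) {
      receiver.jitterBufferTarget = seconds * 1000; // standardized attribute, in milliseconds
    } else if ('playoutDelayHint' in receiver) {
      receiver.playoutDelayHint = seconds;          // Chrome's original hint, in seconds
    } else {
      console.warn('No playout delay control exposed by this browser');
    }
  }
}
// e.g. setPlayoutDelay(pc2, 10) once the connection is up.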

Chrome crash with webworkers and createImageBitmap

I'm currently having an issue when loading images with webworkers. I want to batch load a bunch of images and then do some processing on these images (in my case, convert source image to ImageBitmap using createImageBitmap). Currently the user has the ability to cancel the request. This causes a crash when trying to terminate the worker if the worker hasn't finished. I've created a fiddle here https://jsfiddle.net/e4wcro0o/18/ that crashes consistently.
The issue lies here:
function closeWorker() {
  if (!isClosed) {
    console.log("terminating worker");
    isClosed = true;
    worker.terminate();
  }
}

for (let i = 0; i < srcImages.length; i++) {
  loadImageWithWorker(new URL(srcImages[i], window.location).toString()).then(function(img) {
    closeWorker();
    console.log(img);
  });
}
It may look a bit funky to call closeWorker() on the first resolved promise, but it does mean that the crash is reproducible. I've only tested on Chrome 64.0.3282.186 (Official Build) (64-bit).
Any ideas on what I'm doing wrong?
I have come across the same issue. I think the cause is terminating the worker while the createImageBitmap call is still in flight.
I have modified your JSFiddle with a method of terminating the worker at the earliest opportunity that avoids the crash.
const worker = createWorker(() => {
  const pendingBitmaps = {};
  var pendingKill = false;

  self.addEventListener("message", e => {
    const src = e.data;

    if (src == "KILL") {
      pendingKill = true;
      Promise.all(Object.values(pendingBitmaps)).then(_ => self.postMessage("READY"));
    }

    // not accepting any more conversions
    if (pendingKill) {
      self.postMessage({src, bitmap: null});
      return;
    }

    pendingBitmaps[src] = fetch(src).then(response => response.blob())
      .then(blob => {
        if (pendingKill) return null;
        return createImageBitmap(blob);
      })
      .then(bitmap => {
        self.postMessage({src, bitmap});
        delete pendingBitmaps[src];
      })
  })
});
https://jsfiddle.net/wrf1sLbx/16/
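To complete the picture, the main-thread side then has to wait for the worker's "READY" acknowledgement before terminating it. A rough sketch under the same assumptions (worker is the instance returned by the fiddle's createWorker helper; the function name closeWorkerSafely is mine):
let isClosed = false;

function closeWorkerSafely() {
  if (isClosed) return;
  isClosed = true;
  worker.addEventListener("message", function onReady(e) {
    if (e.data === "READY") {
      worker.removeEventListener("message", onReady);
      console.log("terminating worker");
      worker.terminate(); // safe now: no createImageBitmap call is in flight
    }
  });
  worker.postMessage("KILL");
}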

Protractor (3.0.0)/Webdriver (2.53.0) switching tabs error

I have the following test with the current versions of webdriver/protractor (see title).
it('checks tabs', async () => {
  const url1 = 'https://stackoverflow.com/';
  const url2 = 'http://programmers.stackexchange.com/';
  let windowHandles = {
    oldTab: '',
    newTab: ''
  };

  await browser.get(url1);
  await browser.getWindowHandle().then(handle => {
    windowHandles.oldTab = handle;
  });
  await browser.executeScript('window.open("' + url2 + '", "whatever")');
  await browser.getAllWindowHandles()
    .then(handles => {
      expect(handles[0]).toEqual(windowHandles.oldTab);
      windowHandles.newTab = handles[1];
      return browser.driver.switchTo().window(windowHandles.oldTab);
    })
    .then(() => {
      let handle = browser.driver.getWindowHandle();
      expect(handle).toEqual(windowHandles.oldTab);
    })
    .then(() => browser.sleep(6000));
});
The interesting thing is that the assertions work well; they are all green.
But it does not switch back to the first tab.
Am I missing something, or is it indeed a bug?
Update
In my Firefox, window.open opens a window, not a tab, and switching actually works between the windows.
I can accept the workaround of opening windows instead of tabs in Chrome, though I really think that if the current window handle tells you that you have switched while you are still in the same window, that is a bug.
Update 2
Even with opening windows, Chrome does not switch while Firefox does. I reported a bug.
This is my new test:
it('checks tabs', async () => {
  const url1 = '/login';
  const url2 = config.chatLaunchUrl;
  let windowHandles = {
    oldTab: '',
    newTab: ''
  };

  await browser.get(url1);
  await browser.getWindowHandle().then(handle => {
    windowHandles.oldTab = handle;
  });

  // opening new window sending CTRL+N
  await browser.actions()
    .sendKeys(protractor.Key.chord(protractor.Key.CONTROL, "n"))
    .perform();

  await browser.getAllWindowHandles()
    .then(handles => {
      expect(handles[0]).toEqual(windowHandles.oldTab);
      windowHandles.newTab = handles[1];
      return browser.driver.switchTo().window(windowHandles.newTab);
    })
    .then(() => {
      // this works
      return browser.get(url2);
    })
    .then(() => {
      return browser.driver.switchTo()
        .window(windowHandles.oldTab)
        .then(() => browser.driver.executeScript('window.focus();'));
    })
    .then(() => {
      let handle = browser.driver.getWindowHandle();
      expect(handle).toEqual(windowHandles.oldTab);
    })
    .then(() => browser.sleep(6000));
});
Update 3
The difference between Chrome and Firefox is that when I switch in Firefox, the browser window comes into focus, while with Chrome it does not. The test can continue without problems in Chrome as well, so it's a lesser bug.
(Related links:
Protractor - switch tabs error - different case
https://github.com/angular/protractor/issues/55 - closed many years ago
https://github.com/angular/protractor/issues/3124 - My new bug report)
Visually, you will see it switch to the second tab, but you won't see it switch back to the first tab (maybe that's a bug in chrome or chromedriver?). The test commands are in fact sent to the correct tab though, and your test will pass or fail appropriately.
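One more suggestion of my own, not part of the answer above: getAllWindowHandles can race the creation of the new tab, so it is common to wait for the second handle to appear before switching. A minimal sketch:
// Wait until the second handle exists, then switch to it.
await browser.executeScript('window.open("' + url2 + '", "whatever")');
await browser.wait(
  () => browser.getAllWindowHandles().then(handles => handles.length === 2),
  5000,
  'Expected a second window handle to appear'
);
const handles = await browser.getAllWindowHandles();
await browser.switchTo().window(handles[1]);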

Detect offline peer in WebRTC connection

We are developing a video stream from a mobile device to a computer using WebRTC. The mobile device might lose its connection completely, and the computer should be able to detect that. Right now, the video just freezes, but none of the RTCPeerConnection event handlers are called in such a situation.
So how can such a connection failure be detected on the other peer?
How can a peer detect connection problems on connection establishment in the first place?
As a workaround in Firefox, you could use getStats to detect if packets stop coming in:
var findStat = (m, type) => [...m.values()].find(s => s.type == type && !s.isRemote);

var hasConnected = new Promise(resolve => pc.oniceconnectionstatechange =
  e => pc.iceConnectionState == "connected" && resolve());

var hasDropped = hasConnected.then(() => new Promise(resolve => {
  var lastPackets = countdown = 0, timeout = 3; // seconds
  var iv = setInterval(() => pc.getStats().then(stats => {
    var packets = findStat(stats, "inbound-rtp").packetsReceived;
    countdown = (packets - lastPackets) ? timeout : countdown - 1;
    if (!countdown) resolve(clearInterval(iv));
    lastPackets = packets;
  }), 1000);
}));
Here's a demo: https://jsfiddle.net/4rzhe7n8/
The iceconnectionstatechange handler should fire after ~5-10 seconds of not receiving data from the peer anymore (in Chrome; Firefox is working on that currently). See https://webrtc.github.io/samples/src/content/peerconnection/states/ for an example.
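On current browsers, listening for those state changes is the simplest starting point before falling back to stats polling. A minimal sketch (mine, not from the answers above):
// Treat "disconnected" as a possibly transient outage and "failed" as fatal.
pc.addEventListener('iceconnectionstatechange', () => {
  switch (pc.iceConnectionState) {
    case 'disconnected':
      console.warn('Peer may have gone offline; waiting to see if it recovers...');
      break;
    case 'failed':
      console.error('Connection lost; tear down or attempt an ICE restart');
      // e.g. pc.restartIce(); followed by renegotiation
      break;
  }
});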
