Requested device not found chromeMediaSource: 'desktop' - javascript

I am trying to create a web application that shares the desktop along with the system audio, using WebRTC from the Chrome browser. If I set the constraints:
const constraints = {
  audio: {
    mandatory: {
      chromeMediaSource: 'desktop'
    }
  },
  video: {
    mandatory: {
      chromeMediaSource: 'desktop'
    }
  }
};
I get the error "DOMException: Requested device not found" or "DOMException: Invalid state".
Here is the demo: https://codepen.io/EbramTawfik/pen/xQozaE
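In practice, Chrome only honours chromeMediaSource: 'desktop' when getUserMedia is also given a chromeMediaSourceId obtained from a Chrome extension (chrome.desktopCapture) or an Electron wrapper; from a plain web page the standards-track API for screen capture is navigator.mediaDevices.getDisplayMedia. A minimal sketch, assuming a recent Chrome and an existing <video> element on the page:

// Capture the screen (and, where the browser supports it, tab/system audio)
// with the standard Screen Capture API instead of chromeMediaSource.
async function startScreenShare() {
  const stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: true  // audio capture is best-effort and depends on platform/source
  });
  document.querySelector('video').srcObject = stream;  // assumes a <video> element exists
  return stream;
}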

Related

How to find out reason for MediaStreamTrack.onended event

I have a website that is used to take pictures: the user has to take one picture with the main (rear) camera and then a second picture (a selfie) with the front camera. All those pictures are saved as blobs in a database and can be viewed on a separate page.
Issue: sometimes one of the photos is plain black, and it seems that the MediaStreamTrack ends randomly, which causes the image to arrive in the DB as plain black. (This mostly happens with iPhones, but I have seen Windows 10 desktops with the same issue; I log the userAgent and have a function that logs events such as 'camera permission requested', 'permission granted', and 'stream ended'.)
Is there a way to find out why the onended event was fired?
function startVideo(facingMode = 'environment') {
  if (this.mediaStream && facingMode === 'user') {
    // stop previous stream to start a new one with a different camera
    this.mediaStream.getVideoTracks()[0].stop();
  }
  const videoEl = video.current;
  const canvasEl = canvas.current;
  navigator.mediaDevices
    .getUserMedia({
      video: {
        facingMode:
          facingMode === 'user' ? { exact: facingMode } : facingMode,
        height: {
          min: 720,
          max: 720
        },
        width: {
          min: 720,
          max: 1280
        },
        advanced: [{ aspectRatio: 1 }]
      },
      audio: false
    })
    .then((stream) => {
      if (this.mediaStream !== stream) this.mediaStream = stream;
      videoEl.srcObject = this.mediaStream;
      videoEl.play();
      this.mediaStream.getVideoTracks()[0].onended = () => {
        console.log('stream ended unexpectedly');
        this.sendUserLog('stream ended');
      };
    })
    .catch((error) => {
      if (error.name === 'OverconstrainedError') {
        this.sendUserLog('camera quality too low');
      } else {
        console.log('An error occurred: ' + error);
        this.sendUserLog('permission denied');
      }
    });
}
I also tried logging the onended event itself, but it only shows the source MediaStream properties and type: 'ended', which I already know since the event fired.
Also, since most of these cases happen on mobile devices, it seems implausible that the camera was disconnected manually.
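The ended event itself carries no reason, so about the best you can do is record the surrounding state when it fires. A minimal diagnostic sketch (logTrackDiagnostics and the sendUserLog parameter are hypothetical names); on iOS in particular, backgrounding the page or an interruption such as an incoming call commonly mutes or stops the track, so the page visibility state is worth logging:

// Hypothetical helper: attach diagnostics to a video track so that when it
// ends we at least know the track state and whether the page was hidden.
function logTrackDiagnostics(track, sendUserLog) {
  track.addEventListener('mute', () => sendUserLog('track muted (no frames arriving)'));
  track.addEventListener('unmute', () => sendUserLog('track unmuted'));
  track.addEventListener('ended', () => {
    sendUserLog(
      'track ended; readyState=' + track.readyState +
      ' muted=' + track.muted +
      ' visibility=' + document.visibilityState
    );
  });
  document.addEventListener('visibilitychange', () =>
    sendUserLog('visibility changed to ' + document.visibilityState)
  );
}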

How to send (Screen sharing stream) via SIPJS to the other caller

I'm using SIP.js to make calls between two callers using a web browser.
Now I want to add a screen-sharing feature. So far I have managed to open the Chrome screen-sharing window, get the stream, and play it in a video element.
But what I really need is to send this stream to the other caller so he can see my screen.
What I've tried so far:
After I get the screen-sharing stream I pass it to session.sessionDescriptionHandler.peerConnection, and then try to catch the stream (or track) using these events: onTrackAdded, onaddTrack, onaddStream, onstream.
But none of these events receive anything.
I also tried to send the stream with a video constraint before the call starts:
video: {
  mandatory: {
    chromeMediaSource: 'desktop',
    // chromeMediaSourceId: event.data.sourceId,
    maxWidth: window.screen.width > 1920 ? window.screen.width : 1920,
    maxHeight: window.screen.height > 1080 ? window.screen.height : 1080
  },
  optional: []
}
I even tried to pass the stream itself as the video constraint:
navigator.mediaDevices.getDisplayMedia(constraints)
  .then(function (stream) {
    // We've got the media stream
    console.log("----------then triggered-------------");
    var options = {
      sessionDescriptionHandlerOptions: {
        constraints: {
          audio: true,
          video: stream
        }
      }
    };
    pub_session = userAgent.invite(reciver_name, options);
  })
  .catch(function (error) {
    console.log("----------catch-------------");
    console.log(error);
  });
That didn't work either.
Here is my code.
First, get the screen-sharing stream and send it to the other user:
// Get screen sharing and send it.
navigator.mediaDevices.getDisplayMedia(constraints)
  .then(function (stream) {
    // We've got the media stream
    console.log("----------then triggered-------------");
    var pc = session.sessionDescriptionHandler.peerConnection;
    stream.getTracks().forEach(function (track) {
      pc.addTrack(track, stream);
    });
  })
  .catch(function (error) {
    console.log("----------catch-------------");
    console.log(error);
  });
Then catch that stream on the other side:
// Receiving stream or track
userAgent.on('invite', function (session) {
  session.on('trackAdded', function () {
    console.log('-------------trackAdded triggered--------------');
  });
  session.on('addTrack', function (track) {
    console.log('-------------addTrack triggered--------------');
  });
  session.on('addStream', function (stream) {
    console.log('-------------addStream triggered--------------');
  });
  session.on('stream', function (stream) {
    console.log('-------------stream triggered--------------');
  });
});
But I still get nothing from the code above.
So how can I pass that stream or track to the other caller after the call starts?
Thank you so much.
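As an aside, at the plain WebRTC level the remote side normally surfaces incoming tracks through the peer connection's ontrack event. A minimal sketch, assuming the underlying connection is reachable the same way as above and that a <video id="remoteVideo"> element exists (SIP.js event names vary between versions, so this bypasses them):

// Listen for remote tracks directly on the RTCPeerConnection.
var pc = session.sessionDescriptionHandler.peerConnection;
pc.ontrack = function (event) {
  console.log('remote track received:', event.track.kind);
  document.getElementById('remoteVideo').srcObject = event.streams[0];
};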
I found the solution thanks to some great gentlemen in the SIP.js groups.
I hope the answer helps someone as it helped me.
var option = { video: { mediaSource: 'screen' }, audio: true };
navigator.mediaDevices.getDisplayMedia(option)
  .then(function (streams) {
    var pc = session.sessionDescriptionHandler.peerConnection;
    var videoTrack = streams.getVideoTracks()[0];
    var sender = pc.getSenders().find(function (s) {
      return s.track.kind == videoTrack.kind;
    });
    console.log('found sender:', sender);
    sender.replaceTrack(videoTrack);
  }, function (error) {
    console.log("error ", error);
  });
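A note on why this works: replaceTrack swaps the outgoing track on a sender that was already negotiated for the call, so no renegotiation is needed. As a hedged follow-up sketch, placed inside the same .then callback so videoTrack and sender are in scope, and assuming the original camera track was saved in a variable such as cameraTrack (hypothetical), you can switch back when the user stops sharing from the browser UI:

// The screen track fires 'ended' when the user clicks the browser's
// "Stop sharing" button; put the camera track back on the same sender.
videoTrack.onended = function () {
  sender.replaceTrack(cameraTrack);  // cameraTrack: hypothetical saved camera video track
};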

AbortError - Screen Sharing using WebRTC

I am trying to implement screen-sharing functionality using WebRTC. In Firefox it works, but in Chrome it throws an "AbortError":
var constraints = {
  video: {
    mandatory: {
      chromeMediaSource: 'screen'
    },
    optional: []
  },
  audio: true
};

if (navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices.getUserMedia(constraints).then(getUserMediaSuccess).catch(errorHandler);
} else {
  alert('Your browser does not support getUserMedia API');
}
console error:
{name: "AbortError", message: "Error starting screen capture", constraint: undefined, toString: ƒ}
Here you are calling 'getUserMedia' while your target is the screen.
Solution:
You need to use 'getDisplayMedia' instead.
Your constraints will then look like this:
var constraints = {
  video: true,
  audio: true
};
Try this; it should solve your problem.
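For completeness, a minimal usage sketch of the suggested approach, reusing the getUserMediaSuccess and errorHandler handlers from the question:

var constraints = {
  video: true,
  audio: true
};

if (navigator.mediaDevices.getDisplayMedia) {
  navigator.mediaDevices.getDisplayMedia(constraints)
    .then(getUserMediaSuccess)   // same success handler as before
    .catch(errorHandler);        // the user cancelling the picker also lands here
} else {
  alert('Your browser does not support the getDisplayMedia API');
}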

WebRTC merge video MediaStreamTracks into one on client side

How can I merge two video streams into one on the client side and send it through a WebRTC PeerConnection?
For example, I have two video streams like this:
navigator.getUserMedia({ video: true }, successCamera, error); // capture camera

function successCamera(streamCamera) {
  vtCamera = streamCamera.getVideoTracks()[0];

  navigator.getUserMedia({ // capture screen
    video: {
      mandatory: {
        chromeMediaSource: 'screen',
        maxWidth: 1280,
        maxHeight: 720
      }
    }
  }, successScreen, error);

  function successScreen(streamScreen) {
    vtScreen = streamScreen.getVideoTracks()[0];

    mergedVideoTracks = vtScreen + vtCamera; // How can I merge the tracks into one??

    finallyStream = streamScreen.clone();
    finallyStream.removeTrack(finallyStream.getVideoTracks()[0]);
    finallyStream.addTrack(mergedVideoTracks);
    finallyStream; // I need to send this through a WebRTC PeerConnection
  }
}

function error(error) {
  console.error(error);
}
As you can see, I have vtScreen and vtCamera as MediaStreamTracks. I need the screen as the background and the camera as a small frame in the bottom-right corner, and then to send this through a WebRTC PeerConnection as one stream.
Yes, I can merge them on a canvas, but I don't know how to send that canvas as a MediaStreamTrack. =(
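A canvas can in fact be turned back into a video track: HTMLCanvasElement.captureStream() returns a MediaStream whose video track can be added to a peer connection. A minimal sketch, assuming screenVideo and cameraVideo are <video> elements already playing the two streams and pc is the RTCPeerConnection (all three names are placeholders):

// Composite screen + camera onto a canvas, then send the canvas as a track.
var canvas = document.createElement('canvas');
canvas.width = 1280;
canvas.height = 720;
var ctx = canvas.getContext('2d');

function draw() {
  ctx.drawImage(screenVideo, 0, 0, canvas.width, canvas.height);                  // screen as background
  ctx.drawImage(cameraVideo, canvas.width - 330, canvas.height - 190, 320, 180);  // camera bottom-right
  requestAnimationFrame(draw);
}
draw();

var mergedStream = canvas.captureStream(30);                   // 30 fps stream from the canvas
pc.addTrack(mergedStream.getVideoTracks()[0], mergedStream);   // send the merged video track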

getUserMedia fails with TrackStartError when allowing accessing to microphone

I am using Chrome Version 35.0.1916.114 m
When I run my HTML page using IIS (e.g., localhost/test.html) and hit "Allow" on the "http://localhost/ wants to use your microphone" prompt, getUserMedia() fails with the following error:
NavigatorUserMediaError {constraintName: "", message: "", name: "TrackStartError"}
Code:
var constraints = { audio: true, video: false };
...
function successCallback(stream) {
  ...
}

function errorCallback(error) {
  console.log("navigator.getUserMedia error: ", error);
}

navigator.getUserMedia(constraints, successCallback, errorCallback);
What could be the cause of this error?
var mediaConstraints = {
  'mandatory': {
    'OfferToReceiveAudio': true,
    'OfferToReceiveVideo': false
  }
};
Declare this in your code before using the constraints.
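For what it's worth, TrackStartError is a Chrome-specific error name that typically means the device could not be started, for example because the microphone is already in use by another application or the driver failed. In current browsers, a small diagnostic sketch reusing the successCallback and errorCallback from the question can confirm that an audio input device is visible before retrying:

// List audio input devices to confirm a microphone is visible to the browser,
// then retry getUserMedia with the same constraints.
navigator.mediaDevices.enumerateDevices()
  .then(function (devices) {
    var mics = devices.filter(function (d) { return d.kind === 'audioinput'; });
    console.log('audio inputs found:', mics.length);
    return navigator.mediaDevices.getUserMedia({ audio: true, video: false });
  })
  .then(successCallback)
  .catch(errorCallback);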
