I am trying to implement the Chrome desktopCapture API with the MediaStreamRecorder library. Everything works perfectly, but the video quality is blurry and bad. One minute of captured desktop video takes 14 MB.
below is my code:
// Handle of the outstanding desktopCapture request (so it could be cancelled).
var pending_request_id;

// Any runtime message from the extension kicks off a recording session and
// immediately acknowledges the sender.
chrome.runtime.onMessage.addListener(function (message, sender, sendResponse) {
  startRecording();
  sendResponse({ "success": true });
});
// Error callback for getUserMedia(): just records that the capture failed.
function getUserMediaError() {
  var failureMessage = "getUserMedia() failed.";
  console.log(failureMessage);
}
// Callback for chrome.desktopCapture.chooseDesktopMedia(). Receives the
// chosen media-source id (empty when the user cancelled the picker) and
// opens the corresponding desktop stream.
function onAccessApproved(id) {
  // chooseDesktopMedia() invokes the callback with an empty string when the
  // user dismisses the picker, so a falsy id means "rejected".
  if (!id) {
    console.log("Access rejected.");
    return;
  }
  navigator.webkitGetUserMedia({
    audio: false,
    video: {
      mandatory: {
        chromeMediaSource: "desktop",
        chromeMediaSourceId: id,
        // FIX: without explicit bounds Chrome picks a small capture
        // resolution, which is what made the recorded video blurry.
        // Generous maximums let the capturer use the native screen size.
        maxWidth: 4000,
        maxHeight: 4000
      }
    }
  }, onMediaSuccess, getUserMediaError);
}
// Opens Chrome's desktop-media picker restricted to application windows;
// the chosen source id is delivered to onAccessApproved().
function startRecording() {
  var desktopSources = ["window"];
  pending_request_id = chrome.desktopCapture.chooseDesktopMedia(
    desktopSources,
    onAccessApproved
  );
}
// Success callback for getUserMedia(): wires the captured desktop stream
// into a MediaStreamRecorder and logs a blob URL for every recorded chunk.
function onMediaSuccess(stream) {
  console.log("rcvd stream");

  var recorder = new MediaStreamRecorder(stream);
  recorder.mimeType = 'video/mp4';
  // Record at the screen's own dimensions so the output isn't stretched.
  recorder.width = window.screen.width;
  recorder.height = window.screen.height;

  recorder.ondataavailable = function (blob) {
    var blobURL = URL.createObjectURL(blob);
    console.log('' + blobURL + '');
    var link = blobURL;
    var videoInfo = "Compiled Video file size: " + Math.ceil(blob.size / 1024) + "KB";
    console.log(link);
    console.log(videoInfo);
  };

  // Large timeslice (30 s) so chunks arrive infrequently; recording itself
  // keeps going until the stream ends.
  recorder.start(30000);

  stream.onended = function () {
    recorder.stop();
    //finalizeVideo();
    console.log("Ended");
  };
}
// Generic media error handler: dumps the error object to the console.
function onMediaError(e) {
  var mediaError = e;
  console.error('media error', mediaError);
}
Before using this library, I tried to save the streaming video using Whammy.js, but I failed to do so. Then I found this library.
Questions :
Is there any way to increase quality of the video and as well as compress video size too?
How can I save the video, which is returned as a blob:chrome URL, to the desktop as a fully qualified video file?
As an alternative, If anyone knows how to do this in Whammy.js then kindly let me know
Thanks,
This might help to make your video quality better -
// Answer snippet: request the desktop stream with generous maximum
// dimensions so Chrome captures at (up to) full resolution rather than a
// small default — the cause of the blurry output described above.
navigator.webkitGetUserMedia({
audio:false,
video: { mandatory: { chromeMediaSource: "desktop",
chromeMediaSourceId: id,
maxWidth: 4000,
maxHeight: 4000 } }
}, onMediaSuccess, getUserMediaError);
// NOTE(review): this closing brace belongs to the enclosing
// onAccessApproved() function from the original post, which is not shown.
}
Related
I'm using the Media Source Extensions API to capture the desktop / an application / a chrome tab with it's audio and stream that data to multiple clients through Socket.io.
I am using a MediaRecorder which routinely records the stream every 10 ms, sending each 10ms chunk to my server which relays it back to every client to be appended to a SourceBuffer which is attached to a MediaSource attached to a video tag. Both the audio and video are successfully sent, received and displayed by the video tag, however, after a short time the audio gets ahead of the video and both gradually get further apart.
// Attach a MediaSource to the <video> element; the SourceBuffer is created
// lazily once the source reports it is open.
let videoMimeType = 'video/webm;codecs="vp9,opus"';
let player = document.getElementById("player");
let mediaSource = new MediaSource();

player.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', (e) => {
  if (videoBuffer == null) {
    videoBuffer = mediaSource.addSourceBuffer(videoMimeType);
    // 'sequence' mode appends chunks back-to-back regardless of their
    // embedded timestamps.
    videoBuffer.mode = 'sequence';
  }
});
// Capture the screen (with audio) at up to 720p/30fps and stream it to the
// server in small MediaRecorder chunks over Socket.io.
navigator.mediaDevices.getDisplayMedia({
video: {
width: {
max: 1280
},
height: {
max: 720
},
frameRate: 30
},
// All audio processing is disabled so the capture is as raw as possible.
// NOTE(review): the goog* constraint names are non-standard, Chrome-only.
audio: {
echoCancellation: false,
googEchoCancellation: false,
googAutoGainControl: false,
googAutoGainControl2: false,
googNoiseSuppression: false,
googHighpassFilter: false
}
}).then(
stream => {
// Wrap the capture stream in a fresh MediaStream for the recorder.
let videoRecorder = new MediaRecorder(new MediaStream(stream), {
mimeType: videoMimeType
});
// When the first track ends (user stops sharing), stop everything and
// tell the server the broadcast is over.
stream.getTracks()[0].addEventListener("ended", () => {
stream.getTracks().forEach(track => track.stop());
videoRecorder.stop();
playing = false;
player.src = "";
socket.emit("end")
return;
});
// Relay every non-empty recorded chunk to the server.
videoRecorder.ondataavailable = function(e) {
if (e.data.size > 0) {
socket.emit('update', e.data);
}
};
// NOTE(review): a 10 ms timeslice produces a flood of tiny chunks; this
// is a plausible contributor to the A/V drift described in the question.
videoRecorder.start(10);
},
error => {
console.log("Unable to acquire screen capture", error);
});
// Playback side: append each relayed chunk to the SourceBuffer.
// NOTE(review): chunks that arrive while the buffer is updating (or while
// `queue` is non-empty) are silently dropped — nothing here ever pushes to
// `queue`. Dropped chunks would explain the gradual audio/video
// desynchronisation described above; confirm against the elided queue logic.
socket.on("update", (arrayBuffer) => {
if (playing) {
player.play()
if (!videoBuffer.updating && queue.length == 0) {
videoBuffer.appendBuffer(arrayBuffer);
}
}
})
I expected the video and audio to remain synchronised throughout playback but instead over time the audio get ahead of the video. The video also buffers at times, which seems to herald the de-synchronisation, though during buffering both the audio and video stop and then start again after.
My problem is that I have a script that takes video from the camera. On my iPhone there is a problem: I can't use a Blob as the URL for <video></video>, so I used a FileReader, which makes base64 from the Blob. But I found another problem: when I take video in portrait mode, the captured video comes out in landscape mode and is very wide. I need to rotate that video to portrait mode. I don't know if I have a mistake in the Blob or the FileReader code. Can you help me, please? Thanks.
This is my HTML Code:
<!-- Live camera preview; the playsinline attributes keep iOS Safari from
     forcing fullscreen playback. FIX: the original tag was never closed. -->
<video autoplay="true" id="cameraVideo" playsinline webkit-playsinline></video>
This is my Javascript Code:
// Grab the preview element and set up the shared recording state.
var video = document.querySelector("#cameraVideo");
var mode = "rear";
var mediaRecorder;
var chunks = [];

// Start the rear ("environment") camera and attach its stream to the
// preview element; the recorder is created up front but not started.
if (navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices
    .getUserMedia({ video: { facingMode: "environment" } })
    .then(function (stream) {
      video.srcObject = stream;
      mediaRecorder = new MediaRecorder(stream);
    })
    .catch(function (err0r) {
      alert("Something went wrong!");
    });
}
// Press-and-hold on the shutter button starts recording; every chunk the
// recorder emits is collected for later assembly into a Blob.
$(".camera").find(".take").on("touchstart mousedown", function () {
  mediaRecorder.ondataavailable = function (ev) {
    chunks.push(ev.data);
  };
  mediaRecorder.start();
});
// Releasing the shutter button stops the recorder, assembles the collected
// chunks into a video Blob, and plays it back via a base64 data URL (iOS
// cannot use blob: URLs as a <video> src, per the question).
$(".camera").find(".take").on("touchend mouseup", function () {
  // Register the handler before stop() so the final dataavailable/stop
  // events can never race past an unset callback.
  mediaRecorder.onstop = (ev) => {
    var blob = new Blob(chunks, { 'type': 'video/mp4' });
    chunks = [];
    // FIX: the original also created an object URL here
    // (webkitURL.createObjectURL(blob)) that was never used nor revoked —
    // a memory leak. Only the FileReader/data-URL path is kept.
    if (video.srcObject) video.srcObject.getTracks().forEach(t => t.stop());
    var reader = new FileReader();
    reader.readAsDataURL(blob);
    reader.onloadend = function () {
      document.getElementById("savevideo").src = reader.result;
      document.getElementById("savevideo").play();
    };
  };
  mediaRecorder.stop();
});
Pictures:
When video is recording :
When video is recorded :
I have to create a live streaming video application in which I have to read video using an external web camera connected to my MacBook. I have to do this using WebRTC. But while executing the code, the integrated webcam gets triggered instead of the external webcam.
// Preview element for the incoming camera stream.
var video = document.querySelector("#videoElement");

// FIX: the original declaration was missing its closing "};", a syntax
// error. Request audio plus the environment-facing (rear/external) camera.
var constraints = { audio: true, video: { facingMode: "environment" } };

var promise = navigator.mediaDevices.getUserMedia(constraints);
promise.then(function (mediaStream) {
  video.srcObject = mediaStream;
  // Wait until dimensions/duration are known before starting playback.
  video.onloadedmetadata = function (e) {
    video.play();
  };
})
.catch(function (err) {
  console.log(err.name + ": " + err.message);
});
How can I trigger the external webcam connected?
https://webrtc.github.io/samples/src/content/devices/input-output/ is the canonical example of how to select devices, demonstrating enumerateDevices() and getUserMedia()
You should take a look at https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API
And this one https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints
// Enumerate capture devices via the (legacy) MediaStreamTrack.getSources
// API, falling back to an alert on browsers that lack it.
if (typeof MediaStreamTrack === 'undefined') {
  alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
} else {
  MediaStreamTrack.getSources(onSourcesAcquired);
}

// Receives the device list; each entry exposes an id, a label, and a kind.
function onSourcesAcquired(sources) {
  for (var i = 0; i < sources.length; i++) {
    var source = sources[i];
    // source.id    -> DEVICE ID
    // source.label -> DEVICE NAME
    // source.kind  -> "audio" OR "video"
    // TODO: add this to some datastructure of yours or a selection dialog
  }
}
....
// Select specific devices by passing their ids via the (legacy)
// "optional sourceId" constraint form, then open them with getUserMedia.
// NOTE(review): `constraints` is assigned without var/let — if it is not
// declared in the elided code above ("....") this creates an implicit
// global; confirm against the full source.
constraints = {
audio: {
optional: [{sourceId: selected_audio_source_id}]
},
video: {
optional: [{sourceId: selected_video_source_id}]
}
};
navigator.getUserMedia(constraints, onSuccessCallback, onErrorCallback);
I am using getUserMedia api to capture screen and record audio (both together) from chrome extension. The api captures the screen, records the video but doesn't capture the audio.
Chrome version: 55
Any reason why the audio is not being captured. Is there any limitation with the api or do we need to use any other api to capture video+audio?
Please find the code snippet below. (below code is present in background.js)
(I tried passing the constraints as audio: true, video: true, but this didn't work; it was throwing an error. (Error - getUserMedia() failed: [object NavigatorUserMediaError]))
// Ask the user to share a screen (with system audio), record it with
// MediaRecorder, and offer the result for download when the capture ends.
// NOTE(review): requesting an audio stream alongside chromeMediaSource is
// reported as not permitted on Chrome (see the answer below) — likely why
// no audio is captured here.
chrome.desktopCapture.chooseDesktopMedia(['screen','audio'],
function onAccessApproved(id) {
// Pin the video capture to exactly 1280x720 via min==max bounds.
const constraints = { "video": {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: id,
minWidth: 1280,
minHeight: 720,
maxWidth:1280,
maxHeight:720
}
}, "audio" : {
mandatory: {
chromeMediaSource: 'desktop',
chromeMediaSourceId: id}} };
navigator.mediaDevices.getUserMedia(constraints).then(gotMedia).catch(e => { console.error('getUserMedia() failed: ' + e);
});
// Start recording the acquired stream in 100 ms chunks; a detached <video>
// element is pointed at the stream as well.
function gotMedia(stream) {
theStream = stream;
var video = document.createElement('video');
video.src = URL.createObjectURL(stream);
video.srcObject = stream;
try {
recorder = new MediaRecorder(stream, {mimeType : "video/webm"});
} catch (e) {
console.error('Exception while creating MediaRecorder: ' + e);
return;
}
theRecorder = recorder;
recorder.ondataavailable =
(event) => { recordedChunks.push(event.data); };
recorder.start(100);
// When the user stops sharing, save what was recorded.
stream.getVideoTracks()[0].onended = function () {
download();
};
}
// Stop recording, stop every track, and trigger a browser download of the
// assembled webm file via a hidden anchor element.
function download() {
theRecorder.stop();
theStream.getTracks().forEach(track => { track.stop(); });
var blob = new Blob(recordedChunks, {type: "video/webm"});
var url = URL.createObjectURL(blob);
var a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
a.href = url;
a.download = 'test.webm';
a.click();
// setTimeout() here is needed for Firefox.
setTimeout(function() { URL.revokeObjectURL(url); }, 100);
}
According to this webRTC Experiment's page about screen sharing,
Why Screen Sharing Fails?
[...]
2. On chrome, you requested audio-stream alongwith 'chromeMediaSource' – it is not permitted on chrome.
So it is not permitted.
You could try to make a parallel request for audio only, then merge both streams with MediaStream.addTrack(), though I didn't tested it.
// Merge the separately-captured audio track into the video stream.
videoStream.addTrack(audioStream.getAudioTracks()[0])
Since this is the top Google result for "NavigatorUserMediaError mediaRecorder" I want to suggest too that if anyone else is getting that error message, on Android Chrome for example, it might have to do with security certificates. MediaRecorder cannot access a mobile device's mic or camera unless the site has an SSL certificate and the url is HTTPS.
You can request HTTPS the same way Google does in its simple.info/mr example's code:
// getUserMedia/MediaRecorder require a secure context: either HTTPS or
// localhost. If neither holds, warn the user and switch protocols.
// window.isSecureContext could be used for Chrome
var isSecureOrigin = location.protocol === 'https:' ||
location.hostname === 'localhost';
if (!isSecureOrigin) {
alert('getUserMedia() must be run from a secure origin: HTTPS or localhost.' +
'\n\nChanging protocol to HTTPS');
// NOTE(review): location.protocol is conventionally assigned lowercase
// with a colon ('https:'); confirm the uppercase 'HTTPS' form is accepted
// by the target browsers.
location.protocol = 'HTTPS';
}
But you will also need to ensure the site has an SSL certificate to reference. On HostGator this costs about $30/yr, but GitHub.io offers it for free if you're just looking to test some code. You can also supposedly run the files locally and it won't require any security certificates, but I have not tested that yet.
I've done a webcam directive that uses a service in AngularJS. I've used this example:
https://simpl.info/getusermedia/sources/
When I try the example in my tablet it works fine, but when I start my code in the tablet (Google Chrome), it gives me a couple of bugs.
Bug #1: I can't get the rear camera to work.
Bug #2: When I start the camera directive, it only shows me the first frame of the stream and then it halts. Although, when I flip to the rear camera (that doesn't work) and then flip back, it gives me the stream.
Anyone got any ideas about what I might be doing wrong? I've tried a lot of stuff.
Webcam directive:
link: function postLink($scope, element) {
var videoSources = [];
MediaStreamTrack.getSources(function(mediaSources) {
for (var i = 0; i < mediaSources.length; i++)
{
if (mediaSources[i].kind == 'video')
{
videoSources.push(mediaSources[i].id);
}
}
if (videoSources.length > 1){ $scope.$emit('multipleVideoSources'); }
initCamera(0);
});
// Elements
var videoElement = element.find('video')[0];
// Stream
function streaming(stream) {
$scope.$apply(function(){
videoElement.src = stream;
videoElement.play();
});
}
// Check ready state
function checkReadyState(){
if (videoElement.readyState == 4)
{
$interval.cancel(interval);
$scope.$emit('videoStreaming');
}
}
var interval = $interval(checkReadyState, 1000);
// Init
$scope.$on('init', function(event, stream){
streaming(stream);
});
// Switch camera
$scope.$on('switchCamera', function(event, cameraIndex){
initCamera(cameraIndex);
});
// Init via Service
function initCamera(cameraIndex)
{
var constraints = {
audio: false,
video: {
optional: [{ sourceId: videoSources[cameraIndex] }]
}
};
camera.setup(constraints, camera.onSuccess, camera.onError);
}
}
Camera service:
// AngularJS camera service: wraps getUserMedia and broadcasts an object URL
// for the acquired stream on $rootScope. (Closing of the .service() call is
// outside this excerpt.)
.service('camera', function($rootScope) {
// Setup of stream
this.init = false;
this.onError = function(error){
console.log(error);
alert('Camera error');
};
this.onSuccess = function(stream){
// Keep a global handle on the raw stream, then hand an object URL to
// whoever listens for 'init'.
window.stream = stream;
stream = window.URL.createObjectURL(stream);
$rootScope.$broadcast('init', stream);
};
// NOTE(review): setup() is invoked with three arguments by the directive,
// but only `constraint` is used — the success/error callbacks are always
// the service's own.
this.setup = function(constraint){
navigator.getMedia(constraint, this.onSuccess, this.onError);
this.init = true;
};
On my laptop this works fine, although I can only test with one video source (as it only has one).
Bug #2 solved by not calling videoStream.play() until it has a readyState of 4.
Bug #1 solved by moving window in to the directive and not using the service. Then calling the following code within initCamera function:
// If a previous stream is still around, detach it from the element and
// stop it before starting the next camera.
if (window.stream) {
  videoElement.src = null;
  window.stream.stop();
}
For Bug #1, you can use the back camera by specifying it in a constraint object when calling getUserMedia
// Request the rear camera explicitly: `exact: "environment"` makes the call
// fail (rather than silently fall back to the front camera) when no
// environment-facing camera exists.
navigator.mediaDevices.getUserMedia({
audio: true,
video: {
facingMode: { exact: "environment" }
}
})
See details here:
https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
Search for "rear camera"