HTML 5 video and audio appended to a SourceBuffer become de-synchronised - javascript

I'm using the Media Source Extensions API to capture the desktop / an application / a Chrome tab with its audio and stream that data to multiple clients through Socket.io.
I am using a MediaRecorder which routinely records the stream every 10 ms, sending each 10ms chunk to my server which relays it back to every client to be appended to a SourceBuffer which is attached to a MediaSource attached to a video tag. Both the audio and video are successfully sent, received and displayed by the video tag, however, after a short time the audio gets ahead of the video and both gradually get further apart.
// Play back the incoming stream via Media Source Extensions:
// a MediaSource is attached to the <video id="player"> element and a
// single SourceBuffer (muxed VP9 video + Opus audio in WebM) is
// created once the MediaSource reaches the "open" state.
let videoMimeType = 'video/webm;codecs="vp9,opus"';
let player = document.getElementById("player");
let mediaSource = new MediaSource();
player.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function(e) {
// Only create the SourceBuffer once, even if 'sourceopen' fires again.
// NOTE(review): `videoBuffer` is declared elsewhere in the page script.
if (videoBuffer == null) {
videoBuffer = mediaSource.addSourceBuffer(videoMimeType);
// 'sequence' mode appends chunks back-to-back and ignores the
// timestamps inside each chunk — presumably chosen to tolerate
// gaps, but with muxed audio+video it can let the two drift
// apart over time; consider the default 'segments' mode — TODO confirm.
videoBuffer.mode = 'sequence'
}
});
// Capture the desktop/window/tab (with audio) and ship it out in 10 ms
// chunks over the socket.
navigator.mediaDevices.getDisplayMedia({
video: {
width: {
max: 1280
},
height: {
max: 720
},
frameRate: 30
},
audio: {
// Disable audio processing so the captured audio is untouched.
// NOTE(review): the goog* names are legacy Chrome-only constraints
// ignored by modern browsers; echoCancellation is the only standard
// one here.
echoCancellation: false,
googEchoCancellation: false,
googAutoGainControl: false,
googAutoGainControl2: false,
googNoiseSuppression: false,
googHighpassFilter: false
}
}).then(
stream => {
// Record the captured stream directly; the original wrapped it in
// `new MediaStream(stream)`, which only produced an equivalent
// stream with the same tracks.
let videoRecorder = new MediaRecorder(stream, {
mimeType: videoMimeType
});
// Tear everything down when the user ends the screen share. Listen
// on the video track explicitly — getTracks()[0] relied on track
// ordering, which is not guaranteed.
stream.getVideoTracks()[0].addEventListener("ended", () => {
stream.getTracks().forEach(track => track.stop());
videoRecorder.stop();
playing = false;
player.src = "";
socket.emit("end");
});
// Ship each recorded chunk to the server as soon as it is ready.
videoRecorder.ondataavailable = function(e) {
if (e.data.size > 0) {
socket.emit('update', e.data);
}
};
// 10 ms timeslice: ondataavailable fires roughly every 10 ms.
videoRecorder.start(10);
},
error => {
console.log("Unable to acquire screen capture", error);
});
// Receive chunks relayed by the server and feed them to the SourceBuffer.
//
// Fix: the original handler silently DROPPED any chunk that arrived while
// the SourceBuffer was busy (`videoBuffer.updating === true`) or while the
// queue was non-empty (nothing ever pushed to `queue`). Dropped chunks
// leave holes in the appended media, which is exactly the kind of defect
// that makes audio and video drift apart. Chunks are now always queued
// and drained in arrival order on 'updateend'.
let updateEndHooked = false;
// Append the next queued chunk once the SourceBuffer is idle.
function flushQueue() {
if (videoBuffer == null || videoBuffer.updating || queue.length === 0) return;
videoBuffer.appendBuffer(queue.shift());
}
socket.on("update", (arrayBuffer) => {
if (playing) {
player.play()
queue.push(arrayBuffer);
// Hook the drain exactly once, as soon as the SourceBuffer exists.
if (videoBuffer != null && !updateEndHooked) {
videoBuffer.addEventListener("updateend", flushQueue);
updateEndHooked = true;
}
flushQueue();
}
})
I expected the video and audio to remain synchronised throughout playback but instead over time the audio get ahead of the video. The video also buffers at times, which seems to herald the de-synchronisation, though during buffering both the audio and video stop and then start again after.

Related

how do I keep playing audio file in browser

I am trying to play audio clips of audio received from the server through socket.io. The code works, but only the first audio clip received is played; any clips received afterwards are not played. I confirmed their receipt, but I don't know why it only plays the first received audio file.
// Play every audio chunk the server pushes over socket.io.
socketio.on('audio_playback_results', function (data) {
console.log("I recieved an audio playback! ",data);
playOutput(data);
});
// One shared AudioContext for all playback.
//
// Fix: the original created a brand-new AudioContext on EVERY incoming
// chunk. Browsers cap the number of concurrent AudioContexts per page
// (Chrome allows six), so after the first few clips construction/decoding
// fails — which matches the reported symptom of only the first clip
// playing. A single lazily-created context is reused instead.
let sharedAudioContext = null;
function playOutput(arrayBuffer){
let outputSource;
try {
if(arrayBuffer.byteLength > 0){
// Lazily create the context on first use, then reuse it forever.
if (sharedAudioContext === null) {
sharedAudioContext = new AudioContext();
}
let audioContext = sharedAudioContext;
console.log(arrayBuffer.byteLength);
audioContext.decodeAudioData(arrayBuffer,
function(buffer){
// Resume in case the context was suspended by autoplay policy.
audioContext.resume();
outputSource = audioContext.createBufferSource();
outputSource.connect(audioContext.destination);
outputSource.buffer = buffer;
outputSource.start(0);
},
function(){
console.log(`playoutput PLAYING OUTPUT ARGUMMENT ${arguments}`);
// console.log(arguments);
});
}
} catch(e) {
console.log(`ERORRR PLAYING OUTPUT ${e}`);
}
}
I perform a text-to-audio-buffer conversion on the server side and then stream it back to the client side.
// Server side: synthesize `text` with the TTS client and push the raw
// audio bytes to every connected client over socket.io.
// NOTE(review): this mutates the shared `requestTTS` object in place, so
// concurrent calls could race on `input` — TODO confirm requestTTS is
// safe to share, or build the request object locally.
async function textToAudioBuffer(text) {
requestTTS.input = { text: text }; // text or SSML
const response = await ttsClient.synthesizeSpeech(requestTTS);
console.log("RESPONSE # ttsClient, textToAudioBuffer() : ", response[0].audioContent)
var results = response[0].audioContent
io.emit('audio_playback_results', results);
return response[0].audioContent;
}
What is causing it not to play the audio clips received after the first one, and how can I fix this?

Intersection observer for multiple videos on the same page

I have a portfolio page made in WordPress and on the page I have 5 videos that need to be played when in viewport and stopped when out of viewport.
I have used the following script that works only on the first video on the page.
// Question code: observe a <video> element and play/pause it as it enters
// and leaves the viewport.
//
// Fix: removed the stray trailing statement `querySelector("video");` —
// `querySelector` is not defined as a bare global, so that line threw a
// ReferenceError when the script ran.
//
// NOTE(review): document.querySelector only returns the FIRST <video>, so
// this controls a single video; use querySelectorAll to handle all five
// (see the answer further down the page).
const video = document.querySelector("video");
let playState = null;
const observer = new IntersectionObserver((entries) => {
entries.forEach((entry) => {
if (!entry.isIntersecting) {
video.pause();
playState = false;
} else {
video.play();
playState = true;
}
});
}, {});
observer.observe(video);
// Pause when the tab is hidden; resume only if the video was on screen.
const onVisibilityChange = () => {
if (document.hidden || !playState) {
video.pause();
} else {
video.play();
}
};
document.addEventListener("visibilitychange", onVisibilityChange);
And here is the link to the page:
http://wemedia.co.rs/portfolio-2/
So what I want to achieve is to play and pause every video when in or out of viewport.
Thank you.
You are only selecting one video to use throughout your whole js script.
You need to remember that video represents only a single video and if you want to control multiple, you need to reference them in every function that you call.
I also don't see a purpose in using playState since the way you have it, it is basically an indicator of whether the first video is on screen or not.
// Watch every <video> on the page and toggle playback as each one enters
// or leaves the viewport.
const videos = document.querySelectorAll("video");
const observer = new IntersectionObserver((entries) => {
for (const entry of entries) {
// entry.target is the specific video whose visibility changed.
if (entry.isIntersecting) {
entry.target.play();
} else {
entry.target.pause();
}
}
}, {});
videos.forEach((video) => observer.observe(video));
// Also pause everything while the tab itself is hidden, and resume all
// videos when it becomes visible again.
const onVisibilityChange = () => {
const hidden = document.hidden;
for (const video of videos) {
if (hidden) {
video.pause();
} else {
video.play();
}
}
};
document.addEventListener("visibilitychange", onVisibilityChange);

Release webcam and microphone from getUserMedia after countdown [duplicate]

I opened a webcam by using the following JavaScript code:
const stream = await navigator.mediaDevices.getUserMedia({ /* ... */ });
Is there any JavaScript code to stop or close the webcam?
Since this answer has been originally posted the browser API has changed.
.stop() is no longer available on the stream that gets passed to the callback.
The developer will have to access the tracks that make up the stream (audio or video) and stop each of them individually.
More info here: https://developers.google.com/web/updates/2015/07/mediastream-deprecations?hl=en#stop-ended-and-active
Example (from the link above):
stream.getTracks().forEach(function(track) {
track.stop();
});
Browser support may differ.
Previously, navigator.getUserMedia provided you with a stream in the success callback, you could call .stop() on that stream to stop the recording (at least in Chrome, seems FF doesn't like it)
Use any of these functions:
// stop both mic and camera
// Stop every live track (camera and microphone) on the given stream.
function stopBothVideoAndAudio(stream) {
for (const track of stream.getTracks()) {
// Tracks that already ended are skipped.
if (track.readyState == 'live') {
track.stop();
}
}
}
// stop only camera
// Stop only the live VIDEO tracks (camera); audio keeps running.
function stopVideoOnly(stream) {
for (const track of stream.getTracks()) {
const isLiveVideo = track.readyState == 'live' && track.kind === 'video';
if (isLiveVideo) {
track.stop();
}
}
}
// stop only mic
// Stop only the live AUDIO tracks (microphone); video keeps running.
function stopAudioOnly(stream) {
for (const track of stream.getTracks()) {
const isLiveAudio = track.readyState == 'live' && track.kind === 'audio';
if (isLiveAudio) {
track.stop();
}
}
}
Don't use stream.stop(), it's deprecated
MediaStream Deprecations
Use stream.getTracks().forEach(track => track.stop())
FF, Chrome and Opera has started exposing getUserMedia via navigator.mediaDevices as standard now (Might change :)
online demo
// Modern promise-based capture: stash the stream globally so its tracks
// can be stopped later from anywhere.
navigator.mediaDevices.getUserMedia({audio:true,video:true})
.then(stream => {
window.localStream = stream;
})
.catch( (err) =>{
console.log(err);
});
// later you can do below
// stop both video and audio
// NOTE(review): the calls below are illustrative — run them only AFTER
// the promise above has resolved, or localStream will be undefined.
localStream.getTracks().forEach( (track) => {
track.stop();
});
// stop only audio
localStream.getAudioTracks()[0].stop();
// stop only video
localStream.getVideoTracks()[0].stop();
Suppose we have streaming in video tag and id is video - <video id="video"></video> then we should have following code -
// Stop the camera feeding a <video id="video"> element and detach it.
//
// Fix: `stream` and `tracks` were assigned without declarations, creating
// implicit globals; they are now properly declared.
var videoEl = document.getElementById('video');
// the stream previously assigned to the element
var stream = videoEl.srcObject;
// all of its tracks
var tracks = stream.getTracks();
// stop every track (this is what turns the webcam light off)
tracks.forEach(function(track) {
track.stop();
});
// detach the (now dead) stream from the element
videoEl.srcObject = null;
Starting Webcam Video with different browsers
For Opera 12
window.navigator.getUserMedia(param, function(stream) {
video.src =window.URL.createObjectURL(stream);
}, videoError );
For Firefox Nightly 18.0
window.navigator.mozGetUserMedia(param, function(stream) {
video.mozSrcObject = stream;
}, videoError );
For Chrome 22
window.navigator.webkitGetUserMedia(param, function(stream) {
video.src =window.webkitURL.createObjectURL(stream);
}, videoError );
Stopping Webcam Video with different browsers
For Opera 12
video.pause();
video.src=null;
For Firefox Nightly 18.0
video.pause();
video.mozSrcObject=null;
For Chrome 22
video.pause();
video.src="";
With this, the webcam light goes down every time...
Try method below:
// Capture mic + camera and graft a custom stop() helper onto the stream.
// NOTE(review): re-adding .stop() to a MediaStream recreates a method the
// spec deliberately removed — a standalone helper function (as in the
// other answers on this page) is preferable.
var mediaStream = null;
navigator.getUserMedia(
{
audio: true,
video: true
},
function (stream) {
// Keep a reference so code outside the callback can stop the capture.
mediaStream = stream;
mediaStream.stop = function () {
this.getAudioTracks().forEach(function (track) {
track.stop();
});
this.getVideoTracks().forEach(function (track) { //in case... :)
track.stop();
});
};
/*
* Rest of your code.....
* */
});
/*
* somewhere insdie your code you call
* */
// NOTE(review): this throws if it runs before the success callback has
// assigned mediaStream — guard with `if (mediaStream)`.
mediaStream.stop();
You can end the stream directly using the stream object returned in the success handler to getUserMedia. e.g.
localMediaStream.stop()
video.src="" or null would just remove the source from video tag. It wont release the hardware.
Since you need the tracks to close the stream, and you need the stream object to get to the tracks, the code I have used with the help of Muaz Khan's answer above is as follows:
// Show the captured stream in the video element and wire a close button
// that stops the capture.
//
// Fixes: the snippet was missing its closing braces (neither the `if`
// nor `stopStream` was closed), and assigning a MediaStream to
// `videoEl.src` no longer works in modern browsers — `srcObject` is the
// standard property.
if (navigator.getUserMedia) {
navigator.getUserMedia(constraints, function (stream) {
videoEl.srcObject = stream;
videoEl.play();
document.getElementById('close').addEventListener('click', function () {
stopStream(stream);
});
}, errBack);
}
// Stop all active video tracks of the given stream.
function stopStream(stream) {
console.log('stop called');
stream.getVideoTracks().forEach(function (track) {
track.stop();
});
}
Of course this will close all the active video tracks. If you have multiple, you should select accordingly.
If .stop() is deprecated, then I don't think we should re-add it like #MuazKhan does. There's a reason why things get deprecated — they should not be used anymore. Just create a helper function instead. Here is a more ES6 version:
// Helper: stop every track on the given stream.
function stopStream (stream) {
stream.getTracks().forEach((track) => track.stop());
}
You need to stop all tracks (from webcam, microphone):
localStream.getTracks().forEach(track => track.stop());
Start and Stop Web Camera,(Update 2020 React es6 )
Start Web Camera
// Start the web camera and show it in `this.video`.
//
// Fix: the arrow function was missing its opening `{`, which made the
// snippet a syntax error.
// NOTE(review): despite its name, this method STARTS the camera (a typo
// carried over from the original answer); the name is kept so existing
// callers keep working.
stopWebCamera = () => {
//Start Web Cam
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
//use WebCam
navigator.mediaDevices.getUserMedia({ video: true }).then(stream => {
this.localStream = stream;
this.video.srcObject = stream;
this.video.play();
});
}
}
Stop Web Camera or Video playback in general
// Stop video playback and release the camera: pause the element, detach
// both source properties, then stop every captured track.
stopVideo = () => {
const videoEl = this.video;
videoEl.pause();
videoEl.src = "";
videoEl.srcObject = null;
// As per the current API, each track must be stopped individually.
if (this.localStream) {
this.localStream.getTracks().forEach((track) => track.stop());
}
}
Stop Web Camera function works even with video streams:
this.video.src = this.state.videoToTest;
this.video.play();
Using .stop() on the stream works on chrome when connected via http. It does not work when using ssl (https).
Please check this: https://jsfiddle.net/wazb1jks/3/
// Store the stream globally when capture starts so it can be stopped later.
navigator.getUserMedia(mediaConstraints, function(stream) {
window.streamReference = stream;
}, onMediaError);
Stop Recording
// Stop recording: end every audio and video track on the globally stored
// stream, then drop the reference so it can be garbage-collected.
function stopStream() {
if (!window.streamReference) return;
const ref = window.streamReference;
for (const track of ref.getAudioTracks()) {
track.stop();
}
for (const track of ref.getVideoTracks()) {
track.stop();
}
window.streamReference = null;
}
The following code worked for me:
// Turn the camera off: stop every track feeding the <video> element and
// detach the stream (Angular-style component method).
//
// Fix: HTMLVideoElement has no .stop() method — the original final line
// `this.video.nativeElement.stop()` threw a TypeError at runtime;
// pause() is the correct call, made before detaching the stream.
public vidOff() {
let stream = this.video.nativeElement.srcObject;
let tracks = stream.getTracks();
tracks.forEach(function (track) {
track.stop();
});
this.video.nativeElement.pause();
this.video.nativeElement.srcObject = null;
}
Have a reference of stream form successHandle
// Keep a reference to the stream from the getUserMedia success handler so
// it can be stopped later.
var streamRef;
var handleVideo = function (stream) {
streamRef = stream;
}
//this will stop video and audio both track
// NOTE(review): only run this after handleVideo has been called — until
// then streamRef is undefined and getTracks() will throw.
streamRef.getTracks().map(function (val) {
val.stop();
});

How to do the live streaming of external webcam using webRTC on MacBook Pro

I have to create a live streaming video application in which I have to read the video using the external web camera connected to my MacBook. I have to do this using WebRTC. But while executing the code the integrated webcam get triggered instead of the external webcam.
// Show the camera selected by the constraints in #videoElement.
//
// Fix: the `constraints` object literal was missing its closing `}` and
// terminating `;`, which made the snippet a syntax error.
var video = document.querySelector("#videoElement");
var constraints = { audio: true, video: { facingMode: "environment" } };
var promise = navigator.mediaDevices.getUserMedia(constraints);
promise.then(function(mediaStream) {
video.srcObject = mediaStream;
// Start playback once the stream's dimensions are known.
video.onloadedmetadata = function(e) {
video.play();
};
})
.catch(function(err) {
console.log(err.name + ": " + err.message);
});
How can I trigger the external webcam connected?
https://webrtc.github.io/samples/src/content/devices/input-output/ is the canonical example of how to select devices, demonstrating enumerateDevices() and getUserMedia()
You should take a look at https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API
And this one https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Constraints
// Enumerate capture devices so a specific (e.g. external) camera can be
// chosen.
// NOTE(review): MediaStreamTrack.getSources() was removed from browsers
// long ago — use navigator.mediaDevices.enumerateDevices() instead.
if (typeof MediaStreamTrack === 'undefined'){
alert('This browser does not support MediaStreamTrack.\n\nTry Chrome Canary.');
} else {
MediaStreamTrack.getSources( onSourcesAcquired);
}
// Receives the device list; each source carries an id, label and kind.
function onSourcesAcquired(sources) {
for (var i = 0; i != sources.length; ++i) {
var source = sources[i];
// source.id -> DEVICE ID
// source.label -> DEVICE NAME
// source.kind = "audio" OR "video"
// TODO: add this to some datastructure of yours or a selection dialog
}
}
....
// Request the previously selected devices by id.
// NOTE(review): `optional`/`sourceId` is the legacy constraint syntax;
// modern code uses { deviceId: { exact: id } } together with
// navigator.mediaDevices.getUserMedia.
constraints = {
audio: {
optional: [{sourceId: selected_audio_source_id}]
},
video: {
optional: [{sourceId: selected_video_source_id}]
}
};
navigator.getUserMedia(constraints, onSuccessCallback, onErrorCallback);

Chrome desktopCapture API with MediaStreamRecorder library

I am trying to implement Chrome desktopCapture API with MediaStreamRecorder library. Everything works perfect but the video quality is so blurred and bad. 1 minute desktop captured video takes 14MB.
below is my code:
// Extension messaging entry point: any runtime message kicks off a
// desktop-capture recording session and acknowledges the sender.
var pending_request_id;
chrome.runtime.onMessage.addListener(function(message, sender, sendResponse) {
startRecording();
sendResponse({"success": true});
});
// Error callback for the webkitGetUserMedia call below.
function getUserMediaError() {
console.log("getUserMedia() failed.");
}
// Called by chooseDesktopMedia with the id of the surface the user picked
// (falsy if the picker was cancelled).
function onAccessApproved(id) {
if (!id) {
console.log("Access rejected.");
return;
}
// Legacy Chrome-only capture path: chromeMediaSourceId ties the stream
// to the chosen desktop surface.
navigator.webkitGetUserMedia({
audio:false,
video: { mandatory: { chromeMediaSource: "desktop",
chromeMediaSourceId: id } }
}, onMediaSuccess, getUserMediaError);
}
// Show the desktop-capture picker, restricted to application windows.
function startRecording() {
pending_request_id = chrome.desktopCapture.chooseDesktopMedia(
["window"], onAccessApproved);
}
// Record the captured desktop stream with MediaStreamRecorder and log a
// blob URL plus a size summary for each 30-second chunk.
function onMediaSuccess(stream) {
console.log("rcvd stream");
var mediaRecorder = new MediaStreamRecorder(stream);
// NOTE(review): whether 'video/mp4' is honoured depends on the
// MediaStreamRecorder build; WebM is the usual output — confirm.
mediaRecorder.mimeType = 'video/mp4';
//i dont want strechy video so i fixed the width and height of recorder equal to window
mediaRecorder.width = window.screen.width;
mediaRecorder.height = window.screen.height;
mediaRecorder.ondataavailable = function (blob) {
var blobURL = URL.createObjectURL(blob);
console.log('' + blobURL + '');
var link=blobURL;
var videoInfo="Compiled Video file size: " + Math.ceil(blob.size / 1024) + "KB";
console.log(link);
console.log(videoInfo);
};
mediaRecorder.start(30000); // i want unlimited recording time so i increased the timeslice
// NOTE(review): stream.onended is deprecated — prefer listening for
// 'ended' on the tracks or 'inactive' on the stream.
stream.onended = function() {
mediaRecorder.stop();
//finalizeVideo();
console.log("Ended"); };
}
// Generic error callback shared by the capture calls above.
function onMediaError(e) {
console.error('media error', e);
}
Before using this library i tried to save streaming video using Whammy.js. but i failed to do so. then i found this library.
Questions :
Is there any way to increase quality of the video and as well as compress video size too?
How to save the video which return as blob:chrome url to desktop as fully qualified video?
As an alternative, If anyone knows how to do this in Whammy.js then kindly let me know
Thanks,
This might help to make your video quality better -
// Suggested higher-quality capture: raise the maximum resolution so the
// desktop stream is not downscaled.
//
// Fix: removed the stray trailing `}` that left the snippet with an
// unbalanced brace.
navigator.webkitGetUserMedia({
audio:false,
video: { mandatory: { chromeMediaSource: "desktop",
chromeMediaSourceId: id,
maxWidth: 4000,
maxHeight: 4000 } }
}, onMediaSuccess, getUserMediaError);

Categories

Resources