Is window.localAudio a thing? - javascript

I found Mozilla's Getting browser microphone permission. It defines a function to request permission and listen to the client's microphone, as follows:
function getLocalStream() {
    navigator.mediaDevices.getUserMedia({ video: false, audio: true }).then(stream => {
        window.localStream = stream; // A
        window.localAudio.srcObject = stream; // B
        window.localAudio.autoplay = true; // C
    }).catch(err => {
        console.log("u got an error:" + err);
    });
}
I checked in Chrome, Firefox and Safari - all of them throw an error about window.localAudio being undefined. Where did this tutorial get it from? Was window.localAudio ever a thing? What was it supposed to do?

First, the direct answer: window.localAudio was never a standard API. Elements with an id are exposed as properties of window (named access on the Window object), so the tutorial evidently assumed the page contains an <audio id="localAudio"> element; without that element, window.localAudio is undefined. That said, I will try to give you something more useful than a bare answer.
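A minimal sketch of that assumption — adding this hypothetical element makes the tutorial's original snippet run as written:

<!-- markup the tutorial presumably relied on -->
<audio id="localAudio"></audio>
<script>
    // Named access: the element is reachable as window.localAudio.
    console.log(window.localAudio instanceof HTMLAudioElement); // true
</script>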
As a more robust alternative, the function below creates the element if it is not present, and there are a few options available. In the example I'm adding the newly created audio element to the body, but it will work even if it is not added - it's a matter of choice.
<html>
<head>
<script>
var el;

function attachStream(stream, el, options) {
    var item;
    var URL = window.URL;
    var element = el;
    var opts = {
        autoplay: true,
        mirror: false,
        muted: false,
        audio: false,
        disableContextMenu: false
    };
    if (options) {
        for (item in options) {
            opts[item] = options[item];
        }
    }
    if (!element) {
        element = document.createElement(opts.audio ? 'audio' : 'video');
    } else if (element.tagName.toLowerCase() === 'audio') {
        opts.audio = true;
    }
    if (opts.autoplay) element.autoplay = 'autoplay';
    if (opts.muted) element.muted = true;
    if (!opts.audio && opts.mirror) {
        ['', 'moz', 'webkit', 'o', 'ms'].forEach(function (prefix) {
            var styleName = prefix ? prefix + 'Transform' : 'transform';
            element.style[styleName] = 'scaleX(-1)';
        });
    }
    element.srcObject = stream;
    return element;
}

function getLocalStream() {
    navigator.mediaDevices.getUserMedia({
        video: false,
        audio: true
    }).then(
        stream => {
            var doesnotexist = !el;
            el = attachStream(stream, el, {
                audio: true,
                autoplay: true
            });
            if (doesnotexist) document.body.appendChild(el);
        }
    ).catch(err => {
        console.log("u got an error:" + err);
    });
}

window.addEventListener('DOMContentLoaded', (event) => {
    getLocalStream();
});
</script>
</head>
<body>
</body>
</html>

Related

track.stop is not turning the camera off anymore

I have a webpage where I want the user to take a picture with their laptop/phone camera.
Once they click a button, a modal is shown and the following JS starts the camera stream so the picture can be taken:
function startStreaming() {
    if (null != cameraStream) {
        var track = cameraStream.getTracks()[0];
        track.stop();
        stream.load();
        cameraStream = null;
    }
    //const audioSource = audioInputSelect.value;
    const videoSource = videoSelect.value;
    const constraints = {
        //audio: {deviceId: audioSource ? {exact: audioSource} : undefined},
        video: {
            deviceId: videoSource ? {
                exact: videoSource
            } : undefined
        }
    };
    navigator.mediaDevices.getUserMedia(constraints).then(gotStream).then(gotDevices).catch(handleError);
    var mediaSupport = 'mediaDevices' in navigator;
    if (mediaSupport && null == cameraStream) {
        const videoSource = videoSelect.value;
        const constraints = {
            video: {
                deviceId: videoSource ? {
                    exact: videoSource
                } : undefined
            }
        };
        navigator.mediaDevices.getUserMedia(constraints)
            .then(function (mediaStream) {
                cameraStream = mediaStream;
                stream.srcObject = mediaStream;
                stream.play();
            })
            .catch(handleError);
    } else {
        alert('Your browser does not support media devices.');
        return;
    }
}
This is triggered by
$('#photoStudio').on('show.bs.modal', function (event) {
    navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
    startStreaming();
});
Then, when I close the modal, I want to stop the streaming, but the LED indicator next to my camera stays on:
$('#photoStudio').on('hide.bs.modal', function (event) {
    stopStreaming();
});
where stopStreaming() is:
function stopStreaming() {
    if (null != cameraStream) {
        var track = cameraStream.getTracks()[0];
        track.stop();
        stream.load();
        cameraStream = null;
    }
}
I don't get any kind of error and I cannot find a way to debug why the camera is still running. Am I missing anything in the stopStreaming function?
If any track has not been stopped, your camera will remain active. In your stopStreaming function you only stop the first track of the returned array.
If you instead iterate over all the tracks, you will catch the ones you are currently missing:
function stopStreaming() {
    if (null != cameraStream) {
        var tracks = cameraStream.getTracks();
        // stop all the tracks, not just the first
        tracks.forEach((track) => {
            track.stop();
        });
        stream.load();
        cameraStream = null;
    }
}
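Note that stream in this snippet is the <video> element the camera was attached to (a slightly confusing variable name). Clearing its srcObject as well ensures the element itself releases its reference to the camera stream — a small additional sketch:

stream.srcObject = null; // detach the MediaStream from the <video> element
stream.load();           // reset the element to its initial state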
Another posted variant stops and additionally disables every track before releasing the stream:

this.camera_stream.getTracks().forEach((track) => {
    console.log(track);
    track.stop();
    track.enabled = false;
});
video.load();
this.camera_stream = null;

WebRTC fails to play in Chrome & Edge but plays in Firefox

I have a very simple code for video calling using WebRTC. The system behaves differently in different browsers:
Capture Browser   Player Browser   Working
Chrome            Firefox          ✔
Chrome            Chrome           X
Firefox           Chrome           X
Firefox           Firefox          ✔
The capture code is
JS:
(function () {
    var localVideo, localConnection;
    const signaling = new WebSocket('wss://crs4kx11s1/websockets');
    signaling.onmessage = function (message) {
        var data = JSON.parse(message.data);
        if (data.sdp) {
            var answerSDP = data.sdp;
            if (answerSDP.type == "answer") {
                localConnection.setRemoteDescription(answerSDP);
            }
        }
        if (data.candidate && data.candidateType == "answerClient") {
            localConnection.addIceCandidate(data.candidate);
        }
    }
    localConnection = new RTCPeerConnection({
        iceServers: [{
            urls: 'turn:127.0.0.1:8043?transport=tcp',
            credential: 'jupiter',
            username: 'simpleshare'
        }]
    });
    document.addEventListener("DOMContentLoaded", function (event) {
        $("#share").click(function (event) {
            navigator.mediaDevices.getUserMedia({ video: true })
                .then(function (stream) {
                    stream.getTracks().forEach(
                        function (track) {
                            localConnection.addTrack(
                                track,
                                stream
                            );
                        }
                    );
                    localVideo = document.getElementById('local');
                    localVideo.srcObject = stream;
                    localConnection.onnegotiationneeded = function () {
                        localConnection.createOffer()
                            .then(offer => {
                                localConnection.setLocalDescription(offer)
                                    .then(() => {
                                        signaling.send(JSON.stringify({ sdp: offer }));
                                    })
                            });
                    }
                    localConnection.onicecandidate = function (e) {
                        if (e.candidate) {
                            signaling.send(JSON.stringify({
                                candidateType: 'offerClient',
                                candidate: e.candidate.toJSON()
                            }));
                        }
                        console.log('offerClient is on icecandidate');
                    };
                });
        });
    });
})();
HTML
<div>
    <button id="share">Share</button>
    <video id="local" autoplay></video>
</div>
Now the player code
JS
(function () {
    var localVideo, localConnection;
    const signaling = new WebSocket('wss://crs4kx11s1/websockets');
    signaling.onmessage = function (message) {
        const data = JSON.parse(message.data);
        // const content = data.content;
        try {
            if (data.sdp) {
                let offerSDP = data.sdp;
                if (offerSDP.type == "offer") {
                    console.log("Accepting the offer.")
                    localConnection.setRemoteDescription(offerSDP);
                    localConnection.createAnswer().then(function (answer) {
                        console.log("Answer created!")
                        localConnection.setLocalDescription(answer);
                        signaling.send(JSON.stringify({ sdp: answer }));
                    });
                }
            }
            if (data.candidate && data.candidateType == "offerClient") {
                console.log("ICE candidate added!");
                localConnection.addIceCandidate(data.candidate);
            }
        } catch (err) {
            console.error(err);
        }
    };
    document.addEventListener("DOMContentLoaded", function (event) {
        startConnection();
        localVideo = document.getElementById('self-view');
    });
    function startConnection() {
        console.info("Starting connection");
        localConnection = new RTCPeerConnection({
            iceServers: [{
                urls: 'turn:127.0.0.1:8043?transport=tcp',
                credential: 'jupiter',
                username: 'simpleshare'
            }]
        });
        //startCapture();
        localConnection.onicecandidate = function (e) {
            console.info("onicecandidate", e);
            if (e.candidate) {
                signaling.send(JSON.stringify({
                    candidateType: 'answerClient',
                    candidate: e.candidate.toJSON()
                }));
            }
            console.log('answerClient is on icecandidate');
        };
        localConnection.onconnectionstatechange = function (e) {
            console.log("Current state", localConnection.connectionState);
        }
        localConnection.ontrack = function (e) {
            localVideo.srcObject = e.streams[0];
        }
    }
})();
HTML
<div id="chat-room">
<div id="videos">
<video id="self-view" autoplay></video>
</div>
</div>
Apart from these, there is a WebSocket server which relays the SDP offers and candidates.
Please note that I have used our own TURN server for this.
Got it working. It was because of the new autoplay policy in Chrome. Just adding localVideo.play(); fixed it.
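For reference, a slightly more defensive sketch of that fix in the player's ontrack handler (assuming localVideo is the element from the code above): play() returns a promise, and if Chrome rejects unmuted autoplay, muted playback is always allowed as a fallback:

localConnection.ontrack = function (e) {
    localVideo.srcObject = e.streams[0];
    localVideo.play().catch(function () {
        // Chrome blocks unmuted autoplay without a user gesture,
        // but muted autoplay is always permitted, so retry muted.
        localVideo.muted = true;
        localVideo.play();
    });
};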

Web RTC between two different web clients not working

As per our WebRTC requirements, there are two different clients:
Player (plays the screen shared by the capture client)
Capture (shares the screen)
The two web clients communicate and exchange offers and ICE candidates using WebSocket.
In Chrome [Version 84.0.4147.105 (Official Build) (64-bit)]:
There is no error in the Player or Capture JavaScript console in Chrome.
But if we check chrome://webrtc-internals/ we can see the event and transmission graphs for Player and Capture (screenshots omitted).
There I can see the video stream is being transmitted but not playing on the player end, and an ICE candidate error shows up in the events log. Is that the reason the video stream is not working on the player end?
Firefox (v79.0):
It shows an error in the console:
DOMException: No remoteDescription.
In player.js, line 33.
Any idea why the two browsers show different errors?
Player.js
(function () {
    var localVideo, remoteVideo, localConnection, remoteConnection;
    const MESSAGE_TYPE = {
        SDP: 'SDP',
        CANDIDATE_LOCAL: 'LOCAL_CANDIDATE',
        CANDIDATE_REMOTE: 'REMOTE_CANDIDATE'
    };
    const signaling = new WebSocket('ws://127.0.0.1:1337');
    var configuration = {
        offerToReceiveAudio: true,
        offerToReceiveVideo: true
    }
    remoteConnection = new RTCPeerConnection({ configuration: configuration, iceServers: [{ urls: 'stun:aalimoshaver.com:3478' }] });
    remoteConnection.onicecandidate = function (e) {
        !e.candidate
            || signaling.send(JSON.stringify({ message_type: MESSAGE_TYPE.CANDIDATE_REMOTE, content: e.candidate.toJSON() }));
    }
    remoteConnection.ontrack = function (e) {
        const remoteVideo = document.getElementById('remote-view');
        if (!remoteVideo.srcObject) {
            remoteVideo.srcObject = e.streams[0];
        }
    };
    signaling.onmessage = function (message) {
        const data = JSON.parse(message.data);
        const message_type = data.message_type;
        const content = data.content;
        try {
            if (message_type === MESSAGE_TYPE.CANDIDATE_LOCAL && content) {
                remoteConnection.addIceCandidate(content)
                    .catch(function (e) {
                        console.error(e)
                    });
            } else if (message_type === MESSAGE_TYPE.SDP && content) {
                if (content.type === 'offer') {
                    remoteConnection.setRemoteDescription(content);
                    remoteConnection.createAnswer()
                        .then(function (answer) {
                            remoteConnection.setLocalDescription(answer);
                            signaling.send(JSON.stringify({
                                message_type: MESSAGE_TYPE.SDP,
                                content: answer
                            }));
                        });
                } else {
                    console.log('Unsupported SDP type.');
                }
            }
        } catch (err) {
            console.error(err);
        }
    };
})()
Capture.js
/**
 * Created by Sowvik Roy on 30-07-2020.
 */
(function () {
    var localVideo, remoteVideo, localConnection, remoteConnection;
    const MESSAGE_TYPE = {
        SDP_LOCAL: 'SDP',
        CANDIDATE_LOCAL: 'LOCAL_CANDIDATE',
        CANDIDATE_REMOTE: 'REMOTE_CANDIDATE'
    };
    var configuration = {
        offerToReceiveAudio: true,
        offerToReceiveVideo: true
    };
    const signaling = new WebSocket('ws://127.0.0.1:1337');
    signaling.onmessage = function (message) {
        const data = JSON.parse(message.data);
        const message_type = data.message_type;
        const content = data.content;
        try {
            if (message_type === MESSAGE_TYPE.CANDIDATE_REMOTE && content) {
                localConnection.addIceCandidate(content)
                    .catch(function (e) {
                        console.error(e)
                    });
            } else if (message_type === MESSAGE_TYPE.SDP_LOCAL) {
                if (content.type === 'answer') {
                    localConnection.setRemoteDescription(content);
                } else {
                    console.log('Unsupported SDP type.');
                }
            }
        } catch (err) {
            console.error(err);
        }
    };
    document.addEventListener('click', function (event) {
        if (event.target.id === 'start') {
            startChat();
            localVideo = document.getElementById('self-view');
            remoteVideo = document.getElementById('remote-view');
        }
    });
    function startConnection() {
        localConnection = new RTCPeerConnection({ configuration: configuration, iceServers: [{ urls: 'stun:aalimoshaver.com:3478' }] });
        localConnection.onicecandidate = function (e) {
            !e.candidate
                || signaling.send(JSON.stringify({ message_type: MESSAGE_TYPE.CANDIDATE_LOCAL, content: e.candidate.toJSON() }));
        };
        localConnection.createOffer()
            .then(function (offer) {
                if (offer) {
                    localConnection.setLocalDescription(offer);
                    signaling.send(JSON.stringify({ message_type: MESSAGE_TYPE.SDP_LOCAL, content: localConnection.localDescription }));
                    if (navigator.getDisplayMedia) {
                        navigator.getDisplayMedia({ video: true }).then(onCaptureSuccess);
                    } else if (navigator.mediaDevices.getDisplayMedia) {
                        navigator.mediaDevices.getDisplayMedia({ video: true }).then(onCaptureSuccess);
                    } else {
                        navigator.mediaDevices.getUserMedia({ video: { mediaSource: 'screen' } }).then(onCaptureSuccess);
                    }
                } else {
                    console.error("RTC offer is null");
                }
            })
            .catch(function (e) {
                console.error(e)
            });
    }
    function onCaptureSuccess(stream) {
        localVideo.srcObject = stream;
        stream.getTracks().forEach(
            function (track) {
                localConnection.addTrack(
                    track,
                    stream
                );
            }
        );
    }
    function startChat() {
        if (navigator.getDisplayMedia) {
            navigator.getDisplayMedia({ video: true }).then(onMediaSuccess);
        } else if (navigator.mediaDevices.getDisplayMedia) {
            navigator.mediaDevices.getDisplayMedia({ video: true }).then(onMediaSuccess);
        } else {
            navigator.mediaDevices.getUserMedia({ video: { mediaSource: 'screen' } }).then(onMediaSuccess);
        }
    }
    function onMediaSuccess(stream) {
        localVideo.srcObject = stream;
        // Set up the ICE candidates for the two peers
        localConnection = new RTCPeerConnection({ configuration: configuration, iceServers: [{ urls: 'stun:stun.xten.com:19302' }] });
        localConnection.onicecandidate = function (e) {
            !e.candidate
                || signaling.send(JSON.stringify({ message_type: MESSAGE_TYPE.CANDIDATE_LOCAL, content: e.candidate.toJSON() }));
        };
        stream.getTracks().forEach(
            function (track) {
                localConnection.addTrack(
                    track,
                    stream
                );
            }
        );
        localConnection.createOffer()
            .then(function (offer) {
                if (offer) {
                    localConnection.setLocalDescription(offer);
                    signaling.send(JSON.stringify({ message_type: MESSAGE_TYPE.SDP_LOCAL, content: localConnection.localDescription }));
                } else {
                    console.error("RTC offer is null");
                }
            })
            .catch(function (e) {
                console.error(e)
            });
    }
})();
Can anybody explain or identify a loophole in the code? Please let me know if you need additional info.
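One probable cause worth checking: offerToReceiveAudio / offerToReceiveVideo are createOffer options, not RTCPeerConnection configuration, so nesting them under a configuration key is silently ignored; in startConnection the offer is also created before any track is added (the screen capture only starts inside the createOffer callback); and in Player.js, createAnswer and addIceCandidate can run before setRemoteDescription has resolved — which is exactly what Firefox's "No remoteDescription" DOMException points at. A minimal sketch of the usual ordering, assuming the same signaling socket and MESSAGE_TYPE constants as in the code above:

// Capture side: add tracks first, then negotiate, chaining the promises.
navigator.mediaDevices.getDisplayMedia({ video: true })
    .then(function (stream) {
        localVideo.srcObject = stream;
        stream.getTracks().forEach(function (track) {
            localConnection.addTrack(track, stream);
        });
        return localConnection.createOffer();
    })
    .then(function (offer) {
        return localConnection.setLocalDescription(offer);
    })
    .then(function () {
        signaling.send(JSON.stringify({
            message_type: MESSAGE_TYPE.SDP_LOCAL,
            content: localConnection.localDescription
        }));
    })
    .catch(console.error);

// Player side: wait for setRemoteDescription before answering.
remoteConnection.setRemoteDescription(content)
    .then(function () { return remoteConnection.createAnswer(); })
    .then(function (answer) { return remoteConnection.setLocalDescription(answer); })
    .then(function () {
        signaling.send(JSON.stringify({
            message_type: MESSAGE_TYPE.SDP,
            content: remoteConnection.localDescription
        }));
    })
    .catch(console.error);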

javascript AudioNodes can't change sample-rate

I can't figure out where this error comes from, or what I missed.
Here is my code:
function mediaDeviceInit(deviceId) {
    // quick shim; see the W3C spec for audio
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
    console.log('IpCodec : Get user permissions for Media Access.');
    let audioConstraints = {};
    // check for default value
    if (deviceId) {
        audioConstraints = {
            audio: { deviceId: deviceId, echoCancellation: false, sampleRate: defaultSampleRate }, video: false
        };
    } else {
        audioConstraints = { audio: { echoCancellation: false, sampleRate: defaultSampleRate }, video: false };
    }
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia(audioConstraints)
            .then(function (stream) {
                //console.log(navigator.mediaDevices.getSupportedConstraints());
                userMediaSuccess(stream);
            })
            .catch(function (error) {
                userMediaError(error);
            });
    } else {
        console.log('IpCodec : Browser unsupported for getUserMedia.')
    }
    // enumerate all audio input devices
    function enumAudioInput() {
        // some code
    }
    // callback on success
    function userMediaSuccess(stream) {
        let audioSrc = audioMixer.audioContext.createMediaStreamSource(stream); // --> error here
        // some init code
        console.log('IpCodec : Media permission granted by user.');
        if (!deviceId) {
            enumAudioInput();
        }
    }
    // callback on error
    function userMediaError(error) {
        console.log('IpCodec' + error);
    }
}
It fails with an error like:
Connecting AudioNodes from AudioContexts with different sample-rate is currently not supported.
And this is the AudioMixer class that owns the AudioContext:
class AudioMixer {
    constructor(type, sRate, latency) {
        this.audioContext;
        // handle browser compatibility prefixes
        window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
        console.log('IpCodec : Initialize audio mixer success.');
        if (window.AudioContext) {
            this.audioContext = new window.AudioContext({ sampleRate: sRate, latencyHint: latency });
            //console.log(this.audioContext);
        } else {}
    }
}
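For what it's worth, this error usually means the sample rate of the getUserMedia stream (fixed by the audio hardware, e.g. 44100 or 48000 Hz) differs from the sampleRate forced on the AudioContext; Chrome cannot connect a MediaStreamSource across different rates. One common workaround, sketched here under that assumption, is to stop forcing sampleRate so the context adopts the hardware's native rate:

class AudioMixer {
    constructor(type, latency) {
        window.AudioContext = window.AudioContext || window.webkitAudioContext;
        // Omitting sampleRate makes the context run at the device's
        // native rate, which is also the rate of getUserMedia streams,
        // so createMediaStreamSource no longer sees a mismatch.
        this.audioContext = new window.AudioContext({ latencyHint: latency });
    }
}

If a specific output rate is required, resample explicitly instead (for example by rendering through an OfflineAudioContext constructed with the target sampleRate) rather than forcing the rate on the live context.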

Recorded video can't be forwarded/backwarded, Using MediaRecorder API chrome extension

I am implementing a Chrome extension which records the screen with the microphone. After recording it generates the recorded video perfectly, but the problem is that the recorded video cannot be seeked (forwarded/rewound, or started from an arbitrary point) the first time.
After it has played through once it works perfectly, but when I download the video the problem is the same. I took help from Muaz Khan's plugin.
I am using this code to start recording:
function gotStream(stream) {
    if (cameraStream && cameraStream.getAudioTracks().length) {
        cameraStream.getAudioTracks().forEach(function (track) {
            // cameraStream.removeTrack(track);
            stream.addTrack(track);
        });
    }
    if (typeof MediaRecorder.isTypeSupported == 'function') {
        /*
            MediaRecorder.isTypeSupported is a function announced in
            https://developers.google.com/web/updates/2016/01/mediarecorder
            and later introduced in the MediaRecorder API spec
            http://www.w3.org/TR/mediastream-recording/
        */
        if (MediaRecorder.isTypeSupported('video/mp4;codecs=h264')) {
            var options = {
                type: 'video',
                mimeType: 'video/mp4;codecs=h264'
            };
        } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp9')) {
            var options = {
                type: 'video',
                mimeType: 'video/webm;codecs=vp9'
            };
        } else if (MediaRecorder.isTypeSupported('video/webm;codecs=vp8')) {
            var options = {
                mimeType: 'video/webm;codecs=vp8'
            };
        }
        console.log('Using ' + options.mimeType);
        recorder = new RecordRTC(stream, options);
    } else {
        console.log('Using default codecs for browser');
        recorder = new MediaRecorder(stream);
    }
    recorder.streams = [stream];
    recorder.start(10);
    recorder.ondataavailable = function (e) {
        chunks.push(e.data);
    };
    recorder.onerror = function (e) {
        console.log('Error: ', e);
    };
    recorder.onstart = function () {
        isRecording = true;
        onRecording();
        console.log('Started & state = ' + recorder.state);
    };
    recorder.onpause = function () {
        console.log('Paused & state = ' + recorder.state);
    }
    recorder.onresume = function () {
        console.log('Resumed & state = ' + recorder.state);
    }
    recorder.onwarning = function (e) {
        console.log('Warning: ' + e);
    };
    recorder.onstop = function () {
        stopScreenRecording();
    }
    stream.onended = function () {
        if (stream) {
            stream.onended = null;
        }
        recorder.stop();
    };
    if (stream.getVideoTracks().length) {
        stream.getVideoTracks().forEach(function (track) {
            track.onended = function () {
                if (!recorder) return;
                if (!stream || typeof stream.onended !== 'function') return;
                stream.onended();
            };
        });
    }
}
and this to stop recording:
function stopScreenRecording(blob) {
    isRecording = false;
    var blob = new Blob(chunks, {
        type: "video/mp4"
    });
    chunks = [];
    var file = new File([blob ? blob : ''], getFileName(fileExtension), {
        type: mimeType
    });
    DiskStorage.Store({
        key: 'latest-file',
        value: file
    }, function (success) {
        if (success) {
            chrome.browserAction.setPopup({
                popup: "popup.html"
            });
            chrome.tabs.create({
                url: 'preview.html'
            });
        }
    });
    setTimeout(function () {
        setDefaults();
        // chrome.runtime.reload();
    }, 1000);
    try {
        videoPlayers.forEach(function (player) {
            player.src = null;
        });
        videoPlayers = [];
    } catch (e) {}
    // for dropdown.js
    chrome.storage.sync.set({
        isRecording: 'false' // FALSE
    });
    if (timer) {
        clearTimeout(timer);
    }
    setBadgeText('');
}
I saw another screen recorder whose video blob is like:
filesystem:chrome-extension://mmeijimgabbpbgpdklnllpncmdofkcpn/persistent/e6ad7ba1-6afe-4d45-y6f5-47e08a87e036.webm
and our video blob is like:
blob:chrome-extension://hgpenkfjeddjngnojmcmgbclkoakihhg/af3dcfa6-b990-464b-9726-e8b6022762a2
How can I get this type of blob?
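The URL scheme is most likely not the issue (a filesystem: URL simply means the other extension saved its recording via Chrome's FileSystem API). WebM written by MediaRecorder carries no duration or cue metadata in its header, so players cannot seek until the file has been scanned once — which matches the symptom of seeking only working after a full first playback. A commonly used client-side workaround (a sketch; libraries such as ts-ebml can instead inject proper cues into the file itself) is to force the browser to compute the duration before showing the preview:

function makeSeekable(videoElement, blob) {
    videoElement.src = URL.createObjectURL(blob);
    videoElement.addEventListener('loadedmetadata', function onMeta() {
        videoElement.removeEventListener('loadedmetadata', onMeta);
        if (videoElement.duration === Infinity) {
            // Seek far past the end: the browser scans the file,
            // fixes up the duration, and seeking starts to work.
            videoElement.currentTime = Number.MAX_SAFE_INTEGER;
            videoElement.ontimeupdate = function () {
                videoElement.ontimeupdate = null;
                videoElement.currentTime = 0;
            };
        }
    });
}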
