Microphone not changing after setting new constraints - javascript

We are building an application in Electron and the microphone is not changing after setting a new deviceId. There is no error being shown.
//myId is an id retrieved from navigator.mediaDevices.enumerateDevices();
const constraints = {audio: {deviceId: {exact: myId}}};
navigator.mediaDevices.getUserMedia(constraints);
What are we doing wrong? When we test it on an external website, it looks like it is working correctly.
All our searching turns up nothing useful, or only answers that are very outdated. Electron is a Chromium browser. We have been stuck on this problem for more than a week. It doesn't work on Ubuntu or Windows 11 (the only OSes we have tested so far). We have not yet tried macOS.
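One thing worth checking, sketched below under the assumption that there is an <audio> element on the page and that myId is valid (neither is shown in the question): the promise returned by getUserMedia resolves with a new stream, and that stream still has to be used (played, recorded, or its track swapped into an existing stream) before the device change has any audible effect.
// Sketch only; the element selector and error handling are assumptions, not the asker's code.
const constraints = { audio: { deviceId: { exact: myId } } };
navigator.mediaDevices.getUserMedia(constraints)
  .then((stream) => {
    // The new stream must actually be consumed for the new microphone to be heard.
    document.querySelector("audio").srcObject = stream;
  })
  .catch((err) => console.error("getUserMedia failed:", err));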

Make sure myId is not empty: if you call navigator.mediaDevices.enumerateDevices() before permission has been granted, it can return devices with empty deviceId strings.
For me, const constraints = {audio: {deviceId: {exact: myId}}}; returned an error because of exact. What worked for me was const constraints = {audio: {deviceId: myId}};
You're missing one '}' at the end of const constraints = {audio: {deviceId: {exact: myId}};
If you switch devices you have to get new tracks from a new stream and add them to the old stream.
Here is working code that changes the microphone depending on which button you click.
script.js
let mediaStream;
let mic1Id, mic2Id;

navigator.mediaDevices.enumerateDevices().then((res) => {
  // Note: indices 2 and 3 just happen to be two microphones on this machine;
  // in real code pick devices by kind === "audioinput" and by label.
  mic1Id = res[2].deviceId;
  mic2Id = res[3].deviceId;
  const constraints = {
    audio: {
      deviceId: { exact: mic1Id },
    },
  };
  navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
    mediaStream = stream;
    var audio = document.querySelector("audio");
    stream.getAudioTracks()[0].enabled = true;
    audio.srcObject = stream;
  });
});

// Open a stream on the requested device and return its audio tracks.
function getAudioTrack(deviceId) {
  const constraints = {
    audio: {
      deviceId: {
        exact: deviceId,
      },
    },
  };
  return navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
    return stream.getAudioTracks();
  });
}

// Swap the audio track of the existing stream for the freshly acquired one.
function replaceAudioTrack(stream, newAudioTrack) {
  let audioTrack = stream.getAudioTracks()[0];
  stream.removeTrack(audioTrack);
  stream.addTrack(newAudioTrack);
}

let btn1 = document.querySelector("button:nth-of-type(1)");
let btn2 = document.querySelector("button:nth-of-type(2)");

btn1.onclick = () => {
  getAudioTrack(mic1Id).then((res) => replaceAudioTrack(mediaStream, res[0]));
};
btn2.onclick = () => {
  getAudioTrack(mic2Id).then((res) => replaceAudioTrack(mediaStream, res[0]));
};
index.html
<audio autoplay></audio>
<button>Mic 1</button>
<button>Mic 2</button>

Related

track.stop is not turning the camera off anymore

I have a webpage where I want the user to take a picture with their laptop/phone camera.
Once they click a button, a modal is shown and the following JS starts the camera stream so they can take the picture:
function startStreaming() {
  // Note: "stream" here refers to the <video> element, "cameraStream" to the MediaStream.
  if (null != cameraStream) {
    var track = cameraStream.getTracks()[0];
    track.stop();
    stream.load();
    cameraStream = null;
  }
  //const audioSource = audioInputSelect.value;
  const videoSource = videoSelect.value;
  const constraints = {
    //audio: {deviceId: audioSource ? {exact: audioSource} : undefined},
    video: {
      deviceId: videoSource ? {
        exact: videoSource
      } : undefined
    }
  };
  navigator.mediaDevices.getUserMedia(constraints).then(gotStream).then(gotDevices).catch(handleError);
  var mediaSupport = 'mediaDevices' in navigator;
  if (mediaSupport && null == cameraStream) {
    const videoSource = videoSelect.value;
    const constraints = {
      video: {
        deviceId: videoSource ? {
          exact: videoSource
        } : undefined
      }
    };
    navigator.mediaDevices.getUserMedia(constraints)
      .then(function (mediaStream) {
        cameraStream = mediaStream;
        stream.srcObject = mediaStream;
        stream.play();
      })
      .catch(handleError);
  } else {
    alert('Your browser does not support media devices.');
    return;
  }
}
This is triggered by
$('#photoStudio').on('show.bs.modal', function (event) {
  navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
  startStreaming();
});
Then when I close the modal I want to stop the streaming, but the LED indicator next to my camera stays on:
$('#photoStudio').on('hide.bs.modal', function (event) {
  stopStreaming();
});
where stopStreaming() is:
function stopStreaming() {
  if (null != cameraStream) {
    var track = cameraStream.getTracks()[0];
    track.stop();
    stream.load();
    cameraStream = null;
  }
}
I don't get any kind of error and I cannot find a way to debug why the camera is still running. Am I missing anything in the stopStreaming function?
If any track has not been stopped, your camera will still be active. In your stopStreaming function you only stop the first track in the returned array.
If you instead iterate through all the tracks, you will catch the ones you aren't currently stopping:
function stopStreaming() {
  if (null != cameraStream) {
    var tracks = cameraStream.getTracks();
    // stop all the tracks, not just the first
    tracks.forEach((track) => {
      track.stop();
    });
    stream.load();
    cameraStream = null;
  }
}
this.camera_stream.getTracks().forEach((track) => {
  console.log(track);
  track.stop();
  track.enabled = false;
});
video.load();
this.camera_stream = null;

Using camera flashlight not allowing to change facingmode - Navigator.mediaDevices

I'm trying to build a web app which takes photos using the webcam or mobile camera, depending on the device. I have already made a button which changes constraints.facingMode so the user can use both cameras ("environment", "user") if the device supports it. The problem is that when I also enable flashlight support, by creating a button and setting it up as the flashlight toggler like this:
const SUPPORTS_MEDIA_DEVICES = 'mediaDevices' in navigator;
if (SUPPORTS_MEDIA_DEVICES) {
  const track = stream.getVideoTracks()[0];
  const imageCapture = new ImageCapture(track);
  const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
    const btn = document.querySelector('.toggleCameraTorch');
    btn.style.visibility = 'visible';
    btn.addEventListener('click', function () {
      try {
        track.applyConstraints({
          advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
        });
        wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
      }
      catch (e) {
        alert(e.message);
      }
    });
  });
}
After that, the flashlight works perfectly, but I no longer have the option to swap cameras (facingMode). When I try to change the camera I get the error "could not start video source", as if the camera were already being used by something else.
This is how I'm changing the camera (facingMode):
wheelsfs.videoConstraint.facingMode.exact = wheelsfs.videoConstraint.facingMode.exact == "environment" ? "user" : "environment";
var cameraInput = wheelsfs.videoConstraint.facingMode.exact;
wheelsfs.videoTrue.srcObject && wheelsfs.videoTrue.srcObject.getTracks().forEach(t => t.stop());
wheelsfs.videoConstraint = {
  video: {
    width: { ideal: trueWidth },
    height: { ideal: trueHeight },
    facingMode: { ideal: "environment" }
  },
  facingMode: { exact: cameraInput }
};
navigator.mediaDevices.getUserMedia({ video: wheelsfs.videoConstraint }).then(function (stream) {
  wheelsfs.videoTrue.srcObject = stream;
  wheelsfs.videoTrue.play();
  const SUPPORTS_MEDIA_DEVICES = 'mediaDevices' in navigator;
  if (SUPPORTS_MEDIA_DEVICES) {
    const track = stream.getVideoTracks()[0];
    const imageCapture = new ImageCapture(track);
    const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
      const btn = document.querySelector('.toggleCameraTorch');
      btn.style.visibility = 'visible';
      btn.addEventListener('click', function () {
        try {
          track.applyConstraints({
            advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
          });
          wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
        }
        catch (e) {
          alert(e.message);
        }
      });
    });
  }
}).catch((e) => { console.log(e.message); });
Solved it by storing stream.getVideoTracks()[0] in a variable and then calling stop() on it before changing the camera (facingMode).
So when I do:
if (SUPPORTS_MEDIA_DEVICES) {
  wheelsfs.track = stream.getVideoTracks()[0];
  const imageCapture = new ImageCapture(wheelsfs.track);
  const photoCapabilities = imageCapture.getPhotoCapabilities().then(() => {
    const btn = document.querySelector('.toggleCameraTorch');
    btn.style.visibility = 'visible';
    btn.addEventListener('click', function () {
      try {
        wheelsfs.track.applyConstraints({
          advanced: [{ torch: !wheelsfs.videoConstraint.torchState }]
        });
        wheelsfs.videoConstraint.torchState = !wheelsfs.videoConstraint.torchState;
      }
      catch (e) {
        alert(e.message);
      }
    });
  });
}
On the second line I save the track in a public variable, and then, when the function that changes the active camera is called, I make sure to run "wheelsfs.track.stop();" just before the navigator.mediaDevices.getUserMedia call.

How to record audio and video cross-platform with Electron

So I was making an Electron project that records your screen and your desktop's (or a selected app's) audio with desktopCapturer.
I got the screen recording working, and at one point even got the mic to work, but no matter what I tried I could not record desktop audio or any app's audio. After some research I found that you cannot record desktop or per-app audio with Chromium on Linux.
So what could be a solution, or some other way to record desktop audio? Maybe there is a way to record desktop audio with a different library and then combine the video with the audio somehow.
Any suggestions would be appreciated.
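One possible direction for the "record audio separately and combine it" idea above, as a rough, untested sketch: spawn ffmpeg from the Node side to capture PulseAudio and to mux afterwards. It assumes ffmpeg is installed and PulseAudio is the sound server; the source name and file paths are placeholders.
const { spawn } = require("child_process");

// Capture desktop audio with ffmpeg. Replace "default" with a monitor source
// from `pactl list sources short` to grab the desktop output instead of the mic.
function recordDesktopAudio(outPath) {
  return spawn("ffmpeg", ["-y", "-f", "pulse", "-i", "default", outPath]);
}

// Mux the saved screen recording with the separately captured audio.
function muxAudioAndVideo(videoPath, audioPath, outPath, onDone) {
  const ff = spawn("ffmpeg", [
    "-y",
    "-i", videoPath,
    "-i", audioPath,
    "-c:v", "copy",      // keep the VP9 video as-is
    "-c:a", "libopus",   // encode the audio for the webm container
    outPath,
  ]);
  ff.on("close", onDone);
}

// Usage sketch:
// const audioRec = recordDesktopAudio("audio.wav");
// ...record the screen with MediaRecorder as below...
// audioRec.kill("SIGINT"); // stop ffmpeg when the screen recording stops
// muxAudioAndVideo("vid.webm", "audio.wav", "combined.webm", () => console.log("done"));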
Code for the screen recorder itself:
// Not shown in the question: desktopCapturer, dialog, writeFile (fs) and Buffer
// are assumed to be available in the renderer, e.g. via require().
videoSelectBtn.onclick = getVideoSources;

async function getVideoSources() {
  const inputSources = await desktopCapturer.getSources({
    types: ["window", "screen", "audio"],
  });
  inputSources.forEach((source) => {
    if (source.name === "Screen 1") {
      selectSource(source);
    } else {
      console.log(source);
    }
  });
}

async function selectSource(source) {
  videoSelectBtn.innerText = source.name;
  const constraints = {
    audio: {
      mandatory: {
        chromeMediaSource: "desktop",
      },
    },
    video: {
      mandatory: {
        chromeMediaSource: "desktop",
      },
    },
  };
  const stream = await navigator.mediaDevices.getUserMedia(constraints);
  videoElement.srcObject = stream;
  videoElement.play();
  const options = {
    mimeType: "video/webm; codecs=vp9",
  };
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.onstop = handleStop;
}

function handleDataAvailable(e) {
  console.log("video data available");
  recordedChunks.push(e.data);
}

async function handleStop(e) {
  const blob = new Blob(recordedChunks, {
    type: "video/webm; codecs=vp9",
  });
  const buffer = Buffer.from(await blob.arrayBuffer());
  const { filePath } = await dialog.showSaveDialog({
    buttonLabel: "Save video",
    defaultPath: `vid-${Date.now()}.webm`,
  });
  if (filePath) {
    writeFile(filePath, buffer, () => console.log("video saved successfully!"));
  }
}

How to send MediaStream AUDIO data with socket.io

I have been having trouble taking audio data that is being recorded from a mic and sending it to the other clients in the room so that people can speak to each other in real time. I have a method of doing this, but it is inefficient and choppy...
setInterval(() => {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(stream => {
      const mediaRecorder = new MediaRecorder(stream);
      mediaRecorder.start();
      const audioChunks = [];
      mediaRecorder.addEventListener("dataavailable", (event) => {
        audioChunks.push(event.data);
      });
      mediaRecorder.addEventListener("stop", () => {
        socket.emit('liveAudioToServer', audioChunks)
      });
      setTimeout(() => {
        mediaRecorder.stop();
      }, 2000);
    });
}, 2000);
This snippet records audio and sends a buffer every two seconds so that the client side can compile it and play it upon receiving the data. I know there has got to be another way to do this. I tried a different method, but I just get an error.
socket.on('active', () => {
  if (navigator.getUserMedia) {
    navigator.getUserMedia(
      { audio: true },
      function (stream) {
        const audioContext3 = new AudioContext();
        const audioSource3 = audioContext3.createMediaStreamSource(stream);
        const analyser3 = audioContext3.createAnalyser();
        audioSource3.connect(analyser3);
        analyser3.fftSize = 256;
        const bufferLength = analyser3.frequencyBinCount;
        const dataArray = new Uint8Array(bufferLength);
        function sendAudioChunks() {
          analyser3.getByteFrequencyData(dataArray);
          requestAnimationFrame(sendAudioChunks);
          socket.emit('liveAudioToServer', dataArray)
        }
        sendAudioChunks();
      },
      function () { console.log("Error 003.") }
    );
  }
});
Can anyone help me?
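For what it's worth, a rough sketch of a less choppy variant of the first snippet: keep a single getUserMedia stream and let MediaRecorder emit small chunks via its timeslice parameter, instead of creating a new recorder every two seconds. The socket and event name are reused from above; the receiving side still has to reassemble and decode the chunks.
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
  const mediaRecorder = new MediaRecorder(stream);
  mediaRecorder.addEventListener("dataavailable", (event) => {
    if (event.data.size > 0) {
      // One small Blob roughly every 250 ms.
      socket.emit('liveAudioToServer', event.data);
    }
  });
  // Passing a timeslice (ms) makes dataavailable fire repeatedly while recording.
  mediaRecorder.start(250);
});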

Remote video not showing the stream

I'm not sure what is wrong. The caller and callee both receive the same stream, which gets set as the local video, but it won't set the remote video.
I have seen it work before, when I clicked the call button twice, but I changed the code and don't know how to recreate it.
Any help is appreciated. Thank you.
video.js
//Starts by calling the user
const callUser = (socketId, mySocket) => {
  navigator.getUserMedia({ video: true, audio: true }, stream => {
    console.log("My Stream: ", stream)
    const localVideo = document.getElementById("local-video");
    localVideo.srcObject = stream;
    localStream = stream
    createAndSendOffer(socketId, mySocket);
  })
}

//Create Offer
const createAndSendOffer = async (socketId, mySocket) => {
  peerConnection = new RTCPeerConnection()
  peerConnection.ontrack = function (event) {
    console.log("Remote Stream", event.streams[0])
    const remoteVideo = document.getElementById("remoteVideo")
    if (remoteVideo.srcObject !== event.stream[0]) {
      remoteVideo.srcObject = event.streams[0];
    }
  }
  localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream))
  await peerConnection.createOffer(offer => {
    peerConnection.setLocalDescription(new RTCSessionDescription(offer),
      function () {
        mySocket.emit("callUser", { offer, to: socketId });
      })
  })
}

//Callee is getting called
const answerCall = (data, mySocket, inCall) => {
  navigator.getUserMedia({ video: true, audio: true }, stream => {
    console.log("My Stream: ", stream)
    const localVideo = document.getElementById("local-video");
    localVideo.srcObject = stream;
    localStream = stream
    createAndSendAnswer(data, mySocket, inCall);
  })
}

//Create Answer
const createAndSendAnswer = async (data, mySocket, inCall) => {
  peerConnection = new RTCPeerConnection()
  peerConnection.ontrack = function (event) {
    console.log("Remote Stream", event.streams[0])
    const remoteVideo = document.getElementById("remoteVideo")
    if (remoteVideo.srcObject !== event.stream[0]) {
      console.log(event.streams[0])
      remoteVideo.srcObject = event.streams[0];
    }
    inCall(true)
  }
  localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream))
  await peerConnection.setRemoteDescription(new RTCSessionDescription(data.offer))
  await peerConnection.createAnswer(answer => {
    peerConnection.setLocalDescription(new RTCSessionDescription(answer),
      function () {
        mySocket.emit("makeAnswer", { answer, to: data.socket });
      })
  })
}

const startSockets = (socket, inCall) => {
  socket = socket
  socket.on("gettingCalled", data => {
    console.log("You are getting called with socketId", data.socket)
    answerCall(data, socket, inCall);
  })
  socket.on("answerMade", async data => {
    await peerConnection.setRemoteDescription(new RTCSessionDescription(data.answer))
  })
}
class Video extends Component {
  constructor(props) {
    super(props);
    this.state = {
      friends: [],
      mySocket: [],
      inCall: false
    }
  }

  componentDidMount() {
    if (this.props.user) {
      startSockets(this.props.user.socket, this.inCallChange)
    }
  }

  inCallChange = boolean => this.setState({ inCall: boolean })

  callUser = socketId => callUser(socketId, this.state.mySocket, this.state.inCall)

  render() {
    if (!this.props.user) {
      return (<div>No User</div>)
    } else {
      return (
        <div>
          <div className="video-chat-container">
            <div className="video-container">
              <video
                autoPlay
                className="remote-video"
                id="remoteVideo"
              ></video>
              <video
                autoPlay
                muted
                className="local-video"
                id="local-video"
              ></video>
            </div>
          </div>
        </div>
      );
    }
  }
}

export default withRouter(connect(mapStateToProps)(Video))
Update
I'm not sure why this is the case, but I have to send the offer and receive the answer a second time. For now, I resend an offer with createAndSendOffer(), starting the process again. I don't think this is best practice, so if you find a better solution that would be great :)
First check whether your pc.ontrack() is firing or not. The ontrack event is fired after the remote peer has called pc.addTrack() and the remote description has been set. Also make sure tracks are added before creating the answer/offer.
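A minimal sketch of that ordering with the promise-based API (the signalling step is only indicated in comments and the element id is assumed):
// Inside an async function on the calling side.
const pc = new RTCPeerConnection();
pc.ontrack = (event) => {
  // Fires only once the remote description is set and the remote side has added tracks.
  document.getElementById("remoteVideo").srcObject = event.streams[0];
};

const localStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
// Add local tracks BEFORE creating the offer so they end up in the SDP.
localStream.getTracks().forEach((track) => pc.addTrack(track, localStream));

const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
// ...send `offer` over the signalling channel, then when the answer arrives:
// await pc.setRemoteDescription(answer);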
