AudioNodes can't change sample-rate - javascript

I can't figure out where this error comes from, or what I missed. Here is my code:
function mediaDeviceInit(deviceId) {
    // quick coding shortcut; see the W3C spec for audio
    navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
    console.log('IpCodec : Get user permissions for Media Access.');
    let audioConstraints = {};
    // check for a default value
    if (deviceId) {
        audioConstraints = {
            audio: { deviceId: deviceId, echoCancellation: false, sampleRate: defaultSampleRate },
            video: false
        };
    } else {
        audioConstraints = { audio: { echoCancellation: false, sampleRate: defaultSampleRate }, video: false };
    }
    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia(audioConstraints)
            .then(function (stream) {
                //console.log(navigator.mediaDevices.getSupportedConstraints());
                userMediaSuccess(stream);
            })
            .catch(function (error) {
                userMediaError(error);
            });
    } else {
        console.log('IpCodec : Browser unsupported for getUserMedia.');
    }
    // enumerate all audio input devices
    function enumAudioInput() {
        // some code
    }
    // callback on success
    function userMediaSuccess(stream) {
        let audioSrc = audioMixer.audioContext.createMediaStreamSource(stream); // --> error here
        // some init code
        console.log('IpCodec : Media permission granted by user.');
        if (!deviceId) {
            enumAudioInput();
        }
    }
    // callback on error
    function userMediaError(error) {
        console.log('IpCodec : ' + error);
    }
}
with an error like:
Connecting AudioNodes from AudioContexts with different sample-rate is currently not supported.
and this is the AudioMixer class that owns the AudioContext:
class AudioMixer {
    constructor(type, sRate, latency) {
        this.audioContext;
        // handle browser prefixes for compatibility
        window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext;
        console.log('IpCodec : Initialize audio mixer success.');
        if (window.AudioContext) {
            this.audioContext = new window.AudioContext({ sampleRate: sRate, latencyHint: latency });
            //console.log(this.audioContext);
        } else {}
    }
}
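The error usually means the AudioContext was constructed with a sampleRate that differs from the rate the capture hardware actually delivers, and the browser will not resample a MediaStreamSource across that boundary. A minimal sketch of one workaround, assuming defaultSampleRate is not a hard requirement (contextForStream is a hypothetical helper, not part of the original code): build the context from the track's reported rate, falling back to the browser default when getSettings() does not report one.

function contextForStream(stream, latency) {
    const track = stream.getAudioTracks()[0];
    // sampleRate in getSettings() is in the spec but not reported by
    // every browser, hence the fallback to the context's default rate.
    const rate = track.getSettings().sampleRate;
    const ctx = rate
        ? new AudioContext({ sampleRate: rate, latencyHint: latency })
        : new AudioContext({ latencyHint: latency });
    return ctx.createMediaStreamSource(stream); // rates now match
}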

Related

Audio Worklet - How to capture microphone streams

I am trying to capture a live microphone stream from my desktop. When converting the Float32 samples to an Int16Array, all values come out as 0. I'm not sure what I've done wrong here. How can I capture the live microphone stream in an audio worklet?
Below is the JavaScript code:
try {
    navigator.getUserMedia = navigator.getUserMedia
        || navigator.webkitGetUserMedia
        || navigator.mozGetUserMedia;
    microphone = navigator.getUserMedia({
        audio: true,
        video: false
    }, onMicrophoneGranted, onMicrophoneDenied);
} catch (e) {
    alert(e);
}
async function onMicrophoneGranted(stream) {
    console.log(stream);
    context = new AudioContext();
    source = context.createMediaStreamSource(stream);
    await context.audioWorklet.addModule('/assets/js/buffer-detector.js');
    // Create our custom node.
    bufferDetectorNode = new AudioWorkletNode(context, 'buffer-detector');
    bufferDetectorNode.port.onmessage = (event) => {
    };
    source.connect(bufferDetectorNode);
    bufferDetectorNode.connect(context.destination);
    //source.connect(context.destination);
}
function onMicrophoneDenied() {
    console.log('denied');
}
Below is the AudioWorklet:
class BufferProcessor extends AudioWorkletProcessor {
    constructor() {
        super();
        this.initBuffer();
    }
    process(inputs) {
        console.log(inputs);
        inputs.forEach(floatArray => {
            floatArray.forEach(elem => {
                const intArray = Int16Array.from(elem);
                console.log(intArray);
            });
        });
        //const input = inputs[0];
        //this.append(input[0])
        return true;
    }
    static get parameterDescriptors() {
        return [{
            name: 'Buffer Detector',
        }];
    }
}
registerProcessor('buffer-detector', BufferProcessor);
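For what it's worth, the zeros in the worklet question above are expected: process() receives Float32 samples in [-1, 1], and Int16Array.from() truncates each value toward zero. A minimal sketch of the usual scaling step (floatTo16BitPCM is a hypothetical helper) that could be called from process():

// Scale Float32 samples in [-1, 1] to the Int16 range before
// converting; a straight Int16Array.from() truncates them all to 0.
function floatTo16BitPCM(float32Array) {
    const int16 = new Int16Array(float32Array.length);
    for (let i = 0; i < float32Array.length; i++) {
        const s = Math.max(-1, Math.min(1, float32Array[i])); // clamp
        int16[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;           // scale
    }
    return int16;
}

The clamp guards against the occasional sample that lands just outside [-1, 1].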

Is window.localAudio a thing?

I found Mozilla's Getting browser microphone permission. It defines a function to request permission and listen to the client's microphone as such:
function getLocalStream() {
    navigator.mediaDevices.getUserMedia({ video: false, audio: true }).then(stream => {
        window.localStream = stream;          // A
        window.localAudio.srcObject = stream; // B
        window.localAudio.autoplay = true;    // C
    }).catch(err => {
        console.log("u got an error:" + err);
    });
}
I checked in Chrome, Firefox and Safari - all of them throw an error about window.localAudio being undefined. Where did this tutorial get it from? Was window.localAudio ever a thing? What was it supposed to do?
I will try to give you something more useful than the question you have asked.
The function below will create the element if it is not present, and there are a few options available. In the example I'm adding the newly created audio element to the body, but it will work even if it is not added; it's a matter of choice.
<html>
<head>
    <script>
        var el;
        function attachStream(stream, el, options) {
            var item;
            var URL = window.URL;
            var element = el;
            var opts = {
                autoplay: true,
                mirror: false,
                muted: false,
                audio: false,
                disableContextMenu: false
            };
            if (options) {
                for (item in options) {
                    opts[item] = options[item];
                }
            }
            if (!element) {
                element = document.createElement(opts.audio ? 'audio' : 'video');
            } else if (element.tagName.toLowerCase() === 'audio') {
                opts.audio = true;
            }
            if (opts.autoplay) element.autoplay = 'autoplay';
            if (opts.muted) element.muted = true;
            if (!opts.audio && opts.mirror) {
                ['', 'moz', 'webkit', 'o', 'ms'].forEach(function (prefix) {
                    var styleName = prefix ? prefix + 'Transform' : 'transform';
                    element.style[styleName] = 'scaleX(-1)';
                });
            }
            element.srcObject = stream;
            return element;
        }
        function getLocalStream() {
            navigator.mediaDevices.getUserMedia({
                video: false,
                audio: true
            }).then(
                stream => {
                    var doesnotexist = !el;
                    el = attachStream(stream, el, {
                        audio: true,
                        autoplay: true
                    });
                    if (doesnotexist) document.body.appendChild(el);
                }
            ).catch(err => {
                console.log("u got an error:" + err);
            });
        }
        window.addEventListener('DOMContentLoaded', (event) => {
            getLocalStream();
        });
    </script>
</head>
<body>
</body>
</html>
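As for the original question: window.localAudio was never a dedicated API. Elements with an id attribute are exposed as same-named global properties (named access on the Window object), so the tutorial appears to assume the page markup already contains an audio element with id="localAudio". A minimal sketch of creating that element explicitly, under that reading:

// Once an element with id="localAudio" is in the document, the
// browser exposes it as window.localAudio via named window properties.
const localAudio = document.createElement('audio');
localAudio.id = 'localAudio';
localAudio.autoplay = true;
document.body.appendChild(localAudio);
// From here on, window.localAudio refers to this element.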

track.stop is not turning the camera off anymore

I have a webpage where I want the user to take a picture with their laptop/phone camera.
Once they click a button, a modal is shown and the following JS starts the camera stream to take the picture:
function startStreaming() {
    if (null != cameraStream) {
        var track = cameraStream.getTracks()[0];
        track.stop();
        stream.load();
        cameraStream = null;
    }
    //const audioSource = audioInputSelect.value;
    const videoSource = videoSelect.value;
    const constraints = {
        //audio: {deviceId: audioSource ? {exact: audioSource} : undefined},
        video: {
            deviceId: videoSource ? {
                exact: videoSource
            } : undefined
        }
    };
    navigator.mediaDevices.getUserMedia(constraints).then(gotStream).then(gotDevices).catch(handleError);
    var mediaSupport = 'mediaDevices' in navigator;
    if (mediaSupport && null == cameraStream) {
        const videoSource = videoSelect.value;
        const constraints = {
            video: {
                deviceId: videoSource ? {
                    exact: videoSource
                } : undefined
            }
        };
        navigator.mediaDevices.getUserMedia(constraints)
            .then(function (mediaStream) {
                cameraStream = mediaStream;
                stream.srcObject = mediaStream;
                stream.play();
            })
            .catch(handleError);
    } else {
        alert('Your browser does not support media devices.');
        return;
    }
}
This is triggered by:
$('#photoStudio').on('show.bs.modal', function (event) {
    navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
    startStreaming();
});
Then when I close the modal I want to stop the streaming, but the LED indicator next to my camera stays on:
$('#photoStudio').on('hide.bs.modal', function (event) {
    stopStreaming();
});
where stopStreaming() is:
function stopStreaming() {
    if (null != cameraStream) {
        var track = cameraStream.getTracks()[0];
        track.stop();
        stream.load();
        cameraStream = null;
    }
}
I don't get any kind of error and I cannot find a way to debug why the camera is still running. Am I missing anything in the stopStreaming function?
If any track has not been stopped, your camera will still be active. In your stopStreaming function you only stop the first track in the returned array.
If you instead iterate through the tracks, you may catch ones you aren't currently stopping:
function stopStreaming() {
    if (null != cameraStream) {
        var tracks = cameraStream.getTracks();
        // stop all the tracks, not just the first
        tracks.forEach((track) => {
            track.stop();
        });
        stream.load();
        cameraStream = null;
    }
}
this.camera_stream.getTracks().forEach((track) => {
    console.log(track);
    track.stop();
    track.enabled = false; // also disable the track
});
video.load();
this.camera_stream = null;
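Separately, and as a guess from the question's code: startStreaming() calls getUserMedia() twice (once unconditionally before the mediaSupport check and once inside it), so a second stream can be acquired whose tracks never land in cameraStream and are therefore never stopped. A minimal sketch of holding on to every acquired stream (allStreams and both helpers are hypothetical additions):

// Remember every stream handed out by getUserMedia so no track is
// orphaned when the modal closes.
const allStreams = [];

function rememberStream(mediaStream) {
    allStreams.push(mediaStream);
    return mediaStream;
}

function stopAllStreams() {
    allStreams.forEach((s) => s.getTracks().forEach((t) => t.stop()));
    allStreams.length = 0; // drop the references as well
}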

Add a video stream in an ongoing audio stream between connected peers in react-native-webrtc

I have an application in which I have implemented audio/video calling between web (ReactJS) and mobile (React Native). I am using react-native-webrtc for the mobile-side peer. When an audio call is connected and peer connections are created, the audio stream works perfectly. When I switch to a video call during the audio call and the camera on my device turns on, the existing stream is not updated to carry both audio and video. I am looking for a solution where I can switch between audio and video calls on the fly and update the WebRTC stream accordingly.
ReactJS code: getting the audio stream and setting it to localStream:
getStream = (options) => {
    navigator.getUserMedia =
        navigator.getUserMedia ||
        navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia;
    navigator.getUserMedia(
        { audio: true, video: false },
        (stream) => {
            console.log("getusermedia event obj", stream);
            console.log(stream.getTracks());
            this.localGetTracks();
            if (this.localVideo.current !== null) {
                this.localVideo.current.srcObject = stream;
                this.localStream = stream;
            }
        },
        (error) => {
            console.log("User Media Error", error);
        }
    );
};
ReactJS code: a new peer connection is created and localStream (audio true, video false) is added to it:
newPeerConnection = () => {
    window.RTCPeerConnection =
        window.mozRTCPeerConnection ||
        window.webkitRTCPeerConnection ||
        window.RTCPeerConnection;
    let peerConn = new RTCPeerConnection({
        iceServers: turnServer,
    });
    peerConn.onicecandidate = (evt) => {
        if (evt.candidate) {
            this.props.connection.invoke(
                "addIceCandidate",
                parseInt(this.props.ticket.id),
                JSON.stringify({
                    type: "candidate",
                    sdpMLineIndex: evt.candidate.sdpMLineIndex,
                    sdpMid: evt.candidate.sdpMid,
                    candidate: evt.candidate.candidate,
                })
            );
        } else {
            console.log("End of candidates.");
            this.setState(
                {
                    videoState: true,
                    end_call: true,
                    call_status: "Connected",
                },
                () => this.props._handleCallConnected(true)
            );
        }
        this.forceUpdate();
    };
    peerConn.addStream(this.localStream);
    peerConn.addEventListener(
        "addstream",
        (stream) => {
            console.log("remoteStream on addstream", stream);
            this.remoteVideo.current.srcObject = stream.stream;
        },
        false
    );
    this.setState({
        peerConn: peerConn,
    });
};
React Native code: an event listener is subscribed for when a new stream is added; it adds the incoming stream to remoteStream:
peerConn.addEventListener(
    'addstream',
    (stream) => {
        console.log(`AddStreamListen`);
        console.log('remoteStream added', stream);
        InCallManager.setForceSpeakerphoneOn(false);
        this.setState({
            isSpeakerEnabled: false,
        });
        this.setState({
            remoteStream: stream,
            showAudioCallTimer: true,
        });
    },
    false,
);
ReactJS code: adding a new stream with both audio and video set to true:
getVideoStream = () => {
    navigator.getUserMedia =
        navigator.getUserMedia ||
        navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia;
    navigator.getUserMedia(
        { audio: true, video: true, mirror: true },
        (stream) => {
            this.localStream = stream;
            this.localVideo.current.srcObject = this.localStream;
            this.state.peerConn.addStream(this.localStream);
        },
        (error) => {
            console.log("error", error);
        }
    );
};
But the event listener in the mobile code never fires for the second stream I add with both audio and video set to true; the previous stream continues and the video never shows up.
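One likely explanation: the legacy addstream event only fires for streams present when the offer/answer exchange happens, so calling addStream() mid-call changes nothing visible because no renegotiation takes place. The usual approach is to add (or replace) tracks on the existing RTCPeerConnection and renegotiate, listening for track rather than addstream on the receiving side. Below is a sketch under the assumptions that your react-native-webrtc version implements getSenders()/replaceTrack() and that your signalling layer can carry a second offer; sendOfferToPeer is a hypothetical helper:

// Upgrade an ongoing audio call to video on the existing connection.
async function upgradeToVideo(peerConn, localVideoEl) {
    const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
        video: true,
    });
    localVideoEl.srcObject = stream;
    const videoTrack = stream.getVideoTracks()[0];
    // If a video sender already exists, swap its track in place.
    const sender = peerConn.getSenders()
        .find((s) => s.track && s.track.kind === 'video');
    if (sender) {
        await sender.replaceTrack(videoTrack); // no renegotiation needed
    } else {
        peerConn.addTrack(videoTrack, stream); // requires renegotiation
        const offer = await peerConn.createOffer();
        await peerConn.setLocalDescription(offer);
        sendOfferToPeer(offer); // hypothetical signalling call
    }
}

// Receiving side: prefer the track event over the legacy addstream.
// peerConn.ontrack = (evt) => { remoteVideoEl.srcObject = evt.streams[0]; };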

getUserMedia() can't release the camera

There is a very silly mistake somewhere in my code that I can't find. Basically what I'm doing is using two separate buttons to start and stop recording the stream that I get from WebRTC getUserMedia() (I'm using RecordRTC for recording). My stop function stops the recording but does not release the camera.
<script type="text/javascript">
    $(document).ready(function () {
        var recorder;
        var video = document.getElementById("video");
        var videoConstraints = {
            video: {
                mandatory: {
                    minWidth: 1280,
                    minHeight: 720,
                    maxWidth: 1920,
                    maxHeight: 1080,
                    minFrameRate: 29.97,
                    maxFrameRate: 60,
                    minAspectRatio: 1.77
                }
            },
            audio: true
        };
        function captureCamera(callback) {
            navigator.mediaDevices.getUserMedia(videoConstraints).then(function (camera) {
                callback(camera);
            }).catch(function (error) {
                alert('Unable to capture your camera. Please check console logs.');
                console.error(error);
            });
        }
        function stopRecordingCallback() {
            video.src = video.srcObject = null;
            video.src = URL.createObjectURL(recorder.getBlob());
            video.play();
            //recorder.camera.stop(); // the deprecated way
            recorder.camera.getTracks().forEach(track => track.stop()); // modern way per the documentation
            recorder.destroy();
            recorder = null;
        }
        function hasGetUserMedia() {
            return (navigator.getUserMedia ||
                navigator.webkitGetUserMedia ||
                navigator.mozGetUserMedia ||
                navigator.msGetUserMedia);
        }
        $('#startRecord').on("click", function () {
            if (hasGetUserMedia()) {
                /*----------------recording process start----------------*/
                this.disabled = true;
                captureCamera(function (camera) {
                    setSrcObject(camera, video);
                    video.play();
                    var options = {
                        recorderType: MediaStreamRecorder,
                        mimeType: 'video/webm;codecs=h264',
                        audioBitsPerSecond: 128000,
                        videoBitsPerSecond: 2097152 // 2 mbps
                    };
                    recorder = RecordRTC(camera, options);
                    recorder.startRecording();
                    // release the camera on stopRecording
                    recorder.camera = camera;
                    document.getElementById('stopRecord').disabled = false;
                });
                /*----------------recording process end----------------*/
            } else {
                alert('getUserMedia() is not supported by your browser');
            }
        });
        $('#stopRecord').on("click", function () {
            this.disabled = true;
            document.getElementById('startRecord').disabled = false;
            recorder.stopRecording(stopRecordingCallback);
        });
    });
</script>
So I can't find the reason why the camera isn't released when the $('#stopRecord') click handler runs. Any help?
You can stop your stream's tracks, like this:
navigator.getUserMedia({ audio: false, video: true },
    function (stream) {
        // can also use getAudioTracks() or getVideoTracks()
        var track = stream.getTracks()[0]; // if only one media track
        // ...
        track.stop();
    },
    function (error) {
        console.log('getUserMedia() error', error);
    });
So, in your case, I believe you can do something like this:
var track = recorder.camera.getTracks()[0]; // if only one media track
// ...
track.stop();
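One caveat on that snippet: since the question's constraints request audio: true as well as video, the stream carries two tracks, and stopping only getTracks()[0] leaves the other one alive. Iterating covers both, as the asker's own stopRecordingCallback already does:

// Stop every track on the recorded stream, audio and video alike.
recorder.camera.getTracks().forEach((track) => track.stop());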
