Record front and back webcams at the same time - javascript

I have the following code which records with the back camera of the tablet for 5 seconds and downloads the video:
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<script>
// Enumerate all media devices, pick the second camera (the back one per the
// comments below), record it for 5 seconds, and download the result.
navigator.mediaDevices.enumerateDevices()
.then(devices => {
// Collect the deviceIds of every camera, in enumeration order.
var deviceId = [];
devices.forEach(function(device) {
if ( device.kind == 'videoinput' )
deviceId.push(device.deviceId);
});
// deviceId[0] equals the id of the front camera (resolution: 1280x720)
// deviceId[1] equals the id of the back camera (resolution: 1920x1080)
navigator.mediaDevices.getUserMedia({ audio: true, video: { deviceId: { exact: deviceId[1] }, width: 1920, height: 1080 } })
.then(stream => record(stream, 5000)
.then(recording => {
// Recording finished: release the camera/mic, then download the chunks
// as a single .webm file via a synthetic <a download> click.
stop(stream);
var a = document.createElement('a');
a.href = URL.createObjectURL(new Blob(recording));
a.download = "recording.webm";
a.click();
})
// Stop the tracks even if recording failed; stopping an already-stopped
// track is a no-op, so the extra call after a success is harmless.
.catch(log).then(() => stop(stream)))
.catch(log);
})
.catch(log);
// Record `stream` for `ms` milliseconds with MediaRecorder.
// Resolves with the array of recorded data chunks once the recorder stops;
// rejects if the recorder reports an error.
var record = (stream, ms) => {
  const recorder = new MediaRecorder(stream);
  const chunks = [];
  recorder.ondataavailable = (event) => {
    chunks.push(event.data);
  };
  recorder.start();
  // Settles when the recorder fires `stop` (resolve) or `error` (reject).
  const finished = new Promise((resolve, reject) => {
    recorder.onstop = resolve;
    recorder.onerror = reject;
  });
  // Request the stop after the requested duration has elapsed.
  const timer = wait(ms).then(() => recorder.stop());
  return Promise.all([finished, timer]).then(() => chunks);
};
// Release the capture devices by stopping every track on the stream.
var stop = (stream) => {
  for (const track of stream.getTracks()) {
    track.stop();
  }
};
// Promise that resolves (with no value) after `ms` milliseconds.
var wait = (ms) => new Promise((resolve) => {
  setTimeout(resolve, ms);
});
// Print an error's name and message to the console, e.g. "NotAllowedError: …".
var log = (err) => {
  console.log(`${err.name}: ${err.message}`);
};
</script>
</body>
</html>
What I'm trying to figure out is how I can record both the front and back webcams of the tablet at the same time and download the videos as separate files.
I have found this question, but it's old, so I'm not sure whether it's still possible today.

Related

unable to generate .mov video thumbnail javascript

I'm trying to generate a thumbnail from a video(.mov) but it's showing this error
Error: Error 4; details: DEMUXER_ERROR_NO_SUPPORTED_STREAMS: FFmpegDemuxer: no supported streams
.mp4 and other formats are working well.
chrome version: Version 101.0.4951.67 (Official Build) (64-bit)
Code:
async generateVideoThumbnail(file) {
console.log('generating thumbnail')
const binaryData = []
binaryData.push(file)
const canvas = document.createElement('canvas')
const context = canvas.getContext('2d')
const video = document.createElement('video')
video.setAttribute('src', URL.createObjectURL(new Blob(binaryData)))
video.onloadeddata = () => {
console.log('Yay! The readyState just increased to ' +
'HAVE_CURRENT_DATA or greater for the first time.');
};
video.loadstart = () => {
console.error(`load start`);
}
video.onwaiting = () => {
console.log('Video is waiting for more data.');
};
video.onprogress = () => {
console.log("Downloading video");
};
video.onerror = () => {
console.log('video error')
console.log("Error " + video.error.code + "; details: " + video.error.message);
}
console.log(video)
console.log('video load')
video.load()
let thumbnail = await new Promise((resolve) => {
video.onloadedmetadata = async () => {
console.log('on load')
canvas.width = video.videoWidth
canvas.height = video.videoHeight
video.currentTime = video.duration / 2
await video.play()
context.drawImage(video, 0, 0)
video.pause()
const blob = await new Promise((resolve) => {
return canvas.toBlob(function (blob) {
resolve(blob)
})
})
resolve(blob)
}
})
return thumbnail
},
I don't think Chrome is able to play mov files.
You can check this in the console, by writing something like:
// Probe container support: canPlayType returns 'probably'/'maybe' or '' (no).
const video = document.createElement('video')
console.log(video.canPlayType('video/mp4')) //expect 'maybe'
console.log(video.canPlayType('video/ogg')) //expect 'maybe'
console.log(video.canPlayType('video/quicktime')) //expect ''
Firefox on the other hand seems to be able to play them, you might try your app there.

How to send MediaStream AUDIO data with socket.io

I have been having trouble taking audio data that is being recorded from a mic and sending it to the other clients in the room so that people can speak to each other in real time. I have a method of doing this, but it is inefficient and choppy...
// Every 2 s: open a brand-new microphone stream, record it for 2 s with
// MediaRecorder, then ship the collected chunks to the server.
setInterval(() => {
navigator.mediaDevices.getUserMedia({ audio: true })
.then(stream => {
const mediaRecorder = new MediaRecorder(stream);
mediaRecorder.start();
const audioChunks = [];
mediaRecorder.addEventListener("dataavailable", (event) => {
audioChunks.push(event.data);
});
// When the recorder stops, forward everything captured in this window.
mediaRecorder.addEventListener("stop", () => {
socket.emit('liveAudioToServer', audioChunks)
});
// Stop after 2 s so the interval period and clip length line up.
setTimeout(() => {
mediaRecorder.stop();
},2000);
});
}, 2000);
// NOTE(review): each tick requests a fresh getUserMedia stream and never
// calls track.stop(), so open microphone streams accumulate; reusing one
// stream (or a WebRTC audio track) would avoid both the gaps and the leak.
This snippet records audio and sends a buffer every two seconds so that the client side compiles it and plays it upon receiving the data. I know there's got to be another way to do this. I tried a different method, but just receive an error.
// Alternative attempt: sample the microphone with the Web Audio API and
// stream frequency-bin snapshots to the server on every animation frame.
socket.on('active', () => {
// NOTE(review): navigator.getUserMedia is the deprecated callback API;
// the modern promise-based form is navigator.mediaDevices.getUserMedia.
if(navigator.getUserMedia) {
navigator.getUserMedia(
{audio: true},
function(stream) {
const audioContext3 = new AudioContext();
const audioSource3 = audioContext3.createMediaStreamSource(stream);
const analyser3 = audioContext3.createAnalyser();
audioSource3.connect(analyser3);
analyser3.fftSize = 256;
const bufferLength = analyser3.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
// Emits ~60 snapshots/second of FREQUENCY-bin data — this is analysis
// data, not encoded audio, so the receiver cannot play it back.
function sendAudioChunks(){
analyser3.getByteFrequencyData(dataArray);
requestAnimationFrame(sendAudioChunks);
socket.emit('liveAudioToServer', dataArray)
}
sendAudioChunks();
},
function(){ console.log("Error 003.")}
);
}
// NOTE(review): as posted, this snippet is missing the closing `});` for
// the socket.on('active', ...) callback.
Can anyone help me?

Webrtc video broadcasting Viewer only works sometimes

I'm using Gabriel Tanner's video broadcasting tutorial. On localhost it works great! Over an Internet connection, most of the time the viewer simply doesn't show anything. I spent hours testing and changing the lifecycle. Nothing works.
Here´s the Broadcaster code:
// --- Broadcaster: one RTCPeerConnection per watcher, keyed by socket id ---
const peerConnections = {};
const config = {
iceServers: [
{
// Public STUN server: NAT discovery only, no relay (TURN) fallback.
urls: ["stun:stun.l.google.com:19302"]
}
]
};
const socket = io.connect(window.location.origin);
// A watcher answered our offer: complete that connection's handshake.
socket.on("answer", (id, description) => {
peerConnections[id].setRemoteDescription(description);
});
// A new watcher joined: create a connection, add our tracks, send an offer.
socket.on("watcher", id => {
const peerConnection = new RTCPeerConnection(config);
peerConnections[id] = peerConnection;
let stream = videoElement.srcObject;
stream.getTracks().forEach(track => peerConnection.addTrack(track, stream));
// Trickle our ICE candidates to that watcher as they are gathered.
peerConnection.onicecandidate = event => {
if (event.candidate) {
socket.emit("candidate", id, event.candidate);
}
};
peerConnection
.createOffer()
.then(sdp => peerConnection.setLocalDescription(sdp))
.then(() => {
socket.emit("offer", id, peerConnection.localDescription);
});
});
// Remote ICE candidate relayed from a watcher.
socket.on("candidate", (id, candidate) => {
peerConnections[id].addIceCandidate(new RTCIceCandidate(candidate));
});
// A watcher left: tear down and forget its connection.
socket.on("disconnectPeer", id => {
peerConnections[id].close();
delete peerConnections[id];
});
window.onunload = window.onbeforeunload = () => {
socket.close();
};
// Get camera and microphone
const videoElement = document.querySelector("video");
const audioSelect = document.querySelector("select#audioSource");
const videoSelect = document.querySelector("select#videoSource");
audioSelect.onchange = getStream;
videoSelect.onchange = getStream;
// Open the default devices first, then populate the device dropdowns.
getStream()
.then(getDevices)
.then(gotDevices);
// List every media input/output device the browser knows about.
function getDevices() {
return navigator.mediaDevices.enumerateDevices();
}
// Fill the audio/video <select> dropdowns from the enumerated devices.
function gotDevices(deviceInfos) {
window.deviceInfos = deviceInfos;
for (const deviceInfo of deviceInfos) {
const option = document.createElement("option");
option.value = deviceInfo.deviceId;
if (deviceInfo.kind === "audioinput") {
// Labels are empty until the user grants permission; fall back to a number.
option.text = deviceInfo.label || `Microphone ${audioSelect.length + 1}`;
audioSelect.appendChild(option);
} else if (deviceInfo.kind === "videoinput") {
option.text = deviceInfo.label || `Camera ${videoSelect.length + 1}`;
videoSelect.appendChild(option);
}
}
}
// Stop any previous capture, then open a new stream for the currently
// selected audio/video devices and hand it to gotStream. getUserMedia
// failures are routed to handleError.
function getStream() {
  const previous = window.stream;
  if (previous) {
    for (const track of previous.getTracks()) {
      track.stop();
    }
  }
  // An empty selection means "no constraint" rather than an exact match.
  const pickDevice = (id) => ({ deviceId: id ? { exact: id } : undefined });
  const constraints = {
    audio: pickDevice(audioSelect.value),
    video: pickDevice(videoSelect.value)
  };
  return navigator.mediaDevices.getUserMedia(constraints)
    .then(gotStream)
    .catch(handleError);
}
// Store the new stream globally, sync the dropdowns to the tracks that were
// actually opened, preview locally, and announce ourselves as broadcaster.
function gotStream(stream) {
window.stream = stream;
// Match dropdown entries by label against the live tracks.
audioSelect.selectedIndex = [...audioSelect.options].findIndex(
option => option.text === stream.getAudioTracks()[0].label
);
videoSelect.selectedIndex = [...videoSelect.options].findIndex(
option => option.text === stream.getVideoTracks()[0].label
);
videoElement.srcObject = stream;
socket.emit("broadcaster");
}
// Log getUserMedia failures (e.g. permission denied, device busy).
function handleError(error) {
console.error("Error: ", error);
}
The viewer:
// --- Viewer: single RTCPeerConnection answering the broadcaster's offer ---
let peerConnection;
const config = {
iceServers: [
{
// STUN only; with no TURN relay, restrictive NATs can block the media path.
urls: ["stun:stun.l.google.com:19302"]
}
]
};
const socket = io.connect(window.location.origin);
const video = document.querySelector("video");
// The broadcaster sent an offer: answer it and wire up media/ICE handlers.
socket.on("offer", (id, description) => {
peerConnection = new RTCPeerConnection(config);
peerConnection
.setRemoteDescription(description)
.then(() => peerConnection.createAnswer())
.then(sdp => peerConnection.setLocalDescription(sdp))
.then(() => {
socket.emit("answer", id, peerConnection.localDescription);
});
// Show the broadcaster's stream as soon as a track arrives.
peerConnection.ontrack = event => {
video.srcObject = event.streams[0];
};
// Trickle our ICE candidates back to the broadcaster.
peerConnection.onicecandidate = event => {
if (event.candidate) {
socket.emit("candidate", id, event.candidate);
}
};
});
socket.on("candidate", (id, candidate) => {
peerConnection
.addIceCandidate(new RTCIceCandidate(candidate))
.catch(e => console.error(e));
});
// Ask to watch both on (re)connect and whenever a broadcaster appears.
socket.on("connect", () => {
socket.emit("watcher");
});
socket.on("broadcaster", () => {
socket.emit("watcher");
});
socket.on("disconnectPeer", () => {
peerConnection.close();
});
window.onunload = window.onbeforeunload = () => {
socket.close();
};
I'm very thankful for your help.
Since the two devices are not on the same network, you will need a TURN server to establish the connection. A TURN server acts as a relay when the direct peer-to-peer connection fails, which seems to be what is happening in your case.
Since there are no public TURN servers out there, you will probably need to create your own. For that, I can recommend the following options:
Coturn - Has a lot of possible configuration options but is thereby harder to set up.
Pion TURN - Easier to set up but doesn't feature as many possibilities
If you want to learn more about the internals of WebRTC and how they work together, check out webrtcforthecurious.

How to record background music while recording videos with JavaScript

I want to record the video and background music without the microphone permission on Mobile devices.
It's just like TikTok.
I know how to record a stream with MediaRecorder.
But MediaRecorder can only record one stream, and I don't know how to mix the music into that stream.
My code:
// Capture `mediaElement`'s output for `ms` milliseconds at `fps`.
// Resolves with a Blob of the recording, or with false on failure/overlap.
record = (renderer, fps = 25, ms = 3000, mediaElement) => new Promise(resolve => {
// NOTE(review): resolve(false) is not followed by a return, so a new
// recording is still started even when one is marked as in progress.
if (this.recording) {
resolve(false);
}
this.recording = true;
try {
const stream = mediaElement.captureStream(fps);
const recorder = new MediaRecorder(stream);
const chunks = [];
recorder.ondataavailable = e => chunks.push(e.data);
recorder.onstop = () => resolve(new Blob(chunks));
recorder.start();
setTimeout(() => recorder.stop(), ms);
} catch (err) {
console.log('failed to record', err);
resolve(false);
}
// NOTE(review): this line runs synchronously, long before the timeout
// fires, so the busy flag is cleared while recording is still underway.
this.recording = false;
});
Thank you for your appreciation.
I found the answer.
I needed to change the audio source to a stream and mix it into one combined stream.
It works!
// Record `stream`'s video while mixing in background music via Web Audio:
// the <audio> element is routed into a MediaStreamDestination, and that
// destination's audio track is combined with the camera's video track
// into a new MediaStream for the MediaRecorder.
record = (stream, fps = 25, ms = 3000) => new Promise(resolve => {
// NOTE(review): resolve(false) is not followed by a return, so recording
// proceeds even when one is marked as already in progress.
if (this.recording) {
resolve(false);
}
this.recording = true;
try {
const audio = new Audio();
audio.src = require('someMusic.mp3');
const context = new AudioContext();
// Route the element into the audio graph instead of the speakers.
const backgroundMusic = context.createMediaElementSource(audio);
audio.volume = 0.8;
audio.play();
const mixedOutput = context.createMediaStreamDestination();
backgroundMusic.connect(mixedOutput);
// Combine the music's audio track with the original video track.
const videoTrack = stream.getVideoTracks()[0];
const mixedTracks = mixedOutput.stream.getAudioTracks()[0];
const streamN = new MediaStream([mixedTracks, videoTrack]);
const recorder = new MediaRecorder(streamN);
const chunks = [];
recorder.ondataavailable = e => chunks.push(e.data);
recorder.onstop = () => resolve(new Blob(chunks));
recorder.start();
setTimeout(() => recorder.stop(), ms);
} catch (err) {
console.log('failed to record', err);
resolve(false);
}
// NOTE(review): cleared synchronously, before the timeout fires, so the
// busy flag does not actually guard the recording window.
this.recording = false;
});

How to pass recorded video data from javascript to python

I'm trying to pass recorded blobs of video data from some JavaScript code to my routes so that I can save them.
I'm a newbie.
The javascript records video using users webcam and saves it as recordedBlob. I'm trying to pass that recordedBlob data to my python routes for saving.
This is the javascript code..it is in my html file
<script type="text/javascript">
// Grab the page elements used by the recorder UI.
let video = document.getElementById("video");
let recording = document.getElementById("recording");
let startButton = document.getElementById("startButton");
let stopButton = document.getElementById("stopButton");
let downloadButton = document.getElementById("downloadButton");
let logElement = document.getElementById("log");
let recordingTimeMS = 5000;
// Append a message to the on-page log element.
function log(msg) {
logElement.innerHTML += msg + "\n";
}
// Promise that resolves after the given delay in milliseconds.
function wait(delayInMS) {
return new Promise(resolve => setTimeout(resolve, delayInMS));
}
// Record `stream` for `lengthInMS` ms; resolves with the chunk array.
function startRecording(stream, lengthInMS) {
let recorder = new MediaRecorder(stream);
let data = [];
recorder.ondataavailable = event => data.push(event.data);
recorder.start();
log(recorder.state + " for " + (lengthInMS/1000) + " seconds...");
let stopped = new Promise((resolve, reject) => {
recorder.onstop = resolve;
recorder.onerror = event => reject(event.name);
});
  let recorded = wait(lengthInMS).then(
// Only call stop() if the recorder has not already been stopped.
() => recorder.state == "recording" && recorder.stop()
  );
return Promise.all([
stopped,
recorded
])
.then(() => data);
}
// Release all camera/microphone tracks of a stream.
function stop(stream) {
stream.getTracks().forEach(track => track.stop());
}
startButton.addEventListener("click", function() {
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
video.srcObject = stream;
// NOTE(review): assigning a MediaStream to `href` does nothing useful;
// the href is overwritten with a Blob URL after recording anyway.
downloadButton.href = stream;
// Firefox ships captureStream under the moz prefix.
video.captureStream = video.captureStream || video.mozCaptureStream;
return new Promise(resolve => video.onplaying = resolve);
}).then(() => startRecording(video.captureStream(), recordingTimeMS))
.then (recordedChunks => {
let recordedBlob = new Blob(recordedChunks, { type: "video/webm" });
recording.src = URL.createObjectURL(recordedBlob);
downloadButton.href = recording.src;
downloadButton.download = "RecordedVideo.webm";
log("Successfully recorded " + recordedBlob.size + " bytes of " +
recordedBlob.type + " media.");
})
.catch(log);
}, false);
stopButton.addEventListener("click", function() {
stop(video.srcObject);
}, false);
</script>
This is the routes.py where I'm trying to pass the recordedBlob data.
from flask import render_template, redirect, url_for
# NOTE(review): the route decorator is commented out with '#'; Flask needs
# '@posts.route(...)' for this view to be registered at all.
#posts.route('/post/new/vlog',methods=['GET','POST'])
def new_vlog():
# NOTE(review): as pasted, this snippet has lost its indentation, so it is
# not valid Python in this form.
if current_user.is_authenticated:
# NOTE(review): `recordedBlob` is a JavaScript variable — it does not exist
# here, and `{recordedBlob}` would be a Python set literal. The blob must be
# uploaded from the browser (e.g. fetch + FormData) and read server-side via
# `request.files` — TODO confirm intended upload mechanism.
return render_template('vlog.html',title='New Vlog',video={recordedBlob})
# NOTE(review): everything below the `return` above is unreachable.
if video.data:
video_file = save_video(video.data)
return redirect(url_for('main.home'))
else:
return redirect(url_for('users.login'))

Categories

Resources