Saving captured getUserMedia video to file before POST to server - javascript

I am trying to write a small application that can record video in the browser and upload it to a server.
I have got the code below:
<html>
<body>
  <video id="video" playsinline autoplay></video>
  <script>
    function hasGetUserMedia() {
      return !!(navigator.mediaDevices &&
                navigator.mediaDevices.getUserMedia);
    }
    if (hasGetUserMedia()) {
      console.log("Good to go");
    } else {
      console.log('Not supported');
    }
    const constraints = {
      video: true,
      audio: true,
    };
    function start() {
      navigator.mediaDevices.getUserMedia(constraints).
        then((stream) => { video.srcObject = stream });
      var videoEl = document.getElementById('video');
      stream = videoEl.srcObject;
    }
    function stop() {
      var videoEl = document.getElementById('video');
      stream = videoEl.srcObject;
      tracks = stream.getTracks();
      tracks.forEach(function(track) {
        track.stop();
      });
      downloadLink.href = URL.createObjectURL(new Blob(tracks[0]));
      downloadLink.download = 'acetest.webm';
    }
  </script>
  <button onclick="start()">Start</button>
  <button onclick="stop()">Stop</button>
</body>
</html>
I can see the video on the screen, but I am unsure how I can then capture it to a file for upload.
I have tried using URL.createObjectURL(new Blob(tracks[0])); but this doesn't work. How can I save the video once the Stop button is pressed?
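An editorial sketch of one way to do this, since the answers further down also point at MediaRecorder: tracks themselves contain no bytes, so new Blob(tracks[0]) cannot work; instead, record the stream into chunks and assemble them into a Blob when recording stops. This assumes an added <a id="downloadLink"> element and a hypothetical /upload endpoint; the start/stop functions would replace the ones in the page above:

<a id="downloadLink">Download</a>

<script>
let recorder;
let chunks = [];

function start() {
  navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
    document.getElementById('video').srcObject = stream;

    // MediaStreamTracks carry no data themselves; MediaRecorder
    // encodes the live stream into timed chunks as it plays.
    recorder = new MediaRecorder(stream);
    recorder.ondataavailable = (e) => chunks.push(e.data);
    recorder.onstop = () => {
      const blob = new Blob(chunks, { type: recorder.mimeType });
      chunks = [];

      // Offer the recording as a download...
      const downloadLink = document.getElementById('downloadLink');
      downloadLink.href = URL.createObjectURL(blob);
      downloadLink.download = 'acetest.webm';

      // ...and/or POST it to the server ('/upload' is a hypothetical endpoint).
      const form = new FormData();
      form.append('video', blob, 'acetest.webm');
      fetch('/upload', { method: 'POST', body: form });
    };
    recorder.start();
  });
}

function stop() {
  recorder.stop(); // flushes a final dataavailable event, then fires onstop
  document.getElementById('video').srcObject
    .getTracks().forEach((track) => track.stop());
}
</script>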

Related

Blob video stream not showing on iOS when receiving a stream from socket.io (JavaScript and Node.js)

This works perfectly fine on Android (every part of it). But when I receive a video stream wrapped in a Blob on iOS, sent from Android or another iOS device, it shows no sign of loading or displaying the video. However, when I show my own video to myself on iOS, it works.
I have tried the following:
video.setAttribute('autoplay', '');
video.setAttribute('playsinline', '');
video.setAttribute('muted', '');
I also tried adding a source element to the video element, but none of these worked.
How am I supposed to fix the receiving video issue on iOS?
Code (sorry for all the styling):
Client:
let media;
const done = document.getElementById('done');
const vidCon = document.getElementById('video-con');

var getUserMedia = (navigator.mediaDevices.getUserMedia ||
                    navigator.mediaDevices.webkitGetUserMedia ||
                    navigator.mediaDevices.mozGetUserMedia).bind(navigator.mediaDevices);

getUserMedia({
  video: true,
  audio: true
}).then((stream) => {
  const myVideo = document.createElement('video');
  myVideo.srcObject = stream;
  myVideo.setAttribute('autoplay', '');
  myVideo.setAttribute('muted', '');
  myVideo.setAttribute('playsinline', '');
  myVideo.style.width = '100%';
  myVideo.style.height = '80%';
  myVideo.muted = true;
  myVideo.style.display = 'block';
  myVideo.style.objectFit = 'cover';

  media = new MediaRecorder(stream);

  media.onstart = function(e) {
    this.chunks = [];
    myVideo.play();
    document.getElementById('video-base-con').append(myVideo);
  }

  done.onclick = function() {
    media.stop();
    audio.src = "93642-Blakes_7_Gun_144bpm.wav";
    audio.play();
    audio.addEventListener('ended', go);
    done.style.display = 'none';
    document.getElementById('blank-choosing').style.display = 'block';
  }

  media.ondataavailable = function(e) {
    this.chunks.push(e.data);
  }

  media.onstop = function(e) {
    myVideo.remove();
    var blob = new Blob(this.chunks, { 'type' : 'video/ogg; codecs=opus' });
    socket.emit('send-video', blob);
  }
});
socket.on('recieve-video', (stream, codeNew) => {
  if (codeNew == code.value) {
    document.getElementById('blank-video').style.display = 'none';
    console.log('recieved video.');
    const blob = new Blob([stream], { 'type' : 'video/ogg; codecs=opus' });
    const video = document.createElement('video');
    video.src = window.URL.createObjectURL(blob);
    video.setAttribute('autoplay', '');
    video.setAttribute('muted', '');
    video.setAttribute('playsinline', '');
    vidCon.style.display = 'block';
    video.style.width = '90%';
    video.style.height = '100%';
    video.style.objectFit = 'cover';
    vidCon.style.width = '100%';
    vidCon.style.height = '100%';
    vidCon.style.textAlign = 'center';
    vidCon.style.backgroundColor = 'lightgray';
    vidCon.style.borderRadius = '30px';
    vidCon.append(video);
    video.play();
    video.addEventListener('ended', () => {
      video.remove();
      vidCon.style.display = 'none';
      answers.style.display = 'block';
    }, false);
  }
});
Server:
socket.on('send-video', (blob) => {
  socket.broadcast.emit('recieve-video', blob, code);
});
Thanks in advance!
This is almost certainly a media type (f/k/a MIME type) issue. The default media types generated by MediaRecorder are not the same on Android and iOS devices. Right after your media = new MediaRecorder(stream) line, examine the media type with media.mimeType to see what default you received in each case.
You can try choosing the media type explicitly with code like this, so you don't get stuck with the default.
media = new MediaRecorder(stream, {mimeType: 'video/mp4'})
or
media = new MediaRecorder(stream, {mimeType: 'video/webm'})
You may have to struggle to find a common media type provided by both Android and iOS.
It looks like you're trying to choose the media type in your Blob constructor. You Can't Do That™. The media type is set when you construct your MediaRecorder.
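If you would rather probe than guess, MediaRecorder exposes a static isTypeSupported() method. A short sketch (the candidate list is illustrative, not exhaustive):

// Try a few candidate types and use the first one this browser can record.
// Safari tends to support video/mp4; Chrome and Android tend toward video/webm.
const candidates = [
  'video/mp4',
  'video/webm;codecs=vp9,opus',
  'video/webm;codecs=vp8,opus',
  'video/webm'
];
const chosen = candidates.find((t) => MediaRecorder.isTypeSupported(t));

// Fall back to the browser default if nothing on the list matched.
media = chosen ? new MediaRecorder(stream, { mimeType: chosen })
               : new MediaRecorder(stream);
console.log('Recording with media type:', media.mimeType);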

Is there a way to use PoseNet with live webcam feed?

I've already tried adding a video tag and then setting the source to the webcam, but this didn't work. It just produced 404s in the console. Here is the code I tried:
<html>
<head>
  <!-- Load TensorFlow.js -->
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
  <!-- Load Posenet -->
  <script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/posenet"></script>
</head>
<body>
  <video autoplay="true" id="videoElement">
  </video>
</body>
<script>
  var video = document.querySelector("#videoElement");

  if (navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ video: true })
      .then(function (stream) {
        video.srcObject = stream;
      })
  }

  var flipHorizontal = false;
  var imageElement = document.getElementById('videoElement');

  posenet.load().then(function(net) {
    const pose = net.estimateSinglePose(imageElement, {
      flipHorizontal: true
    });
    return pose;
  }).then(function(pose){
    var parts = pose["keypoints"];
    console.log(parts[9]);
  })
</script>
</html>
Please see our official example code for using the webcam with BodyPix (which is very similar to PoseNet but gives you even more detail). The webcam part of the code, however, would be the same:
CodePen: https://codepen.io/jasonmayes/pen/QWbNeJd
Or Glitch: https://glitch.com/edit/#!/tensorflow-js-body-segmentation
Essentially the key parts here are:
const video = document.getElementById('webcam');

// Check if webcam access is supported.
function hasGetUserMedia() {
  return !!(navigator.mediaDevices &&
            navigator.mediaDevices.getUserMedia);
}

// Enable the live webcam view and start classification.
function enableCam(event) {
  // getUsermedia parameters.
  const constraints = {
    video: true
  };

  // Activate the webcam stream.
  navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
    video.addEventListener('loadedmetadata', function() {
      // do something once loaded metadata
    });
    video.srcObject = stream;
    video.addEventListener('loadeddata', function(){
      // Do something once loaded.
    });
  });
}

// If webcam supported, add event listener to button for when user
// wants to activate it.
if (hasGetUserMedia()) {
  const enableWebcamButton = document.getElementById('webcamButton');
  enableWebcamButton.addEventListener('click', enableCam);
} else {
  console.warn('getUserMedia() is not supported by your browser');
}
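To tie that back to PoseNet specifically: the question's code calls estimateSinglePose once, before the webcam has produced any frames. A sketch of a per-frame loop (assuming the same posenet script tag and video element as in the question) would start the model only after loadeddata fires and then repeat via requestAnimationFrame:

// Run PoseNet continuously once the webcam has frames to read.
function startPoseLoop(videoEl) {
  posenet.load().then(function (net) {
    function detect() {
      net.estimateSinglePose(videoEl, { flipHorizontal: true })
        .then(function (pose) {
          console.log(pose.keypoints[9]); // one keypoint, as in the question
          requestAnimationFrame(detect);  // schedule the next frame
        });
    }
    detect();
  });
}

// Only start estimating after the video element actually has data.
video.addEventListener('loadeddata', function () {
  startPoseLoop(video);
});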

iOS Safari Video recording Blob/FileReader bug

My problem is that I have a script that takes video from the camera. On my iPhone there is a problem: I can't use a Blob as the URL for <video></video>, so I used a FileReader, which makes a base64 data URL from the Blob. But I found another problem. When I take video in portrait mode, the captured video comes out in landscape mode and is very wide. I need to rotate that video to portrait mode. I don't know if I have a mistake in the Blob or FileReader code. Can you help me please? Thanks.
This is my HTML Code:
<video autoplay="true" id="cameraVideo" playsinline webkit-playsinline>
This is my JavaScript code:
var video = document.querySelector("#cameraVideo");
var mode = "rear";
var mediaRecorder;
var chunks = [];

if (navigator.mediaDevices.getUserMedia) {
  navigator.mediaDevices.getUserMedia({ video: { facingMode: "environment" } }).then(function (stream) {
    video.srcObject = stream;
    mediaRecorder = new MediaRecorder(stream);
  }).catch(function (err0r) {
    alert("Something went wrong!");
  });
}

$(".camera").find(".take").on("touchstart mousedown", function() {
  mediaRecorder.start();
  mediaRecorder.ondataavailable = function(ev) {
    chunks.push(ev.data);
  }
});

$(".camera").find(".take").on("touchend mouseup", function() {
  mediaRecorder.stop();
  mediaRecorder.onstop = (ev) => {
    var blob = new Blob(chunks, { 'type' : 'video/mp4' });
    chunks = [];
    var videoURL = webkitURL.createObjectURL(blob);
    if (video.srcObject) video.srcObject.getTracks().forEach(t => t.stop());
    var reader = new FileReader();
    reader.readAsDataURL(blob);
    reader.onloadend = function() {
      document.getElementById("savevideo").src = reader.result;
      document.getElementById("savevideo").play();
    }
  }
});
[Screenshots from the original post are omitted: one taken while the video is recording, one after it is recorded.]
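No answer is recorded in this thread, but one general direction for the portrait/landscape problem (an editorial sketch only, not verified on iOS Safari) is to stop recording the camera stream directly and instead record a canvas onto which each frame is drawn rotated:

// Sketch: build a rotated stream from the camera via a <canvas>.
// Assumes `video` is already playing the camera stream, so
// videoWidth/videoHeight are known.
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
canvas.width = video.videoHeight;   // swap dimensions for portrait
canvas.height = video.videoWidth;

(function drawFrame() {
  ctx.save();
  ctx.translate(canvas.width / 2, canvas.height / 2); // rotate around the centre
  ctx.rotate(Math.PI / 2);
  ctx.drawImage(video, -video.videoWidth / 2, -video.videoHeight / 2);
  ctx.restore();
  requestAnimationFrame(drawFrame);
})();

var rotatedStream = canvas.captureStream(30); // capture the canvas at 30 fps
video.srcObject.getAudioTracks().forEach(function (t) {
  rotatedStream.addTrack(t); // keep the microphone track, if any
});
mediaRecorder = new MediaRecorder(rotatedStream);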

javascript record voice, then stop and save it at the 20 second mark

I took and modified code which records our voice and, after a click of the stop button, saves it as a .wav file. I made the code start recording onload, and added a function which automatically clicks the stop button after 20 seconds. But this function doesn't work! The code is right after function startRecording() {
console.log("recordButton clicked");. The line which doesn't work is setTimeout("function() {document.getElementById('stopRecording').click();}", 5000);
<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8">
  <title>Simple Recorder.js demo with record, stop and pause</title>
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body onload='startRecording()'>
  <div id="controls">
    <button id="recordButton">Record</button>
    <button id="pauseButton" disabled>Pause</button>
    <button id="stopButton" disabled>Stop</button>
  </div>
  <h3>Recordings</h3>
  <ol id="recordingsList"></ol>
  <!-- inserting these scripts at the end to be able to use all the elements in the DOM -->
  <script src="https://cdn.rawgit.com/mattdiamond/Recorderjs/08e7abd9/dist/recorder.js"></script>
  <script>
    URL = window.URL || window.webkitURL;

    var gumStream; // stream from getUserMedia()
    var rec;       // Recorder.js object
    var input;     // MediaStreamAudioSourceNode we'll be recording

    // shim for AudioContext when it's not avb.
    var AudioContext = window.AudioContext || window.webkitAudioContext;
    var audioContext = new AudioContext; // new audio context to help us record

    var recordButton = document.getElementById("recordButton");
    var stopButton = document.getElementById("stopButton");
    var pauseButton = document.getElementById("pauseButton");

    // add events to those 3 buttons
    recordButton.addEventListener("click", startRecording);
    stopButton.addEventListener("click", stopRecording);
    pauseButton.addEventListener("click", pauseRecording);

    function startRecording() {
      console.log("recordButton clicked");

      setTimeout("function() {document.getElementById('stopRecording').click();}", 5000);

      /*
        Simple constraints object; for more advanced audio features see
        https://addpipe.com/blog/audio-constraints-getusermedia/
      */
      var constraints = { audio: true, video: false }

      /*
        Disable the record button until we get a success or fail from getUserMedia()
      */
      recordButton.disabled = true;
      stopButton.disabled = false;
      pauseButton.disabled = false

      /*
        We're using the standard promise based getUserMedia()
        https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
      */
      navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
        console.log("getUserMedia() success, stream created, initializing Recorder.js ...");

        /* assign to gumStream for later use */
        gumStream = stream;

        /* use the stream */
        input = audioContext.createMediaStreamSource(stream);

        /*
          Create the Recorder object and configure to record mono sound (1 channel)
          Recording 2 channels will double the file size
        */
        rec = new Recorder(input, {numChannels: 1})

        // start the recording process
        rec.record()
        console.log("Recording started");
      }).catch(function(err) {
        // enable the record button if getUserMedia() fails
        recordButton.disabled = false;
        stopButton.disabled = true;
        pauseButton.disabled = true
      });
    }

    function pauseRecording() {
      console.log("pauseButton clicked rec.recording=", rec.recording);
      if (rec.recording) {
        // pause
        rec.stop();
        pauseButton.innerHTML = "Resume";
      } else {
        // resume
        rec.record()
        pauseButton.innerHTML = "Pause";
      }
    }

    function stopRecording() {
      console.log("stopButton clicked");

      // disable the stop button, enable the record button to allow for new recordings
      stopButton.disabled = true;
      recordButton.disabled = false;
      pauseButton.disabled = true;

      // reset button just in case the recording is stopped while paused
      pauseButton.innerHTML = "Pause";

      // tell the recorder to stop the recording
      rec.stop();

      // stop microphone access
      gumStream.getAudioTracks()[0].stop();

      // create the wav blob and pass it on to createDownloadLink
      rec.exportWAV(createDownloadLink);
    }

    function createDownloadLink(blob) {
      var url = URL.createObjectURL(blob);
      var au = document.createElement('audio');
      var li = document.createElement('li');
      var link = document.createElement('a');

      // add controls to the <audio> element
      au.controls = true;
      au.src = url;

      // link the a element to the blob
      link.href = url;
      link.download = new Date().toISOString() + '.wav';
      link.innerHTML = link.download;

      // add the new audio and a elements to the li element
      li.appendChild(au);
      li.appendChild(link);

      // add the li element to the ordered list
      recordingsList.appendChild(li);
    }
  </script>
</body>
</html>
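An editorial note on the line the asker singled out: it has three separate problems. setTimeout is given a string, so the function expression inside it is evaluated but never called; the markup has no element with id stopRecording (the stop button's id is stopButton); and 5000 ms is 5 seconds, not 20. A corrected line would be:

setTimeout(function () {
  document.getElementById('stopButton').click(); // id from the markup above
}, 20000); // 20 seconds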
It is possible to record audio with vanilla JavaScript. For example:
<button id="audioRecordButton">RECORD</button>

function recordAudioFromMicrophone()
{
  var mediaRecorder;
  var audioRecordButton = document.getElementById( "audioRecordButton" );
  var time;
  var isStartRec = true;

  var run = function(stream)
  {
    mediaRecorder = new MediaRecorder(stream);
    mediaRecorder.addEventListener("dataavailable", getAudio);
    audioRecordButton.addEventListener("click", recordHandler);
  }

  function recordHandler()
  {
    if (isStartRec)
    {
      startRec();
      time = setTimeout(stopRec, 20000);
    }
    else
    {
      stopRec();
      clearTimeout(time);
    }
  }

  function startRec()
  {
    mediaRecorder.start();
    isStartRec = false;
  }

  function stopRec()
  {
    mediaRecorder.stop();
    isStartRec = true;
  }

  function getAudio(blob){...}

  const enableAudio = { audio: true };
  navigator.mediaDevices.getUserMedia(enableAudio).then(run);
}
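One caveat on the example above: a dataavailable listener receives a BlobEvent, not a raw Blob, so the recorded data arrives as event.data. A hypothetical getAudio body (an illustration, not part of the original answer) might be:

function getAudio(event) {
  // event is a BlobEvent; the recorded audio lives in event.data
  var au = document.createElement('audio');
  au.controls = true;
  au.src = URL.createObjectURL(event.data);
  document.body.appendChild(au);
}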

How to remove camera permissions icon in chrome browser after MediaStreams stop

After I stop working with the camera, I still see the permission icon for the camera in the Chrome browser. I cannot reload the page after turning off the camera; I have to stay on this page.
var _video = document.getElementById('video');
var _mediaStream = null;

navigator.mediaDevices
  .getUserMedia({audio: false, video: true}).then(function (stream) {
    _mediaStream = stream;
    _video.src = window.URL.createObjectURL(stream);
    _video.play();
  });

document.getElementById('stopCamera').addEventListener('click', function () {
  _video.pause();
  _video.src = '';
  _mediaStream.getVideoTracks().forEach(function (track) {
    track.stop();
  });
  _mediaStream.getAudioTracks().forEach(function (track) {
    track.stop();
  });
  _mediaStream = null;
  _video.parentNode.removeChild(_video);
});
<button id="stopCamera">Stop Camera</button>
<video id="video" width="640" height="480" autoplay></video>
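An editorial footnote: modern Chrome removed support for passing a MediaStream to URL.createObjectURL, so the snippet above only runs on older versions. A srcObject-based equivalent (an untested sketch) is below; note that even after all tracks are stopped, Chrome may keep a passive camera icon in the address bar as a record that the page used the camera.

var _video = document.getElementById('video');
var _mediaStream = null;

navigator.mediaDevices
  .getUserMedia({ audio: false, video: true }).then(function (stream) {
    _mediaStream = stream;
    _video.srcObject = stream; // replaces URL.createObjectURL(stream)
    _video.play();
  });

document.getElementById('stopCamera').addEventListener('click', function () {
  _video.pause();
  _video.srcObject = null; // detach the stream from the element
  _mediaStream.getTracks().forEach(function (track) {
    track.stop(); // releases the camera hardware
  });
  _mediaStream = null;
});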
