My problem is: I have a script that takes video from the camera. On my iPhone there is a problem: I can't use a Blob as the URL for <video></video>, so I used a FileReader, which makes a base64 data URL from the Blob. But I found another problem: when I take video in portrait mode, the captured video is in landscape mode and far too wide. I need to rotate that video to portrait mode. I don't know if I have a mistake in the Blob or FileReader code. Can you help me, please? Thanks.
This is my HTML code:
<video autoplay="true" id="cameraVideo" playsinline webkit-playsinline></video>
This is my JavaScript code:
var video = document.querySelector("#cameraVideo");
var mode = "rear";
var mediaRecorder;
var chunks = [];

if (navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ video: { facingMode: "environment" } }).then(function (stream) {
        video.srcObject = stream;
        mediaRecorder = new MediaRecorder(stream);
    }).catch(function (err) {
        alert("Something went wrong!");
    });
}
$(".camera").find(".take").on("touchstart mousedown", function() {
mediaRecorder.start();
mediaRecorder.ondataavailable = function(ev) {
chunks.push(ev.data);
}
});
$(".camera").find(".take").on("touchend mouseup", function() {
mediaRecorder.stop();
mediaRecorder.onstop = (ev)=>{
var blob = new Blob(chunks, { 'type' : 'video/mp4' });
chunks = [];
var videoURL = webkitURL.createObjectURL(blob);
if(video.srcObject) video.srcObject.getTracks().forEach(t => t.stop());
var reader = new FileReader();
reader.readAsDataURL(blob);
reader.onloadend = function() {
document.getElementById("savevideo").src = reader.result;
document.getElementById("savevideo").play();
}
}
});
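Not part of the original script, but a quick diagnostic may help here: MediaStreamTrack.getSettings() reports the dimensions the camera actually delivers, which shows whether the recorded track itself is landscape (the snippet below opens its own stream purely for illustration):
navigator.mediaDevices.getUserMedia({ video: { facingMode: "environment" } }).then(function (stream) {
    // The camera sensor often delivers landscape frames (e.g. 1280x720);
    // if width > height here, the recording itself is landscape and any
    // portrait appearance in the preview comes from how it is displayed.
    var s = stream.getVideoTracks()[0].getSettings();
    console.log("capture size: " + s.width + "x" + s.height);
});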
Pictures: screenshots show the preview while the video is recording, and the played-back result after it is recorded.
Related
This works perfectly fine on Android (every part of it). But when I receive a video stream wrapped in a blob on iOS, sent from Android or from another iOS device, it shows no sign of loading or displaying the video. However, when I show my own video to myself on iOS, it works.
I have tried the following:
video.setAttribute('autoplay', '');
video.setAttribute('playsinline', '');
video.setAttribute('muted', '');
I also tried adding a source element to the video element, but none of this worked.
How am I supposed to fix the receiving video issue on iOS?
Code (sorry for all the styling):
Client:
let media;
const done = document.getElementById('done');
const vidCon = document.getElementById('video-con');
var getUserMedia = (navigator.mediaDevices.getUserMedia || navigator.mediaDevices.webkitGetUserMedia || navigator.mediaDevices.mozGetUserMedia).bind(navigator.mediaDevices);
getUserMedia({
    video: true,
    audio: true
}).then((stream) => {
    const myVideo = document.createElement('video');
    myVideo.srcObject = stream;
    myVideo.setAttribute('autoplay', '');
    myVideo.setAttribute('muted', '');
    myVideo.setAttribute('playsinline', '');
    myVideo.style.width = '100%';
    myVideo.style.height = '80%';
    myVideo.muted = true;
    myVideo.style.display = 'block';
    myVideo.style.objectFit = 'cover';

    media = new MediaRecorder(stream);

    media.onstart = function (e) {
        this.chunks = [];
        myVideo.play();
        document.getElementById('video-base-con').append(myVideo);
    };

    done.onclick = function () {
        media.stop();
        audio.src = "93642-Blakes_7_Gun_144bpm.wav";
        audio.play();
        audio.addEventListener('ended', go);
        done.style.display = 'none';
        document.getElementById('blank-choosing').style.display = 'block';
    };

    media.ondataavailable = function (e) {
        this.chunks.push(e.data);
    };

    media.onstop = function (e) {
        myVideo.remove();
        var blob = new Blob(this.chunks, { type: 'video/ogg; codecs=opus' });
        socket.emit('send-video', blob);
    };
});
socket.on('recieve-video', (stream, codeNew) => {
    if (codeNew == code.value) {
        document.getElementById('blank-video').style.display = 'none';
        console.log('recieved video.');
        const blob = new Blob([stream], { type: 'video/ogg; codecs=opus' });
        const video = document.createElement('video');
        video.src = window.URL.createObjectURL(blob);
        video.setAttribute('autoplay', '');
        video.setAttribute('muted', '');
        video.setAttribute('playsinline', '');
        vidCon.style.display = 'block';
        video.style.width = '90%';
        video.style.height = '100%';
        video.style.objectFit = 'cover';
        vidCon.style.width = '100%';
        vidCon.style.height = '100%';
        vidCon.style.textAlign = 'center';
        vidCon.style.backgroundColor = 'lightgray';
        vidCon.style.borderRadius = '30px';
        vidCon.append(video);
        video.play();
        video.addEventListener('ended', () => {
            video.remove();
            vidCon.style.display = 'none';
            answers.style.display = 'block';
        }, false);
    }
});
Server:
socket.on('send-video', (blob) => {
    socket.broadcast.emit('recieve-video', blob, code);
});
Thanks in advance!
This is almost certainly a media type (f/k/a MIME type) issue. The default media types generated by MediaRecorder are not the same on Android and iOS devices. Right after your media = new MediaRecorder(stream) line, examine the media type with media.mimeType to see which default you received in each case.
You can try choosing the media type explicitly with code like this, so you don't get stuck with the default.
media = new MediaRecorder(stream, {mimeType: 'video/mp4'})
or
media = new MediaRecorder(stream, {mimeType: 'video/webm'})
You may have to struggle to find a common media type provided by both Android and iOS.
It looks like you're trying to choose the media type in your Blob constructor. You Can't Do That™. The media type is set when you construct your MediaRecorder.
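If you'd rather not hard-code a guess, the standard MediaRecorder.isTypeSupported() check lets you pick a type at runtime. A minimal sketch (the candidate list is an assumption, not exhaustive):
var candidates = [
    'video/mp4',                  // Safari / iOS
    'video/webm;codecs=vp8,opus', // Chrome, Firefox
    'video/webm'
];
// Pick the first container/codec string this browser's MediaRecorder supports.
var mimeType = candidates.find(function (t) {
    return MediaRecorder.isTypeSupported(t);
});
media = new MediaRecorder(stream, mimeType ? { mimeType: mimeType } : {});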
My goal here is to continuously render a video blob e.data into the video element, i.e. continuous playback.
The code below throws:
DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
What could be wrong in this code?
var options = { mimeType: 'video/webm' };
var mediaRecorder = new MediaRecorder(stream, options);
var player = document.querySelector("#player");
var mediaSource = new MediaSource();
player.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen, { once: true });

function sourceOpen() {
    //URL.revokeObjectURL(player.src);
    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
    mediaRecorder.ondataavailable = function (e) {
        console.log("Data available");
        if (e.data.size > 0) {
            e.data.arrayBuffer().then(function (array) {
                sourceBuffer.appendBuffer(array);
            });
        }
    };
}
Update: this is solved by sourceBuffer.appendBuffer(new Uint8Array(array)); however, the code is still not functional in Chrome; it only works in Firefox.
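Two more things are worth checking; both are assumptions on my part, not confirmed by the post. First, the SourceBuffer above declares codecs="vorbis,vp8" while Chrome's MediaRecorder produces Opus audio by default; a decode error closes the MediaSource, which detaches its SourceBuffers and yields exactly this exception on the next append (Firefox is more forgiving here). Second, appendBuffer() throws if it is called while a previous append is still processing. A sketch that keeps the codec strings in sync and queues appends, reusing stream and player from the snippet above:
var mime = 'video/webm;codecs=vp8,opus';
var mediaRecorder = new MediaRecorder(stream, { mimeType: mime });
var mediaSource = new MediaSource();
var queue = [];
player.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
    // Declare the same codecs the recorder actually produces.
    var sourceBuffer = mediaSource.addSourceBuffer(mime);

    // Drain the queue one buffer at a time; appending while
    // sourceBuffer.updating is true throws an InvalidStateError.
    sourceBuffer.addEventListener('updateend', function () {
        if (queue.length > 0 && !sourceBuffer.updating) {
            sourceBuffer.appendBuffer(queue.shift());
        }
    });

    mediaRecorder.ondataavailable = function (e) {
        if (e.data.size === 0) return;
        e.data.arrayBuffer().then(function (buf) {
            if (sourceBuffer.updating || queue.length > 0) {
                queue.push(buf); // wait for the current append to finish
            } else {
                sourceBuffer.appendBuffer(buf);
            }
        });
    };
    mediaRecorder.start(1000); // emit a chunk every second
}, { once: true });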
I am trying to write a small application that can record video in the browser and upload it to a server.
I have got the code below:
<html>
<body>
    <video id="video" playsinline autoplay></video>
    <script>
        function hasGetUserMedia() {
            return !!(navigator.mediaDevices &&
                navigator.mediaDevices.getUserMedia);
        }

        if (hasGetUserMedia()) {
            console.log("Good to go");
        } else {
            console.log('Not supported');
        }

        const constraints = {
            video: true,
            audio: true,
        };

        function start() {
            navigator.mediaDevices.getUserMedia(constraints).
                then((stream) => { video.srcObject = stream });
            var videoEl = document.getElementById('video');
            stream = videoEl.srcObject;
        }

        function stop() {
            var videoEl = document.getElementById('video');
            stream = videoEl.srcObject;
            tracks = stream.getTracks();
            tracks.forEach(function (track) {
                track.stop();
            });
            downloadLink.href = URL.createObjectURL(new Blob(tracks[0]));
            downloadLink.download = 'acetest.webm';
        }
    </script>
    <button onclick="start()">Start</button>
    <button onclick="stop()">Stop</button>
</body>
</html>
I can see the video on the screen, but I'm unsure how to capture it to a file for upload.
I have tried using URL.createObjectURL(new Blob(tracks[0])), but this doesn't work. How can I save the video once the Stop button is pressed?
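For what it's worth, a track object carries no recorded bytes, so new Blob(tracks[0]) has nothing to wrap; the recorded data has to come from a MediaRecorder. A minimal sketch along those lines, where the recorder wiring and the synthetic download link are my own assumptions rather than part of the original snippet:
let recorder;
let chunks = [];

function start() {
    navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
        document.getElementById('video').srcObject = stream;
        recorder = new MediaRecorder(stream);
        recorder.ondataavailable = (e) => chunks.push(e.data);
        recorder.onstop = () => {
            // Assemble the recorded chunks and offer them as a download.
            const blob = new Blob(chunks, { type: 'video/webm' });
            const a = document.createElement('a');
            a.href = URL.createObjectURL(blob);
            a.download = 'acetest.webm';
            a.click();
        };
        recorder.start();
    });
}

function stop() {
    recorder.stop();
    const videoEl = document.getElementById('video');
    videoEl.srcObject.getTracks().forEach((t) => t.stop());
}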
I'm trying to build a web app that fetches a bird sound from an API, plays it, and fetches a new bird once the old one has finished playing. As of now it works in Safari and Firefox, but the script stops in Chrome.
function init() {
    console.log('init');
    var container = document.getElementById("container");
    container.innerHTML = "Machines & Birds";
    fetchBirdAPI();
}

function fetchBirdAPI() {
    fetch(url)
        .then(function (response) {
            return response.json();
        }).then(function (response) {
            console.log(response);
            getAudio(response);
        });
}

function getAudio(response) {
    if (response.numRecordings !== 0) {
        var birdSrc = response.recordings[0].file;
        var audio = document.createElement('AUDIO');
        audio.src = birdSrc;
        audio.addEventListener('loadedmetadata', function () {
            console.log(audio.duration);
        });
        audio.play();
        audio.onended = function () {
            console.log('Audio Ended');
            init();
        };
    } else {
        init();
    }
}
So basically audio.onended never fires. I also tried creating the audio with audio = new Audio(), without any luck.
I'm really flabbergasted here, so any input would be appreciated.
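One thing worth ruling out (an assumption on my part, since the post doesn't show how init() is first triggered): Chrome's autoplay policy blocks play() calls that don't follow a user gesture, and if play() is blocked, onended can never fire because playback never starts. The promise returned by play() makes that failure visible:
// play() returns a promise in modern browsers; in Chrome it rejects when
// autoplay is blocked, which would explain onended never firing there.
audio.play()
    .then(function () { console.log('playing'); })
    .catch(function (err) { console.error('playback blocked:', err); });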
I am trying to implement the Chrome desktopCapture API with the MediaStreamRecorder library. Everything works, but the video quality is very blurred and bad, and one minute of captured desktop video takes 14 MB.
Below is my code:
var pending_request_id;

chrome.runtime.onMessage.addListener(function (message, sender, sendResponse) {
    startRecording();
    sendResponse({ "success": true });
});

function getUserMediaError() {
    console.log("getUserMedia() failed.");
}

function onAccessApproved(id) {
    if (!id) {
        console.log("Access rejected.");
        return;
    }
    navigator.webkitGetUserMedia({
        audio: false,
        video: {
            mandatory: {
                chromeMediaSource: "desktop",
                chromeMediaSourceId: id
            }
        }
    }, onMediaSuccess, getUserMediaError);
}

function startRecording() {
    pending_request_id = chrome.desktopCapture.chooseDesktopMedia(
        ["window"], onAccessApproved);
}

function onMediaSuccess(stream) {
    console.log("rcvd stream");
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/mp4';
    // I don't want a stretchy video, so I fixed the recorder's width and height to the window size
    mediaRecorder.width = window.screen.width;
    mediaRecorder.height = window.screen.height;
    mediaRecorder.ondataavailable = function (blob) {
        var blobURL = URL.createObjectURL(blob);
        var link = blobURL;
        var videoInfo = "Compiled video file size: " + Math.ceil(blob.size / 1024) + "KB";
        console.log(link);
        console.log(videoInfo);
    };
    mediaRecorder.start(30000); // I want unlimited recording time, so I increased the timeslice
    stream.onended = function () {
        mediaRecorder.stop();
        //finalizeVideo();
        console.log("Ended");
    };
}

function onMediaError(e) {
    console.error('media error', e);
}
Before using this library, I tried to save the streamed video using Whammy.js, but I failed to do so; then I found this library.
Questions:
1. Is there any way to increase the quality of the video and also compress its size?
2. How can I save the video, which is returned as a blob:chrome URL, to the desktop as a proper video file?
3. As an alternative, if anyone knows how to do this in Whammy.js, kindly let me know.
Thanks,
This might help make your video quality better:
navigator.webkitGetUserMedia({
    audio: false,
    video: {
        mandatory: {
            chromeMediaSource: "desktop",
            chromeMediaSourceId: id,
            maxWidth: 4000,
            maxHeight: 4000
        }
    }
}, onMediaSuccess, getUserMediaError);
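For the second question (saving the recording as a real file on disk), a common approach is to hand the recorded blob to a temporary download link. This is only a sketch; the filename and the idea of hooking it into ondataavailable are my assumptions, not part of the original answer.
mediaRecorder.ondataavailable = function (blob) {
    // MediaStreamRecorder passes the blob directly; offer it as a download.
    var a = document.createElement('a');
    a.href = URL.createObjectURL(blob);
    a.download = 'desktop-capture.mp4';
    document.body.appendChild(a);
    a.click();
    a.remove();
};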