Unable to generate .mov video thumbnail in JavaScript

I'm trying to generate a thumbnail from a video (.mov), but it's showing this error:
Error: Error 4; details: DEMUXER_ERROR_NO_SUPPORTED_STREAMS: FFmpegDemuxer: no supported streams
.mp4 and other formats work fine.
Chrome version: 101.0.4951.67 (Official Build) (64-bit)
Code:
async generateVideoThumbnail(file) {
    console.log('generating thumbnail')
    const binaryData = []
    binaryData.push(file)
    const canvas = document.createElement('canvas')
    const context = canvas.getContext('2d')
    const video = document.createElement('video')
    video.setAttribute('src', URL.createObjectURL(new Blob(binaryData)))
    video.onloadeddata = () => {
        console.log('Yay! The readyState just increased to ' +
            'HAVE_CURRENT_DATA or greater for the first time.');
    };
    video.onloadstart = () => {
        console.error(`load start`);
    }
    video.onwaiting = () => {
        console.log('Video is waiting for more data.');
    };
    video.onprogress = () => {
        console.log("Downloading video");
    };
    video.onerror = () => {
        console.log('video error')
        console.log("Error " + video.error.code + "; details: " + video.error.message);
    }
    console.log(video)
    console.log('video load')
    video.load()
    let thumbnail = await new Promise((resolve) => {
        video.onloadedmetadata = async () => {
            console.log('on load')
            canvas.width = video.videoWidth
            canvas.height = video.videoHeight
            // note: this seek is not awaited, so the frame drawn below may not be the midpoint
            video.currentTime = video.duration / 2
            await video.play()
            context.drawImage(video, 0, 0)
            video.pause()
            const blob = await new Promise((resolve) => {
                return canvas.toBlob(function (blob) {
                    resolve(blob)
                })
            })
            resolve(blob)
        }
    })
    return thumbnail
},

I don't think Chrome is able to play .mov files.
You can check this in the console by writing something like:
const video = document.createElement('video')
console.log(video.canPlayType('video/mp4')) //expect 'maybe'
console.log(video.canPlayType('video/ogg')) //expect 'maybe'
console.log(video.canPlayType('video/quicktime')) //expect ''
Firefox, on the other hand, seems to be able to play them; you might try your app there.
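If you want to fail gracefully instead of hitting the demuxer error, a minimal sketch (not from the original post; the helper name, the file.type check and the placeholder fallback are assumptions) is to probe container support before attempting the thumbnail:

function canBrowserDecode(file) {
    // canPlayType returns '', 'maybe' or 'probably'; an empty string means no known support
    const probe = document.createElement('video')
    return probe.canPlayType(file.type || 'video/quicktime') !== ''
}

// usage sketch: fall back to a placeholder image for .mov uploads in Chrome
// if (!canBrowserDecode(file)) { return placeholderThumbnail }
// return await this.generateVideoThumbnail(file)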

Related

Can I use addEventListener('play') inside a function? The function runs, but the addEventListener('play') callback does not

const faceDetection = () => {
    const video = document.getElementById(`video`);
    console.log(video);
    const displaySize = { width: video.width, height: video.height };
    video.addEventListener('click', () => {
        console.log(`run`);
    });
    video.addEventListener('play', () => {
        console.log(`run`);
        const canvas = faceapi.createCanvasFromMedia(video);
        camera.append(canvas);
        faceapi.matchDimensions(canvas, displaySize);
        // interval
        setInterval(async () => {
            console.log(`this run`);
            const detections = await faceapi
                .detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
                .withFaceLandmarks();
            const resizedDetections = faceapi.resizeResults(detections, displaySize);
            canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
            faceapi.draw.drawDetections(canvas, resizedDetections);
            faceapi.draw.drawFaceLandmarks(canvas, resizedDetections);
        }, 100);
    });
};
The video.addEventListener('click') handler does run, but video.addEventListener('play') does not. Can someone please give me an idea of why it isn't working?
My mistake: I did call the vid.play() on the navigator.mediaDevices.getUserMedia():
navigator.mediaDevices
    .getUserMedia({ video: true })
    .then((stream) => {
        vid.srcObject = stream;
        // vid.play();
        if (backend === 'webgl') return faceDetection(100);
        if (backend === 'cpu') return faceDetection(1000);
        // note: the two lines below are unreachable because of the returns above
        track = stream.getTracks();
        resetMessages();
    })
    .catch((e) => {
        console.log(e);
    })
    .finally(() => {
        preloader.style.display = 'none';
    });
Hi, maybe your component is not loaded yet when you call the function; can you try adding a timeout?
Like in this issue:
Stack Overflow - How can I make a waitFor(delay)
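For reference, the waitFor(delay) helper that answer points to can be as small as this (a sketch; the 500 ms value is arbitrary):

const waitFor = (delayMs) => new Promise((resolve) => setTimeout(resolve, delayMs))

// usage sketch: give the DOM/component time to appear before wiring up the listeners
// await waitFor(500)
// faceDetection()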

Chrome extension video recording blob not able to be converted into a video file

I am creating a Chrome extension to record the screen and am facing an issue converting the video recording blob into a video file. In background.js the video is recorded correctly, but in content.js I am not able to convert the video blob into a video file.
function startRecording() {
    var constraints = {
        audio: true,
        video: true,
        maxframeRate: fps, // note: maxframeRate is not a standard constraint; frameRate is
    };
    navigator.mediaDevices.getDisplayMedia(constraints).then(function (stream) {
        let output = new MediaStream();
        if (output.getAudioTracks().length == 0) {
            // Get microphone audio (system audio is unreliable & doesn't work on Mac)
            if (micable) {
                micsource.connect(destination);
                output.addTrack(destination.stream.getAudioTracks()[0]);
            }
        } else {
            syssource = audioCtx.createMediaStreamSource(stream);
            if (micable) {
                micsource.connect(destination);
            }
            syssource.connect(destination);
            output.addTrack(destination.stream.getAudioTracks()[0]);
        }
        output.addTrack(stream.getVideoTracks()[0]);
        mediaConstraints = {
            audio: true,
            video: true,
            mimeType: "video/webm;codecs=vp8,opus",
        };
        // note: this records the raw display `stream`; the assembled `output` stream above is never recorded
        mediaRecorder = new MediaRecorder(stream, mediaConstraints);
        mediaRecorder.start(1000);
        var recordedBlobs = [];
        let writer = "";
        mediaRecorder.ondataavailable = (event) => {
            if (event.data && event.data.size > 0) {
                recordedBlobs.push(event.data);
            }
            console.log("recordedBlobs", recordedBlobs);
        };
        mediaRecorder.onstop = () => {
            // note: chrome.tabs.getSelected is deprecated; chrome.tabs.query is the current API
            chrome.tabs.getSelected(null, (tab) => {
                chrome.tabs.sendMessage(tab.id, {
                    message: "download-video",
                    obj: {
                        blobs: recordedBlobs,
                    },
                    // camerasize: camerasize
                });
            });
            endRecording(stream, writer, recordedBlobs);
        };
        stream.getVideoTracks()[0].onended = function () {
            cancel = false;
            mediaRecorder.stop();
        };
    });
}
content.js
function convertVideoBlobToVideo(obj) {
    let chunks = obj.blobs;
    // mediaRecorder.onstop = () => {
    var superBuffer;
    superBuffer = new Blob(chunks, {
        type: "video/webm",
    });
    chunks = [];
    // Create a video or audio element
    // that stores the recorded media
    const recordedMedia = document.createElement("video");
    recordedMedia.controls = true;
    const recordedMediaURL = URL.createObjectURL(superBuffer);
    recordedMedia.src = recordedMediaURL;
    const downloadButton = document.createElement("a");
    downloadButton.download = "Recorded-Media";
    downloadButton.href = recordedMediaURL;
    downloadButton.innerText = "Download it!";
    downloadButton.onclick = () => {
        // release the object URL once the download link has been used
        URL.revokeObjectURL(recordedMediaURL);
    };
    // append() accepts multiple nodes, unlike appendChild()
    document.body.append(recordedMedia, downloadButton);
    // };
}
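One thing worth checking in a setup like this (a general observation, not a confirmed diagnosis of this extension): chrome.tabs.sendMessage JSON-serializes its payload, so raw Blob objects generally do not arrive intact in content.js. A minimal sketch of one workaround is to convert the recording to a data URL before messaging it and rebuild the Blob on the other side; the sendRecording and rebuildBlob names are made up for illustration:

// background side (sketch): serialize the recording before messaging it
function blobToDataURL(blob) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.onload = () => resolve(reader.result) // e.g. "data:video/webm;base64,..."
        reader.onerror = reject
        reader.readAsDataURL(blob)
    })
}

async function sendRecording(tabId, recordedBlobs) {
    const dataUrl = await blobToDataURL(new Blob(recordedBlobs, { type: 'video/webm' }))
    chrome.tabs.sendMessage(tabId, { message: 'download-video', obj: { dataUrl } })
}

// content.js side (sketch): rebuild a Blob from the data URL
async function rebuildBlob(dataUrl) {
    const response = await fetch(dataUrl)
    return response.blob()
}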

How to upload video recorded with MediaRecorder API using PHP?

I am working on a video recording task using the MediaRecorder API. From the frontend I can start the webcam, record a video, play the recorded video and download it.
But when I try to upload the video to the PHP server, it doesn't work at all. I don't really understand why this is happening; I have tried many approaches, but none of them work. Please check the code attached below.
JS:
let mediaRecorder
let recordedBlobs
const errorMsgElement = document.querySelector('span#errorMsg');
const recordedVideo = document.querySelector('video#recorded');
const recordButton = document.querySelector('button#record');
const playButton = document.querySelector('button#play');
const downloadButton = document.querySelector('button#download');

document.querySelector("button#start").addEventListener("click", async function() {
    const hasEchoCancellation = document.querySelector("#echoCancellation").checked
    const constraints = {
        audio: {
            echoCancellation: {
                exact: hasEchoCancellation
            }
        },
        video: {
            width: 1280,
            height: 720
        }
    }
    await init(constraints)
})

async function init(constraints) {
    try {
        const stream = await navigator.mediaDevices.getUserMedia(constraints)
        handleSuccess(stream)
    } catch(e) {
        console.log(e)
    }
}

function handleSuccess(stream) {
    recordButton.disabled = false
    window.stream = stream
    const gumVideo = document.querySelector("video#gum")
    gumVideo.srcObject = stream
}

recordButton.addEventListener("click", () => {
    if (recordButton.textContent === "Record") {
        startRecording()
    } else {
        stopRecording()
        recordButton.textContent = 'Record'
        playButton.disabled = false
        downloadButton.disabled = false
    }
})

function startRecording() {
    recordedBlobs = []
    let options = {
        mimeType: "video/webm;codecs=vp9,opus"
    }
    try {
        mediaRecorder = new MediaRecorder(window.stream, options)
    } catch(e) {
        console.log(e)
    }
    recordButton.textContent = "Stop Recording"
    playButton.disabled = true
    downloadButton.disabled = true
    mediaRecorder.onstop = (event) => {
        console.log('Recording Stopped')
    }
    mediaRecorder.ondataavailable = handleDataAvailable
    mediaRecorder.start()
}

function handleDataAvailable(event) {
    if (event.data && event.data.size > 0) {
        recordedBlobs.push(event.data)
    }
}

function stopRecording() {
    mediaRecorder.stop()
}

playButton.addEventListener('click', function() {
    const superBuffer = new Blob(recordedBlobs, {
        type: 'video/webm'
    })
    var file = new File([superBuffer], 'test.webm')
    var url = window.URL.createObjectURL(superBuffer)
    // var video = blobToFile(superBuffer, 'test.webm')
    sendToServer(file)
    recordedVideo.src = null
    recordedVideo.srcObject = null
    recordedVideo.src = url
    recordedVideo.controls = true
    recordedVideo.play()
})

downloadButton.addEventListener('click', () => {
    const blob = new Blob(recordedBlobs, {type: 'video/mp4'});
    const url = window.URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.style.display = 'none';
    a.href = url;
    a.download = 'test.mp4';
    document.body.appendChild(a);
    a.click();
    setTimeout(() => {
        document.body.removeChild(a);
        window.URL.revokeObjectURL(url);
    }, 100);
});

function sendToServer(file) {
    let url = 'send.php'
    let headers = {
        'Content-Type': 'multipart/form-data'
    }
    var formData = new FormData()
    formData.append("file", file)
    axios.post(url, formData, headers)
        .then((response) => {
            console.log(response.data)
        })
        .catch((error) => {
            console.log(error.response)
        })
}

function blobToFile(theBlob, fileName) {
    // A Blob() is almost a File() - it's just missing the two properties below which we will add
    theBlob.lastModifiedDate = new Date();
    theBlob.name = fileName;
    return theBlob;
}
PHP:
$target_dir = "uploads/";
$target_file = $target_dir . 'test.webm';
if (move_uploaded_file($_FILES["file"]["tmp_name"], $target_file)) {
    echo "File uploaded successfully";
} else {
    echo "File not uploaded";
}
print_r($_FILES['file']['error']);
No matter how much I try, I can't figure out why it is not working. It prints "File not uploaded", as if it can't read the file from tmp_name. Please help me fix this problem.
Any help on this problem will be really appreciated.
Thank you.
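Not a confirmed diagnosis, but two common pitfalls in code like this are passing the headers object directly as axios's third argument (it expects a config object such as { headers }) and setting the multipart/form-data Content-Type by hand, which drops the boundary parameter PHP needs before it will populate $_FILES. A minimal sketch that lets axios and the browser generate the header automatically; sendToServerSketch is a hypothetical name:

function sendToServerSketch(file) {
    const formData = new FormData()
    formData.append('file', file, 'test.webm')
    // no manual Content-Type header: the browser adds multipart/form-data with a boundary
    return axios.post('send.php', formData)
        .then((response) => console.log(response.data))
        .catch((error) => console.log(error.response))
}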

How to pass recorded video data from JavaScript to Python

I'm trying to pass recorded blobs of video data from some JavaScript code to my routes so that I can save them.
I'm a newbie.
The JavaScript records video using the user's webcam and saves it as recordedBlob. I'm trying to pass that recordedBlob data to my Python routes for saving.
This is the JavaScript code; it is in my HTML file:
<script type="text/javascript">
    let video = document.getElementById("video");
    let recording = document.getElementById("recording");
    let startButton = document.getElementById("startButton");
    let stopButton = document.getElementById("stopButton");
    let downloadButton = document.getElementById("downloadButton");
    let logElement = document.getElementById("log");
    let recordingTimeMS = 5000;

    function log(msg) {
        logElement.innerHTML += msg + "\n";
    }

    function wait(delayInMS) {
        return new Promise(resolve => setTimeout(resolve, delayInMS));
    }

    function startRecording(stream, lengthInMS) {
        let recorder = new MediaRecorder(stream);
        let data = [];
        recorder.ondataavailable = event => data.push(event.data);
        recorder.start();
        log(recorder.state + " for " + (lengthInMS/1000) + " seconds...");
        let stopped = new Promise((resolve, reject) => {
            recorder.onstop = resolve;
            recorder.onerror = event => reject(event.name);
        });
        let recorded = wait(lengthInMS).then(
            () => recorder.state == "recording" && recorder.stop()
        );
        return Promise.all([
            stopped,
            recorded
        ])
        .then(() => data);
    }

    function stop(stream) {
        stream.getTracks().forEach(track => track.stop());
    }

    startButton.addEventListener("click", function() {
        navigator.mediaDevices.getUserMedia({
            video: true,
            audio: true
        }).then(stream => {
            video.srcObject = stream;
            downloadButton.href = stream;
            video.captureStream = video.captureStream || video.mozCaptureStream;
            return new Promise(resolve => video.onplaying = resolve);
        }).then(() => startRecording(video.captureStream(), recordingTimeMS))
        .then(recordedChunks => {
            let recordedBlob = new Blob(recordedChunks, { type: "video/webm" });
            recording.src = URL.createObjectURL(recordedBlob);
            downloadButton.href = recording.src;
            downloadButton.download = "RecordedVideo.webm";
            log("Successfully recorded " + recordedBlob.size + " bytes of " +
                recordedBlob.type + " media.");
        })
        .catch(log);
    }, false);

    stopButton.addEventListener("click", function() {
        stop(video.srcObject);
    }, false);
</script>
This is the routes.py where I'm trying to pass the recordedBlob data:
from flask import render_template, redirect, url_for

@posts.route('/post/new/vlog', methods=['GET', 'POST'])
def new_vlog():
    if current_user.is_authenticated:
        return render_template('vlog.html', title='New Vlog', video={recordedBlob})
        # note: the lines below are unreachable after the return above
        if video.data:
            video_file = save_video(video.data)
            return redirect(url_for('main.home'))
    else:
        return redirect(url_for('users.login'))
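The recorded Blob only exists in the browser, so it has to be posted over HTTP rather than referenced directly in the template. A minimal sketch of the JavaScript side, assuming a hypothetical /upload_video Flask route; on the server the upload would then be available as request.files['video']:

// sketch only: POST the recorded Blob to the backend as multipart form data
function uploadRecording(recordedBlob) {
    const formData = new FormData()
    formData.append('video', recordedBlob, 'RecordedVideo.webm')
    return fetch('/upload_video', { method: 'POST', body: formData })
        .then((response) => response.text())
        .then((text) => log('Server responded: ' + text))
        .catch(log)
}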

Record front and back webcams at the same time

I have the following code which records with the back camera of the tablet for 5 seconds and downloads the video:
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<script>
navigator.mediaDevices.enumerateDevices()
    .then(devices => {
        var deviceId = [];
        devices.forEach(function(device) {
            if (device.kind == 'videoinput')
                deviceId.push(device.deviceId);
        });
        // deviceId[0] equals the id of the front camera (resolution: 1280x720)
        // deviceId[1] equals the id of the back camera (resolution: 1920x1080)
        navigator.mediaDevices.getUserMedia({ audio: true, video: { deviceId: { exact: deviceId[1] }, width: 1920, height: 1080 } })
            .then(stream => record(stream, 5000)
                .then(recording => {
                    stop(stream);
                    var a = document.createElement('a');
                    a.href = URL.createObjectURL(new Blob(recording));
                    a.download = "recording.webm";
                    a.click();
                })
                .catch(log).then(() => stop(stream)))
            .catch(log);
    })
    .catch(log);

var record = (stream, ms) => {
    var rec = new MediaRecorder(stream), data = [];
    rec.ondataavailable = e => data.push(e.data);
    rec.start();
    var stopped = new Promise((r, e) => (rec.onstop = r, rec.onerror = e));
    return Promise.all([stopped, wait(ms).then(() => rec.stop())])
        .then(() => data);
};

var stop = stream => stream.getTracks().forEach(track => track.stop());
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
var log = err => console.log(err.name + ': ' + err.message);
</script>
</body>
</html>
What I'm trying to figure out is how I can record both the front and back webcams of the tablet at the same time and download the videos as separate files.
I have found this question, but it's old, so I'm not sure whether it's possible today.
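As a rough sketch of the idea, assuming the hardware and browser allow both cameras to stream at once (many devices do not), you could open one stream per deviceId and run one MediaRecorder per stream, reusing the record, stop, wait and log helpers from the snippet above; recordBoth is a made-up name:

async function recordBoth(deviceIds, ms) {
    // open both cameras (audio disabled here to keep the sketch simple)
    const streams = await Promise.all(deviceIds.map(id =>
        navigator.mediaDevices.getUserMedia({ audio: false, video: { deviceId: { exact: id } } })
    ))
    // record both streams in parallel with the existing record() helper
    const recordings = await Promise.all(streams.map(stream => record(stream, ms)))
    streams.forEach(stop)
    // download each recording as its own file
    recordings.forEach((data, i) => {
        const a = document.createElement('a')
        a.href = URL.createObjectURL(new Blob(data, { type: 'video/webm' }))
        a.download = 'recording-' + i + '.webm'
        a.click()
    })
}

// usage sketch: recordBoth([deviceId[0], deviceId[1]], 5000).catch(log)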
