Extract frames at specific times from a video - javascript

I am using video.js in my React application. I am streaming video from a URL, not from the local machine, and I pass this URL to the player. I want to capture/extract some frames from the video.
Problem: Let's say I have a time-in and time-out given as 20 sec and 30 sec respectively. I want to randomly extract 4 frames between 20 and 30 sec. I don't want the frames to be extracted only when playback reaches 20 sec; I want to extract them as soon as the page loads.
Here is what I have tried:
async componentDidMount() {
  this.init()
  timeline_width = document.getElementById("timeline").offsetWidth
  let frames = await this.extractFramesFromVideo(this.props.video_reducer.selected_video_file);
}

async extractFramesFromVideo(videoUrl, fps = 25) {
  return new Promise(async (resolve) => {
    // fully download it first (no buffering):
    let videoBlob = await fetch(videoUrl).then(r => r.blob());
    let videoObjectUrl = URL.createObjectURL(videoBlob);
    let video = document.createElement("video");

    let seekResolve;
    video.addEventListener('seeked', async function() {
      if (seekResolve) seekResolve();
    });

    video.addEventListener('loadeddata', async function() {
      let canvas = document.getElementById('prevImgCanvas');
      let context = canvas.getContext('2d');
      let [w, h] = [video.videoWidth, video.videoHeight];
      canvas.width = w;
      canvas.height = h;

      let frames = [];
      let interval = 1 / fps;
      let currentTime = 0;
      let duration = video.duration;

      while (currentTime < duration) {
        video.currentTime = currentTime;
        await new Promise(r => seekResolve = r);

        context.drawImage(video, 0, 0, w, h);
        let base64ImageData = canvas.toDataURL();
        frames.push(base64ImageData);

        currentTime += interval;
      }
      resolve(frames);
    });

    // set video src *after* listening to events in case it loads so fast
    // that the events occur before we were listening.
    video.src = videoObjectUrl;
  });
}
But this extracts all the frames of the video. I just want specific frames.
Can someone please suggest a solution to do this?

Let's say you have to get a still image from the video below:
<video id="video" controls="controls">
<source src=".mp4" />
</video>
<button id="capture">Capture</button>
<div id="output"></div>
Use the following function to capture an image from the video once it has loaded:
(function() {
  "use strict";

  var video, $output;
  var scale = 0.25;

  var initialize = function() {
    $output = $("#output");
    video = $("#video").get(0);
    $("#capture").click(captureImage);
  };

  var captureImage = function() {
    var canvas = document.createElement("canvas");
    canvas.width = video.videoWidth * scale;
    canvas.height = video.videoHeight * scale;
    canvas.getContext('2d')
      .drawImage(video, 0, 0, canvas.width, canvas.height);

    var img = document.createElement("img");
    img.src = canvas.toDataURL();
    $output.prepend(img);
  };

  $(initialize);
}());
Thanks to Chris Brandsma. I have used this on WordPress as custom code and made some changes for my own needs, so you will need to add some code to capture images at random times.
You can find this code on: Tutorial Code
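To cover the original requirement (randomly picking 4 frames between 20 s and 30 s as soon as the page loads), one option is to seek only to a handful of random timestamps instead of stepping through the whole duration at a fixed fps. Below is a minimal sketch along the lines of the question's extractFramesFromVideo; the function name extractRandomFrames and the timeIn/timeOut/count parameters are made up for illustration, and it assumes the video URL is CORS-accessible (otherwise canvas.toDataURL() will throw).
// Sketch: capture `count` frames at random times inside [timeIn, timeOut].
async function extractRandomFrames(videoUrl, timeIn = 20, timeOut = 30, count = 4) {
  // Fully download the video first so seeking is fast and reliable.
  const blob = await fetch(videoUrl).then((r) => r.blob());

  const video = document.createElement('video');
  video.muted = true;

  // Wait until dimensions and frame data are available
  // (listener attached before setting src, as in the question).
  const loaded = new Promise((resolve) =>
    video.addEventListener('loadeddata', resolve, { once: true }));
  video.src = URL.createObjectURL(blob);
  await loaded;

  const canvas = document.createElement('canvas');
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  const ctx = canvas.getContext('2d');

  // Pick `count` random timestamps in the window, sorted so we only seek forward.
  const times = Array.from({ length: count },
    () => timeIn + Math.random() * (timeOut - timeIn)).sort((a, b) => a - b);

  const frames = [];
  for (const t of times) {
    // Seeking is asynchronous: wait for 'seeked' before drawing the frame.
    const seeked = new Promise((resolve) =>
      video.addEventListener('seeked', resolve, { once: true }));
    video.currentTime = t;
    await seeked;

    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
    frames.push(canvas.toDataURL('image/jpeg'));
  }

  URL.revokeObjectURL(video.src);
  return frames; // data URLs, in chronological order
}

// usage: let frames = await extractRandomFrames(videoUrl, 20, 30, 4);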

Related

How to record HD video from a canvas with MediaRecorder at a set framerate? [duplicate]

I want to record a video from an HTML <canvas> element at a specific frame rate.
I am using CanvasCaptureMediaStream via canvas.captureStream(fps) and also have access to the video track via const track = stream.getVideoTracks()[0], so I call track.requestFrame() to write frames to the output video buffer via MediaRecorder.
I want to precisely capture one frame at a time and then change the canvas content. Changing the canvas content can take some time (images need to be loaded, etc.), so I cannot capture the canvas in real time.
Some changes on the canvas would take around 500 ms of real time, so this also needs to be adjusted to render one frame at a time.
The MediaRecorder API is meant to record live streams; doing editing is not what it was designed to do, and it doesn't do it very well, to be honest...
The MediaRecorder itself has no concept of frame rate; this is normally defined by the MediaStreamTrack. However, the CanvasCaptureMediaStreamTrack doesn't really make it clear what its frame rate is.
We can pass a parameter to HTMLCanvasElement.captureStream(), but this only tells it the maximum number of frames we want per second; it's not really an fps parameter.
Also, even if we stop drawing on the canvas, the recorder will still continue to extend the duration of the recorded video in real time (though I think that technically only a single long frame is recorded in this case).
So... we're gonna have to hack around...
One thing we can do with the MediaRecorder is to pause() and resume() it.
It then sounds quite easy to pause before doing the long drawing operation and to resume right after it's been made? Yes... and not that easy either...
Once again, the frame rate is dictated by the MediaStreamTrack, but this MediaStreamTrack cannot be paused.
Well, actually there is one way to pause a special kind of MediaStreamTrack, and luckily I'm talking about CanvasCaptureMediaStreamTracks.
When we call captureStream() with a parameter of 0, we basically have manual control over when new frames are added to the stream.
So here we can synchronize both our MediaRecorder and our MediaStreamTrack to whatever frame rate we want.
The basic workflow is
await the_long_drawing_task;
resumeTheRecorder();
writeTheFrameToStream(); // track.requestFrame();
await wait( time_per_frame );
pauseTheRecorder();
Doing so, the recorder is awake only for the time per frame we decided, and a single frame is passed to the MediaStream during this time, effectively mocking constant-FPS drawing as far as the MediaRecorder is concerned.
But as always, hacks in this still-experimental area come with a lot of browser weirdness, and the following demo actually only works in current Chrome...
For whatever reason, Firefox will always generate files with twice the number of frames that were requested, and it will also occasionally prepend a long first frame...
Also to be noted: Chrome has a bug where it will update the canvas stream on drawing, even though we initiated this stream with a frameRequestRate of 0. This means that if you start drawing before everything is ready, or if the drawing on your canvas itself takes a long time, the recorder will record half-baked frames that we didn't ask for.
To work around this bug, we thus need to use a second canvas, used only for the streaming. All we'll do on that canvas is drawImage the source one, which will always be a fast enough operation to avoid that bug.
class FrameByFrameCanvasRecorder {
  constructor(source_canvas, FPS = 30) {
    this.FPS = FPS;
    this.source = source_canvas;

    const canvas = this.canvas = source_canvas.cloneNode();
    const ctx = this.drawingContext = canvas.getContext('2d');

    // we need to draw something on our canvas
    ctx.drawImage(source_canvas, 0, 0);

    const stream = this.stream = canvas.captureStream(0);
    const track = this.track = stream.getVideoTracks()[0];

    // Firefox still uses a non-standard CanvasCaptureMediaStream
    // instead of CanvasCaptureMediaStreamTrack
    if (!track.requestFrame) {
      track.requestFrame = () => stream.requestFrame();
    }

    // prepare our MediaRecorder
    const rec = this.recorder = new MediaRecorder(stream);
    const chunks = this.chunks = [];
    rec.ondataavailable = (evt) => chunks.push(evt.data);
    rec.start();

    // we need to be in 'paused' state
    waitForEvent(rec, 'start')
      .then((evt) => rec.pause());

    // expose a Promise for when it's done
    this._init = waitForEvent(rec, 'pause');
  }
  async recordFrame() {
    await this._init; // we have to wait for the recorder to be paused

    const rec = this.recorder;
    const canvas = this.canvas;
    const source = this.source;
    const ctx = this.drawingContext;

    if (canvas.width !== source.width ||
        canvas.height !== source.height) {
      canvas.width = source.width;
      canvas.height = source.height;
    }

    // start our timer now so whatever happens between is not taken in account
    const timer = wait(1000 / this.FPS);

    // wake up the recorder
    rec.resume();
    await waitForEvent(rec, 'resume');

    // draw the current state of source on our internal canvas (triggers requestFrame in Chrome)
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.drawImage(source, 0, 0);

    // force write the frame
    this.track.requestFrame();

    // wait until our frame-time elapsed
    await timer;

    // sleep recorder
    rec.pause();
    await waitForEvent(rec, 'pause');
  }
  async export() {
    this.recorder.stop();
    this.stream.getTracks().forEach((track) => track.stop());
    await waitForEvent(this.recorder, "stop");
    return new Blob(this.chunks);
  }
}
///////////////////
// how to use:
(async () => {
  const FPS = 30;
  const duration = 5; // seconds

  let x = 0;
  let frame = 0;

  const ctx = canvas.getContext('2d');
  ctx.textAlign = 'right';

  draw(); // we must have drawn on our canvas context before creating the recorder
  const recorder = new FrameByFrameCanvasRecorder(canvas, FPS);

  // draw one frame at a time
  while (frame++ < FPS * duration) {
    await longDraw(); // do the long drawing
    await recorder.recordFrame(); // record at constant FPS
  }
  // now all the frames have been drawn
  const recorded = await recorder.export(); // we can get our final video file
  vid.src = URL.createObjectURL(recorded);
  vid.onloadedmetadata = (evt) => vid.currentTime = 1e100; // workaround https://crbug.com/642012
  download(vid.src, 'movie.webm');

  // Fake long drawing operations that make real-time recording impossible
  function longDraw() {
    x = (x + 1) % canvas.width;
    draw(); // this triggers a bug in Chrome
    return wait(Math.random() * 300)
      .then(draw);
  }

  function draw() {
    ctx.fillStyle = 'white';
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    ctx.fillStyle = 'black';
    ctx.fillRect(x, 0, 50, 50);
    ctx.fillText(frame + " / " + FPS * duration, 290, 140);
  }
})().catch(console.error);
<canvas id="canvas"></canvas>
<video id="vid" controls></video>
<script>
// Some helpers

// Promise based timer
function wait(ms) {
  return new Promise(res => setTimeout(res, ms));
}

// implements a sub-optimal monkey-patch for requestPostAnimationFrame
// see https://stackoverflow.com/a/57549862/3702797 for details
if (!window.requestPostAnimationFrame) {
  window.requestPostAnimationFrame = function monkey(fn) {
    const channel = new MessageChannel();
    channel.port2.onmessage = evt => fn(evt.data);
    requestAnimationFrame((t) => channel.port1.postMessage(t));
  };
}

// Promisifies EventTarget.addEventListener
function waitForEvent(target, type) {
  return new Promise((res) => target.addEventListener(type, res, {
    once: true
  }));
}

// creates a downloadable anchor from a url
function download(url, filename = "file.ext") {
  const a = document.createElement('a');
  a.textContent = a.download = filename;
  a.href = url;
  document.body.append(a);
  return a;
}
</script>
I asked a similar question which has been linked to this one. In the meantime I came up with a solution which overlaps Kaiido's and which I think is worth reading.
I added two tricks:
I deferred the next render (see the code), which fixes the problem of Firefox generating twice the number of frames.
I stored an accumulated timing error to correct setTimeout's inaccuracies. I personally used it to tweak the progression of my render, for example to skip frames if there is a sudden latency and to keep the duration of the video close to the target duration. It is not enough to smooth out setTimeout, though.
const recordFrames = (onstop, canvas, fps = 30) => {
  const chunks = [];

  // get Firefox to initialise the canvas
  canvas.getContext('2d').fillRect(0, 0, 0, 0);

  const stream = canvas.captureStream();
  const recorder = new MediaRecorder(stream);
  recorder.addEventListener('dataavailable', ({data}) => chunks.push(data));
  recorder.addEventListener('stop', () => onstop(new Blob(chunks)));

  const frameDuration = 1000 / fps;
  const frame = (next, start) => {
    recorder.pause();
    api.error += Date.now() - start - frameDuration;
    setTimeout(next, 0); // helps Firefox record the right frame duration
  };

  const api = {
    error: 0,
    init() {
      recorder.start();
      recorder.pause();
    },
    step(next) {
      recorder.resume();
      setTimeout(frame, frameDuration, next, Date.now());
    },
    stop: () => recorder.stop()
  };

  return api;
}
How to use:
const fps = 30;
const duration = 5000;
const animation = Something;

const videoOutput = blob => {
  const video = document.createElement('video');
  video.src = URL.createObjectURL(blob);
  document.body.appendChild(video);
}

const recording = recordFrames(videoOutput, canvas, fps);

const startRecording = () => {
  recording.init();
  animation.play();
};

// I am assuming you can call these from your library
const onAnimationRender = nextFrame => recording.step(nextFrame);
const onAnimationEnd = () => recording.step(recording.stop);

let now = 0;
const progression = () => {
  now = now + 1 + recording.error * fps / 1000;
  recording.error = 0;
  return now * 1000 / fps / duration;
}
I found this solution to be satisfying at 30fps in both Chrome and Firefox. I didn't experience the Chrome bugs mentioned by Kaiido and thus didn't implement anything to deal with them.

Show webcam / camera stream with delay - webrtc

I've made a simple setup: getting the webcam / phone camera stream and then passing it on, drawing it on an HTML 2D canvas.
But I've been having trouble figuring out how to show the stream with a delay of a few seconds, kind of like a delay mirror.
I tried playing with ctx.globalAlpha = 0.005;, but this gives me a ghosting effect rather than 'delaying' the stream.
Any idea how this can be achieved?
The snippet below doesn't work here, probably because of security issues, but here's a pen:
https://codepen.io/farisk/pen/LvmGGQ
var width = 0, height = 0;

var canvas = document.createElement('canvas'),
    ctx = canvas.getContext('2d');
document.body.appendChild(canvas);

var video = document.createElement('video'),
    track;
video.setAttribute('autoplay', true);
window.vid = video;

function getWebcam() {
  navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
    var videoTracks = stream.getVideoTracks();
    var newStream = new MediaStream(stream.getVideoTracks());

    video.srcObject = newStream;
    video.play();

    track = stream.getTracks()[0];
  }, function(e) {
    console.error('Rejected!', e);
  });
}

getWebcam();

var rotation = 0,
    loopFrame,
    centerX,
    centerY,
    twoPI = Math.PI * 2;

function loop() {
  loopFrame = requestAnimationFrame(loop);
  // ctx.globalAlpha = 0.005;
  ctx.drawImage(video, 0, 0, width, height);
  ctx.restore();
}

function startLoop() {
  loopFrame = requestAnimationFrame(loop);
}

video.addEventListener('loadedmetadata', function() {
  width = canvas.width = video.videoWidth;
  height = canvas.height = video.videoHeight;
  centerX = width / 2;
  centerY = height / 2;
  startLoop();
});

canvas.addEventListener('click', function() {
  if (track) {
    if (track.stop) { track.stop(); }
    track = null;
  } else {
    getWebcam();
  }
});
video,
canvas {
  max-width: 100%;
  height: auto;
}
You might want to consider storing the video frames you get in an array of sorts. It means delaying the playback for n seconds at first.
Basically, on frame 1 you store the video frame into an array and draw nothing. Keep doing this until you have buffered enough frames to cover the delay you want. At that point, start drawing based on the first element of the array.
Once you have drawn that frame, remove it from the array and add the new frame.
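A rough sketch of that idea (not part of the original answer): buffer each captured frame together with a timestamp, and only start painting frames once they are older than the desired delay. The delaySeconds value and the use of createImageBitmap are my own choices here, not something the answer prescribes.
// Sketch: show the webcam with a fixed delay by buffering frames in memory.
const delaySeconds = 3;
const buffer = []; // { time, bitmap } pairs, oldest first

const video = document.createElement('video');
video.muted = true;

const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
document.body.appendChild(canvas);

navigator.mediaDevices.getUserMedia({ video: true }).then((stream) => {
  video.srcObject = stream;
  return video.play();
}).then(() => {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  requestAnimationFrame(loop);
}).catch(console.error);

function loop() {
  // Capture the current frame together with its capture time.
  // Note: a few seconds of ImageBitmaps can use a lot of memory at high resolutions.
  createImageBitmap(video).then((bitmap) => {
    buffer.push({ time: performance.now(), bitmap });
  });

  // Draw (and discard) the oldest frame once it is old enough.
  if (buffer.length && performance.now() - buffer[0].time >= delaySeconds * 1000) {
    const { bitmap } = buffer.shift();
    ctx.drawImage(bitmap, 0, 0);
    bitmap.close(); // free the bitmap's memory
  }
  requestAnimationFrame(loop);
}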

Issue with creating thumbnail from video in Typescript

I'm trying to generate a thumbnail image from a video file. This is my code:
let canvas: HTMLCanvasElement = document.createElement('canvas');
let video: HTMLVideoElement = document.createElement('video');
video.src = video_url;
video.currentTime = 4;

video.onloadedmetadata = () => {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  video.currentTime = 4;
};

video.onloadeddata = () => {
  video.currentTime = 4;
  canvas.getContext('2d').drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
  canvas.toBlob(thumbnailBlob => { grabThumbnail(thumbnailBlob) }, 'image/png');
};
It creates the image, but it's just a blank image; it doesn't capture the video frame at the currentTime position. What am I doing wrong?
Setting video.currentTime = 4 is asynchronous in nature and will not make the frame available immediately. You must add an event listener for the seeked event; in that listener you can generate the thumbnail of the frame at that time.
Let me know if it works.
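For reference, here is a minimal sketch of that suggestion in plain JavaScript; video_url and grabThumbnail are assumed to exist as in the question's code.
// Sketch: draw the frame only after the seek has actually completed.
const canvas = document.createElement('canvas');
const video = document.createElement('video');
video.preload = 'auto';
video.muted = true;

video.onloadedmetadata = () => {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  video.currentTime = 4; // starts an asynchronous seek
};

video.onseeked = () => {
  // The frame at currentTime is now decoded and can be drawn.
  canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
  canvas.toBlob((thumbnailBlob) => grabThumbnail(thumbnailBlob), 'image/png');
};

video.src = video_url; // set src after the listeners are attached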

Display canvas as gif for video preview

I'm working on a website and I am dealing with videos.
My need is to display a GIF as a preview / teaser for the videos on another page, which redirects to the video.
What I found & added so far:
HTML
<div id=thumbs></div>
CSS
#video {width:320px}
JS
var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");

video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);

video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j / 4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  }
}, false);

video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";

function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);
  thumbs.appendChild(c);
  thumbs.replaceChild(c, thumbs.childNodes[0]);
}
What I do is get a video from its URL and grab 5 frames at equal intervals. This gives me canvases, and I'd like to display them as a GIF or a succession of images.
Since you only want to display it, instead of trying to generate a gif, the easiest way is probably to do the animation yourself.
You already have the code to fetch the video frames, but you are currently showing them in the DOM and forgetting about them once displayed.
What you can do from this is store these frames directly as canvases, and draw all these canvases on a final, visible canvas in a timed loop.
var thumbsList = []; // we will save our frames as canvases in here
var delay = 500; // the speed of the animation (ms)

function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);

  thumbsList.push(c); // store this frame in our list

  if (thumbsList.length === 1) {
    displayThumbs(); // start animating as soon as we got a frame
  }
}

// initialises the display canvas, and starts the animation loop
function displayThumbs() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  thumbs.appendChild(c);
  startAnim(ctx); // pass our visible canvas' context
}

function startAnim(ctx) {
  var currentFrame = 0;
  // here is the actual loop
  function anim() {
    ctx.drawImage(thumbsList[currentFrame], 0, 0); // draw the currentFrame
    // increase our counter, and set it to 0 if too large
    currentFrame = (currentFrame + 1) % thumbsList.length;
    setTimeout(anim, delay); // do it again in x ms
  }
  anim(); // let's go!
}

var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");

/* OP's code */
video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);

video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j / 4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  } else {
    // displayFrame(); // wait for all images to be parsed before animating
  }
}, false);

video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";
<div id=thumbs></div>

Recording speed javascript

I am currently creating a project that supports video recording through my website.
I create a canvas and then push the recorded frames to it. The problem is that when I play the video after it's recorded, it plays too fast: a 10-second video plays in about 2 seconds. I have checked that playbackRate is set to 1. I save the recording to a database and it's sped up there as well, so it has nothing to do with the browser's video player.
I am relatively new to AngularJS and JavaScript, so I'm sorry if I left something important out.
I have tried changing a lot of the values back and forth but I can't seem to find the cause of the problem. Any ideas?
Here is the code for the video recording:
scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');

    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;

    function drawVideoFrame(time) {
      videoRecorder = requestAnimationFrame(drawVideoFrame);
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }

    videoRecorder = requestAnimationFrame(drawVideoFrame); // Note: not using vendor prefixes!
    scope.recording = true;
  }
};

scope.stopRecording = function () {
  cancelAnimationFrame(videoRecorder); // Note: not using vendor prefixes!

  // 2nd param: framerate for the video file.
  scope.video.files = Whammy.fromImageArray(recordedFrames, 1000 / 30);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, 1000 / 30);

  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
I am guessing the culprit is requestAnimationFrame: left on its own, you cannot tell at what interval it keeps calling the callback; it can be as high as 60fps.
Also, looking at your code, I cannot tell how you came to the conclusion that frame rate = 1000/30.
My advice (at least for your case) would be to go with $interval.
You can do something like:
scope.frameRate = 10; // the amount I consider ideal for client-side video recording.
var videoInterval;

scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');

    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;

    function drawVideoFrame() {
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }

    videoInterval = $interval(drawVideoFrame, 1000 / scope.frameRate);
    scope.recording = true;
  }
};

scope.stopRecording = function () {
  $interval.cancel(videoInterval);

  // 2nd param: framerate for the video file.
  scope.video.files = Whammy.fromImageArray(recordedFrames, scope.frameRate);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, scope.frameRate); // you could change this to some file-copy method to avoid processing the images into a video twice.

  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
