I've made a simple setup: I get the webcam / phone camera stream and pass it on, drawing it onto an HTML 2D canvas.
But I've been having trouble figuring out how to show the stream with a delay of a few seconds, kind of like a delay mirror.
I tried playing with ctx.globalAlpha = 0.005;, but this gives me a ghosting effect rather than delaying the stream.
Any idea how this can be achieved?
The snippet below doesn't work here, probably because of security restrictions, but here's a pen:
https://codepen.io/farisk/pen/LvmGGQ
var width = 0, height = 0;

var canvas = document.createElement('canvas'),
    ctx = canvas.getContext('2d');
document.body.appendChild(canvas);

var video = document.createElement('video'),
    track;
video.setAttribute('autoplay', true);
window.vid = video;

function getWebcam() {
  navigator.mediaDevices.getUserMedia({ video: true }).then(function (stream) {
    var videoTracks = stream.getVideoTracks();
    var newStream = new MediaStream(stream.getVideoTracks());

    video.srcObject = newStream;
    video.play();
    track = stream.getTracks()[0];
  }, function (e) {
    console.error('Rejected!', e);
  });
}

getWebcam();

var rotation = 0,
    loopFrame,
    centerX,
    centerY,
    twoPI = Math.PI * 2;

function loop() {
  loopFrame = requestAnimationFrame(loop);
  // ctx.globalAlpha = 0.005;
  ctx.drawImage(video, 0, 0, width, height);
  ctx.restore();
}

function startLoop() {
  loopFrame = requestAnimationFrame(loop);
}

video.addEventListener('loadedmetadata', function () {
  width = canvas.width = video.videoWidth;
  height = canvas.height = video.videoHeight;
  centerX = width / 2;
  centerY = height / 2;
  startLoop();
});

canvas.addEventListener('click', function () {
  if (track) {
    if (track.stop) { track.stop(); }
    track = null;
  } else {
    getWebcam();
  }
});
video,
canvas {
  max-width: 100%;
  height: auto;
}
You might want to consider storing the video data you get in an array of sorts. It will mean delaying the playback by n seconds at first.
Basically, on frame 1 you store the video feed into the array and draw nothing. You keep doing this until enough frames have accumulated to cover the delay (roughly 60 frames for a one-second delay at 60 fps). At that point you start drawing based on the first element of the array.
Once you draw that frame, remove it from the array and push the new frame.
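A minimal sketch of that buffered approach, written as a replacement for the loop() above and reusing the ctx, video, width and height from the snippet (the 3-second delay and the buffer name are assumptions, not from the original code):

var DELAY_MS = 3000;   // how far behind "live" the mirror should be (assumed value)
var buffer = [];       // oldest frame first: [{ time, frame }]

function delayedLoop() {
  requestAnimationFrame(delayedLoop);

  // copy the current camera frame into an offscreen canvas and store it
  var frame = document.createElement('canvas');
  frame.width = width;
  frame.height = height;
  frame.getContext('2d').drawImage(video, 0, 0, width, height);
  buffer.push({ time: performance.now(), frame: frame });

  // once the oldest stored frame is DELAY_MS old, draw and discard it;
  // until then nothing is drawn, which is the initial "filling the buffer" phase
  if (performance.now() - buffer[0].time >= DELAY_MS) {
    ctx.drawImage(buffer.shift().frame, 0, 0);
  }
}

delayedLoop();

Keep in mind that a few seconds of full-resolution frames at 60 fps is a lot of memory, so in practice you may want to store downscaled copies or cap the buffer length.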
With the code below, the quality of the video coming from my Mac's camera and shown inside the <video> element is great.
However, the quality of the frame I capture and show on p5's canvas is pretty low, dark, and grainy. Why is that, and can I fix it?
function setup() {
  let canvas = createCanvas(canvasSize, canvasSize)
  canvas.elt.width = canvasSize
  canvas.elt.height = canvasSize
  video = createCapture(VIDEO)
}

let PAUSE = false

async function draw() {
  if (video && video.loadedmetadata) {
    if (!PAUSE) {
      // the quality of this image is much lower than what is shown inside p5's <video>
      image(video.get(), 0, 0, canvasSize, canvasSize, x, y, canvasSize, canvasSize)
      PAUSE = true
    }
  }
}
I found what the problem was.
It was not due to me setting canvas.elt.width and canvas.elt.height, even though setting them is indeed redundant.
It's because in the code shown in the OP I was capturing the very first frame, and that is too soon: the very first frame is still dark and blurry. Apparently the first few frames coming from the camera are like that.
If I give my code a delay of e.g. 5 seconds, the frame it captures is the exact same quality as the one coming from the video feed.
let video
let canvasWidth = 400

// set this to 10 on https://editor.p5js.org/ and you'll see the problem
const DELAY = 5000

function setup() {
  let canvas = createCanvas(canvasWidth, canvasWidth)
  canvas.elt.width = canvasWidth // redundant
  canvas.elt.height = canvasWidth // redundant
  video = createCapture(VIDEO)
}

let PAUSE = false
let start = Date.now()

async function draw() {
  let delay = Date.now() - start
  if (video && video.loadedmetadata) {
    if (delay > DELAY && !PAUSE) {
      PAUSE = true
      let x = Math.round((video.width / 2) - (canvasWidth / 2))
      let y = Math.round((video.height / 2) - (canvasWidth / 2))
      // the quality of this image is now perfect
      image(video.get(), 0, 0, canvasWidth, canvasWidth, x, y, canvasWidth, canvasWidth)
    }
  }
}
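An equivalent trick that doesn't rely on wall-clock time is to skip a fixed number of draw() calls using p5's built-in frameCount. A small sketch of that variant (the warm-up count is an assumption):

const WARMUP_FRAMES = 300 // ~5 s at the default 60 fps draw loop (assumed value)

function draw() {
  if (video && video.loadedmetadata && frameCount > WARMUP_FRAMES && !PAUSE) {
    PAUSE = true
    let x = Math.round((video.width / 2) - (canvasWidth / 2))
    let y = Math.round((video.height / 2) - (canvasWidth / 2))
    // by now the camera has settled, so the captured frame is no longer dark
    image(video.get(), 0, 0, canvasWidth, canvasWidth, x, y, canvasWidth, canvasWidth)
  }
}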
You should not be setting width/height this way. That will mess up the sizing on high DPI displays and cause your image to appear stretched and blurry.
const canvasSize = 500;

function setup() {
  let canvas = createCanvas(canvasSize, canvasSize)
  // Don't do this, it will mess up the sizing on high DPI displays:
  // canvas.elt.width = canvasSize
  // canvas.elt.height = canvasSize
  video = createCapture(VIDEO)
}

let PAUSE = false;

function draw() {
  if (video && video.loadedmetadata) {
    if (!PAUSE) {
      // the quality of this image is much lower than what is shown inside p5's <video>
      image(video.get(), 0, 0, canvasSize, canvasSize, 0, 0, canvasSize, canvasSize)
    }
  }
}

function keyPressed() {
  if (key === 'p') {
    PAUSE = !PAUSE;
  }
}
With this code I paused the video being rendered to the p5.js canvas and then took a screenshot. The version of the video displayed on the p5.js canvas was indistinguishable from the live video.
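If you do want explicit control over the backing-store resolution, p5 exposes pixelDensity() for that instead of touching canvas.elt directly. A minimal sketch, assuming the same globals as above:

const canvasSize = 500;

function setup() {
  // p5 already multiplies the canvas element's width/height by the display
  // density, so createCanvas alone keeps the image sharp on retina screens.
  createCanvas(canvasSize, canvasSize);
  // Uncomment to force a 1:1 pixel mapping (e.g. before reading pixels):
  // pixelDensity(1);
  video = createCapture(VIDEO);
}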
Introduction
I'm trying to deal with blurry visuals in my canvas animation. The blurriness is especially prevalent on mobile devices and retina / high-DPI (dots-per-inch) screens.
I'm looking for a way to ensure the pixels drawn using the canvas look their best on both low-DPI and high-DPI screens. As a solution to this problem I read multiple articles about canvas down-scaling and followed this tutorial:
https://www.kirupa.com/canvas/canvas_high_dpi_retina.htm
Integrating down-scaling in the project
The project in which I want to implement down-scaling can be found below and consists of a few important features:
There is a (big) main canvas. (Performance optimization)
There are multiple (pre-rendered) smaller canvases that are used to draw and load an image into. (Performance optimization)
The canvas is animated. (In the code snippet there is no visible animation, but the animation function is integrated.)
Question
What I'm trying to achieve: the problem I'm facing seems quite simple. When the website (with the canvas) is opened on a mobile device (e.g. an iPhone, with more pixels per inch than a regular desktop), the images appear more blurry. What I'm actually trying to achieve is to remove this blurriness from the images. I read that blurriness can be removed by down-scaling, and I tried to incorporate this technique in the code provided, but it did not work completely: the images just became larger and I was unable to scale them back to the original size. In the snippet it is not implemented correctly and the output is still blurry. What did I do wrong, and how can I fix this issue?
Explanation of the code snippet
The variable devicePixelRatio is set to 2 to simulate a high-DPI phone screen; low-DPI screens have a devicePixelRatio of 1.
Multiple pre-rendered canvases are generated in the spawn function. In the snippet there are 5 different canvases, but in the production environment there are tens of them.
If any information is missing or you have questions about this post, please let me know. Thanks a lot!
Code Snippet
var canvas = document.querySelector('canvas');
var c = canvas.getContext('2d');

var circles = [];

// Simulate Retina screen = 2, Normal screen = 1
let devicePixelRatio = 2;

function mainCanvasPixelRatio() {
  // get current size of the canvas
  let rect = canvas.getBoundingClientRect();

  // increase the actual size of our canvas
  canvas.width = rect.width * devicePixelRatio;
  canvas.height = rect.height * devicePixelRatio;

  // ensure all drawing operations are scaled
  c.scale(devicePixelRatio, devicePixelRatio);

  // scale everything down using CSS
  canvas.style.width = rect.width + 'px';
  canvas.style.height = rect.height + 'px';
}

// Initial Spawn
function spawn() {
  for (let i = 0; i < 2; i++) {
    // Set radius
    let radius = parseInt(i * 30);

    // Give position
    let x = Math.round((canvas.width / devicePixelRatio) / 2);
    let y = Math.round((canvas.height / devicePixelRatio) / 2);

    // Begin pre-render canvas
    let PreRenderCanvas = document.createElement('canvas');
    const tmp = PreRenderCanvas.getContext("2d");

    // Set PreRenderCanvas width and height
    let PreRenderCanvasWidth = ((radius * 2) * 1.5) + 1;
    let PreRenderCanvasHeight = ((radius * 2) * 1.5) + 1;

    // Increase the actual size of PreRenderCanvas
    PreRenderCanvas.width = PreRenderCanvasWidth * devicePixelRatio;
    PreRenderCanvas.height = PreRenderCanvasHeight * devicePixelRatio;

    // Scale PreRenderCanvas down using CSS
    PreRenderCanvas.style.width = PreRenderCanvasWidth + 'px';
    PreRenderCanvas.style.height = PreRenderCanvasHeight + 'px';

    // Ensure PreRenderCanvas drawing operations are scaled
    tmp.scale(devicePixelRatio, devicePixelRatio);

    // Init image
    const image = new Image();

    // Get center of PreRenderCanvas
    let m_canvasCenterX = (PreRenderCanvas.width / devicePixelRatio) * .5;
    let m_canvasCenterY = (PreRenderCanvas.height / devicePixelRatio) * .5;

    // Draw red circle on PreRenderCanvas
    tmp.strokeStyle = "red";
    tmp.beginPath();
    tmp.arc(m_canvasCenterX, m_canvasCenterY, (PreRenderCanvas.width / devicePixelRatio) / 3, 0, 2 * Math.PI);
    tmp.lineWidth = 2;
    tmp.stroke();
    tmp.restore();
    tmp.closePath();

    // Set image
    image.src = "https://play-lh.googleusercontent.com/IeNJWoKYx1waOhfWF6TiuSiWBLfqLb18lmZYXSgsH1fvb8v1IYiZr5aYWe0Gxu-pVZX3";

    // Get padding
    let paddingX = (PreRenderCanvas.width / devicePixelRatio) / 5;
    let paddingY = (PreRenderCanvas.height / devicePixelRatio) / 5;

    // Load image
    image.onload = function () {
      tmp.beginPath();
      tmp.drawImage(image, paddingX, paddingY, (PreRenderCanvas.width / devicePixelRatio) - (paddingX * 2), (PreRenderCanvas.height / devicePixelRatio) - (paddingY * 2));
      tmp.closePath();
    };

    let circle = new Circle(x, y, c, PreRenderCanvas);
    circles.push(circle);
  }
}

// Circle parameters
function Circle(x, y, c, m_canvas) {
  this.x = x;
  this.y = y;
  this.c = c;
  this.m_canvas = m_canvas;
}

// Draw circle on canvas
Circle.prototype = {
  // Draw circle on canvas
  draw: function () {
    this.c.drawImage(this.m_canvas, (this.x - (this.m_canvas.width) / 2), (this.y - this.m_canvas.height / 2));
  }
};

// Animate
function animate() {
  // Clear canvas each time
  c.clearRect(0, 0, (canvas.width / devicePixelRatio), (canvas.height / devicePixelRatio));

  // Draw in reverse for info overlap
  circles.slice().reverse().forEach(function (circle) {
    circle.draw();
  });

  requestAnimationFrame(animate);
}

mainCanvasPixelRatio();
spawn();
animate();
#mainCanvas {
  background: blue;
}
<canvas id="mainCanvas"></canvas>
<br>
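As a hedged side note on the "images just became larger" symptom: drawImage(canvas, x, y) draws a source canvas at its backing-store size, so a pre-render that was enlarged by devicePixelRatio needs an explicit destination size in CSS pixels. A possible adjustment to the draw method, reusing the names from the snippet above (this is a sketch, not a confirmed fix):

// Hypothetical fix: scale the DPR-sized pre-render back down to CSS pixels
Circle.prototype.draw = function () {
  var cssWidth = this.m_canvas.width / devicePixelRatio;
  var cssHeight = this.m_canvas.height / devicePixelRatio;
  this.c.drawImage(
    this.m_canvas,
    this.x - cssWidth / 2,
    this.y - cssHeight / 2,
    cssWidth,   // destination width in CSS pixels
    cssHeight   // destination height in CSS pixels
  );
};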
I have an audio visualizer to which I'm attempting to add controls. Unfortunately, the problem cannot be easily replicated in a snippet because a special server must be set up to allow frequency access to the audio. However, I'll describe the problem as best I can.
All the JavaScript for the project is below. I haven't tried the skip functions yet, but play/pause doesn't work.
This block handles all the play/pause.
function updatePlayState(){
  //console.log(paused)
  console.log(audio.paused)
  //audio.play();
  audio.pause();
  console.log(audio.paused)
  /*if(!paused){
    paused = true;
    audio.pause();
    console.log(audio.src)
  }else{
    audio.play();
  }*/
}
When I click the button, the console logs false and then true. However, it keeps doing the same thing on additional clicks, and the audio never actually pauses. The audio object I'm using is global, so scope shouldn't be the issue. I just want to get the audio to pause; then I'll move on to additional functionality.
//initialize global variables...
var audio, canvas, ctx, audioCtx, source, analyser, playlist_index = 0, full_screen = false, paused = false;

var playlist = [
  //'http://localhost/audio-visualizer/audio/audio.mp3',
  'http://localhost/audio-visualizer/audio/HaxPigMeow.mp3',
  'http://localhost/audio-visualizer/audio/4ware.mp3',
  'http://localhost/audio-visualizer/audio/Narwhals_song.mp3'
];

//when the page loads...
window.addEventListener('load', function() {
  //initialize the canvas...
  initializeCanvas();
  //initialize audio...
  initializeAudio();
  //initialize audio analyzer (get frequency information)...
  initializeAudioAnalyser();
  //initialize audio controls...
  initializeAudioControls();
});

//when the window is resized...
window.addEventListener('resize', function() {
  resizeCanvas();
});

function initializeCanvas() {
  //get the canvas...
  canvas = document.getElementById('canvas');
  //create a canvas context to draw graphics...
  ctx = canvas.getContext('2d');
  //resize the canvas to fit the window...
  resizeCanvas();
}

function resizeCanvas() {
  //set width of canvas...
  canvas.width = window.innerWidth;
  //set height of canvas...
  canvas.height = window.innerHeight;
  //set width of context...
  ctx.width = window.innerWidth;
  //set height of context...
  ctx.height = window.innerHeight;
  //reset drawing properties...
  setCanvasDrawingProperties();
}

function initializeAudio() {
  //load the audio...
  audio = new Audio(playlist[playlist_index]);
  //bypass CORS (Cross Origin Resource Sharing) restrictions...
  audio.crossOrigin = 'anonymous';
  //when the audio finishes playing; replay...
  //audio.loop = true;
  //play automatically...
  //audio.autoplay = true;
  //wait until audio fully loads before playing...
  audio.oncanplaythrough = function() {
    setTimeout(function() {
      window.addEventListener('click', function(e) {
        audio.play();
        //request full screen access...
        if (e.target.tagName != 'INPUT') {
          var root_element = document.documentElement;
          rfs = root_element.requestFullscreen
            || root_element.webkitRequestFullScreen
            || root_element.mozRequestFullScreen
            || root_element.msRequestFullscreen;
          rfs.call(root_element);
        }
        //show audio controls....
        document.getElementById('controlContainer').style.display = 'block';
        setTimeout(function() {
          document.getElementById('controlContainer').style.opacity = '1';
        }, 500);
        //hide the loading message...
        document.getElementById('overlayLoadingMessage').style.opacity = '0';
        window.setTimeout(function() {
          document.getElementById('overlayLoadingMessage').style.display = 'none';
        }, 500);
      });
    }, 1000);
  };

  audio.addEventListener('ended', function() {
    skipForward();
    playlist_index++;
    if (playlist_index == playlist.length) {
      playlist_index = 0;
    }
    audio.src = playlist[playlist_index];
    audio.crossOrigin = 'anonymous';
    audio.play();
  })
}

function initializeAudioControls() {
  document.getElementById('skipBack').addEventListener('click', skipTrackBackward);
  document.getElementById('skipForward').addEventListener('click', skipTrackForward);
  document.getElementById('pause').addEventListener('click', updatePlayState);

  function skipTrackForward() {
    console.log('skip forward')
  }

  function skipTrackBackward() {
    console.log('skip backward')
  }

  function updatePlayState() {
    //console.log(paused)
    console.log(audio.paused)
    //audio.play();
    audio.pause();
    console.log(audio.paused)
    /*if(!paused){
      paused = true;
      audio.pause();
      console.log(audio.src)
    }else{
      audio.play();
    }*/
  }
}

function initializeAudioAnalyser() {
  //create an audio context for browsers (including older webkit)...
  if (window.webkitAudioContext) {
    //an older browser which needs to use the webkit audio constructor...
    audioCtx = new window.webkitAudioContext;
  } else {
    //a newer browser which has full support for the audio context...
    audioCtx = new window.AudioContext;
  }
  //create a new analyser...
  analyser = audioCtx.createAnalyser();
  //create new media source for the audio context...
  source = audioCtx.createMediaElementSource(audio);
  //connect the analyser to the source...
  source.connect(analyser);
  //connect audio output device information to the analyser to gather audio frequencies...
  analyser.connect(audioCtx.destination);
  //set drawing properties...
  setCanvasDrawingProperties();
  //let's do this thing (time to animate)...
  animate();
}

function setCanvasDrawingProperties() {
  //set background color of future drawing...
  ctx.fillStyle = '#fff';
  //blur radius (50px)...
  ctx.shadowBlur = 50;
  //shadow color...
  ctx.shadowColor = "#ddd";
}

function animate() {
  //clear canvas...
  ctx.clearRect(0, 0, window.innerWidth, window.innerHeight);
  //create new frequency array map...
  frequencyBinaryCountArray = new Uint8Array(analyser.frequencyBinCount);
  //input frequency data into the array map...
  analyser.getByteFrequencyData(frequencyBinaryCountArray);
  //calculate radius based on frequency information (uses channel 50 right now)..
  var r = frequencyBinaryCountArray[50];
  //set x of circle...
  var x = (window.innerWidth / 2);
  //set y of circle...
  var y = (window.innerHeight / 2);
  //set start angle (the circumference of the circle)...
  var startAngle = 2 * Math.PI;
  //set end angle (the end circumference of the circle)...
  var endAngle = 0 * Math.PI;
  //draw a circle; radius is based on frequency...
  //begin the drawing...
  ctx.beginPath();
  //draw the circle...
  ctx.arc(x, y, r, startAngle, endAngle);
  //fill the circle with a color...
  ctx.fill();
  //close the path...
  ctx.closePath();
  //do it again (appx 60 times per second)...
  requestAnimationFrame(animate);
}
Tested in Chrome 67.x (latest version as of this post) on macOS High Sierra.
I was stuck on it a while, but immediately after posting the question I discovered that the global click listener that starts playing the audio was also firing when I clicked the pause button, overriding the pause call. To fix it, I moved audio.play() inside the e.target tag-name exception. To prevent future confusion, I also added a button to start the visualization rather than relying on a global click event.
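A minimal sketch of that rearrangement (assuming, as in the markup above, that the transport controls are INPUT elements): the play call moves inside the tag-name check so clicks on the controls no longer restart playback.

window.addEventListener('click', function (e) {
  // only clicks outside the control inputs start playback / fullscreen
  if (e.target.tagName != 'INPUT') {
    audio.play();
    var root_element = document.documentElement;
    var rfs = root_element.requestFullscreen
      || root_element.webkitRequestFullScreen
      || root_element.mozRequestFullScreen
      || root_element.msRequestFullscreen;
    if (rfs) rfs.call(root_element);
  }
  // ...show the controls and hide the loading message as before...
});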
Thanks for looking.
I'm working on a website and I am dealing with videos.
My need is to display a gif as a preview / teaser for the videos on another page, which redirects to the video.
What I found & added so far
HTML
<div id=thumbs></div>
CSS
#video {width:320px}
JS
var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");

video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);

video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j / 4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  }
}, false);

video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";

function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);
  thumbs.appendChild(c);
  thumbs.replaceChild(c, thumbs.childNodes[0]);
}
What I do is get a video from its URL and grab 5 frames at equally spaced times. This gives me canvases, and I'd like to display them as a gif or as a succession of images.
Since you only want to display it, instead of trying to generate a gif, the easiest way is probably to do the animation yourself.
You already have the code to fetch the video frames, but you are currently showing them in the DOM and forgetting about them once displayed.
What you can do from here is store these frames directly as canvases, and draw all of them onto a final, visible canvas in a timed loop.
var thumbsList = []; // we will save our frames as canvas in here
var delay = 500; // the speed of the animation (ms)

function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);
  thumbsList.push(c); // store this frame in our list
  if (thumbsList.length === 1) {
    displayThumbs(); // start animating as soon as we got a frame
  }
}

// initialises the display canvas, and starts the animation loop
function displayThumbs() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  thumbs.appendChild(c);
  startAnim(ctx); // pass our visible canvas' context
}

function startAnim(ctx) {
  var currentFrame = 0;
  // here is the actual loop
  function anim() {
    ctx.drawImage(thumbsList[currentFrame], 0, 0); // draw the currentFrame
    // increase our counter, and set it to 0 if too large
    currentFrame = (currentFrame + 1) % thumbsList.length;
    setTimeout(anim, delay); // do it again in x ms
  }
  anim(); // let's go !
}
var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");

/* OP's code */
video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);

video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j / 4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  } else {
    // displayThumbs(); // wait for all images to be parsed before animating
  }
}, false);

video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";
<div id=thumbs></div>
I am currently creating a project that supports video recording through my website.
I create a canvas and then push the recorded frames to it. The problem is that when I play the video after it's recorded, it plays too fast: a 10-second-long video plays in about 2 seconds. I have checked that playbackRate is set to 1. I save the recording to a database and it's sped up there as well, so it has nothing to do with the browser's video player.
I am relatively new to AngularJS and JavaScript, so I'm sorry if I left something important out.
I have tried changing a lot of the values back and forth, but I can't seem to find the cause of the problem. Any ideas?
Here is the code for the video recording:
scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');

    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;

    function drawVideoFrame(time) {
      videoRecorder = requestAnimationFrame(drawVideoFrame);
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }

    videoRecorder = requestAnimationFrame(drawVideoFrame); // Note: not using vendor prefixes!

    scope.recording = true;
  }
};

scope.stopRecording = function () {
  cancelAnimationFrame(videoRecorder); // Note: not using vendor prefixes!

  // 2nd param: framerate for the video file.
  scope.video.files = Whammy.fromImageArray(recordedFrames, 1000 / 30);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, 1000 / 30);

  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
I'm guessing the culprit is requestAnimationFrame: left on its own, you cannot tell at what interval it keeps calling the callback, and it can be as high as 60 fps.
Also, looking at your code, I cannot tell how you came to the conclusion that the frame rate is 1000/30.
My advice (at least for your case) would be to go with $interval.
You can do something like:
scope.frameRate = 10; // the amount I consider ideal for client-side video recording.
var videoInterval;

scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');

    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;

    function drawVideoFrame() {
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }

    videoInterval = $interval(drawVideoFrame, 1000 / scope.frameRate);

    scope.recording = true;
  }
};

scope.stopRecording = function () {
  $interval.cancel(videoInterval);

  // 2nd param: framerate for the video file.
  scope.video.files = Whammy.fromImageArray(recordedFrames, scope.frameRate);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, scope.frameRate); // you could change this to a file-copy method instead, to avoid processing the images into a video twice.

  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
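If you'd rather stay with requestAnimationFrame (for example, to avoid scheduling through Angular), the same idea works by throttling inside the callback so the capture rate matches the frame rate handed to Whammy. A rough sketch, assuming the same scope variables and canvas setup as above:

scope.frameRate = 10;      // frames per second to capture and encode (assumed value)
var lastCapture = 0;

function drawVideoFrame(now) {
  videoRecorder = requestAnimationFrame(drawVideoFrame);
  if (now - lastCapture < 1000 / scope.frameRate) {
    return; // too soon since the last capture, skip this animation frame
  }
  lastCapture = now;
  ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
  recordedFrames.push(canvas.toDataURL('image/webp', 1));
}

videoRecorder = requestAnimationFrame(drawVideoFrame);

// later, when stopping, keep the encoder's frame rate in sync:
// scope.video.files = Whammy.fromImageArray(recordedFrames, scope.frameRate);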