javascript and HTML5 draw video into canvas - javascript

I'm trying to draw a video into a canvas with JavaScript and HTML5 (like an intro video for a game). It works fine in desktop browsers but not on Android, so I'm not sure if I'm making a mistake somewhere or whether this is simply not supported on mobile devices.
Here is the code:
//create canvas and get 2d context
var canvas = document.getElementById('introVid');
var ctx = canvas.getContext('2d');
// allow fullscreen
// Prefer the standard Fullscreen API and fall back to the vendor-prefixed
// variants; every call is guarded so browsers supporting none of them
// don't throw a TypeError (the old code called mozRequestFullScreen
// unconditionally when the webkit prefix was absent).
function fullscreen() {
var el = canvas;
if (el.requestFullscreen) {
el.requestFullscreen(); // standard
} else if (el.webkitRequestFullScreen) {
el.webkitRequestFullScreen(); //chrome
} else if (el.mozRequestFullScreen) {
el.mozRequestFullScreen(); //firefox
}
}
canvas.addEventListener("click", fullscreen); //enable fullscreen onClick
//draw the video into canvas
var video = document.getElementById("video");
// NOTE(review): mobile browsers generally refuse to play (and therefore
// to draw) a video unless the <video> element carries the `playsinline`
// and `muted` attributes — likely why this fails on Android; confirm in
// the accompanying HTML.
video.addEventListener('loadeddata', function() {
video.play(); // start playing
update(); //start rendering
});
// Render loop: blit the current video frame into a fixed 300x150 region
// and reschedule on the next animation frame.
function update() {
ctx.drawImage(video, 0, 0, 300, 150);
requestAnimationFrame(update); //wait the browser be ready for next frame
};
Here a working demo on fiddle:
http://jsfiddle.net/h1hjp0Lp/122/

Related

Is it possible to block the smartphone from opening gallery?

I have an input for capturing image from the smartphone camera like this below:
<input ng-model="fileModel" type="file" name="input_default" accept="image/*;capture=camera"
capture="camera" capture />
If there is no camera on the smartphone (Android or iOS), the input opens the phone's gallery instead. I want the user to be alerted that the device doesn't have a camera, and for the gallery not to open.
I tried this way below, but it didn't work.
var el = document.createElement('input');
// Feature-detect support for the `capture` attribute: browsers that
// implement it expose the property on the element even when the
// attribute is unset, so an `'in'` probe is more reliable than comparing
// the reflected value against undefined with loose equality.
if (!('capture' in el)) {
// display message user
} else {
// open the camera normally on the device
}
Is it possible to BLOCK the smartphone from opening gallery using Javascript?
This is a working example of how to take a photo directly from the camera with WebRTC, by opening the camera and capturing a still image.
This approach is used by many government websites to validate authenticity.
This is the demo,
https://yari-demos.prod.mdn.mozit.cloud/en-US/docs/Web/API/Media_Streams_API/Taking_still_photos/_sample_.demo.html
And this is complete explanation,
https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Taking_still_photos
// Self-contained "take a still photo with the webcam" demo (adapted from
// MDN's Media Streams API article). Wires a getUserMedia stream into a
// <video> element, copies a frame onto a <canvas> on demand, and exposes
// the captured frame as a PNG data URL on an <img id="photo">.
(function() {
// The width and height of the captured photo. We will set the
// width to the value defined here, but the height will be
// calculated based on the aspect ratio of the input stream.
var width = 320; // We will scale the photo width to this
var height = 0; // This will be computed based on the input stream
// |streaming| indicates whether or not we're currently streaming
// video from the camera. Obviously, we start at false.
var streaming = false;
// The various HTML elements we need to configure or control. These
// will be set by the startup() function.
var video = null;
var canvas = null;
var photo = null;
var startbutton = null;
// When the demo runs inside a frame (e.g. an embedded live sample),
// camera permission cannot be requested; replace the page content with
// a button that reopens it in its own tab. Returns true when that
// replacement happened (so startup() can bail out).
function showViewLiveResultButton() {
if (window.self !== window.top) {
// Ensure that if our document is in a frame, we get the user
// to first open it in its own tab or window. Otherwise, it
// won't be able to request permission for camera access.
document.querySelector(".contentarea").remove();
const button = document.createElement("button");
button.textContent = "View live result of the example code above";
document.body.append(button);
button.addEventListener('click', () => window.open(location.href));
return true;
}
return false;
}
// Entry point: look up the DOM elements, request the camera stream, and
// install the 'canplay' sizing handler and the capture-button handler.
function startup() {
if (showViewLiveResultButton()) { return; }
video = document.getElementById('video');
canvas = document.getElementById('canvas');
photo = document.getElementById('photo');
startbutton = document.getElementById('startbutton');
// Video only, no audio; errors (e.g. permission denied) are logged
// and the demo simply stays blank.
navigator.mediaDevices.getUserMedia({video: true, audio: false})
.then(function(stream) {
video.srcObject = stream;
video.play();
})
.catch(function(err) {
console.log("An error occurred: " + err);
});
// Once the first frames are decodable, derive the display height from
// the stream's aspect ratio and size both elements (guarded so it
// only runs once per stream).
video.addEventListener('canplay', function(ev){
if (!streaming) {
height = video.videoHeight / (video.videoWidth/width);
// Firefox currently has a bug where the height can't be read from
// the video, so we will make assumptions if this happens.
if (isNaN(height)) {
height = width / (4/3);
}
video.setAttribute('width', width);
video.setAttribute('height', height);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
streaming = true;
}
}, false);
startbutton.addEventListener('click', function(ev){
takepicture();
ev.preventDefault();
}, false);
clearphoto();
}
// Fill the photo with an indication that none has been
// captured.
function clearphoto() {
var context = canvas.getContext('2d');
context.fillStyle = "#AAA";
context.fillRect(0, 0, canvas.width, canvas.height);
var data = canvas.toDataURL('image/png');
photo.setAttribute('src', data);
}
// Capture a photo by fetching the current contents of the video
// and drawing it into a canvas, then converting that to a PNG
// format data URL. By drawing it on an offscreen canvas and then
// drawing that to the screen, we can change its size and/or apply
// other changes before drawing it.
function takepicture() {
var context = canvas.getContext('2d');
if (width && height) {
canvas.width = width;
canvas.height = height;
context.drawImage(video, 0, 0, width, height);
var data = canvas.toDataURL('image/png');
photo.setAttribute('src', data);
} else {
// No valid size yet ('canplay' hasn't fired) — show the placeholder.
clearphoto();
}
}
// Set up our event listener to run the startup process
// once loading is complete.
window.addEventListener('load', startup, false);
})();

Offset playback of two videos when drawing canvas

It's my first post on Stackoverflow! I hope this is clear, and thanks in advance for your help.
I am drawing two videos to canvas, and recording the canvas for a single video for playback and review.
video1 is a video from our server.
video2 is the user's webcam feed.
The issue:
The two videos are not entirely synced due to some delay in the webcam capture.
I am trying to figure out how to delay the ctx.drawImage call for video1 by "x" milliseconds so that I can get the two videos to appear on the canvas more synchronized, making the final recording look more synchronized.
Here is my code for drawing to the canvas.
// Draw two videos to one canvas
$(function() {
var canvas = document.getElementById('canvas');
var ctx = canvas.getContext('2d');
var video1 = document.getElementById('teachervid');
var video2 = document.getElementById('studentvideo');
// Shared render loop (the two original copies were identical except for
// the x offset): repeatedly draws `videoEl` into a 960x540 slot at
// horizontal offset `dx`, ~30 times per second, until the video pauses
// or ends.
function startDrawLoop(videoEl, dx) {
(function loop() {
if (!videoEl.paused && !videoEl.ended) {
ctx.drawImage(videoEl, dx, 0, 960, 540);
setTimeout(loop, 1000 / 30); // drawing at 30fps
}
})();
}
// Left half: the server-provided video.
video1.addEventListener('play', function() {
startDrawLoop(this, 0);
});
// Right half: the webcam feed.
video2.addEventListener('play', function() {
startDrawLoop(this, 960);
});
});
To be clear, I am not trying to change the frame rate, simply offset the start points of the two videos so that video 1 is drawn X milliseconds after video2 is drawn. I assume I will need to tweak and test what X should be once I figure out how.
Note, Keep in mind that I am new to JS and programming when responding.
Cheers!

Safari is open new window when I play the video

I create a video tag in JavaScript and use a canvas to display the video:
const canvas = document.getElementById('canvas');
const ctx2d = canvas.getContext('2d');
const videoElement = document.createElement('video');
videoElement.height = canvas.height;
videoElement.width = canvas.width;
// BUG FIX: the IDL property is `playsInline` (camelCase, boolean).
// Assigning `videoElement.playsinline = "playsinline"` only created an
// inert expando property, so iOS Safari still hijacked playback into its
// native fullscreen player. Set the real property and the attribute form
// (the attribute covers older iOS versions).
videoElement.playsInline = true;
videoElement.setAttribute('playsinline', '');
videoElement.setAttribute('webkit-playsinline', 'webkit-playsinline');
videoElement.src = '.....';
videoElement.autoplay = false;
videoElement.loop = false;
videoElement.muted = true;
// Once playback starts, mirror frames onto the canvas at ~30fps until
// the video pauses or ends.
videoElement.addEventListener('play', () => {
(function loop() {
if (videoElement && !videoElement.paused && !videoElement.ended) {
ctx2d.drawImage(videoElement, 0, 0, canvas.width, canvas.height);
setTimeout(loop, 1000 / 30); // drawing at 30fps
}
})();
});
The problem is that on my iOS device (iPhone 11, Safari), when I play the video, iOS plays it in a new window (fullscreen).
What I need to do to solve that problem? any safari developers here?
This is what I get when I play the video. the fullscreen mode of safari.

JavaScript audio.pause() and audio.play() not working in audio visualizer

I have an audio visualizer to which I'm attempting to add controls. Unfortunately, the problem cannot be easily replicated in a snippet because a special server must be setup to allow frequency access to the audio. However, I'll describe the problem the best I can.
All JavaScript for the project is below. I haven't tried skip functions yet but play/pause doesn't work.
This block handles all the play/pause.
// Toggle playback of the (global) audio element. The element's own
// `paused` flag is the source of truth, so no separate bookkeeping
// variable is needed and the control can never get out of sync.
// (The old body called audio.pause() unconditionally, so the button
// could never resume playback.)
function updatePlayState(){
if (audio.paused) {
audio.play();
} else {
audio.pause();
}
}
When I click the button, the console logs false and then true. However, it continues the same behavior after additional clicks. The audio also doesn't pause. The audio object I'm using is global so scope must not be the issue. I'm just wanting to get the audio to pause and then I'll move on to additional functionality.
// Globals shared across the visualizer: the media element, canvas and
// drawing context, Web Audio graph nodes, and playback bookkeeping.
// (Declared with `var` so they become window properties, as the rest of
// the script expects.)
var audio;
var canvas;
var ctx;
var audioCtx;
var source;
var analyser;
var playlist_index = 0;
var full_screen = false;
var paused = false;
// Tracks to cycle through, in play order.
var playlist = [
//'http://localhost/audio-visualizer/audio/audio.mp3',
'http://localhost/audio-visualizer/audio/HaxPigMeow.mp3',
'http://localhost/audio-visualizer/audio/4ware.mp3',
'http://localhost/audio-visualizer/audio/Narwhals_song.mp3'
];
// Boot sequence: once the page has fully loaded, wire up the canvas,
// the audio element, the frequency analyser, and the UI controls —
// strictly in that order, since each step depends on the previous one.
window.addEventListener('load', () => {
initializeCanvas();
initializeAudio();
initializeAudioAnalyser();
initializeAudioControls();
});
// Keep the canvas bitmap matched to the viewport size.
window.addEventListener('resize', () => {
resizeCanvas();
});
// Look up the <canvas>, create its 2D drawing context, and size it to
// fill the current window.
function initializeCanvas() {
canvas = document.getElementById('canvas');
ctx = canvas.getContext('2d');
resizeCanvas();
}
// Resize the canvas bitmap to fill the window. Note: a 2D rendering
// context has no `width`/`height` properties — the previous assignments
// to ctx.width/ctx.height were inert expandos and have been removed.
// Resizing a canvas resets its context state, so the drawing properties
// are reapplied afterwards.
function resizeCanvas() {
//set width of canvas...
canvas.width = window.innerWidth;
//set height of canvas...
canvas.height = window.innerHeight;
//reset drawing properties...
setCanvasDrawingProperties();
}
// Create the Audio element for the current playlist entry, install the
// click handler that starts playback (browsers require a user gesture),
// and advance through the playlist when a track ends.
function initializeAudio() {
//load the audio...
audio = new Audio(playlist[playlist_index]);
//bypass CORS (Cross Origin Resource Sharing) restrictions...
audio.crossOrigin = 'anonymous';
//wait until audio fully loads before playing...
audio.oncanplaythrough = function() {
setTimeout(function() {
window.addEventListener('click', function(e) {
// BUG FIX: audio.play() used to run on EVERY click, so clicking
// the pause button immediately restarted playback and the control
// appeared dead. Playback (and the fullscreen request) now happen
// only for clicks outside the INPUT controls.
if (e.target.tagName != 'INPUT') {
audio.play();
//request full screen access (vendor-prefix fallback chain)...
var root_element = document.documentElement;
var rfs = root_element.requestFullscreen
|| root_element.webkitRequestFullScreen
|| root_element.mozRequestFullScreen
|| root_element.msRequestFullscreen
;
rfs.call(root_element);
}
//show audio controls....
document.getElementById('controlContainer').style.display = 'block';
setTimeout(function() {
document.getElementById('controlContainer').style.opacity = '1';
}, 500);
//hide the loading message...
document.getElementById('overlayLoadingMessage').style.opacity = '0';
window.setTimeout(function() {
document.getElementById('overlayLoadingMessage').style.display = 'none';
}, 500);
});
}, 1000);
};
// When a track finishes, wrap around the playlist and keep playing.
audio.addEventListener('ended', function() {
// NOTE(review): skipForward is not defined anywhere in this file
// (the visible skip handlers are skipTrackForward/skipTrackBackward)
// — this call will throw when a track ends; confirm and remove or
// rename.
skipForward();
playlist_index++;
if (playlist_index == playlist.length) {
playlist_index = 0;
}
audio.src = playlist[playlist_index];
audio.crossOrigin = 'anonymous';
audio.play();
})
}
// Attach click handlers to the transport controls. The skip handlers
// are still placeholders; play/pause toggles based on the audio
// element's own state.
function initializeAudioControls(){
document.getElementById('skipBack').addEventListener('click',skipTrackBackward);
document.getElementById('skipForward').addEventListener('click',skipTrackForward);
document.getElementById('pause').addEventListener('click',updatePlayState);
// Placeholder: advance to the next track.
function skipTrackForward(){
console.log('skip forward')
}
// Placeholder: return to the previous track.
function skipTrackBackward(){
console.log('skip backward')
}
// BUG FIX: the old handler called audio.pause() unconditionally, so the
// button could never resume playback. Let the element's own `paused`
// flag drive the toggle instead of the separate `paused` global.
function updatePlayState(){
if (audio.paused) {
audio.play();
} else {
audio.pause();
}
}
}
// Build the Web Audio graph — context -> media-element source ->
// analyser -> destination — then apply the drawing style and start the
// render loop.
function initializeAudioAnalyser() {
// Older WebKit browsers only expose a prefixed constructor.
audioCtx = window.webkitAudioContext
? new window.webkitAudioContext
: new window.AudioContext;
// The analyser taps the signal so animate() can read frequency data.
analyser = audioCtx.createAnalyser();
source = audioCtx.createMediaElementSource(audio);
source.connect(analyser);
// Route through to the speakers so the audio remains audible.
analyser.connect(audioCtx.destination);
// Drawing style, then kick off the animation loop.
setCanvasDrawingProperties();
animate();
}
// Shared drawing style for the visualizer: white fill with a soft
// light-grey glow. Must be reapplied after every canvas resize, since
// resizing resets the context state.
function setCanvasDrawingProperties() {
Object.assign(ctx, {
fillStyle: '#fff',
shadowBlur: 50, // glow radius in px
shadowColor: "#ddd",
});
}
// Render one frame: a centered filled circle whose radius follows the
// level of frequency bin 50, then schedule the next frame.
function animate() {
//clear the previous frame...
ctx.clearRect(0, 0, window.innerWidth, window.innerHeight);
// BUG FIX: declare the frequency buffer locally — it was previously an
// implicit global (assignment without declaration), which is a
// ReferenceError in strict mode and leaked into window otherwise.
var frequencyBinaryCountArray = new Uint8Array(analyser.frequencyBinCount);
//input frequency data into the array map...
analyser.getByteFrequencyData(frequencyBinaryCountArray);
//radius follows the energy in channel 50...
var r = frequencyBinaryCountArray[50];
//center of the window...
var x = (window.innerWidth / 2);
var y = (window.innerHeight / 2);
//full-circle sweep...
var startAngle = 2 * Math.PI;
var endAngle = 0 * Math.PI;
//draw and fill the circle...
ctx.beginPath();
ctx.arc(x, y, r, startAngle, endAngle);
ctx.fill();
ctx.closePath();
//do it again (appx 60 times per second)...
requestAnimationFrame(animate);
}
Tested in Chrome 67.x (latest version as of this post) on macOS High Sierra.
I was stuck on it a while but, immediately after posting the question, I discovered the global click listener that starts playing the audio was also firing when I clicked the pause button. It was overriding the pause function. To fix it, I moved the audio play inside the e.target tagname exception. However, to prevent future confusion, I added a button to start the visualization rather than a global click event.
Thanks for looking.

Video stuttering when drawn to canvas on Chrome 63

I'm running a loop drawing a playing video onto a canvas. It used to work smoothly until the last Chrome update; now the video is stuttering, jumping 2 frames ahead and then one back. The same code performs smoothly on Firefox and Opera.
I'm currently testing on Chrome 63.0.3239.132.
has anyone stumbled upon this issue?
here's the code i test with:
// Repro case: paint each decoded frame of a playing video onto a canvas
// via requestAnimationFrame (stutters on Chrome 63, smooth elsewhere).
var canvas, context, video;
// One tick of the render loop: queue the next tick first, then blit the
// current video frame at the canvas origin.
function drawingLoop() {
window.requestAnimationFrame(drawingLoop);
context.drawImage(video, 0, 0);
}
// Build the canvas and attach it to the page.
canvas = document.createElement('canvas');
document.body.appendChild(canvas);
// Build a detached video element; once its first frame is decoded, size
// the canvas to the video's intrinsic dimensions and start the loop.
video = document.createElement('video');
video.addEventListener("loadeddata", () => {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
context = canvas.getContext('2d');
video.play();
window.requestAnimationFrame(drawingLoop);
});
video.autoplay = false;
video.src = "http://www.sample-videos.com/video/mp4/720/big_buck_bunny_720p_1mb.mp4";
video.load();

Categories

Resources