JavaScript audio.pause() and audio.play() not working in audio visualizer - javascript

I have an audio visualizer to which I'm attempting to add controls. Unfortunately, the problem cannot be easily replicated in a snippet because a special server must be set up to allow frequency access to the audio. However, I'll describe the problem as best I can.
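For anyone trying to reproduce this: the "special server" is essentially just a static file server that sends CORS headers, since the Web Audio analyser only gets frequency data from CORS-cleared media. A minimal sketch, assuming Node with Express installed and the project living in an audio-visualizer folder:
const express = require('express');
const app = express();
//send CORS headers so the analyser can read the audio's frequency data...
app.use(function(req, res, next) {
  res.setHeader('Access-Control-Allow-Origin', '*');
  next();
});
//serve the page and the mp3 files...
app.use(express.static('audio-visualizer'));
//port 80 matches the http://localhost/... URLs in the playlist below...
app.listen(80);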
All the JavaScript for the project is below. I haven't implemented the skip functions yet, but play/pause doesn't work.
This block handles all the play/pause.
function updatePlayState(){
  //console.log(paused)
  console.log(audio.paused)
  //audio.play();
  audio.pause();
  console.log(audio.paused)
  /*if(!paused){
    paused = true;
    audio.pause();
    console.log(audio.src)
  }else{
    audio.play();
  }*/
}
When I click the button, the console logs false and then true. However, it logs the same values on every additional click, and the audio never actually pauses. The audio object I'm using is global, so scope shouldn't be the issue. For now I just want to get the audio to pause; then I'll move on to additional functionality.
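As an aside, the commented-out toggle above shouldn't need its own paused flag; the audio element already tracks its state, so once pausing itself works, a sketch like this should cover the toggle:
function updatePlayState(){
  //the element knows whether it is paused, so key off that directly...
  if(audio.paused){
    audio.play();
  }else{
    audio.pause();
  }
}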
//initialize global variables...
var audio, canvas, ctx, audioCtx, source, analyser, playlist_index = 0, full_screen = false, paused = false;
var playlist = [
  //'http://localhost/audio-visualizer/audio/audio.mp3',
  'http://localhost/audio-visualizer/audio/HaxPigMeow.mp3',
  'http://localhost/audio-visualizer/audio/4ware.mp3',
  'http://localhost/audio-visualizer/audio/Narwhals_song.mp3'
];
//when the page loads...
window.addEventListener('load', function() {
  //initialize the canvas...
  initializeCanvas();
  //initialize audio...
  initializeAudio();
  //initialize audio analyzer (get frequency information)...
  initializeAudioAnalyser();
  //initialize audio controls...
  initializeAudioControls();
});
//when the window is resized...
window.addEventListener('resize', function() {
  resizeCanvas();
});
function initializeCanvas() {
  //get the canvas...
  canvas = document.getElementById('canvas');
  //create a canvas context to draw graphics...
  ctx = canvas.getContext('2d');
  //resize the canvas to fit the window...
  resizeCanvas();
}
function resizeCanvas() {
  //set width of canvas...
  canvas.width = window.innerWidth;
  //set height of canvas...
  canvas.height = window.innerHeight;
  //set width of context...
  ctx.width = window.innerWidth;
  //set height of context...
  ctx.height = window.innerHeight;
  //reset drawing properties...
  setCanvasDrawingProperties();
}
function initializeAudio() {
  //load the audio...
  audio = new Audio(playlist[playlist_index]);
  //bypass CORS (Cross Origin Resource Sharing) restrictions...
  audio.crossOrigin = 'anonymous';
  //when the audio finishes playing; replay...
  //audio.loop = true;
  //play automatically...
  //audio.autoplay = true;
  //wait until audio fully loads before playing...
  audio.oncanplaythrough = function() {
    setTimeout(function() {
      window.addEventListener('click', function(e) {
        audio.play();
        //request full screen access...
        if(e.target.tagName != 'INPUT'){
          var root_element = document.documentElement;
          rfs = root_element.requestFullscreen
            || root_element.webkitRequestFullScreen
            || root_element.mozRequestFullScreen
            || root_element.msRequestFullscreen;
          rfs.call(root_element);
        }
        //show audio controls....
        document.getElementById('controlContainer').style.display = 'block';
        setTimeout(function(){
          document.getElementById('controlContainer').style.opacity = '1';
        }, 500);
        //hide the loading message...
        document.getElementById('overlayLoadingMessage').style.opacity = '0';
        window.setTimeout(function() {
          document.getElementById('overlayLoadingMessage').style.display = 'none';
        }, 500);
      });
    }, 1000);
  };
  audio.addEventListener('ended', function(){
    skipForward();
    playlist_index++;
    if(playlist_index == playlist.length){
      playlist_index = 0;
    }
    audio.src = playlist[playlist_index];
    audio.crossOrigin = 'anonymous';
    audio.play();
  });
}
function initializeAudioControls(){
  document.getElementById('skipBack').addEventListener('click', skipTrackBackward);
  document.getElementById('skipForward').addEventListener('click', skipTrackForward);
  document.getElementById('pause').addEventListener('click', updatePlayState);
  function skipTrackForward(){
    console.log('skip forward')
  }
  function skipTrackBackward(){
    console.log('skip backward')
  }
  function updatePlayState(){
    //console.log(paused)
    console.log(audio.paused)
    //audio.play();
    audio.pause();
    console.log(audio.paused)
    /*if(!paused){
      paused = true;
      audio.pause();
      console.log(audio.src)
    }else{
      audio.play();
    }*/
  }
}
function initializeAudioAnalyser() {
  //create an audio context for browsers (including older webkit)...
  if(window.webkitAudioContext){
    //an older browser which needs to use the webkit audio constructor...
    audioCtx = new window.webkitAudioContext;
  }else{
    //a newer browser which has full support for the audio context...
    audioCtx = new window.AudioContext;
  }
  //create a new analyser...
  analyser = audioCtx.createAnalyser();
  //create new media source for the audio context...
  source = audioCtx.createMediaElementSource(audio);
  //connect the source to the analyser...
  source.connect(analyser);
  //connect the analyser to the audio output so the audio stays audible...
  analyser.connect(audioCtx.destination);
  //set drawing properties...
  setCanvasDrawingProperties();
  //let's do this thing (time to animate)...
  animate();
}
function setCanvasDrawingProperties() {
  //set fill color of future drawing...
  ctx.fillStyle = '#fff';
  //blur radius (50px)...
  ctx.shadowBlur = 50;
  //shadow color...
  ctx.shadowColor = "#ddd";
}
function animate() {
  //clear canvas...
  ctx.clearRect(0, 0, window.innerWidth, window.innerHeight);
  //create new frequency array map...
  frequencyBinaryCountArray = new Uint8Array(analyser.frequencyBinCount);
  //input frequency data into the array map...
  analyser.getByteFrequencyData(frequencyBinaryCountArray);
  //calculate radius based on frequency information (uses bin 50 right now)..
  var r = frequencyBinaryCountArray[50];
  //set x of circle...
  var x = (window.innerWidth / 2);
  //set y of circle...
  var y = (window.innerHeight / 2);
  //set start angle (the circumference of the circle)...
  var startAngle = 2 * Math.PI;
  //set end angle (the end circumference of the circle)...
  var endAngle = 0 * Math.PI;
  //draw a circle; radius is based on frequency...
  //begin the drawing...
  ctx.beginPath();
  //draw the circle...
  ctx.arc(x, y, r, startAngle, endAngle);
  //fill the circle with a color...
  ctx.fill();
  //close the path...
  ctx.closePath();
  //do it again (approx 60 times per second)...
  requestAnimationFrame(animate);
}
Tested in Chrome 67.x (latest version as of this post) on macOS High Sierra.

I was stuck on this for a while but, immediately after posting the question, I discovered that the global click listener that starts playing the audio was also firing when I clicked the pause button. It was overriding the pause. To fix it, I moved the audio.play() call inside the e.target tagName exception. However, to prevent future confusion, I replaced the global click event with a button that starts the visualization.
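A sketch of the corrected listener, assuming (as the tagName check suggests) that the control buttons are input elements; audio.play() moves inside the check so clicks on the controls no longer restart playback:
window.addEventListener('click', function(e) {
  //only start playback and request fullscreen for clicks outside the controls...
  if(e.target.tagName != 'INPUT'){
    audio.play();
    var root_element = document.documentElement;
    rfs = root_element.requestFullscreen
      || root_element.webkitRequestFullScreen
      || root_element.mozRequestFullScreen
      || root_element.msRequestFullscreen;
    rfs.call(root_element);
  }
  //...rest of the listener (show controls, hide loading message) unchanged...
});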
Thanks for looking.

Related

Is it possible to block the smartphone from opening gallery?

I have an input for capturing an image from the smartphone camera, like the one below:
<input ng-model="fileModel" type="file" name="input_default" accept="image/*;capture=camera"
capture="camera" capture />
If the smartphone (Android or iOS) has no camera, the input opens the phone's gallery instead. I want the user to be alerted that the device has no camera, and the gallery not to open.
I tried this way below, but it didn't work.
var el = document.createElement('input');
if (el.capture == undefined) {
  // display message to the user
} else {
  // open the camera normally on the device
}
Is it possible to BLOCK the smartphone from opening the gallery using JavaScript?
This is a working example of how to take a photo directly from the camera with WebRTC, by opening the camera and capturing a still.
This approach is used by many government websites to validate authenticity.
This is the demo:
https://yari-demos.prod.mdn.mozit.cloud/en-US/docs/Web/API/Media_Streams_API/Taking_still_photos/_sample_.demo.html
And this is the complete explanation:
https://developer.mozilla.org/en-US/docs/Web/API/Media_Streams_API/Taking_still_photos
(function() {
  // The width and height of the captured photo. We will set the
  // width to the value defined here, but the height will be
  // calculated based on the aspect ratio of the input stream.
  var width = 320;  // We will scale the photo width to this
  var height = 0;   // This will be computed based on the input stream
  // |streaming| indicates whether or not we're currently streaming
  // video from the camera. Obviously, we start at false.
  var streaming = false;
  // The various HTML elements we need to configure or control. These
  // will be set by the startup() function.
  var video = null;
  var canvas = null;
  var photo = null;
  var startbutton = null;
  function showViewLiveResultButton() {
    if (window.self !== window.top) {
      // Ensure that if our document is in a frame, we get the user
      // to first open it in its own tab or window. Otherwise, it
      // won't be able to request permission for camera access.
      document.querySelector(".contentarea").remove();
      const button = document.createElement("button");
      button.textContent = "View live result of the example code above";
      document.body.append(button);
      button.addEventListener('click', () => window.open(location.href));
      return true;
    }
    return false;
  }
  function startup() {
    if (showViewLiveResultButton()) { return; }
    video = document.getElementById('video');
    canvas = document.getElementById('canvas');
    photo = document.getElementById('photo');
    startbutton = document.getElementById('startbutton');
    navigator.mediaDevices.getUserMedia({video: true, audio: false})
      .then(function(stream) {
        video.srcObject = stream;
        video.play();
      })
      .catch(function(err) {
        console.log("An error occurred: " + err);
      });
    video.addEventListener('canplay', function(ev){
      if (!streaming) {
        height = video.videoHeight / (video.videoWidth / width);
        // Firefox currently has a bug where the height can't be read from
        // the video, so we will make assumptions if this happens.
        if (isNaN(height)) {
          height = width / (4/3);
        }
        video.setAttribute('width', width);
        video.setAttribute('height', height);
        canvas.setAttribute('width', width);
        canvas.setAttribute('height', height);
        streaming = true;
      }
    }, false);
    startbutton.addEventListener('click', function(ev){
      takepicture();
      ev.preventDefault();
    }, false);
    clearphoto();
  }
  // Fill the photo with an indication that none has been
  // captured.
  function clearphoto() {
    var context = canvas.getContext('2d');
    context.fillStyle = "#AAA";
    context.fillRect(0, 0, canvas.width, canvas.height);
    var data = canvas.toDataURL('image/png');
    photo.setAttribute('src', data);
  }
  // Capture a photo by fetching the current contents of the video
  // and drawing it into a canvas, then converting that to a PNG
  // format data URL. By drawing it on an offscreen canvas and then
  // drawing that to the screen, we can change its size and/or apply
  // other changes before drawing it.
  function takepicture() {
    var context = canvas.getContext('2d');
    if (width && height) {
      canvas.width = width;
      canvas.height = height;
      context.drawImage(video, 0, 0, width, height);
      var data = canvas.toDataURL('image/png');
      photo.setAttribute('src', data);
    } else {
      clearphoto();
    }
  }
  // Set up our event listener to run the startup process
  // once loading is complete.
  window.addEventListener('load', startup, false);
})();
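If the goal is just to alert the user when the device has no camera, rather than letting the file input fall back to the gallery, getUserMedia itself can serve as the feature check, since it rejects when no camera is available. A sketch; the spec'd error name is NotFoundError, though older browsers may report it differently:
navigator.mediaDevices.getUserMedia({ video: true, audio: false })
  .then(function(stream) {
    // a camera exists; stop the probe tracks before opening the real capture UI
    stream.getTracks().forEach(function(track) { track.stop(); });
  })
  .catch(function(err) {
    if (err.name === 'NotFoundError') {
      alert('No camera was found on this device.'); // display message to the user
    } else {
      console.log("An error occurred: " + err);
    }
  });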

Show webcam / camera stream with delay - webrtc

I've made a simple setup: I get the webcam / phone camera stream and then pass it on, drawing it onto an HTML 2D canvas.
But I've been having trouble figuring out how to show the stream with a delay of a few seconds. Kind of like a delay mirror.
I tried playing with ctx.globalAlpha = 0.005; but this gives me a ghosting effect rather than delaying the stream.
Any idea how this can be achieved?
The snippet below doesn't work here, probably because of security restrictions, but here's a pen:
https://codepen.io/farisk/pen/LvmGGQ
var width = 0, height = 0;
var canvas = document.createElement('canvas'),
    ctx = canvas.getContext('2d');
document.body.appendChild(canvas);
var video = document.createElement('video'),
    track;
video.setAttribute('autoplay', true);
window.vid = video;
function getWebcam(){
  navigator.mediaDevices.getUserMedia({ video: true }).then(function(stream) {
    var videoTracks = stream.getVideoTracks();
    var newStream = new MediaStream(stream.getVideoTracks());
    video.srcObject = newStream;
    video.play();
    track = stream.getTracks()[0];
  }, function(e) {
    console.error('Rejected!', e);
  });
}
getWebcam();
var rotation = 0,
    loopFrame,
    centerX,
    centerY,
    twoPI = Math.PI * 2;
function loop(){
  loopFrame = requestAnimationFrame(loop);
  // ctx.globalAlpha = 0.005;
  ctx.drawImage(video, 0, 0, width, height);
  ctx.restore();
}
function startLoop(){
  loopFrame = requestAnimationFrame(loop);
}
video.addEventListener('loadedmetadata', function(){
  width = canvas.width = video.videoWidth;
  height = canvas.height = video.videoHeight;
  centerX = width / 2;
  centerY = height / 2;
  startLoop();
});
canvas.addEventListener('click', function(){
  if ( track ) {
    if ( track.stop ) { track.stop(); }
    track = null;
  } else {
    getWebcam();
  }
});
video,
canvas {
  max-width: 100%;
  height: auto;
}
You might want to consider storing the video frames you receive in an array of sorts. It does mean delaying the playback by n seconds at first.
Basically, on each frame you store the current video image into the array and draw nothing, until the array covers the delay you want (at roughly 60fps, about 60 frames for every second of delay). From that point on, you start drawing based on the first element of the array.
Once you draw that frame, remove it from the array and push the newly captured frame onto the end.
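A minimal sketch of that idea, grafted onto the loop above. It reuses the same video, ctx, width and height variables, assumes roughly 60fps from requestAnimationFrame, and buffers frames as ImageData (which gets memory-hungry for long delays):
var frameBuffer = [];     // queue of captured frames
var delayFrames = 2 * 60; // ~2 seconds at an assumed 60fps
function delayedLoop(){
  requestAnimationFrame(delayedLoop);
  // capture the newest frame into the queue...
  ctx.drawImage(video, 0, 0, width, height);
  frameBuffer.push(ctx.getImageData(0, 0, width, height));
  // ...and only start drawing once the queue covers the delay
  if (frameBuffer.length > delayFrames) {
    ctx.putImageData(frameBuffer.shift(), 0, 0);
  } else {
    ctx.clearRect(0, 0, width, height); // still filling the buffer: draw nothing yet
  }
}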

Canvas doesn't repaint when tab inactive/ backgrounded for recording ( WebGL)

I am building an online web application which renders a video to a canvas and then records the canvas using canvas.captureStream() and a MediaRecorder. The problem is that when the user switches tabs or minimizes the window, the canvas freezes. My animation keeps running because I use a web-worker-based setInterval (HackTimer.js). Chrome has not yet provided a solution: https://bugs.chromium.org/p/chromium/issues/detail?id=639105.
Can anyone suggest a workaround? I tried opening a new window that can't be minimized, but was unsuccessful. The recording doesn't stop when switching windows (it stops only when the tab is switched or minimized).
NB:
Now that this question has been specifically edited to treat WebGL contexts, it may not be completely a duplicate of this previous answer, which indeed doesn't work with WebGL contexts, but only because of a Chrome bug...
So this answer will show you how to work around this bug while waiting for a fix from Chrome.
The linked answer made use of the Web Audio API's timing to create a timed loop, tied to neither the screen refresh rate nor the window/tab's visibility.
But as said above, this currently doesn't work with WebGL contexts on Chrome.
The easy workaround is to use an offscreen 2D context as the stream source, and to draw our WebGL canvas onto this 2D context:
function startRecording(webgl_renderer, render_func) {
  // create a clone of the webgl canvas
  var canvas = webgl_renderer.domElement.cloneNode();
  // init a 2D context
  var ctx = canvas.getContext('2d');
  function anim(){
    // render the webgl animation
    render_func();
    // draw the webgl canvas on our 2D one
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.drawImage(webgl_renderer.domElement, 0, 0);
  }
  var fps = 60;
  // start our loop at 60fps
  var stopAnim = audioTimerLoop(anim, 1000 / fps);
  // maximum stream rate set as 60 fps
  var cStream = canvas.captureStream(fps);
  let chunks = [];
  var recorder = new MediaRecorder(cStream);
  recorder.ondataavailable = e => chunks.push(e.data);
  recorder.onstop = e => {
    // we can stop our loop
    stopAnim();
    var url = URL.createObjectURL(new Blob(chunks));
    var v = document.createElement('video');
    v.src = url;
    v.controls = true;
    document.body.appendChild(v);
  };
  recorder.start();
  // stops the recorder in 20s, try to change tab during this time
  setTimeout(function() {
    recorder.stop();
  }, 20000);
  btn.parentNode.removeChild(btn);
}
/*
  An alternative timing loop, based on AudioContext's clock
  #arg callback : a callback function
      with the audioContext's currentTime passed as unique argument
  #arg frequency : float in ms;
  #returns : a stop function
*/
function audioTimerLoop(callback, frequency) {
  var freq = frequency / 1000; // AudioContext time parameters are in seconds
  var aCtx = new AudioContext();
  // Chrome needs our oscillator node to be attached to the destination
  // So we create a silent Gain Node
  var silence = aCtx.createGain();
  silence.gain.value = 0;
  silence.connect(aCtx.destination);
  var stopped = false; // A flag to know when we'll stop the loop
  function onOSCend() {
    var osc = aCtx.createOscillator();
    osc.onended = onOSCend; // so we can loop
    osc.connect(silence);
    osc.start(0); // start it now
    osc.stop(aCtx.currentTime + freq); // stop it next frame
    callback(aCtx.currentTime); // one frame is done
    if (stopped) { // user broke the loop
      osc.onended = function() {
        aCtx.close(); // clear the audioContext
        return;
      };
    }
  }
  onOSCend(); // start the loop
  // return a function to stop our loop
  return function() {
    stopped = true;
  };
}
/* global THREE */
/* Note that all rAF loops have been removed
   since they're now handled by our 'audioTimerLoop' */
(function() {
  'use strict';
  var WIDTH = 500, HEIGHT = 500;
  var scene = new THREE.Scene();
  var camera = new THREE.PerspectiveCamera(75, WIDTH / HEIGHT, 0.1, 1000);
  var renderer = new THREE.WebGLRenderer();
  renderer.setSize(WIDTH, HEIGHT);
  document.body.appendChild(renderer.domElement);
  var geometry = new THREE.CubeGeometry(5, 5, 5);
  var material = new THREE.MeshLambertMaterial({
    color: 0x00fff0
  });
  var cube = new THREE.Mesh(geometry, material);
  scene.add(cube);
  camera.position.z = 12;
  var pointLight = new THREE.PointLight(0xFFFFFF);
  pointLight.position.x = 10;
  pointLight.position.y = 50;
  pointLight.position.z = 130;
  scene.add(pointLight);
  var render = function() {
    var delta = Math.random() * (0.06 - 0.02) + 0.02;
    cube.rotation.x += delta;
    cube.rotation.y += delta;
    cube.rotation.z -= delta;
    renderer.render(scene, camera);
  };
  render();
  console.clear();
  btn.onclick = function(){ startRecording(renderer, render); };
}());
body {
  margin: 0;
  background: #000;
}
button {
  position: absolute;
  top: 0;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/85/three.min.js"></script>
<!-- Mobile devices need a user interaction to start the WebAudio API -->
<button id="btn">Start</button>

Display canvas as gif for video preview

I'm working on a website where I deal with videos.
I need to display a gif as a preview / teaser for the videos on another page, which redirects to the video.
What I found & added so far
HTML
<div id=thumbs></div>
CSS
#video {width:320px}
JS
var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");
video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);
video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j/4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  }
}, false);
video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";
function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);
  thumbs.appendChild(c);
  thumbs.replaceChild(c, thumbs.childNodes[0]);
}
What I do is fetch a video from its URL and grab 5 frames at equal intervals. That gives me canvases, and I'd like to display them as a gif or a succession of images.
Since you only want to display it, instead of trying to generate an actual gif file, the easiest way is probably to do the animation yourself.
You already have the code to fetch the video frames, but you are currently showing each one in the DOM and forgetting about it once displayed.
What you can do instead is store these frames directly as canvases, and then draw all these canvases, one after the other, onto a final, visible canvas in a timed loop.
var thumbsList = []; // we will save our frames as canvas in here
var delay = 500;     // the speed of the animation (ms)
function generateThumbnail() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  ctx.drawImage(video, 0, 0, 160, 90);
  thumbsList.push(c); // store this frame in our list
  if (thumbsList.length === 1) {
    displayThumbs(); // start animating as soon as we got a frame
  }
}
// initialises the display canvas, and starts the animation loop
function displayThumbs() {
  var c = document.createElement("canvas");
  var ctx = c.getContext("2d");
  c.width = 160;
  c.height = 90;
  thumbs.appendChild(c);
  startAnim(ctx); // pass our visible canvas' context
}
function startAnim(ctx) {
  var currentFrame = 0;
  // here is the actual loop
  function anim() {
    ctx.drawImage(thumbsList[currentFrame], 0, 0); // draw the currentFrame
    // increase our counter, and set it to 0 if too large
    currentFrame = (currentFrame + 1) % thumbsList.length;
    setTimeout(anim, delay); // do it again in x ms
  }
  anim(); // let's go !
}
var i = 0;
var video = document.createElement("video");
var thumbs = document.getElementById("thumbs");
/* OP's code */
video.addEventListener('loadeddata', function() {
  thumbs.innerHTML = "";
  video.currentTime = i;
}, false);
video.addEventListener('seeked', function() {
  var j = video.duration;
  var u = j / 4;
  // now video has seeked and current frames will show
  // at the time as we expect
  generateThumbnail(i);
  // when frame is captured, increase
  i += u;
  // if we are not passed end, seek to next interval
  if (i <= video.duration) {
    // this will trigger another seeked event
    video.currentTime = i;
  } else {
    // displayFrame(); // wait for all images to be parsed before animating
  }
}, false);
video.preload = "auto";
video.src = "https://www.html5rocks.com/en/tutorials/video/basics/devstories.webm";
<div id=thumbs></div>

Recording speed javascript

I am currently creating a project that supports video recording through my website.
I create a canvas and then push the recorded frames to it. The problem is that when I play the video after it's recorded, it plays too fast: a 10-second recording plays in about 2 seconds. I have checked that playbackRate is set to 1. I save the recording to a database and it's sped up there as well, so it has nothing to do with the browser's video player.
I am relatively new to AngularJS and JavaScript, so I'm sorry if I left something important out.
I have tried changing a lot of the values back and forth, but I can't seem to find the cause of the problem. Any ideas?
Here is the code for the video recording:
scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');
    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;
    function drawVideoFrame(time) {
      videoRecorder = requestAnimationFrame(drawVideoFrame);
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }
    videoRecorder = requestAnimationFrame(drawVideoFrame); // Note: not using vendor prefixes!
    scope.recording = true;
  }
};
scope.stopRecording = function () {
  cancelAnimationFrame(videoRecorder); // Note: not using vendor prefixes!
  // 2nd param: framerate for the video file.
  scope.video.files = Whammy.fromImageArray(recordedFrames, 1000 / 30);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, 1000 / 30);
  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
I'm guessing the culprit is requestAnimationFrame: left on its own, you cannot tell at what interval it keeps calling the callback; it can be as high as 60fps.
Also, looking at your code, I cannot tell how you came to the conclusion that the frame rate is 1000/30: Whammy's second parameter is the frame rate of the output file, so if frames are captured at one rate and encoded at another, the playback duration won't match the recording duration.
My advice (at least for your case) would be to go with $interval, so the capture interval and the encoding frame rate share the same known value.
You can do something like:
scope.frameRate = 10; // the amount I consider ideal for client-side video recording
var videoInterval;
scope.startRecording = function () {
  if (mediaStream) {
    var video = $('.video-capture')[0];
    var canvas = document.createElement('canvas');
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    ctx = canvas.getContext('2d');
    var CANVAS_WIDTH = canvas.width;
    var CANVAS_HEIGHT = canvas.height;
    function drawVideoFrame() {
      ctx.drawImage(video, 0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
      recordedFrames.push(canvas.toDataURL('image/webp', 1));
    }
    videoInterval = $interval(drawVideoFrame, 1000 / scope.frameRate);
    scope.recording = true;
  }
};
scope.stopRecording = function () {
  $interval.cancel(videoInterval);
  // 2nd param: framerate for the video file; now it matches the capture rate.
  scope.video.files = Whammy.fromImageArray(recordedFrames, scope.frameRate);
  recordedVideoBlob = Whammy.fromImageArray(recordedFrames, scope.frameRate); // you could change this to a file copy, to avoid processing the images into video twice
  scope.videoMode = 'viewRecording';
  scope.recording = false;
};
