Create a popup media player using JS?

I have been trying to make an audio player with basic controls that plays audio instead of a video, and it also needs to be viewable in Picture-in-Picture (PiP) mode. Here is my code so far:
var audio = document.createElement('audio');   // The Audio
var canvas = document.createElement('canvas'); // New Canvas To Contain Video
canvas.width = canvas.height = 512;
var video = document.createElement('video');   // Create Video
video.srcObject = canvas.captureStream();      // Make Video Reflect The Canvas
video.muted = true;

function showPictureInPictureWindow() {
    const image = new Image();
    image.crossOrigin = true;
    image.src = [...navigator.mediaSession.metadata.artwork].pop().src;
    image.decode();
    canvas.getContext('2d').drawImage(image, 0, 0, 512, 512);
    video.onloadedmetadata = function() {
        video.play();
        video.requestPictureInPicture();
    };
}

window.VID = video;
window.AU = audio;

function playAudio() {
    audio.src = "mysong.mp3";
    // Update Media Session metadata
    navigator.mediaSession.metadata = new MediaMetadata({
        title: "Audio Title",
        artist: "MEEEEEEE",
        album: "LOOOL",
        artwork: [{src: "graphics/128x128.png", sizes: "128x128", type: "image/png"}]
    });
    // Play audio
    audio.play();
    // Show track album in a Picture-in-Picture window
    showPictureInPictureWindow();
}

window.VID.requestPictureInPicture();
playAudio();
Any help that you can give me would be very much appreciated. (I already have the pause and play functions but I didn't include them for message size)
PS: (I don't know why, but earlier I was getting an error that said "JavaScript exception: Failed to execute 'requestPictureInPicture' on 'HTMLVideoElement': Metadata for the video element are not loaded yet" and now I have no errors at all... and no music either...)
PPS: (I fixed some of the bugs... and now I have the original error again.)

I had the same problem as you, and I solved it.
My solution: my guess is that the video data had not finished loading when requestPictureInPicture() was executed.
So you can call requestPictureInPicture() from inside a loadeddata event handler instead.
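For example, here is a minimal sketch of that idea, assuming the same video element as in the question (the exact event wiring and error handling here are mine, not part of the original code):

// Only request Picture-in-Picture once the video element actually has data.
video.addEventListener('loadeddata', async () => {
    try {
        await video.play();
        await video.requestPictureInPicture();
    } catch (err) {
        console.error('Could not enter Picture-in-Picture:', err);
    }
}, { once: true });

Note that browsers generally also require a user gesture before requestPictureInPicture() will resolve, so you may still need to trigger this flow from a click handler.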

Related

How to create and play a video in JavaScript

I'm creating a Chrome extension (not for the new tab page) and want to auto-play a muted video every time the user goes to a specific website.
I tried it out in CodePen and it's still not playing the video.
// Video BG
let videoContainer = document.createElement("video");
videoContainer.id = "video";
videoContainer.style.backgroundColor = "rgba(236,55,55,.5)";
videoContainer.style.zIndex = "-1";
videoContainer.style.width = "100%";
videoContainer.style.height = "100vw";
videoContainer.style.marginTop = "50px";
videoContainer.style.position = "fixed";
videoContainer.style.top = "0";
videoContainer.src =
"https://external-content.duckduckgo.com/iu/?u=https%3A%2F%2Fi.pinimg.com%2Foriginals%2F6f%2F8a%2F7d%2F6f8a7d0ce651ac3ee11046c18b57d232.gif&f=1&nofb=1&ipt=8431012b0b8c3e7404333b537bb0e673adedd1bb00ff35ae9f65003423c0855c&ipo=images";
//videoContainer.type = "video/mp4";
videoContainer.muted = true;
videoContainer.autoplay = true;
videoContainer.loop = true;
videoContainer.controls = false;
document.body.append(videoContainer);
I'm pretty new to coding by the way. Thanks to all who can help.
No problem if you are new. You can check the official MDN documentation: the HTMLMediaElement play() method attempts to begin playback of the media and returns a Promise that is resolved when playback has been successfully started.
You can try using this async function to do that! ;)
const videoElem = document.querySelector("#video");
const playButton = document.querySelector("#playbutton");

playButton.addEventListener("click", handlePlayButton, false);
playVideo();

async function playVideo() {
    try {
        await videoElem.play();
        playButton.classList.add("playing");
    } catch (err) {
        playButton.classList.remove("playing");
    }
}

function handlePlayButton() {
    if (videoElem.paused) {
        playVideo();
    } else {
        videoElem.pause();
        playButton.classList.remove("playing");
    }
}
It seems the source,
videoContainer.src = "https://external-content.duckduckgo.com/iu/?u=https%3A%2F%2Fi.pinimg.com%2Foriginals%2F6f%2F8a%2F7d%2F6f8a7d0ce651ac3ee11046c18b57d232.gif&f=1&nofb=1&ipt=8431012b0b8c3e7404333b537bb0e673adedd1bb00ff35ae9f65003423c0855c&ipo=images";
was in fact not an '.mp4' but a '.gif'. I tried setting the type to 'video/*' to accept all video file types, but it still failed to display the '.gif' file.
My fix: simply changing the video source to an '.mp4' file, e.g. "myvideo.mp4".
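For reference, a minimal sketch of that fix ("myvideo.mp4" is just a placeholder path, not a real asset):

// Point the element at an actual video file instead of a .gif.
videoContainer.src = "myvideo.mp4";
videoContainer.muted = true;    // muted autoplay is allowed by most browsers
videoContainer.autoplay = true;
videoContainer.loop = true;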
Questions:
Is 'video/*' not the correct way to include all video formats?
Is a '.gif' not a video file?
Thanks to all that helped!

HTML5 video element play() request stays pending forever (on Chrome on mobile) when toggling between front and rear cameras

I'm developing an app where users can capture a photo using the front or rear camera. It works perfectly, but when toggling between the front and rear cameras, var playPromise = videoStream.play() gets stuck in a pending state. Sometimes the promise resolves and the camera works, sometimes it doesn't.
This issue occurs only in the Chrome browser, not in Mozilla Firefox.
try {
    stopWebCam(); // stop media stream when toggling cameras
    stream = await navigator.mediaDevices.getUserMedia({video: true});
    /* use the stream */
    let videoStream = document.getElementById('captureCandidateId');
    videoStream.srcObject = stream;
    // videoStream.play();
    var playPromise = videoStream.play();
    if (playPromise !== undefined) {
        playPromise.then(_ => {
            // Automatic playback started!
            // Show playing UI.
        })
        .catch(error => {
            // Auto-play was prevented
            // Show paused UI.
        });
    }
} catch(err) {
    /* handle the error */
    console.log(err.name + ": " + err.message);
}

let stopWebCam = function (pictureType) {
    setTimeout(() => {
        let videoStream = document.getElementById('captureCandidateId');
        const stream = videoStream.srcObject;
        if (stream && stream.getTracks) {
            const tracks = stream.getTracks();
            tracks.forEach(function(track) {
                track.stop();
            });
        }
        videoStream.srcObject = null;
    }, 0);
}
Here, I drafted a piece of code for you; it is a much simpler and smaller approach than what you are trying to do. I am just taking the stream from the video element and drawing it to a canvas. The image can then be downloaded by right-clicking.
NOTE: the example does not work when run inside Stack Overflow.
<video id="player" controls autoplay></video>
<button id="capture">Capture</button>
<canvas id="canvas" width=320 height=240></canvas>
<script>
    const player = document.getElementById('player');
    const canvas = document.getElementById('canvas');
    const context = canvas.getContext('2d');
    const captureButton = document.getElementById('capture');

    const constraints = {
        video: true,
    };

    captureButton.addEventListener('click', () => {
        // Draw the video frame to the canvas.
        context.drawImage(player, 0, 0, canvas.width, canvas.height);
    });

    // Attach the video stream to the video element and autoplay.
    navigator.mediaDevices.getUserMedia(constraints)
        .then((stream) => {
            player.srcObject = stream;
        });
</script>
If you want, you can also make some edits according to your needs, like:
Choose which camera to use (see the sketch after this list)
Hide the video stream
Add an easier method to download the photo to your device
You can also add functionality to upload the photo straight to a server, if you have one
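As an illustration of the first and third points, here is a minimal sketch, assuming the player and canvas elements from the snippet above; the facingMode values are standard getUserMedia constraints, while startCamera, useRearCamera, downloadSnapshot and the file name are hypothetical names for this example only:

// Sketch: request a specific camera via the facingMode constraint.
// `useRearCamera` is a hypothetical flag driven by your own UI toggle.
async function startCamera(useRearCamera) {
    const constraints = {
        video: { facingMode: useRearCamera ? 'environment' : 'user' }
    };
    const stream = await navigator.mediaDevices.getUserMedia(constraints);
    player.srcObject = stream; // `player` is the <video> element from above
    return stream;
}

// Sketch: offer the captured canvas frame as a download instead of
// relying on right-click (the PNG file name is arbitrary).
function downloadSnapshot() {
    canvas.toBlob((blob) => {
        const a = document.createElement('a');
        a.href = URL.createObjectURL(blob);
        a.download = 'snapshot.png';
        a.click();
        URL.revokeObjectURL(a.href);
    }, 'image/png');
}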

Web API MediaRecorder: How do I force it to record at 60 fps?

Summary
The API I speak about
The context
The problem: Actual result + Expected result
What I have already tried
Clues to help you to help me
Minimal and Testable Example: Prerequisites and Steps to follow + Sources
The API I speak about
https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
The context
I have a video element that I load and play several times with different src values. Between two different videos, a transition occurs and each video appears with a fade-in. While a video is playing, I draw it onto a canvas with the transition and the fade-in.
Everything I draw onto the canvas is recorded using the MediaRecorder Web API. Then I download the WEBM file corresponding to this recording.
The problem
Actual result
The resulting WEBM is jerky. The canvas drawing was fluid while it was being drawn, but the WEBM recorded from it is jerky.
Expected result
The WEBM resulting from the recording of the canvas drawing must be as fluid as the canvas drawing itself. If that exact result is impossible, then the WEBM must be approximately as fluid as the canvas drawing, close enough that it can't reasonably be called jerky.
What I have already tried
First, I tried passing a timeslice of 1 ms, 16 ms (corresponding to 60 fps), 100 ms, then 1000, then 10000, etc. to the MediaRecorder start() method, but it didn't work.
Second, I tried calling requestData() every 16 ms (from a simple JS timer that fires every 16 ms), but that didn't work either.
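For reference, a minimal sketch of those two attempts as I understand them (it assumes the same canvas as in the example further down; chunks and pump are just local names for this sketch):

// Attempt 1: pass a timeslice (in ms) to start(), e.g. 16 ms hoping for ~60 fps chunks.
const chunks = [];
const stream = canvas.captureStream();
const rec = new MediaRecorder(stream);
rec.ondataavailable = (e) => chunks.push(e.data);
rec.start(16); // also tried 1, 100, 1000, 10000, ...

// Attempt 2: explicitly request data every 16 ms with a timer instead.
const pump = setInterval(() => {
    if (rec.state === 'recording') rec.requestData();
}, 16);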
Clues to help you to help me
Perhaps I'm wrong, but it is possible that there is a hardware limit on my computer (I have an HP Spectre x360 with no dedicated graphics card, only a small integrated graphics chip), or a software limit in my Chromium browser (Version 81.0.4044.138 on Ubuntu 16.04 LTS).
If you can confirm this, it would settle the question. If you can explain how to work around the problem, even better (an alternative to the MediaRecorder Web API, or something else).
Minimal and Testable Example
Prerequisites and Steps to follow
Have at least 2 WEBM videos (these will be the input videos; remember, we want to output a new video that contains the canvas drawing, which itself contains these two input videos plus the transitions, color effects, etc.).
Have an HTTP server and a file called "index.html" that you open with, e.g., Chromium v.81. Copy/paste the following sources inside it and click the "Start" button (you won't need to click the "Stop" button). It will draw both videos on the canvas with the transitions and color effects, record the canvas drawing, and a "Download the final output video" link will appear, allowing you to download the output video. You will see that it's jerky.
In the following sources, copy/paste the paths of your videos into the JS array called videos.
Sources
<html>
<head>
    <title>Creating Final Video</title>
</head>
<body>
    <button id="start">Start</button>
    <button id="stop_recording">Stop recording</button>
    <video id="video" width="320" height="240" controls>
        <source type="video/mp4">
    </video>
    <canvas id="canvas" width=3200 height=1608></canvas>
    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
    <script>
        var videos = []; // Populated by Selenium
        var canvas = document.getElementById("canvas");
        var ctx = canvas.getContext("2d");
        var video = document.querySelector("video");

        startRecording();

        $('#start').click(function() {
            showSomeMedia(0);
        });

        function showSomeMedia(videos_counter) {
            resetVideo();
            if (videos_counter == videos.length) {
                $('#stop_recording').click();
                return;
            } else {
                setVideoSrc(videos_counter);
                setVideoListener();
                videos_counter++;
            }
            video.addEventListener('ended', () => {
                showSomeMedia(videos_counter);
            }, false);
        }

        function resetVideo() {
            var clone = video.cloneNode(true);
            video.remove();
            video = clone;
        }

        function setVideoSrc(videos_counter) {
            video.setAttribute("src", videos[videos_counter]);
            video.load();
            video.play();
        }

        function setVideoListener() {
            var alpha = 0.1;
            video.addEventListener('playing', () => {
                function step() {
                    if (alpha < 1) {
                        ctx.globalAlpha = alpha;
                    }
                    ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
                    requestAnimationFrame(step);
                    if (alpha < 1) {
                        alpha += 0.00001;
                    }
                }
                requestAnimationFrame(step);
            }, false);
        }

        function startRecording() {
            const chunks = [];
            const stream = canvas.captureStream();
            const rec = new MediaRecorder(stream);
            rec.ondataavailable = e => chunks.push(e.data);
            $('#stop_recording').click(function() {
                rec.stop();
            });
            rec.onstop = e => exportVid(new Blob(chunks, {type: 'video/webm'}));
            window.setTimeout(function() {
                rec.requestData();
            }, 1);
            rec.start();
        }

        function exportVid(blob) {
            const vid = document.createElement('video');
            vid.src = URL.createObjectURL(blob);
            vid.controls = true;
            document.body.appendChild(vid);
            const a = document.createElement('a');
            a.download = 'my_final_output_video.webm';
            a.href = vid.src;
            a.textContent = 'Download the final output video';
            document.body.appendChild(a);
        }
    </script>
</body>
</html>
The problem was solved by using my gaming laptop (ROG), which has a good graphics card and can record at a higher frame rate than my Spectre x360 development laptop.

HTML5 getUserMedia is not working with scientific microscopy camera Lumenera

I am using a Lumenera INFINITY1-2 microscopy camera (https://www.lumenera.com/infinity1-2.html), and according to the camera description it can be integrated into 3rd-party software packages using TWAIN support. I am trying to capture an image from a web page using HTML5, but it gives me an error.
I also tried to check video streaming with Adobe Flash, but the camera stream is not displayed properly and the captured image is quite broken (in fact it is empty).
I checked the camera's drivers; they are all installed and working. The native camera application also captures images fine.
Question: is it possible to capture images in the web browser from a microscopy camera like this? It is recognized by the browser as a device, but the video stream is not working.
Please see the script below (the code is taken from https://github.com/jhuckaby/webcamjs/blob/master/DOCS.md).
Thank you in advance
window.addEventListener("load", function () {
    // [1] GET ALL THE HTML ELEMENTS
    var video = document.getElementById("vid-show"),
        canvas = document.getElementById("vid-canvas"),
        take = document.getElementById("vid-take");

    navigator.mediaDevices.getUserMedia({ video: true })
        .then(function (stream) {
            // [3] SHOW VIDEO STREAM ON VIDEO TAG
            video.srcObject = stream;
            video.play();
            take.addEventListener("click", function () {
                // Create snapshot from video
                var draw = document.createElement("canvas");
                draw.width = video.videoWidth;
                draw.height = video.videoHeight;
                var context2D = draw.getContext("2d");
                context2D.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
                // Output as file
                var anchor = document.createElement("a");
                anchor.href = draw.toDataURL("image/png");
                anchor.download = "webcam.png";
                anchor.innerHTML = "Click to download";
                canvas.innerHTML = "";
                canvas.appendChild(anchor);
                var imageBase64 = draw.toDataURL("image/png").replace("data:image/png;base64,", "");
                document.getElementById("hdn_ImageByte").setAttribute("value", imageBase64);
            });
        })
        .catch(function (err) {
            debugger;
            console.log(err);
            document.getElementById("vid-controls").innerHTML = "Please enable access and attach a camera";
        });
});

Wait until video loads to capture snapshot as image

I'm working on a script that takes an mp4 video and creates a snapshot of its first frame. The purpose for doing this is that some mobile browsers won't play an mp4 on load; they just show the mp4 with a play button. Creating a snapshot is therefore a good fallback for mobile devices.
I have a basic snippet already, and it works about 70% of the time. When it doesn't work, I think the script is trying to take a snapshot from a cached video, or it's trying to capture before the video has fully loaded. Does anyone have suggestions on how to make this work 100% of the time? I've tried deferring lines of code to wait until everything loads, yet sometimes it still doesn't work. What has helped a little is adding a small setTimeout...
( function( window, $ ) {
    const document = window.document;

    const ImgSnapshot = (el) => {
        // setup variables
        const $el = $(el);
        const video = $el.find(".wave-animation__container").get(0);

        $(video).ready( () => {
            setTimeout(() => {
                function createCanvas() {
                    // create a canvas obj
                    let canvas = document.createElement("canvas");
                    canvas.width = video.videoWidth;
                    canvas.height = video.videoHeight;
                    canvas.getContext('2d')
                        .drawImage(video, 0, 0, canvas.width, canvas.height);
                    // wait until we have the canvas object captured
                    return $.Deferred().resolve( canvas ).promise();
                }

                createCanvas().done( ( canvas ) => {
                    // create an image element to append
                    let img = document.createElement("img");
                    img.src = canvas.toDataURL();
                    img.classList.add('hide-for-medium', 'snapshot');
                    $el.append(img);
                    video.classList.add( 'show-for-medium' );
                });
            }, 150);
        });
    };

    $(document).ready(function() {
        $('.js-wave-animation').each(function() {
            new ImgSnapshot(this);
        });
    });
} )( window, jQuery );
I have a different approach for this problem. Instead of using deferred objects to check whether the video has loaded, how about using the video's media events?
<video width="400" controls id="myvideo">
    <source src="" type="video/mp4">
</video>
<script type="text/javascript">
    var jqvideo = $("#myvideo");
    var video = jqvideo[0];

    video.addEventListener("loadeddata", function() {
        console.log("loaded");
        let canvas = document.createElement("canvas");
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
        canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
        let img = document.createElement("img");
        img.src = canvas.toDataURL();
        $("body").append(img);
    }, true);

    jqvideo.find("source").attr("src", "yourvideourl.mp4");
</script>
