I want to set up VideoJS to handle RTMP streaming using VideoJS-SWF player as flash-fallback, which does seem to have some support for it.
This is how I'd set up VideoJS for normal MP4 videos:
var tag = document.createElement("video");
tag.id = "vid1";
tag.controls = true;
tag.className = "video-js";
tag.preload = "auto";
tag.width = "640";
tag.height = "264";
tag.poster = "http://video-js.zencoder.com/oceans-clip.png";
var source1 = document.createElement("source");
source1.src = "http://video-js.zencoder.com/oceans-clip.mp4";
source1.type = "video/mp4";
tag.appendChild(source1);
// Add more sources, etc...
document.getElementById('player').appendChild(tag);
var player = _V_(tag, {width:640, height:264, techOrder:['flash', 'html5']}, function() {
var that = this;
this.addEvent('loadstart', function() { that.play(); });
});
But how do you set up VideoJS to play an RTMP stream? Do you use the same tags, or is there some other way to specify the RTMP provider and stream url?
Just FYI - RTMP streaming is not yet available in VideoJS, but recent changes (like https://github.com/videojs/video.js/pull/605) indicate that it may be soon.
Pay attention to the feature request at https://github.com/videojs/video.js/issues/559 for updates.
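For reference, the syntax that later video.js releases adopted for RTMP sources looks roughly like the MP4 setup above, but with an RTMP source element. This is only a sketch; treat the "rtmp/mp4" type and the "&" separator between connection URL and stream name as assumptions to verify against whichever release you end up using:
var tag = document.createElement("video");
tag.id = "live1";
tag.className = "video-js";
tag.controls = true;
var source = document.createElement("source");
source.src = "rtmp://example.com/live/&mystream"; // connection URL + "&" + stream name (assumed format)
source.type = "rtmp/mp4"; // assumed RTMP source type
tag.appendChild(source);
document.getElementById('player').appendChild(tag);
var player = _V_(tag, {width:640, height:264, techOrder:['flash', 'html5']});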
I'm creating a chrome extension (not for new tab) and want to auto play a muted video every time the user goes to a specific website.
I tried it out in CodePen and it's still not playing the video.
// Video BG
let videoContainer = document.createElement("video");
videoContainer.id = "video";
videoContainer.style.backgroundColor = "rgba(236,55,55,.5)";
videoContainer.style.zIndex = "-1";
videoContainer.style.width = "100%";
videoContainer.style.height = "100vw";
videoContainer.style.marginTop = "50px";
videoContainer.style.position = "fixed";
videoContainer.style.top = "0";
videoContainer.src =
"https://external-content.duckduckgo.com/iu/?u=https%3A%2F%2Fi.pinimg.com%2Foriginals%2F6f%2F8a%2F7d%2F6f8a7d0ce651ac3ee11046c18b57d232.gif&f=1&nofb=1&ipt=8431012b0b8c3e7404333b537bb0e673adedd1bb00ff35ae9f65003423c0855c&ipo=images";
//videoContainer.type = "video/mp4";
videoContainer.muted = true;
videoContainer.autoplay = true;
videoContainer.loop = true;
videoContainer.controls = false;
document.body.append(videoContainer);
I'm pretty new to coding by the way. Thanks to all who can help.
No problem if you are new; you can check the official MDN documentation. The HTMLMediaElement play() method attempts to begin playback of the media and returns a Promise that resolves once playback has started successfully.
You can try using this async function to do that! ;)
const videoElem = document.querySelector("#video");
const playButton = document.querySelector("#playbutton");
playButton.addEventListener("click", handlePlayButton, false);
playVideo();
async function playVideo() {
try {
await videoElem.play();
playButton.classList.add("playing");
} catch (err) {
playButton.classList.remove("playing");
}
}
function handlePlayButton() {
if (videoElem.paused) {
playVideo();
} else {
videoElem.pause();
playButton.classList.remove("playing");
}
}
It seems the source,
videoContainer.src = "https://external-content.duckduckgo.com/iu/?u=https%3A%2F%2Fi.pinimg.com%2Foriginals%2F6f%2F8a%2F7d%2F6f8a7d0ce651ac3ee11046c18b57d232.gif&f=1&nofb=1&ipt=8431012b0b8c3e7404333b537bb0e673adedd1bb00ff35ae9f65003423c0855c&ipo=images";
was in fact not an '.mp4' but a '.gif'. I tried changing the type to 'video/*' to accept all video file types, but it still failed to display the '.gif' file.
My fix: simply changing the video source to an ".mp4" file, e.g. "myvideo.mp4".
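For completeness, a minimal version of that fix (the file name is just a placeholder, not a file from the question):
videoContainer.src = "myvideo.mp4"; // an actual MP4 file; a .gif is an animated image, not a video the <video> element can decode
// Note: a MIME type hint belongs on <source> elements; the video element itself has no "type" attribute.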
Questions:
Is 'video/*' not the correct way to include all video formats?
Is a '.gif' not a video file?
Thanks to all that helped!
I have been trying to make an audio player with basic controls that plays audio instead of video. It also needs to be viewable in Picture-in-Picture (PiP) mode. Here is my code so far:
var audio = document.createElement('audio'); // The Audio
var canvas = document.createElement('canvas'); // New Canvas To Contain Video
canvas.width = canvas.height = 512;
var video = document.createElement('video'); // Create Video
video.srcObject = canvas.captureStream(); // Make Video Reflect The Canvas
video.muted = true;
function showPictureInPictureWindow() {
const image = new Image();
image.crossOrigin = true;
image.src = [...navigator.mediaSession.metadata.artwork].pop().src;
image.decode();
canvas.getContext('2d').drawImage(image, 0, 0, 512, 512);
video.onloadedmetadata = function() {
video.play();
video.requestPictureInPicture();
};
}
window.VID = video;
window.AU = audio;
function playAudio() {
audio.src = "mysong.mp3";
// Update Media Session metadata
navigator.mediaSession.metadata = new MediaMetadata({
title: "Audio Title",
artist: "MEEEEEEE",
album: "LOOOL",
artwork: [{src: "graphics/128x128.png", sizes: "128x128", type: "image/png"}]
});
// Play audio
audio.play();
// Show track album in a Picture-in-Picture window
showPictureInPictureWindow();
}
window.VID.requestPictureInPicture();
playAudio();
Any help that you can give me would be very much appreciated. (I already have the pause and play functions but I didn't include them for message size)
PS: (I don't know why, but earlier I was getting an error that said "JavaScript exception: Failed to execute 'requestPictureInPicture' on 'HTMLVideoElement': Metadata for the video element are not loaded yet" and now I have no errors at all... and no music either...)
PPS: (I fixed some of the bugs... and now I have the original error again.)
I had the same problem as you, but I have solved it.
My solution:
My guess is that the video data had not finished loading yet when requestPictureInPicture() was executed,
so you can call requestPictureInPicture() inside a loadeddata event handler instead.
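A minimal sketch of that idea applied to the code above (the once option just keeps the handler from firing again; most browsers also require a user gesture before requestPictureInPicture() will resolve):
// Defer the PiP request until the canvas-backed video actually has data to display.
video.addEventListener('loadeddata', function() {
    video.play();
    video.requestPictureInPicture();
}, { once: true });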
I am using a Lumenera INFINITY1-2 microscopy camera (https://www.lumenera.com/infinity1-2.html); according to the camera description it can be integrated into 3rd party software packages using TWAIN support. I tried to capture an image from a web page using HTML5, but it gives me an error.
I also tried checking the video stream with Adobe Flash, but the camera stream is not displayed properly and the captured image is quite broken (in fact it is empty).
I checked the camera's drivers; they are all installed and working. The native camera application also captures images well.
Question: is it possible to capture images in the web browser from a microscopy camera like that? It is recognized by the browser as a device, but the video stream is not working.
Please see the script below (the code comes from https://github.com/jhuckaby/webcamjs/blob/master/DOCS.md).
Thank you in advance
window.addEventListener("load", function () {
// [1] GET ALL THE HTML ELEMENTS
var video = document.getElementById("vid-show"),
canvas = document.getElementById("vid-canvas"),
take = document.getElementById("vid-take");
navigator.mediaDevices.getUserMedia({ video: true })
.then(function (stream) {
// [3] SHOW VIDEO STREAM ON VIDEO TAG
video.srcObject = stream;
video.play();
take.addEventListener("click", function () {
// Create snapshot from video
var draw = document.createElement("canvas");
draw.width = video.videoWidth;
draw.height = video.videoHeight;
var context2D = draw.getContext("2d");
context2D.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
// Output as file
var anchor = document.createElement("a");
anchor.href = draw.toDataURL("image/png");
anchor.download = "webcam.png";
anchor.innerHTML = "Click to download";
canvas.innerHTML = "";
canvas.appendChild(anchor);
var imageBase64 = draw.toDataURL("image/png").replace("data:image/png;base64,", "");
document.getElementById("hdn_ImageByte").setAttribute("value", imageBase64);
});
})
.catch(function (err) {
debugger;
console.log(err)
document.getElementById("vid-controls").innerHTML = "Please enable access and attach a camera";
});
});
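Also, in case the browser is picking up a different capture device than the Lumenera camera, it may be worth enumerating the video inputs and requesting the camera explicitly by deviceId. This is only a sketch of that idea (the label match on "INFINITY" is an assumption), not a confirmed fix for this particular camera:
// List all video inputs and request a specific one by deviceId.
// Note: device labels are usually empty until camera permission has been granted once.
navigator.mediaDevices.enumerateDevices().then(function (devices) {
    var cams = devices.filter(function (d) { return d.kind === "videoinput"; });
    cams.forEach(function (d) { console.log(d.label, d.deviceId); });
    var target = cams.find(function (d) { return /INFINITY/i.test(d.label); }) || cams[0];
    return navigator.mediaDevices.getUserMedia({ video: { deviceId: { exact: target.deviceId } } });
}).then(function (stream) {
    document.getElementById("vid-show").srcObject = stream;
});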
There is a feature in my app where the user can add videos using a Google Drive shareable link. I uploaded the video to Google Drive and then created a shareable link, pasted that link into the upload form, and the video got added. I need to validate 3 things -
the format must be MP4
whether file access has been granted or not
whether the file size is more than 25 MB
Following is my code -
var link = videoUrl;
if (link.indexOf('drive') != -1) {
var gdrive = link.replace('/view?usp=sharing', '').replace('open?id=', 'uc?export=download&id=').replace('file/d/', 'uc?export=download&id=').replace('/edit?usp=sharing', '');
videoUrl = gdrive;
}
createVideoElement = function(url) {
    var element = document.createElement('video');
    element.src = url;
    element.width = '400';
    element.height = '200';
    element.controls = true;
    element.autoplay = true;
    return element;
};
var videoelement = createVideoElement(videoUrl); // call after the function is defined
Sample Video Shareable which I am trying to add - https://drive.google.com/open?id=0B31psA0C98iFNl9ScWRESTBxalk
I have worked with file uploads earlier and I am familiar with how to validate the uploaded file size. But here, since it is a Google Drive link, I have no idea how to implement it.
You can check properties of the video (such as its duration) by loading just the metadata - you don't need to download the full video for this:
var video = document.createElement('video');
video.preload = 'metadata';
video.onloadedmetadata = function() {
    alert("metadata loaded");
    var duration = video.duration;
    alert("duration is " + duration);
};
video.src = videoUrl; // setting src kicks off the metadata load
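Loading the metadata gives you the duration and dimensions, but not the file size in bytes. One way to check the 25 MB limit without downloading the video is a HEAD request that reads the Content-Length header; this is only a sketch and assumes the Drive download URL exposes Content-Length and allows cross-origin requests, which is not guaranteed:
// Sketch: check whether the file at videoUrl exceeds 25 MB via a HEAD request.
fetch(videoUrl, { method: 'HEAD' }).then(function (response) {
    var length = response.headers.get('Content-Length');
    if (length && parseInt(length, 10) > 25 * 1024 * 1024) {
        console.log('File is larger than 25 MB');
    }
});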
I'm building a small application that uses the MediaRecorder API to split the video recorded from the webcam into parts and upload all the parts to a server.
I see that with the Media Source API, I need to play the first part before any other part will play.
According to http://www.w3.org/TR/media-source/#examples I need the "Initialization Segment" at the beginning of the file.
How can I generate the "Initialization Segment" of a WebM file in JS so I can play any part I choose? Is there a library for that? (I have no knowledge of the WebM byte stream format.)
You need to use Media Source Extensions (MSE).
Please refer to the example below:
<script>
var appendID = "123";
function onOpen(e) {
var video = e.target;
var headers = GetStreamHeaders();
if (headers == null) {
// Error fetching headers. Signal end of stream with an error.
video.sourceEndOfStream(HTMLMediaElement.EOS_NETWORK_ERR);
}
video.sourceAddId(appendID, 'video/webm; codecs="vorbis,vp8"');
// Append the stream headers (i.e. WebM Header, Info, and Tracks elements)
video.sourceAppend(appendID, headers);
// Append some initial media data.
video.sourceAppend(appendID, GetNextCluster());
}
function onSeeking(e) {
var video = e.target;
// Abort current segment append.
video.sourceAbort(appendID);
// Notify the cluster loading code to start fetching data at the
// new playback position.
SeekToClusterAt(video.currentTime);
// Append clusters from the new playback position.
video.sourceAppend(appendID, GetNextCluster());
video.sourceAppend(appendID, GetNextCluster());
}
function onProgress(e) {
var video = e.target;
if (video.sourceState == video.SOURCE_ENDED)
return;
// If we have run out of stream data, then signal end of stream.
if (!HaveMoreClusters()) {
video.sourceEndOfStream(HTMLMediaElement.EOS_NO_ERROR);
return;
}
video.sourceAppend(appendID, GetNextCluster());
}
var video = document.getElementById('v');
video.addEventListener('sourceopen', onOpen);
video.addEventListener('seeking', onSeeking);
video.addEventListener('progress', onProgress);
</script>
<video id="v" autoplay> </video>
<script>
var video = document.getElementById('v');
video.src = video.mediaSourceURL;
</script>
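Note that the snippet above follows an early draft of the Media Source spec; sourceAddId / sourceAppend do not exist in current browsers. With the standardized API, the "Initialization Segment" is simply the first chunk the MediaRecorder produces for a recording (it carries the WebM EBML header, Info and Tracks elements), so you append that chunk first and then the later parts. A rough sketch with the current API; the codec string and the parts array of recorded chunks are assumptions about your setup:
// Sketch using the standardized Media Source Extensions API.
// parts: ArrayBuffers of the uploaded chunks, with the very first recorded chunk
// (which contains the initialization segment) at index 0.
var video = document.getElementById('v');
var mediaSource = new MediaSource();
video.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function () {
    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8,opus"'); // codec string is an assumption
    var queue = parts.slice();
    function appendNext() {
        if (queue.length === 0) {
            mediaSource.endOfStream();
            return;
        }
        sourceBuffer.appendBuffer(queue.shift());
    }
    sourceBuffer.addEventListener('updateend', appendNext);
    appendNext();
});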