Html canvas video streaming - javascript

I have this code, which plays my camera's view in a video tag and then in a canvas element on the same page. The page has two squares: one for the video tag (playing the camera's view) and the other for the canvas (playing the same video as the video tag). How can I edit my code so the video plays in a canvas on another page, to let my users watch it live? If there is a method that lets users watch it on the same page, seeing only the canvas, that is also acceptable.
here is my html file:
<html>
<head>
<meta charset="UTF-8">
<title>With canvas</title>
</head>
<body>
<!-- Two side-by-side squares: live camera preview and its canvas copy -->
<div class="booth">
<video id="video" width="400" height="300" autoplay></video>
<canvas id="canvas" width="400" height="300" style="border: 1px solid black;"></canvas>
</div>
<!-- video.js grabs the camera, plays it in #video and draws frames onto #canvas -->
<script src="video.js"></script>
</body>
</html>
And here is my js file:
(function () {
  // Grab the user's camera, show it in the <video> element, and mirror
  // every frame onto the <canvas> element (drawn every 100 ms).
  var canvas = document.getElementById('canvas'),
      context = canvas.getContext('2d'),
      video = document.getElementById('video'),
      vendorUrl = window.URL || window.webkitURL,
      drawTimer = null; // single interval handle so 'play' cannot stack loops

  // Attach the camera stream to the <video> element and start playback.
  function attachStream(stream) {
    if ('srcObject' in video) {
      video.srcObject = stream;
    } else {
      // Legacy browsers only: createObjectURL(MediaStream) was removed
      // from modern browsers.
      video.src = vendorUrl.createObjectURL(stream);
    }
    video.play();
  }

  function onError(error) {
    // Surface the failure instead of swallowing it silently.
    console.error('getUserMedia failed:', error);
  }

  // Prefer the modern promise-based API; fall back to the deprecated
  // prefixed callback API only when mediaDevices is unavailable.
  if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
    navigator.mediaDevices.getUserMedia({ video: true, audio: false })
      .then(attachStream)
      .catch(onError);
  } else {
    navigator.getMedia = navigator.getUserMedia ||
      navigator.webkitGetUserMedia ||
      navigator.mozGetUserMedia ||
      navigator.msGetUserMedia;
    navigator.getMedia({ video: true, audio: false }, attachStream, onError);
  }

  // 'play' fires again after every pause/resume; clear any previous timer
  // so only one draw loop ever runs at a time.
  video.addEventListener('play', function () {
    if (drawTimer !== null) clearInterval(drawTimer);
    drawTimer = setInterval(() => {
      draw(this, context, 400, 300);
    }, 100);
  }, false);

  // Copy the current video frame onto the canvas at the given size.
  function draw(video, context, width, height) {
    context.drawImage(video, 0, 0, width, height);
  }
})();

Use MediaRecorder to get live data in chunks as it records from the camera; those chunks can then be sent over to the other user's end.
Then use MediaSource on the other user's side to append the chunks and play them live as new chunks are received.
I know it's a complicated thing, but believe me — when I was experimenting with this I spent a good amount of time understanding it, and I believe you will get some benefit from my experiment code below.
here is a working demo
// Record the camera with MediaRecorder and feed the chunks straight into a
// MediaSource attached to the <video>, so it plays "live" in 5 s segments.
var mediasource = new MediaSource(),
    video = document.querySelector("video"),
    mime = 'video/webm;codecs=vp9,opus'
video.src = URL.createObjectURL(mediasource)
mediasource.addEventListener("sourceopen", function (_) {
  var source = mediasource.addSourceBuffer(mime)
  // appendBuffer() throws InvalidStateError if called while the SourceBuffer
  // is still processing the previous append (source.updating === true), so
  // queue incoming chunks and flush one at a time on 'updateend'.
  var queue = []
  function flush() {
    if (!source.updating && queue.length > 0) {
      source.appendBuffer(queue.shift())
    }
  }
  source.addEventListener("updateend", flush)
  navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
    var recorder = new MediaRecorder(stream, { mimeType: mime })
    recorder.ondataavailable = d => {
      // creating chunks of 5 seconds of video continuously;
      // convert the Blob to an ArrayBuffer because appendBuffer cannot take a Blob
      new Response(d.data).arrayBuffer().then(arraybuffer => {
        queue.push(arraybuffer)
        flush()
      })
    }
    recorder.start(5000)
  }).catch(err => console.error('getUserMedia failed:', err))
})
<video class="video" controls autoplay></video>
separated version for gathering live data and playing it demo
// gathering camera data to liveData array
// NOTE(review): chunks accumulate in liveData forever — fine for a demo,
// but a real app should cap or drain the array.
var vid = document.querySelector(".video"),
mime = 'video/webm;codecs=vp9,opus',
liveData = []
navigator.mediaDevices.getUserMedia({video:true,audio:true}).then(stream=>{
var recorder = new MediaRecorder(stream,{mimeType:mime})
// Fires once per 5 s segment (see recorder.start(5000) below).
recorder.ondataavailable = d =>{
console.log("capturing")
// SourceBuffer.appendBuffer needs an ArrayBuffer, not a Blob, so convert now.
new Response(d.data).arrayBuffer().then(eachChunk=>{
// recording in chunks here and saving it to liveData array
liveData.push(eachChunk)
})
}
recorder.start(5000)
})
//--------------------------------------------------------------------------------
// playing from liveData in video: poll every 5 s and append the next chunk.
var ms = new MediaSource()
vid.src = URL.createObjectURL(ms)
ms.onsourceopen = function () {
  var source = ms.addSourceBuffer(mime), chunkIndex = 0
  setInterval(function () {
    // Only append when (a) an unplayed chunk actually exists — the original
    // `liveData.length > 0` check let chunkIndex run past the end and append
    // undefined — and (b) the SourceBuffer is idle, because appendBuffer()
    // throws InvalidStateError while source.updating is true. chunkIndex
    // advances only after a successful append, so no chunk is skipped.
    if (chunkIndex < liveData.length && !source.updating) {
      source.appendBuffer(liveData[chunkIndex])
      console.log("playing")
      chunkIndex++
    }
  }, 5000)
}

Related

How to get audio buffer from video element api?

How do I get an audio buffer from a video element? I know a way using BaseAudioContext.decodeAudioData(), but that fetches directly from an audio file via a request. I need to get the buffer from the video directly so I can manipulate it.
// Question code: tries to read raw audio samples from a <video> element.
const video = document.createElement('video');
video.src = 'https://www.w3schools.com/html/mov_bbb.mp4';
document.body.appendChild(video);
let audioCtx;
let audioSource;
// Routes the element's audio through the Web Audio graph and starts playback.
const play = () => {
audioCtx = new AudioContext();
audioSource = audioCtx.createMediaElementSource(video);
audioSource.connect(audioCtx.destination);
video.play()
};
// Fires repeatedly during playback; attempts (and fails) to read samples.
video.ontimeupdate = () => {
// NOTE(review): a freshly constructed AudioBufferSourceNode has
// buffer === null, and nothing here ever assigns one — that is
// exactly the asker's reported problem.
let buffer = new AudioBufferSourceNode(audioCtx);
// Manipulate audio buffer realtime
// Problem is buffer null
buffer.connect(audioSource.context.destination);
console.log(buffer.buffer)
}
<!DOCTYPE html>
<html>
<body>
<!-- User gesture that calls play(), which creates the AudioContext -->
<button onclick="play()">Play</button>
</body>
</html>

HTML5 video element request stays pending forever (in Chrome on mobile) when toggling between the front/rear camera

I'm developing an app where users can capture a photo using the front/rear camera. It works perfectly, but when toggling between front and rear cameras, var playPromise = videoStream.play() stays in a pending state. Sometimes the promise resolves and the camera works, sometimes not.
This issue occurs only in the Chrome browser, not in Mozilla Firefox.
// NOTE(review): fragment quoted from the question — the enclosing async
// function is not shown, and the stray '};' before the catch makes this
// non-compiling as pasted.
try {
stopWebCam(); // stop media stream when toggle over camera
stream = await navigator.mediaDevices.getUserMedia({video: true});
/* use the stream */
let videoStream = document.getElementById('captureCandidateId');
videoStream.srcObject = stream;
// videoStream.play();
// play() returns a promise in modern browsers; older ones return undefined.
var playPromise = videoStream.play();
if (playPromise !== undefined) {
playPromise.then(_ => {
// Automatic playback started!
// Show playing UI.
})
.catch(error => {
// Auto-play was prevented
// Show paused UI.
});
}
};
} catch(err) {
/* handle the error */
console.log(err.name + ": " + err.message);
}
// Stop every track of the stream attached to the capture <video>, then
// detach the stream. The 0 ms timeout defers the teardown to the next
// tick, exactly as in the original.
let stopWebCam = function (pictureType) {
  setTimeout(() => {
    const videoEl = document.getElementById('captureCandidateId');
    const activeStream = videoEl.srcObject;
    if (activeStream && activeStream.getTracks) {
      for (const track of activeStream.getTracks()) {
        track.stop();
      }
    }
    videoEl.srcObject = null;
  }, 0);
};
Here, I drafted a piece of code for you; this is a much simpler and smaller approach than what you are trying to do. I am just taking the stream from the video element and drawing it to the canvas. The image can be downloaded by right-clicking.
NOTE: Example does not work in StackOverflow
<video id="player" controls autoplay></video>
<button id="capture">Capture</button>
<canvas id="canvas" width=320 height=240></canvas>
<script>
const player = document.getElementById('player');
const canvas = document.getElementById('canvas');
const context = canvas.getContext('2d');
const captureButton = document.getElementById('capture');
const constraints = {
  video: true,
};
captureButton.addEventListener('click', () => {
  // Draw the current video frame to the canvas.
  context.drawImage(player, 0, 0, canvas.width, canvas.height);
});
// Attach the video stream to the video element and autoplay.
navigator.mediaDevices.getUserMedia(constraints)
  .then((stream) => {
    player.srcObject = stream;
  })
  // Surface permission/device errors instead of leaving the promise floating.
  .catch((error) => {
    console.error('getUserMedia failed:', error);
  });
</script>
If you want, you can also make some edits according to your needs, like:
Choose which camera to use
Hide the video stream
Add an easier method to download the photo to your device
You can also add a functionality to upload the photo straight to the server if you have one

displaying my webcam in my webpage is not working

I am trying to display my webcam in my web page, but the video is not showing and it throws this error: (Failed to execute 'createObjectURL' on 'URL': No function was found that matched the signature provided.)
this is my java script
<script type ="text/javascript">
(function(){
var video = document.getElementById('video'),
vendorUrl = window.URL || window.webkitURL;
// Deprecated prefixed API; modern code should use navigator.mediaDevices.getUserMedia.
navigator.getMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
navigator.getMedia({
video: true,
audio: false
}, function(stream){
// createObjectURL(MediaStream) was removed from modern browsers — this
// line is the source of the asker's error; the accepted fix below is
// video.srcObject = stream.
video.src = vendorUrl.createObjectURL(stream);
video.play();
}, function(error){
// Error callback intentionally left empty in the question's code.
});
})();
</script>
I've read some documents about google removing the createObjectURL but I forgot the source of it, if it is true then what should I do to display my webcam video in my webpage
You can use the p5.js library. For security reasons this doesn't work in a Stack Overflow code snippet.
You can test it in the p5.js online editor or in your own page — don't forget to add the libraries.
Here is an example:
// p5.js sketch: shows the webcam feed plus a small rectangle, redrawn
// every frame by the draw() loop.
var cnv;
var sketchW = 640;
var sketchH = 480;
var cam;
var frame;
function setup() {
  cnv = createCanvas(sketchW, sketchH);
  cam = createCapture(VIDEO); // start the webcam capture element
  cam.id("whatever_id_name");
  cam.size(cnv.width, cnv.height); //480p
}
function draw() { // p5 calls this in a loop (default ~30 frames per second)
  background(100); // gray background - not visible
  rect(100, 100, 60, 30); // rectangle at (100,100) with size (60,30)
  // Paint the current camera frame each iteration at position (160,100),
  // sized to half the canvas dimensions (use 0,0 for the default size).
  frame = image(cam, 160, 100, sketchW / 2, sketchH / 2);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/p5.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/addons/p5.dom.js"></script>
You're trying to play raw data as a MediaSource stream.
To correct this, change:
video.src = vendorUrl.createObjectURL(stream);
video.play();
To:
video.srcObject = stream;
video.play();
In addition to this, note that navigator.getUserMedia is deprecated, and you should really be using navigator.mediaDevices.getUserMedia() instead (it returns a promise rather than taking callbacks):
navigator.mediaDevices.getUserMedia({ video: true }).then(stream => { /* use the stream */ })

Is it possible to use the MediaRecorder API with html5 video?

I would like to record the video tag so that I can stream the blob data to a websocket server, however, when I attempt to start the mediaRecorder I get the following error:
The MediaRecorder failed to start because there are no audio or video
tracks available.
Is it possible to add the audio/video tracks from the html5 video tag to the media stream?
<script>
var video = document.getElementById('video');
// captureStream() is called before any media is loaded, so the stream has
// no audio/video tracks yet — which is why MediaRecorder reports
// "no audio or video tracks available" (the question's error).
var mediaStream = video.captureStream(30);
var mediaRecorder = new MediaRecorder(
mediaStream,
{
mimeType: 'video/webm;codecs=h264',
videoBitsPerSecond: 3000000
}
);
// NOTE(review): fileElem is never defined in this snippet — presumably
// document.getElementById('file'); verify against the full page.
fileElem.onchange = function () {
var file = fileElem.files[0],
canplay = !!video.canPlayType(file.type).replace("no", ""),
isaudio = file.type.indexOf("audio/") === 0 && file.type !== "audio/mpegurl";
video.src = URL.createObjectURL(file);
video.play();
mediaRecorder.start(1000); // Start recording, and dump data every second
};
</script>
<p id="choice" class="info"><input type="file" id="file"> File type: <span id="ftype"></span></p>
<video width="640" height="360" id="video" src="" controls="false"></video>
Is it possible to use the MediaRecorder API with html5 video?
Yes, however you need to initialize MediaRecorder after the HTMLVideoElement.readyState has metadata.
Here is a sample, but it only works if the video source is from the same origin (since captureStream cannot capture from element with cross-origin data)
Note: In this sample, I use onloadedmetadata to initialize MediaRecorder after the video got metadata.
// Element references: source video, playback video, recorded chunks, recorder.
var mainVideo = document.getElementById("mainVideo"),
displayVideo = document.getElementById("displayVideo"),
videoData = [],
mediaRecorder;
var btnStart = document.getElementById("btnStart"),
btnStop = document.getElementById("btnStop"),
btnResult = document.getElementById("btnResult");
// Called from the video's onloadedmetadata handler, so captureStream()
// already has audio/video tracks when the recorder is created.
var initMediaRecorder = function() {
// Record with 25 fps
mediaRecorder = new MediaRecorder(mainVideo.captureStream(25));
// Collect every recorded chunk for later assembly in showCapture().
mediaRecorder.ondataavailable = function(e) {
videoData.push(e.data);
};
}
// Begin a new recording: reset the chunk store, start the recorder, and
// flip the buttons so only "Stop" is clickable while recording runs.
function startCapture() {
  videoData = [];
  mediaRecorder.start();
  // Toggle the three buttons' 'disabled' state for the recording phase.
  btnStart.setAttribute('disabled', 'disabled');
  btnResult.setAttribute('disabled', 'disabled');
  btnStop.removeAttribute('disabled');
};
// Stop the recorder and flip the buttons so "Start" and "Show Result"
// become clickable again.
function endCapture() {
  mediaRecorder.stop();
  // Toggle the three buttons' 'disabled' state back to the idle phase.
  btnStop.setAttribute('disabled', 'disabled');
  btnStart.removeAttribute('disabled');
  btnResult.removeAttribute('disabled');
};
// Assemble the recorded chunks into a Blob and point the playback <video>
// at it. Resolves once the object URL has been assigned.
function showCapture() {
  return new Promise(resolve => {
    setTimeout(() => {
      // Wrapping this in setTimeout, so its processed in the next RAF
      // Label the Blob with the recorder's actual mime type: Chrome records
      // captureStream() output as WebM, so hard-coding 'video/mp4' can
      // mislabel the data. Keep 'video/mp4' only as a last-resort fallback.
      var blob = new Blob(videoData, {
        'type': (mediaRecorder && mediaRecorder.mimeType) || 'video/mp4'
      });
      // Revoke the previous object URL so repeated captures don't leak.
      if (displayVideo.src) {
        window.URL.revokeObjectURL(displayVideo.src);
      }
      displayVideo.src = window.URL.createObjectURL(blob);
      resolve();
    }, 0);
  });
};
/* Constrain both preview videos to 300px and stack them vertically. */
video {
max-width: 300px;
max-height: 300px;
display: block;
margin: 10px 0;
}
<!-- Source video; the recorder is created in onloadedmetadata so that
     captureStream() already has tracks to record. -->
<video id="mainVideo" playsinline="" webkit-playsinline="1" controls="1" onloadedmetadata="initMediaRecorder()">
<source src="sampleVideo.mp4" type="video/mp4">
</video>
<button id="btnStart" onclick="startCapture()"> Start </button>
<button id="btnStop" disabled='disabled' onclick="endCapture()"> Stop </button>
<button id="btnResult" disabled='disabled' onclick="showCapture()"> Show Result </button>
<!-- Playback element for the recorded result -->
<video id="displayVideo" playsinline="" webkit-playsinline="1" controls="1"></video>

Uploading a picture taken from a webcam

How do I upload a picture that I took from a webcam? I've found code that worked (only on Chrome).
I've used the code, and when I click on Take Picture, I get the picture — but I don't see it in the source?
<video autoplay id="vid" style="display:none;"></video>
<canvas id="canvas" width="640" height="480" style="border:1px solid #d3d3d3;"></canvas><br>
<button onclick="snapshot()">Take Picture</button>
<script type="text/javascript">
var video = document.querySelector("#vid");
var canvas = document.querySelector('#canvas');
var ctx = canvas.getContext('2d');
var localMediaStream = null;
var onCameraFail = function (e) {
console.log('Camera did not work.', e);
};
// Copy the current camera frame onto the canvas.
function snapshot() {
if (localMediaStream) {
ctx.drawImage(video, 0, 0);
}
}
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia({video:true}, function (stream) {
// createObjectURL(MediaStream) was removed from modern browsers; assign
// the stream directly and keep the old path only as a legacy fallback.
if ('srcObject' in video) {
video.srcObject = stream;
} else {
video.src = window.URL.createObjectURL(stream);
}
localMediaStream = stream;
}, onCameraFail);
</script>
How do i add it to a form that have a picture element
<input name="avatar" class="avatar" type="file" required>
as you can see, the field is required, so if it is added to that field the form will see it as a non-empty?
Why doesn't it work on Opera? I use Opera 12.15.
Update: I found how to convert the canvas to an image, but I don't know how to fill it into the image element:
a = canvas.toDataURL("image/jpeg")
document.getElementById("avatar").src = a
If I use the JavaScript console, I get the source, but when I validate the form, the form thinks the image is still missing and blank.
Any ideas?
Build a setInterval that runs snapshot() every 300 ms.
The button then only stops the interval ;)
<button onclick="clearInterval(ivl)">Take Picture</button>
var ivl = setInterval(function(){ snapshot(); },300);

Categories

Resources