displaying my webcam in my webpage is not working - javascript

I am trying to display my webcam in my web page but the video is not showing and it is fetching this error (Failed to execute 'createObjectURL' on 'URL': No function was found that matched the signature provided.
at )
this is my java script
<script type ="text/javascript">
(function(){
var video = document.getElementById('video'),
vendorUrl = window.URL || window.webkitURL;
navigator.getMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
navigator.getMedia({
video: true,
audio: false
}, function(stream){
video.src = vendorUrl.createObjectURL(stream);
video.play();
}, function(error){
});
})();
</script>
I've read some documents about google removing the createObjectURL but I forgot the source of it, if it is true then what should I do to display my webcam video in my webpage

You can use p5js library. For security reasons this doesn't work in stackOverFlow code snippet.
Although you can test it on p5Js online editor or test it in your example. dont forget to add the libraries
this example
var myCanvas;
var w = 640;
var h = 480;
var myVideo;
var frame;
function setup(){
myCanvas = createCanvas(w,h);
myVideo = createCapture(VIDEO);
myVideo.id("whatever_id_name");
myVideo.size(myCanvas.width, myCanvas.height); //480p
}
function draw(){ //loop function
background(100); //Gray background - not visible
rect(100,100,60,30); //Create rect at position (100,100) with size (60,30)
//frameRate(1); //Default frame rate (30 frames per second)
//add the image of the capturedVideo every frame
//at position (100,100)
//with size have of the canvas dimensions - use 0,0 for default video size
frame = image(myVideo, 160,100,w/2,h/2);
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/p5.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/0.7.3/addons/p5.dom.js"></script>

You're trying to play raw data as a MediaSource stream.
To correct this, change:
video.src = vendorUrl.createObjectURL(stream);
video.play();
To:
video.srcObject = stream;
video.play();
In addition to this, note that navigator.getUserMedia is deprecated, and you should really be using navigator.mediaDevices.getUserMedia() instead:
navigator.getMedia = navigator.mediaDevices.getUserMedia()

Related

Html canvas video streaming

I have this code which playing my camera's view in a video tag and then into canvas element at the same page, the page has 2 squares one for the video tag(playing camera's view) and the other is for canvas(playing the same video of the video tag). How I can edit my code to make the video play in a canvas at another page to let my user watch it live, and if there is a method to let users watch it at the same page not another page and see the canvas only, it's also accepted.
here is my html file:
<html>
<head>
<meta charset="UTF-8">
<title>With canvas</title>
</head>
<body>
<div class="booth">
<video id="video" width="400" height="300" autoplay></video>
<canvas id="canvas" width="400" height="300" style="border: 1px solid black;"></canvas>
</div>
<script src="video.js"></script>
</body>
</html>
And here is my js file:
(function() {
var canvas= document.getElementById('canvas'),
context = canvas.getContext('2d'),
video = document.getElementById('video'),
vendorUrl = window.URL || window.webkitURL;
navigator.getMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
navigator.getMedia ({
video: true,
audio: false
}, function(stream) {
//video.src = vendorUrl.createObjectURL(stream);
if ('srcObject' in video) {
video.srcObject = stream;
} else {
video.src = vendorUrl.createObjectURL(stream);
}
video.play();
}, function(error) {
// An error occured
//error.code
});
video.addEventListener('play', function() {
setInterval(() => {
draw(this, context, 400, 300);
}, 100);
}, false );
function draw(video, context, width, height) {
context.drawImage(video, 0, 0, width, height);
}
}) ();
use MediaRecorder to get live data in chunks as media recorder records from camera and give chunks that can be send over other end of user
then use mediasource to append chunks on other side of user to append it and play it live as it recieves new chunks
i know its a complicated thing but believe me when i was experimenting this i have spent good amount of time to understand i believe you will get some benefit from my experiment code below
here is a working demo
var mediasource = new MediaSource(),video = document.querySelector("video"),
mime = 'video/webm;codecs=vp9,opus'
video.src = URL.createObjectURL(mediasource)
mediasource.addEventListener("sourceopen",function(_){
var source = mediasource.addSourceBuffer(mime)
navigator.mediaDevices.getUserMedia({video: true,audio: true}).then(stream=>{
var recorder = new MediaRecorder(stream,{mimeType:mime})
recorder.ondataavailable = d =>{
// creating chunks of 5 seconds of video continiously
var r = new Response(d.data).arrayBuffer() // convert blob to arraybuffer
r.then(arraybuffer=>{
source.appendBuffer(arraybuffer)
})
}
recorder.start(5000)
})
})
<video class="video" controls autoplay></video>
separated version for gathering live data and playing it demo
// gathering camera data to liveData array
var vid = document.querySelector(".video"),
mime = 'video/webm;codecs=vp9,opus',
liveData = []
navigator.mediaDevices.getUserMedia({video:true,audio:true}).then(stream=>{
var recorder = new MediaRecorder(stream,{mimeType:mime})
recorder.ondataavailable = d =>{
console.log("capturing")
new Response(d.data).arrayBuffer().then(eachChunk=>{
// recording in chunks here and saving it to liveData array
liveData.push(eachChunk)
})
}
recorder.start(5000)
})
//--------------------------------------------------------------------------------
// playing from liveData in video
var ms = new MediaSource()
vid.src =URL.createObjectURL(ms)
ms.onsourceopen = function(){
var source = ms.addSourceBuffer(mime),chunkIndex = 0
setInterval(function(){
if(liveData.length > 0){
var currentChunk = liveData[chunkIndex]
source.appendBuffer(currentChunk)
console.log("playing")
chunkIndex++
}
},5000)
}

HTML5 getUserMedia is not working with scientific microscopy camera Lumenera

I am using microscopy camera Lumenera INFINITY1-2 (https://www.lumenera.com/infinity1-2.html) and from the camera description it tells that it can be integrated into 3rd party software packages using TWAIN support. I try to capture image from web page using HTML5 but it gives me error.
I also tried to check video streaming with Adobe Flash but camera stream is not displayed properly and captured images is quite broken (in fact it is empty)
I checked camera's drivers and they all installed and works. Native camera application also captures images well.
Question, is it possible to capture images in the web browser from the microscopy camera like that? It is being recognized by browser as a device but video stream is not working.
Please see below script (code is getting from here https://github.com/jhuckaby/webcamjs/blob/master/DOCS.md)
Thank you in advance
window.addEventListener("load", function () {
// [1] GET ALL THE HTML ELEMENTS
var video = document.getElementById("vid-show"),
canvas = document.getElementById("vid-canvas"),
take = document.getElementById("vid-take");
navigator.mediaDevices.getUserMedia({ video: true })
.then(function (stream) {
// [3] SHOW VIDEO STREAM ON VIDEO TAG
video.srcObject = stream;
video.play();
take.addEventListener("click", function () {
// Create snapshot from video
var draw = document.createElement("canvas");
draw.width = video.videoWidth;
draw.height = video.videoHeight;
var context2D = draw.getContext("2d");
context2D.drawImage(video, 0, 0, video.videoWidth, video.videoHeight);
// Output as file
var anchor = document.createElement("a");
anchor.href = draw.toDataURL("image/png");
anchor.download = "webcam.png";
anchor.innerHTML = "Click to download";
canvas.innerHTML = "";
canvas.appendChild(anchor);
var imageBase64 = draw.toDataURL("image/png").replace("data:image/png;base64,", "");
document.getElementById("hdn_ImageByte").setAttribute("value", imageBase64);
});
})
.catch(function (err) {
debugger;
console.log(err)
document.getElementById("vid-controls").innerHTML = "Please enable access and attach a camera";
});
});
</script>```

Browsersync camera acces video.play is not a function

Hi guys I started today to use browsersync with the simple configuration from https://browsersync.io/#install in a project to use the webcam to capture a picture. It was working pretty well but suddenly it stop working, now it doesn't show me the image from the camera and in chrome now appears the error video.play is not a function even with a gulp file to start the local server, so don't know what happened. Does anyone have had a problem like this before?
This is my js file:
(function(){
var video = document.getElementById('video'),
canvas = document.getElementById('canvas'),
context = canvas.getContext('2d'),
photo = document.getElementById('photo'),
vendorUrl = window.URL || window.webkitURL;
navigator.getMedia = navigator.getUserMedia||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
navigator.getMedia({
video: true,
audio: false
}, function(stream) {
video.src = vendorUrl.createObjectURL(stream);
video.play();
}, function(error){
//An error occured
//error.code
});
document.getElementById('snap').addEventListener('click', function(){
context.drawImage(video, 0, 0, 400, 300);
photo.setAttribute('src', canvas.toDataURL('image/png'));
});
})();
I had same problem with complete same sourcecode. I migrated my camera image solution to yii2, and my problem was that the #video is a div element and not video element, if you change it to video tag it will work perfectly.
:)

HTML5 getUserMedia is not streaming

I wrote a navigator.getUserMedia() script but it isnt working right.
I reused nearly the same lines of code in another test and there it worked.
The problem is, that actually the camera is turned on (after given permission)
but the stream isnt streaming. The camera stays on but the video element only displays a static image (maybe the first frame).
The src of the video element is a blob.
Heres my markup:
<video src="" id="camera-stream">
<canvas style="display:none"></canvas>
</video>
The basic script is the following:
navigator.getUserMedia = navigator.getUserMedia || navigator.msGetUserMedia
|| navigator.webkitGetUserMedia
|| navigator.mozGetUserMedia;
window.URL = window.URL || window.webkitURL;
var video, canvas, ctx, previewImg, localMediaStream = null;
function startCapture(){
console.log("Start Capture invoked");
if( !video || !navigator.getUserMedia || !window.URL ){
console.log("Start capture returned.");
return;
}
var args = {
video: true
};
navigator.getUserMedia(args, captureSuccess, captureError);
}
function captureSuccess(stream){
console.log("Capture successful");
localMediaStream = stream;
video.src = window.URL.createObjectURL(localMediaStream);
console.log("Stream: ", stream);
}
function captureError(e){
console.log("Capture failed", e);
}
The startCapture() function is invoked in a jQuery onClick-hanlder for a link.
Is there an error in my script?
Ok, i found the error.
Maybe this will help some people.
I forgott to set the autoplay="autoplay" attribute for the video element.

Uploading a picture taken from a webcam

How do i upload a picture that i took from a webcam, i've found a code that worked (only on Chrome)
i've used the code, and when clic on Take picture, i got the picture but i dont see it on the source?
<video autoplay id="vid" style="display:none;"></video>
<canvas id="canvas" width="640" height="480" style="border:1px solid #d3d3d3;"></canvas><br>
<button onclick="snapshot()">Take Picture</button>
<script type="text/javascript">
var video = document.querySelector("#vid");
var canvas = document.querySelector('#canvas');
var ctx = canvas.getContext('2d');
var localMediaStream = null;
var onCameraFail = function (e) {
console.log('Camera did not work.', e);
};
function snapshot() {
if (localMediaStream) {
ctx.drawImage(video, 0, 0);
}
}
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia({video:true}, function (stream) {
video.src = window.URL.createObjectURL(stream);
localMediaStream = stream;
}, onCameraFail);
</script>
How do i add it to a form that have a picture element
<input name="avatar" class="avatar" type="file" required>
as you can see, the field is required, so if it is added to that field the form will see it as a non-empty?
Why it dont work on Opera? i use Opera 12.15
Update: i found how to convert to canvas to image, but dont know how to fill it in the image:
a = canvas.toDataURL("image/jpeg")
document.getElementById("avatar").src = a
if i use the javascript console, i get the source, but if i validate the form, the form thinks that the image is still missing and it is blank.
any ideas?
Build the SetInterval with snapshot() runing in 300ms.
The button only stop the interval, ;)
<button onclick="clearInterval(ivl)">Take Picture</button>
var ivl = setInterval(function(){ snapshot(); },300);

Categories

Resources