HTML5 getUserMedia API stop webcam button not working - JavaScript

I am running this code on a local XAMPP server:
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8"/>
</head>
<body>
  <video onclick="changeFilter(this);" width=200 height=200 id="video" controls autoplay></video>
  <p>
    <button onclick="startWebcam();">Start WebCam</button>
    <button onclick="stopWebcam();">Stop WebCam</button>
  </p>
  <script>
    navigator.getUserMedia = (navigator.getUserMedia ||
                              navigator.webkitGetUserMedia ||
                              navigator.mozGetUserMedia ||
                              navigator.msGetUserMedia);
    var webcamStream;

    function startWebcam() {
      if (navigator.getUserMedia) {
        console.log("toto");
        navigator.getUserMedia(
          // constraints
          {
            video: true,
            audio: false
          },
          // successCallback
          function(localMediaStream) {
            var video = document.querySelector('video');
            video.src = window.URL.createObjectURL(localMediaStream);
            webcamStream = localMediaStream;
          },
          // errorCallback
          function(err) {
            console.log("The following error occured: " + err);
          }
        );
      } else {
        console.log("getUserMedia not supported");
      }
    }

    function stopWebcam() {
      localMediaStream.stop();
    }
  </script>
</body>
</html>
This code starts my webcam, but when I press the Stop WebCam button the console gives me the following error:
Uncaught TypeError: Cannot read property 'stop' of undefined
function stopWebcam() { webcamStream.stop(); }
I am a JavaScript newbie and I cannot see the issue here.

localMediaStream is not available to you inside stopWebcam(). Check out this post for more: What is the scope of variables in JavaScript?
Try...
function stopWebcam() {
  webcamStream.stop();
}
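Note that MediaStream.stop() has since been deprecated and removed from modern browsers; the current way to stop a webcam is to stop each track on the stream. A minimal sketch, assuming webcamStream holds the stream saved in the success callback:

function stopWebcam() {
  if (webcamStream) {
    // MediaStream.stop() no longer exists; stop every track on the stream instead
    webcamStream.getTracks().forEach(function (track) {
      track.stop();
    });
  }
}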

Related

WebRTC error while creating video chat app

I am getting this error in the console when refreshing the page. Everything else works fine, the chats and everything; just the streaming part is not working:
NotSupportedError: MediaStreamError
    at module.exports (http://192.168.1.10:9966/index.js:3081:17)
    at Object.1.getusermedia (http://192.168.1.10:9966/index.js:4:1)
    at o (http://192.168.1.10:9966/index.js:1:265)
    at r (http://192.168.1.10:9966/index.js:1:431)
    at http://192.168.1.10:9966/index.js:1:460
This happens while creating a video chat app.
This is my index.js
var getUserMedia = require('getusermedia')

getUserMedia({ video: true, audio: false }, function (err, stream) {
  var Peer = require('simple-peer')
  var peer = new Peer({
    initiator: location.hash === '#init',
    trickle: false,
    stream: stream
  })

  peer.on('signal', function (data) {
    document.getElementById('yourId').value = JSON.stringify(data)
  })

  document.getElementById('connect').addEventListener('click', function () {
    var otherId = JSON.parse(document.getElementById('otherId').value)
    peer.signal(otherId)
  })

  document.getElementById('send').addEventListener('click', function () {
    var yourMessage = document.getElementById('yourMessage').value
    peer.send(yourMessage)
  })

  peer.on('data', function (data) {
    document.getElementById('messages').textContent += data + '\n'
  })

  peer.on('stream', function (stream) {
    var video = document.createElement('video')
    document.body.appendChild(video)
    video.src = window.URL.createObjectURL(stream)
    video.play()
  })
})
This is my index.html
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>CHatting Video</title>
</head>
<body>
  <label>Your ID:</label><br/>
  <textarea id="yourId"></textarea><br/>
  <label>Other ID:</label><br/>
  <textarea id="otherId"></textarea>
  <button id="connect">connect</button>
  <br/>
  <label>Enter Message:</label><br/>
  <textarea id="yourMessage"></textarea>
  <button id="send">send</button>
  <pre id="messages"></pre>
  <script src="index.js" charset="utf-8"></script>
</body>
</html>
When I send a message to the other browser it works fine, but the video chat does not work.
Any idea on how to fix this?
I found the problem:
I was using getUserMedia instead of navigator.getUserMedia.
getUserMedia has been deprecated.
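For completeness, navigator.getUserMedia is deprecated as well. The current promise-based API is navigator.mediaDevices.getUserMedia, and the stream is attached to the video element via srcObject rather than URL.createObjectURL. A minimal sketch:

navigator.mediaDevices.getUserMedia({ video: true, audio: false })
  .then(function (stream) {
    // attach the stream directly; no object URL needed
    var video = document.querySelector('video');
    video.srcObject = stream;
  })
  .catch(function (err) {
    console.log('getUserMedia error: ' + err);
  });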

HTML video player freezes after adding MP4 video ArrayBuffer to MediaSource

I tried to handle video data from an API with JavaScript. The API response is OK, but the player doesn't work as expected, and when I play the video the console throws an error:
DOMException: The element has no supported sources.
Although the video data is available, the ArrayBuffer does not seem to be pushed into the MediaSource yet.
Here is the code
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="ie=edge">
  <title>Document</title>
</head>
<body>
  <h1>Hello</h1>
  <video id="video" controls autoplay></video>
</body>
</html>
<script>
  window.onload = function() {
    var mime = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
    const videoTag = document.getElementById("video");
    if (
      "MediaSource" in window &&
      MediaSource.isTypeSupported(mime)
    ) {
      var mediaSource = new MediaSource();
      //console.log(mediaSource.readyState);
      videoTag.src = URL.createObjectURL(mediaSource);
      mediaSource.addEventListener("sourceopen", sourceOpen);
    } else {
      console.error("Unsupported MIME type or codec: ", mime);
    }

    function sourceOpen(e) {
      var msource = this;
      console.log(msource.readyState);
      // URL.revokeObjectURL(vidElement.src);
      var sourceBuffer = msource.addSourceBuffer(mime);
      var videoUrl = "https://vjs.zencdn.net/v/oceans.mp4";
      fetch(videoUrl)
        .then(function(response) {
          return response.arrayBuffer();
        })
        .then(function(arrayBuffer) {
          console.log("object", arrayBuffer.byteLength);
          sourceBuffer.addEventListener("updateend", function(_) {
            console.log(msource.readyState);
            if (!sourceBuffer.updating &&
                mediaSource.readyState === "open"
            ) {
              msource.endOfStream();
            }
            videoTag.play();
          });
          sourceBuffer.appendBuffer(arrayBuffer);
        })
        .catch(e => console.log(e));
    }
  };
</script>
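For reference, a common cause of this symptom is the source file itself: the MSE byte-stream format for video/mp4 expects a fragmented MP4 (an initialization segment followed by media segments), so fetching an ordinary progressive MP4 and appending it in one go generally will not play. A minimal sketch of the append flow, assuming fragmentedUrl (a placeholder name) points to a fragmented MP4 that matches the declared codecs:

var mime = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';
var fragmentedUrl = 'https://example.com/fragmented.mp4'; // assumption: a fragmented MP4
var videoTag = document.getElementById('video');
var mediaSource = new MediaSource();
videoTag.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function () {
  var sourceBuffer = mediaSource.addSourceBuffer(mime);
  // surface append errors instead of failing silently
  sourceBuffer.addEventListener('error', function (e) {
    console.error('SourceBuffer error', e);
  });
  fetch(fragmentedUrl)
    .then(function (response) { return response.arrayBuffer(); })
    .then(function (buffer) {
      sourceBuffer.addEventListener('updateend', function () {
        if (!sourceBuffer.updating && mediaSource.readyState === 'open') {
          mediaSource.endOfStream();
          videoTag.play();
        }
      }, { once: true });
      sourceBuffer.appendBuffer(buffer);
    })
    .catch(function (err) { console.log(err); });
});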

Send image object from webcam by AJAX to Flask server

I want to take a snapshot from the webcam and send it by AJAX to the Flask server.
I have upload.html taking the snapshot from the webcam and sending it through AJAX, but I do not know enough about the Flask server side to get the data from the AJAX call and save it to a specific location (/images).
Here is upload.html. The webcam works in Firefox only (not in Chrome); I haven't tested other browsers yet.
//--------------------
// GET USER MEDIA CODE
//--------------------
navigator.getUserMedia = (navigator.getUserMedia ||
                          navigator.webkitGetUserMedia ||
                          navigator.mozGetUserMedia ||
                          navigator.msGetUserMedia);

var video;
var webcamStream;

function startWebcam() {
  if (navigator.getUserMedia) {
    navigator.getUserMedia(
      // constraints
      {
        video: true,
        audio: false
      },
      // successCallback
      function(localMediaStream) {
        video = document.querySelector('video');
        video.src = window.URL.createObjectURL(localMediaStream);
        webcamStream = localMediaStream;
      },
      // errorCallback
      function(err) {
        console.log("The following error occured: " + err);
      }
    );
  } else {
    console.log("getUserMedia not supported");
  }
}

//---------------------
// TAKE A SNAPSHOT CODE
//---------------------
var canvas, ctx;

function init() {
  // Get the canvas and obtain a context for
  // drawing in it
  canvas = document.getElementById("myCanvas");
  context = canvas.getContext('2d');
}

function snapshot() {
  // Draws current image from the video element into the canvas
  context.drawImage(video, 0, 0, canvas.width, canvas.height);
  webcamStream.stop();
  var dataURL = canvas.toDataURL('image/jpeg', 1.0);
  document.querySelector('#dl-btn').href = dataURL;
  $.ajax({
    type: "POST",
    contentType: false,
    cache: false,
    processData: false,
    async: false,
    url: "/upload",
    data: {
      imgBase64: dataURL
    }
  }).done(function(o) {
    console.log('saved');
    // If you want the file to be visible in the browser
    // - please modify the callback in javascript. All you
    // need is to return the url to the file, you just saved
    // and than put the image in your browser.
  });
}
<!DOCTYPE html>
<html>
<head>
  <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
  <script src="camera.js"></script>
</head>
<body onload="init();">
  <h1>Take a snapshot of the current video stream</h1>
  Click on the Start WebCam button.
  <p>
    <button onclick="startWebcam();">Start WebCam</button>
    <button type="submit" id="dl-btn" href="#" download="participant.jpeg" onclick="snapshot();">Take Snapshot</button>
  </p>
  <video onclick="snapshot(this);" width=400 height=400 id="video" controls autoplay></video>
  <p>
    Screenshots :
  <p>
  <canvas id="myCanvas" width="400" height="350"></canvas>
</body>
</html>
Here is my server code: app_basic.py
import os
from flask import Flask, render_template, request, send_from_directory

app = Flask(__name__)
APP_ROOT = os.path.dirname(os.path.abspath(__file__))

@app.route("/")
def index():
    return render_template("upload.html")

@app.route("/upload", methods=['POST'])
def upload():
    return send_from_directory('/images', 'test.jpeg')

if __name__ == "__main__":
    app.run(port=4555, debug=True)
Thanks
Updated:
Thanks to @guest271314 for helping me get the camera capture working in other browsers. I reused my original AJAX call to upload to the server, but I get a 404 error, and I do not know how to save the image to the server location (/images).
Now I am looking at the PHP code below that handles the data sent from the AJAX call; how do I write similar code in Flask?
<?php
define('UPLOAD_DIR', 'images/');
$img = $_POST['imgBase64'];
$img = str_replace('data:image/png;base64,', '', $img);
$img = str_replace(' ', '+', $img);
$data = base64_decode($img);
$file = UPLOAD_DIR . uniqid() . '.png';
$success = file_put_contents($file, $data);
//send request to ocr
print $success ? $file : 'Unable to save the file.';
?>
Use navigator.mediaDevices.getUserMedia(), .then() and .catch()
<!DOCTYPE html>
<html>
<head>
</head>
<body onload="init();">
  <h1>Take a snapshot of the current video stream</h1> Click on the Start WebCam button.
  <p>
    <button onclick="startWebcam();">Start WebCam</button>
    <button type="submit" id="dl-btn" href="#" download="participant.jpeg" onclick="snapshot();">Take Snapshot</button>
  </p>
  <video onclick="snapshot();" width=400 height=400 id="video" controls autoplay></video>
  <p>
    Screenshots :
  <p>
  <canvas id="myCanvas" width="400" height="350"></canvas>
  </p>
  <script>
    //--------------------
    // GET USER MEDIA CODE
    //--------------------
    var video;
    var webcamStream;

    function startWebcam() {
      if (navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia(
          // constraints
          {
            video: true,
            audio: false
          }).then(
            // successCallback
            function(localMediaStream) {
              console.log(webcamStream);
              video.src = window.URL.createObjectURL(localMediaStream);
              webcamStream = localMediaStream;
            })
          .catch(
            // errorCallback
            function(err) {
              console.log("The following error occured: " + err);
            })
      } else {
        console.log("getUserMedia not supported");
      }
    }

    //---------------------
    // TAKE A SNAPSHOT CODE
    //---------------------
    var canvas, ctx;

    function init() {
      video = document.querySelector('video');
      // Get the canvas and obtain a context for
      // drawing in it
      canvas = document.getElementById("myCanvas");
      context = canvas.getContext('2d');
    }

    function snapshot() {
      // Draws current image from the video element into the canvas
      console.log(webcamStream);
      context.drawImage(video, 0, 0, canvas.width, canvas.height);
      webcamStream.getTracks().forEach(function(track) {
        track.stop();
      });
      var dataURL = canvas.toDataURL('image/jpeg', 1.0);
      document.querySelector('#dl-btn').href = dataURL;
      console.log(dataURL)
    }
  </script>
</body>
</html>
plnkr https://plnkr.co/edit/vuPJRvYZNXLC7rjzKvpj?p=catalogue
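The snapshot() above stops at console.log(dataURL); the upload itself is left to the original AJAX call. A minimal sketch of that step using fetch, assuming the Flask /upload route from the question and an imgBase64 form field (mirroring what the PHP snippet reads from $_POST['imgBase64']); the name uploadSnapshot is just illustrative:

function uploadSnapshot(dataURL) {
  var form = new FormData();
  form.append('imgBase64', dataURL); // Flask side: request.form['imgBase64']
  return fetch('/upload', { method: 'POST', body: form })
    .then(function (response) { return response.text(); })
    .then(function (text) { console.log('saved: ' + text); })
    .catch(function (err) { console.error('upload failed: ' + err); });
}

Calling uploadSnapshot(dataURL) at the end of snapshot() would post the base64 JPEG to the server.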

How do I take a picture on the client side (HTML) and save it to the server side (Python)?

I'm new to Python. I am building an application in Python in which I want to capture images from my webcam using HTML and AJAX/JavaScript and save them on the server side in Python. I have completed capturing images on the client side in HTML, but I don't know how to pass the data from the HTML client side to the server side in Python and save it. If anybody has done this, please can you help me?
Thank you in advance.
My.html:
<!doctype html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Get User Media - Photo</title>
</head>
<body>
  <button id="take">Take a photo</button><br />
  <video id="v"></video>
  <canvas id="canvas" style="display:none;"></canvas>
  <img src="D:/VoteTest/img.jpg" id="photo" alt="photo">
  <script>
    ;(function(){
      function userMedia(){
        return navigator.getUserMedia = navigator.getUserMedia ||
          navigator.webkitGetUserMedia ||
          navigator.mozGetUserMedia ||
          navigator.msGetUserMedia || null;
      }
      // Now we can use it
      if( userMedia() ){
        var videoPlaying = false;
        var constraints = {
          video: true,
          audio: false
        };
        var video = document.getElementById('v');
        var media = navigator.getUserMedia(constraints, function(stream){
          // URL Object is different in WebKit
          var url = window.URL || window.webkitURL;
          // create the url and set the source of the video element
          video.src = url ? url.createObjectURL(stream) : stream;
          // Start the video
          video.play();
          videoPlaying = true;
        }, function(error){
          console.log("ERROR");
          console.log(error);
        });
        // Listen for user click on the "take a photo" button
        document.getElementById('take').addEventListener('click', function(){
          if (videoPlaying){
            var canvas = document.getElementById('canvas');
            canvas.width = video.videoWidth;
            canvas.height = video.videoHeight;
            canvas.getContext('2d').drawImage(video, 0, 0);
            var data = canvas.toDataURL('image/webp');
            document.getElementById('photo').setAttribute('src', data);
          }
        }, false);
      } else {
        console.log("KO");
      }
    })();
  </script>
</body>
</html>
I just did this recently for a project. You can use XHR to send the image inside form data:
let formdata = new FormData();
formdata.append("image", data);
let xhr = new XMLHttpRequest();
xhr.open('POST', 'http://yourserver/image', true);
xhr.onload = function () {
  if (this.status === 200)
    console.log(this.response);
  else
    console.error(xhr);
};
xhr.send(formdata);
I had trouble using the toDataURL to convert the canvas, so I used toBlob for an easier conversion:
canvas.toBlob(callBackToMyPostFunctionAbove, 'image/jpeg');
Here is a sample HTML file with embedded JavaScript and my Python server.
HTML & Embedded JavaScript
The JavaScript uses:
getUserMedia to start a local video stream
a mouse click on the image to initiate the image capture
a canvas object to save an image from the getUserMedia stream
XHR to send the file as form data
The code:
<!DOCTYPE html>
<html>
<head>
  <title>Post an Image test</title>
  <script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
</head>
<style>
  /* mirror the image */
  video, canvas {
    transform: scale(-1, 1); /*For Firefox (& IE) */
    -webkit-transform: scale(-1, 1); /*for Chrome & Opera (& Safari) */
  }
</style>
<body>
  <video id="myVideo" autoplay></video>
  <script>
    let v = document.getElementById("myVideo");

    //create a canvas to grab an image for upload
    let imageCanvas = document.createElement('canvas');
    let imageCtx = imageCanvas.getContext("2d");

    //Add file blob to a form and post
    function postFile(file) {
      let formdata = new FormData();
      formdata.append("image", file);
      let xhr = new XMLHttpRequest();
      xhr.open('POST', 'http://localhost:5000/image', true);
      xhr.onload = function () {
        if (this.status === 200)
          console.log(this.response);
        else
          console.error(xhr);
      };
      xhr.send(formdata);
    }

    //Get the image from the canvas
    function sendImagefromCanvas() {
      //Make sure the canvas is set to the current video size
      imageCanvas.width = v.videoWidth;
      imageCanvas.height = v.videoHeight;
      imageCtx.drawImage(v, 0, 0, v.videoWidth, v.videoHeight);
      //Convert the canvas to blob and post the file
      imageCanvas.toBlob(postFile, 'image/jpeg');
    }

    //Take a picture on click
    v.onclick = function() {
      console.log('click');
      sendImagefromCanvas();
    };

    window.onload = function () {
      //Get camera video
      navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}, audio: false})
        .then(stream => {
          v.srcObject = stream;
        })
        .catch(err => {
          console.log('navigator.getUserMedia error: ', err)
        });
    };
  </script>
</body>
</html>
This uses adapter.js to polyfill getUserMedia on different browsers without any error checks.
Python Server
And here is a sample in Python using Flask as a web server:
from flask import Flask, request, Response
import time

PATH_TO_TEST_IMAGES_DIR = './images'

app = Flask(__name__)

@app.route('/')
def index():
    return Response(open('./static/getImage.html').read(), mimetype="text/html")

# save the image as a picture
@app.route('/image', methods=['POST'])
def image():
    i = request.files['image']  # get the image
    f = ('%s.jpeg' % time.strftime("%Y%m%d-%H%M%S"))
    i.save('%s/%s' % (PATH_TO_TEST_IMAGES_DIR, f))
    return Response("%s saved" % f)

if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
If you are looking for PHP on the server side, here is how I did it.
Post the image data to the PHP script using jQuery:
var imgData = canvas.toDataURL('image/png');
$.post("https://path-to-your-script/capture.php", {image: imgData},
  function(data) {
    console.log('posted');
  });
The PHP script will look like this:
capture.php
$data = $_POST['image'];
// remove "data:image/png;base64," from image data.
$data = str_replace("data:image/png;base64,", "", $data);
// save to file
file_put_contents("/tmp/image.png", base64_decode($data));

Sending images using JavaScript WebSockets to Flask server

Overview
An HTML5 page running on Flask uses the getUserMedia API to access the user's/my webcam. (Done)
Every frame from the getUserMedia video is drawn to a JavaScript canvas. (Done)
Every frame will be sent to the Flask server as base64.
Every frame will then get sent from the Flask server to a Python file where the image is decoded.
The frames will be processed using OpenCV.
I'm using flask_socketio and JavaScript WebSockets.
I'm new to JavaScript and WebSockets, so apologies if the solution is obvious. I think the frames are being sent to the Flask server (HTTP/1.1" 200). If they are being sent, it only happens every ~25 seconds, whereas I want every frame to be sent immediately.
Any help is much appreciated.
HTML + JavaScript
<!DOCTYPE html>
<html>
<head>
</head>
<script type="text/javascript" src="//code.jquery.com/jquery-1.4.2.min.js"></script>
<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.5/socket.io.min.js"></script>
<script type="text/javascript" charset="utf-8"></script>
<body onload="init();">
  <h1></h1>
  <p>
    <button onclick="startWebcam();">Start WebCam</button>
    <button onclick="stopWebcam();">Stop WebCam</button>
  </p>
  <video onclick="snapshot(this);" width=400 height=400 id="video" controls autoplay></video>
  <p>
    Screenshots : <p>
  <canvas id="myCanvas" width="400" height="350"></canvas>
</body>
<script type="text/javascript">
  //--------------------
  // GET USER MEDIA CODE
  //--------------------
  navigator.getUserMedia = (navigator.getUserMedia ||
                            navigator.webkitGetUserMedia ||
                            navigator.mozGetUserMedia ||
                            navigator.msGetUserMedia);

  var video;
  var webcamStream;

  function startWebcam() {
    if (navigator.getUserMedia) {
      navigator.getUserMedia(
        // constraints
        {
          video: true,
          audio: false
        },
        // successCallback
        function(localMediaStream) {
          video = document.querySelector('video');
          video.src = window.URL.createObjectURL(localMediaStream);
          webcamStream = localMediaStream;
        },
        // errorCallback
        function(err) {
          console.log("The following error occured: " + err);
        }
      );
    } else {
      console.log("getUserMedia not supported");
    }
  }

  function stopWebcam() {
    webcamStream.stop();
  }

  //---------------------
  // TAKE A SNAPSHOT CODE
  //---------------------
  var canvas, ctx;

  function init() {
    // Get the canvas and obtain a context for
    // drawing in it
    canvas = document.getElementById("myCanvas");
    ctx = canvas.getContext('2d');
  }

  namespace = '/test';

  // Connect to the Socket.IO server.
  // The connection URL has the following format:
  // http[s]://<domain>:<port>[/<namespace>]
  var socket = io.connect(location.protocol + '//' + document.domain + ':' + location.port + namespace);

  timer = setInterval(
    function snapshot() {
      // Draws current image from the video element into the canvas
      ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
      var frame = canvas.toDataURL("image/png");
      console.log(frame.substring(0, 50));
    }, 1000);

  // Event handler for new connections.
  // The callback function is invoked when a connection with the
  // server is established.
  socket.on('frame', function(data) {
    socket.emit('frame', frame);
  });
</script>
</html>
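Note that in the snippet above the captured frame is only logged, and the socket.emit call sits inside a socket.on('frame', ...) handler, so nothing is pushed from the timer itself. A minimal sketch of a capture loop that emits each frame as it is drawn, assuming the init()/startWebcam() code above has run and keeping the same '/test' namespace and 'frame' event name:

var socket = io.connect(location.protocol + '//' + document.domain + ':' +
                        location.port + '/test');

setInterval(function () {
  if (!webcamStream) return; // wait until the webcam has started
  ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
  var frame = canvas.toDataURL('image/png'); // base64-encoded PNG data URL
  socket.emit('frame', frame); // push the frame to the Flask-SocketIO handler
}, 100); // roughly 10 frames per second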
Flask server (updated)
async_mode = "eventlet"

from eventlet import wsgi, websocket
import eventlet
eventlet.monkey_patch()

from flask import Flask, render_template, session, request
from flask_socketio import SocketIO, emit, disconnect
import base64

app = Flask(__name__)
socketio = SocketIO(app)

@app.route('/')
def cam():
    return render_template("liveweb1.html", async_mode=socketio.async_mode)

@socketio.on('frame', namespace='/test')
def user_video(frame):
    feed = frame
    print(str(feed))

if __name__ == '__main__':
    eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen(('', 8000)),
                                           certfile='cert.crt',
                                           keyfile='private.key',
                                           server_side=True),
                         app)
[Screenshot: Flask server]
[Screenshot: Eventlet server]
