Sending images using JavaScript Websockets to Flask server - javascript

Overview
A HTML5 page running on Flask using the getUserMedia API accesses the user/my webcam. (Done)
Every frame from the getUserMedia API video is drawn using Javascript canvas. (Done)
Every frame will be sent to the flask server as base64.
Every frame will then get sent from the flask server to a python file where the image is decoded.
The frames will be processed using OpenCV.
I'm using flask_socketio and Javascript websockets.
I'm new to Javascript and Websockets so apologies if the solution is obvious. I think the frames are being sent to the flask server (HTTP/1.1" 200). If they are being sent; it is sent every ~25secs however I want every frame to be sent immediately.
Any help is much appreciated.
HTML + JavaScript
<!DOCTYPE html>
<html>
<head>
</head>
<script type="text/javascript" src="//code.jquery.com/jquery-1.4.2.min.js"></script>
<script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/socket.io/1.3.5/socket.io.min.js"></script>
<script type="text/javascript" charset="utf-8"></script>
<body onload="init();">
<h1></h1>
<p>
<button onclick="startWebcam();">Start WebCam</button>
<button onclick="stopWebcam();">Stop WebCam</button>
</p>
<video onclick="snapshot(this);" width=400 height=400 id="video" controls autoplay></video>
<p>
Screenshots : <p>
<canvas id="myCanvas" width="400" height="350"></canvas>
</body>
<script type="text/javascript">
//--------------------
// GET USER MEDIA CODE
//--------------------
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
var video;
var webcamStream;
function startWebcam() {
if (navigator.getUserMedia) {
navigator.getUserMedia (
// constraints
{
video: true,
audio: false
},
// successCallback
function(localMediaStream) {
video = document.querySelector('video');
video.src = window.URL.createObjectURL(localMediaStream);
webcamStream = localMediaStream;
},
// errorCallback
function(err) {
console.log("The following error occured: " + err);
}
);
} else {
console.log("getUserMedia not supported");
}
}
function stopWebcam() {
webcamStream.stop();
}
//---------------------
// TAKE A SNAPSHOT CODE
//---------------------
var canvas, ctx;
function init() {
// Get the canvas and obtain a context for
// drawing in it
canvas = document.getElementById("myCanvas");
ctx = canvas.getContext('2d');
}
namespace = '/test';
// Connect to the Socket.IO server.
// The connection URL has the following format:
// http[s]://<domain>:<port>[/<namespace>]
var socket = io.connect(location.protocol + '//' + document.domain + ':' + location.port + namespace);
timer = setInterval(
function snapshot() {
// Draws current image from the video element into the canvas
ctx.drawImage(video, 0,0, canvas.width, canvas.height);
var frame = canvas.toDataURL("image/png");
console.log(frame.substring(0, 50));
}, 1000);
// Event handler for new connections.
// The callback function is invoked when a connection with the
// server is established.
socket.on('frame', function(data) {
socket.emit('frame', frame);
});
</script>
</html>
Flask server(updated)
async_mode = "eventlet"
from eventlet import wsgi, websocket
import eventlet
eventlet.monkey_patch()
from flask import Flask, render_template, session, request
from flask_socketio import SocketIO, emit, disconnect
import base64
app = Flask(__name__)
socketio = SocketIO(app)
#app.route('/')
def cam():
return render_template("liveweb1.html", async_mode=socketio.async_mode)
#socketio.on('frame', namespace='/test')
def user_video(frame):
feed = frame
print (str(feed))
if __name__ == '__main__':
eventlet.wsgi.server(eventlet.wrap_ssl(eventlet.listen(('', 8000)),
certfile='cert.crt',
keyfile='private.key',
server_side=True),
app)
Flask server screenshot
Eventlet server

Related

Websockets returns error Error in connection establishment: net::ERR_CONNECTION_RESET

main.py
from flask import Flask, render_template, request
import base64
import ssl
app = Flask(__name__)
sock = Sock(app)
#app.route('/')
def index():
return render_template('index.html')
import asyncio
import websockets
ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
localhost_pem = "cert.pem"
ssl_context.load_cert_chain(localhost_pem, keyfile="cert.key")
async def hello(websocket, path):
n = 0
while True:
n += 1
data = await websocket.recv()
open(f"photo/{n}.png", "wb").write(data)
#start_server = websockets.serve(hello, '0.0.0.0', 8443, ssl=ssl_context)
async def tre():
async with websockets.serve(hello, "0.0.0.0", 8443, ssl=ssl_context):
await asyncio.Future()
#loop = asyncio.get_event_loop()
import threading
def lol():
asyncio.run(tre())
t = threading.Thread(target=lol, args=())
t.start()
print(1)
app.run("0.0.0.0", port=8080, ssl_context=('cert.pem', 'cert.key'))
t.join()
This file hosts the index.html and also hosts a websockets server that accepts images on every message.
index.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width,
initial-scale=1.0">
<style>
body {
text-align: center;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
}
video {
background-color: black;
margin-bottom: 1rem;
}
#error {
color: red;
padding: 0.6rem;
background-color: rgb(236 157 157);
margin-bottom: 0.6rem;
display: none;
}
</style>
<title>GetUserMedia demo</title>
</head>
<body>
<h1> WebRTC getUserMedia() demo</h1>
<!-- If you use the playsinline attribute then
the video is played "inline". If you omit this
attribute then it works normal in the desktop
browsers, but for the mobile browsers, the video
takes the fullscreen by default. And don't forget
to use the autoplay attribute-->
<video id='video'
width="600"
height="300"
autoplay playsinline>
Sorry, video element not supported in your browser
</video>
<img></img>
<div id="error"></div>
<button onclick="openCamera()"> Open Camera</button>
<button id=screenshot-button></button>
<script>
function dataURItoBlob(dataURI) {
// convert base64 to raw binary data held in a string
// doesn't handle URLEncoded DataURIs - see SO answer #6850276 for code that does this
var byteString = atob(dataURI.split(',')[1]);
// separate out the mime component
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0]
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
// create a view into the buffer
var ia = new Uint8Array(ab);
// set the bytes of the buffer to the correct values
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
// write the ArrayBuffer to a blob, and you're done
var blob = new Blob([ab], {type: mimeString});
return blob;
}
const videoElem = document.getElementById('video');
const errorElem = document.getElementById('error');
//Declare the MediaStreamConstraints object
const constraints = {
audio: false,
video: true
}
function openCamera() {
//Ask the User for the access of the device camera and microphone
navigator.mediaDevices.getUserMedia(constraints)
.then(mediaStream => {
/* The received mediaStream contains both the
video and audio media data*/
/*Add the mediaStream directly to the source of the video element
using the srcObject attribute*/
videoElem.srcObject = mediaStream;
}).catch(err => {
// handling the error if any
errorElem.innerHTML = err;
errorElem.style.display = "block";
});
}
const screenshotButton = document.querySelector("#screenshot-button");
const canvas = document.createElement("canvas");
const img = document.querySelector("img");
const socket = new WebSocket('wss://' + "205.185.122.202:8443/");
screenshotButton.onclick = setInterval(myFunction,2000);
function myFunction () {
canvas.width = videoElem.videoWidth;
canvas.height = videoElem.videoHeight;
canvas.getContext("2d").drawImage(video, 0, 0);
// Other browsers will fall back to image/png
data = dataURItoBlob(canvas.toDataURL("image/webp"));
socket.send(data);
};
</script>
</body>
</html>
This code gets images from the users camera and sends them to the python file. When the browser is opened and the page is loaded then this error pops up. I cannot figure why it would happen and how to fix it.
My firewall is disabled
Im on linux
Ssl is enabled

How to receive continuous chunk of video as a blob array and set to video tag dynamically in Websocket

I am trying to make my own broadcasting architecture. In this system i am using Websocket to transfer data since i know it is suitable for continuous data transfer.
In my system there is a Host who initiate webcam live broadcast video. I use MediaStreamRecorder.js which record every 5s chunk of video and send to server through websocket as blob array.
Server simply recieve and send to the all client who are connected in that Session.
When client connected then it receive continuous 5s chunk of video as blob array through Websocket.
My main problem is in Client side how can I set the video blob array to html video source dynamically in every 5 seconds such that it can play every 5s chunk of video data.
I am using Glassfish 4.0 as server and Javscript in Host and Client side. Browser: Chrome
Source Code:
ServerBroadCast.java
package websocket1;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import javax.websocket.OnClose;
import javax.websocket.OnMessage;
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;
#ServerEndpoint(value = "/liveStreamMulticast")
public class LiveStreamMultiCast {
private static final Set<Session> sessions = Collections.synchronizedSet(new HashSet<Session>());
#OnOpen
public void whenOpening(Session session) {
// session.setMaxBinaryMessageBufferSize(1024*512); // 512 KB
sessions.add(session);
System.out.println("You are Connected!");
System.out.println("Total Connection are connected: " + sessions.size());
}
#OnMessage
public void handleVideo(byte[] videoData, Session HostSession) {
// System.out.println("Insite process video");
try {
if (videoData != null) {
sendVideo(videoData, HostSession);
}
} catch (Throwable e) {
System.out.println("Error sending message " + e.getMessage());
}
}
#OnClose
public void onClosing(Session session) {
System.out.println("Goodbye!");
sessions.remove(session);
}
private void sendVideo(byte[] videoData, Session hostSession) throws IOException {
Iterator<Session> iterator = sessions.iterator();
Session tempSession = null;
while (iterator.hasNext()) {
tempSession = iterator.next();
// System.out.println("Sever send data to "+ tempSession);
if (!tempSession.equals(hostSession))
tempSession.getBasicRemote().sendBinary(ByteBuffer.wrap(videoData));
}
}
}
host.html
<html>
<head>
<title>Demo</title>
<script type="text/javascript" src="js/required/mediastream.js"></script>
</head>
<body>
<video id="video" autoplay=""></video>
<button id="stopButton" onclick="stop()">Stop</button>
<script type="text/javascript">
var url = "ws://localhost:8080/LiveTraining3Demo/liveStreamMulticast"; // 8080/application_name/value_given_in_annotation
var socket = new WebSocket(url);
var video = document.querySelector('video');
socket.onopen = function(){
console.log("Connected to Server!!");
}
socket.onmessage = function(msg){
console.log("Message come from server");
}
/////////////////////////////////
var wholeVideo =[];
var chunks = [];
var mediaRecorder;
//////////////////////////////////////
function gotMedia(stream) {
video.srcObject = stream;
mediaRecorder = new MediaStreamRecorder(stream);
console.log("mediaRecorderCalled");
mediaRecorder.mimeType = 'video/webm';
mediaRecorder.start(5000);//
console.log("recorder started");
mediaRecorder.ondataavailable = (event) =>{
chunks.push(event.data);
console.log("push B");
wholeVideo.push(event.data);
console.log("WholeVideo Size:");
setTimeout(sendData(),5010);
}
}
function sendData(){
//var byteArray = new Uint8Array(recordedTemp);
const superBuffer = new Blob(chunks, {
type: 'video/webm'
});
socket.send(superBuffer);
console.log("Send Data");
console.table(superBuffer);
chunks = [];
}
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia;
navigator.mediaDevices.getUserMedia({video: true , audio: true})
.then(gotMedia)
.catch(e => { console.error('getUserMedia() failed: ' + e); });
</script>
</body>
</html>
client.html
<html>
<head>
<title>Recieve Video</title>
</head>
<body>
<video id="video" autoplay controls loop
style="width: 700; height: 500; margin: auto">
<source src="" type="video/webm">
</video>
<script>
var url = "ws://localhost:8080/LiveTraining3Demo/liveStreamMulticast"; // 8080/application_name/value_given_in_annotation
var check = true;
var socket = new WebSocket(url);
var videoData = [];
var superBuffer = null;
//var videoUrl;
//socket.binaryType = 'arraybuffer';
socket.onopen = function() {
console.log("Connected!!");
}
var check = true;
socket.onmessage = function(videoStream) {
var video = document.querySelector('video');
var videoUrl = window.URL.createObjectURL(videoStream.data);
video.src = videoUrl;
video.load();
video.onloadeddata = function() {
URL.revokeObjectURL(video.src);
video.play();
}
//video.srcObject
//video.play();
console.table(videoStream);
}
socket.onerror = function(err) {
console.log("Error: " + err);
}
</script>
</body>
</html>
When I try to run all other looks fine but in client.html only the video tag source is display with no any video play.
I am working on it since a week.
Might be my some implementation goes wrong, I also know WebRTC, Mauz Webrtc Broadcast but i didn't like to go through that complex if there is another simple way to do that. I am not like to use node.js server since i have to make this web application with spring.
Any idea can be appreciated.
Thanks In Advance!!.
In client side will get array buffer. So you need to convert array buffer into blob array.
let video = document.querySelector('video');
let blobArray = [];
socket.on('message',data=>{
blobArray.push(new Blob([new Uint8Array(data)],{'type':'video/mp4'}));
let currentTime = video.currentTime;
let blob = new Blob(blobArray,{'type':'video/mp4'});
video.src = window.URL.createObjectURL(blob);
video.currentTime = currentTime;
video.play();
});

Send imageObject from webcam by ajax to Flask Server

I want to take the snapshot form webcam and send it by ajax to the Flask server
I have the upload.html taking the snapshot from webcam and and send it through ajax, but I do not know much about Flask server to get data from Ajax and save it on the specific location (/images)
Here is the upload.html. The Webcam work on Firefox only (not working in Chrome). These other browsers I haven't tested yet
//--------------------
// GET USER MEDIA CODE
//--------------------
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
var video;
var webcamStream;
function startWebcam() {
if (navigator.getUserMedia) {
navigator.getUserMedia (
// constraints
{
video: true,
audio: false
},
// successCallback
function(localMediaStream) {
video = document.querySelector('video');
video.src = window.URL.createObjectURL(localMediaStream);
webcamStream = localMediaStream;
},
// errorCallback
function(err) {
console.log("The following error occured: " + err);
}
);
} else {
console.log("getUserMedia not supported");
}
}
//---------------------
// TAKE A SNAPSHOT CODE
//---------------------
var canvas, ctx;
function init() {
// Get the canvas and obtain a context for
// drawing in it
canvas = document.getElementById("myCanvas");
context = canvas.getContext('2d');
}
function snapshot() {
// Draws current image from the video element into the canvas
context.drawImage(video, 0,0, canvas.width, canvas.height);
webcamStream.stop();
var dataURL = canvas.toDataURL('image/jpeg', 1.0);
document.querySelector('#dl-btn').href = dataURL;
$.ajax({
type: "POST",
contentType: false,
cache: false,
processData: false,
async: false,
url: "/upload",
data: {
imgBase64: dataURL
}
}).done(function(o) {
console.log('saved');
// If you want the file to be visible in the browser
// - please modify the callback in javascript. All you
// need is to return the url to the file, you just saved
// and than put the image in your browser.
});
}
<!DOCTYPE html>
<html>
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script src="camera.js"></script>
</head>
<body onload="init();">
<h1>Take a snapshot of the current video stream</h1>
Click on the Start WebCam button.
<p>
<button onclick="startWebcam();">Start WebCam</button>
<button type="submit" id="dl-btn" href="#" download="participant.jpeg" onclick="snapshot();">Take Snapshot</button>
</p>
<video onclick="snapshot(this);" width=400 height=400 id="video" controls autoplay></video>
<p>
Screenshots : <p>
<canvas id="myCanvas" width="400" height="350"></canvas>
</body>
</html>
Here is my server code: app_basic.py
import os
from flask import Flask, render_template, request, send_from_directory
app = Flask(__name__)
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
#app.route("/")
def index():
return render_template("upload.html")
#app.route("/upload", methods=['POST'])
def upload():
return send_from_directory('/images','test.jpeg')
if __name__ == "__main__":
app.run(port=4555, debug=True)
Thanks
Updated:
Thanks for #guest271314 help me fix the camera capture working on other browser. I reused my original ajax and upload it to server , but get the 404 error, but I do not know how to save image to server location (/images) 404 error
Now I am lloking the php code to handle data sent from ajax call, how to write the similar code in flask
<?php
define('UPLOAD_DIR', 'images/');
$img = $_POST['imgBase64'];
$img = str_replace('data:image/png;base64,', '', $img);
$img = str_replace(' ', '+', $img);
$data = base64_decode($img);
$file = UPLOAD_DIR . uniqid() . '.png';
$success = file_put_contents($file, $data);
//send request to ocr
print $success ? $file : 'Unable to save the file.';
?>
Use navigator.mediaDevices.getUserMedia(), .then() and .catch()
<!DOCTYPE html>
<html>
<head>
</head>
<body onload="init();">
<h1>Take a snapshot of the current video stream</h1> Click on the Start WebCam button.
<p>
<button onclick="startWebcam();">Start WebCam</button>
<button type="submit" id="dl-btn" href="#" download="participant.jpeg" onclick="snapshot();">Take Snapshot</button>
</p>
<video onclick="snapshot();" width=400 height=400 id="video" controls autoplay></video>
<p>
Screenshots :
<p>
<canvas id="myCanvas" width="400" height="350"></canvas>
</p>
<script>
//--------------------
// GET USER MEDIA CODE
//--------------------
var video;
var webcamStream;
function startWebcam() {
if (navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia(
// constraints
{
video: true,
audio: false
}).then(
// successCallback
function(localMediaStream) {
console.log(webcamStream);
video.src = window.URL.createObjectURL(localMediaStream);
webcamStream = localMediaStream;
})
.catch(
// errorCallback
function(err) {
console.log("The following error occured: " + err);
})
} else {
console.log("getUserMedia not supported");
}
}
//---------------------
// TAKE A SNAPSHOT CODE
//---------------------
var canvas, ctx;
function init() {
video = document.querySelector('video');
// Get the canvas and obtain a context for
// drawing in it
canvas = document.getElementById("myCanvas");
context = canvas.getContext('2d');
}
function snapshot() {
// Draws current image from the video element into the canvas
console.log(webcamStream);
context.drawImage(video, 0, 0, canvas.width, canvas.height);
webcamStream.getTracks().forEach(function(track) {
track.stop();
});
var dataURL = canvas.toDataURL('image/jpeg', 1.0);
document.querySelector('#dl-btn').href = dataURL;
console.log(dataURL)
}
</script>
</body>
</html>
plnkr https://plnkr.co/edit/vuPJRvYZNXLC7rjzKvpj?p=catalogue

How do i take picture from client side(html) and save it to server side(Python)

I'm new to python i did one application using python in that i want to capture Images from my webcam using html and AJAX javascript and save it to server side python. I have completed capturing of images from using client side HTML but i don't know how to save and pass the data from html client side to server side python.If anybody did this please can you help me...
THANK YOU IN ADVANCE...
My.html:
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Get User Media - Photo</title>
</head>
<body>
<button id="take">Take a photo</button><br />
<video id="v"></video>
<canvas id="canvas" style="display:none;"></canvas>
<img src="D:/VoteTest/img.jpg" id="photo" alt="photo">
<script>
;(function(){
function userMedia(){
return navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia || null;
}
// Now we can use it
if( userMedia() ){
var videoPlaying = false;
var constraints = {
video: true,
audio:false
};
var video = document.getElementById('v');
var media = navigator.getUserMedia(constraints, function(stream){
// URL Object is different in WebKit
var url = window.URL || window.webkitURL;
// create the url and set the source of the video element
video.src = url ? url.createObjectURL(stream) : stream;
// Start the video
video.play();
videoPlaying = true;
}, function(error){
console.log("ERROR");
console.log(error);
});
// Listen for user click on the "take a photo" button
document.getElementById('take').addEventListener('click', function(){
if (videoPlaying){
var canvas = document.getElementById('canvas');
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.getContext('2d').drawImage(video, 0, 0);
var data = canvas.toDataURL('image/webp');
document.getElementById('photo').setAttribute('src', data);
}
}, false);
} else {
console.log("KO");
}
})();
</script>
</body>
</html>
I just did this recently for a project. You can use XHR to send the image inside form data:
let formdata = new FormData();
formdata.append("image", data);
let xhr = new XMLHttpRequest();
xhr.open('POST', 'http://yourserver/image', true);
xhr.onload = function () {
if (this.status === 200)
console.log(this.response);
else
console.error(xhr);
};
xhr.send(formdata);
I had trouble using the toDataURL to convert the canvas, so I used toBlob for an easier conversion:
canvas.toBlob(callBackToMyPostFunctionAbove, 'image/jpeg');
Here is a sample HTML file with embedded JavaScript and my Python server.
HTML & Embedded JavaScript
The JavaScript uses:
getUserMedia to start a local video stream
a mouse click on the image to initiate the image capture
a canvas object to save an image from the getUserMedia stream
XHR to send the file as form data
The code:
<!DOCTYPE html>
<html>
<head>
<title>Post an Image test</title>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
</head>
<style>
/* mirror the image */
video, canvas {
transform: scale(-1, 1); /*For Firefox (& IE) */
-webkit-transform: scale(-1, 1); /*for Chrome & Opera (& Safari) */
}
</style>
<body>
<video id="myVideo" autoplay></video>
<script>
let v = document.getElementById("myVideo");
//create a canvas to grab an image for upload
let imageCanvas = document.createElement('canvas');
let imageCtx = imageCanvas.getContext("2d");
//Add file blob to a form and post
function postFile(file) {
let formdata = new FormData();
formdata.append("image", file);
let xhr = new XMLHttpRequest();
xhr.open('POST', 'http://localhost:5000/image', true);
xhr.onload = function () {
if (this.status === 200)
console.log(this.response);
else
console.error(xhr);
};
xhr.send(formdata);
}
//Get the image from the canvas
function sendImagefromCanvas() {
//Make sure the canvas is set to the current video size
imageCanvas.width = v.videoWidth;
imageCanvas.height = v.videoHeight;
imageCtx.drawImage(v, 0, 0, v.videoWidth, v.videoHeight);
//Convert the canvas to blob and post the file
imageCanvas.toBlob(postFile, 'image/jpeg');
}
//Take a picture on click
v.onclick = function() {
console.log('click');
sendImagefromCanvas();
};
window.onload = function () {
//Get camera video
navigator.mediaDevices.getUserMedia({video: {width: 1280, height: 720}, audio: false})
.then(stream => {
v.srcObject = stream;
})
.catch(err => {
console.log('navigator.getUserMedia error: ', err)
});
};
</script>
</body>
</html>
This uses adapter.js to polyfill getUserMedia on different browsers without any error checks.
Python Server
And here is a sample in Python using Flask as a web server:
from flask import Flask, request, Response
import time
PATH_TO_TEST_IMAGES_DIR = './images'
app = Flask(__name__)
#app.route('/')
def index():
return Response(open('./static/getImage.html').read(), mimetype="text/html")
# save the image as a picture
#app.route('/image', methods=['POST'])
def image():
i = request.files['image'] # get the image
f = ('%s.jpeg' % time.strftime("%Y%m%d-%H%M%S"))
i.save('%s/%s' % (PATH_TO_TEST_IMAGES_DIR, f))
return Response("%s saved" % f)
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
If you are looking for php in server side, here is how I did it.
Post the image data to php script using jquery:
var imgData = canvas.toDataURL('image/png');
$.post("https://path-to-your-script/capture.php", {image: imgData},
function(data) {
console.log('posted');
});
The php script will be like:
capture.php
$data = $_POST['image'];
// remove "data:image/png;base64," from image data.
$data = str_replace("data:image/png;base64,", "", $data);
// save to file
file_put_contents("/tmp/image.png", base64_decode($data));

socket.io 404 cant connect with socket.io server

I have webrtc / socket.io / nodejs running on a server, everything works fine when i go to the https://domain.com:8080 to test a video conference.
But i want the script to run in my webserver /public_html/
But i dont know why it is not connecting to the 8080 server.
"socket.io.js GET https://domain.com/socket.io/?EIO=3&transport=polling&t=LPgZs2K 404 (Not Found)"
my server (server.js)
// Load required modules
var https = require("https"); // http server core module
var express = require("express"); // web framework external module
var serveStatic = require('serve-static'); // serve static files
var socketIo = require("socket.io"); // web socket external module
var easyrtc = require("../");
const fs = require('fs'); // EasyRTC external module
const options = {
key: fs.readFileSync('key.pem'),
cert: fs.readFileSync('cert.pem')
};
// Set process name
process.title = "node-easyrtc";
// Setup and configure Express http server. Expect a subfolder called "static" to be the web root.
var app = express();
app.use(serveStatic('static', {'index': ['index.html']}));
// Start Express http server on port 8080
var webServer = https.createServer(options, app).listen(8080);
// Start Socket.io so it attaches itself to Express server
var socketServer = socketIo.listen(webServer, {"log level":1});
easyrtc.setOption("logLevel", "debug");
// Overriding the default easyrtcAuth listener, only so we can directly access its callback
easyrtc.events.on("easyrtcAuth", function(socket, easyrtcid, msg, socketCallback, callback) {
easyrtc.events.defaultListeners.easyrtcAuth(socket, easyrtcid, msg, socketCallback, function(err, connectionObj){
if (err || !msg.msgData || !msg.msgData.credential || !connectionObj) {
callback(err, connectionObj);
return;
}
connectionObj.setField("credential", msg.msgData.credential, {"isShared":false});
console.log("["+easyrtcid+"] Credential saved!", connectionObj.getFieldValueSync("credential"));
callback(err, connectionObj);
});
});
// To test, lets print the credential to the console for every room join!
easyrtc.events.on("roomJoin", function(connectionObj, roomName, roomParameter, callback) {
console.log("["+connectionObj.getEasyrtcid()+"] Credential retrieved!", connectionObj.getFieldValueSync("credential"));
easyrtc.events.defaultListeners.roomJoin(connectionObj, roomName, roomParameter, callback);
});
// Start EasyRTC server
var rtc = easyrtc.listen(app, socketServer, null, function(err, rtcRef) {
console.log("Initiated");
rtcRef.events.on("roomCreate", function(appObj, creatorConnectionObj, roomName, roomOptions, callback) {
console.log("roomCreate fired! Trying to create: " + roomName);
appObj.events.defaultListeners.roomCreate(appObj, creatorConnectionObj, roomName, roomOptions, callback);
});
});
//listen on port 8080
webServer.listen(8080, function () {
console.log('listening on http://localhost:8080');
});
my html file on de WEB server. structure like this https://domain.com/test.html
<!DOCTYPE html>
<html>
<head>
<title>EasyRTC Demo:EasyRTC Demo: Video+Audio HD 720</title>
<link rel="stylesheet" type="text/css" href="/easyrtc/easyrtc.css" />
<script src="js/socket.io.js"></script>
<script type="text/javascript" src="js/easyrtc.js"></script>
<script type="text/javascript" src="js/video.js"></script>
<script>
var selfEasyrtcid = "";
function connect() {
easyrtc.setVideoDims(1280,720);
easyrtc.enableDebug(false);
easyrtc.setRoomOccupantListener(convertListToButtons);
easyrtc.easyApp("easyrtc.videoChatHd", "selfVideo", ["callerVideo"], loginSuccess, loginFailure);
}
function clearConnectList() {
var otherClientDiv = document.getElementById("otherClients");
while (otherClientDiv.hasChildNodes()) {
otherClientDiv.removeChild(otherClientDiv.lastChild);
}
}
function convertListToButtons (roomName, data, isPrimary) {
clearConnectList();
var otherClientDiv = document.getElementById("otherClients");
for(var easyrtcid in data) {
var button = document.createElement("button");
button.onclick = function(easyrtcid) {
return function() {
performCall(easyrtcid);
};
}(easyrtcid);
var label = document.createTextNode(easyrtc.idToName(easyrtcid));
button.appendChild(label);
button.className = "callbutton";
otherClientDiv.appendChild(button);
}
}
function performCall(otherEasyrtcid) {
easyrtc.hangupAll();
var acceptedCB = function(accepted, caller) {
if( !accepted ) {
easyrtc.showError("CALL-REJECTED", "Sorry, your call to " + easyrtc.idToName(caller) + " was rejected");
}
};
var successCB = function() {};
var failureCB = function() {};
easyrtc.call(otherEasyrtcid, successCB, failureCB, acceptedCB);
}
function loginSuccess(easyrtcid) {
selfEasyrtcid = easyrtcid;
document.getElementById("iam").innerHTML = "I am " + easyrtc.cleanId(easyrtcid);
}
function loginFailure(errorCode, message) {
easyrtc.showError(errorCode, message);
}
// Sets calls so they are automatically accepted (this is default behaviour)
easyrtc.setAcceptChecker(function(caller, cb) {
cb(true);
} );
</script>
</head>
<body onload="connect();">
<h1>EasyRTC Demo: Video+Audio HD 720p</h1>
<div id="demoContainer">
<div>
Note: your own image will show up postage stamp sized, while the other party"s video will be shown in high-definition (1280x720). Note: not all webcams are seen by WebRTC as providing high-definition video; the fallback is to use standard definition (640x480).
</div>
<div id="connectControls">
<div id="iam">Not yet connected...</div>
<br />
<strong>Connected users:</strong>
<div id="otherClients"></div>
</div>
<div id="videos">
<div style="position:relative;float:left;" width="1282" height="722">
<video autoplay="autoplay" id="callerVideo"></video>
<video class="easyrtcMirror" autoplay="autoplay" id="selfVideo" muted="true" volume="0" ></video>
</div>
<!-- each caller video needs to be in it"s own div so it"s close button can be positioned correctly -->
</div>
</div>
</body>
</html>
Socket.io.js: http://81.171.38.245/js/socket.io.js

Categories

Resources