I have a simple program where a user records themselves (via their webcam) and that video is stored on the same HTML page. Can anyone let me know how I can go about recording and saving the video, building on what I have below?
HTML:
<button id="speak_button"></button> <!-- click to start recording -->
<div id="circle"><p id="submit_button">Submit</p></div> <!-- click to end recording -->
<video id="video" width="180" height="140" autoplay muted></video> <!-- live webcam preview -->
<video id="second_video" width="180" height="140" controls></video> <!-- where the recorded video should go -->
JavaScript:
<script>
  let video = document.getElementById("video");
  let mediaRecorder;

  navigator.mediaDevices.getUserMedia({ video: true, audio: true })
    .then(function (stream) {
      video.srcObject = stream;
      video.play();
      mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm" });
      mediaRecorder.ondatavailable = function (event) {
        if (event.data.size > 0) {
          const reader = new window.FileReader();
          reader.readAsDataURL(event.data);
          reader.onloadend = function () {
            let base64 = reader.result.split('base64,')[1];
            let recording = document.getElementById('second_video');
            recording.src = "data:video/webm;base64," + base64;
            recording.type = "video/webm";
            recording.play();
          };
        }
      };
    });

  document.getElementById("speak_button").addEventListener("click", function () {
    mediaRecorder.start();
    stream.record();
  });

  document.getElementById("submit_button").addEventListener("click", function () {
    mediaRecorder.stop();
    stream.stop();
    var superBuffer = new Blob(recordedChunks);
    document.getElementById("second_video") = window.URL.createObjectURL(superBuffer);
  });
</script>
Thanks!
HTML CODE
<main id="container">
<video id="gum" playsinline autoplay muted></video>
<video id="recorded" playsinline loop></video>
<section>
<button id="start">Start camera</button>
<button id="record" disabled>Record</button>
<button id="play" disabled>Play</button>
</section>
</main>
<script src="main.js"></script>
main.js code
let mediaRecorder;
let recordedBlobs;

const errorMsgElement = document.querySelector("span#errorMsg");
const recordedVideo = document.querySelector("video#recorded");
const recordButton = document.querySelector("button#record");
const playButton = document.querySelector("button#play");

recordButton.addEventListener("click", () => {
  if (recordButton.textContent === "Record") return startRecording();
  stopRecording();
  recordButton.textContent = "Record";
  playButton.disabled = false;
});

playButton.addEventListener("click", () => {
  const superBuffer = new Blob(recordedBlobs, { type: "video/webm" });
  recordedVideo.src = null;
  recordedVideo.srcObject = null;
  recordedVideo.src = window.URL.createObjectURL(superBuffer);
  recordedVideo.controls = true;
  recordedVideo.play();
});

function handleDataAvailable(event) {
  if (event.data && event.data.size > 0) recordedBlobs.push(event.data);
}

function startRecording() {
  recordedBlobs = [];
  let options = { mimeType: "video/webm;codecs=vp9,opus" };
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e) {
    console.error("Exception while creating MediaRecorder:", e);
    errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
    return;
  }
  recordButton.textContent = "Stop Recording";
  playButton.disabled = true;
  mediaRecorder.onstop = (event) => console.log(event);
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start();
  console.log("MediaRecorder started", mediaRecorder);
}

function stopRecording() {
  mediaRecorder.stop();
}

document.querySelector("button#start").addEventListener("click", async () => {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
    recordButton.disabled = false;
    window.stream = stream;
    const gumVideo = document.querySelector("video#gum");
    gumVideo.srcObject = stream;
  } catch (e) {
    console.error("navigator.getUserMedia error:", e);
    errorMsgElement.innerHTML = `navigator.getUserMedia error: ${e.toString()}`;
  }
});
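If you also need to save the recording somewhere (the question asks about saving, not just playback), you can turn recordedBlobs into a single Blob and either download it or upload it. This is only a rough sketch on top of the code above; the /upload endpoint is a placeholder, not something that already exists on your server:
// Rough sketch: persist the recording after stopRecording() has been called.
// recordedBlobs is filled by handleDataAvailable above; "/upload" is a placeholder endpoint.
function saveRecording() {
  const blob = new Blob(recordedBlobs, { type: "video/webm" });

  // Option 1: let the user download the file directly.
  const a = document.createElement("a");
  a.href = window.URL.createObjectURL(blob);
  a.download = "recording.webm";
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);
  window.URL.revokeObjectURL(a.href);

  // Option 2: upload it to a server.
  const formData = new FormData();
  formData.append("video", blob, "recording.webm");
  fetch("/upload", { method: "POST", body: formData })
    .then((response) => console.log("upload finished:", response.status))
    .catch((err) => console.error("upload failed:", err));
}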
I need some help. I have JavaScript code on my site that records videos and saves them on the server. The format in which the videos are saved depends on the browser I am using: with Opera or Chrome the videos are saved in MKV format, while with Firefox they are saved in WEBM format. The problem is that Firefox cannot read the videos I save using Opera and Chrome (i.e. the MKV ones), while Opera and Chrome read the WEBM videos generated with Firefox without problems.
How can I make Opera and Chrome also save the videos in WEBM so that I no longer have this problem?
JavaScript code:
let constraintObj = {
audio: true,
video: {
facingMode: "user",
width: { min: 640, ideal: 1280, max: 1920 },
height: { min: 480, ideal: 720, max: 1080 }
}
};
// width: 1280, height: 720 -- preference only
// facingMode: {exact: "user"}
// facingMode: "environment"
//handle older browsers that might implement getUserMedia in some way
if (navigator.mediaDevices === undefined) {
navigator.mediaDevices = {};
navigator.mediaDevices.getUserMedia = function(constraintObj) {
let getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
if (!getUserMedia) {
return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
}
return new Promise(function(resolve, reject) {
getUserMedia.call(navigator, constraintObj, resolve, reject);
});
}
}else{
navigator.mediaDevices.enumerateDevices()
.then(devices => {
devices.forEach(device=>{
console.log(device.kind.toUpperCase(), device.label);
//, device.deviceId
})
})
.catch(err=>{
console.log(err.name, err.message);
})
}
navigator.mediaDevices.getUserMedia(constraintObj)
.then(function(mediaStreamObj) {
//connect the media stream to the first video element
let video = document.querySelector('video');
if ("srcObject" in video) {
video.srcObject = mediaStreamObj;
} else {
//old version
video.src = window.URL.createObjectURL(mediaStreamObj);
}
video.onloadedmetadata = function(ev) {
//show in the video element what is being captured by the webcam
video.play();
};
//add listeners for saving video/audio
let start = document.getElementById('btnStart');
let stop = document.getElementById('btnStop');
let save = document.getElementById('btnSave');
let vidSave = document.getElementById('vid2');
let mediaRecorder = new MediaRecorder(mediaStreamObj);
let chunks = [];
var blob = null;
document.getElementById("vid2").style.visibility = "hidden";
document.getElementById("btnSave").style.visibility = "hidden";
var contatore=0;
var dontstop=0;
save.addEventListener('click', (ev)=>{
contatore = 0;
const formData = new FormData();
formData.append('video', blob);
fetch('videoRec', {
method: 'POST',
body: formData
})
.then(response => { console.log('upload success');
document.getElementById('response').innerHTML="Video saved successfully.";
})
.catch(error => {console.log('error');
document.getElementById('response').innerHTML="Errore durante il caricamento del video. Riprova.";
})
});
start.addEventListener('click', (ev)=>{
dontstop=1;
//if the user already recorded a video, ask whether they want to save it before
//recording another one
if(contatore!=0){
var domanda=confirm("The video you just recorded will be deleted. Do you want to proceed?");
if (domanda === false) {
return;
}
}
//when user's recording, vid2 must be hidden
if(document.getElementById("vid2").style.visibility=="visible"){
document.getElementById("vid2").style.visibility="hidden";
document.getElementById("btnSave").style.visibility="hidden";
document.getElementById('response').innerHTML=" ";
}
//start recording
mediaRecorder.start();
console.log(mediaRecorder.state);
//disable start button and enable stop button
document.getElementById('btnStart').disabled=true;
document.getElementById('btnStop').disabled=false;
//contatore increments when the user records, so if they start another recording without stopping first
//ask whether they want to save the last video
contatore++;
var timeleft = 5; //countdown in seconds (the video is supposed to last 120 seconds)
downloadTimer = setInterval(function(){
if(timeleft <= 0){
//after 120 seconds, recording stops automatically
clearInterval(downloadTimer); //the countdown disappears
document.getElementById("countdown").innerHTML = "Finished";
mediaRecorder.stop();
console.log(mediaRecorder.state);
document.getElementById('btnStart').disabled=false;
document.getElementById('btnStop').disabled=true;
video.onloadedmetadata = function(ev) {
chunks.push(ev.data);
};
// vid2 appears
document.getElementById("vid2").style.visibility = "visible";
document.getElementById("btnSave").style.visibility = "visible";
blob = new Blob(chunks, { 'type' : 'video/webm;' });
chunks = [];
let videoURL = window.URL.createObjectURL(blob);
vidSave.src = videoURL;
} else {
//else countdown continues
document.getElementById("countdown").innerHTML = timeleft + " seconds remaining";
}
timeleft -= 1;
}, 1000)
})
stop.addEventListener('click', (ev)=>{
if(dontstop!=1){
return;
}
mediaRecorder.stop();
console.log(mediaRecorder.state);
document.getElementById('btnStart').disabled=false;
document.getElementById('btnStop').disabled=true;
clearInterval(downloadTimer);
document.getElementById("countdown").innerHTML = "Finished";
document.getElementById("vid2").style.visibility = "visible";
document.getElementById("btnSave").style.visibility = "visible";
});
mediaRecorder.ondataavailable = function(ev) {
chunks.push(ev.data);
}
mediaRecorder.onstop = (ev)=>{
blob = new Blob(chunks, { 'type' : 'video/webm;' });
chunks = [];
let videoURL = window.URL.createObjectURL(blob);
vidSave.src = videoURL;
}
})
.catch(function(err) {
console.log(err.name, err.message);
});
Thank you very much!
Gennaro
You can try forcing the mimeType to video/webm;codecs=vp8; it should work with any browser that supports MediaRecorder.
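For example, here is a minimal sketch (my own illustration, reusing the mediaStreamObj stream from the question's code) that picks a WebM mimeType the browser actually supports; MediaRecorder.isTypeSupported is the standard way to check:
// Try a few WebM variants and record with the first one the browser supports.
const candidates = [
  "video/webm;codecs=vp9,opus",
  "video/webm;codecs=vp8,opus",
  "video/webm;codecs=vp8",
  "video/webm"
];
const mimeType = candidates.find(t => MediaRecorder.isTypeSupported(t)) || "";

// mediaStreamObj is the stream obtained from getUserMedia in the question's code.
let mediaRecorder = new MediaRecorder(mediaStreamObj, mimeType ? { mimeType } : {});
console.log("Recording with mimeType:", mediaRecorder.mimeType);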
I have this simple code to get chunks of a video stream and play them through MediaSource. I see video, but sometimes it stops. It may work for a few seconds or a few minutes, but eventually it stops at some point. chrome://media-internals/ shows no errors.
What is wrong here?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
navigator.getUserMedia(constraints, function (stream) {
console.log("stream", stream);
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e) {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(100);
}, function (e) {
console.log(e)
});
Here is a JSFiddle that tries to do it:
https://jsfiddle.net/stivyakovenko/fkt89cLu/6/
I am using Chrome as my main target.
Looks like this is a bug in Chrome...
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
MediaRecorder gives you parts of one whole webm file in the ondataavailable callback. It looks like this kind of data does not work with MediaSource; it does not work at all in my Chrome 66.
Here is a way to get something like "video chat" or "live streaming" behaviour with MediaRecorder and without ffmpeg:
You can send the data part by part to your server with ajax (see the sketch below). The server can then return "the whole webm file" to your Chrome browser in one long-lived response, appending more data to that response as soon as it receives new parts from the client.
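A rough client-side sketch of that server idea (the /chunk and /stream.webm endpoint names are made up, and the server's long-lived response is not shown here):
// Hypothetical sketch of the "server long push" idea: each recorded part is
// POSTed to the server, while a second <video> plays the growing webm file
// that the server streams back in one long response.
// "stream" is the getUserMedia stream; "/chunk" and "/stream.webm" are assumed names.
const recorder = new MediaRecorder(stream, { mimeType: "video/webm" });
recorder.ondataavailable = (event) => {
  if (event.data.size === 0) return;
  const body = new FormData();
  body.append("part", event.data);
  fetch("/chunk", { method: "POST", body: body }); // the server appends this part to its file
};
recorder.start(1000); // emit a part roughly every second

// The viewer simply plays the URL that the server keeps feeding.
const viewer = document.querySelector("video.viewer");
viewer.src = "/stream.webm";
viewer.play();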
There is also a workaround that works with HTML only: you can use a blob list to collect all the blobs that come from ondataavailable, then set video.src again and again.
Here is a jsfiddle that works:
const constraints = { video: true };
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');

var blobList = [];
var gCurrentTime = 0;

function playNew() {
  gCurrentTime = video2.currentTime;
  var thisBlob = new Blob(blobList, { type: "video/webm" });
  var url = URL.createObjectURL(thisBlob);
  video2.src = url;
  video2.currentTime = gCurrentTime;
  video2.play();
}

video2.onended = playNew;

var isFirst = true;

function handleSuccess(stream) {
  video1.srcObject = stream;
  var mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm" });
  mediaRecorder.ondataavailable = function (e) {
    blobList.push(e.data);
    if (isFirst) {
      playNew();
      isFirst = false;
    }
  };
  mediaRecorder.start(1000);
}

function handleError(error) {
  console.error('Reeeejected!', error);
}

navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
The HTML-only solution (the second one) will blink again and again and has a huge delay. The server long-push solution (the first one) does not blink and has about a five-second delay.
Based on my experience of working with MediaRecorder and MediaSource, most of the errors related to the video freezing or failing are due to the chunks being received out of order. I believe that webm (and maybe other media types as well) needs the chunks to arrive in increasing order of their timecodes, and recording, sending and then receiving the chunks asynchronously may not preserve that order.
So, after the above analysis of my own experience of video freezing with MediaRecorder/MediaSource, I changed my code to send the recorded chunks synchronously, not asynchronously; a sketch of that idea follows.
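Roughly what I mean, as a minimal sketch (the /upload-chunk endpoint and the promise-chain queue are illustrative assumptions, not my actual code):
// Chain chunk uploads so each POST finishes before the next one starts,
// preserving the increasing timecode order on the server side.
// "/upload-chunk" is an assumed endpoint name; mediaRecorder is the recorder in use.
let uploadQueue = Promise.resolve();
let chunkIndex = 0;
mediaRecorder.ondataavailable = (event) => {
  if (!event.data || event.data.size === 0) return;
  const index = chunkIndex++;
  uploadQueue = uploadQueue
    .then(() => {
      const formData = new FormData();
      formData.append("index", String(index));
      formData.append("chunk", event.data, "chunk-" + index + ".webm");
      return fetch("/upload-chunk", { method: "POST", body: formData });
    })
    .catch((err) => console.error("chunk upload failed:", err));
};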
I am trying to do this as well; however, I do not get any video at all. Your JSFiddle does not work for me in Chrome or Firefox (tested on Ubuntu 14.04 and Windows 7).
After a bit of research (mainly about streaming the file back after it has been recorded), I've found out that the file is not properly fragmented to be played by MSE.
@Steve: I'd be interested to find out how you've done the fragmenting with ffmpeg.
As a side note, I also have a similar question here: Display getUserMediaStream live video with media stream extensions (MSE), with an error description from chrome://media-internals.
A working example in Chrome, but it freezes in Firefox:
const main = async(function* main(){
const logging = true;
let tasks = Promise.resolve(void 0);
const devices = yield navigator.mediaDevices.enumerateDevices();
console.table(devices);
const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
if(logging){
stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
}
const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
if(logging){
rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
}
const ms = new MediaSource();
if(logging){
ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
}
const video = document.createElement("video");
if(logging){
video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
video.addEventListener('play', (ev)=>{ console.log(ev.type); });
video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
}
//video.srcObject = ms;
video.src = URL.createObjectURL(ms);
video.volume = 0;
video.controls = true;
video.autoplay = true;
document.body.appendChild(video);
yield new Promise((resolve, reject)=>{
ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
});
const sb = ms.addSourceBuffer(rec.mimeType);
if(logging){
sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
}
const stop = async(function* stop(){
console.info("stopping");
if(sb.updating){ sb.abort(); }
if(ms.readyState === "open"){ ms.endOfStream(); }
rec.stop();
stream.getTracks().map((track)=>{ track.stop(); });
yield video.pause();
console.info("end");
});
const button = document.createElement("button");
button.innerHTML = "stop";
button.addEventListener("click", ()=>{
document.body.removeChild(button);
tasks = tasks.then(stop);
}, {once: true});
document.body.appendChild(button);
let i = 0;
rec.ondataavailable = ({data})=>{
tasks = tasks.then(async(function*(){
console.group(""+i);
try{
if(logging){ console.log("dataavailable", "size:", data.size); }
if(data.size === 0){
console.warn("empty recorder data");
throw new Error("empty recorder data");
}
const buf = yield readAsArrayBuffer(data);
sb.appendBuffer(buf);
yield new Promise((resolve, reject)=>{
sb.addEventListener('updateend', ()=> resolve(), {once: true});
sb.addEventListener("error", (err)=> reject(ev), {once: true});
});
if(logging){
console.log("timestampOffset", sb.timestampOffset);
console.log("appendWindowStart", sb.appendWindowStart);
console.log("appendWindowEnd", sb.appendWindowEnd);
for(let i=0; i<sb.buffered.length; i++){
console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
}
for(let i=0; i<video.seekable.length; i++){
console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
}
console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
}
if (video.buffered.length > 1) {
console.warn("MSE buffered has a gap!");
throw new Error("MSE buffered has a gap!");
}
}catch(err){
console.error(err);
yield stop();
console.groupEnd(""+i); i++;
return Promise.reject(err);
}
console.groupEnd(""+i);
i++;
}));
};
rec.start(1000);
console.info("start");
});
function sleep(ms){
return new Promise(resolve =>
setTimeout((()=>resolve(ms)), ms));
}
function readAsArrayBuffer(blob) {
return new Promise((resolve, reject)=>{
const reader = new FileReader();
reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
reader.addEventListener("error", (err)=> reject(err.error), {once: true});
reader.readAsArrayBuffer(blob);
});
}
function async(generatorFunc){
return function (arg) {
const generator = generatorFunc(arg);
return next(null);
function next(arg) {
const result = generator.next(arg);
if(result.done){ return result.value; }
else if(result.value instanceof Promise){ return result.value.then(next); }
else{ return Promise.resolve(result.value); }
}
}
}
console.clear();
main().catch(console.error);
https://jsfiddle.net/nthyfgvs/
UPDATE!
This is version 2, which I also created; it works in Firefox and Chrome with no freezing. Please note that I am using the same two server-side Python3 programs from my previous answer for writing and reading the webcam data as the data is created.
Browser Client version 2:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video1" width="300" height="300" autoplay controls ></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs=vp8;' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
const constraints = {video: true};
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
//var blobList = [];
function handleSuccess(stream) {
video1.srcObject = stream;
var mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm; codecs=vp8" });
mediaRecorder.ondataavailable = function(e){
//blobList.push(e.data);
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
async:false,
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
}
mediaRecorder.start(1000);
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
if (sourceBuffer2.duration > 2) {
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
sourceBuffer2.appendBuffer( reader.result );
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
video.play();
}
function handleError(error) {
console.error('error', error);
}
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
});
</script>
</body>
</html>
This solution works great in Firefox, with no freezing. The browser client requires jQuery, and the server side uses Python3 CGI. It also has two server-side Python3 programs for writing and reading the webcam data as the data is created.
Browser Client:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video" width="300" height="300" controls></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.getUserMedia(constraints, function (stream)
{
console.log("stream", stream);
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e)
{
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(1000);
}, function (e) {
console.log(e)
});
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
if (sourceBuffer.duration > 2){
sourceBuffer.remove(0, sourceBuffer.duration - 2);
}
if (sourceBuffer2.duration > 2){
sourceBuffer2.remove(0, sourceBuffer2.duration - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer2.appendBuffer(arr);
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
});
</script>
</body>
</html>
Server-side Python3 webcam video writer: post_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
chunk = '' if not args.getvalue( "chunk" ) else escape( args.getvalue( "chunk" ) )
mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'
with open (mp4, 'ab') as f:
f.write( base64.b64decode(chunk) )
with open (mp4_text, 'a') as f:
f.write( str(len(chunk)) + ',' + chunk + '\n' )
html = 'success'
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 webcam video reader: get_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64
include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)
def enc_print(string='', encoding='utf8'):
sys.stdout.buffer.write(string.encode(encoding) + b'\n')
from html import escape
args = cgi.FieldStorage()
offset = '' if not args.getvalue( "offset" ) else escape( args.getvalue( "offset" ) )
mp4_text = 'webcam_text.txt'
data = ''
try:
with open(mp4_text, 'r') as f:
line = f.readlines()[int(offset)]
data = line.split(',')[1].strip()
except:
pass
enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)