I have this simple code that gets chunks of a video stream and plays them through MediaSource. I see video, but sometimes it stops. It may work for a few seconds or for a few minutes, but eventually it stops at some moment. chrome://media-internals/ shows no errors.
What is wrong here?
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
navigator.getUserMedia(constraints, function (stream) {
console.log("stream", stream);
var mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e) {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(100);
}, function (e) {
console.log(e)
});
Here is a JSFiddle that tries to do it:
https://jsfiddle.net/stivyakovenko/fkt89cLu/6/
I am using Chrome as my main target.
Looks like this is a bug in Chrome...
https://bugs.chromium.org/p/chromium/issues/detail?id=606000
The MediaRecorder will give you part of a whole webm file in the ondataavailable callback. It looks like this kind of chunk does not work with MediaSource; it does not work at all in my Chrome 66.
Here is a way to get something like a "video chat" or "live stream" with MediaRecorder, without ffmpeg:
You can send the data part by part to your server with ajax.
The server can return "the whole webm file" to your Chrome browser in one long-running response, appending more data to that response as soon as it receives it from the client.
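A minimal sketch of that client side, assuming hypothetical /upload and /live.webm endpoints (any server that concatenates the POSTed chunks into one growing webm response will do):
// Uploader: each blob is only a piece of one continuous webm file,
// so the server must append them in arrival order.
navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(function (stream) {
    var recorder = new MediaRecorder(stream);
    recorder.ondataavailable = function (e) {
        fetch('/upload', { method: 'POST', body: e.data });
    };
    recorder.start(1000);
});
// Viewer: no MediaSource at all -- just play the long-running
// response the server keeps appending to.
document.querySelector('#video').src = '/live.webm';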
And this kind of workaround works with HTML only, too:
You can use a blob list to collect all the blobs that come from ondataavailable,
then set video.src again and again.
Here is a jsfiddle that works:
const constraints = { video: true };
const video1 = document.querySelector('.real1');
const video2 = document.querySelector('.real2');
var blobList = [];
var gCurrentTime = 0;
function playNew() {
    gCurrentTime = video2.currentTime;
    var thisBlob = new Blob(blobList, { type: "video/webm" });
    var url = URL.createObjectURL(thisBlob);
    video2.src = url;
    video2.currentTime = gCurrentTime;
    video2.play();
}
video2.onended = playNew;
var isFirst = true;
function handleSuccess(stream) {
    video1.srcObject = stream;
    var mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm" });
    mediaRecorder.ondataavailable = function (e) {
        blobList.push(e.data);
        if (isFirst) {
            playNew();
            isFirst = false;
        }
    };
    mediaRecorder.start(1000);
}
function handleError(error) {
    console.error('Reeeejected!', error);
}
navigator.mediaDevices.getUserMedia(constraints)
    .then(handleSuccess)
    .catch(handleError);
<video class="real1" autoplay controls></video>
<video class="real2" controls></video>
https://jsfiddle.net/4akkadht/1/
The HTML-only solution (the second one) will blink again and again and has a huge delay. The server long-push solution (the first one) does not blink and has about a five-second delay.
Based on my experience of working with MediaRecorder and MediaSource, most errors where the video freezes or an error is returned come from the chunks being received out of sync. I believe that webm (and maybe other media types as well) needs the chunks to be received in increasing order of their timecodes. Recording, sending and then receiving the chunks asynchronously may not preserve this increasing order of timecodes.
So, after the above analysis of my own experience of video freezing with MediaRecorder/MediaSource, I changed my code to send the recorded chunks synchronously, not asynchronously.
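A minimal sketch of that serialization, chaining each send onto the previous one so chunk N+1 only leaves after chunk N has been accepted (the /chunk endpoint and the recorder variable are assumptions, not part of my original code):
// Chain every upload onto the last so chunks reach the server
// strictly in recording order.
var queue = Promise.resolve();
recorder.ondataavailable = function (e) {
    queue = queue.then(function () {
        return fetch('/chunk', { method: 'POST', body: e.data });
    });
};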
I am trying to do this as well, but I do not get any video at all. Your jsfiddle does not work for me in Chrome or Firefox (tested on Ubuntu 14.04 and Windows 7).
After a bit of research (mainly streaming the file back after it has been recorded), I've found out that the file is not properly fragmented to be played by MSE.
@Steve: I'd be interested to find out how you've done the fragmenting with ffmpeg.
As a sidenote, I also have a similar question here: Display getUserMediaStream live video with media stream extensions (MSE) , with an error description from chrome://media-internals.
A working example in Chrome, but it freezes in Firefox:
const main = async(function* main(){
const logging = true;
let tasks = Promise.resolve(void 0);
const devices = yield navigator.mediaDevices.enumerateDevices();
console.table(devices);
const stream = yield navigator.mediaDevices.getUserMedia({video: true, audio: true});
if(logging){
stream.addEventListener("active", (ev)=>{ console.log(ev.type); });
stream.addEventListener("inactive", (ev)=>{ console.log(ev.type); });
stream.addEventListener("addtrack", (ev)=>{ console.log(ev.type); });
stream.addEventListener("removetrack", (ev)=>{ console.log(ev.type); });
}
const rec = new MediaRecorder(stream, {mimeType: 'video/webm; codecs="opus,vp8"'});
if(logging){
rec.addEventListener("dataavailable", (ev)=>{ console.log(ev.type); });
rec.addEventListener("pause", (ev)=>{ console.log(ev.type); });
rec.addEventListener("resume", (ev)=>{ console.log(ev.type); });
rec.addEventListener("start", (ev)=>{ console.log(ev.type); });
rec.addEventListener("stop", (ev)=>{ console.log(ev.type); });
rec.addEventListener("error", (ev)=>{ console.error(ev.type, ev); });
}
const ms = new MediaSource();
if(logging){
ms.addEventListener('sourceopen', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceended', (ev)=>{ console.log(ev.type); });
ms.addEventListener('sourceclose', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('addsourcebuffer', (ev)=>{ console.log(ev.type); });
ms.sourceBuffers.addEventListener('removesourcebuffer', (ev)=>{ console.log(ev.type); });
}
const video = document.createElement("video");
if(logging){
video.addEventListener('loadstart', (ev)=>{ console.log(ev.type); });
video.addEventListener('progress', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadedmetadata', (ev)=>{ console.log(ev.type); });
video.addEventListener('loadeddata', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplay', (ev)=>{ console.log(ev.type); });
video.addEventListener('canplaythrough', (ev)=>{ console.log(ev.type); });
video.addEventListener('playing', (ev)=>{ console.log(ev.type); });
video.addEventListener('waiting', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeking', (ev)=>{ console.log(ev.type); });
video.addEventListener('seeked', (ev)=>{ console.log(ev.type); });
video.addEventListener('ended', (ev)=>{ console.log(ev.type); });
video.addEventListener('emptied', (ev)=>{ console.log(ev.type); });
video.addEventListener('stalled', (ev)=>{ console.log(ev.type); });
video.addEventListener('timeupdate', (ev)=>{ console.log(ev.type); }); // annoying
video.addEventListener('durationchange', (ev)=>{ console.log(ev.type); });
video.addEventListener('ratechange', (ev)=>{ console.log(ev.type); });
video.addEventListener('play', (ev)=>{ console.log(ev.type); });
video.addEventListener('pause', (ev)=>{ console.log(ev.type); });
video.addEventListener('error', (ev)=>{ console.warn(ev.type, ev); });
}
//video.srcObject = ms;
video.src = URL.createObjectURL(ms);
video.volume = 0;
video.controls = true;
video.autoplay = true;
document.body.appendChild(video);
yield new Promise((resolve, reject)=>{
ms.addEventListener('sourceopen', ()=> resolve(), {once: true});
});
const sb = ms.addSourceBuffer(rec.mimeType);
if(logging){
sb.addEventListener('updatestart', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('update', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('updateend', (ev)=>{ console.log(ev.type); }); // annoying
sb.addEventListener('error', (ev)=>{ console.error(ev.type, ev); });
sb.addEventListener('abort', (ev)=>{ console.log(ev.type); });
}
const stop = async(function* stop(){
console.info("stopping");
if(sb.updating){ sb.abort(); }
if(ms.readyState === "open"){ ms.endOfStream(); }
rec.stop();
stream.getTracks().map((track)=>{ track.stop(); });
yield video.pause();
console.info("end");
});
const button = document.createElement("button");
button.innerHTML = "stop";
button.addEventListener("click", ()=>{
document.body.removeChild(button);
tasks = tasks.then(stop);
}, {once: true});
document.body.appendChild(button);
let i = 0;
rec.ondataavailable = ({data})=>{
tasks = tasks.then(async(function*(){
console.group(""+i);
try{
if(logging){ console.log("dataavailable", "size:", data.size); }
if(data.size === 0){
console.warn("empty recorder data");
throw new Error("empty recorder data");
}
const buf = yield readAsArrayBuffer(data);
sb.appendBuffer(buf);
yield new Promise((resolve, reject)=>{
sb.addEventListener('updateend', ()=> resolve(), {once: true});
sb.addEventListener("error", (err)=> reject(ev), {once: true});
});
if(logging){
console.log("timestampOffset", sb.timestampOffset);
console.log("appendWindowStart", sb.appendWindowStart);
console.log("appendWindowEnd", sb.appendWindowEnd);
for(let i=0; i<sb.buffered.length; i++){
console.log("buffered", i, sb.buffered.start(i), sb.buffered.end(i));
}
for(let i=0; i<video.seekable.length; i++){
console.log("seekable", i, video.seekable.start(i), video.seekable.end(i));
}
console.log("webkitAudioDecodedByteCount", video.webkitAudioDecodedByteCount);
console.log("webkitVideoDecodedByteCount", video.webkitVideoDecodedByteCount);
console.log("webkitDecodedFrameCount", video.webkitDecodedFrameCount);
console.log("webkitDroppedFrameCount", video.webkitDroppedFrameCount);
}
if (video.buffered.length > 1) {
console.warn("MSE buffered has a gap!");
throw new Error("MSE buffered has a gap!");
}
}catch(err){
console.error(err);
yield stop();
console.groupEnd(""+i); i++;
return Promise.reject(err);
}
console.groupEnd(""+i);
i++;
}));
};
rec.start(1000);
console.info("start");
});
function sleep(ms){
return new Promise(resolve =>
setTimeout((()=>resolve(ms)), ms));
}
function readAsArrayBuffer(blob) {
return new Promise((resolve, reject)=>{
const reader = new FileReader();
reader.addEventListener("loadend", ()=> resolve(reader.result), {once: true});
reader.addEventListener("error", (err)=> reject(err.error), {once: true});
reader.readAsArrayBuffer(blob);
});
}
function async(generatorFunc){
return function (arg) {
const generator = generatorFunc(arg);
return next(null);
function next(arg) {
const result = generator.next(arg);
if(result.done){ return result.value; }
else if(result.value instanceof Promise){ return result.value.then(next); }
else{ return Promise.resolve(result.value); }
}
}
}
console.clear();
main().catch(console.error);
https://jsfiddle.net/nthyfgvs/
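The async(generatorFunc) helper above only emulates async/await with generators; in current browsers the same append loop can be written natively. A trimmed sketch of just the dataavailable handler, reusing sb, rec, tasks and readAsArrayBuffer from above:
rec.ondataavailable = ({data}) => {
    tasks = tasks.then(async () => {
        const buf = await readAsArrayBuffer(data);
        sb.appendBuffer(buf);
        // Wait for this append to finish before the next chunk is queued.
        await new Promise((resolve, reject) => {
            sb.addEventListener('updateend', () => resolve(), { once: true });
            sb.addEventListener('error', (err) => reject(err), { once: true });
        });
    });
};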
UPDATE!
This is version 2, which I also created; it works in Firefox and Chrome with no freezing. Note that I am using the same two server-side Python 3 programs from my previous answer for writing and reading the webcam data as it is created.
Browser Client version 2:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video1" width="300" height="300" autoplay controls ></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs=vp8;' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
const constraints = {video: true};
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
//var blobList = [];
function handleSuccess(stream) {
video1.srcObject = stream;
var mediaRecorder = new MediaRecorder(stream, { mimeType: "video/webm; codecs=vp8" });
mediaRecorder.ondataavailable = function(e){
//blobList.push(e.data);
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
async:false,
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
}
mediaRecorder.start(1000);
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
// SourceBuffer has no duration property; trim using the buffered range instead
if (sourceBuffer2.buffered.length > 0 && sourceBuffer2.buffered.end(0) > 2) {
sourceBuffer2.remove(0, sourceBuffer2.buffered.end(0) - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
sourceBuffer2.appendBuffer( reader.result );
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
video.play();
}
function handleError(error) {
console.error('error', error);
}
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=vp8;");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.mediaDevices.getUserMedia(constraints).then(handleSuccess).catch(handleError);
});
</script>
</body>
</html>
This solution works great in Firefox, with no freezing. The browser client requires jQuery, and the server side requires CGI with Python 3. It also uses two server-side Python 3 programs that write and read the webcam data as it is created.
Browser Client:
<html>
<head>
<script type="text/javascript" src="js/jquery.min.js"></script>
</head>
<body>
<video id="video" width="300" height="300" controls></video>
<video id="video2" width="300" height="300" controls></video>
<script>
var offsetA = 0;
var res;
var pos;
var b = "base64," ;
var fr = new FileReader();
function b64toBlob(dataURI) {
var byteString = atob(dataURI.split(',')[1]);
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
return new Blob([ab], { type: 'video/webm; codecs="vp8, opus"' });
}
// 1. Create a `MediaSource`
var mediaSource2 = new MediaSource();
// 2. Create an object URL from the `MediaSource`
var url = URL.createObjectURL(mediaSource2);
// 3. Set the video's `src` to the object URL
var video = document.getElementById("video2");
video.src = url;
// 4. On the `sourceopen` event, create a `SourceBuffer`
var sourceBuffer2 = null;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
var mediaSource = new MediaSource();
var constraints = {
"audio": true,
"video": {
"mandatory": {
"minWidth": 320, "maxWidth": 320,
"minHeight": 240, "maxHeight": 240
}, "optional": []
}
};
window.mediaSource = mediaSource;
var sourceBuffer;
var video = document.querySelector('#video');
window.video = video;
video.src = window.URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', function (e) {
console.log("sourceopen");
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8, opus"');
window.sourceBuffer = sourceBuffer;
}, false);
mediaSource.addEventListener('error', function (e) {
console.log("error", e)
}, false);
var stack = [];
video.play();
mediaSource2.addEventListener("sourceopen", function()
{
// NOTE: Browsers are VERY picky about the codec being EXACTLY
// right here. Make sure you know which codecs you're using!
sourceBuffer2 = mediaSource2.addSourceBuffer("video/webm; codecs=\"vp8, opus\"");
sourceBuffer2.mode = 'sequence';
// Make sure to only append one chunk at a time to the SourceBuffer
navigator.getUserMedia(constraints, function (stream)
{
console.log("stream", stream);
var mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function (e)
{
fr.onload = function(){
res = this.result;
pos = res.search(b);
pos = pos + b.length;
res = res.substring(pos);
$.ajax({
type: 'POST',
url: 'post_data_webcam.py',
dataType: "html",
data: { chunk: res },
success: function(data){
//alert(data + ' yes');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
fr.readAsDataURL(e.data);
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(1000);
}, function (e) {
console.log(e)
});
var i = setInterval(function()
{
if (mediaSource2.readyState === "open" && sourceBuffer2 && sourceBuffer2.updating === false )
{
// SourceBuffer has no duration property; trim using the buffered ranges instead
if (sourceBuffer.buffered.length > 0 && sourceBuffer.buffered.end(0) > 2){
sourceBuffer.remove(0, sourceBuffer.buffered.end(0) - 2);
}
if (sourceBuffer2.buffered.length > 0 && sourceBuffer2.buffered.end(0) > 2){
sourceBuffer2.remove(0, sourceBuffer2.buffered.end(0) - 2);
}
$.ajax({
type: 'POST',
url: 'get_data_webcam.py',
dataType: "html",
async: false,
data: { offset: offsetA },
success: function(data){
data = data.trim();
if (data != 'base64,') {
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer2.appendBuffer(arr);
});
reader.readAsArrayBuffer( b64toBlob(data) );
offsetA = offsetA + 1;
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert('Status: ' + textStatus + ' ' + ' Error: ' + errorThrown);
}
});
}
}, 1000);
});
</script>
</body>
</html>
Server-side Python3 webcam video writer: post_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64

include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)

def enc_print(string='', encoding='utf8'):
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')

from html import escape

args = cgi.FieldStorage()
chunk = '' if not args.getvalue("chunk") else escape(args.getvalue("chunk"))

mp4 = 'webcam.mp4'
mp4_text = 'webcam_text.txt'

with open(mp4, 'ab') as f:
    f.write(base64.b64decode(chunk))

with open(mp4_text, 'a') as f:
    f.write(str(len(chunk)) + ',' + chunk + '\n')

html = 'success'

enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print(html)
Server-side Python3 webcam video reader: get_data_webcam.py
import os
import sys
import cgi
import cgitb
import base64

include_path = '/var/project_path/www'
cgitb.enable(display=0, logdir=f"""{include_path}/tmp_errors""") # include_path is OUTDIR
sys.path.insert(0, include_path)

def enc_print(string='', encoding='utf8'):
    sys.stdout.buffer.write(string.encode(encoding) + b'\n')

from html import escape

args = cgi.FieldStorage()
offset = '' if not args.getvalue("offset") else escape(args.getvalue("offset"))

mp4_text = 'webcam_text.txt'
data = ''
try:
    with open(mp4_text, 'r') as f:
        line = f.readlines()[int(offset)]
        data = line.split(',')[1].strip()
except:
    pass

enc_print("Content-Type:text/html;charset=utf-8;")
enc_print()
enc_print('base64,' + data)
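As a quick smoke test of the two scripts from the browser console (assuming the CGI paths match the client above and webcam_text.txt starts empty), one chunk should round-trip like this:
// Post one base64 chunk, then read it back by line offset.
fetch('post_data_webcam.py', {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: 'chunk=' + encodeURIComponent(btoa('hello'))
})
    .then(() => fetch('get_data_webcam.py', {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: 'offset=0'
    }))
    .then(r => r.text())
    .then(t => console.log(t)); // expected: "base64," + btoa('hello')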
Related
I am very new to JavaScript. I know some basics but have not yet completely understood the logic behind it (so far I have only worked with Python and a little bit of VBA).
For uni I have to build a browser interface to record audio and transfer it to a server where a speech-to-text application runs. I found some open-source code here (https://github.com/mdn/dom-examples/blob/main/media/web-dictaphone/scripts/app.js) which I wanted to use, but it is missing the WebSocket part, and I don't know where exactly to insert it. So far I have this:
Code of the web dictaphone:
// set up basic variables for app
const record = document.querySelector('.record');
const stop = document.querySelector('.stop');
const soundClips = document.querySelector('.sound-clips');
const canvas = document.querySelector('.visualizer');
const mainSection = document.querySelector('.main-controls');
// disable stop button while not recording
stop.disabled = true;
// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");
//main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
console.log('getUserMedia supported.');
const constraints = { audio: true };
let chunks = [];
let onSuccess = function(stream) {
const mediaRecorder = new MediaRecorder(stream);
visualize(stream);
record.onclick = function() {
mediaRecorder.start();
console.log(mediaRecorder.state);
console.log("recorder started");
record.style.background = "red";
stop.disabled = false;
record.disabled = true;
}
stop.onclick = function() {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
// mediaRecorder.requestData();
stop.disabled = true;
record.disabled = false;
}
mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
const clipName = prompt('Enter a name for your sound clip?','My unnamed clip');
const clipContainer = document.createElement('article');
const clipLabel = document.createElement('p');
const audio = document.createElement('audio');
const deleteButton = document.createElement('button');
clipContainer.classList.add('clip');
audio.setAttribute('controls', '');
deleteButton.textContent = 'Delete';
deleteButton.className = 'delete';
if(clipName === null) {
clipLabel.textContent = 'My unnamed clip';
} else {
clipLabel.textContent = clipName;
}
clipContainer.appendChild(audio);
clipContainer.appendChild(clipLabel);
clipContainer.appendChild(deleteButton);
soundClips.appendChild(clipContainer);
audio.controls = true;
const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
chunks = [];
const audioURL = window.URL.createObjectURL(blob);
audio.src = audioURL;
console.log("recorder stopped");
deleteButton.onclick = function(e) {
e.target.closest(".clip").remove();
}
clipLabel.onclick = function() {
const existingName = clipLabel.textContent;
const newClipName = prompt('Enter a new name for your sound clip?');
if(newClipName === null) {
clipLabel.textContent = existingName;
} else {
clipLabel.textContent = newClipName;
}
}
}
mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
}
let onError = function(err) {
console.log('The following error occured: ' + err);
}
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
console.log('getUserMedia not supported on your browser!');
}
websocket part (client side):
window.addEventListener("DOMContentLoaded", () => {
// Open the WebSocket connection and register event handlers.
console.log('DOMContentLoaded done');
const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop
dataToBeSent = function (data) {
ws.send(data);
};
console.log('ws is defined');
})
Right now I have just stacked both parts on top of each other, but this doesn't work, since, as I found out, you can only use variables (such as ws) within the block they are defined in. This leads to an error saying that ws is not defined when I call the sending function within the if statement.
I have already looked for tutorials for hours, but none of the ones I found covered this topic. I also tried moving the WebSocket part into the if statement, but that also didn't work, at least not in the way that I tried.
I feel like my problem lies in understanding how to define the WebSocket so I can call it within the if statement, or in figuring out a way to get the audio to somewhere where ws is considered defined. Unfortunately I just can't get past it and have already invested days, which has become really frustrating.
I appreciate any help. If you have any ideas what I could change or move in the code, or know of any tutorial that could help, I'd be really grateful.
Thanks in advance!
You don't need that window.addEventListener("DOMContentLoaded", () => { ... }) part:
const ws = new WebSocket("ws://localhost:8001/"); // temp moved to mediarecorder.onstop
dataToBeSent = function (data) {
ws.send(data);
};
const record = document.querySelector(".record");
const stop = document.querySelector(".stop");
const soundClips = document.querySelector(".sound-clips");
const canvas = document.querySelector(".visualizer");
const mainSection = document.querySelector(".main-controls");
// disable stop button while not recording
stop.disabled = true;
// visualiser setup - create web audio api context and canvas
let audioCtx;
const canvasCtx = canvas.getContext("2d");
//main block for doing the audio recording
if (navigator.mediaDevices.getUserMedia) {
console.log("getUserMedia supported.");
const constraints = { audio: true };
let chunks = [];
let onSuccess = function (stream) {
const mediaRecorder = new MediaRecorder(stream);
visualize(stream);
record.onclick = function () {
mediaRecorder.start();
console.log(mediaRecorder.state);
console.log("recorder started");
record.style.background = "red";
stop.disabled = false;
record.disabled = true;
};
stop.onclick = function () {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
// mediaRecorder.requestData();
stop.disabled = true;
record.disabled = false;
};
mediaRecorder.onstop = function (e) {
console.log("data available after MediaRecorder.stop() called.");
const clipName = prompt(
"Enter a name for your sound clip?",
"My unnamed clip"
);
const clipContainer = document.createElement("article");
const clipLabel = document.createElement("p");
const audio = document.createElement("audio");
const deleteButton = document.createElement("button");
clipContainer.classList.add("clip");
audio.setAttribute("controls", "");
deleteButton.textContent = "Delete";
deleteButton.className = "delete";
if (clipName === null) {
clipLabel.textContent = "My unnamed clip";
} else {
clipLabel.textContent = clipName;
}
clipContainer.appendChild(audio);
clipContainer.appendChild(clipLabel);
clipContainer.appendChild(deleteButton);
soundClips.appendChild(clipContainer);
audio.controls = true;
const blob = new Blob(chunks, { type: "audio/ogg; codecs=opus" });
chunks = [];
const audioURL = window.URL.createObjectURL(blob);
audio.src = audioURL;
console.log("recorder stopped");
deleteButton.onclick = function (e) {
e.target.closest(".clip").remove();
};
clipLabel.onclick = function () {
const existingName = clipLabel.textContent;
const newClipName = prompt("Enter a new name for your sound clip?");
if (newClipName === null) {
clipLabel.textContent = existingName;
} else {
clipLabel.textContent = newClipName;
}
};
};
mediaRecorder.ondataavailable = function (e) {
chunks.push(e.data);
};
};
let onError = function (err) {
console.log("The following error occured: " + err);
};
navigator.mediaDevices.getUserMedia(constraints).then(onSuccess, onError);
} else {
console.log("getUserMedia not supported on your browser!");
}
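The snippet above defines dataToBeSent but never wires it to the recorder; one place to actually stream the audio out (an assumption, since the server protocol isn't specified) is the dataavailable handler:
mediaRecorder.ondataavailable = function (e) {
    chunks.push(e.data); // keep the local clip feature working
    if (ws.readyState === WebSocket.OPEN) {
        ws.send(e.data); // Blob payloads are valid WebSocket data
    }
};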
I can record audio to an ogg file in Electron and Chrome by creating a blob this way:
const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
The full example code is this:
if (navigator.mediaDevices) {
console.log('getUserMedia supported.');
const constraints = { audio: true };
let chunks = [];
navigator.mediaDevices.getUserMedia(constraints)
.then(function(stream) {
const mediaRecorder = new MediaRecorder(stream);
visualize(stream);
record.onclick = function() {
mediaRecorder.start();
console.log(mediaRecorder.state);
console.log("recorder started");
record.style.background = "red";
record.style.color = "black";
}
stop.onclick = function() {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
}
mediaRecorder.onstop = function(e) {
console.log("data available after MediaRecorder.stop() called.");
const clipName = prompt('Enter a name for your sound clip');
const clipContainer = document.createElement('article');
const clipLabel = document.createElement('p');
const audio = document.createElement('audio');
const deleteButton = document.createElement('button');
clipContainer.classList.add('clip');
audio.setAttribute('controls', '');
deleteButton.innerHTML = "Delete";
clipLabel.innerHTML = clipName;
clipContainer.appendChild(audio);
clipContainer.appendChild(clipLabel);
clipContainer.appendChild(deleteButton);
soundClips.appendChild(clipContainer);
audio.controls = true;
const blob = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
chunks = [];
const audioURL = URL.createObjectURL(blob);
audio.src = audioURL;
console.log("recorder stopped");
deleteButton.onclick = function(e) {
const evtTgt = e.target;
evtTgt.parentNode.parentNode.removeChild(evtTgt.parentNode);
}
}
mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
})
.catch(function(err) {
console.log('The following error occurred: ' + err);
})
}
I want to create a flac file, so I tried
const blob = new Blob(chunks, { 'type': 'audio/flac; codecs=flac' });
When I check the output file type using the Linux file command, I get WebM in both cases.
How can I get the output file in flac format?
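The type passed to the Blob constructor only labels the blob; it does not change what MediaRecorder actually encoded. A quick diagnostic sketch (not a flac fix) to see which containers the browser can really record:
// Ask the browser which MIME types MediaRecorder can actually produce.
[
    'audio/flac',
    'audio/ogg; codecs=opus',
    'audio/webm; codecs=opus',
    'audio/mp4'
].forEach(function (type) {
    console.log(type, MediaRecorder.isTypeSupported(type));
});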
I need some help. I have JavaScript code on my site that records videos and saves them on the server. The format in which the videos are saved depends on the browser I am using: if I use Opera or Chrome, the videos are saved in MKV format; if I use Firefox instead, they are saved in WebM format. The problem is that Firefox cannot read the videos I save using Opera and Chrome (i.e. MKV), while Opera and Chrome read the WebM videos generated with Firefox without problems.
How can I make Opera and Chrome also save videos in WebM so I no longer have the problem?
JavaScript code:
let constraintObj = {
audio: true,
video: {
facingMode: "user",
width: { min: 640, ideal: 1280, max: 1920 },
height: { min: 480, ideal: 720, max: 1080 }
}
};
// width: 1280, height: 720 -- preference only
// facingMode: {exact: "user"}
// facingMode: "environment"
//handle older browsers that might implement getUserMedia in some way
if (navigator.mediaDevices === undefined) {
navigator.mediaDevices = {};
navigator.mediaDevices.getUserMedia = function(constraintObj) {
let getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
if (!getUserMedia) {
return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
}
return new Promise(function(resolve, reject) {
getUserMedia.call(navigator, constraintObj, resolve, reject);
});
}
}else{
navigator.mediaDevices.enumerateDevices()
.then(devices => {
devices.forEach(device=>{
console.log(device.kind.toUpperCase(), device.label);
//, device.deviceId
})
})
.catch(err=>{
console.log(err.name, err.message);
})
}
navigator.mediaDevices.getUserMedia(constraintObj)
.then(function(mediaStreamObj) {
//connect the media stream to the first video element
let video = document.querySelector('video');
if ("srcObject" in video) {
video.srcObject = mediaStreamObj;
} else {
//old version
video.src = window.URL.createObjectURL(mediaStreamObj);
}
video.onloadedmetadata = function(ev) {
//show in the video element what is being captured by the webcam
video.play();
};
//add listeners for saving video/audio
let start = document.getElementById('btnStart');
let stop = document.getElementById('btnStop');
let save = document.getElementById('btnSave');
let vidSave = document.getElementById('vid2');
let mediaRecorder = new MediaRecorder(mediaStreamObj);
let chunks = [];
var blob = null;
document.getElementById("vid2").style.visibility = "hidden";
document.getElementById("btnSave").style.visibility = "hidden";
var contatore=0;
var dontstop=0;
save.addEventListener('click', (ev)=>{
contatore = 0;
const formData = new FormData();
formData.append('video', blob);
fetch('videoRec', {
method: 'POST',
body: formData
})
.then(response => { console.log('upload success, ');
document.getElementById('response').innerHTML="Video salvato con successo, ";
})
.catch(error => {console.log('error');
document.getElementById('response').innerHTML="Errore durante il caricamento del video. Riprova.";
})
});
start.addEventListener('click', (ev)=>{
dontstop=1;
//if user already started video before, ask him if want to save that video before
//recording another video
if(contatore!=0){
var domanda=confirm("Il video appena registrato andrà eliminato. Vuoi Procedere?");
if (domanda === false) {
return;
}
}
//when user's recording, vid2 must be hidden
if(document.getElementById("vid2").style.visibility=="visible"){
document.getElementById("vid2").style.visibility="hidden";
document.getElementById("btnSave").style.visibility="hidden";
document.getElementById('response').innerHTML=" ";
}
//start recording
mediaRecorder.start();
console.log(mediaRecorder.state);
//disable start button and enable stop button
document.getElementById('btnStart').disabled=true;
document.getElementById('btnStop').disabled=false;
//contatore increments when user records, so if he starts another rec without stopping before
//ask him if want to save the last video
contatore++;
var timeleft = 5; //video must be 120 seconds
downloadTimer = setInterval(function(){
if(timeleft <= 0){
//after 120 seconds record will stops automatically
clearInterval(downloadTimer); //the countdown disappared
document.getElementById("countdown").innerHTML = "Finished";
mediaRecorder.stop();
console.log(mediaRecorder.state);
document.getElementById('btnStart').disabled=false;
document.getElementById('btnStop').disabled=true;
// chunks are collected by the mediaRecorder.ondataavailable handler below
// vid2 appears
document.getElementById("vid2").style.visibility = "visible";
document.getElementById("btnSave").style.visibility = "visible";
blob = new Blob(chunks, { 'type' : 'video/webm;' });
chunks = [];
let videoURL = window.URL.createObjectURL(blob);
vidSave.src = videoURL;
} else {
//else countdown continues
document.getElementById("countdown").innerHTML = timeleft + " seconds remaining";
}
timeleft -= 1;
}, 1000)
})
stop.addEventListener('click', (ev)=>{
if(dontstop!=1){
return;
}
mediaRecorder.stop();
console.log(mediaRecorder.state);
document.getElementById('btnStart').disabled=false;
document.getElementById('btnStop').disabled=true;
clearInterval(downloadTimer);
document.getElementById("countdown").innerHTML = "Finished";
document.getElementById("vid2").style.visibility = "visible";
document.getElementById("btnSave").style.visibility = "visible";
});
mediaRecorder.ondataavailable = function(ev) {
chunks.push(ev.data);
}
mediaRecorder.onstop = (ev)=>{
blob = new Blob(chunks, { 'type' : 'video/webm;' });
chunks = [];
let videoURL = window.URL.createObjectURL(blob);
vidSave.src = videoURL;
}
})
.catch(function(err) {
console.log(err.name, err.message);
});
Thank you very much!
Gennaro
You can try to fix the MIME type to video/webm;codecs=vp8; it should work in any browser supporting MediaRecorder.
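For example, a minimal sketch (mediaStreamObj is the stream variable from the question's code):
// Force a WebM container with VP8 so Chrome, Opera and Firefox
// all produce the same format.
var options = { mimeType: 'video/webm;codecs=vp8' };
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
    options = {}; // fall back to the browser default
}
let mediaRecorder = new MediaRecorder(mediaStreamObj, options);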
This is the code:
var mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
var mediaRecorder;
var recordedBlobs;
var sourceBuffer;
var socket = io();
var recordedVideo = document.querySelector('video#recorded');
var gumVideo = document.querySelector('video#gum');
var translateButton = document.querySelector('button#record');
translateButton.onclick = startTranslate;
recordedVideo.src = window.URL.createObjectURL(mediaSource);
socket.on('video', function (data) {
sourceBuffer.appendBuffer(data);
});
navigator.mediaDevices.getUserMedia(constraints)
.then(handleSuccess).catch(handleError);
function handleSourceOpen(event) {
console.log('MediaSource opened');
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
sourceBuffer.onupdate = function(){
console.log("updating");
};
console.log('Source buffer: ', sourceBuffer);
}
function handleSuccess(stream) {
console.log('getUserMedia() got stream: ', stream);
window.stream = stream;
if (window.URL) {
gumVideo.src = window.URL.createObjectURL(stream);
} else {
gumVideo.src = stream;
}
}
function startTranslate() {
recordedBlobs = [];
var options = {mimeType: 'video/webm;codecs=vp9'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm;codecs=vp8'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: 'video/webm'};
if (!MediaRecorder.isTypeSupported(options.mimeType)) {
console.log(options.mimeType + ' is not Supported');
options = {mimeType: ''};
}
}
}
try {
mediaRecorder = new MediaRecorder(window.stream, options);
}
catch (e) {
console.error('Exception while creating MediaRecorder: ' + e);
alert('Exception while creating MediaRecorder: '+ e +'.mimeType: ' + options.mimeType);
return;
}
console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
recordButton.textContent = 'Stop Recording';
playButton.disabled = true;
downloadButton.disabled = true;
mediaRecorder.onstop = handleStop;
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start(10); // collect 10ms of data
console.log('MediaRecorder started', mediaRecorder);
}
function handleDataAvailable(event) {
if (event.data && event.data.size > 0) {
socket.emit('video',event.data);
}
}
When I click the translate button, after 2-3 seconds I get an error like:
Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer
has been removed from the parent media source
What does this error mean? Or is it a bug in WebRTC?
I'm having an issue getting a captured blob from the MediaRecorder API to play back in Chrome (it works in Firefox). I'm not sure if it's a bug in Chrome.
The error it reports:
undefined:1 Uncaught (in promise) DOMException: Unable to decode audio data
window.AudioContext = window.AudioContext || window.webkitAudioContext;
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
var context = new AudioContext();
var record = document.querySelector('#record');
var stop = document.querySelector('#stop');
if (navigator.getUserMedia) {
console.log('getUserMedia supported.');
var constraints = {
audio: true
};
var chunks = [];
var onSuccess = function(stream) {
var mediaRecorder = new MediaRecorder(stream);
record.onclick = function() {
mediaRecorder.start();
console.log(mediaRecorder.state);
console.log("recorder started");
record.style.background = "red";
stop.disabled = false;
record.disabled = true;
}
stop.onclick = function() {
mediaRecorder.stop();
console.log(mediaRecorder.state);
console.log("recorder stopped");
record.style.background = "";
record.style.color = "";
stop.disabled = true;
record.disabled = false;
}
mediaRecorder.onstop = function(e) {
console.log("onstop() called.", e);
var blob = new Blob(chunks, {
'type': 'audio/wav'
});
chunks = [];
var reader = new FileReader();
reader.addEventListener("loadend", function() {
context.decodeAudioData(reader.result, function(buffer) {
playsound(buffer);
},
function(e) {
console.log("error ", e)
});
});
reader.readAsArrayBuffer(blob);
}
mediaRecorder.ondataavailable = function(e) {
chunks.push(e.data);
}
}
var onError = function(err) {
console.log('The following error occured: ' + err);
}
navigator.getUserMedia(constraints, onSuccess, onError);
} else {
console.log('getUserMedia not supported on your browser!');
}
function playsound(thisbuffer) {
var source = context.createBufferSource();
source.buffer = thisbuffer;
source.connect(context.destination);
source.start(0);
}
<button id="record">record</button>
<button id="stop">stop</button>
I have used your code exactly the way it is, and everything works fine in the Chrome browser.
This issue was fixed when bug https://codereview.chromium.org/1579693006/ was closed and added to the Chrome pipeline.
This is no longer an issue.
To close the loop on this, I suspect this was due to the Chrome bug documented in a comment above. It appears this bug was fixed several years ago and should no longer be a problem as WebAudio now uses ffmpeg for decoding.