Using MediaSource for video with MediaRecorder - javascript

I'm trying to broadcast video from my webcam in JavaScript. I'm using getUserMedia to obtain a MediaStream from the webcam, MediaRecorder to record that stream in chunks (which would be transmitted to the server), and MediaSource to reassemble those chunks and play them seamlessly in a video element called watchVideo in the source below.
It all works perfectly when I'm capturing only video, i.e. constraints = { video: true }; but if I add audio, watchVideo doesn't display anything, and the console shows me the following error:
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
This is the relevant part of the code:
var mime = 'video/webm; codecs=vp8';

if (navigator.mediaDevices) {
    constraints = { video: true, audio: true };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(handleUserMedia)
        .catch(err => {
            console.log("ERROR: " + err);
        });
}

function handleUserMedia(stream) {
    source = new MediaSource();
    watchVideo.src = window.URL.createObjectURL(source);
    source.onsourceopen = () => {
        buffer = source.addSourceBuffer(mime);
    };

    var options = { mimeType: mime };
    mediaRecorder = new MediaRecorder(stream, options);
    mediaRecorder.ondataavailable = handleDataAvailable;
}

function handleDataAvailable(evt) {
    var filereader = new FileReader();
    filereader.onload = () => {
        buffer.appendBuffer(filereader.result);
    };
    filereader.readAsArrayBuffer(evt.data);
}

I came across this question and it actually helped me more than many answers related to this topic. I don't know if you are still interested in the answer, but I have tried
mime = 'video/webm; codecs="vp9,opus"';
and it worked fine with audio and video. I hope this answer helps you.
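For anyone hitting the same error, here is a minimal sketch of picking a mime string the browser can both record and play back before wiring it into MediaRecorder and addSourceBuffer. The candidate list is an illustrative assumption, not part of the original answer:

// Hypothetical helper: choose the first mime type supported for recording and for MSE playback.
const candidates = [
    'video/webm; codecs="vp9,opus"',
    'video/webm; codecs="vp8,opus"',
    'video/webm'
];

const mime = candidates.find(type =>
    MediaRecorder.isTypeSupported(type) && MediaSource.isTypeSupported(type)
);

if (!mime) {
    console.log("No supported webm mime type found in this browser");
}
// Use `mime` for both `new MediaRecorder(stream, { mimeType: mime })`
// and `source.addSourceBuffer(mime)` as in the question's code.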

Related

(Javascript) Microphone and audio from mediastream are out of sync

I wrote a recorder that records the microphone from getUserMedia together with local audio played through Howler.js.
I created a MediaStream destination and connected each source (mic, audio) to that destination.
The audio sounds fine, but the microphone is delayed by about 2 seconds.
I can't figure out the problem. Could you help me out, guys?
var recorder;
const stop = document.getElementsByClassName("stop");
const record = document.getElementsByClassName("record");

let mediaDest = Howler.ctx.createMediaStreamDestination();
Howler.masterGain.connect(mediaDest);

function onRecordingReady(e) {
    // 'e' has 'blob event'
    //var audio = document.getElementById("audio");
    audioBlob = e.data; // e.data has blob.
    //audio.src = URL.createObjectURL(e.data);
}

let audioBlob;
let audioURL = "";

navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
    let userMic = Howler.ctx.createMediaStreamSource(stream);
    userMic.connect(mediaDest);
    Howler.masterGain.connect(mediaDest);

    recorder = new MediaRecorder(mediaDest.stream);
    recorder.addEventListener("dataavailable", onRecordingReady);
    recorder.addEventListener("stop", function () {
        W3Module.convertWebmToMP3(audioBlob).then((mp3blob) => {
            const downloadLink = document.createElement("a");
            downloadLink.href = URL.createObjectURL(mp3blob);
            downloadLink.setAttribute("download", "audio");
            //downloadLink.click();
            var audio = document.getElementById("audio");
            audio.src = URL.createObjectURL(mp3blob);
            console.log(mp3blob);
        });
    });
});

record[0].addEventListener("click", function () {
    recorder.start();
});

stop[0].addEventListener("click", function () {
    recorder.stop();
});
I figured out the solution.
I didn't know I could connect a MediaStreamAudioSourceNode to a GainNode.
If someone is suffering from this issue, just connect one node to the next rather than connecting each node directly to the destination.
I connected the source node to the GainNode, and connected the GainNode to the destination.
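A minimal sketch of the routing described above, reusing the names from the question's code (this is only an illustration of the wiring, not a verified fix):

// Route the mic through Howler's master GainNode instead of connecting
// both sources to the MediaStreamDestination separately.
let mediaDest = Howler.ctx.createMediaStreamDestination();

navigator.mediaDevices.getUserMedia({ audio: true }).then(function (stream) {
    let userMic = Howler.ctx.createMediaStreamSource(stream);
    userMic.connect(Howler.masterGain);     // source node -> GainNode
    Howler.masterGain.connect(mediaDest);   // GainNode -> destination
    recorder = new MediaRecorder(mediaDest.stream);
});

Note that routing the mic through masterGain also sends it to the speakers, which is the live playback problem mentioned in the edit below.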
=========================
It was not the solution...
The GainNode plays back in real time whenever input is present, so even if I could remove the latency, there is annoying live playback.

Audio recording in JavaScript on Chrome, always sends video/ogg to the server

I have been trying to record audio in OGG format on Chrome and send it back to the server, but it always arrives there as video/ogg. Here is what I have:
Capturing audio:
let chunks = [];
let recording = null;

let mediaRecorder = new MediaRecorder(stream);
mediaRecorder.start();

mediaRecorder.onstop = function() {
    recording = new Blob(chunks, { 'type' : 'audio/ogg; codecs=opus' });
};

mediaRecorder.ondataavailable = function(e) {
    chunks.push(e.data);
};
Sending it to the server:
let data = new FormData();
data.append('audio', recording);
jQuery.ajax(...);
The blob gets to the backend, but always in video/ogg!
I ended up using kbumsik/opus-media-recorder, which solved the issue for me. It's a drop-in replacement for MediaRecorder.
You need to remove the VideoTrack from your MediaStream:
const input = document.querySelector("video");
const stop_btn = document.querySelector("button");

input.onplaying = (evt) => {
    input.onplaying = null;
    console.clear();
    const stream = input.captureStream ? input.captureStream() : input.mozCaptureStream();
    // get all video tracks (usually a single one)
    stream.getVideoTracks().forEach((track) => {
        track.stop(); // stop that track, so the browser doesn't feed it for nothing
        stream.removeTrack(track); // remove it from the MediaStream
    });
    const data = [];
    const recorder = new MediaRecorder(stream, { mimeType: "audio/webm" });
    recorder.ondataavailable = (evt) => data.push(evt.data);
    recorder.onstop = (evt) => exportFile(new Blob(data));
    stop_btn.onclick = (evt) => recorder.stop();
    stop_btn.disabled = false;
    recorder.start();
};
console.log("play the video to start recording");

function exportFile(blob) {
    stop_btn.remove();
    input.src = URL.createObjectURL(blob);
    console.log("video element now playing recoded file");
}
video { max-height: 150px; }
<video src="https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm" controls crossorigin></video>
<button disabled>stop recording</button>
And since StackOverflow's null-origined iframes don't allow for safe download links, here is a fiddle with a download link.
You need to set the mimeType of the MediaRecorder. Otherwise the browser will pick whatever format it likes best to encode the media.
let mediaRecorder = new MediaRecorder(stream, { mimeType: 'my/mimetype' });
To be sure that the browser can actually encode the format you want you could use isTypeSupported().
console.log(MediaRecorder.isTypeSupported('my/mimetype'));
Chrome, for example, doesn't support "audio/ogg; codecs=opus" but does support "audio/webm; codecs=opus". Firefox supports both. Safari supports neither.
Once you've configured the MediaRecorder you can use its mimeType when creating the blob.
recording = new Blob(chunks, { 'type' : mediaRecorder.mimeType });
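Putting those pieces together, a minimal sketch of the recording side (the candidate list and variable names are illustrative assumptions):

// Pick an Opus mime type the current browser can actually record.
const candidates = ['audio/ogg; codecs=opus', 'audio/webm; codecs=opus'];
const mimeType = candidates.find(t => MediaRecorder.isTypeSupported(t));
if (!mimeType) {
    console.log('No Opus recording support in this browser');
}

let chunks = [];
let mediaRecorder = new MediaRecorder(stream, mimeType ? { mimeType } : {});
mediaRecorder.ondataavailable = (e) => chunks.push(e.data);
mediaRecorder.onstop = () => {
    // Tag the blob with whatever container/codec the recorder actually used.
    const recording = new Blob(chunks, { type: mediaRecorder.mimeType });
    const data = new FormData();
    data.append('audio', recording);
    // jQuery.ajax(...) as in the question
};
mediaRecorder.start();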

Not able to create seekable video blobs from mediarecorder using EBML.js - MediaRecorder API - Chrome

Using MediaRecorder, I am able to upload and append the video blobs on Azure, but the combined video is not seekable when downloaded. Here is the code:
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        appendBlockToAzure(chunks);
    }
};

mediaRecorder.start(10000);
I tried using EBML.js; with the following code I do get a seekable video file. However, this approach needs the whole file to be processed at the end, so the final file could be around 1 GB in size, which will take a very long time to upload.
var chunks = [];
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        if (mediaRecorder.state == "inactive") { // if the media recorder is stopped
            var combined = new Blob(chunks, { type: event.data.type });
            getSeekableBlob(combined, function (seekableBlob) {
                saveCombinedVideoToAzure(seekableBlob);
            });
        }
    }
};

mediaRecorder.start(10000);
That's the reason I want to upload to Azure simultaneously. If I use the following code, it logs unknown-tag warnings and then a length error, and the resulting video file is not playable.
var seekablechunks = [];
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        getSeekableBlob(event.data, function (seekableBlob) {
            seekablechunks.push(seekableBlob);
            saveCombinedVideoToAzure(seekablechunks);
        });
    }
};

mediaRecorder.start(10000);
Function 'getSeekableBlob':
function getSeekableBlob(inputBlob, callback) {
    // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
    if (typeof EBML === 'undefined') {
        throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
    }
    var reader = new EBML.Reader();
    var decoder = new EBML.Decoder();
    var tools = EBML.tools;

    var fileReader = new FileReader();
    fileReader.onload = function (e) {
        var ebmlElms = decoder.decode(this.result);
        ebmlElms.forEach(function (element) {
            reader.read(element);
        });
        reader.stop();
        var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
        var body = this.result.slice(reader.metadataSize);
        var newBlob = new Blob([refinedMetadataBuf, body], {
            type: 'video/webm'
        });
        callback(newBlob);
    };
    fileReader.readAsArrayBuffer(inputBlob);
}
Is there a way to get seekable blobs and upload them to azure?
It's a challenge for an open-ended streaming source for media (for example MediaRecorder) to create a file with SeekHead elements in it. The Seek elements in a SeekHead element contain byte offsets to elements in the file.
MediaRecorder doesn't create segments or SeekHead elements as you have discovered. To do so it would need to be able to see the future to know how big future compressed video and audio elements will be in the file.
A good way for you to handle this problem might be to post-process your uploaded files on a server. You can use ts-ebml to do this in a streaming fashion once a file is completely uploaded.
It's possible, I suppose, to create JavaScript software in your browser that transforms the stream of data emitted by MediaRecorder so it's seekable, on the fly. To make your stream seekable you'd need to insert SeekHead elements every so often. You'd buffer up multiple seconds of the stream, locate the Cluster elements in each buffer, then write a SeekHead element pointing to some of them. (Chrome's MediaRecorder outputs Clusters beginning with video key frames.) If you succeed in doing this you'll know a lot about Matroska / webm.
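For the server-side route, a rough Node sketch using the same Reader/Decoder/tools calls as the question's getSeekableBlob (the file names and the exact import shape are assumptions):

// Hypothetical post-processing step on the server after the upload finishes.
const fs = require('fs');
const { Reader, Decoder, tools } = require('ts-ebml');

function makeSeekable(inputPath, outputPath) {
    const buf = fs.readFileSync(inputPath);
    // ts-ebml works on ArrayBuffers, so slice out the Buffer's underlying bytes.
    const arrayBuffer = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);

    const reader = new Reader();
    const decoder = new Decoder();
    decoder.decode(arrayBuffer).forEach((element) => reader.read(element));
    reader.stop();

    const refinedMetadata = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
    const body = arrayBuffer.slice(reader.metadataSize);

    fs.writeFileSync(outputPath, Buffer.concat([
        Buffer.from(refinedMetadata),
        Buffer.from(body)
    ]));
}

makeSeekable('uploaded.webm', 'seekable.webm');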
Suddenly, our face-on-camera webcam recorder component stopped saving the webm blob.
In the console there were warnings about {EBML_ID: "55b0", type: "unknown", ...} during reader.read(element), and then
"Uncaught (in promise) Error: No schema entry found for unknown" from EBMLEncoder.js at the tools.makeMetadataSeekable(...) call.
Ignoring unknown elements from the decoder worked around the issue:
...
var ebmlElms = decoder.decode(this.result);
ebmlElms.forEach(function (element) {
    if (element.type !== 'unknown') {
        reader.read(element);
    }
});
reader.stop();
...
Related issue on the ts-ebml npm package, with a similar workaround: https://github.com/legokichi/ts-ebml/issues/33

Javascript | MediaRecorder API - Recorded video file on Desktop does not play in Mobile

I am trying to develop a Canva-like Insta story creator using Canvas and MediaRecorder.
The app works perfectly in a desktop browser: I am able to download the file and play it on the desktop. However, when I send that file to my mobile, it doesn't play (even on Insta). I figure this is an issue with codecs, but I don't know how to solve it.
This is the function that handles the MediaRecorder API.
Is there any mime type I can use that is universal and will play on any device?
initRecorder() {
    var dl = document.querySelector("#dl");
    let videoStream = this.canvas.captureStream(60);
    if (this.isAudioPresent) {
        videoStream.addTrack(this.audioStream.getAudioTracks()[0]);
    }
    let mediaRecorder = new MediaRecorder(videoStream, {
        videoBitsPerSecond: 2500000,
        mimeType: 'video/webm'
    });
    let chunks = [];
    mediaRecorder.onstop = function(e) {
        var blob = new Blob(chunks, { 'type': 'video/webm' });
        chunks = [];
        var videoURL = URL.createObjectURL(blob);
        dl.href = videoURL;
    };
    mediaRecorder.ondataavailable = function(e) {
        e.data.size && chunks.push(e.data);
    };
    mediaRecorder.start();
    setTimeout(function() { mediaRecorder.stop(); }, this.storytime);
}
Figured this out: different browsers produce different containers/codecs, and Insta only accepts MP4. Hence, you need to either use a transcoder on the frontend (ffmpeg.js or the wasm version of ffmpeg) or send your data to the backend and handle it there (which is what I ended up doing).
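If you go the frontend route, here is a minimal sketch with ffmpeg.wasm. It assumes the @ffmpeg/ffmpeg 0.x API, and the output codec flags are illustrative:

// Hypothetical frontend transcoding step: webm -> MP4 (H.264 + AAC) in the browser.
import { createFFmpeg, fetchFile } from '@ffmpeg/ffmpeg';

const ffmpeg = createFFmpeg({ log: true });

async function webmToMp4(webmBlob) {
    if (!ffmpeg.isLoaded()) {
        await ffmpeg.load();
    }
    ffmpeg.FS('writeFile', 'in.webm', await fetchFile(webmBlob));
    // Re-encode so mobile players and Instagram accept the file.
    await ffmpeg.run('-i', 'in.webm', '-c:v', 'libx264', '-c:a', 'aac', 'out.mp4');
    const data = ffmpeg.FS('readFile', 'out.mp4');
    return new Blob([data.buffer], { type: 'video/mp4' });
}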

JavaScript MediaSource Example

I'm trying to create a video player using MediaSource, but I can't make it play while it is still buffering new data. I have this code, which downloads the full data and then plays it.
var vidElement = document.querySelector('video');

if (window.MediaSource) {
    var mediaSource = new MediaSource();
    vidElement.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
    console.log("The Media Source Extensions API is not supported.");
}

function sourceOpen(e) {
    URL.revokeObjectURL(vidElement.src);
    var mime = 'video/webm; codecs="opus, vp09.00.10.08"';
    var mediaSource = e.target;
    var sourceBuffer = mediaSource.addSourceBuffer(mime);
    var videoUrl = 'droid.webm';
    fetch(videoUrl)
        .then(function(response) {
            return response.arrayBuffer();
        })
        .then(function(arrayBuffer) {
            sourceBuffer.addEventListener('updateend', function(e) {
                if (!sourceBuffer.updating && mediaSource.readyState === 'open') {
                    mediaSource.endOfStream();
                }
            });
            sourceBuffer.appendBuffer(arrayBuffer);
        });
}
This code is not working on localhost: the MediaSource ends up closed and this line never gets called:
mediaSource.endOfStream();
Can anyone tell me why the state is closed, please?
Any help with creating a player like YouTube, or pointers to open source examples, would be appreciated.
By the way, I have tried a lot of code samples and sources over the last 2 days, and it's always the MediaSource giving me errors such as the source being removed or not linked.
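For reference, a minimal sketch of appending chunks as they arrive instead of waiting for the whole download. It reuses the question's mime string and file name; the queueing pattern around updateend is an assumption, not from the original post:

// Append each fetched chunk to the SourceBuffer as it arrives, one at a time.
const vid = document.querySelector('video');
const mime = 'video/webm; codecs="opus, vp09.00.10.08"';
const mediaSource = new MediaSource();
vid.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', async () => {
    const sourceBuffer = mediaSource.addSourceBuffer(mime);
    const response = await fetch('droid.webm');
    const reader = response.body.getReader();

    // Wait for the previous append to finish before starting the next one.
    const appendChunk = (chunk) => new Promise((resolve) => {
        sourceBuffer.addEventListener('updateend', resolve, { once: true });
        sourceBuffer.appendBuffer(chunk);
    });

    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        await appendChunk(value);
    }
    mediaSource.endOfStream();
});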
For starters, on your first line you have a collection...
var V=document.querySelector('video');
Shouldn't that be...
var V=document.querySelector('video')[0];
Before you start operating on it?
