My application receives some video chunks from a ServerSentEvent (SSE) and, using MediaStream API, it should append them in a buffer and visualize them into an HTML5 video tag.
The problem is MediaSource API, that stops working when the program tries to append a chunk to the mediaStream buffer.
The error appears when the program tries to append the first chunk.
This is the client-side code:
// Vendor-prefix shims for the MediaSource and URL APIs.
window.MediaSource = window.MediaSource || window.WebKitMediaSource;
window.URL = window.URL || window.webkitURL;

if (!window.MediaSource) {
    alert('MediaSource API is not available');
}

var video = document.getElementById("video");
var mediaSource = new MediaSource();

// Attach the MediaSource to the <video> element via an object URL; source
// buffers may only be added once it fires 'sourceopen'.
video.src = window.URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', function (e) {
    // Receive the video chunks pushed by the server over SSE.
    var source = new EventSource('http://localhost:5000/video');

    // NOTE(review): this addSourceBuffer call was reported to fail; the
    // mime/codec string must match the bytes the server actually sends — verify.
    var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

    source.addEventListener('chunkStreamed', function (e) {
        var chunk = new Blob(JSON.parse(e.data));
        console.log("chunk: ", chunk);

        // A Blob cannot be appended directly; convert it to an ArrayBuffer first.
        var reader = new FileReader();
        reader.onload = function (e) {
            sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
        };
        reader.readAsArrayBuffer(chunk);
    });

    source.addEventListener('endStreaming', function (e) {
        console.log(e.data);
        // mediaSource.endOfStream() is deliberately NOT called here: appends
        // may still be pending and would then raise InvalidStateError.
    });

    source.onopen = function (e) {
        console.log("EventSource open");
    };

    source.onerror = function (e) {
        console.log("error", e);
        source.close();
    };
}, false);
and this is the complete error log:
Uncaught QuotaExceededError: An attempt was made to add something to storage that exceeded the quota.
The problem comes out when the app tries to do sourceBuffer.appendBuffer(new Uint8Array(e.target.result));.
I really can't understand why this error appears. My code is really
like the code of this example
Related
Using media recorder, I am able to upload and append the video blobs on azure. But combined video is not seekable on download with following code -
// Recorded chunks, accumulated in capture order.
var chunks = [];

// BUG FIX: MediaRecorder's second argument must be a MediaRecorderOptions
// dictionary, not a bare mime string — a string is not a valid options
// object, so the requested container/codecs were not being applied.
var mediaRecorder = new MediaRecorder(stream, {
    mimeType: 'video/x-matroska;codecs=vp8,opus'
});

// Collect every non-empty chunk and upload the running list to Azure.
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        appendBlockToAzure(chunks);
    }
};

// Emit a dataavailable event every 10 seconds.
mediaRecorder.start(10000);
I tried using EBML.js, if I use the following code then I get the seekable video file. This approach needs the file to be processed at the end. Therefore, final file could be of 1GB in size which will take very long time to upload.
// Recorded chunks, accumulated in capture order.
var chunks = [];

// BUG FIX: MediaRecorder's second argument must be a MediaRecorderOptions
// dictionary ({ mimeType: ... }), not a bare mime string.
var mediaRecorder = new MediaRecorder(stream, {
    mimeType: 'video/x-matroska;codecs=vp8,opus'
});

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        chunks.push(event.data);
        // Post-process only once recording has stopped: the complete file is
        // required before its metadata can be rewritten to be seekable.
        if (mediaRecorder.state == "inactive") { //if media recorder is stopped
            var combined = new Blob(chunks, { type: event.data.type });
            getSeekableBlob(combined, function (seekableBlob) {
                saveCombinedVideoToAzure(seekableBlob);
            });
        }
    }
};

// Emit a dataavailable event every 10 seconds.
mediaRecorder.start(10000);
That's the reason I want to upload simultaneously to the azure. If I use the following code, then it logs unknown tag warnings and then length error. Also, the video file is not playable.
// Per-chunk "seekable" blobs, accumulated in capture order.
var seekablechunks = [];

// BUG FIX: MediaRecorder's second argument must be a MediaRecorderOptions
// dictionary ({ mimeType: ... }), not a bare mime string.
var mediaRecorder = new MediaRecorder(stream, {
    mimeType: 'video/x-matroska;codecs=vp8,opus'
});

// NOTE(review): only the FIRST chunk contains the EBML/webm header; later
// chunks are bare clusters, which is why getSeekableBlob logs unknown-tag
// warnings on them. Per-chunk post-processing cannot work as written.
mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        getSeekableBlob(event.data, function (seekableBlob) {
            seekablechunks.push(seekableBlob);
            saveCombinedVideoToAzure(seekablechunks);
        });
    }
};

// Emit a dataavailable event every 10 seconds.
mediaRecorder.start(10000);
Function 'getSeekableBlob':
/**
 * Rewrites a recorded webm/matroska blob so its metadata contains the
 * SeekHead/Cues needed for seeking, then passes the result to `callback`.
 *
 * @param {Blob} inputBlob - the (non-seekable) recording to fix up.
 * @param {function(Blob)} callback - receives the seekable 'video/webm' blob.
 * @throws {Error} when the EBML.js library has not been loaded.
 */
function getSeekableBlob(inputBlob, callback) {
    // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
    if (typeof EBML === 'undefined') {
        // BUG FIX: the URL previously contained a stray space.
        throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
    }
    var reader = new EBML.Reader();
    var decoder = new EBML.Decoder();
    var tools = EBML.tools;
    var fileReader = new FileReader();
    fileReader.onload = function (e) {
        var ebmlElms = decoder.decode(this.result);
        ebmlElms.forEach(function (element) {
            // BUG FIX: skip elements the decoder could not identify; feeding
            // them to the reader makes makeMetadataSeekable() fail with
            // "No schema entry found for unknown" (ts-ebml issue #33).
            if (element.type !== 'unknown') {
                reader.read(element);
            }
        });
        reader.stop();
        // Rebuild the metadata with seek information, then splice it back
        // onto the original media body.
        var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
        var body = this.result.slice(reader.metadataSize);
        var newBlob = new Blob([refinedMetadataBuf, body], {
            type: 'video/webm'
        });
        callback(newBlob);
    };
    fileReader.readAsArrayBuffer(inputBlob);
}
Is there a way to get seekable blobs and upload them to azure?
It's a challenge for an open-ended streaming source for media (for example MediaRecorder) to create a file with SeekHead elements in it. The Seek elements in a SeekHead element contain byte offsets to elements in the file.
MediaRecorder doesn't create segments or SeekHead elements as you have discovered. To do so it would need to be able to see the future to know how big future compressed video and audio elements will be in the file.
A good way for you to handle this problem might be to post-process your uploaded files on a server. You can use ts-ebml to do this in a streaming fashion on a server when a file is completely uploaded.
It's possible, I suppose, to create Javascript software in your browser that can transform the stream of data emitted by MediaRecorder so it's seekable, on the fly. To make your stream seekable you'd need to insert SeekHead elements every so often. You'd buffer up multiple seconds of the stream, then locate the Cluster elements in each buffer, then write a SeekHead element pointing to some of them. (Chrome's MediaRecorder outputs Clusters beginning with video key frames.) If you succeed in doing this you'll know a lot about Matroska / webm.
Suddenly, our Face on camera web-cam recorder component stopped saving webm blob.
In the console there were warnings about {EBML_ID: "55b0", type: "unknown", ...} during reader.read(element) and then
"Uncaught (in promise) Error: No schema entry found for unknown" in EBMLEncoder.js" at tools.makeMetadataSeekable(...) call.
Ignoring unknown elements from the decoder workarounded the issue:
...
var ebmlElms = decoder.decode(this.result);
ebmlElms.forEach(function (element) {
if (element.type !== 'unknown') {
reader.read(element);
}
});
reader.stop();
...
Related issue on ts-ebml npm package https://github.com/legokichi/ts-ebml/issues/33 with similar workaround
I'm trying to create video player using MediaSource , but I can't make it play while buffering new data. I have this code that downloads the full data then plays it.
var vidElement = document.querySelector('video');

// Media Source Extensions are required; log and bail out when the browser
// does not expose the API.
if (!window.MediaSource) {
    console.log("The Media Source Extensions API is not supported.")
} else {
    var mediaSource = new MediaSource();
    // Attach the source to the <video>; buffers can be added on 'sourceopen'.
    vidElement.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
}
// Invoked once the MediaSource attached to the <video> element is ready.
function sourceOpen(e) {
    // The object URL is no longer needed once the source has opened.
    URL.revokeObjectURL(vidElement.src);

    var ms = e.target;
    var mimeType = 'video/webm; codecs="opus, vp09.00.10.08"';
    var buffer = ms.addSourceBuffer(mimeType);

    fetch('droid.webm')
        .then(function (resp) {
            return resp.arrayBuffer();
        })
        .then(function (bytes) {
            // Close the stream only after this single append has committed.
            buffer.addEventListener('updateend', function (ev) {
                if (!buffer.updating && ms.readyState === 'open') {
                    ms.endOfStream();
                }
            });
            buffer.appendBuffer(bytes);
        });
}
This code is not working on the local host. I get the MediaSource closed and this line never gets called
mediaSource.endOfStream();
Can any one tell me why the state is closed, please?
Any help about creating player like YouTube or any open source.
and by the way I tried a lot of codes and sources for 2 days now, and it's always the MediaSource giving me errors like the source removed, or not linked.
Note that `document.querySelector('video')` already returns a single element (the first match), not a collection, so no `[0]` indexing is needed there — indexing would only apply with `document.querySelectorAll('video')[0]`. The "closed" state therefore has a different cause; check whether the MediaSource is detached before `sourceopen` fires (for example by reassigning `src` or revoking the object URL too early).
I'm trying to broadcast a video from my webcam in javascript. I'm using MediaStream to get the video from my webcam, MediaRecorder to record such video in chunks (which would be transmitted to the server), and MediaSource to assemble these chunks and play them seamlessly in a video container called watchVideo on the source below.
It all works perfectly when i'm capturing only video, i.e. constraints = { video: true } ; but if I add audio, the watchVideo doesn't display anything, and the console shows me the following error:
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
This is the relevant part of the code:
// BUG FIX: the previous mime string ('video/webm; codecs=vp8') declared no
// audio codec, so once audio was captured alongside video the appended data
// no longer matched the SourceBuffer and playback broke. Declaring both a
// video and an audio codec works for combined capture.
var mime = 'video/webm; codecs="vp9,opus"';

if (navigator.mediaDevices) {
    // Capture both tracks from the webcam.
    constraints = { video: true, audio: true };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(handleUserMedia)
        .catch(err => {
            console.log("ERROR: " + err);
        })
}
// Wires the captured stream into a MediaSource (for local playback) and a
// MediaRecorder (to produce the chunks that get transmitted).
function handleUserMedia(stream) {
    source = new MediaSource();
    watchVideo.src = window.URL.createObjectURL(source);

    // A SourceBuffer may only be created after the source has opened.
    source.onsourceopen = () => {
        buffer = source.addSourceBuffer(mime);
    };

    mediaRecorder = new MediaRecorder(stream, { mimeType: mime });
    mediaRecorder.ondataavailable = handleDataAvailable;
}
// Converts each recorded Blob chunk to an ArrayBuffer and appends it to the
// playback SourceBuffer (appendBuffer cannot take a Blob directly).
function handleDataAvailable(evt) {
    var reader = new FileReader();
    reader.onload = () => buffer.appendBuffer(reader.result);
    reader.readAsArrayBuffer(evt.data);
}
I came across the question and it actually helped me more than many answers related to this topic. I don't know if you are still interested in the answer but I have tried
mime = 'video/webm; codecs="vp9,opus"'
and it worked fine with audio and video, I hope this answer will help you
I'm currently editing my mp3 file with multiple effects like so
var mainVerse = document.getElementById('audio1');

// Re-route the audio through the filter chain before it reaches the output:
// source -> filters1[0] -> ... -> filters1[n-1] -> destination.
var s = source;
source.disconnect(audioCtx.destination);

// BUG FIX: iterate with for...of instead of for...in — for...in walks
// enumerable string keys (including anything added to Array.prototype),
// not values, and its order is not guaranteed.
for (const filter of filters1) {
    s.connect(filter);
    s = filter;
}
s.connect(audioCtx.destination);
The mp3 plays accordingly on the web with the filters on it. Is it possible to create and download a new mp3 file with these new effects, using web audio api or any writing to mp3 container javascript library ? If not whats the best to solve this on the web ?
UPDATE - Using OfflineAudioContext
Using the sample code from https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext/oncomplete
I've tried using the offline node like so;
// Live context (for playback) plus an offline context used to render the
// processed audio faster than real time: 2 channels, 40 s at 44.1 kHz.
var audioCtx = new AudioContext();
var offlineCtx = new OfflineAudioContext(2,44100*40,44100);
// NOTE(review): implicit global (no var/let) — presumably intentional so
// other code can reach this source node; verify.
osource = offlineCtx.createBufferSource();
// Fetches 'Song1.mp3', decodes it, renders it through the offline context,
// plays the rendered buffer, and attempts to capture it with Recorder.js.
function getData() {
request = new XMLHttpRequest();
request.open('GET', 'Song1.mp3', true);
// Raw bytes are required by decodeAudioData.
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
myBuffer = buffer;
osource.buffer = myBuffer;
osource.connect(offlineCtx.destination);
osource.start();
//source.loop = true;
offlineCtx.startRendering().then(function(renderedBuffer) {
console.log('Rendering completed successfully');
// Play the rendered result through a fresh live context.
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var song = audioCtx.createBufferSource();
song.buffer = renderedBuffer;
song.connect(audioCtx.destination);
song.start();
// NOTE(review): the Recorder is attached to `song` only after rendering and
// exportWAV runs immediately, before any audio has been captured — this is
// the likely cause of the empty download. Consider creating/connecting the
// Recorder before starting the source.
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
rec.exportWAV(function(e){
rec.clear();
Recorder.forceDownload(e, "filename.wav");
});
}).catch(function(err) {
console.log('Rendering failed: ' + err);
// Note: The promise should reject when startRendering is called a second time on an OfflineAudioContext
});
});
}
request.send();
}
// Run getData to start the process off
getData();
Still getting the recorder to download an empty file; I'm using the song source as the source for the recorder. The song plays and everything with this code, but the recorder doesn't capture it for download.
Use https://github.com/mattdiamond/Recorderjs to record a .wav file. Then use https://github.com/akrennmair/libmp3lame-js to encode it to .mp3.
There's a nifty guide here, if you need a hand: http://audior.ec/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
UPDATE
Try moving
rec = new Recorder(song, {
workerPath: 'Recorderjs/recorderWorker.js'
});
so that it is located above the call to start rendering, and connect it to osource instead, like so:
rec = new Recorder(osource, {
workerPath: 'Recorderjs/recorderWorker.js'
});
osource.connect(offlineCtx.destination);
osource.start();
offlineCtx.startRendering().then(function(renderedBuffer) {
.....
I am trying to read a local file using the FileReader readAsArrayBuffer property.
The read is success and in the "onload" callback, I see the Array Buffer object in reader.result. But the Array Buffer is just empty. The length is set, but not the data. How do I get this data?
Here is my code
<!DOCTYPE html>
<html>
<body>
  <input type="file" id="file" />
</body>
<script>
  // Reads the chosen file into an ArrayBuffer and logs it.
  // (console.log renders an ArrayBuffer as "{}", but the bytes are there —
  // check byteLength or wrap it in a typed array to inspect the contents.)
  function handleFileSelect(evt) {
    var selected = evt.target.files[0]; // first entry of the FileList
    var reader = new FileReader();
    reader.onload = function (e) {
      console.log(e.target.result);
    };
    reader.onerror = function (e) {
      console.log(e);
    };
    reader.readAsArrayBuffer(selected);
  }

  document.getElementById('file').addEventListener('change', handleFileSelect, false);
</script>
</html>
the console output for reader.result
e.target.result
ArrayBuffer {}
e.target.result.byteLength
25312
Can anyone tell me how to get this data?
is there some security issue?
There is no error, the onerror is not executed.
From comments: Can you please let me know how to access the buffer contents? I am actually trying to play an audio file using AudioContext... For that I would need the buffer data...
Here is how to read array buffer and convert it into binary string,
// Reads the selected file and converts its bytes to a binary string, chunked
// so String.fromCharCode.apply never exceeds the engine's argument limit.
function onfilechange(evt) {
    var reader = new FileReader();
    reader.onload = function (evt) {
        var bytes = new Uint8Array(evt.target.result);
        var CHUNK_SIZE = 0x8000; // 32 KiB per fromCharCode call
        var result = '';
        for (var offset = 0; offset < bytes.length; offset += CHUNK_SIZE) {
            var end = Math.min(offset + CHUNK_SIZE, bytes.length);
            result += String.fromCharCode.apply(null, bytes.subarray(offset, end));
        }
        // Here you have file content as Binary String in result var
    };
    reader.readAsArrayBuffer(evt.target.files[0]);
}
If you try to print ArrayBuffer via console.log you always get empty object {}
Well, playing a sound using the AudioContext stuff isn't actually that hard.
Set up the context.
Load any data into the buffer (e.g. FileReader for local files, XHR for remote stuff).
Setup a new source, and start it.
All in all, something like this:
// Shared audio context (prefixed fallback for older WebKit browsers).
var context = new (window.AudioContext || window.webkitAudioContext)();

// Decodes a raw ArrayBuffer of audio data and plays it through the context.
function playsound(raw) {
    console.log("now playing a sound, that starts with", new Uint8Array(raw.slice(0, 10)));
    context.decodeAudioData(raw, function (buffer) {
        if (!buffer) {
            console.error("failed to decode:", "buffer null");
            return;
        }
        // A BufferSource is one-shot: create, connect, start.
        var src = context.createBufferSource();
        src.buffer = buffer;
        src.connect(context.destination);
        src.start(0);
        console.log("started...");
    }, function (error) {
        console.error("failed to decode:", error);
    });
}
// Reads the chosen file as an ArrayBuffer and forwards the bytes to `then`.
function onfilechange(then, evt) {
    var fr = new FileReader();
    fr.onerror = function (e) {
        console.error(e);
    };
    fr.onload = function (e) {
        console.log(e);
        then(e.target.result);
    };
    fr.readAsArrayBuffer(evt.target.files[0]);
}

// Partially apply the success handler, so a change event plays the file.
document.getElementById('file')
    .addEventListener('change', onfilechange.bind(null, playsound), false);
See this live in a jsfiddle, which works for me in Firefox and Chrome.
I threw in a console.log(new Uint8Array()) for good measure, as browser will usually log the contents directly (if the buffer isn't huge).
For other stuff that you can do with ArrayBuffers, see e.g. the corresponding MDN documentation.