Concatenating audio blobs - javascript

I tried concatenating audio blobs using the WebRTC experiment by Muaz Khan, but when I play the concatenated audio, the HTML audio element does not show the full length of the audio file; if you download and play it, the issue persists. I used ffmpeg to concatenate these blobs — but is there a way to concatenate audio blobs using the WebRTC JS experiment by Muaz Khan? A similar attempt which also did not work out: Combine two audio blob recordings

The best way is to convert the blobs into AudioBuffers (Convert blob into ArrayBuffer using FileReader and then decode those arrayBuffers into AudioBuffers). You can then merge/combine more than one AudioBuffers and get the resultant.
Following code will work in such situation:
// Decode an audio blob and fold it into a running merged AudioBuffer.
// NOTE(review): relies on globals defined elsewhere: audioContext,
// arrayBuffer (an Array used as a queue), resultantbuffer, and the
// appendBuffer() helper. The helper below creates its output on a context
// named recordingAudioContext — presumably the same AudioContext; confirm
// the two names refer to the same object in your code.
var blob="YOUR AUDIO BLOB";
var f = new FileReader();
f.onload = function (e) {
// e.target.result is an ArrayBuffer; decode it asynchronously into an AudioBuffer.
audioContext.decodeAudioData(e.target.result, function (buffer) {
arrayBuffer.push(buffer);
if (arrayBuffer.length > 1) {
// Two buffers queued: merge them and keep only the combined result.
resultantbuffer = appendBuffer(arrayBuffer[0], arrayBuffer[1]);
arrayBuffer = [];
arrayBuffer.push(resultantbuffer);
}
else {
// First chunk: nothing to merge with yet.
resultantbuffer = buffer;
}
}, function (e) {
// Decode failure (e.g. truncated or unsupported audio data).
console.warn(e);
});
};
f.readAsArrayBuffer(blob);
This code reads the blob, converts it into an arrayBuffer (e.target.result), and decodes those buffers into AudioBuffers (buffer). I used the appendBuffer method for appending more than one AudioBuffer. Here is the method:
function appendBuffer(buffer1, buffer2) {
  // Concatenate two AudioBuffers into a single new AudioBuffer allocated on
  // the (externally supplied) recordingAudioContext. The result keeps
  // buffer1's sample rate and the smaller of the two channel counts.
  var channels = Math.min(buffer1.numberOfChannels, buffer2.numberOfChannels);
  var totalLength = buffer1.length + buffer2.length;
  var joined = recordingAudioContext.createBuffer(channels, totalLength, buffer1.sampleRate);
  for (var ch = 0; ch < channels; ch++) {
    var dest = joined.getChannelData(ch);
    // buffer1's samples first, buffer2's samples immediately after.
    dest.set(buffer1.getChannelData(ch), 0);
    dest.set(buffer2.getChannelData(ch), buffer1.length);
  }
  return joined;
}
Do let me know if you face any problem.

Related

Streaming into <audio> element

I would like to play audio from a web socket that sends packages of sound data of unknown total length. The playback should start as soon as the first package arrives and it should not be interrupted by new packages.
What I have done so far:
// Asker's attempt: every WebSocket message becomes its own <audio> element,
// which is why playback is not continuous.
ws.onmessage = e => {
// The message payload is JSON containing base64-encoded audio bytes.
const soundDataBase64 = JSON.parse(e.data);
const bytes = window.atob(soundDataBase64);
// Copy the decoded binary string into a fresh ArrayBuffer, byte by byte.
const arrayBuffer = new window.ArrayBuffer(bytes.length);
const bufferView = new window.Uint8Array(arrayBuffer);
for (let i = 0; i < bytes.length; i++) {
bufferView[i] = bytes.charCodeAt(i);
}
const blob = new Blob([arrayBuffer], {"type": "audio/mp3"});
const objectURL = window.URL.createObjectURL(blob);
// A brand-new player per message — each chunk plays in isolation.
const audio = document.createElement("audio");
audio.src = objectURL;
audio.controls = "controls";
document.body.appendChild(audio);
};
However, to my knowledge, it is not possible to extend the size of ArrayBuffer and Uint8Array. I would have to create a new blob, object URL and assign it to the audio element. But I guess, this would interrupt the audio playback.
On the MDN page of <audio>, there is a hint to MediaStream, which looks promising. However, I am not quite sure how to write data onto a media stream and how to connect the media stream to an audio element.
Is it currently possible with JS to write something like pipe where I can input data on one end, which is then streamed to a consumer? How would seamless streaming be achieved in JS (preferably without a lot of micro management code)?
As #Kaiido pointed out in the comments, I can use the MediaSource object. After connecting a MediaSource object to an <audio> element in the DOM, I can add a SourceBuffer to an opened MediaSource object and then append ArrayBuffers to the SourceBuffer.
Example:
const ws = new window.WebSocket(url);
// Placeholder handler: packages arriving before the MediaSource is open
// cannot be buffered yet, so they are discarded.
ws.onmessage = _ => {
console.log("Media source not ready yet... discard this package");
};
const mediaSource = new window.MediaSource();
const audio = document.createElement("audio");
// Binding the MediaSource to the <audio> element via an object URL
// triggers the sourceopen event handled below.
audio.src = window.URL.createObjectURL(mediaSource);
audio.controls = true;
document.body.appendChild(audio);
mediaSource.onsourceopen = _ => {
const sourceBuffer = mediaSource.addSourceBuffer("audio/mpeg"); // mpeg appears to not work in Firefox, unfortunately :(
// Replace the placeholder: from now on each package is decoded from
// base64 and appended to the SourceBuffer, so playback continues
// seamlessly as data arrives.
ws.onmessage = e => {
const soundDataBase64 = JSON.parse(e.data);
const bytes = window.atob(soundDataBase64);
const arrayBuffer = new window.ArrayBuffer(bytes.length);
const bufferView = new window.Uint8Array(arrayBuffer);
for (let i = 0; i < bytes.length; i++) {
bufferView[i] = bytes.charCodeAt(i);
}
sourceBuffer.appendBuffer(arrayBuffer);
};
};
I tested this successfully in Google Chrome 94. Unfortunately, in Firefox 92, the MIME type audio/mpeg seems not working. There, I get the error Uncaught DOMException: MediaSource.addSourceBuffer: Type not supported in MediaSource and the warning Cannot play media. No decoders for requested formats: audio/mpeg.

Not able to create seekable video blobs from mediarecorder using EBML.js - MediaRecorder API - Chrome

Using MediaRecorder, I am able to upload and append the video blobs on Azure. But the combined video is not seekable on download with the following code:
var chunks = [];
// Record the stream and upload each chunk as it arrives.
// NOTE: MediaRecorder's second argument must be an options dictionary, not a
// bare MIME string — the original string argument is rejected with a TypeError.
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });
mediaRecorder.ondataavailable = function (event) {
  // Skip the empty blobs some browsers deliver (e.g. when stopping).
  if (event.data && event.data.size > 0) {
    chunks.push(event.data);
    appendBlockToAzure(chunks);
  }
};
mediaRecorder.start(10000); // deliver a dataavailable event every 10 s
I tried using EBML.js, if I use the following code then I get the seekable video file. This approach needs the file to be processed at the end. Therefore, final file could be of 1GB in size which will take very long time to upload.
var chunks = [];
// Same recorder setup, but the upload happens only once, after recording
// stops, so the whole file must be buffered in memory first.
// NOTE: MediaRecorder's second argument must be an options dictionary, not a
// bare MIME string — the original string argument is rejected with a TypeError.
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });
mediaRecorder.ondataavailable = function (event) {
  if (event.data && event.data.size > 0) {
    chunks.push(event.data);
    if (mediaRecorder.state == "inactive") { //if media recorder is stopped
      // Join all chunks, rewrite the metadata so the file becomes seekable,
      // then upload the finished blob.
      var combined = new Blob(chunks, { type: event.data.type });
      getSeekableBlob(combined, function (seekableBlob) {
        saveCombinedVideoToAzure(seekableBlob);
      });
    }
  }
};
mediaRecorder.start(10000);
That's the reason I want to upload simultaneously to the azure. If I use the following code, then it logs unknown tag warnings and then length error. Also, the video file is not playable.
var seekablechunks = [];
// NOTE: MediaRecorder's second argument must be an options dictionary, not a
// bare MIME string — the original string argument is rejected with a TypeError.
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/x-matroska;codecs=vp8,opus' });
mediaRecorder.ondataavailable = function (event) {
  if (event.data && event.data.size > 0) {
    // NOTE(review): only the first chunk is a complete EBML document; later
    // chunks are continuation clusters, which is why ts-ebml logs "unknown
    // tag" warnings and fails here — see the accepted answer below.
    getSeekableBlob(event.data, function (seekableBlob) {
      seekablechunks.push(seekableBlob);
      saveCombinedVideoToAzure(seekablechunks);
    });
  }
};
mediaRecorder.start(10000);
Function 'getSeekableBlob':
function getSeekableBlob(inputBlob, callback) {
  // EBML.js copyrights goes to: https://github.com/legokichi/ts-ebml
  // Rewrites a MediaRecorder-produced webm blob so its metadata contains
  // SeekHead/Cues, making the result seekable in media players.
  // Calls `callback` with the new Blob once the rewrite is done.
  if (typeof EBML === 'undefined') {
    // Fixed URL: the original message contained a stray space.
    throw new Error('Please link: https://www.webrtc-experiment.com/EBML.js');
  }
  var reader = new EBML.Reader();
  var decoder = new EBML.Decoder();
  var tools = EBML.tools;
  var fileReader = new FileReader();
  fileReader.onload = function () {
    var ebmlElms = decoder.decode(this.result);
    ebmlElms.forEach(function (element) {
      // Skip elements the decoder cannot identify (e.g. EBML_ID "55b0");
      // feeding them to the reader makes makeMetadataSeekable() throw
      // "No schema entry found for unknown" (see ts-ebml issue #33).
      if (element.type !== 'unknown') {
        reader.read(element);
      }
    });
    reader.stop();
    // Build replacement metadata that includes seek information, then splice
    // it onto the original stream body.
    var refinedMetadataBuf = tools.makeMetadataSeekable(reader.metadatas, reader.duration, reader.cues);
    var body = this.result.slice(reader.metadataSize);
    var newBlob = new Blob([refinedMetadataBuf, body], {
      type: 'video/webm'
    });
    callback(newBlob);
  };
  fileReader.readAsArrayBuffer(inputBlob);
}
Is there a way to get seekable blobs and upload them to azure?
It's a challenge for an open-ended streaming source for media (for example MediaRecorder) to create a file with SeekHead elements in it. The Seek elements in a SeekHead element contain byte offsets to elements in the file.
MediaRecorder doesn't create segments or SeekHead elements as you have discovered. To do so it would need to be able to see the future to know how big future compressed video and audio elements will be in the file.
A good way for you to handle this problem might be to post-process your uploaded files on a server. You can use ts-ebml to do this in a streaming fashion on a server when a file is completely uploaded.
It's possible, I suppose, to create JavaScript software in your browser that can transform the stream of data emitted by MediaRecorder so it's seekable, on the fly. To make your stream seekable you'd need to insert SeekHead elements every so often. You'd buffer up multiple seconds of the stream, then locate the Cluster elements in each buffer, then write a SeekHead element pointing to some of them. (Chrome's MediaRecorder outputs Clusters beginning with video key frames.) If you succeed in doing this you'll know a lot about Matroska / webm.
Suddenly, our Face on camera web-cam recorder component stopped saving webm blob.
In the console there were warnings about {EBML_ID: "55b0", type: "unknown", ...} during reader.read(element) and then
"Uncaught (in promise) Error: No schema entry found for unknown" in EBMLEncoder.js" at tools.makeMetadataSeekable(...) call.
Ignoring unknown elements from the decoder workarounded the issue:
...
var ebmlElms = decoder.decode(this.result);
ebmlElms.forEach(function (element) {
if (element.type !== 'unknown') {
reader.read(element);
}
});
reader.stop();
...
Related issue on ts-ebml npm package https://github.com/legokichi/ts-ebml/issues/33 with similar workaround

Play wav file as bytes received from server

I am receiving a raw data wave file from server, and I need to play this array of bytes on the client side.
I tried to use decodeAudioData like in this link but i got the error :
DOMException : Unable to decode Audio data.
It is logical because my raw data is not a regular mp3 file, it is a wave that needs to be played with 8000Hz rate and 1 channel and 16bits per sample.
Is there a function to play a byte array received from server with a certain rate and a number of channels
I managed to play the bytes on browser using this method :
function playWave(byteArray) {
  // Play raw 8 kHz, mono, 16-bit little-endian PCM received from the server.
  // AudioBuffer channel data must be floats in [-1, 1]; the original code
  // copied raw byte values (0..255) straight into the Float32 channel, which
  // produces distorted output. Decode each little-endian int16 sample pair
  // and normalize instead.
  var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
  var sampleCount = Math.floor(byteArray.length / 2); // 2 bytes per sample
  var myAudioBuffer = audioCtx.createBuffer(1, sampleCount, 8000);
  var nowBuffering = myAudioBuffer.getChannelData(0);
  for (var i = 0; i < sampleCount; i++) {
    // Assemble the little-endian 16-bit sample, then sign-extend it.
    var sample = byteArray[2 * i] | (byteArray[2 * i + 1] << 8);
    if (sample >= 0x8000) sample -= 0x10000;
    nowBuffering[i] = sample / 0x8000; // normalize to [-1, 1)
  }
  var source = audioCtx.createBufferSource();
  source.buffer = myAudioBuffer;
  source.connect(audioCtx.destination);
  source.start();
}

In-browser gzip decompression using HTML5 Blob API and createObjectURL?

I am investigating an idea to make use of the browsers built in gzip decompression facility by using the HTML5 Blob API. With a trick the browser may do the uncompressing when the Blob is referenced as an Object URL via URL.createObjectURL and then inserted into the DOM. It appears that the only obstacle is the ability to set Content-Encoding for the blob.
If it would be possible to set the content-encoding of the Blob Object to gzip the browser will decompress the Blob and it will be possible to return the result to a library or application. If this would be possible, it may result in a stable and fast decompressing solution that costs just a few bytes instead of for example 22.6kb for pako_inflate.min.js.
I hereby want to inform if anyone knows of a solution to use a javascript Blob to uncompress gzip data in the browser.
The code that I use to test via the browser console is the following. It will change the background of a page to green on success.
/* create blob */
var createBlobUrl = function (fileData, mimeType) {
  // Wrap fileData in a Blob of the given MIME type and return an object URL
  // that references it.
  var blob;
  try {
    // Modern path: the Blob constructor.
    blob = new Blob([fileData], { type: mimeType });
  } catch (e) { // Backwards-compatibility
    // Legacy path: fall back to the vendor-prefixed BlobBuilder API.
    window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder;
    var builder = new BlobBuilder();
    builder.append(fileData);
    blob = builder.getBlob(mimeType);
  }
  return URL.createObjectURL(blob);
};
/* CSS: body, * { background:green; } */
// Base64-decode the gzip-compressed stylesheet above into a binary string.
// NOTE(review): window.atob requires a browser context.
var zip = window.atob('H4sIAAAAAAAA/0vKT6nUUdBSqFZISkzOTi/KL81LsUovSk3Ns1aoBQCLj6wTHQAAAA==');
function str2bytes (str) {
  // Convert a binary string (one character per byte) into a Uint8Array.
  var out = new Uint8Array(str.length);
  for (var i = 0; i < str.length; i += 1) {
    out[i] = str.charCodeAt(i);
  }
  return out;
}
// Serve the gzip bytes through a blob: URL and attach them as a stylesheet;
// the experiment is whether the browser gunzips the blob when fetching it.
var bloburl = createBlobUrl(str2bytes(zip),'text/css');
var link = document.createElement('link');
link.setAttribute('rel','stylesheet');
link.setAttribute('type','text/css');
link.setAttribute('href',bloburl);
// If decompression worked, the page background turns green.
document.head.appendChild(link);

HTML5: Play video from stored binary string

I am trying to read the contents of a video file as a binary string using the FileReader.readAsBinaryString(Blob|File) as shown in the example http://www.html5rocks.com/en/tutorials/file/dndfiles/#toc-reading-files and then store and play the video.
I tried it using the below (with a webm video file),but get a "Video format or MIME type not supported."
function readBlob (file, startByte, endByte, callback) {
  // Read the [startByte, endByte) slice of `file` as a binary string, hand
  // it to `callback`, and play it through the #player element via a
  // base64 data: URL.
  console.log('readBlob():', file, startByte, endByte);
  var reader = new FileReader();
  reader.onloadend = function (evt) {
    if (evt.target.readyState == FileReader.DONE) {
      callback(evt.target.result);
      var player = document.getElementById('player');
      // readAsBinaryString() yields raw bytes, not base64, so the result
      // must be base64-encoded with btoa() before it can go into a
      // "data:...;base64," URL — the original code omitted this, causing
      // "Video format or MIME type not supported."
      player.src = "data:video/webm;base64," + btoa(evt.target.result);
      player.load();
      player.play();
    }
  }
  var blob = file.slice(startByte, endByte);
  reader.readAsBinaryString(blob);
}
Does anyone know if it is possible to read a video file (one supported by the browser being used) as a binary string and play it in the browser HTML5 video player?
TIA
Your problem might be with the player.src
player.src = "data:video/webm;base64,"+evt.target.result;
It is expecting the data to be in base64 but you're giving it a binary string.
Try encoding it to base64 using btoa
player.src = "data:video/webm;base64,"+btoa(evt.target.result);
How about FileReader.readAsDataURL(Blob|File) ?
It is explained in your html5rocks-link as well.

Categories

Resources