How to cut an audio file from input in the browser with JS and lamejs?

I have an input in my Vue component where I upload an audio file via @change; next I need to cut it. For example, if I have 30 seconds of audio, I need to receive the audio from second 5 to second 15.
I have installed the lamejs package to do that.
But after all my operations on the audio I receive the cut audio without any sound, and I don't know the reason for that. Need help!
Method that uploads the file:
async onUploadFile(event) {
const fileData = event.target.files[0];
this.file = fileData;
await this.decodeFile(fileData);
},
Method that decodes the file into an AudioBuffer:
async decodeFile(fileData) {
  // Assumed implementation: the original post accidentally repeated the upload handler here.
  const arrayBuffer = await fileData.arrayBuffer();
  this.audioBuffer = await new AudioContext().decodeAudioData(arrayBuffer);
},
Method that cuts the audio, encodes it to MP3, and then creates a blob URL:
async audioBufferSlice(buffer, begin, end) {
const audioContext = new AudioContext();
const channels = buffer.numberOfChannels;
const rate = buffer.sampleRate;
const duration = buffer.duration;
const startOffset = rate * begin;
const endOffset = rate * end;
const frameCount = endOffset - startOffset;
const audioLength = endOffset - startOffset;
let trimmedAudio = audioContext.createBuffer(
buffer.numberOfChannels,
audioLength,
rate
);
for(var i = 0; i < buffer.numberOfChannels; i++){
trimmedAudio.copyToChannel(buffer.getChannelData(i).slice(begin, end), i);
}
var audioData = this.serializeAudioBuffer(trimmedAudio);
let mp3Data = [];
const sampleBlockSize = 1152;
let mp3encoder = new lamejs.Mp3Encoder(2, audioData.sampleRate, 128);
var left = new Int8Array(audioData.channels[0].length);
var right = new Int8Array(audioData.channels[1].length);
for (var i = 0; i < audioData.channels[0].length; i += sampleBlockSize) {
var leftChunk = left.subarray(i, i + sampleBlockSize);
var rightChunk = right.subarray(i, i + sampleBlockSize);
var mp3buf = await mp3encoder.encodeBuffer(leftChunk, rightChunk);
if (mp3buf.length > 0) {
mp3Data.push(mp3buf);
}
}
let buf = await mp3encoder.flush();
if (buf.length > 0) {
mp3Data.push(buf);
}
var blob = new Blob(mp3Data, {type: 'audio/mp3'});
var url = window.URL.createObjectURL(blob);
console.log('MP3 URl: ', url);
},
What did I do wrong that I receive the cut audio, but without any sound?
I looked at this repository as an example: https://github.com/Vinit-Dantkale/AudioFy
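The most likely reason for the silence: left and right are allocated as zero-filled Int8Arrays and never populated with the trimmed samples, so the encoder is fed nothing but zeros. Two further issues: the channel data is sliced with begin and end (seconds) instead of startOffset and endOffset (frames), and lamejs expects 16-bit integer samples (Int16Array), so the Float32 data from getChannelData has to be scaled. A sketch of a corrected slice-and-encode flow, under those assumptions (the float-to-16-bit helper is illustrative, and encodeBuffer/flush are synchronous, so no await is needed):

async audioBufferSlice(buffer, begin, end) {
  const rate = buffer.sampleRate;
  const startOffset = Math.floor(rate * begin);
  const endOffset = Math.floor(rate * end);
  const frameCount = endOffset - startOffset;

  // Scale Float32 samples in [-1, 1] to 16-bit signed integers for lamejs.
  const floatTo16 = (float32) => {
    const int16 = new Int16Array(float32.length);
    for (let i = 0; i < float32.length; i++) {
      const s = Math.max(-1, Math.min(1, float32[i]));
      int16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
    return int16;
  };

  // Slice by frame offsets, not by seconds.
  const left = floatTo16(buffer.getChannelData(0).subarray(startOffset, endOffset));
  const rightSource = buffer.numberOfChannels > 1 ? buffer.getChannelData(1) : buffer.getChannelData(0);
  const right = floatTo16(rightSource.subarray(startOffset, endOffset));

  const mp3encoder = new lamejs.Mp3Encoder(2, rate, 128);
  const sampleBlockSize = 1152;
  const mp3Data = [];
  for (let i = 0; i < frameCount; i += sampleBlockSize) {
    const mp3buf = mp3encoder.encodeBuffer(
      left.subarray(i, i + sampleBlockSize),
      right.subarray(i, i + sampleBlockSize)
    );
    if (mp3buf.length > 0) mp3Data.push(mp3buf);
  }
  const tail = mp3encoder.flush();
  if (tail.length > 0) mp3Data.push(tail);

  const blob = new Blob(mp3Data, { type: 'audio/mp3' });
  return URL.createObjectURL(blob);
},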

Related

Encode a Blob with PHP and Decode with JavaScript through an AJAX request [duplicate]

I have Base64-encoded binary data in a string:
const contentType = 'image/png';
const b64Data = 'iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==';
I would like to create a blob: URL containing this data and display it to the user:
const blob = new Blob(????, {type: contentType});
const blobUrl = URL.createObjectURL(blob);
window.location = blobUrl;
I haven't been able to figure out how to create the BLOB.
In some cases I am able to avoid this by using a data: URL instead:
const dataUrl = `data:${contentType};base64,${b64Data}`;
window.location = dataUrl;
However, in most cases the data: URLs are prohibitively large.
How can I decode a Base64 string to a BLOB object in JavaScript?
The atob function will decode a Base64-encoded string into a new string with a character for each byte of the binary data.
const byteCharacters = atob(b64Data);
Each character's code point (charCode) will be the value of the byte. We can create an array of byte values by applying this using the .charCodeAt method for each character in the string.
const byteNumbers = new Array(byteCharacters.length);
for (let i = 0; i < byteCharacters.length; i++) {
byteNumbers[i] = byteCharacters.charCodeAt(i);
}
You can convert this array of byte values into a real typed byte array by passing it to the Uint8Array constructor.
const byteArray = new Uint8Array(byteNumbers);
This in turn can be converted to a BLOB by wrapping it in an array and passing it to the Blob constructor.
const blob = new Blob([byteArray], {type: contentType});
The code above works. However the performance can be improved a little by processing the byteCharacters in smaller slices, rather than all at once. In my rough testing 512 bytes seems to be a good slice size. This gives us the following function.
const b64toBlob = (b64Data, contentType='', sliceSize=512) => {
const byteCharacters = atob(b64Data);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, {type: contentType});
return blob;
}
const blob = b64toBlob(b64Data, contentType);
const blobUrl = URL.createObjectURL(blob);
window.location = blobUrl;
Full Example:
const b64toBlob = (b64Data, contentType='', sliceSize=512) => {
const byteCharacters = atob(b64Data);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, {type: contentType});
return blob;
}
const contentType = 'image/png';
const b64Data = 'iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==';
const blob = b64toBlob(b64Data, contentType);
const blobUrl = URL.createObjectURL(blob);
const img = document.createElement('img');
img.src = blobUrl;
document.body.appendChild(img);
Here is a more minimal method without any dependencies or libraries.
It requires the new fetch API. (Can I use it?)
var url = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=="
fetch(url)
.then(res => res.blob())
.then(console.log)
With this method you can also easily get a ReadableStream, ArrayBuffer, text, and JSON.
(fyi this also works with node-fetch in Node)
As a function:
const b64toBlob = (base64, type = 'application/octet-stream') =>
fetch(`data:${type};base64,${base64}`).then(res => res.blob())
But I would encourage you not to use Base64 in the first place; there are better ways to send and receive binary data. JSON isn't always the best option: it takes up more bandwidth and wastes processing time encoding and decoding. Use e.g. canvas.toBlob instead of canvas.toDataURL, and use FormData to send binary files. A server response can also return a multipart payload, which you can decode with await response.formData(). FormData can go both ways.
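For instance, a minimal sketch of the FormData route (the canvas element and the '/upload' endpoint are placeholders):

// Send a canvas as a binary PNG instead of a Base64 data URL.
canvas.toBlob((blob) => {
  const form = new FormData();
  form.append('image', blob, 'drawing.png');
  fetch('/upload', { method: 'POST', body: form });
}, 'image/png');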
I did a simple performance test against Jeremy's ES6 sync version.
The sync version will block the UI for a while.
(Keeping the devtools open can slow the fetch performance.)
document.body.innerHTML += '<input autofocus placeholder="try writing">'
// get some dummy gradient image
var img=function(){var a=document.createElement("canvas"),b=a.getContext("2d"),c=b.createLinearGradient(0,0,1500,1500);a.width=a.height=3000;c.addColorStop(0,"red");c.addColorStop(1,"blue");b.fillStyle=c;b.fillRect(0,0,a.width,a.height);return a.toDataURL()}();
async function perf() {
const blob = await fetch(img).then(res => res.blob())
// turn it to a dataURI
const url = img
const b64Data = url.split(',')[1]
// Jeremy Banks solution
const b64toBlob = (b64Data, contentType = '', sliceSize=512) => {
const byteCharacters = atob(b64Data);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, {type: contentType});
return blob;
}
// bench blocking method
let i = 500
console.time('blocking b64')
while (i--) {
await b64toBlob(b64Data)
}
console.timeEnd('blocking b64')
// bench non blocking
i = 500
// so that the function is not reconstructed each time
const toBlob = res => res.blob()
console.time('fetch')
while (i--) {
await fetch(url).then(toBlob)
}
console.timeEnd('fetch')
console.log('done')
}
perf()
Optimized (but less readable) implementation:
function base64toBlob(base64Data, contentType) {
contentType = contentType || '';
var sliceSize = 1024;
var byteCharacters = atob(base64Data);
var bytesLength = byteCharacters.length;
var slicesCount = Math.ceil(bytesLength / sliceSize);
var byteArrays = new Array(slicesCount);
for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
var begin = sliceIndex * sliceSize;
var end = Math.min(begin + sliceSize, bytesLength);
var bytes = new Array(end - begin);
for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
bytes[i] = byteCharacters[offset].charCodeAt(0);
}
byteArrays[sliceIndex] = new Uint8Array(bytes);
}
return new Blob(byteArrays, { type: contentType });
}
Following is my TypeScript code, which can easily be converted into JavaScript:
/**
* Convert BASE64 to BLOB
* @param base64Image Pass Base64 image data to convert into the BLOB
*/
private convertBase64ToBlob(base64Image: string) {
// Split into two parts
const parts = base64Image.split(';base64,');
// Hold the content type
const imageType = parts[0].split(':')[1];
// Decode Base64 string
const decodedData = window.atob(parts[1]);
// Create a Uint8Array the same size as the raw data length
const uInt8Array = new Uint8Array(decodedData.length);
// Insert all character codes into uInt8Array
for (let i = 0; i < decodedData.length; ++i) {
uInt8Array[i] = decodedData.charCodeAt(i);
}
// Return BLOB image after conversion
return new Blob([uInt8Array], { type: imageType });
}
See this example: https://jsfiddle.net/pqhdce2L/
function b64toBlob(b64Data, contentType, sliceSize) {
contentType = contentType || '';
sliceSize = sliceSize || 512;
var byteCharacters = atob(b64Data);
var byteArrays = [];
for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
var slice = byteCharacters.slice(offset, offset + sliceSize);
var byteNumbers = new Array(slice.length);
for (var i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
var byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
var blob = new Blob(byteArrays, {type: contentType});
return blob;
}
var contentType = 'image/png';
var b64Data = 'your Base64-encoded data';
var blob = b64toBlob(b64Data, contentType);
var blobUrl = URL.createObjectURL(blob);
var img = document.createElement('img');
img.src = blobUrl;
document.body.appendChild(img);
For all browser support, especially on Android, perhaps you can add this:
try{
blob = new Blob(byteArrays, {type : contentType});
}
catch(e){
// TypeError old Google Chrome and Firefox
window.BlobBuilder = window.BlobBuilder ||
window.WebKitBlobBuilder ||
window.MozBlobBuilder ||
window.MSBlobBuilder;
if(e.name == 'TypeError' && window.BlobBuilder){
var bb = new BlobBuilder();
bb.append(byteArrays);
blob = bb.getBlob(contentType);
}
else if(e.name == "InvalidStateError"){
// InvalidStateError (tested on FF13 WinXP)
blob = new Blob(byteArrays, {type : contentType});
}
else{
// We're screwed, blob constructor unsupported entirely
}
}
For all copy-paste lovers out there like me, here is a cooked download function which works on Chrome, Firefox and Edge:
window.saveFile = function (bytesBase64, mimeType, fileName) {
var fileUrl = "data:" + mimeType + ";base64," + bytesBase64;
fetch(fileUrl)
.then(response => response.blob())
.then(blob => {
var link = window.document.createElement("a");
link.href = window.URL.createObjectURL(blob); // createObjectURL takes no options argument
link.download = fileName;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
});
}
I'm posting a more declarative way of sync Base64 converting. While async fetch().blob() is very neat and I like this solution a lot, it doesn't work on Internet Explorer 11 (and probably Edge - I haven't tested this one), even with the polyfill - take a look at my comment to Endless' post for more details.
const blobPdfFromBase64String = base64String => {
const byteArray = Uint8Array.from(
atob(base64String)
.split('')
.map(char => char.charCodeAt(0))
);
return new Blob([byteArray], { type: 'application/pdf' });
};
Bonus
If you want to print it you could do something like:
const isIE11 = !!(window.navigator && window.navigator.msSaveOrOpenBlob); // Or however you want to check it
const printPDF = blob => {
try {
isIE11
? window.navigator.msSaveOrOpenBlob(blob, 'documents.pdf')
: printJS(URL.createObjectURL(blob)); // http://printjs.crabbly.com/
} catch (e) {
throw PDFError;
}
};
Bonus x 2 - Opening a BLOB file in new tab for Internet Explorer 11
If you're able to do some preprocessing of the Base64 string on the server you could expose it under some URL and use the link in printJS :)
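A sketch, assuming the server exposes the decoded PDF at a URL of your own (the endpoint below is made up):

// printJS accepts a plain URL, which avoids blob: URL limitations in Internet Explorer 11.
printJS({ printable: 'https://example.com/api/documents/42.pdf', type: 'pdf' });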
For image data, I find it simpler to use canvas.toBlob (asynchronous)
function b64toBlob(b64, onsuccess, onerror) {
var img = new Image();
img.onerror = onerror;
img.onload = function onload() {
var canvas = document.createElement('canvas');
canvas.width = img.width;
canvas.height = img.height;
var ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
canvas.toBlob(onsuccess);
};
img.src = b64;
}
var base64Data = 'data:image/jpg;base64,/9j/4AAQSkZJRgABAQA...';
b64toBlob(base64Data,
function(blob) {
var url = window.URL.createObjectURL(blob);
// do something with url
}, function(error) {
// handle error
});
If you can stand adding one dependency to your project there's the great blob-util npm package that provides a handy base64StringToBlob function. Once added to your package.json you can use it like this:
import { base64StringToBlob } from 'blob-util';
const contentType = 'image/png';
const b64Data = 'iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==';
const blob = base64StringToBlob(b64Data, contentType);
// Do whatever you need with your blob...
I noticed that Internet Explorer 11 gets incredibly slow when slicing the data like Jeremy suggested. Slicing is fine in Chrome, but Internet Explorer seems to have a problem when passing the sliced data to the Blob constructor. On my machine, passing 5 MB of data makes Internet Explorer crash and memory consumption goes through the roof. Chrome creates the blob in no time.
Run this code for a comparison:
var byteArrays = [],
megaBytes = 2,
byteArray = new Uint8Array(megaBytes*1024*1024),
block,
blobSlowOnIE, blobFastOnIE,
i;
for (i = 0; i < (megaBytes*1024); i++) {
block = new Uint8Array(1024);
byteArrays.push(block);
}
//debugger;
console.profile("No Slices");
blobSlowOnIE = new Blob(byteArrays, { type: 'text/plain'});
console.profileEnd();
console.profile("Slices");
blobFastOnIE = new Blob([byteArray], { type: 'text/plain'});
console.profileEnd();
So I decided to include both methods described by Jeremy in one function. Credits go to him for this.
function base64toBlob(base64Data, contentType, sliceSize) {
var byteCharacters,
byteArray,
byteNumbers,
blobData,
blob;
contentType = contentType || '';
byteCharacters = atob(base64Data);
// Get BLOB data sliced or not
blobData = sliceSize ? getBlobDataSliced() : getBlobDataAtOnce();
blob = new Blob(blobData, { type: contentType });
return blob;
/*
* Get BLOB data in one slice.
* => Fast in Internet Explorer on new Blob(...)
*/
function getBlobDataAtOnce() {
byteNumbers = new Array(byteCharacters.length);
for (var i = 0; i < byteCharacters.length; i++) {
byteNumbers[i] = byteCharacters.charCodeAt(i);
}
byteArray = new Uint8Array(byteNumbers);
return [byteArray];
}
/*
* Get BLOB data in multiple slices.
* => Slow in Internet Explorer on new Blob(...)
*/
function getBlobDataSliced() {
var slice,
byteArrays = [];
for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
slice = byteCharacters.slice(offset, offset + sliceSize);
byteNumbers = new Array(slice.length);
for (var i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
byteArray = new Uint8Array(byteNumbers);
// Add slice
byteArrays.push(byteArray);
}
return byteArrays;
}
}
The method with fetch is the best solution, but if anyone needs to use a method without fetch then here it is, as the ones mentioned previously didn't work for me:
function makeblob(dataURL) {
const BASE64_MARKER = ';base64,';
const parts = dataURL.split(BASE64_MARKER);
const contentType = parts[0].split(':')[1];
const raw = window.atob(parts[1]);
const rawLength = raw.length;
const uInt8Array = new Uint8Array(rawLength);
for (let i = 0; i < rawLength; ++i) {
uInt8Array[i] = raw.charCodeAt(i);
}
return new Blob([uInt8Array], { type: contentType });
}
In the browser, just:
Uint8Array.from(atob(YOUR_BASE64_DATA), (c) => c.charCodeAt(0))
Compare with fetch:
!(async () => {
const start = performance.now();
let i = 0;
while (i++ < 1e3) {
const dataUrl =
"data:application/octet-stream;base64,H4sIAAAAAAAAA0vOzyvOz0nVy8lP10jISM3JyVdIr8osUFCpdkksSdXLyy/X0KxN0ORKHlU3qm5U3ai6UXWj6kauOgBVt1KRLwcAAA==";
body = await (await fetch(dataUrl)).blob();
}
console.log(performance.now() - start); // 508.19999999925494ms
})();
!(async () => {
const start = performance.now();
let i = 0;
while (i++ < 1e3) {
const base64Data =
"H4sIAAAAAAAAA0vOzyvOz0nVy8lP10jISM3JyVdIr8osUFCpdkksSdXLyy/X0KxN0ORKHlU3qm5U3ai6UXWj6kauOgBVt1KRLwcAAA==";
body = Uint8Array.from(atob(base64Data), (c) => c.charCodeAt(0));
}
console.log(performance.now() - start); // 7.899999998509884ms
})();
Depending on your data size, choose the more performant one.
Two different variations
function base64ToBlob(base64, contentType='image/png', chunkLength=512) {
const byteCharsArray = Array.from(atob(base64.substr(base64.indexOf(',') + 1)));
const chunksIterator = new Array(Math.ceil(byteCharsArray.length / chunkLength));
const bytesArrays = [];
for (let c = 0; c < chunksIterator.length; c++) {
bytesArrays.push(new Uint8Array(byteCharsArray.slice(c * chunkLength, chunkLength * (c + 1)).map(s => s.charCodeAt(0))));
}
const blob = new Blob(bytesArrays, {type: contentType});
return blob;
}
/* Not sure how it performs with big images */
async function base64ToBlobLight(base64) { return await fetch(base64).then(res => res.blob()); }
/* Test */
const base64Data = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAMAAADXqc3KAAAB+FBMVEUAAAA/mUPidDHiLi5Cn0XkNTPmeUrkdUg/m0Q0pEfcpSbwaVdKskg+lUP4zA/iLi3msSHkOjVAmETdJSjtYFE/lkPnRj3sWUs8kkLeqCVIq0fxvhXqUkbVmSjwa1n1yBLepyX1xxP0xRXqUkboST9KukpHpUbuvRrzrhF/ljbwaljuZFM4jELaoSdLtElJrUj1xxP6zwzfqSU4i0HYnydMtUlIqUfywxb60AxZqEXaoifgMCXptR9MtklHpEY2iUHWnSjvvRr70QujkC+pUC/90glMuEnlOjVMt0j70QriLS1LtEnnRj3qUUXfIidOjsxAhcZFo0bjNDH0xxNLr0dIrUdmntVTkMoyfL8jcLBRuErhJyrgKyb4zA/5zg3tYFBBmUTmQTnhMinruBzvvhnxwxZ/st+Ktt5zp9hqota2vtK6y9FemNBblc9HiMiTtMbFtsM6gcPV2r6dwroseLrMrbQrdLGdyKoobKbo3Zh+ynrgVllZulTsXE3rV0pIqUf42UVUo0JyjEHoS0HmsiHRGR/lmRz/1hjqnxjvpRWfwtOhusaz0LRGf7FEfbDVmqHXlJeW0pbXq5bec3fX0nTnzmuJuWvhoFFhm0FtrziBsjaAaDCYWC+uSi6jQS3FsSfLJiTirCOkuCG1KiG+wSC+GBvgyhTszQ64Z77KAAAARXRSTlMAIQRDLyUgCwsE6ebm5ubg2dLR0byXl4FDQzU1NDEuLSUgC+vr6urq6ubb29vb2tra2tG8vLu7u7uXl5eXgYGBgYGBLiUALabIAAABsElEQVQoz12S9VPjQBxHt8VaOA6HE+AOzv1wd7pJk5I2adpCC7RUcHd3d3fXf5PvLkxheD++z+yb7GSRlwD/+Hj/APQCZWxM5M+goF+RMbHK594v+tPoiN1uHxkt+xzt9+R9wnRTZZQpXQ0T5uP1IQxToyOAZiQu5HEpjeA4SWIoksRxNiGC1tRZJ4LNxgHgnU5nJZBDvuDdl8lzQRBsQ+s9PZt7s7Pz8wsL39/DkIfZ4xlB2Gqsq62ta9oxVlVrNZpihFRpGO9fzQw1ms0NDWZz07iGkJmIFH8xxkc3a/WWlubmFkv9AB2SEpDvKxbjidN2faseaNV3zoHXvv7wMODJdkOHAegweAfFPx4G67KluxzottCU9n8CUqXzcIQdXOytAHqXxomvykhEKN9EFutG22p//0rbNvHVxiJywa8yS2KDfV1dfbu31H8jF1RHiTKtWYeHxUvq3bn0pyjCRaiRU6aDO+gb3aEfEeVNsDgm8zzLy9egPa7Qt8TSJdwhjplk06HH43ZNJ3s91KKCHQ5x4sw1fRGYDZ0n1L4FKb9/BP5JLYxToheoFCVxz57PPS8UhhEpLBVeAAAAAElFTkSuQmCC';
const blob = base64ToBlob(base64Data);
const blobUrl = URL.createObjectURL(blob);
const img = document.createElement('img');
img.src = blobUrl;
document.body.appendChild(img);
/**********************/
/* Test */
(async () => {
const blob = await base64ToBlobLight(base64Data);
const blobUrl = URL.createObjectURL(blob);
const img = document.createElement('img');
img.src = blobUrl;
document.body.appendChild(img);
})();
This would prove to be a much shorter solution (it relies on Buffer, so Node.js only; new Buffer() is deprecated in favor of Buffer.from):
const byteArray = Buffer.from(base64String.replace(/^[\w\d;:\/]+base64\,/g, ''), 'base64');
base64String is the string containing the Base64 data.
byteArray is the array you need.
The regex replacement is optional and is just there to strip the prefix in the case of a data-URL string.

How to make oscillator loudness match the loudness of a sample?

I want a tone from an oscillator to play under an audio clip of a song. How do I make the tone's loudness match the loudness of the sample? I want the tone to be quieter when the song is quieter, and louder at the louder parts. I have used createAnalyser to detect the volume of the sample, but the code does not work.
const audioContext = new AudioContext();
const audiodiv = document.querySelector('.audiodiv')
const buffer = audioContext.createBuffer(
1,
audioContext.sampleRate * 1,
audioContext.sampleRate
);
const channelData = buffer.getChannelData(0)
for (let i = 0; i < buffer.length; i++){
channelData[i] = Math.random() * 2 - 1;
}
const primaryGainControl = audioContext.createGain()
primaryGainControl.gain.setValueAtTime(0.005, 0);
primaryGainControl.connect(audioContext.destination);
const samplebutton = document.createElement('button')
samplebutton.innerText = 'sample'
samplebutton.addEventListener('click', async () => {
let volume = 0
const response = await fetch('testsong.wav')
const soundBuffer = await response.arrayBuffer()
const sampleBuffer = await audioContext.decodeAudioData(soundBuffer)
const analyzer = audioContext.createAnalyser()
volume = analyzer.frequencyBinCount
const sampleSource = audioContext.createBufferSource()
sampleSource.buffer = sampleBuffer
sampleSource.playbackRate.setValueAtTime(1, 0)
sampleSource.connect(primaryGainControl)
sampleSource.start()
})
audiodiv.appendChild(samplebutton)
const toneButton = document.createElement('button')
toneButton.innerText = "tone"
toneButton.addEventListener("click", () => {
const toneOscillator = audioContext.createOscillator();
toneOscillator.frequency.setValueAtTime(150, 0)
toneOscillator.type = "sine"
toneOscillator.connect(oscGain);
toneOscillator.start()
toneOscillator.stop(audioContext.currentTime + 5)
const oscGain = audioContext.createGain()
oscGain.gain.value = volume
oscGain.connect(primaryGainControl)
})
audiodiv.appendChild(toneButton)
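A few problems stand out in this snippet: analyser.frequencyBinCount is just the number of frequency bins (a constant equal to half of fftSize), not a volume; the analyser is never connected to the sample; oscGain is used one line before it is created; and volume is read once per click instead of being tracked while the song plays. One approach is to tap the sample through an AnalyserNode and continuously drive the oscillator's gain from the measured RMS level. A sketch under those assumptions, reusing the question's node names:

// Tap the sample signal and keep updating the oscillator gain from its RMS level.
const analyser = audioContext.createAnalyser();
analyser.fftSize = 2048;
sampleSource.connect(analyser);        // the analyser taps the signal...
analyser.connect(primaryGainControl);  // ...and passes it through unchanged

const data = new Float32Array(analyser.fftSize);
function trackLevel() {
  analyser.getFloatTimeDomainData(data);
  let sum = 0;
  for (let i = 0; i < data.length; i++) sum += data[i] * data[i];
  const rms = Math.sqrt(sum / data.length);  // rough loudness estimate
  oscGain.gain.setTargetAtTime(rms, audioContext.currentTime, 0.05);
  requestAnimationFrame(trackLevel);
}
trackLevel();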

Azure Speech javascript SDK: Output audio in mp3

I use the sdk.connection methods to capture audio from the speech to text recognizer. It creates PCM audio that I want to convert into MP3.
This is how connection is initialised:
const con = SpeechSDK.Connection.fromRecognizer(this.recognizer);
con.messageSent = args => {
// Only record outbound audio messages that have data in them.
if (
args.message.path === "audio" &&
args.message.isBinaryMessage &&
args.message.binaryMessage !== null
) {
this.wavFragments[this.wavFragmentCount++] =
args.message.binaryMessage;
}
};
And this is how the WAV file is built:
let byteCount = 0;
for (let i = 0; i < this.wavFragmentCount; i++) {
byteCount += this.wavFragments[i].byteLength;
}
// Output array.
const sentAudio = new Uint8Array(byteCount);
byteCount = 0;
for (let i = 0; i < this.wavFragmentCount; i++) {
sentAudio.set(new Uint8Array(this.wavFragments[i]), byteCount);
byteCount += this.wavFragments[i].byteLength;
} // Write the audio back to disk.
// Set the file size in the wave header:
const view = new DataView(sentAudio.buffer);
view.setUint32(4, byteCount, true);
view.setUint32(40, byteCount, true);
I tried using lamejs to convert 'sentAudio' into MP3.
import {lamejs} from "../../modules/lame.min.js";
const wavBlob = new Blob([sentAudio]);
const reader = new FileReader();
reader.onload = evt => {
const audioData = evt.target.result;
const wav = lamejs.WavHeader.readHeader(new DataView(audioData));
const mp3enc = new lamejs.Mp3Encoder(1, wav.sampleRate, 128);
const samples = new Int8Array(audioData, wav.dataOffset, wav.dataLen / 2);
let mp3Tmp = mp3enc.encodeBuffer(samples); // encode mp3
// Push encode buffer to mp3Data variable
const mp3Data = [];
mp3Data.push(mp3Tmp);
// Get end part of mp3
mp3Tmp = mp3enc.flush();
// Write last data to the output data, too
// mp3Data contains now the complete mp3Data
mp3Data.push(mp3Tmp);
const blob = new Blob(mp3Data, { type: "audio/mp3" });
this.createDownloadLink(blob, "mp3");
};
reader.readAsArrayBuffer(wavBlob);
The MP3 blob is empty or contains inaudible sounds.
I have also tried the 'encodeMP3' method described in this example, but it gives the same output.
Are there any existing solutions to support this MP3 conversion?
Regarding the issue: the PCM samples are 16-bit, so they have to be read as an Int16Array rather than an Int8Array. Please refer to the following code.
let byteCount = 0;
for (let i= 0; i < wavFragmentCount; i++) {
byteCount += wavFragments[i].byteLength;
}
// Output array.
const sentAudio = new Uint8Array(byteCount);
byteCount = 0;
for (let i = 0; i < wavFragmentCount; i++) {
sentAudio.set(new Uint8Array(wavFragments[i]), byteCount);
byteCount += wavFragments[i].byteLength;
}
// create wav file blob
const view = new DataView(sentAudio.buffer);
view.setUint32(4, byteCount, true);
view.setUint32(40, byteCount, true);
let wav = new Blob([view], { type: 'audio/wav' });
// read wave file as base64
var reader = new FileReader();
reader.readAsDataURL(wav);
reader.onload = () => {
var base64String = reader.result.toString();
base64String = base64String.split(',')[1];
// convert to buffer
var binary_string = window.atob(base64String);
var len = binary_string.length;
var bytes = new Uint8Array(len);
for (var i = 0; i < len; i++) {
bytes[i] = binary_string.charCodeAt(i);
}
// convert to mp3 with lamejs
var wavHdr = lamejs.WavHeader.readHeader(
new DataView(bytes.buffer)
);
console.log(wavHdr);
var wavSamples = new Int16Array(
bytes.buffer,
0,
wavHdr.dataLen / 2
);
let mp3 = wavToMp3(
wavHdr.channels,
wavHdr.sampleRate,
wavSamples
);
reader.readAsDataURL(mp3);
reader.onload = () => {
var base64String = reader.result;
console.log(base64String);
};
};
function wavToMp3(channels, sampleRate, samples) {
console.log(channels);
console.log(sampleRate);
var buffer = [];
var mp3enc = new lamejs.Mp3Encoder(channels, sampleRate, 128);
var remaining = samples.length;
var maxSamples = 1152;
for (var i = 0; remaining >= maxSamples; i += maxSamples) {
var mono = samples.subarray(i, i + maxSamples);
var mp3buf = mp3enc.encodeBuffer(mono);
if (mp3buf.length > 0) {
buffer.push(new Int8Array(mp3buf));
}
remaining -= maxSamples;
}
var d = mp3enc.flush();
if (d.length > 0) {
buffer.push(new Int8Array(d));
}
console.log('done encoding, size=', buffer.length);
var blob = new Blob(buffer, { type: 'audio/mp3' });
var bUrl = window.URL.createObjectURL(blob);
console.log('Blob created, URL:', bUrl);
return blob;
}

Trying to get the Hash (SHA-512) of a very Large file, more that 2.5 G in javascript

I am trying to get the SHA-512 of a large file, 2.5 GB and maybe larger.
The approach is to build an ArrayBuffer to be digested by the crypto.subtle.digest API.
The problem is I always get an
Array buffer allocation failed
Is it my chunk size? Is there a limit on the ArrayBuffer? I have run out of ideas.
Or maybe there is a better way to get the hash digest instead of using one full ArrayBuffer?
// received a file object
function CalculateHash(file)
{
var obj = { File: file };
var chunkSize = 10485760;
// Assumed declarations; the original snippet uses these without declaring them.
var reader = new FileReader();
var hash = {};
const chunksQuantity = Math.ceil(obj.File.size / chunkSize);
const chunksQueue = new Array(chunksQuantity).fill().map((_, index) => index).reverse();
var buffer = null;
reader.onload = async function (evt) {
if (buffer == null) {
buffer = evt.currentTarget.result;
} else {
var tmp = new Uint8Array(buffer.byteLength + evt.currentTarget.result.byteLength);
tmp.set(new Uint8Array(buffer), 0);
tmp.set(new Uint8Array(evt.currentTarget.result), buffer.byteLength);
buffer = tmp;
}
readNext();
}
var readNext = async function () {
if (chunksQueue.length > 0) {
const chunkId = chunksQueue.pop();
const sentSize = chunkId * chunkSize;
const chunk = obj.File.slice(sentSize, sentSize + chunkSize);
reader.readAsArrayBuffer(chunk);
} else {
var x = await digestMessage(buffer);
hash.SHA512 = x.toUpperCase();
buffer = null;
}
}
readNext();
}
async function digestMessage(file) {
const hashBuffer = await crypto.subtle.digest('SHA-512', file); // hash the message
const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
return hashHex;
}
Based on @ArtjomB.'s answer, the problem was the progressive hash: the limitation is the ArrayBuffer size in the browser.
This is the final worker code. It mixes both approaches with the native digest, which is much faster than the CryptoJS library. If the file is larger than 1 GB we use the CryptoJS library; if not, we use the native browser digest. Any suggestions are welcome!
var window = self;
var document = {};
self.importScripts("/Crypto.min.js");
onmessage = async function (args) {
var obj = args.data;
var reader = new FileReader();
var hash = {};
var chunkSize = 10485760;
var largeFileTrigger = 1048576000;
const chunksQuantity = Math.ceil(obj.File.size / chunkSize);
const chunksQueue = new Array(chunksQuantity).fill().map((_, index) => index).reverse();
var isLargeFile = obj.File.size > largeFileTrigger;
var buffer = null;
var progressiveArray = [];
reader.onload = async function (evt) {
if (isLargeFile) {
progressiveArray.push(evt.currentTarget.result);
} else {
if (buffer == null) {
buffer = evt.currentTarget.result;
} else {
var tmp = new Uint8Array(buffer.byteLength + evt.currentTarget.result.byteLength);
tmp.set(new Uint8Array(buffer), 0);
tmp.set(new Uint8Array(evt.currentTarget.result), buffer.byteLength);
buffer = tmp;
}
}
readNext();
}
var readNext = async function () {
if (chunksQueue.length > 0) {
const chunkId = chunksQueue.pop();
const sentSize = chunkId * chunkSize;
const chunk = obj.File.slice(sentSize, sentSize + chunkSize);
reader.readAsArrayBuffer(chunk);
} else {
var hexHash = null;
if (isLargeFile) {
var sha = CryptoJS.algo.SHA512.create();
for (var i = 0; i < progressiveArray.length; i++) {
sha.update(arrayBufferToWordArray(progressiveArray[i]));
}
hexHash = sha.finalize().toString();
} else {
hexHash = await digestMessage(buffer);
}
SHA512 = hexHash.toUpperCase();
buffer = null;
progressiveArray = null;
postMessage({ Hash: SHA512 });
}
}
readNext();
}
async function digestMessage(file) {
const hashBuffer = await crypto.subtle.digest('SHA-512', file); // hash the message
const hashArray = Array.from(new Uint8Array(hashBuffer)); // convert buffer to byte array
const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); // convert bytes to hex string
return hashHex;
}
function arrayBufferToWordArray(ab) {
var i8a = new Uint8Array(ab);
var a = [];
for (var i = 0; i < i8a.length; i += 4) {
a.push(i8a[i] << 24 | i8a[i + 1] << 16 | i8a[i + 2] << 8 | i8a[i + 3]);
}
return CryptoJS.lib.WordArray.create(a, i8a.length);
}
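Usage from the main thread might look like this (the worker file name and the file input are placeholders):

// Spawn the hashing worker, hand it the File, and receive the uppercase hex digest.
const worker = new Worker('/hash-worker.js');
worker.onmessage = (e) => console.log('SHA-512:', e.data.Hash);
fileInput.addEventListener('change', () => {
  worker.postMessage({ File: fileInput.files[0] });
});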
