AngularJS - Decode Byte Array and Play Audio File (Wav/MP3) - javascript

Using below service to play audio file (wav/mp3) that comes in byte array format.
myAudioService.getAudioTone(userid).then(function (data) {
var context; // Audio context
var buf; // Audio buffer
$window.AudioContext = $window.webkitAudioContext;
context = new AudioContext();
$timeout(function () {
$scope.playByteArray = function(){
var arrayBuffer = new ArrayBuffer(data.length);
var bufferView = new Uint8Array(arrayBuffer);
for (i = 0; i < data.length; i++) {
bufferView[i] = data[i];
}
context.decodeAudioData(arrayBuffer, function(buffer) {
buf = buffer;
play();
});
}
$scope.play = function(audioBuffer){
// Create a source node from the buffer
var source = context.createBufferSource();
source.buffer = buf;
// Connect to the final output node (the speakers)
source.connect(context.destination);
// Play immediately
source.start(0);
}
if(data.length !== '' || data !== ''){
$scope.playByteArray();
}
}, 3000);
});
The functions are called but it throws below exception.
Uncaught (in promise) DOMException: Unable to decode audio data
How do I run it in Chrome, FF and IE ?
P.S. $window and $timeout are already defined in controller.

Based on the error message arrayBuffer doesn't contain what you think it contains. You should verify that the bytes in the array are the same as the encoded wav/mp3 file.

Related

Receiving a video parts and append them to a MediaSource using javascript

I'm making a video streaming service and I'm having trouble recreating the source stream on the client.
I have made a camera page that sends video data chunks to the server along with a video index, the server then stores this data chunk on the harddisk for the client to download. I can retrieve the video data chunk from the client by calling the url:
/Lessen/LesStreamPart/{streamid}?Index={index}
Explanation:
hub.server.join(current_lesid);
When a client joins the stream the page will start to receive updates about the stream by SignalR:
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When a update is received, the page checks if it has already setup the MediaSource control for that stream, if not, because it is the first time, the page will start the stream:
function startStream()
When the stream is started the page will setup the MediaSource object for the video element. Then wait till the MediaSource object is instantiated.
function openStream()
After the MediaSource object has been instantiated the page will start populating the MediaSource object with the Mimetype information, after that it will load the first part of the video stream and will append it to the MediaSource object.
function loadChunks()
Once the MediaSource update has finished, the page will start loading the remaining video parts.
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration)
When the camera adds a new chunk, the page will be signalled again using SignalR. Because the streamid will match the page will then continue with loading the newer chunks by calling:
function loadChunks()
JS:
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to mediasource' sourcebuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semafore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid +"?Index=0";
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
}
req.send();
}
else {
// Release buffering semafore
buffering = false;
}
}
function loadChunks() {
// Set the buffering semafore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength)
{
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
var req = new XMLHttpRequest();
req.responseType = "arraybuffer";
req.open("GET", url, true);
req.onload = function () {
// Append response to the sourcebuffer
var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
// Recursive call to add remaining chunks
loadChunks();
}
req.send();
}
else {
// Newindex is not in use, release buffering semafore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy, we will time out this message with 100ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the streamid is different reload the stream, when the page starts
// the "current_lesstreamid" is undefined, so we will reload the video
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
HTML:
<input type="hidden" id="LesId" value="#(Model.Id)" />
<video autoplay controls id="VideoPlayerElement"></video>
OUTPUT:
The page doesn't show any errors, but I do get a broken video icon in the video element. Does anyone know what this might be?
I read in a different stackoverflow that it might be the VP8 codec that needs to be used, I changed it, but it remains not working.
EDIT:
I changed the javascript code a bit. It turned out I called the "loadChunks" function, but it was already called by the "updateend" event of the "sourceBuffer". I then got a lot more errors.
I changed the way I communicate with the server to "$.get();". It solved the errors, but I still get no image.
// Declare variables
var hub = $.connection.lesHub; // The SignalR hub
var buffering = false; // Semaphore for buffering
var video; // Pointer to video element
var mediaSource; // Pointer to mediasource object
var sourceBuffer; // Pointer to mediasource' sourcebuffer object
var current_lesid = document.querySelector('#LesId').value; // Current les id
var current_lesstreamid; // Current stream id (set in update)
var current_contenttype; // Current video content type (mimetype)
var current_index; // Current loaded index
var current_indexlength; // Current loaded index length
// Will be called once SignalR sends a video chunk update event
function startStream() {
// Open MediaSource
mediaSource = new MediaSource();
// Add listeners
mediaSource.addEventListener('webkitsourceopen', openStream, false);
//mediaSource.addEventListener('webkitsourceclose', closed, false);
mediaSource.addEventListener('sourceopen', openStream, false);
//mediaSource.addEventListener('sourceclose', closed, false);
// Set MediaSource as video element source
video = document.querySelector('video#VideoPlayerElement');
video.src = URL.createObjectURL(mediaSource);
}
function openStream() {
// Set the buffering semafore
buffering = true;
// Start the stream with contenttype
sourceBuffer = mediaSource.addSourceBuffer(current_contenttype);
// Wait for the sourcebuffer to be ready to load all other chunks
sourceBuffer.addEventListener("updateend", loadChunks);
// If there are any video chunks
if (current_indexlength > 0) {
// Load the first video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=0";
//$("body").append("<video><source src='" + url + "'/></video>");
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to 0
current_index = 0;
//}
//req.send();
});
}
else {
// Release buffering semafore
buffering = false;
}
}
function loadChunks() {
//video.play();
// Set the buffering semafore
buffering = true;
// Calculate the newindex
var newindex = current_index + 1;
// Check if the newindex is in use?
if (newindex < current_indexlength) {
// Load new video chunk
var url = "/Lessen/LesStreamPart/" + current_lesstreamid + "?Index=" + newindex;
$.get(url, function (resp) {
//var req = new XMLHttpRequest();
//req.responseType = "arraybuffer";
//req.open("GET", url, true);
//req.onload = function () {
// Append response to the sourcebuffer
//var resp = req.response;
var array = new Uint8Array(resp);
sourceBuffer.appendBuffer(array);
// Set the current index to newindex
current_index = newindex;
//}
//req.send();
});
}
else {
// Newindex is not in use, release buffering semafore
buffering = false;
}
}
// Start recording callbacks
hub.client.startLesStream = function (lesid, lesstreamid, contenttype) {
// This is called while there are no video data chunks, so we can ignore it.
};
// Update recording callbacks
hub.client.updateLesStream = function (lesid, lesstreamid, contenttype, index, duration) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if buffering
if (buffering) {
// The webpage is currently busy, we will time out this message with 100ms
setTimeout(function () {
hub.client.updateLesStream(lesid, lesstreamid, contenttype, index, duration);
}, 100);
}
else {
// Not buffering, so we can begin processing
// When the streamid is different reload the stream, when the page starts
// the "current_lesstreamid" is undefined, so we will reload the video
if (current_lesstreamid == lesstreamid) {
// Update to current stream
current_indexlength = index + 1;
loadChunks();
}
else {
// Different stream started
current_lesstreamid = lesstreamid;
current_contenttype = contenttype;
current_indexlength = index + 1;
startStream();
}
}
}
};
// Stop recording callbacks
hub.client.stopLesStream = function (lesid, lesstreamid, contenttype) {
// Check if update is for our lesid (not actually needed)
if (current_lesid == lesid) {
// Check if stream is currently shown
if (current_lesstreamid == lesstreamid) {
// Stop the stream
mediaSource.endOfStream();
}
}
};
// Start SignalR
$.connection.hub.start().done(function () {
// And join the room
hub.server.join(current_lesid);
});
I get perfect example to solve this problem and into simple way...
i am using three static files, but you can append data from socket's or any api also.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<br>
<video controls="true" autoplay="true"></video>
<script>
(async() => {
const mediaSource = new MediaSource();
const video = document.querySelector("video");
// video.oncanplay = e => video.play();
const urls = ["https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4", "https://raw.githubusercontent.com/w3c/web-platform-tests/master/media-source/mp4/test.mp4","https://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4"];
const request = url => fetch(url).then(response => response.arrayBuffer());
// `urls.reverse()` stops at `.currentTime` : `9`
const files = await Promise.all(urls.map(request));
/*
`.webm` files
Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
Uncaught DOMException: Failed to set the 'timestampOffset' property on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
*/
// const mimeCodec = "video/webm; codecs=opus";
// https://stackoverflow.com/questions/14108536/how-do-i-append-two-video-files-data-to-a-source-buffer-using-media-source-api/
const mimeCodec = "video/mp4; codecs=avc1.42E01E, mp4a.40.2";
const media = await Promise.all(files.map(file => {
return new Promise(resolve => {
let media = document.createElement("video");
let blobURL = URL.createObjectURL(new Blob([file]));
media.onloadedmetadata = async e => {
resolve({
mediaDuration: media.duration,
mediaBuffer: file
})
}
media.src = blobURL;
})
}));
console.log(media);
mediaSource.addEventListener("sourceopen", sourceOpen);
video.src = URL.createObjectURL(mediaSource);
async function sourceOpen(event) {
if (MediaSource.isTypeSupported(mimeCodec)) {
const sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
for (let chunk of media) {
await new Promise(resolve => {
sourceBuffer.appendBuffer(chunk.mediaBuffer);
sourceBuffer.onupdateend = e => {
sourceBuffer.onupdateend = null;
sourceBuffer.timestampOffset += chunk.mediaDuration;
console.log(mediaSource.duration);
resolve()
}
})
}
mediaSource.endOfStream();
}
else {
console.warn(mimeCodec + " not supported");
}
};
})()
</script>
</body>
</html>
It seems to be a codec issue and the method for reading data. When you receive a video blob, you need to convert/store it using a FileReader, this worked for me. For best codec support I needed to use the VP8 codec (please inform me if you know a better one).
This is my working example where I use a MediaRecorder to record the webcam then paste the video blobs into a MediaSource.
const video1 = document.getElementById('video1');
const video2 = document.getElementById('video2');
const mediaSource = new MediaSource();
video2.src = URL.createObjectURL(mediaSource);
mediaSource.addEventListener('sourceopen', sourceOpen);
function sourceOpen(openargs) {
navigator.mediaDevices
.getUserMedia({ audio: false, video: true })
.then(function (stream) {
video1.srcObject = stream;
var options = { mimeType: 'video/webm; codecs=vp8' };
var mediaRecorder = new MediaRecorder(stream, options);
var sourceBuffer = null;
mediaRecorder.ondataavailable = function (e) {
if (sourceBuffer == null) {
sourceBuffer = mediaSource.addSourceBuffer(mediaRecorder.mimeType);
window.sourceBuffer = sourceBuffer;
}
var reader = new FileReader();
reader.addEventListener("loadend", function () {
var arr = new Uint8Array(reader.result);
sourceBuffer.appendBuffer(arr);
});
reader.readAsArrayBuffer(e.data);
};
mediaRecorder.start(5000);
});
}
video {
width: 320px;
height: 180px;
}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title></title>
</head>
<body>
<video id="video1" controls autoplay muted></video><br />
<video id="video2" controls autoplay muted></video>
</body>
</html>

Loading audio files through URLs from remote locations (Opera)?

Maybe someone knows why codepend does not load audio files from URLs? (I do not have pro codepen, so I can't use direct uploading of files to pen).
I have this Audio "loader" implementation in my program:
// Audio loader implementation.
window.onload = init;
let context;
let bufferLoader;
let greenBuffer = null;
let redBuffer = null;
let blueBuffer = null;
let yellowBuffer = null;
let dohBuffer = null;
let woohooBuffer = null;
let excellentBuffer = null;
let superDohBuffer = null;
// Buffer loader class taken from https://www.html5rocks.com/en/tutorials/webaudio/intro/
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = new Array();
this.loadCount = 0;
}
BufferLoader.prototype.loadBuffer = function(url, index) {
// Load buffer asynchronously
let request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
let loader = this;
request.onload = function() {
// Asynchronously decode the audio file data in request.response
loader.context.decodeAudioData(
request.response,
function(buffer) {
if (!buffer) {
alert('error decoding file data: ' + url);
return;
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function(error) {
console.error('decodeAudioData error', error);
}
);
}
request.onerror = function() {
alert('BufferLoader: XHR error');
}
request.send();
}
BufferLoader.prototype.load = function() {
for (let i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
}
function init() {
try {
// Fix up for prefixing
window.AudioContext = window.AudioContext||window.webkitAudioContext;
context = new AudioContext();
}
catch(e) {
alert('Web Audio API is not supported in this browser');
}
bufferLoader = new BufferLoader(
context,
[
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound1.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound2.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound3.mp3',
'https://cors-anywhere.herokuapp.com/https://s3.amazonaws.com/freecodecamp/simonSound4.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doh.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/woohoo.mp3',
'https://cors-anywhere.herokuapp.com/http://springfieldfiles.com/sounds/burns/excellnt.mp3',
'https://cors-anywhere.herokuapp.com/http://www.springfieldfiles.com/sounds/homer/doheth.mp3',
],
setBuffers
);
bufferLoader.load();
}
function setBuffers(bufferList){
greenBuffer = bufferList[0];
redBuffer = bufferList[1];
blueBuffer = bufferList[2];
yellowBuffer = bufferList[3];
dohBuffer = bufferList[4];
woohooBuffer = bufferList[5];
excellentBuffer = bufferList[6];
superDohBuffer = bufferList[7];
}
If I use this code locally (not on codepen), it works fine. It loads those files and later I can play those audio files how I want. But if I run it on codepen, it throws this (note I also prepended https://cors-anywhere.herokuapp.com/ to URLs to bypass CORS):
console_runner-079c09a….js:1 decodeAudioData error DOMException: Unable to decode audio data
(anonymous) # console_runner-079c09a….js:1
(anonymous) # pen.js:80
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
index.html:1 Uncaught (in promise) DOMException: Unable to decode audio data
Full pen can be checked here: https://codepen.io/andriusl/pen/proxKj
Update.
It seems this is related with browsers. AudioContext does not properly work with Opera browser, so this question is more oriented to browser than codepen itself.

Play wav file as bytes received from server

I am receiving a raw data wave file from server, and I need to play this array of bytes on the client side.
I tried to use decodeAudioData like in this link but i got the error :
DOMException : Unable to decode Audio data.
It is logical because my raw data is not a regular mp3 file, it is a wave that needs to be played with 8000Hz rate and 1 channel and 16bits per sample.
Is there a function to play a byte array received from server with a certain rate and a number of channels
I managed to play the bytes on browser using this method :
function playWave(byteArray) {
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var myAudioBuffer = audioCtx.createBuffer(1, byteArray.length, 8000);
var nowBuffering = myAudioBuffer.getChannelData(0);
for (var i = 0; i < byteArray.length; i++) {
nowBuffering[i] = byteArray[i];
}
var source = audioCtx.createBufferSource();
source.buffer = myAudioBuffer;
source.connect(audioCtx.destination);
source.start();
}

Silence when Playing AAC Chunks

I'm attempting to use Aurora.JS to play audio received from a streaming AAC-encoded source. I'm successfully pulling chunked data, and trying to feed it into a custom emitter, but no audio is actually playing.
Maybe I'm missing something very simple. Here's a sample of what I'm trying to do:
http://jsfiddle.net/Rc6Su/4/
(You're almost certainly gonna get a CORS error when hitting "Play" because the source is cross-domain. The only way I can easily get around that is using this plugin: https://chrome.google.com/webstore/detail/allow-control-allow-origi/nlfbmbojpeacfghkpbjhddihlkkiljbi/related?hl=en)
Before you mention it, this is going into a PhoneGap app and so the cross-domain issue isn't going to be a problem.
The problem code is somewhere in here:
var aurora_source = null;
var player = null;
function make_noise(chunk) {
var uarr = (function (chunk) {
var buf = new ArrayBuffer(chunk.length * 2); // 2 bytes for each character
var bufView = new Uint8Array(buf);
for (var i=0, strLen=chunk.length; i<strLen; i++) {
bufView[i] = chunk.charCodeAt(i);
}
return buf;
})(chunk);
var abData = new AV.Buffer(uarr);
if (!aurora_source) {
var MySource = AV.EventEmitter.extend ({
start : function () {
this.emit('data', abData);
},
pause : function () {
},
reset : function () {
}
});
aurora_source = new MySource();
asset = new AV.Asset(aurora_source);
player = new AV.Player(asset);
player.play();
} else {
$("#debug").append("emit data");
$("#debug").append("\n");
aurora_source.emit('data', abData);
}
}
Could not get audio to play, but found at least that
bufView[i] = chunk.charCodeAt(i);
may have to be replaced by
bufView[i] = chunk.charCodeAt(i) & 0xff;
see What does charCodeAt(...) & 0xff accomplish?
hope it helps.

javascript readAsArrayBuffer returns empty Array Buffer

I am trying to read a local file using the FileReader readAsArrayBuffer property.
The read is success and in the "onload" callback, I see the Array Buffer object in reader.result. But the Array Buffer is just empty. The length is set, but not the data. How do I get this data?
Here is my code
<!DOCTYPE html>
<html>
<body>
<input type="file" id="file" />
</body>
<script>
function handleFileSelect(evt) {
var files = evt.target.files; // FileList object
var selFile = files[0];
var reader = new FileReader();
reader.onload = function(e) {
console.log(e.target.result);
};
reader.onerror = function(e) {
console.log(e);
};
reader.readAsArrayBuffer(selFile);
}
document.getElementById('file').addEventListener('change', handleFileSelect, false);
</script>
</html>
the console output for reader.result
e.target.result
ArrayBuffer {}
e.target.result.byteLength
25312
Can anyone tell me how to get this data?
is there some security issue?
There is no error, the onerror is not executed.
From comments: Can you please let me know how to access the buffer contents? I am actually trying to play an audio file using AudioContext... For that I would need the buffer data...
Here is how to read array buffer and convert it into binary string,
function onfilechange(evt) {
var reader = new FileReader();
reader.onload = function(evt) {
var chars = new Uint8Array(evt.target.result);
var CHUNK_SIZE = 0x8000;
var index = 0;
var length = chars.length;
var result = '';
var slice;
while (index < length) {
slice = chars.subarray(index, Math.min(index + CHUNK_SIZE, length));
result += String.fromCharCode.apply(null, slice);
index += CHUNK_SIZE;
}
// Here you have file content as Binary String in result var
};
reader.readAsArrayBuffer(evt.target.files[0]);
}
If you try to print ArrayBuffer via console.log you always get empty object {}
Well, playing a sound using the AudioContext stuff isn't actually that hard.
Set up the context.
Load any data into the buffer (e.g. FileReader for local files, XHR for remote stuff).
Setup a new source, and start it.
All in all, something like this:
var context = new(window.AudioContext || window.webkitAudioContext)();
function playsound(raw) {
console.log("now playing a sound, that starts with", new Uint8Array(raw.slice(0, 10)));
context.decodeAudioData(raw, function (buffer) {
if (!buffer) {
console.error("failed to decode:", "buffer null");
return;
}
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
source.start(0);
console.log("started...");
}, function (error) {
console.error("failed to decode:", error);
});
}
function onfilechange(then, evt) {
var reader = new FileReader();
reader.onload = function (e) {
console.log(e);
then(e.target.result);
};
reader.onerror = function (e) {
console.error(e);
};
reader.readAsArrayBuffer(evt.target.files[0]);
}
document.getElementById('file')
.addEventListener('change', onfilechange.bind(null, playsound), false);
See this live in a jsfiddle, which works for me in Firefox and Chrome.
I threw in a console.log(new Uint8Array()) for good measure, as browser will usually log the contents directly (if the buffer isn't huge).
For other stuff that you can do with ArrayBuffers, see e.g. the corresponding MDN documentation.

Categories

Resources