I am developing a web application that downloads encrypted chunks of video data. After downloading, I have to decrypt the chunks and play the video, but I cannot make the user wait for all of the decryption to finish, so I am using the Media Source (MediaSource) API. It works, but I get the following error after decrypting the last chunk:
"Uncaught DOMException: Failed to execute 'addSourceBuffer' on 'MediaSource': This MediaSource has reached the limit of SourceBuffer objects it can handle. No additional SourceBuffer objects may be added.(…)"
<script type="text/javascript">
//////////
var no_of_files = 0;
var no_of_dlfiles = 0;
var FilesURL = [];
var files_str = 'video/vid_1.webm, video/vid_2.webm, video/vid_3.webm, video/vid_4.webm, video/vid_5.webm';
var file_counter = 0;
var mimeCodec = 'video/webm; codecs="vorbis,vp8"';
var passkey = "014bcbc0e15c4fc68b098f9b16f62bb7shahbaz.hansinfotech#gmail.com";
FilesURL = files_str.split(',');
no_of_files = FilesURL.length;
var player = document.getElementById('videoplayer');

if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
    var mediaSource = new MediaSource;
    //console.log(mediaSource.readyState); // closed
    player.src = URL.createObjectURL(mediaSource);
    mediaSource.addEventListener('sourceopen', sourceOpen);
} else {
    console.error('Unsupported MIME type or codec: ', mimeCodec);
}
//////////
function sourceOpen (_) {
    console.log("start");
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    sourceBuffer.mode = "sequence";

    function WriteDatatoTemp()
    {
        //console.log(this.readyState); // open
        if (file_counter < FilesURL.length)
        {
            console.log(file_counter);
            no_of_dlfiles++;
            $("#decryptionRatio").text(no_of_dlfiles + " of " + no_of_files);
            $("#decryption_status").show();
            getFileObject(FilesURL[file_counter], function (fileObject) {
                //
                var outputFile = fileObject;
                var reader = new FileReader();
                reader.onloadend = function(e){
                    var decrypted_data = JSON.parse(CryptoJS.AES.decrypt(e.target.result, passkey, {format: CryptoJSAesJson}).toString(CryptoJS.enc.Utf8));
                    var byteArray = Base64Binary.decodeArrayBuffer(decrypted_data);
                    sourceBuffer.addEventListener('updateend', function(){
                        file_counter++;
                        // console.log(file_counter);
                        if (player.paused)
                        {
                            player.play();
                        }
                        if (file_counter == FilesURL.length - 1)
                        {
                            mediaSource.endOfStream();
                        }
                        WriteDatatoTemp();
                    });
                    try
                    {
                        while (!sourceBuffer.updating)
                        {
                            sourceBuffer.appendBuffer(byteArray);
                        }
                    }
                    catch(e)
                    {
                        console.log(e);
                    }
                };
                reader.readAsText(outputFile);
                //
            });
        }
    }
    WriteDatatoTemp();
}
///
var getFileBlob = function (url, cb) {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url);
    xhr.responseType = "blob";
    xhr.addEventListener('load', function() {
        cb(xhr.response);
    });
    xhr.send();
};
var blobToFile = function (blob, name) {
    blob.lastModifiedDate = new Date();
    blob.name = name;
    return blob;
};
var getFileObject = function(filePathOrUrl, cb) {
    getFileBlob(filePathOrUrl, function (blob) {
        cb(blobToFile(blob, 'vid.webm'));
    });
};
</script>
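For comparison, here is a stripped-down sketch of the append flow I think I should be aiming for: a single SourceBuffer, one appendBuffer() per chunk, the next chunk queued only from 'updateend', and endOfStream() called only after the last chunk has been appended. It reuses the variables and helpers from the code above; appendNextChunk is just a placeholder name.
function sourceOpen() {
    var mediaSource = this;
    var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec); // called exactly once per MediaSource
    sourceBuffer.mode = "sequence";
    sourceBuffer.addEventListener('updateend', function () {
        file_counter++;
        if (player.paused) {
            player.play();
        }
        if (file_counter < FilesURL.length) {
            appendNextChunk();           // queue the next chunk only after the previous append finished
        } else {
            mediaSource.endOfStream();   // signal end of stream only after the last chunk is appended
        }
    });
    function appendNextChunk() {
        getFileObject(FilesURL[file_counter], function (fileObject) {
            var reader = new FileReader();
            reader.onloadend = function (e) {
                var decrypted_data = JSON.parse(CryptoJS.AES.decrypt(e.target.result, passkey, {format: CryptoJSAesJson}).toString(CryptoJS.enc.Utf8));
                var byteArray = Base64Binary.decodeArrayBuffer(decrypted_data);
                sourceBuffer.appendBuffer(byteArray); // a single append; 'updateend' drives the next one
            };
            reader.readAsText(fileObject);
        });
    }
    appendNextChunk();
}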
Related
I have audio playing through the Web Audio API and want to record it at half speed, then later speed it back up to normal on the back-end. The output I get is pitched down.
var mr = null;
var source = null;
var stop = function(){
mr.stop();
source.stop();
}
var save = function(blob, filename){
var link = document.createElement('a');
link.href = URL.createObjectURL(blob);
link.download = filename || 'data.json';
link.click();
}
var context = new ( window.AudioContext || window.webkitAudioContext )();
var dest = context.createMediaStreamDestination();
function loadSound(url) {
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(buffer) {
source = context.createBufferSource();
source.buffer = buffer;
source.playbackRate.value = 0.5;
source.connect(dest);
mr = new MediaRecorder(dest.stream);
mr.start();
source.start(0);
var chunks = [];
mr.ondataavailable = function(e){
chunks.push(e.data);
}
mr.onstop = function(e) {
var blob = new Blob(chunks, { 'type' : 'video/webm' });
save(blob,"a.webm");
}
setTimeout(stop, 10000);
}, null);
}
request.send();
}
loadSound("https://storage.googleapis.com/frulix-dev.appspot.com/words");
I have created a fiddle for this case: https://jsfiddle.net/thenectorgod/v3ogtwvp/4/.
This does not happen for a normal-speed recording: that audio comes out clear after speeding it up to 2x.
Can anyone suggest how to get proper audio in this case?
I've got an array of URLs [URL1, URL2, URL3, ...]: each element is a link to one of the chunks of the same file. Each chunk is encrypted separately, with the same key as all the other chunks.
I download each chunk (in a forEach loop) with an XMLHttpRequest. On load:
each chunk is first decrypted
then each chunk is converted to an ArrayBuffer (source)
each ArrayBuffer is pushed to an array (source)
when the first three steps are done for every chunk (detected by a counter incremented in step #1 reaching the array's length), a blob is constructed from the array
the blob is saved as a file with the FileReader API & filesaver.js
If the file consists of a single chunk, everything works fine.
But with multiple chunks, steps #1 and #2 are OK, yet only the last ArrayBuffer seems to end up in the array. What am I missing?
Below is my code:
// var for incrementation in forEach funtion
var chunkdownloaded = 0;
// 'clearfileurl' is the array of url's chunks :[URL1, URL2, URL3,...]
clearfileurl.forEach(function(entry) {
var xhr = new XMLHttpRequest();
var started_at = new Date();
xhr.open('GET', entry, true);
xhr.responseType = 'text';
// request progress
xhr.onprogress = function(pe) {
if (pe.lengthComputable) {
downloaderval.set((pe.loaded / pe.total) * 100);
}
};
// on request's success
xhr.onload = function(e) {
if (this.status == 200) {
chunkdownloaded+=1;
var todecrypt = this.response;
// decrypt request's response: get a dataURI
try {
var bytesfile = CryptoJS.AES.decrypt(todecrypt.toString(), userKey);
var decryptedfile = bytesfile.toString(CryptoJS.enc.Utf8);
} catch(err) {
console.log (err);
return false;
}
//convert a dataURI to a Blob
var MyBlobBuilder = function() {
this.parts = [];
}
MyBlobBuilder.prototype.append = function(dataURI) {
//function dataURItoBlob(dataURI) {
// convert base64 to raw binary data held in a string
var byteString = atob(dataURI.split(',')[1]);
// separate out the mime component
// var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
this.parts.push(ab);
console.log('parts', this.parts)
this.blob = undefined; // Invalidate the blob
}
MyBlobBuilder.prototype.getBlob = function() {
if (!this.blob) {
console.log (this.parts);
this.blob = new Blob(this.parts);
}
return this.blob;
};
var myBlobBuilder = new MyBlobBuilder();
myBlobBuilder.append(decryptedfile);
// if all chunks are downloaded
if (chunkdownloaded === clearfileurl.length) {
// get the blob
var FinalFile = myBlobBuilder.getBlob();
// launch consturction of a file with'FinalFile' inside FileReader API
var reader = new FileReader();
reader.onload = function(e){
// build & save on client the final file with 'file-saver' library
var FileSaver = require('file-saver');
var file = new File([FinalFile], clearfilename, {type: clearfiletype});
FileSaver.saveAs(file);
};
reader.readAsText(FinalFile);
} else {
console.log('not yet');
}
}
};
// sending XMLHttpRequest
xhr.send();
});
You need to move the declaration of MyBlobBuilder (and the single myBlobBuilder instance) out of the forEach loop; as written, a new builder is created inside every chunk's onload handler, so each builder only ever receives one ArrayBuffer. Try this:
// var for incrementation in forEach funtion
var chunkdownloaded = 0;
//convert a dataURI to a Blob
var MyBlobBuilder = function() {
this.parts = [];
}
MyBlobBuilder.prototype.append = function(dataURI, index) {
//function dataURItoBlob(dataURI) {
// convert base64 to raw binary data held in a string
var byteString = atob(dataURI.split(',')[1]);
// separate out the mime component
// var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
// write the bytes of the string to an ArrayBuffer
var ab = new ArrayBuffer(byteString.length);
var ia = new Uint8Array(ab);
for (var i = 0; i < byteString.length; i++) {
ia[i] = byteString.charCodeAt(i);
}
this.parts[index] = ab;
console.log('parts', this.parts)
this.blob = undefined; // Invalidate the blob
}
MyBlobBuilder.prototype.getBlob = function() {
if (!this.blob) {
console.log (this.parts);
this.blob = new Blob(this.parts);
}
return this.blob;
};
var myBlobBuilder = new MyBlobBuilder();
// 'clearfileurl' is the array of url's chunks :[URL1, URL2, URL3,...]
clearfileurl.forEach(function(entry, index) {
var xhr = new XMLHttpRequest();
var started_at = new Date();
xhr.open('GET', entry, true);
xhr.responseType = 'text';
// request progress
xhr.onprogress = function(pe) {
if (pe.lengthComputable) {
downloaderval.set((pe.loaded / pe.total) * 100);
}
};
// on request's success
xhr.onload = function(e) {
if (this.status == 200) {
chunkdownloaded+=1;
var todecrypt = this.response;
// decrypt request's response: get a dataURI
try {
var bytesfile = CryptoJS.AES.decrypt(todecrypt.toString(), userKey);
var decryptedfile = bytesfile.toString(CryptoJS.enc.Utf8);
} catch(err) {
console.log (err);
return false;
}
myBlobBuilder.append(decryptedfile, index);
// if all chunks are downloaded
if (chunkdownloaded === clearfileurl.length) {
// get the blob
var FinalFile = myBlobBuilder.getBlob();
// launch consturction of a file with'FinalFile' inside FileReader API
var reader = new FileReader();
reader.onload = function(e){
// build & save on client the final file with 'file-saver' library
var FileSaver = require('file-saver');
var file = new File([FinalFile], clearfilename, {type: clearfiletype});
FileSaver.saveAs(file);
};
reader.readAsText(FinalFile);
} else {
console.log('not yet');
}
}
};
// sending XMLHttpRequest
xhr.send();
});
Edit: I also updated the append function (writing each part at its chunk's index rather than pushing) to ensure that the chunks end up in the correct order.
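For what it's worth, the same ordering guarantee can be sketched with fetch() and Promise.all(), since Promise.all() resolves its results in the order of the input array regardless of which download finishes first. This is only a sketch: it reuses MyBlobBuilder, userKey, clearfilename and clearfiletype from above and skips the progress reporting.
var myBlobBuilder = new MyBlobBuilder();
Promise.all(clearfileurl.map(function (url) {
    return fetch(url)
        .then(function (response) { return response.text(); })
        .then(function (todecrypt) {
            // decrypt each chunk to its dataURI, exactly as in the XHR version
            return CryptoJS.AES.decrypt(todecrypt, userKey).toString(CryptoJS.enc.Utf8);
        });
})).then(function (dataURIs) {
    // results arrive in input order, so the parts end up in chunk order
    dataURIs.forEach(function (dataURI, index) {
        myBlobBuilder.append(dataURI, index);
    });
    var FileSaver = require('file-saver');
    var file = new File([myBlobBuilder.getBlob()], clearfilename, {type: clearfiletype});
    FileSaver.saveAs(file);
});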
I want to mix two audio sources into a single source, putting one song as the background of the other.
For example, I have this input:
<input id="files" type="file" name="files[]" multiple onchange="handleFilesSelect(event)"/>
And a script to decode these files:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new window.AudioContext();
var sources = [];
var files = [];
var mixed = {};
function handleFilesSelect(event){
if(event.target.files.length <= 1)
return false;
files = event.target.files;
readFiles(mixAudioSources);
}
function readFiles(index, callback){
var freader = new FileReader();
var i = index ? index : 0;
freader.onload = function (e) {
context.decodeAudioData(e.target.result, function (buf) {
sources[i] = context.createBufferSource();
sources[i].connect(context.destination);
sources[i].buffer = buf;
if(files.length > i+1){
readFiles(i + 1, callback);
} else {
if(callback){
callback();
}
}
});
};
freader.readAsArrayBuffer(files[i]);
}
function mixAudioSources(){
//So in this scenario we have two decoded audio sources in the "sources" array.
//How can we mix those "sources" into the "mixed" variable, putting "sources[0]" as the background of "sources[1]"?
}
So how can I mix these sources into one source? For example, I have two files; how can I put one source as the background of the other and put this mix into a single source?
Another scenario: if I read an input stream from the microphone, for example, and I want to put this input over a background song (some kind of karaoke), is it possible to do this work on the client with HTML5 support? What about performance? Or would it be better to mix these audio sources on the server side?
If it is possible, what would a possible implementation of the mixAudioSources function look like?
Thanks.
Two approaches, originally posted at Is it possible to mix multiple audio files on top of each other preferably with javascript, adjusted here to process File objects at the change event of an <input type="file"> element.
The first approach utilizes OfflineAudioContext(), AudioContext.createBufferSource(), AudioContext.createMediaStreamDestination(), the Promise constructor, Promise.all(), and MediaRecorder() to mix the audio tracks, then offers the mixed audio file for download.
var div = document.querySelector("div");
function handleFilesSelect(input) {
div.innerHTML = "loading audio tracks.. please wait";
var files = Array.from(input.files);
var duration = 60000;
var chunks = [];
var audio = new AudioContext();
var mixedAudio = audio.createMediaStreamDestination();
var player = new Audio();
var context;
var recorder;
var description = "";
player.controls = "controls";
function get(file) {
description += file.name.replace(/\..*|\s+/g, "");
return new Promise(function(resolve, reject) {
var reader = new FileReader;
reader.readAsArrayBuffer(file);
reader.onload = function() {
resolve(reader.result)
}
})
}
function stopMix(duration, ...media) {
setTimeout(function(media) {
media.forEach(function(node) {
node.stop()
})
}, duration, media)
}
Promise.all(files.map(get)).then(function(data) {
var len = Math.max.apply(Math, data.map(function(buffer) {
return buffer.byteLength
}));
context = new OfflineAudioContext(2, len, 44100);
return Promise.all(data.map(function(buffer) {
return audio.decodeAudioData(buffer)
.then(function(bufferSource) {
var source = context.createBufferSource();
source.buffer = bufferSource;
source.connect(context.destination);
return source.start()
})
}))
.then(function() {
return context.startRendering()
})
.then(function(renderedBuffer) {
return new Promise(function(resolve) {
var mix = audio.createBufferSource();
mix.buffer = renderedBuffer;
mix.connect(audio.destination);
mix.connect(mixedAudio);
recorder = new MediaRecorder(mixedAudio.stream);
recorder.start(0);
mix.start(0);
div.innerHTML = "playing and recording tracks..";
// stop playback and recorder in 60 seconds
stopMix(duration, mix, recorder)
recorder.ondataavailable = function(event) {
chunks.push(event.data);
};
recorder.onstop = function(event) {
var blob = new Blob(chunks, {
"type": "audio/ogg; codecs=opus"
});
console.log("recording complete");
resolve(blob)
};
})
})
.then(function(blob) {
console.log(blob);
div.innerHTML = "mixed audio tracks ready for download..";
var audioDownload = URL.createObjectURL(blob);
var a = document.createElement("a");
a.download = description + "." + blob.type.replace(/.+\/|;.+/g, "");
a.href = audioDownload;
a.innerHTML = a.download;
document.body.appendChild(a);
a.insertAdjacentHTML("afterend", "<br>");
player.src = audioDownload;
document.body.appendChild(player);
})
})
.catch(function(e) {
console.log(e)
});
}
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<input id="files"
type="file"
name="files[]"
accept="audio/*"
multiple
onchange="handleFilesSelect(this)" />
<div></div>
</body>
</html>
The second approach uses AudioContext.createChannelMerger() and AudioContext.createChannelSplitter().
var div = document.querySelector("div");
function handleFilesSelect(input) {
div.innerHTML = "loading audio tracks.. please wait";
var files = Array.from(input.files);
var chunks = [];
var channels = [
[0, 1],
[1, 0]
];
var audio = new AudioContext();
var player = new Audio();
var merger = audio.createChannelMerger(2);
var splitter = audio.createChannelSplitter(2);
var mixedAudio = audio.createMediaStreamDestination();
var duration = 60000;
var context;
var recorder;
var audioDownload;
var description = "";
player.controls = "controls";
function get(file) {
description += file.name.replace(/\..*|\s+/g, "");
console.log(description);
return new Promise(function(resolve, reject) {
var reader = new FileReader;
reader.readAsArrayBuffer(file);
reader.onload = function() {
resolve(reader.result)
}
})
}
function stopMix(duration, ...media) {
setTimeout(function(media) {
media.forEach(function(node) {
node.stop()
})
}, duration, media)
}
Promise.all(files.map(get)).then(function(data) {
return Promise.all(data.map(function(buffer, index) {
return audio.decodeAudioData(buffer)
.then(function(bufferSource) {
var channel = channels[index];
var source = audio.createBufferSource();
source.buffer = bufferSource;
source.connect(splitter);
splitter.connect(merger, channel[0], channel[1]);
return source
})
}))
.then(function(audionodes) {
merger.connect(mixedAudio);
merger.connect(audio.destination);
recorder = new MediaRecorder(mixedAudio.stream);
recorder.start(0);
audionodes.forEach(function(node, index) {
node.start(0)
});
div.innerHTML = "playing and recording tracks..";
stopMix(duration, ...audionodes, recorder);
recorder.ondataavailable = function(event) {
chunks.push(event.data);
};
recorder.onstop = function(event) {
var blob = new Blob(chunks, {
"type": "audio/ogg; codecs=opus"
});
audioDownload = URL.createObjectURL(blob);
var a = document.createElement("a");
a.download = description + "." + blob.type.replace(/.+\/|;.+/g, "");
a.href = audioDownload;
a.innerHTML = a.download;
player.src = audioDownload;
document.body.appendChild(a);
document.body.appendChild(player);
};
})
})
.catch(function(e) {
console.log(e)
});
}
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<input id="files"
type="file"
name="files[]"
accept="audio/*"
multiple onchange="handleFilesSelect(this)" />
<div></div>
</body>
</html>
I just want to complement the excellent answer from guest271314 and post here a solution, based on that answer, for the second scenario (where the second source is microphone input). It is effectively client-side karaoke. Script:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new window.AudioContext();
var playbackTrack = null;
function handleFileSelect(event){
var file = event.files[0];
var freader = new FileReader();
freader.onload = function (e) {
context.decodeAudioData(e.target.result, function (buf) {
playbackTrack = context.createBufferSource();
playbackTrack.buffer = buf;
var karaokeButton = document.getElementById("karaoke_start");
karaokeButton.style.display = "inline-block";
karaokeButton.addEventListener("click", function(){
startKaraoke();
});
});
};
freader.readAsArrayBuffer(file);
}
function stopMix(duration, mediaRecorder) {
setTimeout(function(mediaRecorder) {
mediaRecorder.stop();
context.close();
}, duration, mediaRecorder)
}
function startKaraoke(){
navigator.mediaDevices.getUserMedia({audio: true,video: false})
.then(function(stream) {
var mixedAudio = context.createMediaStreamDestination();
var merger = context.createChannelMerger(2);
var splitter = context.createChannelSplitter(2);
var duration = 5000;
var chunks = [];
var channel1 = [0,1];
var channel2 = [1, 0];
var gainNode = context.createGain();
playbackTrack.connect(gainNode);
gainNode.connect(splitter);
gainNode.gain.value = 0.5; // From 0 to 1
splitter.connect(merger, channel1[0], channel1[1]);
var microphone = context.createMediaStreamSource(stream);
microphone.connect(splitter);
splitter.connect(merger, channel2[0], channel2[1]);
merger.connect(mixedAudio);
merger.connect(context.destination);
playbackTrack.start(0);
var mediaRecorder = new MediaRecorder(mixedAudio.stream);
mediaRecorder.start(1);
mediaRecorder.ondataavailable = function (event) {
chunks.push(event.data);
}
mediaRecorder.onstop = function(event) {
var player = new Audio();
player.controls = "controls";
var blob = new Blob(chunks, {
"type": "audio/mp3"
});
audioDownload = URL.createObjectURL(blob);
var a = document.createElement("a");
a.download = "karaokefile." + blob.type.replace(/.+\/|;.+/g, "");
a.href = audioDownload;
a.innerHTML = a.download;
player.src = audioDownload;
document.body.appendChild(a);
document.body.appendChild(player);
};
stopMix(duration, mediaRecorder);
})
.catch(function(error) {
console.log('error: ' + error);
});
}
And the HTML:
<input id="file"
type="file"
name="file"
accept="audio/*"
onchange="handleFileSelect(this)" />
<span id="karaoke_start" style="display:none;background-color:yellow;cursor:pointer;">start karaoke</span>
Here is the working plnkr example: plnkr
I am having a problem sending my base64 image from PhoneGap (iOS) to Firebase Storage. The main problem is that Firebase Storage only accepts a Blob or File as an attachment.
Here's my code for the camera function (cordova-plugin-camera):
function GetCamera(){
navigator.camera.getPicture( cameraSuccess, cameraError, {quality :50,
destinationType: Camera.DestinationType.DATA_URL,
encodingType: Camera.EncodingType.JPEG,
saveToPhotoAlbum: true});}
Function to convert base64 to a Blob:
function b64toblob(b64_data, content_type) {
content_type = content_type || '';
var slice_size = 512;
var byte_characters = atob(b64_data);
var byte_arrays = [];
for(var offset = 0; offset < byte_characters.length; offset += slice_size) {
var slice = byte_characters.slice(offset, offset + slice_size);
var byte_numbers = new Array(slice.length);
for(var i = 0; i < slice.length; i++) {
byte_numbers[i] = slice.charCodeAt(i);
}
var byte_array = new Uint8Array(byte_numbers);
byte_arrays.push(byte_array);
}
var blob = new Blob(byte_arrays, {type: content_type});
return blob;};
Camera success function. Take note that imageblob is a global variable:
function cameraSuccess(imageData){
document.getElementById('Attachment1').innerHTML = "Attachment: True";
var image = imageData;
imageblob = b64toblob(image,"image/jpeg");}
Putting the blob into Firebase Storage:
try {
    var storageRef = storage.ref().child('fire');
    var uploadTask = storageRef.put(imageblob);
    uploadTask.on('state_changed', null, null, function(){
        var downloadURL = uploadTask.snapshot.downloadURL;
        console.log("downloadURL :" + downloadURL);
    });
} catch (err) {
    console.log(err);
}
I have tried every single thing, but it's not working. I really need your help; I am out of ideas.
The Cordova camera plugin doesn't return a File object; that is a limitation of the plugin.
But it returns all the details about the image, and using those you can create a Blob or File object.
Reference for creating a blob from a file URL:
var getFileBlob = function (url, cb) {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "blob";
xhr.addEventListener('load', function() {
cb(xhr.response);
});
xhr.send();
};
var blobToFile = function (blob, name) {
blob.lastModifiedDate = new Date();
blob.name = name;
return blob;
};
var getFileObject = function(filePathOrUrl, cb) {
getFileBlob(filePathOrUrl, function (blob) {
cb(blobToFile(blob, 'test.jpg')); // Second argument is name of the image
});
};
Calling the function to get the file blob:
getFileObject('img/test.jpg', function (fileObject) { // First argument is path of the file
console.log(fileObject);
});
In your camera success function, try this:
function cameraSuccess(imageData){
document.getElementById('Attachment1').innerHTML = "Attachment: True";
getFileObject(imageData.nativeURL, function(fileObject) {
console.log(fileObject);
var imgName = fileObject.name;
var metadata = { contentType: fileObject.type };
var uploadFile = storageRef.child("images/" + imgName).put(fileObject, metadata);
uploadFile.on(firebase.storage.TaskEvent.STATE_CHANGED, function(snapshot) {
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log(progress);
}, function(error) {
console.log(error);
}, function() {
var imgFirebaseURL = uploadFile.snapshot.downloadURL;
console.log(imgFirebaseURL);
});
});
}
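If you are on a newer version of the Firebase web SDK where snapshot.downloadURL is no longer available, the download URL has to be fetched asynchronously instead. A sketch of that, reusing the same uploadFile task from above (the upload task is thenable):
uploadFile.then(function(snapshot) {
    // resolve the URL from the uploaded file's reference
    return snapshot.ref.getDownloadURL();
}).then(function(imgFirebaseURL) {
    console.log(imgFirebaseURL);
});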
In my web application, I have a requirement to play part of an mp3 file. This is a local web app, so I don't care about downloads, etc.; everything is stored locally.
My use case is as follows:
determine file to play
determine start and stop of the sound
load the file [I use BufferLoader]
play
Quite simple.
Right now I just grab the mp3 file, decode it in memory for use with the Web Audio API, and play it.
Unfortunately, because the mp3 files can get quite long [30 minutes of audio, for example], the decoded file in memory can take up to 900 MB. That's a bit too much to handle.
Is there any option where I could decode only part of the file? How could I detect where to start and how far to go?
I cannot anticipate the bitrate; it can be constant, but I would expect variable bitrate as well.
Here's an example of what I did:
http://tinyurl.com/z9vjy34
The code [I've tried to make it as compact as possible]:
var MediaPlayerAudioContext = window.AudioContext || window.webkitAudioContext;
var MediaPlayer = function () {
this.mediaPlayerAudioContext = new MediaPlayerAudioContext();
this.currentTextItem = 0;
this.playing = false;
this.active = false;
this.currentPage = null;
this.currentAudioTrack = 0;
};
MediaPlayer.prototype.setPageNumber = function (page_number) {
this.pageTotalNumber = page_number
};
MediaPlayer.prototype.generateAudioTracks = function () {
var audioTracks = [];
var currentBegin;
var currentEnd;
var currentPath;
audioTracks[0] = {
begin: 4.300,
end: 10.000,
path: "example.mp3"
};
this.currentPageAudioTracks = audioTracks;
};
MediaPlayer.prototype.show = function () {
this.mediaPlayerAudioContext = new MediaPlayerAudioContext();
};
MediaPlayer.prototype.hide = function () {
if (this.playing) {
this.stop();
}
this.mediaPlayerAudioContext = null;
this.active = false;
};
MediaPlayer.prototype.play = function () {
this.stopped = false;
console.trace();
this.playMediaPlayer();
};
MediaPlayer.prototype.playbackStarted = function() {
this.playing = true;
};
MediaPlayer.prototype.playMediaPlayer = function () {
var instance = this;
var audioTrack = this.currentPageAudioTracks[this.currentAudioTrack];
var newBufferPath = audioTrack.path;
if (this.mediaPlayerBufferPath && this.mediaPlayerBufferPath === newBufferPath) {
this.currentBufferSource = this.mediaPlayerAudioContext.createBufferSource();
this.currentBufferSource.buffer = this.mediaPlayerBuffer;
this.currentBufferSource.connect(this.mediaPlayerAudioContext.destination);
this.currentBufferSource.onended = function () {
instance.currentBufferSource.disconnect(0);
instance.audioTrackFinishedPlaying()
};
this.playing = true;
this.currentBufferSource.start(0, audioTrack.begin, audioTrack.end - audioTrack.begin);
this.currentAudioStartTimeInAudioContext = this.mediaPlayerAudioContext.currentTime;
this.currentAudioStartTimeOffset = audioTrack.begin;
this.currentTrackStartTime = this.mediaPlayerAudioContext.currentTime - (this.currentTrackResumeOffset || 0);
this.currentTrackResumeOffset = null;
}
else {
function finishedLoading(bufferList) {
instance.mediaPlayerBuffer = bufferList[0];
instance.playMediaPlayer();
}
if (this.currentBufferSource){
this.currentBufferSource.disconnect(0);
this.currentBufferSource.stop(0);
this.currentBufferSource = null;
}
this.mediaPlayerBuffer = null;
this.mediaPlayerBufferPath = newBufferPath;
this.bufferLoader = new BufferLoader(this.mediaPlayerAudioContext, [this.mediaPlayerBufferPath], finishedLoading);
this.bufferLoader.load();
}
};
MediaPlayer.prototype.stop = function () {
this.stopped = true;
if (this.currentBufferSource) {
this.currentBufferSource.onended = null;
this.currentBufferSource.disconnect(0);
this.currentBufferSource.stop(0);
this.currentBufferSource = null;
}
this.bufferLoader = null;
this.mediaPlayerBuffer = null;
this.mediaPlayerBufferPath = null;
this.currentTrackStartTime = null;
this.currentTrackResumeOffset = null;
this.currentAudioTrack = 0;
if (this.currentTextTimeout) {
clearTimeout(this.currentTextTimeout);
this.textHighlightFinished();
this.currentTextTimeout = null;
this.currentTextItem = null;
}
this.playing = false;
};
MediaPlayer.prototype.getNumberOfPages = function () {
return this.pageTotalNumber;
};
MediaPlayer.prototype.playbackFinished = function () {
this.currentAudioTrack = 0;
this.playing = false;
};
MediaPlayer.prototype.audioTrackFinishedPlaying = function () {
this.currentAudioTrack++;
if (this.currentAudioTrack >= this.currentPageAudioTracks.length) {
this.playbackFinished();
} else {
this.playMediaPlayer();
}
};
//
//
// Buffered Loader
//
// Class used to get the sound files
//
function BufferLoader(context, urlList, callback) {
this.context = context;
this.urlList = urlList;
this.onload = callback;
this.bufferList = [];
this.loadCount = 0;
}
// this allows us to handle media files with embedded artwork/id3 tags
function syncStream(node) { // should be done by api itself. and hopefully will.
var buf8 = new Uint8Array(node.buf);
buf8.indexOf = Array.prototype.indexOf;
var i = node.sync, b = buf8;
while (1) {
node.retry++;
i = b.indexOf(0xFF, i);
if (i == -1 || (b[i + 1] & 0xE0 == 0xE0 )) break;
i++;
}
if (i != -1) {
var tmp = node.buf.slice(i); //carefull there it returns copy
delete(node.buf);
node.buf = null;
node.buf = tmp;
node.sync = i;
return true;
}
return false;
}
BufferLoader.prototype.loadBuffer = function (url, index) {
// Load buffer asynchronously
var request = new XMLHttpRequest();
request.open("GET", url, true);
request.responseType = "arraybuffer";
var loader = this;
function decode(sound) {
loader.context.decodeAudioData(
sound.buf,
function (buffer) {
if (!buffer) {
alert('error decoding file data');
return
}
loader.bufferList[index] = buffer;
if (++loader.loadCount == loader.urlList.length)
loader.onload(loader.bufferList);
},
function (error) {
if (syncStream(sound)) {
decode(sound);
} else {
console.error('decodeAudioData error', error);
}
}
);
}
request.onload = function () {
// Asynchronously decode the audio file data in request.response
var sound = {};
sound.buf = request.response;
sound.sync = 0;
sound.retry = 0;
decode(sound);
};
request.onerror = function () {
alert('BufferLoader: XHR error');
};
request.send();
};
BufferLoader.prototype.load = function () {
for (var i = 0; i < this.urlList.length; ++i)
this.loadBuffer(this.urlList[i], i);
};
There is no way of streaming with decodeAudioData(); you need to use a media element with createMediaElementSource() and run your processing on that instead. decodeAudioData() cannot decode just a part of a file. @zre00ne And the mp3 will be decoded big. Very big!
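For illustration, a minimal sketch of that media-element route, assuming the same begin/end/path track structure the question uses (element, partCtx and playPart are placeholder names). The element streams and decodes the mp3 itself, so the full decoded PCM never has to sit in memory; the stop point is approximate, because timeupdate only fires a few times per second.
var partCtx = new (window.AudioContext || window.webkitAudioContext)();
var element = new Audio("example.mp3");                  // the element handles buffering/decoding
var elementSource = partCtx.createMediaElementSource(element);
elementSource.connect(partCtx.destination);

// Play only the part between `begin` and `end` (in seconds).
function playPart(begin, end) {
    element.currentTime = begin;
    element.play();
    element.ontimeupdate = function () {
        if (element.currentTime >= end) {
            element.pause();
            element.ontimeupdate = null;
        }
    };
}

playPart(4.300, 10.000);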