WebAudio play sound pops at start and end - javascript

Whenever I play a sound using code such as
// binaryData = a wave file from a websocket
let ctx = new AudioContext();
ctx.decodeAudioData(binaryData, function(audioData) {
  let source = ctx.createBufferSource();
  source.buffer = audioData;
  source.connect(ctx.destination);
  source.start(0);
});
There is a very audible click or pop between each clip played. Forget the fact that I'm trying to play real-time audio with this system; why is there a glitchy noise at the beginning and end of each sound clip played? I don't understand how this is acceptable behaviour in 2017 from an audio playback device... Is there any way to mitigate or eliminate this?
Answer
Following the answer below, here is a good set of numbers to use to reduce clicking to basically nothing. I'm not saying this works great for a tone, but it's flawless for voice.
// start of clip
// clipPlayTime may be 0 or your scheduled play time
// (gain is the AudioParam, i.e. gainNode.gain; exponential ramps cannot
// start from or end at 0, hence the 0.01 floor)
gain.setValueAtTime(0.01, clipPlayTime);
gain.exponentialRampToValueAtTime(1, clipPlayTime + 0.001);
// end of clip
gain.setValueAtTime(1, clipPlayTime + clipLength - 0.001);
gain.exponentialRampToValueAtTime(0.01, clipPlayTime + clipLength);
This creates a ramp up and a ramp down.

Use an exponentialRampToValueAtTime() to remove (or at least reduce) the clicking noise. The click happens because the waveform starts or ends at a non-zero sample value, and that discontinuity is audible; ramping the gain smooths it out.
Here's a great explanation: Web Audio, the ugly click and the human ear
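As a rough illustration, the same ramps can be wrapped in a reusable helper (playClip is a made-up name, not a standard API):
// Sketch: play a decoded AudioBuffer with short gain ramps at both ends
// to mask the start/end discontinuity. Assumes an existing AudioContext.
function playClip(ctx, audioBuffer, when) {
  const source = ctx.createBufferSource();
  const gainNode = ctx.createGain();
  source.buffer = audioBuffer;
  source.connect(gainNode);
  gainNode.connect(ctx.destination);
  const start = when || ctx.currentTime;
  const end = start + audioBuffer.duration;
  gainNode.gain.setValueAtTime(0.01, start);
  gainNode.gain.exponentialRampToValueAtTime(1, start + 0.001);
  gainNode.gain.setValueAtTime(1, end - 0.001);
  gainNode.gain.exponentialRampToValueAtTime(0.01, end);
  source.start(start);
}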
Full Example
Base example taken from: https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/decodeAudioData
<button class="play">Play</button>
<button class="stop">Stop</button>
<script type="text/javascript">
var audioCtx = new(window.AudioContext || window.webkitAudioContext)();
var source;
var play = document.querySelector('.play');
var stop = document.querySelector('.stop');
var gainNode = audioCtx.createGain();
function getData() {
source = audioCtx.createBufferSource();
var request = new XMLHttpRequest();
request.open('GET', './sample.wav', true);
request.responseType = 'arraybuffer';
request.onload = function() {
var audioData = request.response;
audioCtx.decodeAudioData(audioData, function(buffer) {
source.buffer = buffer;
source.connect(gainNode);
gainNode.connect(audioCtx.destination);
gainNode.gain.setValueAtTime(1, audioCtx.currentTime);
},
function(e) {
console.log("Error with decoding audio data" + e.err);
});
}
request.send();
}
play.onclick = function() {
getData();
source.start(0);
play.setAttribute('disabled', 'disabled');
}
stop.onclick = function() {
gainNode.gain.setValueAtTime(gainNode.gain.value, audioCtx.currentTime);
gainNode.gain.exponentialRampToValueAtTime(0.0001, audioCtx.currentTime + 1);
setTimeout(function() {
source.stop();
}, 1000)
play.removeAttribute('disabled');
}
</script>

Related

Create a popup media player using JS?

I have been trying to make an audio player that has basic controls allowing audio to be played instead of a video. It needs to be viewed in PIP mode, as well. Here is my code so far:
var audio = document.createElement('audio'); // The Audio
var canvas = document.createElement('canvas'); // New Canvas To Contain Video
canvas.width = canvas.height = 512;
var video = document.createElement('video'); // Create Video
video.srcObject = canvas.captureStream(); // Make Video Reflect The Canvas
video.muted = true;
function showPictureInPictureWindow() {
  const image = new Image();
  image.crossOrigin = true;
  image.src = [...navigator.mediaSession.metadata.artwork].pop().src;
  image.decode();
  canvas.getContext('2d').drawImage(image, 0, 0, 512, 512);
  video.onloadedmetadata = function() {
    video.play();
    video.requestPictureInPicture();
  };
}
window.VID = video;
window.AU = audio;
function playAudio() {
  audio.src = "mysong.mp3";
  // Update Media Session metadata
  navigator.mediaSession.metadata = new MediaMetadata({
    title: "Audio Title",
    artist: "MEEEEEEE",
    album: "LOOOL",
    artwork: [{src: "graphics/128x128.png", sizes: "128x128", type: "image/png"}]
  });
  // Play audio
  audio.play();
  // Show track album in a Picture-in-Picture window
  showPictureInPictureWindow();
}
window.VID.requestPictureInPicture();
playAudio();
Any help that you can give me would be very much appreciated. (I already have the pause and play functions but I didn't include them for message size)
PS: (I don't know why, but earlier I was getting an error that said "JavaScript exception: Failed to execute 'requestPictureInPicture' on 'HTMLVideoElement': Metadata for the video element are not loaded yet" and now I have no errors at all... and no music either...)
PPS: (I fixed some of the bugs... and now I have the original error again.)
I had the same problem as you, but I solved it.
My solution: my guess is that the video data had not loaded yet when the requestPictureInPicture function was executed, so call requestPictureInPicture from the video's loadeddata event handler instead.
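A minimal sketch of that fix, reusing the video element from the question:
video.addEventListener('loadeddata', function() {
  // The first frame is decoded by now, so metadata is available and
  // requestPictureInPicture no longer throws the "not loaded yet" error.
  video.play();
  video.requestPictureInPicture();
});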

Web Audio API is not working properly in the Google Chrome browser

I've created a simple music player, which creates a bufferArray for a particular audio URL to play the music.
It works fine in many of my cellphone's browsers, so I guess there is no cross-origin issue with the audio URL; however, Chrome is not playing the audio.
I've also created a Uint8Array for plotting frequency data inside a canvas; while many browsers plot the frequency graph in the canvas successfully, Chrome does not!
Take a look at what I've tried so far!
```
<!DOCTYPE html>
<html>
<head>
  <title>Page Title</title>
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
  <center>
    <h1>Music Player</h1>
    <hr>
    <div id="div"></div>
    <canvas></canvas>
    <p>Frequency plot</p>
  </center>
  <script>
    url = "https://dl.dropbox.com/s/5jyylqps64nyoez/Legends%20never%20die.mp3?dl=0";
    const div = document.querySelector("#div");
    const cvs = document.querySelector("canvas");
    cvs.width = window.innerWidth - 20;
    cvs.height = 200;
    const c = cvs.getContext("2d");
    function loadMusic(url) {
      div.innerHTML = "Loading music, please wait...";
      const context = new AudioContext();
      const source = context.createBufferSource();
      const analyser = context.createAnalyser();
      let request = new XMLHttpRequest();
      request.open("GET", url, true);
      request.responseType = "arraybuffer";
      request.onload = () => {
        div.innerHTML = "Music loaded, please wait, music will be played soon...";
        context.decodeAudioData(request.response, suffer => {
          source.buffer = suffer;
          source.connect(context.destination);
          source.connect(analyser);
          analyser.connect(context.destination);
          source.start();
          div.innerHTML = "Music is playing... Enjoy!";
          setInterval(() => {
            c.clearRect(0, 0, cvs.width, cvs.height);
            let array = new Uint8Array(analyser.frequencyBinCount);
            analyser.getByteFrequencyData(array);
            let m = 0;
            for (m = 0; m < array.length; m++) {
              let x = (parseInt(window.innerWidth - 20) * m) / array.length;
              c.beginPath();
              c.moveTo(x, 150 - ((100 * array[m]) / 255));
              c.lineTo((parseInt(window.innerWidth - 20) * (m + 1)) / array.length, 150 - ((100 * array[m + 1]) / 255));
              c.lineWidth = 1;
              c.strokeStyle = "black";
              c.stroke();
            }
          }, 1);
        });
      };
      request.send();
    }
    loadMusic(url);
  </script>
</body>
</html>
```
This is more a couple of observations than a complete solution.
The code given worked for me on Edge, Chrome and Firefox on Windows 10.
On iOS 14 Safari and iOS 14 Chrome, it seemed to stop after putting out the loading message.
This MDN reference used a 'cross-browser' method to create the AudioContext, so I added this line:
var AudioContext = window.AudioContext || window.webkitAudioContext;
before this line:
const context = new AudioContext();
[edit: have just confirmed at caniuse that the -webkit prefix is needed by Safari]
That seemed to do the trick, in as much as the rest of the code was executed. However, there was no sound, and it appeared the audio was not playing. The plot also showed just a single horizontal line.
Is this a manifestation of iOS's requirement that there must be some user interaction before audio will actually be played?
I'm pretty sure the audio was loaded, as there was a noticeable pause at that point. I suspect that there will have to be a button which, when clicked, actually starts the playing.
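If so, a minimal sketch of that gesture gate, with a hypothetical button added to the page (source.start() would move here out of the decode callback):
const playButton = document.querySelector("#play-button"); // hypothetical button
playButton.addEventListener("click", async () => {
  // Autoplay policies keep the AudioContext suspended until a user gesture;
  // resume it inside the click handler, then start the prepared source.
  if (context.state === "suspended") {
    await context.resume();
  }
  source.start();
});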

Really strange playback using MediaRecorder API: different parts are lost on different devices

I'm working on an app running in a browser where the user can record his voice. Using the MediaRecorder API I can get the recording and POST it to my server. The problem comes in if the user pauses and restarts the recording. When that happens, the first time they get it from the server it plays fine, but when it is replayed only the last segment is played. If I reload the web page and try again, once again the first time I get the whole recording, subsequent times it is just the last segment.
It uses the Opus codec, so I tried playing it in VLC. There, I get only the first segment, never any of the subsequent ones. Finally, I tried converting to MP3, and when I do that the MP3 has the whole recording! So the whole thing is being saved, but somehow the segments seem to be stored separately in the file and mess up replay. In each case only one segment of the blob plays. I have to say I'm at something of a loss even as to how to attack this. The time shown by the player is the time of the first segment, whether it plays the first, the second, or the whole thing. Any thoughts?
edited to provide a working example
How to test: put this code where it can be served and open it (I use a chrome-based browser). Click on Start to start recording, then Pause, then Start again to continue recording, then Pause again to stop. Then click on setup to load the recording, and listen to the recording. The first time I listen I get the whole recording, though the playback timer only shows the first segment. Subsequent playbacks only play the last segment. Pressing the setup button again will cause it to play the whole recording, but again only the first time.
<!doctype html>
<html>
<head>
<script>
  var audio = null;

  function init() {
    audio = new Recording();
    audio.prepareAudio();
  }

  class Recording {
    recordButton;
    pauseButton;
    recorder;
    mimeType;
    audioChunks;

    constructor() {
      this.mimeType = this.recorder = null;
      this.recordButton = this.pauseButton = null;
      this.audioChunks = [];
    }

    getRecorder() {
      return new Promise(async resolve => {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        const mediaRecorder = new MediaRecorder(stream);
        var _this = this;
        mediaRecorder.addEventListener('dataavailable', event => {
          _this.audioChunks.push(event.data);
        });
        const start = () => {
          mediaRecorder.start();
        };
        const pause = () =>
          new Promise(resolve => {
            mediaRecorder.addEventListener('stop', () => {
              resolve(_this.audioChunks);
            });
            mediaRecorder.stop();
            _this.mimeType = mediaRecorder.mimeType;
          });
        resolve({ start, pause });
      });
    }

    get codec() {
      if (!this.mimeType) return null;
      let split = this.mimeType.split('=');
      return (split.length > 1) ? split[1] : split;
    }

    prepareAudio() {
      this.recordButton = document.querySelector('#record');
      this.pauseButton = document.querySelector('#pause');
      var _this = this;
      this.recordButton.addEventListener('click', async () => {
        _this.recordButton.setAttribute('disabled', true);
        _this.pauseButton.removeAttribute('disabled');
        if (!_this.recorder) {
          _this.recorder = await this.getRecorder();
        }
        _this.recorder.start();
      });
      this.pauseButton.addEventListener('click', async () => {
        _this.recordButton.removeAttribute('disabled');
        _this.pauseButton.setAttribute('disabled', true);
        await _this.recorder.pause();
      });
    }

    data() {
      return new Promise((resolve, reject) => {
        const reader = new FileReader();
        let codec = this.audioChunks[0].type;
        let audioBlob = new Blob(this.audioChunks, {type: this.codec || codec});
        reader.readAsDataURL(audioBlob);
        reader.onload = () => resolve({codec: codec, data: reader.result.split(',')[1]});
      });
    }

    blobUrl() {
      let codec = this.audioChunks[0].type;
      let audioBlob = new Blob(this.audioChunks, {type: this.codec || codec});
      let blobUrl = URL.createObjectURL(audioBlob);
      let player = document.getElementById('play-blob');
      player.src = blobUrl;
      player.disabled = false;
      return blobUrl;
    }
  }
</script>
</head>
<body onload="init()">
  <div>
    <button id="record" class="button fill">Start</button>
    <br />
    <button id="pause" class="button fill">Pause</button>
    <br />
    <button class="button fill" onclick="audio.blobUrl()">setup</button>
  </div>
  <audio id="play-blob" controls></audio>
</body>
</html>
This isn't a complete answer, but I'm understanding more of what is happening. The audio player, at least in the versions of Chrome and Firefox I am using (up to date), does not seem to handle this kind of concatenated audio properly. When the source is loaded it does not know the length (of course). When the blob is created from multiple segments (new Blob([segment1, segment2, ...])), the first time around the duration is reported as infinite and the whole clip plays. On subsequent plays the clip time is given as the length of the longest segment and only the last segment is played. The audio object also reports the duration as the length of the longest segment.
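A commonly suggested workaround for the infinite-duration blob (which I haven't verified here) is to seek far past the end once; that forces the browser to scan the file and report a finite duration:
let player = document.getElementById('play-blob');
player.onloadedmetadata = () => {
  if (player.duration === Infinity) {
    // Seeking far past the end makes the browser compute the real duration.
    player.currentTime = Number.MAX_SAFE_INTEGER;
    player.ontimeupdate = () => {
      player.ontimeupdate = null;
      player.currentTime = 0; // rewind for normal playback
    };
  }
};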
I've also solved my immediate problem by replacing the audio player with howler.js. That plays the entire clip repeatedly, as I expected.
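For reference, a minimal sketch of the howler.js route, assuming the blob URL produced by blobUrl() above:
// howler.js needs an explicit format hint, because a blob URL has no
// file extension to sniff the codec from.
let sound = new Howl({
  src: [blobUrl],
  format: ['webm']
});
sound.play();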

How can I play a piece of a sound sample with the Web Audio API?

I have an "asd.wav" sample with total duration 3 secs and play it:
let source = audioCtx.createBufferSource();
source.buffer = buffer; // recieved buffer of asd.wav
source.connect(audioCtx.destination);
source.start(0);
It plays perfectly from 0.00 to 3.00 seconds, but how can I play this sample only from 1.00 to 2.00 seconds?
This should do the trick. Maybe it can be done in a simpler way, but this is what I could come up with.
var AudioContext = window.AudioContext || window.webkitAudioContext;
var audioCtx = new AudioContext();
var getSound = new XMLHttpRequest();
getSound.open("GET", "./asd.wav", true);
getSound.responseType = "arraybuffer";
getSound.onload = function() {
  audioCtx.decodeAudioData(getSound.response, function(buffer) {
    let start_time = 1, end_time = 2, sample_rate = buffer.sampleRate,
        channel_number = 0; // assuming a mono (one channel) audio
    let source = audioCtx.createBufferSource();
    let data = buffer.getChannelData(channel_number);
    data = data.slice(start_time * sample_rate, end_time * sample_rate);
    let new_buffer = audioCtx.createBuffer(1 /* number of channels = 1 */, data.length, sample_rate);
    new_buffer.copyToChannel(data, 0);
    source.buffer = new_buffer;
    source.connect(audioCtx.destination);
    source.start(0);
  });
};
getSound.send();
In the case of multi-channel audio, you will need to repeat the steps to copy the data for each channel.
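For what it's worth, AudioBufferSourceNode.start() also accepts offset and duration arguments, so the same one-second slice can be played without copying buffers at all; a minimal sketch using the decoded buffer from above:
let source = audioCtx.createBufferSource();
source.buffer = buffer; // the full decoded asd.wav buffer
source.connect(audioCtx.destination);
// start(when, offset, duration): start now, 1 s into the sample, play for 1 s
source.start(0, 1, 1);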
In addition to explicitly requesting the resource to be played and slicing out a portion of it, you can also leverage an audio element. Per the docs, by appending #t=[starttime][,endtime] to the URL, you can specify the portion of interest.
From there, it's a trivial matter of creating a source from the media element and playing it, rather than doing it all from scratch.
As with an AJAX request, you're still subject to Cross-Origin restrictions.
Here's an example - just substitute the URL with one on the same domain, one referring to a resource with a CORS header, or one that uses your own server as a proxy to a compatible resource that doesn't come with the CORS header.
As you can see, the code for this method along with its accompanying HTML requires far less code than the AJAX approach does. You can also create and load audio elements dynamically as shown in the button click handler.
<!doctype html>
<html>
<head>
<script>
  "use strict";
  function byId(id) { return document.getElementById(id); }
  ///////////////////////////////////
  window.addEventListener('load', onDocLoaded, false);

  function onDocLoaded(evt) {
    //loadAndPlayPortion("3 seconds.wav", 0.0, 1.0);
    playAudioElement(byId('myAudioElem'));
  }

  function onBtnClicked(evt) {
    var audio = document.createElement('audio');
    audio.onloadeddata = function() { playAudioElement(this); };
    audio.src = '3 seconds.wav#t=2,3'; // could build this URL from user input
  }

  function playAudioElement(audioElem) {
    var AudioContext = window.AudioContext || window.webkitAudioContext;
    var audioCtx = new AudioContext();
    var source = audioCtx.createMediaElementSource(audioElem);
    source.connect(audioCtx.destination);
    audioElem.play();
  }

  function loadAndPlayPortion(soundUrl, startTimeSecs, endTimeSecs) {
    var AudioContext = window.AudioContext || window.webkitAudioContext;
    var audioCtx = new AudioContext();
    var ajax = new XMLHttpRequest();
    ajax.open("GET", soundUrl, true);
    ajax.responseType = "arraybuffer";
    ajax.onload = onFileLoaded;
    ajax.send();

    function onFileLoaded() {
      audioCtx.decodeAudioData(this.response, onDataDecoded);
    }

    function onDataDecoded(sampleBuffer) {
      let source = audioCtx.createBufferSource(), numChannels = sampleBuffer.numberOfChannels,
          sampleRate = sampleBuffer.sampleRate,
          nRequiredSamples = (endTimeSecs - startTimeSecs) * sampleRate,
          newBuffer = audioCtx.createBuffer(numChannels, nRequiredSamples, sampleRate);
      for (var curChannel = 0; curChannel < numChannels; curChannel++) {
        var channelData = sampleBuffer.getChannelData(curChannel);
        channelData = channelData.slice(startTimeSecs * sampleRate, endTimeSecs * sampleRate);
        newBuffer.copyToChannel(channelData, curChannel, 0);
      }
      source.buffer = newBuffer; // chosen portion of received buffer of sound-file
      source.connect(audioCtx.destination);
      source.start(0);
    }
  }
</script>
<style>
</style>
</head>
<body>
  <button onclick='onBtnClicked()'>Create Audio element</button>
  <audio id='myAudioElem' src='3 seconds.wav#t=1,2'></audio>
</body>
</html>

Media Source Extensions Not Working

I am trying to use the MediaSource API to append separate WebM videos to a single source.
I found a Github project that was attempting the same thing, where a playlist of WebMs is loaded, and each one is appended as a SourceBuffer. But it was last committed a year ago, and thus out-of-sync with the current spec. So I forked it and updated to the latest API properties/methods, plus some restructuring. Much of the existing code was taken directly from the spec’s examples and Eric Bidelman’s test page.
However, I can not get it to work as expected. I am testing in two browsers, both on Mac OS X 10.9.2: Chrome 35 stable (latest at the time of this writing), and Firefox 30 beta with the flag media.mediasource.enabled set to true in about:config (this feature will not be introduced until FF 25, and current stable is 24).
Here are the problems I’m running into.
Both browsers
I want the video to be, in the end, one long video composed of the 11 WebMs (00.webm, 01.webm, …, 10.webm). Right now, each browser only plays 1 segment of the video.
Chrome
Wildly inconsistent behavior. Seems impossible to reproduce any of these bugs reliably.
Sometimes the video is blank, or has a tall black bar in the middle of it, and is unplayable.
Sometimes the video will load and pause on the first frame of 01.webm.
Sometimes, the video will play a couple of frames of the 02.webm and pause, having only loaded the first three segments.
The Play button is initially grayed out.
Pressing the grayed out Play button produces wildly inconsistent behaviors. Sometimes, it loads a black, unplayable video. Other times, it will play the first segment, then, when you get to the end, it stops, and when you press Play/Pause again, it will load the next segment. Even then, it will sometimes skip over segments and gets stuck on 04.webm. Regardless, it never plays the final segment, even though the console will report going through all of the buffers.
It is honestly different every time. I can’t list them all here.
Known caveats: Chrome does not currently implement sourceBuffer.mode, though I do not know what effect this might have.
Firefox
Only plays 00.webm. Total running time is 0:08, the length of that video.
Video seeking does not work. (This may be expected behavior, as there is nothing actually happening in the onSeeking event handler.)
Video can not be restarted once finished.
My initial theory was that this had to do with mediaSource.sourceBuffers[0].timestampOffset = duration and duration = mediaSource.duration. But I can’t seem to get anything back from mediaSource.duration except for NaN, even though I’m appending new segments.
Completely lost here. Guidance very much appreciated.
EDIT: I uncommented the duration parts of the code, and ran mse_webm_remuxer from Aaron Colwell's Media Source Extension Tools (thanks Adam Hart for the tips) on all of the videos. Voila, no more unpredictable glitches in Chrome! But alas, it still pauses once a media segment ends, and even when you press play, it sometimes gets stuck on one frame.
In Firefox Beta, it doesn’t play past the first segment, responding with:
TypeError: Value being assigned to SourceBuffer.timestampOffset is not a finite floating-point value.
Logging the value of duration returns NaN (but only in FF).
The main problem is with the video files. If you open chrome://media-internals/ you can see the error "Media segment did not begin with keyframe". Using properly formatted videos, like the one from Eric Bidelman's example (I hope he doesn't get mad that I keep linking directly to that video, but it's the only example video I've found that works), your code does work with the following change in appendNextMediaSegment():
duration = mediaSource.duration;
mediaSource.sourceBuffers[0].timestampOffset = duration;
mediaSource.sourceBuffers[0].appendBuffer(mediaSegment);
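Since mediaSource.duration reads back as NaN before the browser can compute it (hence the Firefox TypeError above), a guarded version of that snippet might look like:
var duration = mediaSource.duration;
if (!Number.isFinite(duration)) {
  duration = 0; // duration is NaN until data is appended; fall back to 0
}
mediaSource.sourceBuffers[0].timestampOffset = duration;
mediaSource.sourceBuffers[0].appendBuffer(mediaSegment);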
You can try Aaron Colwell's Media Source Extension Tools to try to get your videos working, but I've had limited success.
It also seems a little weird that you're looking at the onProgress event before appending segments, but I guess that could work if you only want to append if the video is actually playing. It could make the seekbar act odd since the video length is unknown, but that can be a problem in any case.
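As an alternative to the progress-driven loop, a sketch that assumes the segments are already fetched into an array of ArrayBuffers and chains appends off the SourceBuffer's updateend event:
var queue = segments.slice(); // hypothetical array of fetched ArrayBuffers
sourceBuffer.addEventListener('updateend', function() {
  if (queue.length > 0) {
    sourceBuffer.appendBuffer(queue.shift());
  } else if (mediaSource.readyState === 'open') {
    mediaSource.endOfStream(); // all segments appended
  }
});
sourceBuffer.appendBuffer(queue.shift()); // kick off the first append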
I agree with Adam Hart's opinion. With a webm file, I tried to implement an example like http://html5-demos.appspot.com/static/media-source.html and concluded that the problem was caused by the source file I used.
If you have time left, how about trying "samplemuxer", introduced at https://developer.mozilla.org/en-US/docs/Web/HTML/DASH_Adaptive_Streaming_for_HTML_5_Video?
In my opinion, samplemuxer is an encoder like FFmpeg.
I found that the converted file works with the MediaSource API. If you also find that it works, please let me know.
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8" />
  <title>MediaSource API Demo</title>
</head>
<body>
  <h3>Appending .webm video chunks using the Media Source API</h3>
  <section>
    <video controls autoplay width="320" height="240"></video>
    <pre id="log"></pre>
  </section>
  <script>
    // ORIGINAL CODE http://html5-demos.appspot.com/static/media-source.html
    var FILE = 'IU_output2.webm';
    var NUM_CHUNKS = 5;
    var video = document.querySelector('video');
    var mediaSource = new MediaSource();
    video.src = window.URL.createObjectURL(mediaSource);

    function callback(e) {
      var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
      logger.log('mediaSource readyState: ' + this.readyState);
      GET(FILE, function(uInt8Array) {
        var file = new Blob([uInt8Array], {type: 'video/webm'});
        var chunkSize = Math.ceil(file.size / NUM_CHUNKS);
        logger.log('num chunks:' + NUM_CHUNKS);
        logger.log('chunkSize:' + chunkSize + ', totalSize:' + file.size);
        // Slice the video into NUM_CHUNKS and append each to the media element.
        var i = 0;
        (function readChunk_(i) {
          var reader = new FileReader();
          // Reads aren't guaranteed to finish in the same order they're started in,
          // so we need to read + append the next chunk after the previous reader
          // is done (onload is fired).
          reader.onload = function(e) {
            try {
              sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
              logger.log('appending chunk:' + i);
            } catch (e) {
              console.log(e);
            }
            if (i == NUM_CHUNKS - 1) {
              if (!sourceBuffer.updating)
                mediaSource.endOfStream();
            } else {
              if (video.paused) {
                video.play(); // Start playing after 1st chunk is appended.
              }
              sourceBuffer.addEventListener('updateend', function(e) {
                if (i < NUM_CHUNKS - 1)
                  readChunk_(++i);
              });
            } // end if
          };
          var startByte = chunkSize * i;
          var chunk = file.slice(startByte, startByte + chunkSize);
          reader.readAsArrayBuffer(chunk);
        })(i); // Start the recursive call by self calling.
      });
    }

    mediaSource.addEventListener('sourceopen', callback, false);
    // mediaSource.addEventListener('webkitsourceopen', callback, false);
    //
    // mediaSource.addEventListener('webkitsourceended', function(e) {
    //   logger.log('mediaSource readyState: ' + this.readyState);
    // }, false);

    function GET(url, callback) {
      var xhr = new XMLHttpRequest();
      xhr.open('GET', url, true);
      xhr.responseType = 'arraybuffer';
      xhr.send();
      xhr.onload = function(e) {
        if (xhr.status != 200) {
          alert("Unexpected status code " + xhr.status + " for " + url);
          return false;
        }
        callback(new Uint8Array(xhr.response));
      };
    }
  </script>
  <script>
    function Logger(id) {
      this.el = document.getElementById('log');
    }
    Logger.prototype.log = function(msg) {
      var fragment = document.createDocumentFragment();
      fragment.appendChild(document.createTextNode(msg));
      fragment.appendChild(document.createElement('br'));
      this.el.appendChild(fragment);
    };
    Logger.prototype.clear = function() {
      this.el.textContent = '';
    };
    var logger = new Logger('log');
  </script>
</body>
</html>
Another test page:
<!DOCTYPE html>
<html>
<head>
  <title>MediaSource API Demo</title>
</head>
<body>
  <h3>Appending .webm video chunks using the Media Source API</h3>
  <section>
    <video controls autoplay width="320" height="240"></video>
    <pre id="log"></pre>
  </section>
  <script>
    // ORIGINAL CODE http://html5-demos.appspot.com/static/media-source.html
    var FILE = 'IU_output2.webm';
    // var FILE = 'test_movie_output.webm';
    var NUM_CHUNKS = 10;
    var video = document.querySelector('video');
    var mediaSource = new MediaSource();
    video.src = window.URL.createObjectURL(mediaSource);

    function callback(e) {
      var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
      logger.log('mediaSource readyState: ' + this.readyState);
      GET(FILE, function(uInt8Array) {
        logger.log('byteLength:' + uInt8Array.byteLength);
        sourceBuffer.appendBuffer(uInt8Array);
      });
    }

    mediaSource.addEventListener('sourceopen', callback, false);
    // mediaSource.addEventListener('webkitsourceopen', callback, false);
    //
    // mediaSource.addEventListener('webkitsourceended', function(e) {
    //   logger.log('mediaSource readyState: ' + this.readyState);
    // }, false);

    function GET(url, callback) {
      var xhr = new XMLHttpRequest();
      xhr.open('GET', url, true);
      xhr.responseType = 'arraybuffer';
      xhr.send();
      xhr.onload = function(e) {
        if (xhr.status != 200) {
          alert("Unexpected status code " + xhr.status + " for " + url);
          return false;
        }
        callback(new Uint8Array(xhr.response));
      };
    }
  </script>
  <script>
    function Logger(id) {
      this.el = document.getElementById('log');
    }
    Logger.prototype.log = function(msg) {
      var fragment = document.createDocumentFragment();
      fragment.appendChild(document.createTextNode(msg));
      fragment.appendChild(document.createElement('br'));
      this.el.appendChild(fragment);
    };
    Logger.prototype.clear = function() {
      this.el.textContent = '';
    };
    var logger = new Logger('log');
  </script>
</body>
</html>
Thanks.
