As a follow-up to this post, How to rapidly play multiple copies of a soundfile in javascript, I created a small demo page to illustrate the core of my problem.
My goal is to rapidly play the same audio file over and over again while a user holds down a button, without crashing the browser ;-) (second line in the fiddle).
My initial method uses cloneNode to create multiple audio objects in the DOM. This actually works fine in Chrome and Edge, but Safari and Firefox run into problems after a while. This leads to desynchronized audio, and sounds that keep on starting after the user has released the button.
So Codebreaker007 proposed using an AudioContext instead, which resulted in a couple of different problems. Chrome replies:
The AudioContext was not allowed to start. It must be resumed (or created) after a user gesture on the page.
Chrome and Firefox don't play the audio file. I then followed the Google guide and at least got the error messages to go away, but still no audible audio. Using the Web Audio plugin for Chrome I could at one point see that the audio nodes are being created correctly. How can I make them audible? How can I get the AudioContext to start?
I think I'm quite close to the solution, so let's fix this together.
<!doctype html>
<html class="h-100" lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<title>Test</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
<script>
/* global AudioContext:true */
var clickingBuffer = null;
var inStart = 0;
var inStop = 0;
var timer = null;
var type = "";
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
// =========== Variant 1: Clone Node ========== //
var sound = new Audio("https://sounds4email.com/wav/hellobaby.mp3");
sound.preload = 'auto';
sound.load();
function triggersound(){
console.log("triggerSound")
var click=sound.cloneNode();
click.volume=1;
click.play();
}
// =========== Variant 2: AudioContext ========== //
function loadClickSound(url) {
console.log("loading sound")
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
context.decodeAudioData(request.response, function(buffer) {
if (!buffer) {
console.log('Error decoding file data: ' + url);
return;
}
clickingBuffer = buffer;
console.log("sound buffered");
});
};
request.onerror = function() {
console.log('BufferLoader: XHR error');
};
request.send(); // send() must run outside the onload handler, otherwise the request never fires
}
function playSound(buffer, time, volume) {
console.log("playSound")
context.resume().then(() => {
var source = context.createBufferSource(); // creates a sound source
source.buffer = buffer; // tell the source which sound to play
var gainNode = context.createGain(); // Create a gain node
source.connect(gainNode); // Connect the source to the gain node only (a direct connection to the destination as well would double the signal)
gainNode.connect(context.destination); // Connect the gain node to the destination (the speakers)
gainNode.gain.value = volume; // Set the volume
source.start(time); // play the source at the desired time, 0 = now
console.log('Playback resumed successfully');
});
}
// =========== RAPID FIRE ========== //
function stop() {
console.log("stop")
inStop = 1
}
// Initializing the spinning sequence. Blocking other user interaction
function start(tp) {
type = tp
console.log("active")
context.resume().then(() => {
console.log('Playback resumed successfully');
if (null === timer) {
timer = setInterval(timerCallback, 200)
inStart = 1
}
});
}
/**
* Timer callback
*/
function timerCallback() {
console.log(type + " " + inStart + " " + inStop)
if (inStart) {
if(type==="node"){
triggersound()
} else if(type==="context"){
playSound(clickingBuffer, 0, 1)
}
}
if (inStop) {
inStop = 0
clearInterval(timer) // the timer was created with setInterval
timer = null
console.log("stopped")
}
}
// =========== DOC READY ========== //
$( document ).ready(function() {
console.log( "ready!" );
// Load and decode the click sound as soon as the document is ready
loadClickSound("https://sounds4email.com/wav/hellobaby.mp3");
});
// =================================================================================//
</script>
</head>
<body class="d-flex flex-column align-content-center align-items-center justify-content-center w-100 h-100" >
<div class="row p-1 w-100">
<div class="col">
Click once:
</div>
<button id="clickNode" style="width: 100px; height: 100px;" class="col m-1" onclick="triggersound()">
Clone Node
</button>
<button id="clickContext" style="width: 100px; height: 100px;" class="col m-1" onclick="playSound(clickingBuffer, 0, 1)">
Audio Context
</button>
</div>
<div class="row p-1 w-100">
<div class="col">
Press and hold:
</div>
<button id="autoNode" style="width: 100px; height: 100px;" class="col m-1" onmousedown="start('node')" onmouseup="stop()">
Auto Clone Node
</button>
<button id="autoContext" style="width: 100px; height: 100px;" class="col m-1" onmousedown="start('context')" onmouseup="stop()">
Auto Audio Context
</button>
</div>
</body>
</html>
OK, here is the code you want for your function. This code can use a local file for testing, to rule out all kinds of security issues (the XHR code is included). It uses plain JS ES5 and has been tested with Firefox and Chrome on different OSes. Please put this into an audio_test.html as it is to verify that it works. One word of warning: don't mix HTML tags and JavaScript function calls; use event listeners, as I demonstrate in the code. The stop button function is just a starter; to re-lock the button after playing, extra code is necessary that I didn't bother to write.
Do not try to create buffers in an ongoing way, because this fills up memory and crashes the browser/OS. If you want overlapping sound, that means using buffer arrays, but that would be another question.
<!DOCTYPE html>
<!-- Author: codebreaker007 # stackoverflow -->
<html class="h-100" lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<title>Web Audio API: load + play</title>
</head>
<body>
<p>Example of using the Web Audio API to load a sound file and<br>play it once, play it continuously on mousedown or stop playback,
and start playing on user click.</p>
<p>Tested in Firefox and Chrome</p>
<input type="file" accept="audio/*" value="">
<button id="playonce" disabled=disabled>Play once</button>
<button id="playstop" disabled=disabled>Stop play</button>
<button id="playcont" disabled=disabled>Play cont</button>
<script>
/* global AudioContext:true,
*/
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var source = null;
var clickingBuffer = null;
var mouseIsDown = false;
var buttonPo = document.getElementById("playonce");
var buttonPs = document.getElementById("playstop");
var buttonPc = document.getElementById("playcont");
if (document.readyState!="loading") docReady();
/* Modern browsers */
else document.addEventListener("DOMContentLoaded", docReady);
function docReady() {
buttonPo.addEventListener("click", function(e){
playSound(clickingBuffer, 0, 0, 0.8);
buttonPs.disabled = false;
});
buttonPs.addEventListener("click", function(e){
if (source) {
source.stop(0);
}
buttonPs.disabled = true;
});
buttonPc.addEventListener("mousedown", function(e){
playSound(clickingBuffer, 1, 0, 1);
// while(mouseIsDown) playSound(clickingBuffer, 0, 1);
});
buttonPc.addEventListener("mouseup", function(e){
if (source) {
source.stop(0);
}
});
}
function playSound(buffer2play, isLoop, time, volume) {
console.log("playsound called");
source = context.createBufferSource(); // creates a sound source
source.buffer = buffer2play; // tell the source which sound to play
if (isLoop) source.loop = true;
else source.loop = false;
var gainNode = context.createGain(); // Create a gain node
source.connect(gainNode); // Connect the source to the gain node only (a direct connection to the destination as well would double the signal)
gainNode.connect(context.destination); // Connect the gain node to the destination (the speakers)
gainNode.gain.value = volume; // Set the volume
source.start(time); // play the source at the desired time, 0 = now
console.log("playSound");
}
function initSound(arrayBuffer) {
context.decodeAudioData(arrayBuffer, function(buffer) {
// clickingBuffer is global to reuse the decoded audio later.
clickingBuffer = buffer;
// Test routine activate buttons
buttonPo.disabled = false;
buttonPc.disabled = false;
}, function(e) {
console.log('Error decoding file', e);
});
}
// User selects file, read it as an ArrayBuffer and pass to the API.
var fileInput = document.querySelector('input[type="file"]');
fileInput.addEventListener('change', function(e) {
var reader = new FileReader();
reader.onload = function(e) {
initSound(this.result);
};
reader.readAsArrayBuffer(this.files[0]);
}, false);
// Load file from a URL as an ArrayBuffer.
// Example: loading via xhr2: loadSoundFile('sounds/test.mp3');
function loadClickSound(url) {
console.log("loading sound");
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
// Decode asynchronously
request.onload = function() {
initSound(this.response); // this.response is an ArrayBuffer.
};
request.send(); // the request object is named request, not xhr
}
</script>
</body>
</html>
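For the overlapping-sound case mentioned above, here is a minimal hedged sketch (an addition under stated assumptions, not part of the tested page): one decoded buffer is reused, and each trigger only creates a short-lived AudioBufferSourceNode, so memory stays flat while the button is held.

```
// Hedged sketch: overlapping playback that reuses ONE decoded buffer.
// Assumes the globals `context` and `clickingBuffer` from the page above.
// AudioBufferSourceNodes are one-shot and cheap; creating one per trigger
// does not re-decode or copy the audio data.
function fireOverlapping(volume) {
  if (!clickingBuffer) return;            // nothing decoded yet
  var src = context.createBufferSource();
  var gain = context.createGain();
  src.buffer = clickingBuffer;            // reuse the single decoded buffer
  gain.gain.value = volume;
  src.connect(gain);
  gain.connect(context.destination);
  src.onended = function () {             // drop the nodes once they finish
    src.disconnect();
    gain.disconnect();
  };
  src.start(0);
}
```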
Related
I am trying to make a simple 1:1 video conferencing app with the Agora Web SDK. I started with their basic demo and am currently trying to implement audio/video mute/unmute functionality. I read and tried other SO answers on the same topic, but they didn't work either.
Here is my index.html file
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Video Call Demo</title>
<link rel="stylesheet" href="./styles/style.css">
</head>
<body>
<h1>
Video Call Demo<br><small style="font-size: 14pt">Powered by Agora.io</small>
</h1>
<h4>My Feed :</h4>
<div id="me"></div>
<button onclick="myFunc()">Mute</button> <!-- button for muting -->
<p id="demo"></p> <!-- this works for onclick action of above button -->
<h4>Remote Feeds :</h4>
<div id="remote-container">
</div>
<h4>Canvas :</h4>
<div id="canvas-container">
</div>
<script src="scripts/AgoraRTCSDK-2.4.0.js"></script>
<script src="scripts/script.js"></script>
<script src="https://cdn.agora.io/sdk/release/AgoraRTCSDK-3.2.1.js"></script>
</body>
</html>
Here is script.js file where actual logic is implemented
/**
* @name handleFail
* @param err - error thrown by any function
* @description Helper function to handle errors
*/
let handleFail = function(err){
console.log("Error : ", err);
};
// Queries the container in which the remote feeds belong
let remoteContainer= document.getElementById("remote-container");
let canvasContainer =document.getElementById("canvas-container");
/**
* @name addVideoStream
* @param streamId
* @description Helper function to add the video stream to "remote-container"
*/
function addVideoStream(streamId){
let streamDiv=document.createElement("div"); // Create a new div for every stream
streamDiv.id=streamId; // Assigning id to div
streamDiv.style.transform="rotateY(180deg)"; // Takes care of lateral inversion (mirror image)
remoteContainer.appendChild(streamDiv); // Add new div to container
}
/**
* @name removeVideoStream
* @param evt - Remove event
* @description Helper function to remove the video stream from "remote-container"
*/
function removeVideoStream (evt) {
let stream = evt.stream;
stream.stop();
let remDiv=document.getElementById(stream.getId());
remDiv.parentNode.removeChild(remDiv);
console.log("Remote stream is removed " + stream.getId());
}
function addCanvas(streamId){
let canvas=document.createElement("canvas");
canvas.id='canvas'+streamId;
canvasContainer.appendChild(canvas);
let ctx = canvas.getContext('2d');
let video=document.getElementById(`video${streamId}`);
video.addEventListener('loadedmetadata', function() {
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
});
video.addEventListener('play', function() {
const $this = this; //cache
(function loop() {
if (!$this.paused && !$this.ended) {
ctx.drawImage($this, 0, 0);
setTimeout(loop, 1000 / 30); // drawing at 30fps
}
})();
}, 0);
}
// Client Setup
// Defines a client for RTC
let client = AgoraRTC.createClient({
mode: 'live',
codec: "h264"
});
// Client Setup
// Defines a client for Real Time Communication
client.init("d2684f11650c446faaeb289d973e2dd3",() => console.log("AgoraRTC client initialized") ,handleFail);
let localStream = AgoraRTC.createStream({
// streamID: uid,
audio: true,
video: true,
screen: false,
camera: {
camId: '',
micId: '',
stream: {}
}
});
// The client joins the channel
client.join(null,"any-channel",null, (uid)=>{
localStorage.streamID = uid
// Stream object associated with your web cam is initialized
// let localStream = AgoraRTC.createStream({
// streamID: uid,
// audio: true,
// video: true,
// screen: false
// });
// Associates the stream to the client
localStream.init(function() {
//Plays the localVideo
localStream.play('me');
//Publishes the stream to the channel
client.publish(localStream, handleFail);
},handleFail);
},handleFail);
//When a stream is added to a channel
client.on('stream-added', function (evt) {
client.subscribe(evt.stream, handleFail);
});
//When you subscribe to a stream
client.on('stream-subscribed', function (evt) {
let stream = evt.stream;
addVideoStream(stream.getId());
stream.play(stream.getId());
addCanvas(stream.getId());
});
// this is the function for muting
function myFunc(){
document.getElementById("demo").innerHTML="yay" // p tag works
let x = localStream.muteVideo(); // from docs
console.log(x);
// taken from one of the answers but it isn't working
client.on("mute-video", function (evt) {
const remoteId = evt.uid;
localStream.camera.stream.muteVideo();
});
}
//When a person is removed from the stream
client.on('stream-removed',removeVideoStream);
client.on('peer-leave',removeVideoStream);
There are JavaScript scope errors for a few of the variables and objects you have used; I would recommend fixing those. You have also tried to import two versions of the SDK (2.x and 3.x) in the HTML file.
For example: script.js:62 Uncaught ReferenceError: AgoraRTC is not defined. Try to debug all such errors you see in the console.
I would recommend following the Agora documentation for setting up a working video call, and using the muteAudio and muteVideo functions for muting audio and video.
Simple Sample App: https://github.com/AgoraIO/Basic-Video-Call/tree/master/One-to-One-Video/Agora-Web-Tutorial-1to1
Fully Professional Sample App: https://github.com/akshatvg/speakOut
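As a hedged sketch of the mute toggle (the Stream methods muteVideo()/unmuteVideo() exist in the Agora Web SDK; localStream and the "demo" element are taken from the question's code):

```
// Hypothetical toggle kept entirely on the local stream.
let videoMuted = false;

function myFunc() {
  if (!videoMuted) {
    videoMuted = localStream.muteVideo();      // returns true on success
  } else {
    videoMuted = !localStream.unmuteVideo();   // back to false on success
  }
  document.getElementById("demo").innerHTML = videoMuted ? "video muted" : "video on";
}
```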
I've created a simple music player, which creates a buffer array for a particular audio URL to play the music.
It works fine in many of my cellphone's browsers, so I guess there is no cross-origin issue with the audio URL.
However, Chrome does not play the audio.
I've also created a Uint8Array for plotting frequency data inside a canvas. While many browsers plot the frequency graph in the canvas successfully, Chrome does not!
Take a look at what I've tried so far!
```
<!DOCTYPE html>
<html>
<head>
<title>Page Title</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
<center>
<h1>Music Player</h1>
<hr>
<div id="div"></div>
<canvas></canvas>
<p>Frequency plot</p>
</center>
<script>
url = "https://dl.dropbox.com/s/5jyylqps64nyoez/Legends%20never%20die.mp3?dl=0";
const div = document.querySelector("#div");
const cvs = document.querySelector("canvas");
cvs.width = window.innerWidth - 20;
cvs.height = 200;
const c = cvs.getContext("2d");
function loadMusic(url){
div.innerHTML = "Loading music, please wait...";
const context = new AudioContext();
const source = context.createBufferSource();
const analyser = context.createAnalyser();
let request = new XMLHttpRequest();
request.open("GET",url,true);
request.responseType = "arraybuffer";
request.onload = ()=>{
div.innerHTML = "Music loaded, please wait, music will be played soon...";
context.decodeAudioData(request.response, buffer => {
source.buffer = buffer;
source.connect(context.destination);
source.connect(analyser);
analyser.connect(context.destination);
source.start();
div.innerHTML = "Music is playing... Enjoy!";
setInterval(()=>{
c.clearRect(0,0,cvs.width,cvs.height);
let array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
let m = 0;
for(m = 0; m < array.length - 1; m++){ // stop one early so array[m+1] stays in bounds
let x = (parseInt(window.innerWidth -20)*m)/array.length;
c.beginPath();
c.moveTo(x,150-((100*array[m])/255));
c.lineTo((parseInt(window.innerWidth -20)*(m+1))/array.length,150-((100*array[m+1])/255));
c.lineWidth = 1;
c.strokeStyle = "black";
c.stroke();
}
},1);
});
}
request.send();
}
loadMusic(url);
</script>
</body>
</html>
```
This is more a couple of observations than a complete solution.
The code given worked for me on Edge, Chrome and Firefox on Windows 10.
On iOS 14 Safari and iOS 14 Chrome it seemed to stop after putting out the loading message.
This MDN reference uses a 'cross-browser' method to create an AudioContext, so I added this line:
var AudioContext = window.AudioContext || window.webkitAudioContext;
before this line:
const context = new AudioContext();
[edit: have just confirmed at caniuse that the -webkit prefix is needed by Safari]
That seemed to do the trick in as much as the rest of the code was executed. However, there was no sound and it appeared the audio was not playing. The plot also showed just a single horizontal line.
Is this a manifestation of iOS's requirement that there must be some user interaction before audio will actually be played?
I'm pretty sure the audio was loaded, as there was a noticeable pause at that point. I suspect there will have to be a button which, when clicked, actually starts the playing; a sketch of that idea follows.
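A minimal sketch of that suspicion (the button is an assumption; loadMusic and url are the question's):

```
// Hypothetical user-gesture gate: nothing audio-related runs until the tap,
// so iOS should create the AudioContext in the "running" state.
const startBtn = document.createElement("button");
startBtn.textContent = "Start music";
document.body.appendChild(startBtn);
startBtn.addEventListener("click", () => {
  loadMusic(url); // loadMusic creates its AudioContext inside the gesture
});
```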
I'm trying to test playing audio using an HTMLAudioElement and an AudioSourceNode. For the later application I need two features:
1. The pitch must be preserved after the playbackRate is changed.
2. The volume needs to be changed to a value greater than 1.
Because of feature 2, I added a workaround with the AudioSourceNode and the GainNode.
I need the audio file as an ArrayBuffer in the later app; that's why I added the file-reading part at the beginning.
Problem:
The code works fine with Chrome and Opera, but not with Firefox: playbackRate is set to 2, but the playback rate of the audio signal does not change. Why is that the case, and how can I fix it?
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Timestretching 2</title>
<script type="text/javascript">
var audioContext = window.AudioContext // Default
|| window.webkitAudioContext // Safari and old versions of Chrome
|| window.mozAudioContext
|| false;
function play() {
var fileInput = document.getElementById("file");
var file = fileInput.files[0];
var reader = new FileReader();
reader.onload = function (event) {
console.log("finished!");
console.log(event.target.result);
var audioCtxt = new audioContext();
var url = URL.createObjectURL(new File([event.target.result], "test.wav"));
var player = new Audio();
const source = audioCtxt.createMediaElementSource(player);
player.src = url;
console.log("wait to play");
player.addEventListener("canplay", function () {
// create a gain node to set volume greater than 1
const gainNode = audioCtxt.createGain();
gainNode.gain.value = 2.0; // double the volume
source.connect(gainNode);
gainNode.connect(audioCtxt.destination);
player.playbackRate = 2.0;
player.play();
player.playbackRate = 2.0;
console.log("playbackRate is " + player.playbackRate);
});
};
reader.onprogress = function (progress) {
console.log(progress.loaded / progress.total);
};
reader.onerror = function (error) {
console.error(error);
};
reader.readAsArrayBuffer(file);
}
</script>
</head>
<body>
<input id="file" type="file" value="Audio Datei auswählen"/>
<button onclick="play()">PLAY</button>
</body>
</html>
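On requirement 1 (pitch preservation), a hedged aside: browsers expose this as a property on the media element, historically behind vendor prefixes. A minimal sketch, assuming the player variable from the code above:

```
// Hedged sketch: keep pitch constant while playbackRate changes.
// preservesPitch is the standard name; the prefixed variants cover
// older Firefox (moz) and WebKit (webkit) builds.
if ('preservesPitch' in player) player.preservesPitch = true;
if ('mozPreservesPitch' in player) player.mozPreservesPitch = true;
if ('webkitPreservesPitch' in player) player.webkitPreservesPitch = true;
player.playbackRate = 2.0;
```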
I am trying to use the MediaSource API to append separate WebM videos to a single source.
I found a GitHub project that was attempting the same thing, where a playlist of WebMs is loaded and each one is appended as a SourceBuffer. But it was last committed a year ago, and is thus out of sync with the current spec. So I forked it and updated it to the latest API properties/methods, plus some restructuring. Much of the existing code was taken directly from the spec's examples and Eric Bidelman's test page.
However, I cannot get it to work as expected. I am testing in two browsers, both on Mac OS X 10.9.2: Chrome 35 stable (latest at the time of this writing), and Firefox 30 beta with the flag media.mediasource.enabled set to true in about:config (this feature will not be introduced until FF 25, and current stable is 24).
Here are the problems I’m running into.
Both browsers
I want the video to be, in the end, one long video composed of the 11 WebMs (00.webm, 01.webm, …, 10.webm). Right now, each browser only plays 1 segment of the video.
Chrome
Wildly inconsistent behavior. Seems impossible to reproduce any of these bugs reliably.
Sometimes the video is blank, or has a tall black bar in the middle of it, and is unplayable.
Sometimes the video will load and pause on the first frame of 01.webm.
Sometimes, the video will play a couple of frames of the 02.webm and pause, having only loaded the first three segments.
The Play button is initially grayed out.
Pressing the grayed-out Play button produces wildly inconsistent behaviors. Sometimes it loads a black, unplayable video. Other times it will play the first segment; then, when you get to the end, it stops, and when you press Play/Pause again, it will load the next segment. Even then, it will sometimes skip over segments and get stuck on 04.webm. Regardless, it never plays the final segment, even though the console reports going through all of the buffers.
It is honestly different every time. I can’t list them all here.
Known caveats: Chrome does not currently implement sourceBuffer.mode, though I do not know what effect this might have.
Firefox
Only plays 00.webm. Total running time is 0:08, the length of that video.
Video seeking does not work. (This may be expected behavior, as there is nothing actually happening in the onSeeking event handler.)
Video can not be restarted once finished.
My initial theory was that this had to do with mediaSource.sourceBuffers[0].timestampOffset = duration and duration = mediaSource.duration. But I can’t seem to get anything back from mediaSource.duration except for NaN, even though I’m appending new segments.
Completely lost here. Guidance very much appreciated.
EDIT: I uncommented the duration parts of the code and ran mse_webm_remuxer from Aaron Colwell's Media Source Extension Tools (thanks, Adam Hart, for the tips) on all of the videos. Voilà, no more unpredictable glitches in Chrome! But alas, it still pauses once a media segment ends, and even when you press play, it sometimes gets stuck on one frame.
In Firefox Beta, it doesn’t play past the first segment, responding with:
TypeError: Value being assigned to SourceBuffer.timestampOffset is not a finite floating-point value.
Logging the value of duration returns NaN (but only in FF).
The main problem is with the video files. If you open chrome://media-internals/ you can see the error "Media segment did not begin with keyframe". Using properly formatted videos, like the one from Eric Bidelman's example (I hope he doesn't get mad that I keep linking directly to that video, but it's the only example video I've found that works), your code does work with the following change in appendNextMediaSegment():
duration = mediaSource.duration;
mediaSource.sourceBuffers[0].timestampOffset = duration;
mediaSource.sourceBuffers[0].appendBuffer(mediaSegment);
You can try Aaron Colwell's Media Source Extension Tools to try to get your videos working, but I've had limited success.
It also seems a little weird that you're looking at the onProgress event before appending segments, but I guess that could work if you only want to append while the video is actually playing. It could make the seek bar act oddly since the video length is unknown, but that can be a problem in any case.
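On the Firefox TypeError about timestampOffset: a hedged sketch of a guard (sourceBuffer, mediaSource and appendNextMediaSegment are the question's; the listener shape is an assumption):

```
// Hypothetical guard: read mediaSource.duration only after the previous
// append finishes, and never assign NaN to timestampOffset.
sourceBuffer.addEventListener('updateend', function onUpdateEnd() {
  sourceBuffer.removeEventListener('updateend', onUpdateEnd);
  if (!isNaN(mediaSource.duration)) {
    sourceBuffer.timestampOffset = mediaSource.duration;
  }
  appendNextMediaSegment(); // fetch + appendBuffer the next .webm
});
```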
I agree with Adam Hart's opinion. With a WebM file, I tried to implement an example like http://html5-demos.appspot.com/static/media-source.html and concluded that the problem was caused by the source file I used.
If you have one arrow left in your quiver, how about trying "samplemuxer", introduced at https://developer.mozilla.org/en-US/docs/Web/HTML/DASH_Adaptive_Streaming_for_HTML_5_Video?
In my opinion, samplemuxer is an encoder like FFmpeg.
I found that the converted file works with the MediaSource API. If you also find that it works, please let me know.
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>MediaSource API Demo</title>
</head>
<body>
<h3>Appending .webm video chunks using the Media Source API</h3>
<section>
<video controls autoplay width="320" height="240"></video>
<pre id="log"></pre>
</section>
<script>
//ORIGINAL CODE http://html5-demos.appspot.com/static/media-source.html
var FILE = 'IU_output2.webm';
var NUM_CHUNKS = 5;
var video = document.querySelector('video');
var mediaSource = new MediaSource();
video.src = window.URL.createObjectURL(mediaSource);
function callback(e) {
var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
logger.log('mediaSource readyState: ' + this.readyState);
GET(FILE, function(uInt8Array) {
var file = new Blob([uInt8Array], {type: 'video/webm'});
var chunkSize = Math.ceil(file.size / NUM_CHUNKS);
logger.log('num chunks:' + NUM_CHUNKS);
logger.log('chunkSize:' + chunkSize + ', totalSize:' + file.size);
// Slice the video into NUM_CHUNKS and append each to the media element.
var i = 0;
(function readChunk_(i) {
var reader = new FileReader();
// Reads aren't guaranteed to finish in the same order they're started in,
// so we need to read + append the next chunk after the previous reader
// is done (onload is fired).
reader.onload = function(e) {
try {
sourceBuffer.appendBuffer(new Uint8Array(e.target.result));
logger.log('appending chunk:' + i);
}catch(e){
console.log(e);
}
if (i == NUM_CHUNKS - 1) {
if(!sourceBuffer.updating)
mediaSource.endOfStream();
} else {
if (video.paused) {
video.play(); // Start playing after 1st chunk is appended.
}
sourceBuffer.addEventListener('updateend', function(e){
if( i < NUM_CHUNKS - 1 )
readChunk_(++i);
});
} //end if
};
var startByte = chunkSize * i;
var chunk = file.slice(startByte, startByte + chunkSize);
reader.readAsArrayBuffer(chunk);
})(i); // Start the recursive call by self calling.
});
}
mediaSource.addEventListener('sourceopen', callback, false);
// mediaSource.addEventListener('webkitsourceopen', callback, false);
//
// mediaSource.addEventListener('webkitsourceended', function(e) {
// logger.log('mediaSource readyState: ' + this.readyState);
// }, false);
function GET(url, callback) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'arraybuffer';
xhr.send();
xhr.onload = function(e) {
if (xhr.status != 200) {
alert("Unexpected status code " + xhr.status + " for " + url);
return false;
}
callback(new Uint8Array(xhr.response));
};
}
</script>
<script>
function Logger(id) {
this.el = document.getElementById(id);
}
Logger.prototype.log = function(msg) {
var fragment = document.createDocumentFragment();
fragment.appendChild(document.createTextNode(msg));
fragment.appendChild(document.createElement('br'));
this.el.appendChild(fragment);
};
Logger.prototype.clear = function() {
this.el.textContent = '';
};
var logger = new Logger('log');
</script>
</body>
</html>
Another test page:
<!DOCTYPE html>
<html>
<head>
<title>MediaSource API Demo</title>
</head>
<body>
<h3>Appending .webm video chunks using the Media Source API</h3>
<section>
<video controls autoplay width="320" height="240"></video>
<pre id="log"></pre>
</section>
<script>
//ORIGINAL CODE http://html5-demos.appspot.com/static/media-source.html
var FILE = 'IU_output2.webm';
// var FILE = 'test_movie_output.webm';
var NUM_CHUNKS = 10;
var video = document.querySelector('video');
var mediaSource = new MediaSource();
video.src = window.URL.createObjectURL(mediaSource);
function callback(e) {
var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');
logger.log('mediaSource readyState: ' + this.readyState);
GET(FILE, function(uInt8Array) {
logger.log('byteLength:' + uInt8Array.byteLength );
sourceBuffer.appendBuffer(uInt8Array);
});
}
mediaSource.addEventListener('sourceopen', callback, false);
// mediaSource.addEventListener('webkitsourceopen', callback, false);
//
// mediaSource.addEventListener('webkitsourceended', function(e) {
// logger.log('mediaSource readyState: ' + this.readyState);
// }, false);
function GET(url, callback) {
var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'arraybuffer';
xhr.send();
xhr.onload = function(e) {
if (xhr.status != 200) {
alert("Unexpected status code " + xhr.status + " for " + url);
return false;
}
callback(new Uint8Array(xhr.response));
};
}
</script>
<script>
function Logger(id) {
this.el = document.getElementById(id);
}
Logger.prototype.log = function(msg) {
var fragment = document.createDocumentFragment();
fragment.appendChild(document.createTextNode(msg));
fragment.appendChild(document.createElement('br'));
this.el.appendChild(fragment);
};
Logger.prototype.clear = function() {
this.el.textContent = '';
};
var logger = new Logger('log');
</script>
</body>
</html>
Thanks.
I'm trying to implement the Web Audio API. The code works on Chrome 29.0.1547.76 but not on Safari 6.0.5 (8536.30.1). The key is whether I use noteOn(0) or start(0).
I want to use start() so that I can play part of a sound:
asource.start(0, 2, 1);
works fine in Chrome (plays immediately, starts at the 2 s mark, plays for 1 s) but results in
TypeError: 'undefined' is not a function (evaluating 'asource.start(0, 2, 1)')
on Safari. Replacing that one line with
asource.noteOn(0);
works. [Well, I need to call noteOff(0) instead of stop(0).] I get the same error with start(0). So, I'm assuming that Safari does not implement start(0)? But if so, why do some of the examples at HTML5 Rocks that use start(0) work?
For reference, here's the complete web page. I've tried many different sounds/formats; all result in the same error.
<!DOCTYPE html>
<html lang=en>
<head>
<meta charset="utf-8">
<title>Web Audio API Issue</title>
</head>
<body>
<p>Example working on Chrome but not Safari when using start()</p>
<button id="Load" onclick="init()" >Load</button>
<button id="Play" onclick="playSound()" disabled>Play</button>
<button id="Stop" onclick="stopSound()" disabled>Stop</button>
<script>
var web_audio_context;
var abuffer;
var asource;
function init() {
contextClass = (window.AudioContext ||
window.webkitAudioContext ||
window.mozAudioContext ||
window.msAudioContext);
web_audio_context = new contextClass();
var theURL = './digits.mp3';
var xhr = new XMLHttpRequest();
xhr.open('GET', theURL, true);
xhr.responseType = 'arraybuffer';
xhr.onload = function(e) {
finishedLoading(this.response);
};
xhr.send();
}
function finishedLoading(arrayBuffer) {
web_audio_context.decodeAudioData(arrayBuffer, function(buffer) {
abuffer = buffer;
document.getElementById('Load').disabled = true;
document.getElementById('Play').disabled = false;
document.getElementById('Stop').disabled = false;
}, function(e) {
console.log('Error decoding file', e);
});
}
function playSound() {
asource = web_audio_context.createBufferSource();
asource.buffer = abuffer;
asource.connect(web_audio_context.destination);
asource.start(0, 2, 1);
}
function stopSound() {
asource.stop(0);
}
</script>
</body>
</html>
In newer versions of the Web Audio API, the method noteOn was renamed to start. Safari still implements the older version, while Chrome uses a more current one.
Just try:
asource.start ? asource.start(0, 2, 1) : asource.noteOn(0, 2, 1);
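Since the question also needs to stop playback (noteOff instead of stop), the same feature test extends there; a small sketch mirroring the line above:

```
// Hedged sketch: old/new-API fallback for stopping the source.
function stopSound() {
  asource.stop ? asource.stop(0) : asource.noteOff(0);
}
```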
Try adding the Web Audio API monkey patch library by Chris Wilson to your project.
It might help.