Play/pause using howler.js with meteor framework - javascript

Okay, so I'm trying to let users play/pause a sound by clicking on the gif (click once to play, click again to pause). Currently I have it set up so the user can only play the sound once, without being able to stop it.
I'm using the JavaScript audio library howler.js and the Meteor framework.
Below is the code:
Template.gif.rendered = function () {
    freezeframe_options = {
        trigger_event: "click"
    };
    $.getScript("/client/scripts/freezeframe.js", function () {
        $(".gif").click(function () {
            if (!$(this).hasClass('played')) {
                var gifId = $(this).attr("data-gif-id"); // Will return the gif ID number
                var soundFile = $(this).attr("data-sound-file"); // Will return the sound file
                var fileFormat = "mp3";
                var mp3Test = new Audio();
                var canPlayMP3 = (typeof mp3Test.canPlayType === "function" && mp3Test.canPlayType("audio/mpeg") !== "");
                if (!canPlayMP3) {
                    fileFormat = "ogg";
                }
                var sound = new Howl({
                    urls: ['sounds/' + soundFile + '.' + fileFormat]
                }).play();
                $(this).addClass('played');
            }
        });
    });
};

I'm using a few classes to track the current playback state:
playing = the sound is currently being played
paused = the sound is currently paused
played = the sound has been listened to completely at least once
I've created a howlers object to store the Howl instances, keyed off of the data-gif-id (the key is the data-gif-id and the value is the Howl object). If the data-gif-id key is not in the howlers object, I create a new Howl instance; otherwise I just call the play() and pause() methods on the corresponding value that is already in the howlers object.
Here is the code:
Template.gif.rendered = function () {
    freezeframe_options = {
        trigger_event: "click"
    };
    howlers = {}; // set up an object to hold the Howl instances
    // moved these static lines out of the click function
    var fileFormat = "mp3";
    var mp3Test = new Audio();
    var canPlayMP3 = (typeof mp3Test.canPlayType === "function" && mp3Test.canPlayType("audio/mpeg") !== "");
    if (!canPlayMP3) {
        fileFormat = "ogg";
    }
    $.getScript("/client/scripts/freezeframe.js", function () {
        $(".gif").click(function () {
            var e = $(this);
            var soundFile = e.attr("data-sound-file") + '.' + fileFormat; // Will return the sound file
            var gifId = e.attr("data-gif-id"); // Will return the gif ID number
            if (gifId in howlers) {
                if (e.hasClass('paused')) { // If currently paused, unpause
                    e.removeClass('paused');
                    e.addClass('playing');
                    howlers[gifId].play();
                } else if (e.hasClass('playing')) { // If currently playing, pause
                    e.removeClass('playing');
                    e.addClass('paused');
                    howlers[gifId].pause();
                } else { // If not playing and not paused, play
                    e.addClass('playing');
                    howlers[gifId].play();
                }
            } else { // this is a new instance, so add it to the howlers object and start playing
                howlers[gifId] = new Howl({
                    urls: ['sounds/' + soundFile],
                    onend: function () { // when playing ends, add the 'played' class to track which sounds have been played completely
                        e.removeClass('playing');
                        e.addClass('played');
                    }
                });
                e.addClass('playing');
                howlers[gifId].play();
            }
        });
    });
};
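One thing worth noting: howlers is global and the Howl instances persist, so sounds can keep playing after the template goes away. Below is a minimal cleanup sketch, assuming the same old-style Meteor template callbacks used above (destroyed is the counterpart to rendered) and howler's stop()/unload() methods:
Template.gif.destroyed = function () {
    // Sketch: stop and release every cached Howl instance when the
    // template is torn down, so audio doesn't bleed across routes.
    for (var gifId in howlers) {
        if (howlers.hasOwnProperty(gifId)) {
            howlers[gifId].stop();   // halt playback immediately
            howlers[gifId].unload(); // free the underlying audio resources
        }
    }
    howlers = {};
};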

Related

how to live stream data in webrtc

I am currently new to WebRTC. I have watched videos about it, but the problem is that they only cover one-to-one connections. I want to stream a video on a specific URL, let us say test.com/live, and whoever visits this URL can see the stream, unlike normal peer-to-peer.
navigator.mediaDevices
    .getUserMedia({ video: true, audio: true })
    .then((currentStream) => {
        setStream(currentStream);
        myVideo.current.srcObject = currentStream;
    });
This is the code to get my media data. How can I stream this data to that particular URL? I am new to WebRTC, can anybody explain?
This is a snippet from a video streamer I built; you can create a data stream and attach it.
I hope this can be useful.
Peer-to-peer communications with WebRTC
<script>
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
if (navigator.mozGetUserMedia) {
    console.log("This appears to be Firefox");
    webrtcDetectedBrowser = "firefox";
    // The RTCPeerConnection object.
    RTCPeerConnection = mozRTCPeerConnection;
    // The RTCSessionDescription object.
    RTCSessionDescription = mozRTCSessionDescription;
    // The RTCIceCandidate object.
    RTCIceCandidate = mozRTCIceCandidate;
    // Get UserMedia (only difference is the prefix).
    // Code from Adam Barth.
    getUserMedia = navigator.mozGetUserMedia.bind(navigator);
    // Attach a media stream to an element.
    attachMediaStream = function (element, stream) {
        console.log("Attaching media stream");
        element.src = URL.createObjectURL(stream);
        element.play();
    };
    reattachMediaStream = function (to, from) {
        console.log("Reattaching media stream");
        to.mozSrcObject = from.mozSrcObject;
        to.play();
    };
    // Fake get{Video,Audio}Tracks
    MediaStream.prototype.getVideoTracks = function () {
        return [];
    };
    MediaStream.prototype.getAudioTracks = function () {
        return [];
    };
} else if (navigator.webkitGetUserMedia) {
    console.log("This appears to be Chrome");
    webrtcDetectedBrowser = "chrome";
    // The RTCPeerConnection object.
    RTCPeerConnection = webkitRTCPeerConnection;
    // Get UserMedia (only difference is the prefix).
    // Code from Adam Barth.
    getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
    // Attach a media stream to an element.
    attachMediaStream = function (element, stream) {
        element.src = webkitURL.createObjectURL(stream);
    };
    reattachMediaStream = function (to, from) {
        to.src = from.src;
    };
    // The representation of tracks in a stream is changed in M26.
    // Unify them for earlier Chrome versions in the coexisting period.
    if (!webkitMediaStream.prototype.getVideoTracks) {
        webkitMediaStream.prototype.getVideoTracks = function () {
            return this.videoTracks;
        };
        webkitMediaStream.prototype.getAudioTracks = function () {
            return this.audioTracks;
        };
    }
    // New syntax of getXXXStreams method in M26.
    if (!webkitRTCPeerConnection.prototype.getLocalStreams) {
        webkitRTCPeerConnection.prototype.getLocalStreams = function () {
            return this.localStreams;
        };
        webkitRTCPeerConnection.prototype.getRemoteStreams = function () {
            return this.remoteStreams;
        };
    }
} else {
    console.log("Browser does not appear to be WebRTC-capable");
}
</script>
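Note that the snippet above only detects the browser and normalizes the old prefixed APIs; it doesn't do the one-to-many fan-out the question actually asks about. As a hedged sketch of that part, using the modern unprefixed WebRTC API: the broadcaster keeps one RTCPeerConnection per viewer and adds the same local tracks to each. The signaling object here is a hypothetical server-relayed channel (e.g. a WebSocket wrapper) that you would still have to build; for many viewers, a real deployment would put an SFU such as Janus or mediasoup behind test.com/live instead of fanning out from the browser.
// One RTCPeerConnection per viewer, all fed the same local tracks.
// `signaling` is a hypothetical message channel to your server.
const peers = new Map();

async function broadcastTo(viewerId, localStream, signaling) {
    const pc = new RTCPeerConnection({
        iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
    });
    peers.set(viewerId, pc);
    // Send the broadcaster's camera/mic tracks to this viewer.
    localStream.getTracks().forEach((track) => pc.addTrack(track, localStream));
    // Relay ICE candidates to the viewer as they are gathered.
    pc.onicecandidate = (e) => {
        if (e.candidate) signaling.send({ to: viewerId, candidate: e.candidate });
    };
    // Standard offer/answer exchange, relayed by the signaling server.
    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);
    signaling.send({ to: viewerId, sdp: pc.localDescription });
}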

I'm capturing screen by using media recorder and making video from blob but that video is not showing it's duration [duplicate]

I am in the process of replacing RecordRTC with the built-in MediaRecorder for recording audio in Chrome. The recorded audio is then played back in the program with the audio API. I am having trouble getting the audio.duration property to work. The documentation says
If the video (audio) is streamed and has no predefined length, "Inf" (Infinity) is returned.
With RecordRTC, I had to use ffmpeg_asm.js to convert the audio from wav to ogg. My guess is somewhere in the process RecordRTC sets the predefined audio length. Is there any way to set the predefined length using MediaRecorder?
This is a Chrome bug.
Firefox does expose the duration of the recorded media, and if you set the currentTime of the recorded media to more than its actual duration, then the property becomes available in Chrome...
var recorder,
    chunks = [],
    ctx = new AudioContext(),
    aud = document.getElementById('aud');

function exportAudio() {
    var blob = new Blob(chunks);
    aud.src = URL.createObjectURL(blob);
    aud.onloadedmetadata = function () {
        // it should already be available here
        log.textContent = ' duration: ' + aud.duration;
        // handle chrome's bug
        if (aud.duration === Infinity) {
            // set it to bigger than the actual duration
            aud.currentTime = 1e101;
            aud.ontimeupdate = function () {
                this.ontimeupdate = () => {
                    return;
                };
                log.textContent += ' after workaround: ' + aud.duration;
                aud.currentTime = 0;
            };
        }
    };
}

function getData() {
    var request = new XMLHttpRequest();
    request.open('GET', 'https://upload.wikimedia.org/wikipedia/commons/4/4b/011229beowulf_grendel.ogg', true);
    request.responseType = 'arraybuffer';
    request.onload = decodeAudio;
    request.send();
}

function decodeAudio(evt) {
    var audioData = this.response;
    ctx.decodeAudioData(audioData, startRecording);
}

function startRecording(buffer) {
    var source = ctx.createBufferSource();
    source.buffer = buffer;
    var dest = ctx.createMediaStreamDestination();
    source.connect(dest);
    recorder = new MediaRecorder(dest.stream);
    recorder.ondataavailable = saveChunks;
    recorder.onstop = exportAudio;
    source.start(0);
    recorder.start();
    log.innerHTML = 'recording...';
    // record only 5 seconds
    setTimeout(function () {
        recorder.stop();
    }, 5000);
}

function saveChunks(evt) {
    if (evt.data.size > 0) {
        chunks.push(evt.data);
    }
}

// we need user-activation
document.getElementById('button').onclick = function (evt) {
    getData();
    this.remove();
};
<button id="button">start</button>
<audio id="aud" controls></audio><span id="log"></span>
So the advice here would be to star the bug report so that the Chromium team takes some time to fix it, even if this workaround can do the trick...
Thanks to @Kaiido for identifying the bug and offering the working fix.
I prepared an npm package called get-blob-duration that you can install to get a nice Promise-wrapped function to do the dirty work.
Usage is as follows:
// Returns Promise<Number>
getBlobDuration(blob).then(function (duration) {
    console.log(duration + ' seconds');
});
Or ECMAScript 6:
// yada yada async
const duration = await getBlobDuration(blob)
console.log(duration + ' seconds')
A bug in Chrome, detected in 2016, but still open today (March 2019), is the root cause behind this behavior. Under certain scenarios audioElement.duration will return Infinity.
Chrome Bug information here and here
The following code provides a workaround to avoid the bug.
Usage: Create your audioElement and call this function a single time, passing a reference to your audioElement. When the returned promise resolves, the audioElement.duration property should contain the right value. (It also fixes the same problem with videoElements.)
/**
 * calculateMediaDuration()
 * Force media element duration calculation.
 * Returns a promise, that resolves when duration is calculated
 **/
function calculateMediaDuration(media) {
    return new Promise((resolve, reject) => {
        media.onloadedmetadata = function () {
            // set the mediaElement.currentTime to a high value beyond its real duration
            media.currentTime = Number.MAX_SAFE_INTEGER;
            // listen to time position change
            media.ontimeupdate = function () {
                media.ontimeupdate = function () {};
                // setting player currentTime back to 0 can be buggy too, set it first to .1 sec
                media.currentTime = 0.1;
                media.currentTime = 0;
                // media.duration should now have its correct value, return it...
                resolve(media.duration);
            };
        };
    });
}

// USAGE EXAMPLE :
calculateMediaDuration(yourAudioElement).then(() => {
    console.log(yourAudioElement.duration);
});
Thanks @colxi for the actual solution. I've added some validation steps, as the solution was working fine but had problems with long audio files.
It took me about 4 hours to get it to work with long audio files; it turns out validation was the fix.
function fixInfinity(media) {
    return new Promise((resolve, reject) => {
        // Wait for the media to load its metadata
        media.onloadedmetadata = () => {
            // Change the current time to force ontimeupdate to fire
            media.currentTime = Number.MAX_SAFE_INTEGER;
            // Check whether the duration is Infinity, NaN or undefined
            if (ifNull(media)) {
                media.ontimeupdate = () => {
                    // If the duration is now valid, resolve the promise with it
                    if (!ifNull(media)) {
                        resolve(media.duration);
                    }
                    // The second ontimeupdate is a fallback if the first one fails
                    media.ontimeupdate = () => {
                        if (!ifNull(media)) {
                            resolve(media.duration);
                        }
                    };
                };
            } else {
                // If the media duration was never Infinity, return it directly
                resolve(media.duration);
            }
        };
    });
}

// Check whether the duration is Infinity, NaN or undefined
// (note: duration === NaN is always false, so use Number.isNaN)
function ifNull(media) {
    return media.duration === Infinity || Number.isNaN(media.duration) || media.duration === undefined;
}

// USAGE EXAMPLE
// Get the audio player in the html
const AudioPlayer = document.getElementById('audio');
const getInfinity = async () => {
    // Await the promise for the fixed duration
    const val = await fixInfinity(AudioPlayer);
    // Reset the audio's current time
    AudioPlayer.currentTime = 0;
    // Log the duration
    console.log(val);
};
I wrapped the webm-duration-fix package to solve the webm duration problem. It can be used in Node.js and web browsers, and supports video files over 2GB without too much memory usage.
Usage is as follows:
import fixWebmDuration from 'webm-duration-fix';

const mimeType = 'video/webm;codecs=vp9';
let blobSlice: BlobPart[] = [];

mediaRecorder = new MediaRecorder(stream, {
    mimeType
});

mediaRecorder.ondataavailable = (event: BlobEvent) => {
    blobSlice.push(event.data);
};

mediaRecorder.onstop = async () => {
    // fix blob, supports fixing webm files larger than 2GB
    const fixBlob = await fixWebmDuration(new Blob([...blobSlice], { type: mimeType }));
    // to write locally, it is recommended to use fs.createWriteStream to reduce memory usage
    const fileWriteStream = fs.createWriteStream(inputPath);
    const blobReadstream = fixBlob.stream();
    const blobReader = blobReadstream.getReader();
    while (true) {
        let { done, value } = await blobReader.read();
        if (done) {
            console.log('write done.');
            fileWriteStream.close();
            break;
        }
        fileWriteStream.write(value);
        value = null;
    }
    blobSlice = [];
};
If you want to modify the video file itself, you can use the webmFixDuration package; the other methods are applied at the display level, only on the video tag, whereas this method modifies the complete video file.
webmFixDuration github example
mediaRecorder.onstop = async () => {
    const duration = Date.now() - startTime;
    const buggyBlob = new Blob(mediaParts, { type: 'video/webm' });
    const fixedBlob = await webmFixDuration(buggyBlob, duration);
    displayResult(fixedBlob);
};

Using a switch case inside for loop to play audio too quick

I'm trying to play sounds by converting a number to a string, splitting it into an array of digits, looping over that array, and using a switch case to detect what's in each element.
function keeper() {
    number2 = get.num;
    sNumber = number2.toString();
    output = [];
    for (i = 0, len = sNumber.length; i < len; i++) {
        output.push(+sNumber.charAt(i));
        console.log(output);
        switch (output[i]) {
            case 0:
                console.log('0');
                audio0 = new Audio('logo/Q0.wav');
                audio0.play();
                break;
            case 1:
                console.log('1');
                audio1 = new Audio('logo/Q1.wav');
                audio1.play();
                break;
            case 2:
                console.log('2');
                audio2 = new Audio('logo/Q2.wav');
                audio2.play();
                break;
            case 3:
                console.log('3');
                audio3 = new Audio('logo/Q3.wav');
                audio3.play();
                break;
            case 4:
                console.log('4');
                audio4 = new Audio('logo/Q4.wav');
                audio4.play();
                break;
            case 5:
                console.log('5');
                audio5 = new Audio('logo/Q5.wav');
                audio5.play();
                break;
        }
    }
}
The function works just fine, but the sounds play too quickly, one on top of another. Is there any solution to fix this?
I'm assuming you want to hear the sounds play one after another?
That doesn't work like this.
Let's say the first number in the array is 0, so sound 0 gets played.
But since you loop through the array, when you reach the next number, e.g. 2, sound 2 gets played immediately after.
The loop doesn't wait for the first sound to finish before starting the next play().
What you could do is modify the loop to wait for the audio ended event.
for example:
var audio0 = document.getElementById("myAudio");
audio0.onended = function () {
    alert("The audio has ended");
};
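Applied to the digit sounds from the question, that idea could look like the following sketch (assuming the same logo/Q<digit>.wav naming; the helper name is mine, not from the original code):
// Play each digit's sound only after the previous one has ended,
// by chaining playback through the ended event.
function playDigitsSequentially(digits) {
    var index = 0;
    function playNext() {
        if (index >= digits.length) return; // all digits played
        var audio = new Audio('logo/Q' + digits[index] + '.wav');
        index++;
        audio.onended = playNext; // start the next sound when this one ends
        audio.play();
    }
    playNext();
}
// e.g. for the number 502:
playDigitsSequentially([5, 0, 2]);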
Try using an audio sprite. I'm sure there's an app or whatever to do certain tasks programmatically, but be aware that steps 1 and 2 are done manually.
1. Take a group of audio files and either use Audacity or an online service to join them into one file.
2. Next, get the start times of each clip of the audio file and store them in an array.
The following demo will take the file and array, generate the HTML layout, and create a button for each clip that corresponds to the array parameter. So when a button is clicked, it will play only a clip of the audio sprite (the audio file).
The audio sprite in this demo was not edited very well; I just made it to demonstrate how everything works. The timing relies on the timeupdate event, which checks the playing time about every 250ms, give or take. So if you want more accurate start and end times, try leaving a gap of 250ms between clips.
Details are commented in the demo.
Demo
// Store path to audio file in a variable
var xFile = 'https://storage04.dropshots.com/photos7000/photos/1381926/20180318/175955.mp4';
// Store cues of each start time of each clip in an array
var xMap = [0, 1.266, 2.664, 3.409, 4.259, 4.682, 5.311, 7.169, 7.777, 9.575, 10.88, 11.883, 13.64, 15.883, 16.75, 17, 17.58];

/* Register doc to act when the DOM is ready but before the images
|| are fully loaded. When that occurs, call loadAudio()
*/
document.addEventListener('DOMContentLoaded', function (e) {
    loadAudio(e, xFile, xMap);
});

/* Pass the Event Object, file, and array through
|| Make a Template Literal of the HTML layout and the hidden
|| <audio> tag. Interpolate the ${file} into the <audio> tag.
|| Insert the TL into the <body> and parse it into HTML.
== Call generateBtn() function...
*/
function loadAudio(e, file, map) {
    var template = `
    <fieldset class='set'>
        <legend>Sound FX Test Panel</legend>
    </fieldset>
    <audio id='sndFX' hidden>
        <source src='${file}' type='audio/wav'>
    </audio>`;
    document.body.insertAdjacentHTML('beforeend', template);
    generateBtn(e, map);
}

/* Pass the Event Object and the array through
|| Reference fieldset.set
|| Create a documentFragment in order to speed up appending
|| map() the array...
|| Create a <button>
|| Set btn class to .btn
|| Set button.btn data-idx to the corresponding index value of
|| the map array.
|| Set button.btn text to its index number.
|| Add button.btn to the documentFragment...
|| Return an array of .btn (not used in this demo)
== Call the eventBinder() function...
*/
function generateBtn(e, map) {
    var set = document.querySelector('.set');
    var frag = document.createDocumentFragment();
    map.map(function (mark, index, map) {
        var btn = document.createElement('button');
        btn.className = 'btn';
        btn.dataset.idx = map[index];
        btn.textContent = index;
        frag.appendChild(btn);
        return btn;
    });
    set.appendChild(frag);
    eventBinder(e, set, map);
}

/* Pass EventObject, fieldset.set, and map array through
|| Reference the <audio> tag.
|| Register fieldset.set to the click event
|| If the clicked node (e.target) class is .btn...
|| Determine the start and end time of the audio clip.
== Call playClip() function
*/
function eventBinder(e, set, map) {
    var sFX = document.getElementById('sndFX');
    set.addEventListener('click', function (e) {
        if (e.target.className === 'btn') {
            var cue = parseFloat(e.target.textContent);
            var start = parseFloat(e.target.dataset.idx);
            var end;
            if (cue !== (map.length - 1)) {
                end = parseFloat(e.target.nextElementSibling.dataset.idx);
            } else {
                end = parseFloat(sFX.duration);
            }
            playClip.call(this, sFX, start, end);
        } else {
            return false;
        }
    });
}

/* Pass the reference to the <audio> tag, start and end of clip
|| Pause audio
|| Set the currentTime to the start parameter
|| Listen for timeupdate event...
|| Should currentTime meet or exceed the end parameter...
|| pause the <audio> tag.
*/
function playClip(sFX, start, end) {
    sFX.pause();
    sFX.currentTime = start;
    sFX.play();
    sFX.ontimeupdate = function () {
        if (sFX.currentTime >= end) {
            sFX.pause();
        }
    };
    return false;
}
Try to use a timer:
for (var i = 1; i <= 3; i++) {
    (function (index) {
        setTimeout(function () { alert(index); }, i * 1000);
    })(i);
}
Use the setTimeout function like that.
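Applied to the question's digit sounds, the same staggering idea might look like this sketch (the one-second delay is a guess at the clip length, unlike the ended-event approach above, which adapts automatically):
// Start each digit's sound at a fixed offset with setTimeout.
var digits = [5, 0, 2]; // hypothetical digits parsed from the number
digits.forEach(function (digit, index) {
    setTimeout(function () {
        new Audio('logo/Q' + digit + '.wav').play();
    }, index * 1000); // assumes each clip lasts about one second
});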

Audio Event Listener Getting Called Non Stop

I initially noticed this issue with the timeupdate event, occurring only on Firefox, but found that it applies to other event listeners as well (so far I have seen it on canplay).
Basically, I have an audio element that I create inside of an Angular directive; I bind a function to ontimeupdate, and that event gets fired nonstop, even when the currentTime value is 0.
Brief summary of the code:
// angular directive controller function
...
DOMAudioObject = createAudioObject(src);
DOMAudioObject.audio.ontimeupdate = onTimeUpdate;

function createAudioObject(src) {
    return {
        audio: new Audio(src),
        playback: $scope.playback,
        name: $scope.name
    };
}

function onTimeUpdate() {
    var currentTime = DOMAudioObject.audio.currentTime;
    console.log(currentTime);
    $scope.currentTime = currentTime;
    $scope.audio.currentTime = currentTime;
    $scope.$apply();
}
Full controller code:
function audioCtrl($scope) {
    // private reference to the DOM object that plays the audio
    var DOMAudioObject,
        watchers = {unbind: {}};

    // register watchers once the model is available, we need at least the id field
    watchers.unbind.model = $scope.$watch('model', init);

    // remove watchers when the user navigates away
    $scope.$on('$destroy', destroyWatchers);

    function applyAudioPropertiesAsync() {
        DOMAudioObject.audio.volume = $scope.volume;
        DOMAudioObject.audio.currentTime = $scope.currentTime;
        $scope.audio.duration = DOMAudioObject.audio.duration;
    }

    function applyAudioMetaProperties() {
        $scope.audio = $scope.audio || {};
        $scope.audio.id = $scope.model.id;
        $scope.audio.playback = $scope.playback;
        $scope.audio.name = $scope.model.name;
        $scope.audio.volume = $scope.volume;
        $scope.audio.currentTime = $scope.currentTime;
        $scope.audio.duration = DOMAudioObject.audio.duration || 0;
    }

    // fired when the audio object has been loaded from src
    function bindAudio(src, oldSrc) {
        if (src === undefined) {
            return;
        }
        // now safe to register watchers since they rely on the audio object
        registerWatchers();
        // if there is already a stored audio object associated with this visual, use it
        DOMAudioObject = $audio.get($scope.model.id);
        // audio src has been updated, reflect by pausing and creating a new audio object
        if (oldSrc && src !== oldSrc) {
            $scope.playback.play = false;
            $scope.currentTime = 0.0;
            pause(DOMAudioObject);
            DOMAudioObject = null;
        }
        // create a new audio object or use stored values instead of reinitializing each time
        if (!DOMAudioObject) {
            DOMAudioObject = createAudioObject(src);
            // set in $audio service for persistence across views and controllers
            $audio.set($scope.model.id, DOMAudioObject);
        } else {
            $scope.playback = DOMAudioObject.playback || $scope.playback;
            $scope.currentTime = DOMAudioObject.audio.currentTime || $scope.currentTime;
            $scope.volume = DOMAudioObject.audio.volume || $scope.volume;
        }
        // only bind meta properties, binding actual Audio object causes problems in some browsers
        applyAudioMetaProperties();
        // add values that must be calculated after initial load
        DOMAudioObject.audio.oncanplay = applyAudioPropertiesAsync;
        // tell playback progress indicator to move on timeupdate event by firing a digest cycle
        DOMAudioObject.audio.ontimeupdate = onTimeUpdate;
        // tell animate directive to stop scrolling when the audio has ended
        DOMAudioObject.audio.onended = onAudioEnded;
        // tell parent this directive is ready when the audio has fully loaded
        watchers.unbind.duration = $scope.$watch('audio.duration', function (val) {
            if (val > 0) {
                $scope.$emit('audio.ready', {id: $scope.model.id, audio: $scope.audio});
                watchers.unbind.duration();
            }
        });
    }

    // create a dom audio object
    function createAudioObject(src) {
        return {
            audio: new Audio(src),
            playback: $scope.playback,
            name: $scope.model.name
        };
    }

    function destroyWatchers() {
        if (watchers.unbind.audio) {
            watchers.unbind.audio();
        }
        if (watchers.unbind.playback) {
            watchers.unbind.playback();
        }
        if (watchers.unbind.progress) {
            watchers.unbind.progress();
        }
        if (watchers.unbind.volume) {
            watchers.unbind.volume();
        }
    }

    function init(visual) {
        if (visual === undefined) {
            return;
        }
        // prevent updates to visual model from rebinding audio
        watchers.unbind.model();
        // when the audio-src is available and fully loaded, create audio objects
        watchers.unbind.audio = $scope.$watch('audioSrc', bindAudio);
    }

    function onAudioEnded() {
        // ensure playback variables are updated
        $scope.$apply($scope.playback.play = false);
        $scope.currentTime = 0;
    }

    // timeupdate event to update scope attribute with that of the Audio object
    function onTimeUpdate() {
        var currentTime = DOMAudioObject.audio.currentTime;
        $scope.currentTime = currentTime;
        $scope.audio.currentTime = currentTime;
        $scope.$apply();
    }

    // pause the current track
    function pause(audio) {
        if (audio) {
            audio.audio.pause();
        }
    }

    // play the current track
    function play(audio) {
        if (audio) {
            audio.audio.play();
        }
    }

    function registerWatchers() {
        // allow audio to be toggled on/off
        watchers.unbind.playback = $scope.$watch('playback.play', togglePlay);
        // allow volume changes
        watchers.unbind.volume = $scope.$watch('volume', updateVolume);
        // allow seeking of audio
        watchers.unbind.progress = $scope.$watch('currentTime', seek);
        // update the name variable on the audio object so it reflects in global scope
        watchers.unbind.name = $scope.$watch('model.name', applyAudioMetaProperties);
    }

    // move audio position pointer to a new place
    function seek(val) {
        var threshold = 1,
            curr = DOMAudioObject.audio.currentTime;
        if ((val >= curr + threshold) || (val <= curr - threshold)) {
            DOMAudioObject.audio.currentTime = val;
        }
    }

    // toggle play/pause
    function togglePlay(val) {
        if (val) {
            play(DOMAudioObject);
            $audio.setGlobal($scope.audio);
        } else {
            pause(DOMAudioObject);
        }
    }

    // allow the volume to be changed, scope reference updates automatically (pass by reference)
    function updateVolume(val) {
        if (val) {
            DOMAudioObject.audio.volume = val;
        }
    }
}
The onTimeUpdate() function keeps getting called over and over again, even though the value of currentTime hasn't changed (it is 0 each and every time).
Again, this only occurs on Firefox; Chrome, Safari, and even Internet Explorer behave nicely. I am running Firefox 40.0.3 on Mac OS X 10.11 El Capitan, with Angular 1.4.6.
Does anyone have some insight into what might be happening here, and any potential solutions to fix it?
It turns out the culprit was this line:
DOMAudioObject.audio.currentTime = $scope.currentTime;
Apparently, setting this value causes the event listeners to fire indefinitely. I have no idea why, but I removed this line and everything works as expected.
I posted a new question to see if anyone has some insight into this strangeness:
Setting currentTime attribute programatically on audio element causes event listeners to fire indefinitely
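If the assignment can't simply be deleted (say, you need to restore a saved position), a guarded write is a plausible workaround; here is a sketch of the applyAudioPropertiesAsync function above rewritten that way (the 0.25-second tolerance is an arbitrary choice, not from the original post):
function applyAudioPropertiesAsync() {
    DOMAudioObject.audio.volume = $scope.volume;
    // Only touch currentTime when the scope value has genuinely diverged
    // from the element, so writing it back doesn't retrigger timeupdate.
    var drift = Math.abs(DOMAudioObject.audio.currentTime - $scope.currentTime);
    if (drift > 0.25) {
        DOMAudioObject.audio.currentTime = $scope.currentTime;
    }
    $scope.audio.duration = DOMAudioObject.audio.duration;
}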

Web Audio Api trouble (DOM Exception 12)

I got a strange error (SYNTAX_ERR: DOM Exception 12) on Chrome with the Audio API. I was trying the Audio API for the first time and did the tutorial (a few times) by Kyle Nau (http://www.youtube.com/watch?v=1wYTkZVQKzs). When I run code that simply plays mp3s, all sounds play fine, but when I try to add the volume control block from the same tutorial, only the last sound in the list of new object creations plays; the first two show "SYNTAX_ERR: DOM Exception 12" on play. I checked the mp3s and changed their position in the declarations, with the same bad effect. Remove the volume control and everything plays fine again. In the tutorial everything is fine too.
Tests show that the problem appears when I uncomment this part:
playSound.connect(this.gainNode);
this.gainNode.connect(audioContext.destination);
I can't understand why this error appears.
Here is the code. This is the fine working variant (I marked the problem places with comments):
function Sound(source, level) {
    if (!window.audioContext) {
        audioContext = new webkitAudioContext();
    }
    var that = this;
    that.source = source;
    that.buffer = null;
    that.isLoaded = false;
    // that.gainNode = audioContext.createGain();
    // if (!level) {
    //     that.gainNode.gain.value = 1;
    // } else {
    //     that.gainNode.gain.value = level;
    // }
    var getSound = new XMLHttpRequest();
    getSound.open("GET", that.source, true);
    getSound.responseType = "arraybuffer";
    getSound.onload = function () {
        audioContext.decodeAudioData(getSound.response, function (buffer) {
            that.buffer = buffer;
            that.isLoaded = true;
        });
    };
    getSound.send();
}

Sound.prototype.play = function () {
    if (this.isLoaded === true) {
        var playSound = audioContext.createBufferSource();
        playSound.buffer = this.buffer;
        // playSound.connect(this.gainNode);
        // this.gainNode.connect(audioContext.destination);
        playSound.connect(audioContext.destination);
        playSound.noteOn(0);
    }
};

// Sound.prototype.setVolume = function (level) {
//     this.gainNode.gain.value = level;
// };

var laserSound = new Sound("sound/laser.mp3");
var dropSound = new Sound("sound/drop.mp3");
var pickupSound = new Sound("sound/pickup.mp3");
// laserSound.setVolume(.1);

window.addEventListener("keydown", onKeyDown);

function onKeyDown(event) {
    switch (event.keyCode) {
        // Z
        case 90:
            laserSound.play();
            break;
        // X
        case 88:
            dropSound.play();
            break;
        // C
        case 67:
            pickupSound.play();
            break;
    }
}
When you create your gain node in the first line you've commented out, it must be audioContext.createGainNode() rather than audioContext.createGain().
It looks like you're missing the Node.
I hope that helps.
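For context: createGainNode() was the old name in prefixed webkitAudioContext builds, and the method was later standardized as createGain(), so code written against one API can throw on the other. A small compatibility sketch that picks whichever exists:
// Use the standard method when present, falling back to the legacy
// webkit name on older browsers.
var gainNode = audioContext.createGain
    ? audioContext.createGain()
    : audioContext.createGainNode();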
You have a syntax error somewhere. You don't need to put semicolons after function declarations. You would only use a semicolon in this case:
var myFunction = function () {
};
