Web Audio API trouble (DOM Exception 12) - javascript

I hit a strange error (SYNTAX_ERR: DOM Exception 12) in Chrome with the Web Audio API. This is my first time using the Audio API; I followed Kyle Nau's tutorial (http://www.youtube.com/watch?v=1wYTkZVQKzs) a few times. When I run the code with simple mp3 playback, all sounds play fine, but when I add the volume control block from the same tutorial, only the last sound in the list of new Sound objects plays. The first two throw "SYNTAX_ERR: DOM Exception 12" on play. I checked the mp3s and changed their order of declaration, with the same bad effect. Remove the volume control and everything plays fine again. In the tutorial it all works too.
Tests show that the problem appears when I uncomment this part:
playSound.connect(this.gainNode);
this.gainNode.connect(audioContext.destination);
I can't understand why this error appears.
Here is the code. This is the working variant (I marked the problem places with comments):
function Sound(source, level) {
    if (!window.audioContex) {
        audioContext = new webkitAudioContext;
    };
    var that = this;
    that.source = source;
    that.buffer = null;
    that.isLoaded = false;
    // that.gainNode = audioContext.createGain();
    // if (!level) {
    //     that.gainNode.gain.value = 1;
    // } else {
    //     that.gainNode.gain.value = level;
    // };
    var getSound = new XMLHttpRequest();
    getSound.open("GET", that.source, true);
    getSound.responseType = "arraybuffer";
    getSound.onload = function() {
        audioContext.decodeAudioData(getSound.response, function(buffer) {
            that.buffer = buffer;
            that.isLoaded = true;
        });
    };
    getSound.send();
};
Sound.prototype.play = function() {
    if (this.isLoaded === true) {
        var playSound = audioContext.createBufferSource();
        playSound.buffer = this.buffer;
        // playSound.connect(this.gainNode);
        // this.gainNode.connect(audioContext.destination);
        playSound.connect(audioContext.destination);
        playSound.noteOn(0);
    };
};
// Sound.prototype.setVolume = function(level) {
//     this.gainNode.gain.value = level;
// };
var laserSound = new Sound("sound/laser.mp3");
var dropSound = new Sound("sound/drop.mp3");
var pickupSound = new Sound("sound/pickup.mp3");
// laserSound.setVolume(.1);
window.addEventListener("keydown", onKeyDown);
function onKeyDown(event) {
    switch (event.keyCode) {
        //Z
        case 90:
            laserSound.play();
            break;
        //X
        case 88:
            dropSound.play();
            break;
        //C
        case 67:
            pickupSound.play();
            break;
    };
};

When you create your gain node in the first line you've commented out, it must be audioContext.createGainNode() rather than audioContext.createGain(). It looks like you're just missing the Node.
I hope that helps.
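For reference, a minimal sketch of that block hedged against both API generations (createGainNode was the pre-standard name on the prefixed webkitAudioContext; later browsers renamed it to the standard createGain):
var makeGain = audioContext.createGain || audioContext.createGainNode;
that.gainNode = makeGain.call(audioContext); // works on old and new implementations
if (!level) {
    that.gainNode.gain.value = 1;
} else {
    that.gainNode.gain.value = level;
}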

You have a syntax error somewhere. You don't need to put semicolons after function declarations. You would only use a semicolon in this case:
var myFunction = function() {
};
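To illustrate the difference:
// Function declaration: no semicolon needed after the closing brace
function play() { /* ... */ }
// Function expression: the assignment is a statement, so it ends with a semicolon
var playHandler = function() { /* ... */ };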


How to make audio element more responsive?

Introduction
Greetings. I have looked at some similar questions on this platform and none of them seem to match my problem, or maybe I missed something, but I will give it a try because I am new on this platform.
Overview
This audio app maps keyboard keys to audio samples.
Problem
There is an audio delay when pressing the keys rapidly: after each press the sample keeps playing for 4 to 6 seconds before another sound is triggered, and the sound is not killed immediately when the keyboard button is released.
Code
This is what I tried:
const dump = console.log.bind(console);
const sample = new Object
({
    drum: "/samples/drums/trance01/BD_Trance.wav",
    clap: "/samples/drums/trance01/Clap_trance.wav",
});
const keymap = new Object
({
    "KeyD": "drum",
    "KeyC": "clap",
});
Object.keys(sample).forEach((smpl) =>
{
    let node = document.createElement("audio");
    node.id = (smpl + "Sample");
    node.className = "instrument";
    node.src = sample[smpl];
    document.body.appendChild(node);
});
document.body.addEventListener("keydown", function keyListener(event)
{
    event.preventDefault(); // kill it
    event.stopPropagation(); // seal its ashes in a capsule
    event.stopImmediatePropagation(); // and hurl it into the sun!
    let key = event.code;
    // console.log("pressed: " + key);
    let tgt = keymap[key];
    if (!tgt) { dump(key + " - is unused"); return };
    var intervalID = setInterval(myCallback, 500, 'Parameter 1', 'Parameter 2');
    function myCallback(a, b)
    {
        let nde = document.getElementById(tgt + "Sample");
        nde.play();
        dump("play: " + tgt);
    }
});
Following what happens in the logic, and how elements respond in their own way (and time), is important for analysing what the issue may be.
In this case I believe this can be solved by cloning the source node: an audio element won't play multiple instances of itself by itself, but we can force it to:
const dump = console.log.bind(console);
const sample = new Object
({
    drum: "/samples/drums/trance01/BD_Trance.wav",
    clap: "/samples/drums/trance01/Clap_trance.wav",
});
const keymap = new Object
({
    "KeyD": "drum",
});
(Object.keys(sample)).forEach((item, indx) =>
{
    let node = document.createElement("audio");
    node.id = (item + "Sample");
    node.className = "instrument";
    node.src = sample[item];
    document.body.appendChild(node);
});
function keyHandler(event)
{
    if (event.ctrlKey) { return }; // whew .. that was annoying as f*ck
    event.preventDefault(); // kill it
    event.stopPropagation(); // seal its ashes in a capsule
    event.stopImmediatePropagation(); // and hurl it into the sun!
    let key = event.code;
    let tag = keymap[key];
    let nde, tgt;
    tgt = document.getElementById(tag + "Sample");
    if (!tgt) { dump(key + " - is unused"); return };
    nde = tgt.cloneNode(true); // clone the <audio> element so overlapping presses each get their own player
    nde.id = (tgt.id + "Clone");
    nde.addEventListener("ended", function()
    {
        this.parentNode.removeChild(this); // remove the clone once it has finished playing
    });
    document.body.appendChild(nde);
    nde.play();
    dump("playing: " + tgt);
}
document.body.addEventListener("keydown", keyHandler);

I'm capturing the screen using MediaRecorder and making a video from the blob, but that video is not showing its duration [duplicate]

I am in the process of replacing RecordRTC with the built-in MediaRecorder for recording audio in Chrome. The recorded audio is then played in the program with the Audio API. I am having trouble getting the audio.duration property to work. It says:
If the video (audio) is streamed and has no predefined length, "Inf" (Infinity) is returned.
With RecordRTC, I had to use ffmpeg_asm.js to convert the audio from wav to ogg. My guess is that somewhere in that process RecordRTC sets the predefined audio length. Is there any way to set the predefined length using MediaRecorder?
This is a Chrome bug.
FF does expose the duration of the recorded media, and if you set the currentTime of the recorded media to more than its actual duration, then the property becomes available in Chrome...
var recorder,
    chunks = [],
    ctx = new AudioContext(),
    aud = document.getElementById('aud');
function exportAudio() {
    var blob = new Blob(chunks);
    aud.src = URL.createObjectURL(new Blob(chunks));
    aud.onloadedmetadata = function() {
        // it should already be available here
        log.textContent = ' duration: ' + aud.duration;
        // handle chrome's bug
        if (aud.duration === Infinity) {
            // set it to bigger than the actual duration
            aud.currentTime = 1e101;
            aud.ontimeupdate = function() {
                this.ontimeupdate = () => {
                    return;
                }
                log.textContent += ' after workaround: ' + aud.duration;
                aud.currentTime = 0;
            }
        }
    }
}
function getData() {
    var request = new XMLHttpRequest();
    request.open('GET', 'https://upload.wikimedia.org/wikipedia/commons/4/4b/011229beowulf_grendel.ogg', true);
    request.responseType = 'arraybuffer';
    request.onload = decodeAudio;
    request.send();
}
function decodeAudio(evt) {
    var audioData = this.response;
    ctx.decodeAudioData(audioData, startRecording);
}
function startRecording(buffer) {
    var source = ctx.createBufferSource();
    source.buffer = buffer;
    var dest = ctx.createMediaStreamDestination();
    source.connect(dest);
    recorder = new MediaRecorder(dest.stream);
    recorder.ondataavailable = saveChunks;
    recorder.onstop = exportAudio;
    source.start(0);
    recorder.start();
    log.innerHTML = 'recording...'
    // record only 5 seconds
    setTimeout(function() {
        recorder.stop();
    }, 5000);
}
function saveChunks(evt) {
    if (evt.data.size > 0) {
        chunks.push(evt.data);
    }
}
// we need user-activation
document.getElementById('button').onclick = function(evt) {
    getData();
    this.remove();
}
<button id="button">start</button>
<audio id="aud" controls></audio><span id="log"></span>
So the advice here would be to star the bug report so that Chromium's team takes some time to fix it, even if this workaround can do the trick...
Thanks to @Kaiido for identifying the bug and offering the working fix.
I prepared an npm package called get-blob-duration that you can install to get a nice Promise-wrapped function to do the dirty work.
Usage is as follows:
// Returns Promise<Number>
getBlobDuration(blob).then(function(duration) {
    console.log(duration + ' seconds');
});
Or ECMAScript 6:
// yada yada async
const duration = await getBlobDuration(blob)
console.log(duration + ' seconds')
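For example, wired into a MediaRecorder's stop handler (the recorder and chunks variables stand in for whatever recorder setup you already have):
recorder.onstop = async () => {
    const blob = new Blob(chunks, { type: 'audio/webm' });
    const duration = await getBlobDuration(blob); // Promise<Number>
    console.log(duration + ' seconds');
};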
A bug in Chrome, detected in 2016 but still open today (March 2019), is the root cause behind this behavior. Under certain scenarios audioElement.duration will return Infinity.
Chrome Bug information here and here
The following code provides a workaround to avoid the bug.
Usage: Create your audioElement and call this function a single time, providing a reference to your audioElement. When the returned promise resolves, the audioElement.duration property should contain the right value. (It also fixes the same problem with videoElements.)
/**
 * calculateMediaDuration()
 * Force media element duration calculation.
 * Returns a promise that resolves when duration is calculated.
 **/
function calculateMediaDuration(media) {
    return new Promise((resolve, reject) => {
        media.onloadedmetadata = function() {
            // set the mediaElement.currentTime to a high value beyond its real duration
            media.currentTime = Number.MAX_SAFE_INTEGER;
            // listen to time position change
            media.ontimeupdate = function() {
                media.ontimeupdate = function() {};
                // setting player currentTime back to 0 can be buggy too, set it first to .1 sec
                media.currentTime = 0.1;
                media.currentTime = 0;
                // media.duration should now have its correct value, return it...
                resolve(media.duration);
            }
        }
    });
}
// USAGE EXAMPLE :
calculateMediaDuration(yourAudioElement).then(() => {
    console.log(yourAudioElement.duration)
});
Thanks @colxi for the actual solution; I've added some validation steps (the solution was working fine but had problems with long audio files).
It took me about 4 hours to get it to work with long audio files; it turns out validation was the fix.
function fixInfinity(media) {
    return new Promise((resolve, reject) => {
        // Wait for media to load metadata
        media.onloadedmetadata = () => {
            // Change the current time to trigger ontimeupdate
            media.currentTime = Number.MAX_SAFE_INTEGER;
            // Check if the duration is Infinity, NaN or undefined
            if (ifNull(media)) {
                media.ontimeupdate = () => {
                    // If it is no longer null, resolve the promise and send the duration
                    if (!ifNull(media)) {
                        resolve(media.duration);
                    }
                    // The second ontimeupdate is a fallback if the first one fails
                    media.ontimeupdate = () => {
                        if (!ifNull(media)) {
                            resolve(media.duration);
                        }
                    };
                };
            } else {
                // If the media duration was never Infinity, return it
                resolve(media.duration);
            }
        };
    });
}
// Check if the duration is unusable (note: NaN === NaN is always false,
// so Number.isNaN is used instead of a === comparison)
function ifNull(media) {
    if (media.duration === Infinity || Number.isNaN(media.duration) || media.duration === undefined) {
        return true;
    } else {
        return false;
    }
}
// USAGE EXAMPLE
// Get the audio player from the HTML
const AudioPlayer = document.getElementById('audio');
const getInfinity = async () => {
    // Await the promise
    await fixInfinity(AudioPlayer).then(val => {
        // Reset audio current time
        AudioPlayer.currentTime = 0;
        // Log duration
        console.log(val)
    })
}
I wrapped the webm-duration-fix package to solve the webm length problem; it can be used in Node.js and in web browsers, and it supports video files over 2GB without using too much memory.
Usage is as follows:
import fixWebmDuration from 'webm-duration-fix';
const mimeType = 'video/webm;codecs=vp9';
let blobSlice: BlobPart[] = []; // let, not const: the array is reassigned in onstop
mediaRecorder = new MediaRecorder(stream, {
    mimeType
});
mediaRecorder.ondataavailable = (event: BlobEvent) => {
    blobSlice.push(event.data);
}
mediaRecorder.onstop = async () => {
    // fix blob, supports fixing webm files larger than 2GB
    const fixBlob = await fixWebmDuration(new Blob([...blobSlice], { type: mimeType }));
    // to write locally, it is recommended to use fs.createWriteStream to reduce memory usage
    // (fs and inputPath come from the surrounding Node.js context)
    const fileWriteStream = fs.createWriteStream(inputPath);
    const blobReadstream = fixBlob.stream();
    const blobReader = blobReadstream.getReader();
    while (true) {
        let { done, value } = await blobReader.read();
        if (done) {
            console.log('write done.');
            fileWriteStream.close();
            break;
        }
        fileWriteStream.write(value);
        value = null;
    }
    blobSlice = [];
};
If you want to modify the video file itself, you can use the "webmFixDuration" package; the other methods are applied at the display level only, on the video tag, while this method fixes the complete video file.
webmFixDuration github example
mediaRecorder.onstop = async () => {
    const duration = Date.now() - startTime;
    const buggyBlob = new Blob(mediaParts, { type: 'video/webm' });
    const fixedBlob = await webmFixDuration(buggyBlob, duration);
    displayResult(fixedBlob);
};
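The snippet above assumes startTime and mediaParts come from the surrounding recorder setup; a minimal sketch of that context:
let startTime;
const mediaParts = [];
mediaRecorder.onstart = () => { startTime = Date.now(); }; // note when recording began
mediaRecorder.ondataavailable = (event) => mediaParts.push(event.data);
mediaRecorder.start();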

Javascript function does not execute, script continues normally

It's been a long night trying to solve this one.
I'm trying to load a small text file, parse it, and then use the information to provide the user with video options. I do this when the page loads, but I also do it in response to a user event. In both cases I get the same result: the load_playList function does not execute.
The code is below. The window.onload and selectVideo(X) routines are the starting points. In both cases the load_playList function is ignored.
It seems that load_playList never executes. The alert message is never shown, yet the script continues as if everything were normal. It's as if I had typed the function's name wrong, so I tried that and the script failed. So the browser seems to see the function, but ignores it.
var videoList = [];
var videoTitles = [];
var videoCaptions = [];
/*
var videoList = [
    'videos/ZionParkParade.mp4',
    'videos/Pointless2014.mp4'];
var videoTitles = [
    'The 50GT Zion Canyon Cruise',
    'The Tinyvette at Sonoma Raceway'];
var videoCaptions = ['Caption 1','Caption 2'];
*/
window.onload = function()
{
    alert(0);
    load_playList; // Loads and parses a small text file.
    alert(1);
    load_video(0); // Set up the first video to play.
    alert(2);
}
function load_playList()
{
    alert('load_playList');
    var listFile = ReadFile('videos/PlayList.txt');
    var playList = listFile.split('\n');
    var j = 0;
    for (i = 0; i < math.trunc(playList.length / 3); i++)
    {
        videoList[i] = playList[j];
        videoTitles[i] = playList[j+1];
        videoCaptions[i] = playList[j+2];
        j++;
        j++;
        j++
    }
}
function selectVideo(X)
{
    alert(10);
    load_playList; // Loads and parses a small text file.
    alert(11);
    load_video(Number(X));
    alert(12);
}
function FileRead(U)
{
    if (window.XMLHttpRequest)
    { // code for IE7+, Firefox, Chrome, Opera, Safari
        X = new XMLHttpRequest();
    }
    else
    { // code for IE6, IE5
        X = new ActiveXObject("Microsoft.XMLHTTP");
    }
    X.open('GET', U, false);
    X.setRequestHeader('Content-Type', 'text/html')
    X.send();
    return X.responseText;
}
function load_video(N)
{
    var V = document.getElementById("video_player");
    V.pause();
    V.src = videoList[N];
    V.auto = false;
    V.type = "video/mp4";
    // Update the title and captions.
    document.getElementById('pause_button').innerHTML = "Play";
    document.getElementById('videoTitle').innerHTML = videoTitles[N];
    document.getElementById('videoCaption').innerHTML = videoCaptions[N];
}
If I un-comment the initial variable declarations to provide initial values, everything works, except the text file is never loaded.
Thanks in advance.
Edit - I found two problems in the load_playList routine but still can't get that function to run. I don't even see the first alert.
I pasted the load_playList code into the onload routine and it works. I can live with that, but danged if it makes any sense.
window.onload = function()
{
    // load_playList; // Loads and parses a small text file.
    var listFile = load_file('videos/PlayList.txt');
    var playList = listFile.split('\n');
    var j = 0;
    for (i = 0; i < Math.trunc(playList.length / 3); i++)
    {
        videoList[i] = playList[j];
        videoTitles[i] = playList[j+1];
        videoCaptions[i] = playList[j+2];
        j++;
        j++;
        j++
    }
    load_video(0); // Set up the first video to play.
    var vid = document.getElementById("video_player");
    vid.volume = 0.2;
}
function load_playList()
{
    alert(10);
    var listFile = load_file('videos/PlayList.txt');
    alert(11);
    var playList = listFile.split('\n');
    alert('Length = ' + playList.length);
    alert('Count = ' + Math.trunc(playList.length / 3));
    var j = 0;
    for (i = 0; i < Math.trunc(playList.length / 3); i++)
    {
        videoList[i] = playList[j];
        videoTitles[i] = playList[j+1];
        videoCaptions[i] = playList[j+2];
        j++;
        j++;
        j++
    }
    alert(12);
}
Just include () after the function name. Merely naming the function won't run it.
load_playList();
Refer to the documentation, and also see the fiddle for an example of a function call.
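To see the difference at a glance:
function greet() { console.log('hi'); }
greet;   // evaluates to the function reference; nothing runs
greet(); // invokes the function; logs 'hi'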
The work-around, pasting that routine's code into the onload routine, worked, but I wasn't satisfied and finally stumbled on this as a solution:
window.onload = function()
{
load_playList(0); // Loads and parses a small text file.
load_dropdown(0); // Populate the dropdown menu.
load_video(0); // Set up the first video to play.
var vid = document.getElementById("video_player");
vid.volume = 0.2;
}
Neither load_playList nor load_dropdown needs an argument passed to it, but I passed one anyway, and that worked (the parentheses, with or without an argument, are what actually invoke the function).
I'm not sure why this is so but I'll take it.

Using a switch case inside for loop to play audio too quick

I'm trying to play sounds by looping over a number, splitting it into an array of digits, and then using a switch case to detect what's in the array.
function keeper() {
    number2 = get.num;
    sNumber = number2.toString();
    output = [];
    for (i = 0, len = sNumber.length; i < len; i++) {
        output.push(+sNumber.charAt(i));
        console.log(output);
        switch (output[i]) {
            case 0:
                console.log('0');
                audio0 = new Audio('logo/Q0.wav');
                audio0.play();
                break;
            case 1:
                console.log('1');
                audio1 = new Audio('logo/Q1.wav');
                audio1.play();
                break;
            case 2:
                console.log('2');
                audio2 = new Audio('logo/Q2.wav');
                audio2.play();
                break;
            case 3:
                console.log('3');
                audio3 = new Audio('logo/Q3.wav');
                audio3.play();
                break;
            case 4:
                console.log('4');
                audio4 = new Audio('logo/Q4.wav');
                audio4.play();
                break;
            case 5:
                console.log('5');
                audio5 = new Audio('logo/Q5.wav');
                audio5.play();
                break;
        }
    }
}
The function works just fine, but the sounds play out too quickly, one on top of another. Is there any solution to fix this?
I'm assuming you want to hear the sounds play one after another?
That doesn't work like this.
Let's say the first number in the array is 0, so sound 0 gets played. But since you loop through the array, when you reach the next number, e.g. 2, sound 2 gets played immediately after. The loop doesn't wait for the first sound to finish before starting the next play().
What you could do is modify the loop to wait for the audio ended event, for example:
var audio0 = document.getElementById("myAudio");
audio0.onended = function() {
    alert("The audio has ended");
};
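Applied to the digit playback from the question, a minimal sketch (assuming the same logo/Q0.wav through logo/Q5.wav files) that chains the clips with the ended event instead of firing them all in one loop pass:
function playDigits(digits) {
    var i = 0;
    function playNext() {
        if (i >= digits.length) return;
        var audio = new Audio('logo/Q' + digits[i++] + '.wav');
        audio.onended = playNext; // start the next clip only when this one finishes
        audio.play();
    }
    playNext();
}
playDigits([0, 2, 5]); // plays Q0, then Q2, then Q5 in sequence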
Try using an audio sprite. I'm sure there's an app or service to do certain tasks programmatically, but be aware steps 1 and 2 are done manually.
Take a group of audio files and use either Audacity or an online service to join them into one file.
Next, get the start times of each clip in the audio file and store them in an array.
The following demo will take the file and array, generate the HTML layout, and create a button for each clip that corresponds to the array parameter. So when a button is clicked it will play only a clip of the audio sprite (the audio file).
The audio sprite in this demo was not edited very well; I just made it to demonstrate how everything works. The timing relies on the timeupdate event, which checks the playing time about every 250ms, give or take. So if you want more accurate start and end times, try leaving a gap of 250ms between clips.
Details commented in Demo
Demo
// Store path to audio file in a variable
var xFile = 'https://storage04.dropshots.com/photos7000/photos/1381926/20180318/175955.mp4'
// Store cues of each start time of each clip in an array
var xMap = [0, 1.266, 2.664, 3.409, 4.259, 4.682, 5.311, 7.169, 7.777, 9.575, 10.88, 11.883, 13.64, 15.883, 16.75, 17, 17.58];
/* Register doc to act when the DOM is ready but before the images
|| are fully loaded. When that occurs, call loadAudio()
*/
document.addEventListener('DOMContentLoaded', function(e) {
    loadAudio(e, xFile, xMap);
});
/* Pass the Event Object, file, and array through
|| Make a Template Literal of the HTML layout and the hidden
|| <audio> tag. Interpolate the ${file} into the <audio> tag.
|| Insert the TL into the <body> and parse it into HTML.
== Call generateBtn() function...
*/
function loadAudio(e, file, map) {
    var template = `
    <fieldset class='set'>
        <legend>Sound FX Test Panel</legend>
    </fieldset>
    <audio id='sndFX' hidden>
        <source src='${file}' type='audio/wav'>
    </audio>`;
    document.body.insertAdjacentHTML('beforeend', template);
    generateBtn(e, map);
}
/* Pass the Event Object and the array through
|| Reference fieldset.set
|| create a documentFragment in order to speedup appending
|| map() the array...
|| create a <button>
|| Set btn class to .btn
|| Set button.btn data-idx to the corresponding index value of
|| map array.
|| Set button.btn text to its index number.
|| Add button.btn to the documentFragment...
|| return an array of .btn (not used in this demo)
== Call the eventBinder() function...
*/
function generateBtn(e, map) {
    var set = document.querySelector('.set');
    var frag = document.createDocumentFragment();
    map.map(function(mark, index, map) {
        var btn = document.createElement('button');
        btn.className = 'btn';
        btn.dataset.idx = map[index];
        btn.textContent = index;
        frag.appendChild(btn);
        return btn;
    });
    set.appendChild(frag);
    eventBinder(e, set, map);
}
/* Pass EventObject, fieldset.set, and map array through
|| Reference the <audio> tag.
|| Register fieldset.set to the click event
|| if the clicked node (e.target) class is .btn...
|| Determine the start and end time of the audio clip.
== Call playClip() function
*/
function eventBinder(e, set, map) {
    var sFX = document.getElementById('sndFX');
    set.addEventListener('click', function(e) {
        if (e.target.className === 'btn') {
            var cue = parseFloat(e.target.textContent);
            var start = parseFloat(e.target.dataset.idx);
            if (cue !== (map.length - 1)) {
                var end = parseFloat(e.target.nextElementSibling.dataset.idx);
            } else {
                var end = parseFloat(sFX.duration);
            }
            playClip.call(this, sFX, start, end);
        } else {
            return false;
        }
    });
}
/* Pass the reference to the <audio> tag, start and end of clip
|| pause audio
|| Set the currentTime to the start parameter
|| Listen for timeupdate event...
|| should currentTime meet or exceed the end parameter...
|| pause <audio> tag.
*/
function playClip(sFX, start, end) {
    sFX.pause();
    sFX.currentTime = start;
    sFX.play();
    sFX.ontimeupdate = function() {
        if (sFX.currentTime >= end) {
            sFX.pause();
        }
    }
    return false;
}
Try to use a timer:
for (var i = 1; i <= 3; i++) {
    (function(index) {
        setTimeout(function() { alert(index); }, i * 1000);
    })(i);
}
Use the setTimeout function like that.
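With ES6 block scoping the wrapper function isn't needed, since let gives each loop iteration its own binding:
for (let i = 1; i <= 3; i++) {
    setTimeout(function() { alert(i); }, i * 1000);
}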

Play/pause using howler.js with meteor framework

Okay, so I'm trying to let users play/pause a sound when they click on the gif (one click to play, another to pause). I currently have it set up so that the user can only play the sound once, without being able to stop it.
I'm using the javascript audio library howler.js and the Meteor framework.
Below is the code:
Template.gif.rendered = function () {
    freezeframe_options = {
        trigger_event: "click"
    };
    $.getScript("/client/scripts/freezeframe.js", function () {
        $(".gif").click(function () {
            if (!$(this).hasClass('played')) {
                var gifId = $(this).attr("data-gif-id"); // Will return the gif ID number
                var soundFile = $(this).attr("data-sound-file"); // Will return the sound file
                var fileFormat = "mp3";
                var mp3Test = new Audio();
                var canPlayMP3 = (typeof mp3Test.canPlayType === "function" && mp3Test.canPlayType("audio/mpeg") !== "");
                if (!canPlayMP3) {
                    fileFormat = "ogg";
                }
                var sound = new Howl({
                    urls: ['sounds/' + soundFile + '.' + fileFormat]
                }).play();
                $(this).addClass('played');
            }
        });
    });
};
I'm using a few classes to track the current playback state:
playing = the sound is currently being played
paused = the sound is currently paused
played = the sound has been listened to completely at least once
I've created a howlers object to store the Howl instances, keyed off the data-gif-id (the key is the data-gif-id and the value is the Howl object). If the data-gif-id key is not in the howlers object, I create a new Howl object; otherwise I just call the play() and pause() methods on the corresponding value already in the howlers object.
Here is the code:
Template.gif.rendered = function () {
    freezeframe_options = {
        trigger_event: "click"
    };
    howlers = {}; // set up an object to hold the Howl instances
    // moved these static lines out of the click function
    var fileFormat = "mp3";
    var mp3Test = new Audio();
    var canPlayMP3 = (typeof mp3Test.canPlayType === "function" && mp3Test.canPlayType("audio/mpeg") !== "");
    if (!canPlayMP3) {
        fileFormat = "ogg";
    }
    $.getScript("/client/scripts/freezeframe.js", function () {
        $(".gif").click(function () {
            var e = $(this);
            var soundFile = e.attr("data-sound-file") + '.' + fileFormat; // Will return the sound file
            var gifId = e.attr("data-gif-id"); // Will return the gif ID number
            if (gifId in howlers) {
                if (e.hasClass('paused')) { // If currently paused, unpause
                    e.removeClass('paused');
                    e.addClass('playing');
                    howlers[gifId].play();
                } else if (e.hasClass('playing')) { // If currently playing, pause
                    e.removeClass('playing');
                    e.addClass('paused');
                    howlers[gifId].pause();
                } else { // If not playing and not paused, play
                    e.addClass('playing');
                    howlers[gifId].play();
                }
            } else { // this is a new instance, so add it to the howlers object and start playing
                howlers[gifId] = new Howl({
                    urls: ['sounds/' + soundFile],
                    onend: function() { // when playing ends, add the 'played' class to track which sounds have been played completely
                        e.removeClass('playing');
                        e.addClass('played');
                    }
                });
                e.addClass('playing');
                howlers[gifId].play();
            }
        });
    });
};
