Loop is false, poolSize is 1.
Running sound.playSound() successfully plays the sound, but sound.isPlaying remains true even after the full sound has been played.
I've already tried adding a 'sound-ended' event listener on the entity, but it doesn't trigger either.
There must be a proper state somewhere... right?
You can track the state of the sound component by listening to the sound-ended event. In the snippet below, click the button to play a sound; it will remain red until the audio track is over.
<script src="https://aframe.io/releases/1.3.0/aframe.min.js"></script>
<script>
  AFRAME.registerComponent("foo", {
    init: function() {
      const soundComp = this.el.components.sound; // grab the `sound` component
      // play the sound when the user clicks the button
      this.el.addEventListener("click", evt => {
        this.el.setAttribute("color", "red"); // turn the button red
        soundComp.playSound();                // start playback
      });
      // catch the `sound-ended` event
      this.el.addEventListener("sound-ended", evt => {
        this.el.setAttribute("color", "green"); // turn it back to green
      });
    }
  });
</script>
<a-scene cursor="rayOrigin: mouse" raycaster="objects: a-sphere">
  <a-assets>
    <audio id="ding" crossorigin="anonymous" src="https://gftruj.github.io/webzamples/arjs/sound/sound/Ding-sound-effect.mp3" preload="auto"></audio>
  </a-assets>
  <a-sphere position="0 1 -3" radius="0.25" color="green" sound="src: #ding; autoplay: false" foo></a-sphere>
</a-scene>
Related
I'm working on a project in which I want to repurpose this example:
https://aframe.io/aframe/examples/showcase/model-viewer/
All I want to add is a trigger that starts the animation on a click event.
I have managed to get this running on my server:
https://github.com/aframevr/aframe/tree/master/examples/showcase/model-viewer
but am now struggling to code the event handler.
In model-viewer.js I can see the line that triggers the animation at the start:
modelEl.setAttribute('animation-mixer', '');
I can't seem to figure out how to play it on click.
I have done this implementation before in a simpler setup (https://codepen.io/wspluta/pen/rNwReNB)
<script>
  AFRAME.registerComponent('animationhandler', {
    init: function() {
      let playing = false;
      this.el.addEventListener('click', () => {
        if (!playing) {
          this.el.setAttribute('animation-mixer', 'clip: *; loop: once; clampWhenFinished: true; duration: 6');
          playing = true;
        } else {
          this.el.removeAttribute('animation-mixer');
          playing = false;
        }
      });
    }
  });
</script>
<body>
  <a-scene>
    <a-assets>
      <a-asset id="sky" src="https://raw.githubusercontent.com/WSPluta/webxr102/main/tatooine.jpg"></a-asset>
      <a-asset-item id="tie" src="https://raw.githubusercontent.com/WSPluta/webxr102/main/newTie.gltf"></a-asset-item>
    </a-assets>
    <a-entity id="tie" gltf-model="#tie" position="0 0.5 -5" scale="0.25 0.25 0.25" animationhandler></a-entity>
    <a-plane id="background" position="0 5 -15" height="9" width="16" rotation="0 0 0" opacity="0.9"></a-plane>
    <a-sky src="#sky"></a-sky>
    <a-camera>
      <a-cursor color="yellow"></a-cursor>
    </a-camera>
  </a-scene>
</body>
but I'm unable to figure out how to modify the example/showcase document to implement it. I really want to reuse the camera movement and all the good stuff that comes with the example/showcase file.
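For what it's worth, a minimal sketch of the kind of change being asked about: instead of calling modelEl.setAttribute('animation-mixer', '') unconditionally, attach it from a click listener. The modelEl reference comes from the quoted line; where exactly this lands inside model-viewer.js, and the presence of a cursor/raycaster that actually emits click events on the model, are assumptions on my part.

// sketch: somewhere after modelEl has been created in model-viewer.js (assumed location)
modelEl.addEventListener('click', function () {
  // adding the animation-mixer component is what starts playback, as in the quoted line
  modelEl.setAttribute('animation-mixer', 'clip: *; loop: once; clampWhenFinished: true');
});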
I wanted to play an animation and used this code with react/nextjs to achieve this functionality.
const handleAnimate = useCallback(
  (e) => {
    const { keyCode } = e;
    const rot = document.getElementById("camOrbit").getAttribute("rotation");
    // when W is pressed, play the Walk clip and face the camera's yaw
    if (keyCode === 87) {
      document
        .getElementById("modAnim")
        .setAttribute("animation-mixer", `clip: Walk; loop: repeat`);
      document.getElementById("modAnim").setAttribute("rotation", `0 ${rot.y} 0`);
      // document.getElementById("modAnim").setAttribute("position", `${pos.x} 0 ${pos.z}`);
      // on key release, fall back to the Idle clip
      // (note: this registers a new keyup listener on every W press)
      document.addEventListener("keyup", (event) => {
        document
          .getElementById("modAnim")
          .setAttribute("animation-mixer", `clip: Idle; loop: repeat`);
      });
    }
  },
  []
);
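For context, a minimal sketch of how a callback like this might be registered in a React/Next.js component; the element ids (camOrbit, modAnim) come from the snippet above, while the useEffect wiring and the component name are assumptions of mine:

import { useCallback, useEffect } from "react";

export default function SceneKeyControls() {
  // hypothetical wrapper; the full keydown logic from the snippet above goes here
  const handleAnimate = useCallback((e) => {
    if (e.keyCode === 87) {
      document
        .getElementById("modAnim")
        .setAttribute("animation-mixer", "clip: Walk; loop: repeat");
    }
  }, []);

  useEffect(() => {
    // register once on mount, remove on unmount
    document.addEventListener("keydown", handleAnimate);
    return () => document.removeEventListener("keydown", handleAnimate);
  }, [handleAnimate]);

  return null; // the A-Frame scene itself is rendered elsewhere in the page
}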
I'm trying to play a sound just once when a marker is detected, using the A-Frame and AR.js libraries.
I'm trying the code below, but the sound plays indefinitely.
<a-scene embedded arjs='sourceType: webcam; debugUIEnabled: false;'>
  <a-assets>
    <audio id="sound" src="audio.mp3" preload="auto"></audio>
  </a-assets>
  <a-marker preset="hiro">
    <a-entity id="examplemodel" gltf-model="./models/Example.glb" soundhandler></a-entity>
  </a-marker>
  <a-entity sound="src: #sound" autoplay="false"></a-entity>
  <a-entity camera></a-entity>
</a-scene>
<script>
  AFRAME.registerComponent('soundhandler', {
    tick: function () {
      var entity = document.querySelector('[sound]');
      if (document.querySelector('a-marker').object3D.visible == true) {
        entity.components.sound.playSound();
      } else {
        entity.components.sound.pauseSound();
      }
    }
  });
</script>
When I changed tick to init, I got this error:
Uncaught TypeError: Cannot read property 'playSound' of undefined
Could you please give me some ideas to solve this issue?
It's playing indefinitely because, once the marker is visible, you call playSound() on every render loop (every tick).
If you add a simple toggle check, you'll get your "once per visible" result:
tick: function() {
  // `marker` is the marker's object3D
  if (marker.visible && !this.markervisible) {
    // do your stuff here, once per "visible"
    this.markervisible = true;
  } else if (!marker.visible) {
    this.markervisible = false;
  }
}
Check it out here
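Put together with the markup from the question, a minimal sketch of the whole component might look like this (the selectors come from the question's HTML; treat it as a sketch rather than a drop-in file):

AFRAME.registerComponent('soundhandler', {
  init: function () {
    this.markervisible = false;
    this.soundEl = document.querySelector('[sound]');          // entity holding the sound component
    this.marker = document.querySelector('a-marker').object3D; // the marker's object3D
  },
  tick: function () {
    if (this.marker.visible && !this.markervisible) {
      // fires once each time the marker goes from hidden to visible
      this.soundEl.components.sound.playSound();
      this.markervisible = true;
    } else if (!this.marker.visible) {
      this.markervisible = false;
    }
  }
});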
I have successfully implemented the ability to click on different markers and make their object scale or rotate every time you click on them.
My problem is that when I have two different markers with two different components, both components are called on each click. It baffles me. Why would both be called?
UPDATE!!!
I have not fixed the problem but I have narrowed down to what I believe is causing it.
I have multiple markers printed out and hanging around my office. When I look at any one of them, it ends up basically centered on my screen, so both markers occupy the same spot on screen when viewed, and both handlers fire.
If I have printouts of two markers side by side, with both on screen at the same time, clicking one triggers only that marker, independently of the other.
If I physically move the printouts and lay one marker on top of the other, they go back to both firing at the same time.
It definitely has something to do with the area of the screen the markers take up.
I hope that makes sense.
Is there a way to "clear" or "refresh" things when a marker is found and lost?
Ultimate goal is to have a user walk down a hallway with barcodes on the wall so that they can see virtual paintings. The user is definitely going to focus in on the barcode, so the above issue is going to be present.
Any advice is appreciated!
Here is my JavaScript code:
AFRAME.registerComponent('marker-image-click', {
  init: function() {
    const objImageImage = document.querySelector('#image-image');
    let intRotationX = objImageImage.getAttribute('rotation').x;
    let intRotationY = objImageImage.getAttribute('rotation').y;
    let intRotationZ = objImageImage.getAttribute('rotation').z;
    objImageImage.addEventListener('click', function(ev) {
      if (objImageImage.object3D.visible === true) {
        console.log('click-image');
        objImageImage.setAttribute('rotation', {x: intRotationX, y: intRotationY, z: intRotationZ});
        intRotationX += 6.0;
        intRotationY += 6.0;
        intRotationZ += 6.0;
      }
    });
  }
});

AFRAME.registerComponent('marker-avocado-click', {
  init: function() {
    const objEntityAvocado = document.querySelector('#entity-avocado');
    let intScaleX = objEntityAvocado.getAttribute('scale').x;
    let intScaleY = objEntityAvocado.getAttribute('scale').y;
    let intScaleZ = objEntityAvocado.getAttribute('scale').z;
    objEntityAvocado.addEventListener('click', function(ev) {
      if (objEntityAvocado.object3D.visible === true) {
        console.log('click-avocado');
        objEntityAvocado.setAttribute('scale', {x: intScaleX, y: intScaleY, z: intScaleZ});
        intScaleX += 0.25;
        intScaleY += 0.25;
        intScaleZ += 0.25;
      }
    });
  }
});
Here is my HTML code:
<a-marker marker-image-lostfound marker-image-click id="marker-image" type="barcode" value="2" emitevents="true">
  <a-entity cursor="rayOrigin: mouse"
            raycaster="objects: .clickable-image; useWorldCoordinates: true;"> <!-- must add this for clicking -->
    <a-image id="image-image" class="clickable-image"
             src="images/test.png"
             position="0 0 0" rotation="-90 0 0" scale="1 1 1"
             width="" height="" opacity="1"> <!-- note the class="clickable" for clicking -->
    </a-image>
  </a-entity>
</a-marker>

<a-marker marker-avocado-lostfound marker-avocado-click id="marker-avocado" type="barcode" value="3" emitevents="true">
  <a-entity cursor="rayOrigin: mouse"
            raycaster="objects: .clickable-avocado; useWorldCoordinates: true;"> <!-- must add this for clicking -->
  </a-entity>
  <a-entity id="entity-avocado" class="clickable-avocado"
            gltf-model="models/Avocado/Avocado.gltf"
            position="0 0 0.3" rotation="-90 0 0" scale="10 10 10"> <!-- note the class="clickable" for clicking -->
  </a-entity>
</a-marker>
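As a side note on the "clear or refresh when a marker is found and lost" idea: AR.js markers declared with emitevents="true" should fire markerFound and markerLost events on the <a-marker> element, so a component like the marker-image-lostfound referenced in the HTML above could reset per-marker state there. A minimal sketch (the flag name and what exactly gets reset are assumptions):

AFRAME.registerComponent('marker-image-lostfound', {
  init: function () {
    const markerEl = this.el; // the <a-marker> this component sits on
    this.markerVisible = false;
    markerEl.addEventListener('markerFound', () => {
      this.markerVisible = true;   // marker just came into view
    });
    markerEl.addEventListener('markerLost', () => {
      this.markerVisible = false;  // marker left the view; reset anything you need here
    });
  }
});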
I'm having a problem with HTML video and JavaScript, so I have written some simple code to demonstrate. There is one video which contains three "clips", all five seconds long (obviously, in the real world they are a lot longer): one at 25-30 seconds, one at 55-60 seconds and the last at 85-90 seconds. I want the user to be able to click the relevant button for each five-second clip.
There are two issues:
The Chrome currentTime bug, which doesn't seem to let you change the start time of an external video (the video will be stored on an Azure Blob). There appear to be a number of posts on this but no fix.
When you play the first clip and then try to play the second clip, it doesn't play: because the start time of clip 2 is after the end time of clip 1, the event listener added for clip 1 is still in effect and pauses it immediately. Is there a way to drop the original event listener or replace it with the new end time?
Here is the code being used:
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8" />
  <title></title>
</head>
<body>
  <div style="width: 700px; height: 400px; margin: auto; text-align: center;">
    <video id="video1" width="620" controls>
      <source type="video/mp4" src="external video link here" />
      Your browser does not support HTML5 video.
    </video>
    <input type="button" value="Play Clip 1 (25 - 30 seconds)" onclick="showvid(25);" /><br />
    <input type="button" value="Play Clip 2 (55 - 60 seconds)" onclick="showvid(55);" /><br />
    <input type="button" value="Play Clip 3 (85 - 90 seconds)" onclick="showvid(85);" /><br />
  </div>
  <script type="text/javascript">
    function showvid(timer) {
      var myVideo = document.getElementById("video1");
      myVideo.currentTime = timer;
      myVideo.play();
      // note: this adds a new "timeupdate" listener on every button click
      myVideo.addEventListener("timeupdate", function () {
        if (this.currentTime >= (timer + 5)) {
          this.pause();
        }
      });
    }
  </script>
</body>
</html>
UPDATE 1
I've changed the event listener check to pause the video only if the currentTime is within a second of the end time. So if the next clip starts more than a second away, a listener left over from an earlier clip won't stop the new clip before it starts.
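Something along these lines, presumably (a sketch of the described change inside showvid; the one-second window is the only difference from the original listener):

myVideo.addEventListener("timeupdate", function () {
  // pause only within one second of this clip's end, so a stale
  // listener from an earlier clip cannot stop a later clip
  if (this.currentTime >= (timer + 5) && this.currentTime < (timer + 6)) {
    this.pause();
  }
});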
Still looking into the Chrome issue.
I don't know what Chrome bug you are talking about, but for cleaner code, you might be interested in the #t=start[,end] Media Fragment, which will allow you to set a time range directly as the source of your <video>:
onclick = e => {
  const data = e.target.dataset;
  if (!data.start) return;
  // rebuild the src as url#t=start,end
  vid.src = vid.src.split('#')[0] +
    '#t=' + data.start + ',' + data.end;
  vid.play();
}

<button data-start="5" data-end="10">play [5,10]</button>
<button data-start="35" data-end="40">play [35,40]</button>
<button data-start="00:01:25" data-end="00:01:30">play [00:01:25,00:01:30]</button>
<video id="vid" src="https://upload.wikimedia.org/wikipedia/commons/transcoded/2/22/Volcano_Lava_Sample.webm/Volcano_Lava_Sample.webm.360p.webm" muted></video>
Now if you really wish to go the way you were going, you'll have to change your code a bit.
Never add a new event listener from a user-generated event.
Add it once, and only trigger semaphores / update variables from user events.
So we first add the timeupdate listener on our <video>; then, if no user-generated event has happened yet, we exit early. Otherwise we check a variable shared by both of our event listeners (here called next_stop) to decide whether we should pause or not.
Then, in the buttons' event listener, we update the <video>'s currentTime, request it to play, and update next_stop.
The two event listeners can interact thanks to the shared next_stop variable, but without any more conflicts.
let next_stop = Infinity; // a variable shared by both event listeners

// add the event listeners only once
vid.addEventListener('timeupdate', handleTimeupdate, {passive: true});
document.addEventListener('click', handleClick);

function handleTimeupdate(evt) {
  // not set? exit early
  if (!isFinite(next_stop)) return;
  // a single action
  if (this.currentTime > next_stop) {
    this.pause();
    // if you want to disable the range once it's done
    // e.g. to allow default controls interactions
    // next_stop = Infinity;
  }
}

function handleClick(evt) {
  const times = parseTime(evt.target);
  if (!times) return;
  // update the video's current time
  vid.currentTime = times.start;
  // update the shared variable
  next_stop = times.end;
  // start playing if needed
  if (vid.paused) {
    vid.play();
  }
}

function parseTime(target) {
  const data = target.dataset;
  if (!data || !data.start) return null;
  return {start: +data.start, end: +data.end};
}

<button data-start="5" data-end="10">play [5,10]</button>
<button data-start="35" data-end="40">play [35,40]</button>
<button data-start="85" data-end="90">play [00:01:25,00:01:30]</button>
<video id="vid" src="https://upload.wikimedia.org/wikipedia/commons/transcoded/2/22/Volcano_Lava_Sample.webm/Volcano_Lava_Sample.webm.360p.webm" controls></video>
So I'm struggling to find a solution to play/stop/pause sound on "click", i.e. when focusing the black dot (gaze cursor) on an A-Frame sound entity... What I would like is a plane, shape or whatever with a play/pause image on it, which would trigger audio when focused. Did anyone encounter something similar perhaps?
<audio id="sound" crossorigin="anonymous" preload="auto" src="some-audio-file.mp3"></audio>
... would trigger something like sound="on: click; src: #sound"
Try making a custom component:
AFRAME.registerComponent('audiohandler', {
  init: function() {
    let playing = false;
    let audio = document.querySelector("#audio");
    this.el.addEventListener('click', () => {
      if (!playing) {
        audio.play();
      } else {
        audio.pause();
        audio.currentTime = 0;
      }
      playing = !playing;
    });
  }
});
and use it on your "button":
<a-box audiohandler> </a-box>
You can check all the media methods, properties, etc. here.
You can check this button here.