I'm trying to play a sound just once when a marker is detected with the A-Frame and AR.js libraries.
I'm using the code below, but the sound plays indefinitely.
<a-scene embedded arjs='sourceType: webcam; debugUIEnabled: false;'>
  <a-assets>
    <audio id="sound" src="audio.mp3" preload="auto"></audio>
  </a-assets>
  <a-marker preset="hiro">
    <a-entity id="examplemodel" gltf-model="./models/Example.glb" soundhandler></a-entity>
  </a-marker>
  <a-entity sound="src: #sound; autoplay: false"></a-entity>
  <a-entity camera></a-entity>
</a-scene>
<script>
  AFRAME.registerComponent('soundhandler', {
    tick: function () {
      var entity = document.querySelector('[sound]');
      if (document.querySelector('a-marker').object3D.visible == true) {
        entity.components.sound.playSound();
      } else {
        entity.components.sound.pauseSound();
      }
    }
  });
</script>
When I changed tick to init, I got this error:
Uncaught TypeError: Cannot read property 'playSound' of undefined
Could you please give me some ideas on how to solve this issue?
It's playing indefinitely because, once the marker is visible, you call playSound() on every render loop.
If you add a simple toggle check, you'll get your "once per visible" result:
tick: function () {
  // marker is the marker's object3D
  if (marker.visible && !this.markerVisible) {
    // do your stuff here, once per "became visible"
    this.markerVisible = true;
  } else if (!marker.visible) {
    this.markerVisible = false;
  }
}
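Applied to the soundhandler from your question, a minimal sketch could look like this (it assumes the separate [sound] entity from your markup and a single <a-marker> in the scene):
AFRAME.registerComponent('soundhandler', {
  init: function () {
    this.markerVisible = false;
  },
  tick: function () {
    var marker = document.querySelector('a-marker').object3D;
    var sound = document.querySelector('[sound]').components.sound;
    if (marker.visible && !this.markerVisible) {
      sound.playSound(); // fires once each time the marker becomes visible
      this.markerVisible = true;
    } else if (!marker.visible) {
      this.markerVisible = false;
    }
  }
});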
The sound component is set with loop: false and poolSize: 1.
Running sound.playSound() successfully plays the sound, but sound.isPlaying remains true even after the full sound has been played.
I've already tried adding a 'sound-ended' event listener on the entity, but it doesn't trigger either.
There must be a proper state somewhere... right?
You can track the state of the sound component by listening to the sound-ended event. Below, click the sphere "button" to play a sound; it will stay red until the audio track is over.
<script src="https://aframe.io/releases/1.3.0/aframe.min.js"></script>
<script>
  AFRAME.registerComponent("foo", {
    init: function() {
      const soundComp = this.el.components.sound; // grab the `sound` component
      // play the sound when the user clicks the button
      this.el.addEventListener("click", evt => {
        this.el.setAttribute("color", "red"); // turn the button red
        soundComp.playSound(); // play the sound
      });
      // catch the `sound-ended` event
      this.el.addEventListener("sound-ended", evt => {
        this.el.setAttribute("color", "green"); // turn it back to green
      });
    }
  });
</script>
<a-scene cursor="rayOrigin: mouse" raycaster="objects: a-sphere">
  <a-assets>
    <audio id="ding" crossorigin="anonymous" src="https://gftruj.github.io/webzamples/arjs/sound/sound/Ding-sound-effect.mp3" preload="auto"></audio>
  </a-assets>
  <a-sphere position="0 1 -3" radius="0.25" color="green" sound="src: #ding; autoplay: false" foo></a-sphere>
</a-scene>
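If you need an isPlaying-style flag of your own (since the built-in one stays stale in your case), here is a minimal sketch that derives the state from the same event; the component name sound-state and the method name playOnce are placeholders, and it assumes the entity has a sound component:
AFRAME.registerComponent('sound-state', {
  init: function () {
    this.soundPlaying = false;
    // reset the flag when the track finishes
    this.el.addEventListener('sound-ended', () => { this.soundPlaying = false; });
  },
  playOnce: function () {
    if (this.soundPlaying) return; // ignore repeated calls while playing
    this.el.components.sound.playSound();
    this.soundPlaying = true;
  }
});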
I have the following component attached to my 3D glb object:
AFRAME.registerComponent('room', {
  init: function() {
    let mesh = this.el.getObject3D('mesh');
    if (!mesh) {
      this.el.addEventListener('model-loaded', () => {
        this.el.addEventListener("click", evt => {
          console.log(evt);
          if (evt.detail !== null) {
            this.intersection = evt.detail.intersection;
          }
        });
      });
    }
  },
});
In my camera object, I have placed the cursor with the raycaster attached:
<a-entity camera-listener id="rig" class='rig' movement-controls="speed: 0.1;" navigator="cameraRig: #rig; cameraHead: #camera-rig;">
  <a-entity id="camera-rig" camera="active: true" position="0 1.6 0" look-controls="pointerLockEnabled: false; reverseMouseDrag: true">
    <a-entity id="mouseCursor" cursor="rayOrigin: mouse" raycaster="objects: .collidable"></a-entity>
  </a-entity>
  <a-entity laser-controls="hand: left" visible="true" raycaster="objects: .collidable;"></a-entity>
  <a-entity laser-controls="hand: right" visible="true" raycaster="objects: .collidable;"></a-entity>
</a-entity>
When testing the app on Chrome, Firefox and Android devices, "evt" arrives with a non-null detail attribute, which I can use to decide which child element of my glb object has been clicked. It works perfectly for me in these cases.
However, on Safari and iOS devices, the detail attribute of "evt" is always null and crashes my app if not checked. The null check itself is easy, but the main issue is that on these browsers I cannot detect which child object of my glb has been clicked.
Is there another way of raycasting that works across all browsers, or have I made a mistake in my configuration? Any help would be much appreciated.
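One fallback I'm considering (only a sketch, untested on Safari) is to ask the cursor's raycaster component for the intersection instead of relying on evt.detail; as far as I understand, the raycaster component exposes a getIntersection(el) method, and #mouseCursor is the cursor entity from my markup above:
AFRAME.registerComponent('room', {
  init: function() {
    this.el.addEventListener('model-loaded', () => {
      this.el.addEventListener('click', () => {
        // query the raycaster on the mouse cursor instead of evt.detail
        const raycaster = document.querySelector('#mouseCursor').components.raycaster;
        const intersection = raycaster.getIntersection(this.el); // may be null
        if (intersection) {
          this.intersection = intersection;
          console.log(intersection.object.name); // which child mesh was hit
        }
      });
    });
  },
});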
I'm working on a project in which I want to repurpose this example:
https://aframe.io/aframe/examples/showcase/model-viewer/
All I want to add is a trigger that starts the animation on a click event.
I have managed to get this running on my server:
https://github.com/aframevr/aframe/tree/master/examples/showcase/model-viewer
but I'm now struggling to code the event handler.
In model-viewer.js I can see a line that triggers the animation at the start:
modelEl.setAttribute('animation-mixer', '');
I can't seem to figure out how to play it on click instead.
I have done this implementation before in a simpler setup (https://codepen.io/wspluta/pen/rNwReNB):
<script>
  AFRAME.registerComponent('animationhandler', {
    init: function() {
      let playing = false;
      this.el.addEventListener('click', () => {
        if (!playing) {
          this.el.setAttribute('animation-mixer', 'clip: *; loop: once; clampWhenFinished: true; duration: 6');
          playing = true;
        } else {
          this.el.removeAttribute('animation-mixer');
          playing = false;
        }
      });
    }
  });
</script>
<body>
  <a-scene>
    <a-assets>
      <a-asset id="sky" src="https://raw.githubusercontent.com/WSPluta/webxr102/main/tatooine.jpg"></a-asset>
      <a-asset-item id="tie" src="https://raw.githubusercontent.com/WSPluta/webxr102/main/newTie.gltf"></a-asset-item>
    </a-assets>
    <a-entity id="tie" gltf-model="#tie" position="0 0.5 -5" scale="0.25 0.25 0.25" animationhandler></a-entity>
    <a-plane id="background" position="0 5 -15" height="9" width="16" rotation="0 0 0" opacity="0.9"></a-plane>
    <a-sky src="#sky"></a-sky>
    <a-camera>
      <a-cursor color="yellow"></a-cursor>
    </a-camera>
  </a-scene>
</body>
but I'm unable to figure out how to modify the example/showcase code in order to implement it. I really want to reuse the camera movement and all the good stuff that comes with the example.
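The direction I'm experimenting with (only a sketch, and it assumes modelEl in model-viewer.js receives 'click' events from whatever cursor/raycaster the example sets up) is to replace the automatic animation-mixer call with a click toggle:
// Sketch: in model-viewer.js, instead of the unconditional
//   modelEl.setAttribute('animation-mixer', '');
// toggle the mixer when the model is clicked.
var animPlaying = false;
modelEl.addEventListener('click', function () {
  if (!animPlaying) {
    modelEl.setAttribute('animation-mixer', 'clip: *; loop: once; clampWhenFinished: true');
  } else {
    modelEl.removeAttribute('animation-mixer');
  }
  animPlaying = !animPlaying;
});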
I wanted to play an animation and used this code with React/Next.js to achieve this functionality:
const handleAnimate = useCallback(
  (e) => {
    const { keyCode } = e;
    const rot = document.getElementById("camOrbit").getAttribute("rotation");
    // when W is pressed, play the Walk clip and face the camera's yaw
    if (keyCode === 87) {
      document
        .getElementById("modAnim")
        .setAttribute("animation-mixer", `clip: Walk; loop: repeat`);
      document.getElementById("modAnim").setAttribute("rotation", `0 ${rot.y} 0`);
      // document.getElementById("modAnim").setAttribute("position", `${pos.x} 0 ${pos.z}`);
      // switch back to the Idle clip when the key is released
      document.addEventListener("keyup", (event) => {
        document
          .getElementById("modAnim")
          .setAttribute("animation-mixer", `clip: Idle; loop: repeat`);
      });
    }
  },
  []
);
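A minimal sketch of how such a handler could be wired up in the React component (the useEffect registration here is my assumption, not part of the original code):
import { useCallback, useEffect } from "react";

function SceneControls() {
  const handleAnimate = useCallback((e) => {
    /* ...the handler above... */
  }, []);

  // register the keydown handler once and clean it up on unmount
  useEffect(() => {
    document.addEventListener("keydown", handleAnimate);
    return () => document.removeEventListener("keydown", handleAnimate);
  }, [handleAnimate]);

  return null; // the A-Frame scene is rendered elsewhere in this sketch
}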
I just started trying A-Frame with AR.js, hoping to get an object to render on a piece of paper. I have tested the object and it renders correctly. However, when I try it with a custom image, it no longer works. I am trying to show a gltf file on a pattern. The pattern (shown as an image in the original post) was made using https://jeromeetienne.github.io/AR.js/three.js/examples/marker-training/examples/generator.html
Does anyone see anything wrong with my code as to why that pattern is not being recognized by the camera as a match? Is it my code, or is it something to do with the pattern file I am trying to load?
<head>
  <script src="https://aframe.io/releases/0.8.0/aframe.min.js"></script>
  <script src="https://cdn.rawgit.com/jeromeetienne/AR.js/1.6.0/aframe/build/aframe-ar.js"></script>
</head>
<body style='margin : 0px; overflow: hidden;'>
  <a-scene embedded arjs='sourceType: webcam;'>
    <a-assets>
      <a-asset-item id="mesh" src="./data/Camargue.gltf"></a-asset-item>
    </a-assets>
    <a-entity gltf-model="#mesh" rotation="0 180 0" modify-materials></a-entity>
    <a-light type="directional" color="#fff" position="-1 -5 -5" look-at="a-entity"></a-light>
    <a-light type="ambient" color="#fff" intensity="3" look-at="a-entity"></a-light>
    <a-marker-camera type="pattern" patternUrl="data/pattern-logo.patt"></a-marker-camera>
  </a-scene>
  <script>
    AFRAME.registerComponent('modify-materials', {
      init: function () {
        // Wait for model to load.
        this.el.addEventListener('model-loaded', () => {
          // Grab the mesh / scene.
          const obj = this.el.getObject3D('mesh');
          // Go over the submeshes and modify materials we want.
          obj.traverse(node => {
            if (node.name.indexOf('Loggia-frame') !== -1) {
              var environment = new THREE.CubeTextureLoader().setPath('data/HDR/').load(
                ['posx.jpg', 'negx.jpg', 'posy.jpg', 'negy.jpg', 'posz.jpg', 'negz.jpg']
              );
              environment.format = THREE.RGBFormat;
              environment.mapping = THREE.CubeReflectionMapping;
              var material = node.material;
              material.envMap = environment;
            }
          });
        });
      }
    });
  </script>
</body>
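For comparison, the structure I've seen in other AR.js pattern-marker examples nests the model inside a plain <a-marker> with a separate camera entity (only a sketch; whether the pattern file attribute is url or patternUrl seems to depend on the AR.js version, so treat that part as an assumption):
<a-scene embedded arjs='sourceType: webcam;'>
  <a-assets>
    <a-asset-item id="mesh" src="./data/Camargue.gltf"></a-asset-item>
  </a-assets>
  <!-- content inside the marker only shows while the pattern is tracked -->
  <a-marker type="pattern" url="data/pattern-logo.patt">
    <a-entity gltf-model="#mesh" rotation="0 180 0" modify-materials></a-entity>
  </a-marker>
  <a-entity camera></a-entity>
</a-scene>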
So I'm struggling to find a solution to play/stop/pause a sound on "click", i.e. when focusing on it with the gaze cursor (the black dot), using the A-Frame sound entity. What I would like to have is a plane, shape or whatever with a play/pause image on it, which would trigger the audio when focused. Has anyone encountered something similar?
<audio id="sound" crossorigin="anonymous" preload="auto" src="some-audio-file.mp3"></audio>
... would trigger something like sound="on: click; src: #sound"
Try making a custom component:
AFRAME.registerComponent('audiohandler', {
  init: function() {
    let playing = false;
    // the <audio id="sound"> element from the assets
    let audio = document.querySelector("#sound");
    this.el.addEventListener('click', () => {
      if (!playing) {
        audio.play();
      } else {
        audio.pause();
        audio.currentTime = 0;
      }
      playing = !playing;
    });
  }
});
and use it within your "button":
<a-box audiohandler></a-box>
You can check all media element methods and properties in the HTMLMediaElement documentation on MDN.
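Alternatively, the sound component itself has an on property that plays the sound when the given event fires, which covers the "play on click/focus" part without a custom component (pausing or toggling still needs script like the one above):
<a-box sound="src: #sound; on: click"></a-box>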