follow camera in phaser.js - javascript

So I have just figured out how to load a tilemap that I created in the Tiled editor, and it works as expected. Now I am trying to add a player with a basic follow camera, using some code from the examples, but nothing happens except that the camera views the middle of the tilemap. Here is the code I have tried:
var game = new Phaser.Game(800, 600, Phaser.AUTO, 'phaser-example', { preload: preload, create: create });

function preload() {
    // Tilemaps are split into two parts: the actual map data (usually stored in a CSV or JSON file)
    // and the tileset(s) used to render the map.
    // Here we'll load the tilemap data. The first parameter is a unique key for the map data.
    // The second is a URL to the JSON file the map data is stored in. This is actually optional: you can pass the JSON object as the 3rd
    // parameter if you already have it loaded (maybe via a 3rd party source or pre-generated), in which case pass 'null' as the URL and
    // the JSON object as the 3rd parameter.
    // The final one tells Phaser the format of the map data, in this case a JSON file exported from the Tiled map editor.
    // This could be Phaser.Tilemap.CSV too.
    game.load.tilemap('gameMap', 'assets/tilemaps/gameMap2.json', null, Phaser.Tilemap.TILED_JSON);

    // Next we load the tileset. This is just an image, loaded in via the normal way we load images:
    game.load.image('tiles', 'assets/tilemaps/gameMap.png');
    game.load.image('player', 'assets/img/player.gif');
}
var map;
var layer;
var player;
var cursors;
function create() {
    game.stage.backgroundColor = '#787878';
    game.world.setBounds(0, 0, 3200, 3200);
    game.physics.startSystem(Phaser.Physics.P2JS);

    player = game.add.sprite(game.world.centerX, game.world.centerY, 'player');
    game.physics.p2.enable(player);

    cursors = game.input.keyboard.createCursorKeys();
    game.camera.follow(player);

    // The 'gameMap' key here is the Loader key given in game.load.tilemap
    map = game.add.tilemap('gameMap');

    // The first parameter is the tileset name, as specified in the Tiled map editor (and in the tilemap JSON file).
    // The second parameter maps this name to the Phaser.Cache key 'tiles'.
    map.addTilesetImage('spritesheet', 'tiles');

    // Creates a layer from the 'Tile Layer 1' layer in the map data.
    // A Layer is effectively like a Phaser.Sprite, so it is added to the display list.
    layer = map.createLayer('Tile Layer 1');

    // This resizes the game world to match the layer dimensions
    layer.resizeWorld();
}
function update() {
    player.body.setZeroVelocity();

    if (cursors.up.isDown)
    {
        player.body.moveUp(300);
    }
    else if (cursors.down.isDown)
    {
        player.body.moveDown(300);
    }

    if (cursors.left.isDown)
    {
        player.body.velocity.x = -300;
    }
    else if (cursors.right.isDown)
    {
        player.body.moveRight(300);
    }
}
function render() {
    game.debug.cameraInfo(game.camera, 32, 32);
    game.debug.spriteCoords(player, 32, 500);
}
I am new to using Phaser and I am not sure why this is not working. Any pointers?

Try creating the player and setting up the camera follow after the map has been created, like this:
function create() {
    // Configure the physics engine
    game.physics.startSystem(Phaser.Physics.P2JS);

    // Create the tilemap
    game.stage.backgroundColor = '#787878';
    map = game.add.tilemap('mario');
    map.addTilesetImage('SuperMarioBros-World1-1', 'tiles');
    layer = map.createLayer('World1');
    layer.resizeWorld();

    // Configure player controls
    cursors = game.input.keyboard.createCursorKeys();

    // Create the player
    player = game.add.sprite(game.world.centerX, game.world.centerY, 'player');
    game.physics.p2.enable(player);
    game.camera.follow(player);
}
I have used this example as a basis:
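As an aside, one thing worth double-checking in the original snippet: the Phaser.Game constructor is only given preload and create, so the update and render functions defined later are never called. A minimal sketch of the same constructor call with all four state callbacks registered (function names as in the question):

var game = new Phaser.Game(800, 600, Phaser.AUTO, 'phaser-example', {
    preload: preload,
    create: create,
    update: update,
    render: render
});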

Related

Creating MapKit JS with origin, destination and waypoints

I'm migrating from the Google Maps API to Apple's MapKit JS, for the simple reason that I have a developer account with them and they offer more free hits.
Anyway, actual examples of MapKit JS are a bit thin (or at least Google isn't finding them - draw what conspiracy theories you will), so although I've got the basics going of displaying an embedded map, I can't seem to do the next step, which is routing between two points (Apple's documentation also seems impenetrable, as they don't show examples).
Here's my script for a basic map:
<script>
    mapkit.init({
        authorizationCallback: function(done) {
            done('[MY-TOKEN]');
        }
    });

    var MarkerAnnotation = mapkit.MarkerAnnotation;
    var myMarker = new mapkit.Coordinate(55.9496320, -3.1866360);
    var myRegion = new mapkit.CoordinateRegion(
        new mapkit.Coordinate(55.9496320, -3.1866360),
        new mapkit.CoordinateSpan(0.003, 0.003)
    );
    var map = new mapkit.Map("map");
    var myAnnotation = new MarkerAnnotation(myMarker, { color: "#9b6bcc", title: "theSpace On The Mile" });

    map.showItems([myAnnotation]);
    map.region = myRegion;
</script>
Now I want to:
• Show a walking route between two points
• Include waypoints on the route
Could someone show the code that would achieve this? Once I can see an example I know I'll get it ;-)
OK, so I've found a solution to this, so I'm sharing it here for the benefit of others.
Let's start by saying Apple's MapKit JS doesn't appear to have a waypoints option as offered by the Google Maps API, so the way around that is to create a map that stores the markers in an array and then routes from one marker to the next. The code stores the location of the last waypoint in a variable and, for the first point in the array, doesn't try to draw a route back to a previous waypoint (obviously, since there isn't one yet).
<script>
    // Initialise MapKit - you'll need your own long-lived token for this
    mapkit.init({
        authorizationCallback: function(done) {
            done('[MY-TOKEN]');
        }
    });

    // Function to draw the route once MapKit has returned a response
    function directionHandler(error, data) {
        data["routes"].forEach(function(route, routeIdx) {
            if (routeIdx !== 0) { return; }
            overlays = [];
            route['path'].forEach(function(path) {
                // This styles the line drawn on the map
                let overlayStyle = new mapkit.Style({
                    lineWidth: 3,
                    strokeColor: "#9b6bcc"
                });
                let overlay = new mapkit.PolylineOverlay(path, {
                    style: overlayStyle
                });
                overlays.push(overlay);
            });
            map.addOverlays(overlays);
        });
    }

    // This asks MapKit for directions and, when it gets a response, sends it to directionHandler
    function computeDirections(origin, destination) {
        let directionsOptions = {
            origin: origin,
            destination: destination,
            transportType: mapkit.Directions.Transport.Walking
        };
        directions.route(directionsOptions, directionHandler);
    }

    // This sets the initial region, but it is overridden once all points have been plotted, to automatically set the bounds
    var myRegion = new mapkit.CoordinateRegion(
        new mapkit.Coordinate(55.9496320, -3.1866360),
        new mapkit.CoordinateSpan(0.05, 0.05)
    );
    var map = new mapkit.Map("map");
    map.region = myRegion;

    var myAnnotations = [];
    // lastWaypoint is 'unset' initially so the map doesn't try to find a route back to a previous waypoint for the first point of the route
    var lastWaypoint = "unset";
    var directions = new mapkit.Directions();

    // Array of co-ordinates and a label for each marker
    waypoints = [
        {name: 'Sofi’s Bar', lat: 55.9746308, lon: -3.1722282},
        {name: 'The Roseleaf Cafe', lat: 55.975992, lon: -3.173474},
        {name: 'Hemingway’s', lat: 55.9763631, lon: -3.1706564},
        {name: 'Teuchter’s Landing', lat: 55.9774693, lon: -3.1713826},
        {name: 'The King’s Wark', lat: 55.9761425, lon: -3.1695419},
        {name: 'Malt and Hops', lat: 55.975885, lon: -3.1698957},
        {name: 'The Carrier’s Quarters', lat: 55.9760813, lon: -3.1685323},
        {name: 'Noble’s', lat: 55.974905, lon: -3.16714},
        {name: 'The Fly Half', lat: 55.9747906, lon: -3.1674496},
        {name: 'Port O’ Leith', lat: 55.974596, lon: -3.167525}
    ];

    // Loop through the array and create a marker for each waypoint
    waypoints.forEach(function(data) {
        var myAnnotation = new mapkit.MarkerAnnotation(new mapkit.Coordinate(data['lat'], data['lon']), {
            color: "#9b6bcc",
            title: data['name']
        });
        myAnnotations.push(myAnnotation);

        // As long as this isn't the first point on the route, draw a route back to the last point
        if (lastWaypoint != "unset") {
            computeDirections(lastWaypoint, new mapkit.Coordinate(data['lat'], data['lon']));
        }
        lastWaypoint = new mapkit.Coordinate(data['lat'], data['lon']);
    });
    map.showItems(myAnnotations);
</script>
This map is for a pub crawl around Leith, so the transportType is 'Walking', but change that to 'Automobile' if you so wish.
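For example, swapping the transport type in computeDirections is a one-line change; a sketch of that variant (everything else stays the same):

function computeDirections(origin, destination) {
    let directionsOptions = {
        origin: origin,
        destination: destination,
        // Automobile gives driving directions instead of walking
        transportType: mapkit.Directions.Transport.Automobile
    };
    directions.route(directionsOptions, directionHandler);
}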
With credit to Vasile whose MapKit JS Demo (https://github.com/vasile/mapkit-js-demo) helped me understand a lot more about the options.

Use the audiocontext as audio input in p5.js

I am implementing a synthesizer which uses Web Audio API nodes to generate sound, and my goal is to visualize it using p5.
I currently have a script that analyzes audio with an FFT and visualizes the frequencies with bars. My audio input at the moment is a locally saved song, but I need to change it so that it uses the audio context as input.
Currently I can get the audio context with p5's own method getAudioContext(), but then I have no clue how to set it as the input for the visualization.
I know the API has a createBuffer() method, but I haven't found a way to use it as input for p5.
var fft;
var button;
var song;
var slider;
var audiocontext;
var out;
var prue;
var source;

function preload(){
    song = loadSound("src/media/Chopin - Nocturne op.9 No.2.mp3");
    button = createButton("Play");
    button.mousePressed(togglePlaying);
    slider = createSlider(0, 1, 0.5, 0.01);
    this.audiocontext = getAudioContext();
}

function setup() {
    createCanvas(windowWidth, windowHeight);
    fft = new p5.FFT(0.8);
    source = context.createBufferSource();
    widthBand = (width / 128);
    source.connect(context.destination);
}

function draw() {
    background(61);
    var spectrum = fft.analyze();
    noStroke();
    for (var i = 0; i < spectrum.length; i++) {
        var amp = spectrum[i];
        var y = map(amp, 0, 256, height, 0);
        fill(i, 255, 255);
        rect(i * widthBand, y, widthBand - 2, height - y);
    }
    // Set volume according to slider
    audiocontext.setVolume(slider.value());
}

// Play/Pause button
function togglePlaying(){
    if (!song.isPlaying()) {
        song.play();
        button.html("Pause");
    } else if (song.isPlaying()) {
        song.pause();
        button.html("Play");
    }
}
Any help would be very appreciated!
The AudioContext is not an input itself; it contains one or more input nodes (and outputs and connections and so on). p5 creates its own AudioContext and operates inside of that.
So, option one: build your app using p5 functionality only. It's a powerful library; all the needed tools (e.g. AudioIn(), MonoSynth() etc.) should be available.
Option two: initialise p5 first and then use the p5-created AudioContext to add extra nodes, which can later be used by p5.
var cnv, fft, audiocontext, osc;

// p5 setup.
function setup() {
    cnv = createCanvas();
    fft = new p5.FFT(0.8);

    // If a p5 AudioContext doesn't exist yet, a new one is created. Let's make it global.
    audiocontext = getAudioContext();

    // Now we can create our own input nodes, filters...
    myCustomSetup();

    // ...after which we can connect the fft to those created nodes.
    fft.setInput(osc);
}

function myCustomSetup() {
    // The p5 audiocontext is usable here, allowing use of the full Web Audio API
    // and connecting all nodes, whether created here or by some p5 function.
    osc = audiocontext.createOscillator();
}

Javascript - retrieve random image from array

I'm trying to write a program using Javascript and the p5.js library to trigger a random image from an array whenever a peak in an audio file is detected. p5's sound library can detect the audio peak for me and then trigger a function upon that peak. However, I don't have much experience in Javascript, so I'm not sure where to go from here. I've created an array of images and am planning on creating a function using Math.random() to grab one of these images. Can I then call that function within my triggerBeat function?
Also, I've set the image as the background so that it's not within p5's draw function, so I'm trying to change the bg variable. I've preloaded the background image, and I've also got code within the preload function to allow the user to upload an audio file.
Sorry if this doesn't make a ton of sense. I'm pretty new to Javascript and I've spent most of today trying to wrap my head around it.
EDIT: updated code
var cnv, song, fft, peakDetect, img, bg;
var imageset = new Array("1.png", "2.png", "3.png");

function preload(){
    img = loadImage("1.png");
    var loader = document.querySelector(".loader");
    document.getElementById("audiofile").onchange = function(event) {
        if (event.target.files[0]) {
            if (typeof song != "undefined") {
                song.disconnect();
                song.stop();
            }
            song = loadSound(URL.createObjectURL(event.target.files[0]));
            loader.classList.add("loading");
        }
    }
}

function setup() {
    cnv = createCanvas(900, 900);
    drawImage(imageset[0]);
    fft = new p5.FFT();
    peakDetect = new p5.PeakDetect();
    setupSound();
    peakDetect.onPeak(drawImage(imageset));
}

function draw() {
    drawImage();
}

function drawImage(arr) {
    var bg = loadImage(random(arr));
    background(bg);
    fill(0);
    text('play', width/2, height/2);
    fft.analyze();
    peakDetect.update(fft);
}

function setupSound() {
    cnv.mouseClicked( function() {
        if (song.isPlaying()) {
            song.stop();
        } else {
            song.play();
        }
    });
}
p5 has math functions, one of which is random.
If one argument is given and it is an array, returns a random element from that array.
EDIT
As the result got messier after answering the initial question, I have updated the whole code.
var cnv, song, fft, peakDetect, img, bg;
var imageset = new Array("pic1.png", "pic2.png", "pic3.png", "pic4.png");
var imagesArr = [];

// The next line makes p5 global. Otherwise the p5 functions would be
// accessible from the p5 struct functions only.
new p5();

/*******************************************************************
 * PRELOAD
 * we are using it for loading images/audio only
 ********************************************************************/
function preload(){
    // Load all images from 'imageset' into 'imagesArr'
    for (var i = 0; i < imageset.length; i++){
        loadImage('../img/' + imageset[i], function(img) {
            imagesArr.push(img);
        });
    }

    // Next let's load the soundfile(s).
    //song = loadSound("../snd/test.mp3");
    // I used a test file and didn't touch or test your code here,
    // BUT, again:
    // you should only (pre)load your sounds here; setting the event should go
    // into setup()
    var loader = document.querySelector(".loader");
    document.getElementById("audiofile").onchange = function(event) {
        if (event.target.files[0]) {
            if (typeof song != "undefined") {
                song.disconnect();
                song.stop();
            }
            song = loadSound(URL.createObjectURL(event.target.files[0]));
            loader.classList.add("loading");
        }
    }
}

/*******************************************************************
 * SETUP
 * runs once, use it for initialisation.
 ********************************************************************/
function setup() {
    // Create the canvas, draw the initial background and text
    cnv = createCanvas(900, 900);
    drawBackground();
    text('play', width/2, height/2);

    // Initiate fft and peakDetect. Set the event (onPeak)
    fft = new p5.FFT();
    peakDetect = new p5.PeakDetect();
    setupSound();
    peakDetect.onPeak(drawBackground);
}

/*******************************************************************
 * DRAW
 * endless loop. Here happens all the action.
 * But you cannot draw your background here, as it is done by the event.
 ********************************************************************/
function draw(){
    // fft and peak detection are in use.
    fft.analyze();
    peakDetect.update(fft);
}

function drawBackground() {
    background(255);
    background(random(imagesArr));
}

function setupSound() {
    cnv.mouseClicked( function() {
        if (song.isPlaying()) {
            song.stop();
        } else {
            song.play();
        }
    });
}
Use yourArray[Math.floor(Math.random() * yourArray.length)] to get a random image, calling it from within your triggerBeat function.
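A minimal sketch of that inside a handler (triggerBeat is the handler name from the question; imagesArr is assumed to hold p5.Image objects already loaded with loadImage, as in the answer above):

function triggerBeat() {
    // Pick a random preloaded image and draw it as the background
    var img = imagesArr[Math.floor(Math.random() * imagesArr.length)];
    background(img);
}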

Can we Add 3D Model AR.GeoLocation with Occlusion in Wikitude JAVASCRIPT API

Is it possible to use occlusion for a 3D model AR.GeoLocation in an AR.GeoObject?
We are working on an app that would display advertisement banners at a certain height on a building when you enter a predefined geofenced area.
Now, at the current stage, using Wikitude we are able to place the AR model onto the building. But the issue is that the model is visible through walls and through buildings that stand between the camera and the target building. We want to avoid this and show the ad only when there is no obstruction between the target building and the camera.
The confusion I have is that we want the occluder to be dynamic in its behaviour. So whether it's a building, a tree, a hand or anything else, the effect should be applied and the model should not be displayed for the portion that is being blocked.
Also, I have attached a couple of URLs for your reference to showcase what we are looking for:
https://www.youtube.com/watch?v=a5NLRMEnu2U
https://www.youtube.com/watch?v=CQFkTvEcfpk
var World = {
    loaded: false,
    rotating: false,

    init: function initFn() {
        this.createModelAtLocation();
    },

    createModelAtLocation: function createModelAtLocationFn() {
        /*
            First a location where the model should be displayed is defined. This location is relative to the user.
        */
        //var location = new AR.RelativeLocation(null, 5, 0, 2);
        var geoLoc = new AR.GeoLocation(23.027390, 72.558721, 320.); // National Handloom
        //var geoLoc = new AR.GeoLocation(23.028350, 72.506674, 320.); // Iscon
        //var geoLoc = new AR.GeoLocation(26.206274, 73.048096, 320.); // Jodhpur
        //var geoLoc = new AR.GeoLocation(40.319421, -74.631490, 320.); // client
        var location = new AR.RelativeLocation(geoLoc, 10, 10, 10);

        /*
            Next the model object is loaded.
        */
        var modelEarth = new AR.Model("assets/earth.wt3", {
            onLoaded: this.worldLoaded,
            scale: {
                x: 10,
                y: 10,
                z: 10
            }
        });

        var indicatorImage = new AR.ImageResource("assets/indi.png");
        var indicatorDrawable = new AR.ImageDrawable(indicatorImage, 0.1, {
            verticalAnchor: AR.CONST.VERTICAL_ANCHOR.TOP
        });

        /*
            Putting it all together, the location and 3D model are added to an AR.GeoObject.
        */
        var obj = new AR.GeoObject(location, {
            drawables: {
                cam: [modelEarth],
                indicator: [indicatorDrawable]
            }
        });
    },

    worldLoaded: function worldLoadedFn() {
        World.loaded = true;
        var e = document.getElementById('loadingMessage');
        e.parentElement.removeChild(e);
    }
};

World.init();
Please let me know
Thanks
Umesh.

Change texture of Pixi's AnimatedSprite

I use this code to set up a texture atlas animation:
PIXI.loader
    .add('out2', 'assets/out2.png')
    .load(function (loader, resources){
        onRotationsLoaded(loader, resources);
    });

function onRotationsLoaded(loader, resources) {
    first = new PIXI.extras.AnimatedSprite(setupFrames(resources["out2"].texture.baseTexture));
    app.renderer.plugins.prepare.upload(first, function(){
        console.log("loaded first");
        // ready to go
    });
}

function setupFrames(name) {
    var frames = [];
    // 'array' is an array that stores the correct position for each frame of the animation
    for (var i = 0; i < array.length; i++) {
        var rect = new PIXI.Rectangle(array[i].frame.x, array[i].frame.y, array[i].frame.w, array[i].frame.h);
        frames.push(new PIXI.Texture(name, rect));
    }
    return frames;
}
I would like to change the texture of the AnimatedSprite first in a click event or something. The new texture needs to be fetched from the server (I do not want to load them all at the start, because there are too many of them). I could destroy first and create a second AnimatedSprite, but is there a way to just change its texture atlas image?
I'd say simply replacing AnimatedSprite._textures would work.
first.textures = setupFrames('secondOne');
If the new texture has a different frame count from the previous one, you might like to call AnimatedSprite.prototype.gotoAndPlay(frame) right after replacing the textures, to reset the current frame.
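For the fetch-from-the-server part, a rough sketch might look like the following, reusing the setupFrames() helper from the question ('out3' and its path are assumptions, and the frame-position data for the new atlas would need to be available as well):

// Load a new atlas image on demand, e.g. from a click handler
PIXI.loader
    .add('out3', 'assets/out3.png')
    .load(function (loader, resources) {
        first.textures = setupFrames(resources['out3'].texture.baseTexture);
        first.gotoAndPlay(0); // restart playback on the new frames
    });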
