On the client side, when the socket opens it sends this message:
socket.onopen = function(event) {
    var jsonstring = JSON.stringify({"type": "join", "id": myStorage.username});
    socket.send(jsonstring);
};
On the server side we have an object called clientMessages, but I can't tell whether any data is getting added to it. Additionally, it needs to somehow hold all of the active users on the server.
var clientMessages = {};

socket.on("message", function (data) {
    var parsed = JSON.parse(data);
    if (parsed.type === "join") {
        var jsonstringy = JSON.stringify({ type: "history", value: clientMessages });
        socket.send(jsonstringy);
        clientMessages[parsed.id] = [parsed];
    } else if (parsed.type === "leave") {
        delete clientMessages[parsed.id];
    } else {
        clientMessages[parsed.id].push(parsed);
    }
});
You can console.log(clientMessages) every time you receive a message to check it, but this approach should be fine. I do not see any problem there.
EDIT:
OK, I saw the source code: var clientMessages = {}; is inside wss.on("connection", ...), which means that every time someone connects it resets that variable. You should move var clientMessages = {}; above the wss.on("connection", ...) handler.
Basically you will end up with something like this:
var clientMessages = {}; //line 20
wss.on("connection", function (socket) { //line 21 not edited
Whichever function wants to keep watch on this object can be sent Object.keys(clientMessages).length (the object itself has no length property). That function can then perform the next necessary actions.
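As a minimal sketch (assuming the ws library, where wss.clients is the set of open sockets; the helper name broadcastActiveUsers is just for illustration), you could report the active users to every connected client whenever a join or leave is handled:

// Minimal sketch: broadcast the current active users to every connected client.
// Assumes the `ws` library, where wss.clients is the set of open sockets.
function broadcastActiveUsers(wss) {
    var activeUsers = Object.keys(clientMessages);   // ids of everyone currently joined
    var payload = JSON.stringify({ type: "users", value: activeUsers });
    wss.clients.forEach(function (client) {
        client.send(payload);
    });
}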
I have an issue in which users aren't updating the case status when saving cases. The status field is required and usually populated. I have a requirement from the client to prompt the user to make sure the case status is updated and correct. Rather than building a lot of code for this, I leveraged a form, made every required field not visible (the values will already be there initially) and made only the case status fields visible.
I did this so that the form is always navigated to in OnSave, as follows:
The following is the JS:
function OnSave(context)
{
    debugger;
    var formContext = context.getFormContext();
    var saveCounter = 0;
    LoadCaseStatusModal(formContext, saveCounter);
}

function OnLoad(context)
{
    var formContext = context.getFormContext();
}
function LoadCaseStatusModal(formContext, saveCounter)
{
    debugger;
    formContext.data.entity.save();
    var formContext = formContext;
    if (formContext.ui.getFormType() == 2)
    {
        var lblForm = "Case Status Modal";
        if (formContext.ui.formSelector.getCurrentItem().getLabel() != lblForm)
        {
            var items = formContext.ui.formSelector.items.get();
            for (var i in items)
            {
                var item = items[i];
                var itemId = item.getId();
                var itemLabel = item.getLabel();
                if (itemLabel == lblForm)
                {
                    item.navigate();
                }
            }
        }
    }
}
The problem here is when I navigate here:
function LoadCaseStatusModal(formContext, saveCounter)
{
    debugger;
    formContext.data.entity.save();
The formContext.data.entity.save(); kicks OnSave() off again and the user gets a prompt asking them to save when they already saved. It totally kills the whole flow.
So I thought I'd create an OnSave helper variable like so: var saveCounter = 0;
I immediately knew this would cause problems.
Then I found this: https://learn.microsoft.com/en-us/power-apps/developer/model-driven-apps/clientapi/reference/save-event-arguments/getsavemode
The problem here is that it doesn't seem to tell me whether the OnSave function was executed by the user or kicked off by JS.
Am I thinking too hard? Am I missing something? Any extra eyes would help.
Regards,
I suppose .save() is async; that is why I suggest a setTimeout() below. In any case, using a global flag to make an exception for the programmatic save is one way to investigate.
let savingProgramatically = false;

function OnSave(context)
{
    debugger;
    // return if the flag is on
    if (savingProgramatically) return;
    var formContext = context.getFormContext();
    var saveCounter = 0;
    LoadCaseStatusModal(formContext, saveCounter);
}

function OnLoad(context)
{
    var formContext = context.getFormContext();
}

function LoadCaseStatusModal(formContext, saveCounter)
{
    debugger;
    // Set the flag true
    savingProgramatically = true;
    // save
    formContext.data.entity.save();
    // set the flag back to false asap -- Adjust the delay
    setTimeout(() => { savingProgramatically = false; }, 100); // maybe 0 delay works... Or no setTimeout at all.
...
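A possible refinement, assuming the promise-returning formContext.data.save() is available in your client API version (formContext.data.entity.save() does not hand back a promise): reset the flag when the save actually completes instead of after a guessed delay. A sketch, not a definitive implementation:

function LoadCaseStatusModal(formContext, saveCounter)
{
    savingProgramatically = true;
    // formContext.data.save() returns a promise, so the flag can be cleared deterministically
    formContext.data.save().then(
        function () { savingProgramatically = false; },  // save finished
        function () { savingProgramatically = false; }   // save failed; clear the flag anyway
    );
    // ...then navigate to the "Case Status Modal" form as before
}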
Before putting this in Cloud Code I tested it in Angular with success, producing the correct console.log responses throughout the program. Since this function manipulates data in the user table it must use the master key and live in Cloud Code. With this code in the cloud it saves the column 'duty' to the user table, but with no data (there is data to be saved, of this I am sure). Moreover, I'm not even sure the code runs past the first Parse query, as console.log returns nothing in the Parse logs. Where am I going wrong?
'use strict';
var express = require('express');
var app = express();
app.use(express.bodyParser());
var _ = require('underscore');
var server = require('http').createServer(app);

Parse.Cloud.define("updateMerchant", function(request, response) {
    Parse.Cloud.useMasterKey();
    var user = Parse.Object.extend("User");
    var merchantQuery = new Parse.Query(Parse.User);
    var Offers = Parse.Object.extend("Offer");
    var offerQuery = new Parse.Query(Offers);
    var Matches = Parse.Object.extend("Matched");
    var matchQuery = new Parse.Query(Matches);
    var merchantDuty = [];
    var merchants = request.params.data; // I confirmed the validity of this: a key/value pair where the value is an array of objects.
    var merchantIds = _.map(merchants, function(n) { return n.id; });
    console.log(merchantIds);
    offerQuery.containedIn("user", merchants);
    offerQuery.limit(1000);
    offerQuery.find({ // CODE STOPS RUNNING?!?
        success: function (offers) {
            var offerIds = _.map(offers, function (n) { return n.id; });
            console.log(offers); // this is telling, as it does not appear in the Parse log!
            var offersBeta = _.map(offers, function (n) {
                return _.extend(_.find(n), { id: n.id });
            });
            matchQuery.containedIn("offer", offers);
            matchQuery.limit(1000);
            matchQuery.find({
                success: function (matches) {
                    var merchantArray = _.map(_.flatten(matches), function (n) { return _.find(n); });
                    var offers3 = _.map(offersBeta, function (n) {
                        return _.extend(n, {
                            Matched: _.filter(merchantArray, function (a) { return a.offer.id == n.id; })
                        });
                    });
                    var duty = function (TotalBill, id) {
                        var promise = new Parse.Promise();
                        merchantQuery.get(id, {
                            success: function (merchantBill) {
                                merchantBill.set("duty", TotalBill);
                                merchantBill.save().then(function(obj) { console.log(obj); }, function(error) { console.log(error); });
                            }
                        });
                    };
                    merchantDuty.push(duty(_.map(offer9, function(n) { return n.TotalBill; }), _.map(offer9, function(n) { return n.id; })));
                },
                error: function() {
                    console.log(error);
                }
            });
        }
    });
    // Code begins running again!
    return Parse.Promise.when(merchantDuty).then(function() {
        response.success("Success");
    },
    function(error) {
        response.error("Something is still wrong");
        console.log(error);
    });
});
To be clear, nothing between offerQuery.find and the return Parse.Promise.when line is run.
You need to pass pointers in offerQuery.containedIn("user", merchants); (see the link in the code comment below).
Try this:
var _ = require('underscore');

Parse.Cloud.define("updateMerchant", function(request, response) {
    Parse.Cloud.useMasterKey();
    var merchantDuty = [];
    var merchants = request.params.data; // I confirmed the validity of this: a key/value pair where the value is an array of objects.
    // var merchantIds = _.map(merchants, function(n) {return n.id;});
    // console.log(merchantIds);

    // Since I don't have the merchants request parameter, I'll fake it with some fake users
    var fakeMerchants = [{"username":"Batman","objectId":"f7zZkPx7kT","createdAt":"2015-04-07T19:41:25.014Z","updatedAt":"2015-04-07T19:41:25.014Z","__type":"Object","className":"_User"},{"username":"Robin","objectId":"wgG4EfaFN1","createdAt":"2015-04-07T19:41:35.024Z","updatedAt":"2015-04-07T19:41:35.024Z","__type":"Object","className":"_User"}];

    // We can get some users like this:
    // var fakeMerchantsQuery = new Parse.Query(Parse.User);
    // fakeMerchantsQuery.find().then(function(users) {
    //     console.log(users);
    // });

    // Since the 'user' column in the Offer class is a pointer, we need to pass merchant pointers.
    // Otherwise we get the error "pointer field user needs a pointer value".
    // See https://www.parse.com/questions/using-containedin-with-array-of-pointers
    var fakeMerchantsPointers = _.map(fakeMerchants, function(merchant) { // TODO change to real merchants
        var pointer = new Parse.User();
        pointer.id = merchant.objectId;
        return pointer;
    });
    console.log(fakeMerchantsPointers);

    var offerQuery = new Parse.Query(Parse.Object.extend("Offer"));
    offerQuery.containedIn("user", fakeMerchantsPointers); // TODO change to real merchants
    offerQuery.limit(1000);
    offerQuery.find().then(function(offers) {
        console.log("inside offer query");
        console.log(offers);
        // Here I assume that the column 'offer' is a Pointer
        var matchQuery = new Parse.Query(Parse.Object.extend("Matched"));
        matchQuery.containedIn("offer", offers);
        matchQuery.limit(1000);
        return matchQuery.find();
    }).then(function(matches) {
        console.log("inside matches query");
        console.log(matches);
        // Add the duty stuff here...
        // We must call success or error
        response.success("Success");
    });
});
Let me know if it worked.
Please note that you shouldn't mix Cloud Code with ExpressJS code. The Cloud Code should be in main.js and the ExpressJS code in app.js. Then, in the Cloud Code main.js, call require('cloud/app.js'); if you want requests to pass through ExpressJS.
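A rough sketch of that split (file layout as in the legacy Parse hosting guide, if I recall correctly; adjust to your project):

// cloud/main.js -- Cloud Code only
require('cloud/app.js');   // loads the ExpressJS app defined in cloud/app.js

Parse.Cloud.define("updateMerchant", function(request, response) {
    // ...cloud function body as above...
});

// cloud/app.js -- ExpressJS only
var express = require('express');
var app = express();
app.use(express.bodyParser());
app.listen();   // on Parse hosting, listen() is called without a port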
The line return Parse.Promise.when(merchantDuty) is executing before there are any promises in the merchantDuty array (initialized as empty).
So the whole function terminates before your query's find success callback runs.
I think if you create and add query promises to the merchantDuty array you will fix your bug.
I also suggest using promise callbacks for the query methods, like:
query.find().then(function() {
    // success
}, function(error) {
    // error
});
You can then chain them by returning another promise, which makes the code better structured.
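For example, a rough sketch of how the queries from the question could be chained so the final response only fires after every save has finished (offerQuery and matchQuery created as in the question; the merchant/duty details inside the map are illustrative placeholders, not your actual logic):

offerQuery.find().then(function(offers) {
    matchQuery.containedIn("offer", offers);
    return matchQuery.find();                      // returning the promise keeps the chain flat
}).then(function(matches) {
    // compute each merchant's duty here, then save and collect the promises
    var saves = _.map(matches, function(match) {
        var merchant = match.get("user");          // illustrative: however you reach the merchant object
        merchant.set("duty", 0);                   // illustrative: set the real TotalBill here
        return merchant.save();                    // each save() returns a promise
    });
    return Parse.Promise.when(saves);              // wait for all of them
}).then(function() {
    response.success("Success");
}, function(error) {
    response.error(error);
});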
This question title was really worded incorrectly, but I have no idea how to word it. If anyone wants to edit it, please feel free.
Basically I'm trying to figure out how to get a specific instance of a player from the socket the data came from; this will be used to relay movements through my server for my small 2D project. Currently I have an object that stores all of the players by identification number. Some people say it's an object, some say it's a hashmap implementation, some say it's an array; whatever the hell it is, I'm using it.
var connectedPlayers = {};
When a connection is created I create a new player like so:
var playerId = Math.floor(Math.random()*(50000-1+1)+1);
var userId = Math.floor(Math.random()*(50000-1+1)+1);
var player = new Player(playerId, "Guest"+userId, socket);
connectedPlayers[playerId] = player;
Obviously this just generates random names/identification numbers for now, but that's perfectly fine. This is executed in the socket.on('connection', function() ...) call.
If you need to see the Player.js script, it's just a basic prototype script which sends information to all players about the player logging in; this all works properly.
var playerId;
var playerName;
var socket;
var positionX, positionY;
function Player(playerId, playerName, socket) {
    this.playerId = playerId;
    this.playerName = playerName;
    this.socket = socket;
    this.positionX = 250;
    this.positionY = 250;
    socket.emit('login', { playerID: playerId, playerX: this.positionX, playerY: this.positionY, playerName: playerName });
    socket.broadcast.emit('player-connected', { playerID: playerId, playerX: this.positionX, playerY: this.positionY, playerName: playerName });
}
Player.prototype.getId = function() {
    return this.playerId;
};

Player.prototype.getName = function() {
    return this.playerName;
};

Player.prototype.getSocket = function() {
    return this.socket;
};

Player.prototype.getX = function() {
    return this.positionX;
};

Player.prototype.getY = function() {
    return this.positionY;
};
My problem is, I need to be able to find out which Player belongs to a socket when data comes in, because I want to send movements to the clients authoritatively. To do this I can't give the client any control over which playerId it claims, because if I let the client tell the server, other players could be moved at random.
The only way I've thought of is something like this, but it doesn't seem very efficient.
for (var i in connectedPlayers) {
    if (connectedPlayers[i].getSocket() == socket) {
        // do stuff
    }
}
EDIT: Adding the entire sockets.on('connection') due to a request from comments.
socketIO.sockets.on('connection', function(socket) {
    console.log("Connection made.");
    socket.on('login', function(json) {
        sqlConnection.query('select * FROM Accounts', function(err, rows, fields) {
            if (err) throw err;
            for (var row in rows) {
                if (rows[row].Username == json.username) {
                    var playerId = Math.floor(Math.random() * (50000 - 1 + 1) + 1);
                    var player = new Player(playerId, "Guest" + playerId, socket);
                    connectedPlayers[playerId] = player;
                }
            }
        });
    });
    socket.on('move-request', function(json) {
        socket.emit('move-request', {x: json.x, y: json.y, valid: true});
    });
});
You can just add one or more properties to the socket object upon connection:
socketIO.sockets.on('connection', function(socket) {
    console.log("Connection made.");
    socket.on('login', function(json) {
        sqlConnection.query('select * FROM Accounts', function(err, rows, fields) {
            if (err) throw err;
            for (var row in rows) {
                if (rows[row].Username == json.username) {
                    var playerId = Math.floor(Math.random() * (50000 - 1 + 1) + 1);
                    var player = new Player(playerId, "Guest" + playerId, socket);
                    connectedPlayers[playerId] = player;
                    // add custom properties to the socket object
                    socket.player = player;
                    socket.playerId = playerId;
                }
            }
        });
    });
    socket.on('move-request', function(json) {
        // you can access socket.playerId and socket.player here
        socket.emit('move-request', {x: json.x, y: json.y, valid: true});
    });
});
So now, any time you get an incoming message on a socket, you have access to the playerId and player. In your code, when a message arrives, the socket variable for that message is in the parent scope and accessible, so you can get the player and playerId from it.
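A small usage sketch of the authoritative move handler under that scheme (the validation check is just a placeholder for whatever rules you actually apply):

socket.on('move-request', function(json) {
    var player = socket.player;            // set during 'login' above
    if (!player) return;                   // ignore moves from sockets that never logged in
    // validate the requested move server-side before accepting it (placeholder check)
    var valid = Math.abs(json.x - player.getX()) <= 1 && Math.abs(json.y - player.getY()) <= 1;
    socket.emit('move-request', { x: json.x, y: json.y, valid: valid });
});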
I've run into this situation a few times when storing "objects" in a C hash. I use a 32-bit hash function specifically tailored for pointers.
I'm not sure what you could use to uniquely identify the socket instance and suggest you see what underlying fields exist for that purpose. If you do find something that uniquely identifies the socket, you can feed that into a hash algorithm to generate a key. Of course, you would need a hash implementation (most likely in JavaScript from the looks of it). I found this: http://www.timdown.co.uk/jshashtable/.
Good luck!
I'm building this function to upload small tile images to the server.
The client builds the tileBuffer and then calls the fireTiles function.
Here I would like to build a loop based on tileBuffer.length, with the server in control. So I emit StartAddTiles and I'm immediately called back by the server with the AnotherTile event. The debugger shows me I've been called by the server and I see the code going into the socket.on('AnotherTile', ...) handler.
The problem is that when the code reaches the AddTile emit, it stops there and nothing happens. The server does not receive the request and the loop terminates there.
Where is the error in my code?
function fireTiles(tileBuffer, mapSelected) {
    var tiles = tileBuffer.length;
    var tBx = 0;
    try
    {
        var socket = io.connect('http://myweb:8080/');
        socket.emit('StartAddTiles', tiles, mapSelected);
        socket.on('AnotherTile', function (tlN) {
            if (tlN < tiles) {
                var data = tileBuffer[tlN]; // use tlN so the server drives the loop
                tBx++; // debug purpose
                socket.emit('AddTile', mapSelected, data, tBx);
            } else {
                // something went wrong
                alert('Error calculating tiles');
                return;
            }
        });
    }
    catch (err)
    {
        document.getElementById('status').innerHTML = err.message;
    }
}
Here is the server side:
io.sockets.on('connection', function(client) {
    console.log('Connecting....');
    // controls are limited, this is just a beginning
    // Initiate loop
    client.on('StartAddTiles', function(tiles, mapSelected) {
        var mapId = mapSelected;
        mapLoading[mapId] = { // Create a new entry in the mapLoading variable
            tilesToLoad: tiles,
            tilesLoaded: 0
        };
        console.log('Start loading ' + mapLoading[mapId].tilesToLoad + ' tiles.');
        // Ask for the first tile
        client.emit('AnotherTile', mapLoading[mapId].tilesLoaded);
    });
    // client adds new Tile/Tiles
    client.on('addTile', function(mapSelected, data, tBx) {
        var mapId = mapSelected;
        mapLoading[mapId].tilesLoaded += 1;
        console.log('Adding tile ' + mapLoading[mapId].tilesLoaded + ' of ' + mapLoading[mapId].tilesToLoad + ' tBx ' + tBx);
        // insert Tile
        db_manager.add_tiles(tileBuffer, function(result) {
            if (mapLoading[mapId].tilesLoaded == mapLoading[mapId].tilesToLoad) { // full map loaded
                mapLoading[mapId] = ""; // reset the buffer
                client.emit('TilesOk', mapLoading[mapId].tilesLoaded);
            } else {
                console.log('requesting tile num: ' + mapLoading[mapId].tilesLoaded);
                client.emit('AnotherTile', mapLoading[mapId].tilesLoaded);
            }
        });
    });
});
The event names are case-sensitive, so you should use AddTile instead of addTile on the server side too.
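In other words, keeping the rest of the handler exactly as posted:

// Server side: register the handler under the same name the client emits
client.on('AddTile', function(mapSelected, data, tBx) {
    // ...same handler body as above...
});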
I want to update a div with a list of anchors that I generate from a local database in Chrome. It's pretty simple stuff, but as soon as I try to pass the data back to the main.js file via a callback, everything suddenly becomes undefined, or the array length is reported as 0 (when it's really 18).
Initially, I tried to insert the data into a new array and pass it back that way.
Is there a setting that I need to specify in the Chrome manifest.json in order to allow communication with the database API? I've checked, but all I've been able to find is 'unlimited storage'.
The code is as follows:
window.main = {};
window.main.classes = {};

(function(awe) {
    awe.Data = function(opts) {
        opts = opts || new Object();
        return this.init(opts);
    };

    awe.Data.prototype = {
        init: function(opts) {
            var self = this;
            self.modified = true;
            var db = self.db = openDatabase("buddy", "1.0", "LocalDatabase", 200000);
            db.transaction(function(tx) {
                tx.executeSql("CREATE TABLE IF NOT EXISTS listing ( name TEXT UNIQUE, url TEXT UNIQUE)", [], function(tx, rs) {
                    $.each(window.rr, function(index, item) {
                        var i = "INSERT INTO listing (name,url)VALUES('" + item.name + "','" + item.url + "')";
                        tx.executeSql(i, [], null, null);
                    });
                }, function(tx, error) {
                });
            });
            self._load()
            return this;
        },
        add: function(item) {
            var self = this;
            self.modified = true;
            self.db.transaction(function(tx) {
                tx.executeSql("INSERT INTO listing (name,url)VALUES(?,?)", [item.name, item.url], function(tx, rs) {
                    //console.log('success',tx,rs)
                }, function(tx, error) {
                    //console.log('error',error)
                })
            });
            self._load()
        },
        remove: function(item) {
            var self = this;
            self.modified = true;
            self.db.transaction(function(tx) {
                tx.executeSql("DELETE FROM listing where name='" + item.name + "'", [], function(tx, rs) {
                    //console.log('success',tx,rs)
                }, function(tx, error) {
                    //console.log('error',tx,error);
                });
            });
            self._load()
        },
        _load: function(callback) {
            var self = this;
            if (!self.modified)
                return;
            self.data = new Array();
            self.db.transaction(function(tx) {
                tx.executeSql('SELECT name,url FROM listing', [], function(tx, rs) {
                    console.log(callback)
                    for (var i = 0; i < rs.rows.length; i++)
                    {
                        callback(rs.rows.item(i).name, rs.rows.item(i).url)
                        // var row = rs.rows.item(i)
                        // var n = new Object()
                        // n['name'] = row['name'];
                        // n['url'] = row['url'];
                    }
                }, function(tx, error) {
                    //console.log('error',tx,error)
                })
            })
            self.modified = false
        },
        all: function(cb) {
            this._load(cb)
        },
        toString: function() {
            return 'main.Database'
        }
    }
})(window.main.classes);
And the code to update the list.
this.database.all(function(name, url) {
    console.log('name', 'url')
    console.log(name, url)
    var data = []
    $.each(data, function(index, item) {
        try {
            var node = $('<div > ' + item.name + '</div>');
            self.content.append(node);
            node.unbind();
            node.bind('click', function(evt) {
                var t = $(evt.target).attr('href');
                chrome.tabs.create({
                    "url": t
                }, function(evt) {
                    self._tab_index = evt.index
                });
            });
        } catch (e) {
            console.log(e)
        }
    })
});
From looking at your code above, I notice you are executing self._load() at the end of each function in your API. The HTML5 SQL database is asynchronous, so you can never guarantee when the result arrives. In this case, I would expect the result to be 0 or random because it is a race condition.
I have done something similar in my fb-exporter extension, feel free to see how I have done it https://github.com/mohamedmansour/fb-exporter/blob/master/js/database.js
To solve a problem like this, check the Web Inspector and see if any errors occur in the background page. I assume this is all in a background page, eh? If no errors occur, I believe you're encountering a race condition. Just move the load logic within the callback and it should work properly.
Regarding your first question about the unlimited storage manifest attribute, you don't need it for this case; that shouldn't be the issue. The limit of Web SQL databases is 5 MB (last I recall, it might have changed); if you're manipulating a lot of data, then you use that attribute.
Just make sure you can guarantee that this.database.all runs after the database has been initialized.
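A minimal sketch of what "move the load within the callback" could look like, reusing the names from the code above (only _load and all change; this is one way to do it, not the only one):

_load: function(callback) {
    var self = this;
    self.db.transaction(function(tx) {
        tx.executeSql('SELECT name,url FROM listing', [], function(tx, rs) {
            // executeSql is asynchronous: hand the rows back here, inside its success callback
            for (var i = 0; i < rs.rows.length; i++) {
                if (callback) callback(rs.rows.item(i).name, rs.rows.item(i).url);
            }
            self.modified = false;
        }, function(tx, error) {
            console.log('error', tx, error);
        });
    });
},
all: function(cb) {
    this._load(cb);   // the callback now always reaches the query's success handler
}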