I spent a while trying to diagnose this error.
First, I created a subclass of EventEmitter:
File Client.js
var util = require('util');
var EventEmitter = require('events').EventEmitter;
var request = require('request');
var HTTPStatus = require('http-status'); // assumed; url is defined elsewhere in the file

// Bind the callback to the active domain (if any) so errors are routed to it
var bindToProcess = function(fct) {
  if (fct && process.domain) {
    return process.domain.bind(fct);
  }
  return fct;
};
function Client() {
  EventEmitter.call(this);
}

util.inherits(Client, EventEmitter);

Client.prototype.success = function(fct) {
  this.on('success', bindToProcess(fct));
  return this;
};
Client.prototype.login = function(username, password) {
  var body = {
    username: username,
    password: password
  };
  var self = this;
  request.post(url, { json: true, body: body }, function(error, response, body) {
    if (error || response.statusCode != HTTPStatus.OK) {
      return self.emit('error', error);
    }
    return self.emit('success', body);
  });
  return this;
};
module.exports = Client;
Then, in another file in my Express app:
File user.js
var Client = require('../utils/client');
var client = new Client();

// GET '/login'
exports.login = function(req, res) {
  client.login(username, password).success(function(user) {
    res.redirect('/');
  }).error(function(error) { // an .error() helper symmetric to .success() is assumed
    res.redirect('login');
  });
};
The thing is, though, that on the second request the server crashes with the error:
Error: Can't set headers after they are sent.
In the interim I've solved the problem by creating the Client inside the middleware rather than having it as a global variable. I'm just curious why this is happening.
Thanks,
(hopefully there is enough information)
What happens here is that the event handlers attached during the first request are called again during the second request, because the client variable is shared between requests.
At first, the client is created in the global scope. Then two handlers are attached to its events, the request is actually performed, and the corresponding handler is called.
On the second request, two more handlers are attached to the same object, and then on either success or failure both pairs of handlers (from the previous call and from the current one) are notified.
So you need to either move the client creation into the action method, or change how the code responds to the events. I can suggest a promise-like approach: pass two callbacks as parameters to one method; or just the standard Node callback approach: pass the error as the first argument of a single callback.
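A minimal sketch of the first option, reusing the Client module from above (a fresh instance per request means handlers never accumulate):

var Client = require('../utils/client');

// GET '/login'
exports.login = function(req, res) {
  var client = new Client(); // new instance per request: no stale handlers
  client.login(username, password).success(function(user) {
    res.redirect('/');
  }).error(function(error) {
    res.redirect('login');
  });
};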
Related
Here I am working on a socket server and connecting it to an Angular client. Everything goes well; the problem I am facing is that whenever I call getRequest() it should render the data from the 'message' event I am listening to:
getRequest() {
  this.sock.getData();
  this.sock.socket.on('message', (data) => {
    this.getresponseData = data;
  });
  this.sock.socket.on('disconnect', () => {
    console.log('A user disconnected');
  });
}
After this, when I call another method such as postRequest(), the message emitted by the post request also triggers the 'message' handler registered in getRequest().
Below is my post request:
postRequest() {
  this.sock.postData(this.postData);
  this.sock.socket.on('message', (data) => {
    this.responseData = data;
  });
  this.sock.socket.on('disconnect', () => {
    console.log('A user disconnected');
  });
}
In short, whenever I call postRequest(), the data emitted as a 'message' also reaches the 'message' handler in getRequest(), and that should not happen.
On the socket server side, for both requests I am using:
socket.send(data);
Socket inherits every method of the Emitter class, which provides a once method for registering a single-shot listener.
The listener is deregistered after it has been invoked the first time:
getRequest() {
  this.sock.getData();
  this.sock.socket.once('message', (data) => {
    this.getresponseData = data;
  });
  this.sock.socket.on('disconnect', () => {
    console.log('A user disconnected');
  });
}
You don't want to subscribe to 'message' in both methods.
If you want responses routed according to the method, you should register different event names instead of a single generic 'message' event, and have the server send each message under the corresponding name:
this.sock.socket.on('getmessage', (data) => {
  this.getresponseData = data;
});
this.sock.socket.on('postmessage', (data) => {
  this.responseData = data;
});
Another approach would be to have some identifier in the API response:
this.sock.socket.on('message', (data) => {
  if (data.m.type == 'get') {
    this.responseData = data;
  } else {
    this.responsePostData = data;
  }
});
Another approach would be to use socket.emit on the server with a named event, instead of socket.send (which always emits the generic 'message' event).
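A minimal server-side sketch of that idea; the event names mirror the getmessage/postmessage example above, and getData/postData are placeholder payloads:

// instead of socket.send(data), which always fires the 'message' event:
socket.emit('getmessage', getData);   // reaches only the 'getmessage' listener
socket.emit('postmessage', postData); // reaches only the 'postmessage' listener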
What you are doing wrong is that you are explicitly calling

this.sock.socket.on('message', (data) => {
  this.getresponseData = data;
});

each time. This attaches another listener to the same client on every call, which is not desired at all.
This handler should be registered exactly once for the whole lifecycle of your app, unless you explicitly call socket.removeAllListeners('message') to clear it.
Register it only once. And if what you actually want is a single piece of data in response to a request, a socket stream is the wrong choice; you may want to use a classical HTTP request there.
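A sketch of that singleton idea, assuming an Angular-style service class (SocketService and lastMessage are illustrative names): the listeners are bound once in the constructor, and the request methods only send data.

class SocketService {
  constructor(sock) {
    this.sock = sock;
    // bound exactly once for the app's lifetime
    this.sock.socket.on('message', (data) => {
      this.lastMessage = data; // route the data wherever the app needs it
    });
    this.sock.socket.on('disconnect', () => {
      console.log('A user disconnected');
    });
  }

  getRequest() { this.sock.getData(); }
  postRequest(payload) { this.sock.postData(payload); }
}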
In my controller, called MapController, I have a function that parses remote JSON files and, through an if-else structure, pushes some values into an array called "parsewebservice". Apparently everything works fine, but console.log(parsewebservice) prints an empty array at the point where I call it. When I put it inside the forEach it does print the values, but cluttered and repeated, so that is not the right way.
I wanted to know why the values pushed into "parsewebservice" are not there when I read the variable after it has been populated, and what the correct way to do this would be.
Here is my code below:
/**
 * MapController
 *
 * @description :: Server-side logic for managing Maps
 * @help        :: See http://sailsjs.org/#!/documentation/concepts/Controllers
 */
module.exports = {
  index: function(req, res, next) {
    Data.find(function foundData(err, datas) {
      if (err) return next(err);
      var parsewebservice = [];
      datas.forEach(function(data, index) {
        var req = require("request");
        var url = data.address + "?f=pjson";
        req(url, function(err, res, retorno) {
          if (err) {
            console.log(err);
          } else {
            var camadas = JSON.parse(retorno);
            if (camadas.mapName) {
              camadas.layers.forEach(function(campo, i) {
                if (campo.subLayerIds != null) {
                } else if (campo.subLayerIds == null) {
                  parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
                }
              });
            } else if (camadas.serviceDataType) {
              parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
            } else if (camadas.type) {
              parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
            }
          }
        });
      });
      console.log(parsewebservice);
    });
  },
};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose your access to the original req object!
So, you need to run a list of async tasks, and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as you've written it. Your console.log at the bottom runs before any of the callbacks you pass to your external requests.
The right way to do this is to use promises. It looks like you are using the request library, whose returned requests can only accept callbacks, not be returned as promises. You can create your own promise wrapper for them, or use an alternative library (several are recommended on its project page).
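For instance, a minimal promise wrapper could look like this sketch (requestAsync is a name introduced here, not part of the library; datas is the array from the controller above):

var request = require('request');

// wrap a single request in a Promise
function requestAsync(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(err, res, body) {
      if (err) return reject(err);
      resolve(body);
    });
  });
}

// Promise.all resolves once every request has finished
Promise.all(datas.map(function(d) { return requestAsync(d.address + "?f=pjson"); }))
  .then(function(bodies) { /* all responses are available here */ });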
I don't want to write a whole intro-to-promises right here, so what I will do is give you a less pretty, but maybe more understandable way to run some code at the completion of all your requests.
Data.find(function foundData(err, datas) {
  if (err) return next(err);
  var parsewebservice = [];
  // here we will write some code that we will run once per returned data
  var processResponse = function(resp) {
    parsewebservice.push(resp);
    if (parsewebservice.length >= datas.length) {
      // we are done, that was the final request
      console.log(parsewebservice);
      return res.send({ data: parsewebservice }); // or whatever
    }
  };
  datas.forEach(function(data, index) {
    var request = require("request");
    var url = data.address + "?f=pjson";
    request(url, function(err, res, retorno) {
      // do some processing of retorno...
      // call our function to handle the result
      processResponse(retorno);
    });
  });
  console.log(parsewebservice); // still an empty array here
});
I solved the problem.
the "request" module is asynchronous so we need to wait for it to respond and then send the response to the view.
To do this we created a function called "foo" to contain the foreach and the request, we made a callback of that function and finally we made the response (res.view) within that function, so that the controller response would only be sent after the response of the "foo" function to the callback. So we were able to parse.json the data from the "data" collection using foreach and the "request" module and send the objects to the view.
Many thanks to all who have helped me, my sincere thanks.
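A minimal sketch of that approach; foo is the name from the description above, while the pending counter and the view name are assumptions:

function foo(datas, done) {
  var request = require("request");
  var parsewebservice = [];
  var pending = datas.length;
  datas.forEach(function(data) {
    request(data.address + "?f=pjson", function(err, res, retorno) {
      if (!err) {
        // ...parse retorno and push rows into parsewebservice as before...
      }
      // when the last response has arrived, hand the array to the callback
      if (--pending === 0) done(parsewebservice);
    });
  });
}

// in the controller action:
foo(datas, function(parsewebservice) {
  res.view('map', { parsewebservice: parsewebservice }); // 'map' is illustrative
});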
After struggling with socket.io connection authentication (here and here) and thanks to @sgress454, I realized how to get this to work, and I am now sending the authentication/authorization token as part of the query in the connection (see below).
Upon authentication failure (an invalid/expired token or an inactive user), I invoke the callback with a false parameter to indicate the connection is rejected.
On the client side, though, I am not sure how I should handle this; the socket seems to keep trying to reconnect even after I explicitly disconnect.
The client code is something like this:
var _onConnectError = function(err) {
  if (err.description == 400) {
    console.log("Connection rejected");
    _disconnectAndCleanupSocket();
  } else {
    console.log("##SOCKET - connect_error", err.description, err);
  }
}
var _disconnectAndCleanupSocket = function() {
  if (io.socket) {
    if (io.socket.isConnected()) {
      io.socket.disconnect();
    }
    io.socket.removeAllListeners();
    delete io.socket;
  }
};
io.socket = io.sails.connect({ query: "token=" + token});
io.socket.on('connect', _onConnect);
io.socket.on('connect_error', _onConnectError);
io.socket.on('reconnect_error', _onConnectError);
On the server (config/sockets.js) I have:
beforeConnect: function(handshake, cb) {
  var token = handshake._query ? handshake._query.token : null;
  CipherService.verifyToken(token, function verifyTokenResults(err, decoded, info) {
    if (err || !decoded) {
      if (err && err.name === "TokenExpiredError") {
        // token expired - user can't connect...
        return cb(null, false);
      } else {
        // some other error...
        return cb(err, false);
      }
    }
    AuthUser.findOne(decoded.user.id).exec(function(err, user) {
      if (err || !user || !user.is_active) {
        return cb(null, false);
      }
      return cb(null, true);
    });
  });
  // (false rejects the connection)
},
I have tried to find documentation and explored the response object (in the developer tools), but the only thing I saw there was the description field, which returns 400 on rejection and 0 when there is no response (e.g. the server is down).
Is there some example/documentation for this? Overall, I didn't find a detailed description of using SailsSocket in non-standard cases (other than via io.sails.connect()).
What is the proper way to handle such a rejection (and shouldn't the Sails socket.io client handle it itself)?
As an aside, I cannot instantiate a SailsSocket myself and can only do so with the io.sails.connect() function. Is that on purpose? Is there no risk of missing an event when I create the socket with the connect method and only assign event handlers afterwards?
The short answer to your question is that you can set the reconnection flag to turn automatic reconnection on or off:
// Disable auto-reconnect for all new socket connections
io.sails.reconnection = false;
// Disable auto-reconnect for a single new socket connection
var mySocket = io.sails.connect({reconnection: false});
As far as SailsSocket creation, you are correct in that io.sails.connect() is the only way to create a SailsSocket. Since the connection is asynchronous, any event handlers you bind immediately after calling connect will be added before the actual connection takes place, so you won't miss any notifications.
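In other words, this pattern is safe; a sketch combining the flags from above:

// connect() returns immediately, but the actual connection completes later,
// so handlers bound synchronously right afterwards are registered in time
var mySocket = io.sails.connect({ query: "token=" + token, reconnection: false });
mySocket.on('connect', function() { console.log('connected'); });
mySocket.on('connect_error', function(err) { console.log('rejected', err); });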
Note: I'm using Autobahn.js for the client-side WAMP implementation, and when.js for promises.
I'm trying to create reusable code so that only one websocket session (connection) exists, and whenever a dev wants to subscribe to a topic using Autobahn, they can reuse the current connection object if it already exists; otherwise a new one is created.
My issue is that if the connection already exists, I have to use a setTimeout() to wait a second to make sure it's actually connected, and then duplicate all the subscription code; I don't like this at all.
Here's my current code:
(function() {
  var connection = null;

  subscribeTo('subject', __userId, __token, function(onconnect) {
    console.log('Yay, connected');
  });

  function subscribeTo(subject, userId, token, onConnect, onDisconnect) {
    if (connection === null) {
      connection = new ab.Session('ws://localhost:8080', function(onopen) {
        connection.subscribe(JSON.stringify({subject: subject, userId: userId, token: token}), function(subscription, data) {
          data = $.parseJSON(data);
          // Do something with the data ...
        });
        if (typeof onConnect === 'function') {
          onConnect();
        }
      }, function(onclose) {
        if (typeof onDisconnect === 'function') {
          onDisconnect();
        }
      }, { 'skipSubprotocolCheck': true });
    }
  }
})();
Great. Now the issue is: what if I call subscribeTo() again straight after the previous one? The connection won't be null any more, but it also won't be connected yet. So the following is what I have to do:
// subscribeTo() multiple times at the top ...
subscribeTo('subject', __userId, __token, function(onconnect) {
  console.log('Yay, connected');
});
subscribeTo('anothersubject', __userId, __token, function(onconnect) {
  console.log('Yay, connected');
});

// The first one works, the second one requires a setTimeout() for the connection
// if connection is NOT null...
} else {
  setTimeout(function() {
    connection.subscribe(topic... etc...) // Really!?
  }, 1000);
}
Remove the setTimeout() and you'll get an error saying that "Autobahn is not connected".
Is there a better way to have a single, re-usable connection, without code-duplication, or am I doomed to create a new connection for each subscription because of the promises (perhaps I can use promises to my advantage here, although I haven't used them before this)?
This is all way too complex, unneeded and wrong. You want to do your subscribes in response to a session being created:
var session = null;

function start() {
  // turn on WAMP debug output
  //ab.debug(true, false, false);

  // use jQuery deferreds instead of the bundled whenjs
  //ab.Deferred = $.Deferred;

  // Connect to WAMP server ..
  //
  ab.launch(
    // WAMP app configuration
    {
      // WAMP URL
      wsuri: "ws://localhost:9000/ws",
      // authentication info
      appkey: null, // authenticate as anonymous
      appsecret: null,
      appextra: null,
      // additional session configuration
      sessionConfig: {maxRetries: 10, sessionIdent: "My App"}
    },
    // session open handler
    function (newSession) {
      session = newSession;
      main();
    },
    // session close handler
    function (code, reason, detail) {
      session = null;
    }
  );
}

function main() {
  session.subscribe("http://myapp.com/mytopic1", function(topic, event) {});
  session.subscribe("http://myapp.com/mytopic2", function(topic, event) {});
  session.subscribe("http://myapp.com/mytopic3", function(topic, event) {});
}

start();
The ab.launch helper will manage automatic reconnects for you (and also do WAMP-CRA authentication if required). The session open handler (main() here) is then automatically called again when a reconnect happens. Using the raw Session object is not recommended (unless you know what you are doing).
Also: topics must be URIs from the http or https scheme. Using serialized objects (JSON) as topic names is not allowed.
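A sketch of what that means for the example above (the URI and payload are illustrative): the topic becomes a plain http URI, and the user/token data travels in the payload rather than in the topic name.

// topic is an http URI; the subject/user data is no longer serialized into it
session.subscribe("http://myapp.com/subject", function(topic, event) {
  // Do something with the event payload ...
});

// publishing uses the same URI, with a plain object as the event payload
session.publish("http://myapp.com/subject", { userId: __userId, token: __token });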
I have a Node.js HTTP server running which goes like this (simplified):
var http = require('http');
var help = require('./modules/help').run;
var router = require('./modules/router').run;
var m = {something: require('./modules/something')};

var server = http.createServer(router).listen(8001);
"help" is a set of functions-helpers, for example:
module.exports.run = {
  joinObjects: function(obj1, obj2) {
    for (var prop in obj2) {
      obj1[prop] = obj2[prop];
    }
    return obj1;
  }
}
"router" handles the request (passes it further down and handles response to the client):
module.exports.run = function(req, res) {
  var urlPath = url.parse(req.url).pathname;
  switch (urlPath) {
    case '/something':
      requestHandler(req, res, 'something');
      break;
    ...
  }

  function requestHandler(req, res, handler) {
    var data = '';
    req.on('data', function(chunk) {
      data += chunk;
    });
    req.on('end', function() {
      m[handler].run(req, data, function(response) {
        response.headers = help.joinObjects(config.responseHeaders, response.headers);
        res.writeHead(response.code, response.headers);
        res.end(response.data);
      });
    });
  }
}
The "handler" module/function runs the callback function and passes the response (code, headers and data) to it. The callback function then merges headers with a set of default headers that are set in the config file.
THE ISSUE: When two connections call help.joinObjects() at the same time (that's my guess), the response.headers property collides with that of another user/connection and bad data is returned. If I comment out the line that does the merging, this does not occur.
THE QUESTION: What's wrong with my approach to the "help" module? There is some kind of scope issue here that I do not understand.
As lazyhammer pointed out, the problem was the order in which I passed the objects into the helper function: help.joinObjects(config.responseHeaders, response.headers).
JavaScript passes objects by reference, so every time help.joinObjects() was called, it wrote the request-specific headers into the shared config.responseHeaders object instead of into a per-request object.
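A minimal sketch of the fix, keeping joinObjects as-is and merging into a fresh object so the shared defaults are never mutated:

// copy the defaults into a new object first, then merge the per-request
// headers into that copy; config.responseHeaders is left untouched
response.headers = help.joinObjects(
  help.joinObjects({}, config.responseHeaders),
  response.headers
);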