Node.js return result of file - javascript

I'd like to make a Node.js function that, when called, reads a file and returns its contents. I'm having difficulty doing this because 'fs' is evented. Thus, my function has to look like this:
function render_this() {
    fs.readFile('sourcefile', 'binary', function(e, content) {
        if (e) throw e;
        // I have the content here, but how do I tell people?
    });
    return /* oh no I can't access the contents! */;
};
I know that there might be a way to do this using non-evented IO, but I'd prefer an answer that allows me to wait on evented functions so that I'm not stuck again if I come to a situation where I need to do the same thing, but not with IO. I know that this breaks the "everything is evented" idea, and I don't plan on using it very often. However, sometimes I need a utility function that renders a haml template on the fly or something.
Finally, I know that I can call fs.readFile and cache the results early on, but that won't work because in this situation 'sourcefile' may change on the fly.

OK, so you want your development version to automatically load and re-render the file each time it changes, right?
You can use fs.watchFile to monitor the file and then re-render the template each time it changes. I suppose you've got some kind of global variable in your app which states whether the server is running in dev or production mode:
var fs = require('fs');
var http = require('http');

var DEV_MODE = true;

// Let's encapsulate all the nasty bits!
function cachedRenderer(file, render, refresh) {
    var cachedData = null;

    function cache() {
        fs.readFile(file, function(e, data) {
            if (e) {
                throw e;
            }
            cachedData = render(data);
        });

        // Watch the file if needed, and re-render + cache it whenever it changes.
        // You may also move cachedRenderer into a different file and then use a
        // global config option instead of the refresh parameter.
        if (refresh) {
            fs.watchFile(file, {'persistent': true, 'interval': 100}, function() {
                cache();
            });
            refresh = false;
        }
    }

    // simple getter
    this.getData = function() {
        return cachedData;
    }

    // initial cache
    cache();
}

var ham = new cachedRenderer('foo.haml',
    // supply your custom render function here
    function(data) {
        return 'RENDER' + data + 'RENDER';
    },
    DEV_MODE
);

// start server
http.createServer(function(req, res) {
    res.writeHead(200);
    res.end(ham.getData());
}).listen(8000);
Create a cachedRenderer and then access its getData method whenever needed; if you're in development mode it will automatically re-render the file each time it changes.

function render_this(cb) {
    fs.readFile('sourcefile', 'binary', function(e, content) {
        if (e) throw e;
        cb(content);
    });
};

render_this(function(content) {
    // tell people here
});
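On newer Node versions the same thing can be expressed with a promise instead of a hand-rolled callback; a minimal sketch using util.promisify (available since Node 8), kept otherwise identical to the snippet above:

var fs = require('fs');
var util = require('util');
var readFile = util.promisify(fs.readFile);

function render_this() {
    // returns a promise that resolves with the file contents
    return readFile('sourcefile', 'binary');
}

render_this().then(function(content) {
    // tell people here
});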

Related

sails.js node.js Parse JSON on controller

In my controller, MapController, I have a function that parses remote JSON files and, via an if-else structure, pushes some values into an array called "parsewebservice". Everything appears to work, but console.log(parsewebservice); does not show the values that were pushed into the array; at the point where I call it, the array is empty. When I put the console.log inside the forEach it does print values, but everything is cluttered and repeated, so that is not the right way.
I'd like to know why the values pushed into "parsewebservice" are not there once the array should have been populated, and what the correct way to do this would be.
Here is my code below:
/**
 * MapController
 *
 * @description :: Server-side logic for managing Maps
 * @help        :: See http://sailsjs.org/#!/documentation/concepts/Controllers
 */
module.exports = {
    index: function(req, res, next) {
        Data.find(function foundData(err, datas) {
            if (err) return next(err);
            var parsewebservice = [];
            datas.forEach(function(data, index) {
                var req = require("request");
                var url = data.address + "?f=pjson";
                req(url, function(err, res, retorno) {
                    if (err) {
                        console.log(err);
                    } else {
                        var camadas = JSON.parse(retorno);
                        if (camadas.mapName) {
                            camadas.layers.forEach(function(campo, i) {
                                if (campo.subLayerIds != null) {
                                } else if (campo.subLayerIds == null) {
                                    parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
                                }
                            });
                        } else if (camadas.serviceDataType) {
                            parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
                        } else if (camadas.type) {
                            parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
                        }
                    }
                });
            });
            console.log(parsewebservice);
        });
    },
};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose your access to the original req object!
So, you need to run a list of async tasks, and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as you've written it. Your console.log at the bottom runs before any of the callbacks you pass to your external requests have run.
The right way to do this is to use promises. It looks like you are using this request library, whose returned requests can only accept callbacks, not be returned as promises. You can create your own promise wrapper for them, or use an alternative library (several are recommended on the page).
I don't want to write a whole intro-to-promises right here, so what I will do is give you a less pretty, but maybe more understandable way to run some code at the completion of all your requests.
Data.find(function foundData(err, datas) {
    if (err) return next(err);
    var parsewebservice = [];

    // here we will write some code that we will run once per returned data
    var processResponse = function(resp) {
        parsewebservice.push(resp);
        if (parsewebservice.length >= datas.length) {
            // we are done, that was the final request
            console.log(parsewebservice);
            return res.send({data: parsewebservice}); // or whatever
        }
    };

    datas.forEach(function(data, index) {
        var request = require("request");
        var url = data.address + "?f=pjson";
        request(url, function(err, res, retorno) {
            // do some processing of retorno...
            // call our function to handle the result
            processResponse(retorno);
        });
    });

    console.log(parsewebservice); // still an empty array here
});
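For completeness, a minimal sketch of the promise-based alternative mentioned above: each request call is wrapped in a Promise and Promise.all waits for all of them (this assumes a Node version with native Promises; Data, next and res are as in the question, and the parsing step is left out):

var request = require("request");

function fetchLayer(data) {
    return new Promise(function(resolve, reject) {
        request(data.address + "?f=pjson", function(err, response, retorno) {
            if (err) return reject(err);
            resolve(retorno); // raw JSON string; parse it in the caller
        });
    });
}

Data.find(function foundData(err, datas) {
    if (err) return next(err);
    Promise.all(datas.map(fetchLayer))
        .then(function(retornos) {
            // build parsewebservice from the parsed responses here,
            // then send it in a single response
            res.send({data: retornos});
        })
        .catch(next);
});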
I solved the problem.
The "request" module is asynchronous, so we need to wait for it to respond and only then send the response to the view.
To do this we created a function called "foo" to contain the forEach and the request calls, gave that function a callback, and finally sent the response (res.view) inside that callback, so that the controller response is only sent after "foo" has finished. That way we were able to parse the JSON data from the "data" collection using forEach and the "request" module and send the objects to the view.
Many thanks to all who have helped me.
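A rough sketch of that approach for later readers, with illustrative names ("foo" is taken from the description above) and the per-response parsing elided:

var request = require("request");

function foo(datas, done) {
    var parsewebservice = [];
    var pending = datas.length;

    datas.forEach(function(data) {
        request(data.address + "?f=pjson", function(err, response, retorno) {
            if (!err) {
                // ... JSON.parse(retorno) and push the relevant entries ...
            }
            // invoke the callback only after the last request has answered
            if (--pending === 0) {
                done(parsewebservice);
            }
        });
    });
}

// inside the controller action:
// foo(datas, function(parsewebservice) {
//     return res.view({data: parsewebservice});
// });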

Which is the "right" way to handle a response that came late

Let's say we have two buttons, each of which calls the following method:
var NUMBER_OF_IMAGE_REQUEST_RETRIES = 3;
var IMAGE_REQUEST_TIMEOUT = 3000;

processImage: function(image_data) {
    var main_response = $q.defer();
    var hash = getImageHash(image_data);
    var requestsCounter = -1;

    var requestImage = function() {
        $http.post(apiUrl, {params: {data: hash}, timeout: IMAGE_REQUEST_TIMEOUT})
            .then(function(response) {
                return main_response.resolve(response.data);
            }, function(error) {
                if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
                    requestImage();
                } else {
                    return main_response.reject();
                }
            });
    };

    requestImage();
    return main_response.promise;
}
The method passes image-related data to the server; the server processes the data and then responds. Every time a user presses a different button, different image_data is sent to the server.
The problem:
The user presses button 1 and the method is called with image_data_1, then immediately presses button 2 and the method is called with image_data_2. The processImage function is called by another method, let's say doSomethingWithTheResponse, which only cares about the user's latest action. But image_data_2 is processed faster by the server, so the client receives the response for image_data_2 before the one for image_data_1, and ends up believing the image_data_1 response relates to the user's latest action, which is not the case. How can we ensure that the client always acts on the response that relates to the user's latest action?
Note: The hash is different for the different image_data requests.
I was thinking something like:
var oldhash = null;

processImage: function(image_data) {
    var main_response = $q.defer();
    var hash = getImageHash(image_data);
    oldhash = hash;
    var requestsCounter = -1;

    var requestImage = function(hash) {
        if (hash === oldhash) {
            $http.post(apiUrl, {params: {data: hash}, timeout: IMAGE_REQUEST_TIMEOUT})
                .then(function(response) {
                    return main_response.resolve(response.data);
                }, function(error) {
                    if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
                        requestImage(hash);
                    } else {
                        return main_response.reject();
                    }
                });
        } else {
            main_response.reject();
        }
    };

    requestImage(hash);
    return main_response.promise;
}
But I am not 100% sure that this is the right approach.
Simply disregard the previous requests.
You can create a repository of requests (an array or dictionary implementation is fine). Call .abort() on the previous ones once another request is made, i.e. when you add the new one to your storage.
If you want a dictionary, there is a good example here (tackles a different topic, though), but here is a modified snippet of his code which is related to your case:
var _pendingRequests = {};

function abortPendingRequests(key) {
    if (_pendingRequests[key]) {
        _pendingRequests[key].abort();
    }
}
Where the key can be, say, a category of your action. You can name constants for it, or it can be just the name of the button pressed. It can even be the URL of your request; completely up to you.
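A hypothetical usage sketch for the dictionary above, assuming jQuery's $.ajax (whose returned jqXHR object exposes .abort()): store the request under its key when firing it, aborting whatever was pending for that key first.

function makeRequest(key, url, data) {
    // drop whatever is still in flight for this key
    abortPendingRequests(key);
    _pendingRequests[key] = $.ajax({url: url, data: data});
    return _pendingRequests[key];
}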
There is an excellent explanation of the whole concept here:
jquery abort() ajax request before sending another
https://stackoverflow.com/a/3313022/594992
If your UI allows for initiating multiple actions while processing of those actions is mutually exclusive, then you should probably use promises, and track active promises.
button1.addEventListener("click", function(evt) {
    startRunning( task1.start() );
});

button2.addEventListener("click", function(evt) {
    startRunning( task2.start() );
});
With a task runner like:
function startRunning(promise) {
    while (runningTasks.length > 0) {
        cancel( runningTasks.shift() );
    }
    runningTasks.push(promise);
}
Your cancel function can come from anything that can deal with promises, like Angular's service.cancelRequest, or you can write your own code that takes the promise and smartly breaks off its operation.
Of course, if you're not using promises, then you probably want to start doing so, but if you absolutely can't you can use a manager object like:
button1.addEventListener("click", function(evt) { task1(); });
button2.addEventListener("click", function(evt) { task2(); });
with
var manager = [];

function cancelAll() {
    while (manager.length > 0) {
        var cancelfn = manager.shift();
        cancelfn();
    }
    return true;
}

function task1() {
    var running = cancelAll();
    manager.push(function() { running = false; });
    asyncDo(something1, function(result) {
        if (!running) return;
        // do your real thing
    });
}

function task2() {
    var running = cancelAll();
    manager.push(function() { running = false; });
    asyncDo(something2, function(result) {
        if (!running) return;
        // do your real thing
    });
}
And you can put cancels on as many aspects as you need. If you need to cancel running XHRs, you might be able to do so; if you have multiple steps in your result handling, cut off at the start of each step, etc.
This sounds like an ideal use-case for promises. Basically, whenever a new request is made, you want to cancel any existing promises. I am not versed in AngularJS, but the following ng-specific links might prove useful:
Angularjs how to cancel resource promise when switching routes
Canceling A Promise In AngularJS
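As a concrete illustration of the cancellation idea in AngularJS: $http accepts a promise as its timeout config option, and resolving that promise aborts the in-flight request. A minimal sketch along those lines (the canceler variable and the simplified, retry-free flow are illustrative, not part of the original code):

var canceler = null;

processImage: function(image_data) {
    // abort whatever request is still in flight; only the latest one survives
    if (canceler) {
        canceler.resolve();
    }
    canceler = $q.defer();

    var hash = getImageHash(image_data);
    return $http.post(apiUrl, {data: hash}, {timeout: canceler.promise})
        .then(function(response) {
            return response.data;
        });
}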

Node module: Don't return until all async requests have finished

I'm new to node and am having trouble understanding node's async behavior. I know this is a very frequently addressed question on SO, but I simply can't understand how to get any of the solutions I've read to work in my context.
I'm writing this module which I want to return an object containing various data.
var myModule = (function () {
    var file,
        fileArray,
        items = [],

        getBlock = function (fileArray) {
            // get the data from the file that I want, return object
            return block;
        },

        parseBlock = function (block) {
            // [find various items in the block, put them into a "myItems" object, then
            // take the items and do a look-up against a web api as below]...
            for (var i = 0, l = myItems.length; i < l; i++) {
                (function (i) {
                    needle.post(MY_URL, qstring, function (err, resp, body) {
                        if (!err && resp.statusCode === 200) {
                            myItems[i].info = body;
                            if (i === (myItems.length - 1)) {
                                return myItems;
                            }
                        }
                    });
                })(i);
            }
        },

        getSomeOtherData = function (fileArray) {
            // parse some other data from the file
        };

    return {
        setFile: function (file) {
            fileArray = fs.readFileSync(file).toString().split('\n');
        },
        render: function () {
            var results = [];
            results.someOtherData = getsomeOtherData();
            var d = getBlock();
            results.items = parseBlock(d);
            return results;
        }
    }
})();
When I call this module using:
myModule.setFile('myFile.txt');
var res = myModule.render();
the variable res has the values from the someOtherData property, but not the items property. I understand that my long-running http request has not completed and that node just zooms ahead and finishes executing, but that's not what I want. I looked at a bunch of SO questions on this, and looked at using Q or queue-async, but with no success.
How do I get this module to return no data until all requests have completed? Or is that even possible in node? Is there a better way to design this to achieve my goal?
The first problem in your example is a naming mismatch: render() calls getsomeOtherData(), but the function is declared as getSomeOtherData, so you will not get a result. Make the two names the same.
Presuming that you have them both the same, your next problem is that you're processing data from a file, so I presume you're reading the contents of the file and then parsing it.
If this is the case, there are sync reads that you can use to force synchronous behaviour; however, I wouldn't recommend this.
You really want to structure your program based on events. You're thinking in the paradigm of 'call a function, and when it returns, continue'. You need to be thinking more along the lines of 'call a process and add a listener; the listener then does the reply handling'.
This works very well for comms. You receive a request. You need to reply based on the contents of a file. So you start the read process with two possible results: it calls the completed function or the error function. Both would then call the reply function to decide how to handle a reply for the request.
It's important not to block as you will be blocking the thread via which all processes are handled.
Hope that helps, if not add some comments and I will try and elaborate.
Have a look at this answer to another question to see a good example of processing a file using the standard listeners. All async calls have a listener concept for what can happen. All you need to do is pass a function name (or an anonymous function if you prefer) to them when you call them.
A quick example (based on the node.js stream.Readable API):
fs.createReadStream(filename, {
    'flags': 'r'
}).addListener("data", function(chunk) {
    // do your processing logic
}).addListener("end", function(chunk) {
    // do your end logic
    response(...);
}).addListener("error", function(chunk) {
    // do your error logic
    response(...);
}).addListener("close", function() {
    // do your close logic
});

function response(info) {
}
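Applied to the module in the question, one option is to give render() a callback and count the outstanding needle requests, only reporting back once the last one has answered. A rough sketch (error handling omitted; parseBlockItems is a hypothetical helper standing in for the synchronous item-extraction part of parseBlock, everything else is from the module above):

render: function (callback) {
    var results = {};
    results.someOtherData = getSomeOtherData(fileArray);

    var myItems = parseBlockItems(getBlock(fileArray));
    var pending = myItems.length;

    myItems.forEach(function (item, i) {
        needle.post(MY_URL, qstring, function (err, resp, body) {
            if (!err && resp.statusCode === 200) {
                myItems[i].info = body;
            }
            // only report back once the last response has arrived
            if (--pending === 0) {
                results.items = myItems;
                callback(results);
            }
        });
    });
}

// caller:
// myModule.render(function (res) { /* res.items is populated here */ });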

How do I run an asynchronous 'find' in a loop while incrementing the find parameter so I can generate unique custom id's?

I'm new to mongoose/mongodb and I am trying to do some sort of error handling with my document save.
I am trying to create a stub id to store into the db for easier data retrieval later on (and also to put into the url bar so people can send links to my website to that particular page more easily -- like jsfiddle or codepen).
Basically I want to search for a document with a page_id and if it exists, I want to regenerate that page_id and search until it gets to one that's unused like this:
while (!done) {
    Model.findOne({'page_id': some_hex}, function (err, doc) {
        if (doc) {
            some_hex = generate_hex();
        } else {
            done = true;
        }
    });
}
model.page_id = some_hex;
model.save();
However, since mongoose is asynchronous, the while loop will pretty much run indefinitely while the find works in the background until it finds something. This will kill the resources on the server.
I'm looking for an efficient way to retry save() when it fails (with a change to page_id). Or to try and find an unused page_id. I have page_id marked as unique:true in my schema.
Retrying should be performed asynchronously:
var tryToSave = function(doc, callback) {
    var instance = new Model(doc);
    instance.page_id = generate_hex();
    instance.save(function(err) {
        if (err) {
            if (err.code === 11000) { // 'duplicate key error'
                // retry with a new page_id
                return tryToSave(doc, callback);
            }
            // another error
            return callback(err);
        }
        // it worked!
        callback(null, instance);
    });
};

// And somewhere else:
tryToSave(doc, function(err, instance) {
    if (err) ...; // handle errors
    ...
});
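If you are worried about retrying forever (say, the hex space is nearly exhausted), the same pattern can carry an attempt counter; a small, illustrative variation on the function above (the limit of 5 is arbitrary):

var tryToSave = function(doc, callback, attempts) {
    attempts = attempts || 0;
    var instance = new Model(doc);
    instance.page_id = generate_hex();
    instance.save(function(err) {
        if (err) {
            if (err.code === 11000 && attempts < 5) { // duplicate key, retry
                return tryToSave(doc, callback, attempts + 1);
            }
            return callback(err); // another error, or too many collisions
        }
        callback(null, instance);
    });
};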

Handling multiple websocket subscriptions, but only one connection object, in a single Javascript function

Note: I'm using Autobahn.js for the client-side WAMP implementation, and when.js for promises.
I'm trying to create re-usable code so that only one websocket 'session', or connection exists, and whenever a dev wants to subscribe to a topic using autobahn, they can just use the current connection object to do so if it already exists; else a new one is created.
My issue is that, if the connection already exists, I have to use a setTimeout() to wait for a second to make sure it's actually connected, and then duplicate all the subscription code - I don't like this at all.
Here's my current code:
(function() {
    var connection = null;

    subscribeTo('subject', __userId, __token, function(onconnect) {
        console.log('Yay, connected');
    });

    function subscribeTo(subject, userId, token, onConnect, onDisconnect) {
        if (connection === null) {
            connection = new ab.Session('ws://localhost:8080', function(onopen) {
                connection.subscribe(JSON.stringify({subject: subject, userId: userId, token: token}), function(subscription, data) {
                    data = $.parseJSON(data);
                    // Do something with the data ...
                });
                if (typeof onConnect === 'function') {
                    onConnect();
                }
            }, function(onclose) {
                if (typeof onDisconnect === 'function') {
                    onDisconnect();
                }
            }, { 'skipSubprotocolCheck': true });
        }
    }
})();
Great. Now the issue is, what if I have another subscribeTo() straight after the previous one? Connection won't be null any more, but it also won't be connected. So the following is what I have to do:
// subscribeTo() multiple times at the top ...
subscribeTo('subject', __userId, __token, function(onconnect) {
    console.log('Yay, connected');
});

subscribeTo('anothersubject', __userId, __token, function(onconnect) {
    console.log('Yay, connected');
});

// The first one works, the second one requires a setTimeout() for the connection
// if connection is NOT null...
} else {
    setTimeout(function() {
        connection.subscribe(topic... etc...) // Really!?
    }, 1000);
}
Remove the setTimeout() and you'll get an error saying that "Autobahn is not connected".
Is there a better way to have a single, re-usable connection, without code-duplication, or am I doomed to create a new connection for each subscription because of the promises (perhaps I can use promises to my advantage here, although I haven't used them before this)?
This is all way too complex, unneeded and wrong. You want to do your subscribes in response to a session being created:
var session = null;

function start() {
    // turn on WAMP debug output
    //ab.debug(true, false, false);

    // use jQuery deferreds instead of the bundled when.js
    //ab.Deferred = $.Deferred;

    // Connect to WAMP server ..
    //
    ab.launch(
        // WAMP app configuration
        {
            // WAMP URL
            wsuri: "ws://localhost:9000/ws",
            // authentication info
            appkey: null, // authenticate as anonymous
            appsecret: null,
            appextra: null,
            // additional session configuration
            sessionConfig: {maxRetries: 10, sessionIdent: "My App"}
        },
        // session open handler
        function (newSession) {
            session = newSession;
            main();
        },
        // session close handler
        function (code, reason, detail) {
            session = null;
        }
    );
}

function main() {
    session.subscribe("http://myapp.com/mytopic1", function(topic, event) {});
    session.subscribe("http://myapp.com/mytopic2", function(topic, event) {});
    session.subscribe("http://myapp.com/mytopic3", function(topic, event) {});
}

start();
The ab.launch helper will manage automatic reconnects for you (and also do WAMP-CRA authentication if required). The session open handler, and hence main(), is then automatically invoked again when a reconnect happens. Using a raw Session object is not recommended (unless you know what you are doing).
Also: topics must be URIs from the http or https scheme. Using serialized objects (JSON) is not allowed.
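Applied to the subscribeTo example from the question, that means subscribing to a URI topic and treating the event payload as a plain object rather than a JSON-encoded string; a minimal, illustrative sketch (the topic URI is made up):

function main() {
    session.subscribe("http://myapp.com/user/" + __userId, function(topic, event) {
        // event already arrives as a deserialized object, no $.parseJSON needed
        console.log(topic, event);
    });
}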
