Wait for functions in for loop to finish - javascript

I have a for loop in NodeJS. Inside the loop is a function that gets data from a database.
// player2.schedule is an array of objects shaped like this:
// {
//     opponent: ..., // string
//     datetime: ...  // date object
// }
var schedule = "";
for (var i = 0; i < player2.schedule.length; i++) {
    User.findOne({ id: player2.schedule[i].opponent }).select("name").exec(function(err, opponent) {
        schedule += opponent.name;
    });
}
The loop appends the result of the database call to the schedule variable on each iteration.
Now my problem is that any code after the for loop that relies on this schedule variable can't use it. Because the variable is only updated inside the callback of each database call, which runs asynchronously, the code after the for loop runs before the variable has been updated.
How can I make sure the next batch of code waits for the for loop and callbacks to finish first?

Here is a simple example using async:
var async = require('async');

async.each(player2.schedule, function(item, cb) {
    User.findOne({ id: item.opponent })
        .select("name")
        .exec(function(err, opponent) {
            if (err)
                return cb(err);
            schedule += opponent.name;
            cb();
        });
}, function(err) {
    if (err)
        throw err;
    console.log('All done');
});
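If you are using Mongoose 4 or newer, .exec() called without a callback already returns a promise, so the same aggregation can be done with Promise.all and no extra library. A minimal sketch under that assumption:
var queries = player2.schedule.map(function(item) {
    // no callback passed to exec(), so it returns a promise (Mongoose 4+)
    return User.findOne({ id: item.opponent }).select("name").exec();
});

Promise.all(queries).then(function(opponents) {
    // opponents arrive in the same order as player2.schedule
    var schedule = opponents.map(function(opponent) {
        return opponent.name;
    }).join("");
    console.log('All done', schedule);
}).catch(function(err) {
    console.error(err);
});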

You can use the async library's whilst() function to wait for all queries to finish:
var async = require('async');
var i = 0;
var len = player2.schedule.length;

async.whilst(
    function () { return i < len; },
    function (callback) {
        User.findOne({ id: player2.schedule[i].opponent }).select("name").exec(function(err, opponent) {
            schedule += opponent.name;
            i++;
            callback(null); // assume no error (null)
        });
    },
    function (err) {
        // this function is executed after all queries are done
        // schedule now has everything from the loop
    }
);
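On Node 7.6 or newer you could also write the same sequential logic with async/await instead of whilst(), again assuming a Mongoose-style .exec() that returns a promise when called without a callback. A sketch:
async function buildSchedule() {
    var schedule = "";
    for (var i = 0; i < player2.schedule.length; i++) {
        // each iteration waits for the previous query before starting the next one
        var opponent = await User.findOne({ id: player2.schedule[i].opponent })
            .select("name")
            .exec();
        schedule += opponent.name;
    }
    return schedule;
}
Any code that depends on the result then goes inside buildSchedule() after the loop, or in a .then() on the promise it returns.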

Related

How do i nest a promise inside another promise function in node.js?

I have a file whose code is wrapped in a Promise function. I have a database operation inside this function which requires another promise too. Please check the code below:
var p;
var ted = dep.map(function(name) {
    return new Promise(function(resolve, reject) {
        /* ...list of other tasks... */
        for (var i = 0; i < 3; i++) {
            p = Promise.resolve(savemongo(myobj, str)); // this is an async function. How do I wait till this operation is complete and then move to the next?
        }
        resolve();
    });
});
Now I have to export this module to a different file. I'm using the code below:
module.exports = Promise.all([ted, p]);
How do I wait until my savemongo function is complete?
Surrounding the whole thing by one new Promise call doesn't help anything. Inside it, you'd still have callback hell. And no, throwing Promise.resolve() at a function that doesn't return anything doesn't help either.
You will need to promisify the asynchronous primitives, i.e. the smallest parts that are asynchronous. In your case, that's distance.matrix and mongo's connect+insert:
function getMatrix(m, o, d) {
    return new Promise(function(resolve, reject) {
        m.matrix(o, d, function(err, distances) {
            if (err) reject(err);
            else resolve(distances);
        });
    });
}

function save(url, store, k) {
    // cramming connect+insert in here is not optimal but let's not get into unnecessary detail
    return new Promise(function(resolve, reject) {
        MongoClient.connect(url, function(err, db) {
            if (err)
                reject(err);
            else
                db.collection(k).insert(store, function(err, results) {
                    if (err) reject(err);
                    else resolve(results);
                    db.close();
                });
        });
    });
}
Now that we have those, we can actually use them and combine our promises into what you actually are looking for:
module.exports = Promise.all(dep.map(function(name) {
    distance.departure_time(name);
    return getMatrix(distance, origins, destinations).then(function(distances) {
        if (!distances) throw new Error('no distances');
        var promises = [];
        if (distances.status == 'OK') {
            for (var i = 0; i < origins.length; i++) {
                for (var j = 0; j < destinations.length; j++) {
                    var origin = distances.origin_addresses[i];
                    var destination = distances.destination_addresses[j];
                    if (distances.rows[0].elements[j].status == 'OK') {
                        var duration = distances.rows[i].elements[j].duration_in_traffic.value;
                        var myobj = {
                            destination: destination,
                            departure_time: name,
                            duration: duration
                        };
                        var str = destination.replace(/[,\s]+/g, '');
                        promises.push(save(url, myobj, str));
                        //            ^^^^^^^^^^^^^^^^^^^^^
                    }
                }
            }
        }
        return Promise.all(promises); // now wait for all save results
    });
}));
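In the file that requires this module you then wait on the exported promise before using the results. A minimal sketch (the './distances' path and the logging are assumptions, not part of the original code):
// hypothetical consumer file
var allSaves = require('./distances'); // the Promise.all(...) exported above

allSaves.then(function(results) {
    // one entry per departure time in dep; each entry is an array of insert results
    console.log('all saves finished for', results.length, 'departure times');
}).catch(function(err) {
    console.error('at least one request or insert failed:', err);
});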

node.js promises not forcing order execution of functions

I have three functions that I want to use promises to force them to execute in order.
function 1 sends an http request, fetches JSON data and saves it to a file
function 2 loops through that file and updates the database according to the differing or missing values
function 3 will loop through the newly updated database and create a 2nd JSON file.
Currently function 1 works perfectly on its own with a setInterval of 30 minutes.
I want to start function 2 when function 1 has finished, then function 3 after function 2 has finished.
Using promises, I am trying to attach function 2 to a simple "Finished" log to understand how to use promises, but without much success. The items from the for loop are logged, but my Finished/err log appears before the for loop output, which shouldn't be happening. Any suggestions?
function readJson() {
    return new Promise(function() {
        fs.readFile(__dirname + "/" + "bitSkin.json", 'utf8', function read(err, data) {
            if (err) { throw err; }
            var bitCon = JSON.parse(data);
            for (var i = 0; i < 7; i++) { //bitCon.prices.length; i++) {
                var price = bitCon.prices[i].price
                var itemName = bitCon.prices[i].market_hash_name;
                (function() {
                    var iNameCopy = itemName;
                    var priceCopy = price;
                    logger.info(iNameCopy);
                }());
            }
        });
    });
};
function fin() {
    logger.info("Finished");
}
readJson().then(fin(), console.log("err"));
Promises have no magical powers. They don't magically know when async code inside them is done. If you create a promise, you yourself have to resolve() or reject() it when the async code has an error or completes.
Then, in addition, you have to pass a function reference to a .then() handler, not the result of executing a function. .then(fin()) will call fin() immediately and pass its return value to .then(), which is not what you want. You want something like .then(fin).
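A quick illustration of the difference:
readJson().then(fin());  // runs fin() immediately and passes its return value (undefined) to .then()
readJson().then(fin);    // passes the function itself; it runs only when the promise resolves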
Here's how you can resolve and reject the promise you created:
function readJson() {
    return new Promise(function(resolve, reject) {
        fs.readFile(__dirname + "/" + "bitSkin.json", 'utf8', function read(err, data) {
            if (err) { return reject(err); }
            var bitCon = JSON.parse(data);
            for (var i = 0; i < 7; i++) { //bitCon.prices.length; i++) {
                var price = bitCon.prices[i].price
                var itemName = bitCon.prices[i].market_hash_name;
                (function() {
                    var iNameCopy = itemName;
                    var priceCopy = price;
                    logger.info(iNameCopy);
                }());
            }
            resolve(bitCon);
        });
    });
}
And, you could use that like this:
function fin() {
    logger.info("Finished");
}

readJson().then(fin, function(err) {
    console.log("err", err);
});
Summary of changes:
Added resolve, reject arguments to Promise callback so we can use them
Called reject(err) when there's an error
Called resolve() when the async code is done.
Passed a function reference for both .then() handlers.
FYI, when creating a promise wrapper around an async function, it is generally better to wrap just the function itself. This makes the wrapper 100% reusable and puts more of your code in the promise architecture which generally streamlines things and makes error handling easier. You could fix things up that way like this:
fs.readFilePromise = function(file, options) {
    return new Promise(function(resolve, reject) {
        fs.readFile(file, options, function(err, data) {
            if (err) return reject(err);
            resolve(data);
        });
    });
};

function readJson() {
    return fs.readFilePromise(__dirname + "/" + "bitSkin.json", 'utf8').then(function(data) {
        var bitCon = JSON.parse(data);
        bitCon.prices.forEach(function(item) {
            logger.info(item.market_hash_name);
        });
        return bitCon;
    });
}
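On Node 8 or newer you don't even need to write this wrapper by hand: util.promisify generates an equivalent promise-returning function, and readJson() above works unchanged with it. Just a note, not part of the original answer:
var util = require('util');
var fs = require('fs');

// drop-in replacement for the hand-written fs.readFilePromise above
fs.readFilePromise = util.promisify(fs.readFile);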

Asynchronously Write Large Array of Objects to Redis with Node.js

I created a Node.js script that creates a large array of randomly generated test data and I want to write it to a Redis DB. I am using the redis client library and the async library. Initially, I tried executing a redisClient.hset(...) command within the for loop that generates my test data, but after some Googling, I learned the Redis method is asynchronous while the for loop is synchronous. After seeing some questions on StackOverflow, I can't get it to work the way I want.
I can write to Redis without a problem with a small or moderately large array, such as one with 100,000 items. However, it does not work well when I have an array of 5,000,000 items. I end up running out of memory because the redis commands seem to queue up but aren't executed until after async.each(...) completes, and the node process never exits. How do I get the Redis client to actually execute the commands as I call redisClient.hset(...)?
Here is a fragment of the code I am working with.
var redis = require('redis');
var async = require('async');

var redisClient = redis.createClient(6379, '192.168.1.150');
var testData = generateTestData();

async.each(testData, function(item, callback) {
    var someData = JSON.stringify(item.data);
    redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
        console.log("Item was persisted. Result: " + reply);
    });
    callback();
}, function(err) {
    if (err) {
        console.error(err);
    } else {
        console.log("Items have been persisted to Redis.");
    }
});
You could call eachLimit to ensure you are not executing too many redisClient.hset calls at the same time.
To avoid overflowing the call stack you could do setTimeout(callback, 0); instead of calling the callback directly.
edit:
Forget what I said about setTimeout. All you need to do is call the callback at the right place. Like so:
redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
    console.log("Item was persisted. Result: " + reply);
    callback();
});
You may still want to use eachLimit and try out which limit works best.
By the way - async.each is supposed to be used only on code that schedules the invocation of the callback in the javascript event queue (e.g. timer, network, etc.). Never use it on code that calls the callback immediately, as was the case in your original code.
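For example, with the callback moved inside the hset handler, an async.eachLimit version could look like this; the limit of 100 is only an illustrative guess that you would tune for your setup:
async.eachLimit(testData, 100, function(item, callback) {
    var someData = JSON.stringify(item.data);
    redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
        if (err) return callback(err);
        console.log("Item was persisted. Result: " + reply);
        callback(); // only report this item done once redis has answered
    });
}, function(err) {
    if (err) {
        console.error(err);
    } else {
        console.log("Items have been persisted to Redis.");
    }
});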
edit:
You can implement your own eachLimit function that takes a generator as its first argument instead of an array. Then you write a generator function to create the test data. For that to work, node needs to be run with "node --harmony code.js".
function eachLimit(generator, limit, iterator, callback) {
    var isError = false, j;
    function startNextSetOfActions() {
        var elems = [];
        for (var i = 0; i < limit; i++) {
            j = generator.next();
            if (j.done) break;
            elems.push(j.value);
        }
        var activeActions = elems.length;
        if (activeActions === 0) {
            callback(null);
        }
        elems.forEach(function(elem) {
            iterator(elem, function(err) {
                if (isError) return;
                else if (err) {
                    callback(err);
                    isError = true;
                    return;
                }
                activeActions--;
                if (activeActions === 0) startNextSetOfActions();
            });
        });
    }
    startNextSetOfActions();
}
function* testData() {
    while(...) {
        yield new Data(...);
    }
}
eachLimit(testData(), 10, function(item, callback) {
    var someData = JSON.stringify(item.data);
    redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
        if (err) callback(err);
        else {
            console.log("Item was persisted. Result: " + reply);
            callback();
        }
    });
}, function(err) {
    if (err) {
        console.error(err);
    } else {
        console.log("Items have been persisted to Redis.");
    }
});

NodeJS console.log executing before executing the FOR LOOP

I am trying to push some values to an array by fetching data from the Jenkins API, like below.
buildNum = 14;
async.waterfall([
    function(callback) {
        for (var i = buildNum; i > (buildNum - 5); i--) {
            (function() {
                jenkins.build_info('BuildDefinitionRequest', i, function(err, data) {
                    if (err) { return console.log(err); }
                    var tmpObj = {};
                    tmpObj.jobID = data.fullDisplayName;
                    tmpObj.result = data.result;
                    tmpObj.dateTime = data.id;
                    console.log(tmpObj);
                    finalArray.push(tmpObj);
                });
            })();
        }
        callback(null, finalArray, 1);
    },
    function(finalArray, value, callback) {
        console.log(finalArray, value);
        callback(null, 'done');
    }
], function(err, result) {
});
But "callback(null, finalArray, 1);" is getting called before the for loop finish its execution.
When I am printing the value of "finalArray" inside the for loop I am able to see all the values.
Technically the for loop has finished executing, but the jenkins.build_info calls haven't. You cannot make async calls inside of a for loop like that and expect the for loop to only finish after all the calls are complete. You're already using async, so this is an easy fix. I would do something like this:
var buildNum = 14;
var builds = [];
// just builds a collection for async to operate on
for (var i = buildNum; i > (buildNum - 5); i--) {
    builds.push(i);
}
var finalArray = [];
async.each(builds, function(build, next) {
    jenkins.build_info('BuildDefinitionRequest', build, function(err, data) {
        if (err) { return next(err); }
        var job = {
            jobID: data.fullDisplayName,
            result: data.result,
            dateTime: data.id
        };
        finalArray.push(job);
        next();
    });
}, function(err) {
    // this won't be called until all the jenkins.build_info calls have completed, or there is an error
    console.log(finalArray);
});
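One caveat: async.each makes no guarantee about the order in which the callbacks fire, so finalArray may not end up sorted by build number. If the order matters, async.map collects results in the same order as the input array. A sketch of that variant:
async.map(builds, function(build, next) {
    jenkins.build_info('BuildDefinitionRequest', build, function(err, data) {
        if (err) { return next(err); }
        next(null, {
            jobID: data.fullDisplayName,
            result: data.result,
            dateTime: data.id
        });
    });
}, function(err, finalArray) {
    if (err) { return console.log(err); }
    // finalArray[0] corresponds to builds[0], finalArray[1] to builds[1], and so on
    console.log(finalArray);
});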

How to ensure that a statement gets executed after a loop completes?

Below is a snapshot of my code from routes/index.js
exports.index = function(req, res) {
    var results = new Array();
    for (var i = 0; i < 1000; i++) {
        // do database query or time intensive task here based on i
        // add each result to the results array
    }
    res.render('index', { title: 'Home !', results: results });
};
If I run this code, due to the asynchronous nature of JavaScript the last line gets executed before the loop has been completely processed, so my webpage doesn't have the results. How do I structure this in such a way that the page gets loaded once the queries are completed?
Updated
Inside the loop I have database code ( Redis ) like the below -
client.hgetall("game:" + i, function(err, reply) {
    results.push(reply.name);
});
Use the async library:
exports.index = function(req, res) {
    var results = new Array();
    async.forEach(someArray, function(item, callback) {
        // call this callback when any asynchronous processing is done and
        // this iteration of the loop can be considered complete
        callback();
    }, function(err) {
        // function to run after loop has completed
        if (!err) res.render('index', { title: 'Home !', results: results });
    });
};
If one of the tasks inside the loop is asynchronous, you need to pass the asynchronous task a callback that calls callback(). If you don't have an array to use in forEach, just populate one with integers 1-1000.
Edit: Given your latest code, just put the async callback() right after results.push(reply.name).
exports.index = function(req, res) {
    var events = require("events");
    var e = new events.EventEmitter();
    e.expected = 1000;
    e.finished = 0;
    e.results = [];
    e.on("finishedQuery", (function(err, r) {
        this.finished += 1;
        this.results.push(r && r.name);
        if (this.finished === this.expected) {
            res.render('index', { title: 'Home !', results: this.results });
        }
    }).bind(e));
    for (var i = 0; i < e.expected; i++) {
        client.hgetall("game:" + i, function(err, reply) {
            e.emit("finishedQuery", err, reply);
        });
    }
};
Of course, the above code doesn't handle [1 or more] errors. You'd need to add logic that only responds (1) on the first error or (2) if no errors occur.
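A minimal sketch of that "respond only once, on the first error" logic, assuming an Express 4-style res (the responded flag is an addition, not part of the code above):
e.on("finishedQuery", (function(err, r) {
    if (this.responded) return; // the request has already been answered
    if (err) {
        this.responded = true;
        return res.status(500).send("query failed");
    }
    this.finished += 1;
    this.results.push(r && r.name);
    if (this.finished === this.expected) {
        this.responded = true;
        res.render('index', { title: 'Home !', results: this.results });
    }
}).bind(e));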
