What can I use to replace nested async callbacks? - javascript

Let's say I want to send an email and then update the database; both actions are async. This is how I would normally write it:
send_email(function(err, id){
    if(err){
        console.log("error");
    }else{
        update_database(id, function(err, id){
            if(err){
                console.log("error");
            }else{
                console.log("success");
            }
        });
    }
});
I would like to do this instead with middleware.
var mid = {};

mid.send_email = function(){
    return function(next){
        send_email(function(err, id){
            if(err){
                console.log("error");
            }else{
                next(id);
            }
        });
    }
}

mid.update_database = function(){
    return function(id, next){
        update_database(id, function(err, id){
            if(err){
                console.log("error");
            }else{
                next(id);
            }
        });
    }
}

mid.success = function(){
    return function(id, next){
        console.log("success");
        next(id);
    }
}
Stacking the middleware.
middleware.use(mid.send_email());
middleware.use(mid.update_database());
middleware.use(mid.success());
There are two main questions at hand.
How can I use middleware in place of nested callbacks?
Is it possible to pass variables to next()?
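For reference, a minimal sketch of the kind of runner the question describes could look like this; the middleware object and its use/run methods are hypothetical, not an existing library. Each stacked function receives whatever arguments the previous one passed to next(), plus next itself as the last argument:
var middleware = {
    stack: [],
    use: function (fn) {
        this.stack.push(fn);
    },
    run: function () {
        var stack = this.stack, index = 0;
        // call each middleware in turn, forwarding whatever arguments
        // the previous one passed to next(), plus next itself
        function next() {
            var fn = stack[index++];
            if (!fn) return;
            var args = Array.prototype.slice.call(arguments);
            fn.apply(null, args.concat(next));
        }
        next();
    }
};
After stacking the three functions with middleware.use(...) as above, calling middleware.run() starts the chain, and each next(id) hands the id to the following function.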

What you want is to be able to handle asynchronous control flow. A lot of JS libraries can help you achieve this. You can try the Async library with the waterfall function, since you want to be able to pass variables to the next function that will be executed:
https://github.com/caolan/async#waterfall
"Runs an array of functions in series, each passing their results to the next in the array. However, if any of the functions pass an error to the callback, the next function is not executed and the main callback is immediately called with the error."
Example:
async.waterfall([
    function(callback){
        callback(null, 'one', 'two');
    },
    function(arg1, arg2, callback){
        callback(null, 'three');
    },
    function(arg1, callback){
        // arg1 now equals 'three'
        callback(null, 'done');
    }
], function (err, result) {
    // result now equals 'done'
});
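Applied to the email/database flow from the question, a waterfall version might look roughly like this (assuming send_email and update_database follow the usual error-first callback convention):
async.waterfall([
    function (callback) {
        // send_email is assumed to call back as callback(err, id)
        send_email(callback);
    },
    function (id, callback) {
        // receives the id passed by the previous step
        update_database(id, callback);
    }
], function (err, id) {
    if (err) {
        console.log("error");
    } else {
        console.log("success");
    }
});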

You are probably better off using CommonJS module.exports.
You can create a file like this:
module.exports = function (){
    function sendEmail(doneCallback){
        // do your stuff, then when you are done:
        if(!err){
            doneCallback(whatever, args, you, need);
        }
    }

    function updateDB(success){
        // do your stuff, then when you are done:
        success(whatever, args, you, need);
    }

    return {
        send: sendEmail,
        update: updateDB
    };
};
Then in your server.js:
var lib = require('./mylib.js')(); // the module exports a factory function, so invoke it
lib.send(function(result){
    console.log(result);
});
This is a similar pattern, and it might give you a better idea of what I mean. It consists of the library baking a function and passing it to whoever needs to chain, like this (a more down-to-earth example, client-side this time):
ui.bistate($('#mybutton'), function(restore){
    $.ajax({
        url: '/api/1.0/catfood',
        type: 'PUT',
        data: {
            catfood: {
                price: 1.23,
                name: 'cheap',
                text: 'Catzy'
            }
        }
    }).done(function(res){
        // stuff with res
        restore();
    });
});
and in the library, this is how restore is provided:
var ui = function(){
    function bistate(button, action) {
        var originalText = button.data('text'),
            disabledText = button.data('text-disabled');

        function restore(){
            button.prop('disabled', false);
            button.text(originalText);
        }

        function disable(){
            button.prop('disabled', true);
            button.text(disabledText);
        }

        button.on('click', function(){
            disable();
            action(restore);
        });

        restore();
    }

    return {
        bistate: bistate
    };
}();
This allows the consumer to control when to restore the button, and relieves the library from having to handle complex cases where the consumer wants to do an async operation in between.
In general the point is: passing callbacks back and forth is huge and not used widely enough.

I have been using Queue.js in my work for some time.
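Assuming that refers to mbostock's queue.js (published on npm as queue-async, later d3-queue; the package name is an assumption here), the basic pattern is to defer error-first tasks and await the combined results; a rough sketch:
var fs = require('fs');
var queue = require('queue-async'); // npm name of queue.js at the time (assumption)

queue()
    .defer(fs.stat, __dirname + '/package.json') // defer(task, args...) appends an error-first callback
    .defer(fs.stat, __dirname + '/server.js')
    .await(function (error, stats1, stats2) {
        if (error) throw error;
        console.log(stats1.size, stats2.size);
    });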

Related

How to make some synchronous code run before some other asynchronous code?

I have a function like this:
var download = function(url, name) {
    http.get(url, function(response) {
        // part 1: create a new folder if it doesn't exist
        dir = './name';
        if (!fs.existsSync(dir)){
            fs.mkdirSync(dir);
        }
        // part 2: download and save file into that folder
        response.on('data', function (data) {
            fs.appendFileSync(dir, data);
        });
    })
}
I want part 1 to finish before part 2 runs (so that the dir is ready for part 2). How can I do that?
(In the code above, as far as I know (I am new to node.js), both parts will run simultaneously, so I'm not sure that part 1 will always finish before part 2 runs.)
both parts will run simultaneously
No, they will not. existsSync and mkdirSync are blocking calls, so the event handler is only attached after they have executed.
Still, we should take advantage of asynchronicity whenever applicable. In this case, you can use the asynchronous exists and mkdir counterparts.
So, your code can be loosely refactored like this:
function download(url, name) {
    function attachAppender(filename, response) {
        response.on('data', function (data) {
            fs.appendFile(filename, data, function (err) {
                response.statusCode = err ? 500 : 200;
                response.end();
            });
        });
    }

    http.get(url, function (response) {
        var dir = './name';
        fs.exists(dir, function (exists) {
            if (!exists) {
                fs.mkdir(dir, function (err) {
                    if (err) {
                        response.statusCode = 500;
                        response.end();
                    } else {
                        // pass the actual full file name
                        attachAppender(filename, response);
                    }
                });
            } else {
                attachAppender(filename, response);
            }
        });
    });
}
Note: fs.exists is deprecated and may be removed soon. Better to use fs.stat instead.
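A sketch of the same check rewritten with fs.stat, treating an ENOENT error as "the directory does not exist" and reusing the attachAppender helper from above (filename is still left to be filled in, as in the original):
fs.stat(dir, function (err, stats) {
    if (err && err.code === 'ENOENT') {
        // the directory does not exist yet, so create it
        fs.mkdir(dir, function (err) {
            if (err) {
                response.statusCode = 500;
                response.end();
            } else {
                attachAppender(filename, response);
            }
        });
    } else if (err) {
        // some other error (permissions, etc.)
        response.statusCode = 500;
        response.end();
    } else {
        attachAppender(filename, response);
    }
});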
You are using sync functions, so the calls are blocking. However, as thefoureye mentioned, it is better to use the async versions for performance reasons.
If you want to avoid callback hell (i.e. your code becoming more and more difficult to read as you chain asynchronous calls), you can use a library such as async.js, which is written with the intent of making such code easier to write (and, of course, easier to read).
Here is an example taken from the unit tests of async.js: each async function is called after the other.
var series = function(test){
    var call_order = [];
    async.series([
        function(callback){
            setTimeout(function(){
                call_order.push(1);
                callback(null, 1);
            }, 25);
        },
        function(callback){
            setTimeout(function(){
                call_order.push(2);
                callback(null, 2);
            }, 50);
        },
        function(callback){
            setTimeout(function(){
                call_order.push(3);
                callback(null, 3, 3);
            }, 15);
        }
    ],
    function(err, results){
        test.ok(err === null, err + " passed instead of 'null'");
        test.same(results, [1, 2, [3, 3]]);
        test.same(call_order, [1, 2, 3]);
        test.done();
    });
}
There are lots of other initiatives to make series of async calls easier to read and write (async/await and fibers.js, for example).
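For comparison, here is a rough async/await sketch of the same sequential intent, assuming a Node version that provides fs.promises; the file name inside the directory is a made-up placeholder:
const http = require('http');
const fs = require('fs').promises;

async function download(url, dir) {
    // part 1: create the folder if it doesn't already exist
    try {
        await fs.mkdir(dir);
    } catch (err) {
        if (err.code !== 'EEXIST') throw err;
    }
    // part 2: only starts after part 1 has finished
    http.get(url, (response) => {
        response.on('data', (data) => {
            // 'download.bin' is a placeholder file name inside the directory
            fs.appendFile(`${dir}/download.bin`, data).catch(console.error);
        });
    });
}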

Node.js: How to run asynchronous code sequentially

I have this chunk of code
User.find({}, function(err, users) {
    for (var i = 0; i < users.length; i++) {
        // pseudocode
        Friend.find({
            'user': curUser._id
        }, function(err, friends) { // ** ANOTHER CALLBACK **
            for (var i = 0; i < friends.length; i++) {
                // pseudocode
            }
            console.log("HERE I'm CHECKING " + curUser);
            if (curUser.websiteaccount != "None") {
                request.post({
                    url: 'blah',
                    formData: blah
                }, function(err, httpResponse, body) { // ** ANOTHER CALLBACK **
                    // pseudocode
                    sendMail(friendResults, curUser);
                });
            } else {
                // pseudocode
                sendMail(friendResults, curUser);
            }
        });
        console.log("finished friend");
        console.log(friendResults);
        sleep.sleep(15);
        console.log("finished waiting");
        console.log(friendResults);
    }
});
There are a couple of asynchronous things happening here. For each user, I want to find their relevant friends and concat them to a variable. I then want to check if that user has a website account, and if so, make a post request and grab some information there. The only thing is that everything happens out of order, since the code doesn't wait for the callbacks to finish. I've been using a sleep, but that doesn't solve the problem either, since the output is still jumbled.
I've looked into async, but these functions are intertwined and not really separate, so I wasn't sure how it'd work with async either.
Any suggestions to get this code to run sequentially?
Thanks!
I prefer the promise module (https://www.npmjs.com/package/promise) to q because of its simplicity:
var Promises = require('promise');

var promise = new Promises(function (resolve, reject) {
    // do some async stuff
    if (success) {
        resolve(data);
    } else {
        reject(reason);
    }
});

promise.then(function (data) {
    // function called when first promise returned
    return new Promises(function (resolve, reject) {
        // second async stuff
        if (success) {
            resolve(data);
        } else {
            reject(reason);
        }
    });
}, function (reason) {
    // error handler
}).then(function (data) {
    // second success handler
}, function (reason) {
    // second error handler
}).then(function (data) {
    // third success handler
}, function (reason) {
    // third error handler
});
As you can see, you can continue like this forever. You can also return simple values instead of promises from the async handlers and then these will simply be passed to the then callback.
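For example, returning a plain value from a then handler simply hands it to the next then callback (data.id is just a placeholder here):
promise
    .then(function (data) {
        // returning a plain (non-promise) value ...
        return data.id;
    })
    .then(function (id) {
        // ... arrives directly as the argument of the next then callback
        console.log('got id', id);
    });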
I rewrote your code so it is a bit easier to read. You have a few choices of what to do if you want to guarantee sequential execution:
1. Use the async library. It provides some helper functions that run your code in series, particularly this: https://github.com/caolan/async#seriestasks-callback
2. Use promises to avoid making callbacks, and simplify your code APIs. Promises are a new feature in JavaScript, although, in my opinion, you might not want to do this right now. There is still poor library support for promises, and it's not possible to use them with a lot of popular libraries :(
Now, in regards to your program: there's actually nothing wrong with your code right now (assuming you don't have async code in the pseudocode blocks). It will execute as expected.
I'd recommend using async for your sequential needs at the moment, as it works both server and client side, is essentially guaranteed to work with all popular libraries, and is well used / tested.
Cleaned-up code below:
User.find({}, function(err, users) {
    for (var i = 0; i < users.length; i++) {
        Friend.find({'user': curUser._id}, function(err, friends) {
            for (var i = 0; i < friends.length; i++) {
                // pseudocode
            }
            console.log("HERE I'm CHECKING " + curUser);
            if (curUser.websiteaccount != "None") {
                request.post({ url: 'blah', formData: 'blah' }, function(err, httpResponse, body) {
                    // pseudocode
                    sendMail(friendResults, curUser);
                });
            } else {
                // pseudocode
                sendMail(friendResults, curUser);
            }
        });
        console.log("finished friend");
        console.log(friendResults);
        sleep.sleep(15);
        console.log("finished waiting");
        console.log(friendResults);
    }
});
First, let's go a bit more functional:
var users = User.find({});
users.forEach(function (user) {
    var friends = Friend.find({
        user: user._id
    });
    friends.forEach(function (friend) {
        if (user.websiteaccount !== 'None') {
            post(friend, user);
        }
        sendMail(friend, user);
    });
});
Then let's async that:
async.waterfall([
    async.apply(User.find, {}),
    function (users, cb) {
        async.each(users, function (user, cb) {
            async.waterfall([
                async.apply(Friend.find, { user: user._id }),
                function (friends, cb) {
                    async.each(friends, function (friend, cb) {
                        if (user.websiteaccount !== 'None') {
                            post(friend, user, function (err, data) {
                                if (err) {
                                    cb(err);
                                } else {
                                    sendMail(friend, user, cb);
                                }
                            });
                        } else {
                            sendMail(friend, user, cb);
                        }
                    }, cb);
                }
            ], cb);
        }, cb);
    }
], function (err) {
    if (err) {
        // all the errors in one spot
        throw err;
    }
    console.log('all done');
});
Also, this is you doing a join; SQL is really good at those.
You'll want to look into something called promises. They'll allow you to chain events and run them in order. Here's a nice tutorial on what they are and how to use them: http://strongloop.com/strongblog/promises-in-node-js-with-q-an-alternative-to-callbacks/
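With Q specifically, the chaining described in that tutorial looks roughly like this; Q.nfcall wraps a Node-style (error-first callback) function, and the model names simply mirror the ones in the question and are assumed to accept such callbacks:
var Q = require('q');

Q.nfcall(User.find.bind(User), {})
    .then(function (users) {
        // runs only after User.find has completed
        return Q.nfcall(Friend.find.bind(Friend), { user: users[0]._id });
    })
    .then(function (friends) {
        console.log('friends loaded:', friends.length);
    })
    .catch(function (err) {
        console.error(err);
    })
    .done();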
You can also take a look at the Async JavaScript library. It provides utility functions for ordering the execution of asynchronous functions in JavaScript.
Note: I think the number of queries you are doing within a handler is a code smell. This problem is probably better solved at the query level. That said, let's proceed!
It's hard to know exactly what you want, because your pseudocode could use a cleanup IMHO, but I'm going to assume what you want to do is this:
1. Get all users, and for each user:
   a. get all of the user's friends, and for each friend:
      - send a post request if the user has a website account
      - send an email
2. Do something after the process has finished
You can do this many different ways. Vanilla callbacks or async work great; I'm going to advocate for promises because they are the future, and library support is quite good. I'll use rsvp, because it is light, but any Promises/A+-compliant library will do the trick.
// helpers to simulate async calls
var User = {}, Friend = {}, request = {};
var asyncTask = User.find = Friend.find = request.post = function (cb) {
    setTimeout(function () {
        var result = [1, 2, 3];
        cb(null, result);
    }, 10);
};

User.find(function (err, usersResults) {
    // we reduce over the results, creating a "chain" of promises
    // that we can .then off of
    var userTask = usersResults.reduce(function (outerChain, outerResult) {
        return outerChain.then(function (outerValue) {
            // since we do not care about the return value or order
            // of the asynchronous calls here, we just nest them
            // and resolve our promise when they are done
            return new RSVP.Promise(function (resolveFriend, reject){
                Friend.find(function (err, friendResults) {
                    friendResults.forEach(function (result) {
                        request.post(function(err, finalResult) {
                            resolveFriend(outerValue + '\n finished user' + outerResult);
                        }, true);
                    });
                });
            });
        });
    }, RSVP.Promise.resolve(''));

    // handle success
    userTask.then(function (res) {
        document.body.textContent = res;
    });

    // handle errors
    userTask.catch(function (err) {
        console.log(err);
    });
});
jsbin

Asynchronous Calls and Recursion with Node.js

I'm looking to execute a callback upon the full completion of a recursive function that can go on for an undetermined amount of time. I'm struggling with async issues and was hoping to get some help here. The code, using the request module, is as follows:
var start = function(callback) {
    request.get({
        url: 'aaa.com'
    }, function (error, response, body) {
        var startingPlace = JSON.parse(body).id;
        recurse(startingPlace, callback);
    });
};

var recurse = function(startingPlace, callback) {
    request.get({
        url: 'bbb'
    }, function(error, response, body) {
        // store body somewhere outside these functions
        // make second request
        request.get({
            url: 'ccc'
        }, function(error, response, body) {
            var anArray = JSON.parse(body).stuff;
            if (anArray) {
                anArray.forEach(function(thing) {
                    request.get({
                        url: 'ddd'
                    }, function(error, response, body) {
                        var nextPlace = JSON.parse(body).place;
                        recurse(nextPlace);
                    });
                });
            }
        });
    });
    callback();
};

start(function() {
    // calls final function to print out results from storage that gets updated each recursive call
    finalFunction();
});
It seems that once my code goes past the for loop in the nested requests, it continues out of the request and ends the initial function call while the recursive calls are still going on. I don't want the highest-level iteration to finish until all the nested recursive calls have completed (and I have no way of knowing how many there will be).
Any help is GREATLY appreciated!
In your example you have no recursive calls. If I understand correctly, you mean that recurse(startingPlace, otherFunc); is the beginning of a recursive call.
Then just go back to the definition of the recursive call (which you have not shown in your post) and do this (add a third argument for a callback function to be called at the end of the recursion; the caller will pass it as a parameter):
function recurse(startingPlace, otherFunc, callback_one) {
    // code you may have ...
    if (your_terminating_criterion === true) {
        return callback_one(val); // where val is potentially some value you want to return (or a json object with results)
    }
    // more code you may have
}
Then in the original code that you posted, make this call instead (in the inner-most part):
recurse(startingPlace, otherFunc, function (results) {
    // results is now a variable with the data returned at the end of recursion
    console.log("Recursion finished with results " + results);
    callback(); // the callback that you wanted to call right from the beginning
});
Just spend some time and try to understand my explanation. Once you understand it, you will know Node. This is the Node philosophy in one post. I hope it is clear. Your very first example should look like this:
var start = function(callback) {
    request.get({
        url: 'aaa.com'
    }, function (error, response, body) {
        var startingPlace = JSON.parse(body).id;
        recurse(startingPlace, otherFunc, function (results) {
            console.log("Recursion finished with results " + results);
            callback();
        });
    });
};
Below is only additional information in case you are interested. Otherwise you are set with the above.
Typically in node.js, though, people return an error value as well, so that the caller knows whether the function that was called finished successfully. There is no big mystery here. Instead of returning just the results, people make a call of the form:
return callback_one(null, val);
Then in the other function you can have:
recurse(startingPlace, otherFunc, function (recError, results) {
    if (recError) {
        // treat the error from recursion
        return callback(); // important: use return, otherwise you will keep on executing whatever is there after the if part when the callback ends ;)
    }
    // No problems/errors
    console.log("Recursion finished with results " + results);
    callback(); // writing `return callback();` is not a bad habit when you want to stop execution there and actually call the callback()
});
Update with my suggestion
This is my suggestion for the recursive function, but before that, it looks like you need to define your own get:
function myGet(a, callback) {
    request.get(a, function (error, response, body) {
        var nextPlace = JSON.parse(body).place;
        return callback(null, nextPlace); // null for no errors, and return the nextPlace to async
    });
}
var recurse = function(startingPlace, callback2) {
    request.get({
        url: 'bbb'
    }, function(error1, response1, body1) {
        // store body somewhere outside these functions
        // make second request
        request.get({
            url: 'ccc'
        }, function(error2, response2, body2) {
            var anArray = JSON.parse(body2).stuff;
            if (anArray) {
                // The function that you want to call for each element of the array is `get`.
                // So, prepare these calls, but you also need to pass different arguments
                // and this is where `bind` comes into the picture and the link that I gave earlier.
                var theParallelCalls = [];
                for (var i = 0; i < anArray.length; i++) {
                    // During execution, parallel will pass its own callback as the second argument
                    // of `myGet`; this is why we have callback and callback2 in the code.
                    theParallelCalls.push(myGet.bind(null, {url: 'ddd'}));
                }
                // Now perform the parallel calls:
                async.parallel(theParallelCalls, function (error3, results) {
                    // All the parallel calls have returned
                    for (var i = 0; i < results.length; i++) {
                        var nextPlace = results[i];
                        recurse(nextPlace, callback2);
                    }
                });
            } else {
                return callback2(null);
            }
        });
    });
};
Note that I assume that the get request for 'bbb' is always followed by a get request for 'ccc'. In other words, you have not hidden a return point for the recursive calls where you have the comments.
Typically when you write a recursive function it will do something and then either call itself or return.
You need to define callback in the scope of the recursive function (i.e. recurse instead of start), and you need to call it at the point where you would normally return.
So, a hypothetical example would look something like:
function get_all_pages(callback, page) {
    page = page || 1;
    request.get({
        url: "http://example.com/getPage.php",
        data: { page_number: page },
        success: function (data) {
            if (data.is_last_page) {
                // We are at the end so we call the callback
                callback(page);
            } else {
                // We are not at the end so we recurse
                get_all_pages(callback, page + 1);
            }
        }
    });
}

function show_page_count(data) {
    alert(data);
}

get_all_pages(show_page_count);
I think you might find caolan/async useful. Look especially into async.waterfall. It will allow you to pass results from one callback to another and, when done, do something with the results.
Example:
async.waterfall([
    function(cb) {
        request.get({
            url: 'aaa.com'
        }, function(err, res, body) {
            if(err) {
                return cb(err);
            }
            cb(null, JSON.parse(body).id);
        });
    },
    function(id, cb) {
        // do that otherFunc now
        // ...
        cb(); // remember to pass result here
    }
], function (err, result) {
    // do something with possible error and result now
});
If your recursive function is synchronous, just call the callback on the next line:
var start = function(callback) {
    request.get({
        url: 'aaa.com'
    }, function (error, response, body) {
        var startingPlace = JSON.parse(body).id;
        recurse(startingPlace, otherFunc);
        // Call output function AFTER recursion has completed
        callback();
    });
};
Otherwise, you need to keep a reference to the callback in your recursive function: pass the callback as an argument to the function and call it whenever it has finished.
var start = function(callback) {
    request.get({
        url: 'aaa.com'
    }, function (error, response, body) {
        var startingPlace = JSON.parse(body).id;
        recurse(startingPlace, otherFunc, callback);
    });
};
Build your code from this example:
var update = function (callback){
    // Do stuff
    callback(null);
};

function doUpdate() {
    update(updateDone);
}

function updateDone(err) {
    if (err)
        throw err;
    else
        doUpdate();
}

doUpdate();
With ES6, 'es6-deferred' and 'q', you could try the following:
var Q = require('q');
var Deferred = require('es6-deferred');

const process = (id) => {
    var request = new Deferred();
    const ids = []; // do something and get the data
    const subPromises = ids.map(id => process(id));
    Q.all(subPromises).then(function () {
        request.resolve();
    })
    .catch(error => {
        console.log(error);
    });
    return request.promise;
};

process("testId").then(() => {
    console.log("done");
});
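The same recursion can also be sketched with native Promises alone, without the extra deferred library; getChildIds here is a hypothetical stand-in for the "do something and get the data" step and is assumed to return a promise of an array of ids:
const process = (id) =>
    getChildIds(id) // hypothetical helper returning a Promise of an array of ids
        .then(ids => Promise.all(ids.map(childId => process(childId))));

process('testId')
    .then(() => console.log('done'))
    .catch(err => console.log(err));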

a way to know when all callbacks are done in javascript

I have many calls to a service, at the end of which I want to write my final collection to a file once all the callbacks of the service have returned.
Is there a way to be sure that all callbacks are done?
for (id in idsCollection) {
    object.callService(id, function (res) {
        collection.push(res);
    });
}
filewriter.writetoFile("filename.json", JSON.stringify(collection));
EDIT: just for the record, I'm using cheerio with Node.js.
Create an array. Push something onto the array each time you set up a callback. Pop something off it each time the callback runs. Check to see if the array is empty inside the callback function. If it is empty, then all the callbacks are done.
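A minimal sketch of that bookkeeping, applied to the loop from the question:
var pending = [];
for (id in idsCollection) {
    pending.push(id); // one entry per callback that has been set up
    object.callService(id, function (res) {
        collection.push(res);
        pending.pop(); // one entry removed per callback that has run
        if (pending.length === 0) {
            // every callback has come back, safe to write the file
            filewriter.writetoFile("filename.json", JSON.stringify(collection));
        }
    });
}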
I typically use the node-async library for this sort of thing. It makes it easy to do exactly what you're talking about:
async.map(yourArray,
    function(element, next) {
        // this iterator gets called for each element in your array
        element.doSomething(function(returnValue){
            next(null, returnValue); // call next when you're done
        });
    }, function(err, returnValues) {
        // when all the elements in the array are processed, this is called
        if (err) return console.log(err);
        console.log(returnValues); // this is an array of the returnValues
    });
You could simply count them. In your case it seems you already know how many callbacks there are going to be.
var remaining = idsCollection.length; // assuming array
for (id in idsCollection) {
    object.callService(id, function (res) {
        collection.push(res);
        remaining -= 1; // decrement by 1 per callback
        // here you can check if remaining === 0 (all done)
    });
}
You can use the nimble lib: http://caolan.github.io/nimble/.
A nimble parallel example:
var _ = require('nimble');

_.parallel([
    function (callback) {
        setTimeout(function () {
            console.log('one');
            callback();
        }, 25);
    },
    function (callback) {
        setTimeout(function () {
            console.log('two');
            callback();
        }, 0);
    }
], function(){
    console.log('done');
});
output
> two
> one
> done
I see many answers here, but I hope this solution may still help someone.
Create a promise for each callback to be awaited, like so:
function funcToLoop(arg){
    return new Promise((resolve, reject) => {
        try {
            funcWithCallback(arg, (cbArg) => {
                // do your stuff
                resolve(cbArg);
            });
        } catch (e) {
            reject(e);
        }
    });
}
Then you can write the loop as an async function and handle the eventual results/states/etc. here:
async function mainLoop(array){
    let results = [];
    for (let arg of array){
        results.push(await funcToLoop(arg));
    }
    // handle results
}
... or you can have a sync function, collect the promises and handle them:
function mainLoop(array){
    let promises = [];
    for (let arg of array){
        promises.push(funcToLoop(arg));
    }
    Promise.all(promises).then(() => {
        // handle promises
    });
}
Claudio
jQuery.Deferred() objects might be what you are looking for. Or, if you are using HTML5, you can use native promises.
Here is how to create a promise:
var promise = new Promise(function(resolve, reject) {
    // do a thing, possibly async, then…
    var everythingTurnedOutFine = true; // stand-in for your own success check
    if (everythingTurnedOutFine) {
        resolve("Stuff worked!");
    }
    else {
        reject(Error("It broke"));
    }
});
And here is how to use them
promise.then(function(result) {
    console.log(result); // "Stuff worked!"
}, function(err) {
    console.log(err); // Error: "It broke"
});
Check this link for more info
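For the jQuery.Deferred() route, the equivalent shape is roughly this (using the question's callService as the async call):
var deferred = $.Deferred();

object.callService(id, function (res) {
    deferred.resolve(res); // or deferred.reject(err) on failure
});

deferred.done(function (res) {
    console.log(res);
}).fail(function (err) {
    console.log(err);
});
Collect one deferred per call in an array and hand them all to $.when.apply($, deferreds) if you need to know when every callback has finished.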
If you're using jQuery, you can use $.when
Example:
var exmCall1 = $.getJSON(..);
var exmCall2 = $.getJSON(..);

$.when(exmCall1, exmCall2).done(function (exmCall1Ret, exmCall2Ret) {
    // do stuff
});
You can read the actual documentation here: http://api.jquery.com/jquery.when/
Or do it with some hardcoded polling:
var running = 0;
for (id in idsCollection) {
    object.callService(id, function (res) {
        collection.push(res);
        running += 1;
    });
}

var loop = setInterval(function() {
    if (running >= idsCollection.length) {
        filewriter.writetoFile("filename.json", JSON.stringify(collection));
        clearInterval(loop);
    }
}, 500);

node.js - how do I run a series of callback functions in order?

Like many other people, I want to turn an async function of a third-party module (Patio) into a sync function.
function get_user_message_list(parameters, array, response)
{
    var new_array = [];
    for (var i in array) {
        var json = array[i];
        json['content']['users_seen'] = ["1757842565"];
        json['content']['users_not_seen'] = [];
        new_array.push(json);
    }
    console.log("NEW ARRAY :");
    console.log(new_array);
    response.writeHeader(200, {'Content-Type':'application/json'});
    response.end(JSON.stringify(new_array));
}

function get_json_message(parameters, json)
{
    console.log("JSON OBJECT :");
    console.log(json);
    var dataset = db.from(TABLES.USER).join(TABLES.MOVIE_LIST, {MLUserId: sql.URId}).join(TABLES.MOVIE, {MVId: sql.MLMovieId});
    dataset.where('MLSeen={seen} AND MVSourceId={movie} AND MVSource={source} AND URId!={user}', {seen: 1, movie: json['content']['movie_id'], source: json['content']['movie_source'], user: parameters.FACEBOOK_ID}).all().then(function(users){
        if (users) {
            for (var j in users) {
                json['content']['users_seen'].push(users[j].URId);
            }
        }
        //console.log(json['content']['users_seen']);
        dataset.where('MLSeen={seen} AND MVSourceId={movie} AND MVSource={source} AND URId!={user}', {seen: 0, movie: json['content']['movie_id'], source: json['content']['movie_source'], user: parameters.FACEBOOK_ID}).all().then(function(users){
            if (users) {
                for (var j in users) {
                    json['content']['users_not_seen'].push(users[j].URId);
                }
            }
            console.log(json);
        }, errorHandler);
    }, errorHandler);
}
In the get_user_message_list function I iterate over an array, and for each iteration I call the async function. In that async function I use the Patio module to make a request to a MySQL database. But as you can see, I need the query result before I can send anything back from the calling function.
How can I wait for the query result to be retrieved before sending it on to the next function?
You CAN and you SHOULD turn async functions into something that behaves like sync functions when a problem calls for it. "You can't" is never the correct answer; the "shouldn't" is for the programmer to decide.
So, I recently found some code in the nodeunit module which may help you. It fires the async functions, keeps track of which are ready, and after all requests are in, fires the callback. This could be the idea behind the solution to your problem (so no, this is not the final solution).
async.forEachSeries = function (arr, iterator, callback) {
    if (!arr.length) {
        return callback();
    }
    var completed = 0;
    var iterate = function () {
        iterator(arr[completed], function (err) {
            if (err) {
                callback(err);
                callback = function () {};
            }
            else {
                completed += 1;
                if (completed === arr.length) {
                    callback();
                }
                else {
                    iterate();
                }
            }
        });
    };
    iterate();
};
This test is what prompted me to look at how it was done:
exports['series'] = function (test) {
    var call_order = [];
    async.series([
        function (callback) {
            setTimeout(function () {
                call_order.push(1);
                callback(null, 1);
            }, 25);
        },
        function (callback) {
            setTimeout(function () {
                call_order.push(2);
                callback(null, 2);
            }, 50);
        },
        function (callback) {
            setTimeout(function () {
                call_order.push(3);
                callback(null, 3, 3);
            }, 15);
        }
    ],
    function (err, results) {
        test.equals(err, null);
        test.same(results, [1, 2, [3, 3]]);
        test.same(call_order, [1, 2, 3]);
        test.done();
    });
};
Happy programming!
You can't and you shouldn't. This would effectively block your Node.js server, and you would lose every advantage Node.js provides. Also, it goes against the whole asynchronous idea behind Node.js.
Just pass callback parameter to your function:
function get_json_message(parameters, json, callback){ // <---- note the callback
// some other stuff
dataset.where( ...
// some other stuff
dataset.where( ...
// some other stuff
// I've finished the job, call the callback
callback(); // <--- you can pass additional params here
});
});
}
and call it like this:
get_json_message(params, json, function() {
    console.log('Hello world!');
    // do whatever you like inside callback
});
I've been using synchronize.js with great success. There's even a pending pull request (which works quite well) to support async functions which have multiple parameters. Far better and easier to use than node-sync, IMHO. An added bonus is that it has easy-to-understand and thorough documentation, whereas node-sync does not.
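For what it's worth, and purely from memory, the synchronize.js pattern looks roughly like the sketch below; treat the exact API names (fiber, await, defer) as an assumption to be checked against its documentation:
// A sketch from memory; verify fiber/await/defer against the synchronize.js docs.
var sync = require('synchronize');
var fs = require('fs');

sync.fiber(function () {
    // await suspends the fiber (not the event loop) until the deferred callback fires
    var contents = sync.await(fs.readFile('config.json', 'utf8', sync.defer()));
    console.log(contents.length);
});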
