I have an array that I need to rebuild with some edits applied to each element. I'm doing it with async.concat(), but something isn't working.
Can you tell me where the mistake is?
async.concat(dialogs, function(dialog, callback) {
if (dialog['viewer']['user_profile_image'] != null) {
fs.exists(IM.pathToUserImage + dialog['viewer']['user_profile_image'].replace('%s', ''), function(exists) {
if (exists) {
dialog['viewer']['user_profile_image'] = dialog['viewer']['user_profile_image'].replace('%s', '');
}
callback(dialog);
});
}
}, function() {
console.log(arguments);
});
In my opinion everything is logical, yet the final callback is invoked immediately after the first iteration. How can I send the data only after the entire array has been processed?
Thank you!
Instead of callback(dialog);, you want
callback(null,dialog);
because the first parameter to the callback function is an error object. The reason that console.log(arguments) is getting called after the first iteration is because async thinks an error occurred.
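For reference, a minimal sketch of the iterator with the error-first callback, plus an else branch so that every element invokes the callback even when the image is null (dialogs and IM.pathToUserImage are taken from the question):
async.concat(dialogs, function (dialog, callback) {
    var image = dialog['viewer']['user_profile_image'];
    if (image != null) {
        fs.exists(IM.pathToUserImage + image.replace('%s', ''), function (exists) {
            if (exists) {
                dialog['viewer']['user_profile_image'] = image.replace('%s', '');
            }
            callback(null, dialog); // error-first: null means "no error"
        });
    } else {
        callback(null, dialog); // the callback must fire for every element
    }
}, function (err, results) {
    console.log(err, results);
});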
I solved the problem, but I didn't fully understand it at first. The issue was an element that was null instead of a processed value: the program simply stopped at that point without throwing any errors or warnings.
async.map(dialogs, function(dialog, callback) {
if (dialog['viewer']['user_profile_image'] == null) {
dialog['viewer']['user_profile_image'] = IM.pathToUserImage;
}
fs.exists(IM.pathToUserImage + dialog['viewer']['user_profile_image'].replace('%s', ''), function(exists) {
if (exists) {
dialog['viewer']['user_profile_image'] = dialog['viewer']['user_profile_image'].replace('%s', '');
}
callback(null, dialog);
});
}, function(err, rows) {
if (err) throw err;
console.log(rows);
});
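As a side note, fs.exists is deprecated in current Node versions; an equivalent check with fs.access would look roughly like this (a sketch with the same replace logic as above):
var image = dialog['viewer']['user_profile_image'].replace('%s', '');
fs.access(IM.pathToUserImage + image, function (err) {
    if (!err) { // no error means the file is accessible
        dialog['viewer']['user_profile_image'] = image;
    }
    callback(null, dialog);
});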
Though I am a little late to post this answer, I see that none of us has used the .concat function the way it is supposed to be used.
I have created a snippet that shows the proper use of this function.
let async = require('async');
async.concat([1, 2, 3], hello, (err, result) => {
if (err) throw err;
console.log(result); // [1, 2, 3]
});
function hello(time, callback) {
setTimeout(function () {
callback(null, time);
}, time * 500);
}
I am new to Node.js and I'm finding the non-blocking, asynchronous nature of JS extremely difficult to understand and handle.
I have a piece of code which is supposed to iterate an array
and, for every iteration, make a DB update.
Can someone show the correct use of the async library functions and help fix my code?
Code example -
function updateFunction(conn, requestBody, callback) {
let arr = [];
async.each(requestBody.arr, function(item, callback) {
let sqlData = []
let columns = "";
if(item.columnData != null){
sqlData.push(item.columnData);
columns += "`columnName` = ?,";
}
if(columns != ''){
columns = columns.substring(0,columns.length-1);
let sqlQuery = 'UPDATE someTable SET '+columns
+' WHERE id = "' + item.id + '"';
conn.query(sqlQuery, sqlData, function (err, result) {
if (err) {
return callback(err, false);
}
})
}
else{
return callback(null, false);
}
columns = "";
sqlData = [];
},
function(err, results) {
//Code never reaches here, don't know why
if (err) {
return callback(err, false);
}
else{
return callback(null, true);
}
});
} // END
During your database query call, on a successful query your callback is not called, so the flow never reaches the final callback.
You will want to call the callback after your if (err) { return callback(err); } block as well, to let async know your database query is finished.
Another thing: according to the docs, async.each's final callback is not invoked with any results, only with a possible error.
A callback which is called when all iteratee functions have finished, or an error occurs. Invoked with (err).
Therefore, it is not required for you to pass a value into the callback statement within your iteratee function.
Modify your code to do this and it will work.
conn.query(sqlQuery, sqlData, function (err, result) {
if (err) {
return callback(err);
}
return callback(null);
})
Hope this helps.
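Putting both points together, a sketch of the whole function might look like this (untested; it keeps the question's placeholder table and column names, and also parameterizes the id instead of concatenating it into the query):
function updateFunction(conn, requestBody, callback) {
    async.each(requestBody.arr, function (item, cb) {
        var sqlData = [];
        var columns = "";
        if (item.columnData != null) {
            sqlData.push(item.columnData);
            columns += "`columnName` = ?,";
        }
        if (columns === '') {
            // nothing to update for this item, but the callback must still fire
            return cb(null);
        }
        columns = columns.substring(0, columns.length - 1);
        var sqlQuery = 'UPDATE someTable SET ' + columns + ' WHERE id = ?';
        conn.query(sqlQuery, sqlData.concat([item.id]), function (err) {
            cb(err); // null on success, so async.each moves on to the next item
        });
    }, function (err) {
        if (err) {
            return callback(err, false);
        }
        callback(null, true);
    });
}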
conn.query(sqlQuery, sqlData, async function (err, result) {
if (err) {
return await callback(err, false);
}
})
Something like this: the query callback is declared async here, and await waits until the callback call has finished before returning.
I understand the concept of callbacks in Javascript. For example this sort of code makes complete sense to me -
function processArray(arr, callback) {
var resultArr = new Array();
for (var i = arr.length-1; i >= 0; i--)
resultArr[i] = callback(arr[i]);
return resultArr;
}
var arr = [1, 2, 3, 4];
var arrReturned = processArray(arr, function(arg) {return arg * -1;});
// arrReturned would be [-1, -2, -3, -4]
I understand that callback is just a placeholder function which could be any function that we write later. However there are certain types of callbacks that I have no clue how to write. For example -
function processData (callback) {
fetchData(function (err, data) {
if (err) {
console.log("An error has occured. Abort everything!");
callback(err);
}
data += 1;
callback(data);
});
}
How is the callback to fetchData defined? How does the program know what is err and what is data?
Normally in Node.js the structure of a callback is function(err, result).
This means that the first argument is the error and the second argument is the data. When there is no error you simply pass null as the first argument. This is a standard convention.
The second code you posted you should change it like follows:
function processData (callback) {
fetchData(function (err, data) {
if (err) {
console.log("An error has occured. Abort everything!");
return callback(err);
}
data += 1;
callback(null, data);
});
}
In the above case, if you did not want to increase the data by 1, you could simply call fetchData(callback).
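In that case the whole body collapses to a pass-through, because fetchData already calls back with (err, data):
function processData(callback) {
    // nothing to transform, so just forward fetchData's (err, data) callback
    fetchData(callback);
}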
My two cents:
In JavaScript, functions are objects like everything else, so they can be passed as arguments to other functions. This enables an API to accept a callback as an argument and invoke it inside the API implementation, executing the user's task once the API's own task is done. If you re-read your code with that in mind, you will spot the misconception. Other users have already said the same thing here.
EDIT
function fetchdata(callback){
var err = "This is Error message";
var data = {samplekey : "samplevalue"};
callback(err, data); // here in the implementation of fetchdata
// we are making those data and feeding to the callback function.
}
function callback(err, data){
// Implementation of call back
}
Here, for readability, I have defined the callback function separately; you can also pass it inline.
I think you do not understand JavaScript completely.
I got this example from here
function doSomething(callback) {
// ...
// Call the callback
callback('stuff', 'goes', 'here');
}
function foo(a, b, c) {
// I'm the callback
alert(a + " " + b + " " + c);
}
doSomething(foo);
I have this chunk of code
User.find({}, function(err, users) {
for (var i = 0; i < users.length; i++) {
pseudocode
Friend.find({
'user': curUser._id
}, function(err, friends) * * ANOTHER CALLBACK * * {
for (var i = 0; i < friends.length; i++) {
pseudocode
}
console.log("HERE I'm CHECKING " + curUser);
if (curUser.websiteaccount != "None") {
request.post({
url: 'blah',
formData: blah
}, function(err, httpResponse, body) { * * ANOTHER CALLBACK * *
pseudocode
sendMail(friendResults, curUser);
});
} else {
pseudocode
sendMail(friendResults, curUser);
}
});
console.log("finished friend");
console.log(friendResults);
sleep.sleep(15);
console.log("finished waiting");
console.log(friendResults);
}
});
There are a couple of asynchronous things happening here. For each user, I want to find their relevant friends and concat them to a variable. I then want to check whether that user has a website account and, if so, make a post request and grab some information there. The only thing is, everything happens out of order, since the code isn't waiting for the callbacks to finish. I've been using a sleep, but that doesn't solve the problem either; it's still jumbled.
I've looked into async, but these functions are intertwined and not really separate, so I wasn't sure how it'd work with async either.
Any suggestions to get this code to run sequentially?
Thanks!
I prefer the promise module (https://www.npmjs.com/package/promise) to q because of its simplicity.
var Promises = require('promise');
var promise = new Promises(function (resolve, reject) {
// do some async stuff
if (success) {
resolve(data);
} else {
reject(reason);
}
});
promise.then(function (data) {
// function called when first promise returned
return new Promises(function (resolve, reject) {
// second async stuff
if (success) {
resolve(data);
} else {
reject(reason);
}
});
}, function (reason) {
// error handler
}).then(function (data) {
// second success handler
}, function (reason) {
// second error handler
}).then(function (data) {
// third success handler
}, function (reason) {
// third error handler
});
As you can see, you can continue like this forever. You can also return simple values instead of promises from the async handlers and then these will simply be passed to the then callback.
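For instance, a handler that returns a plain value simply feeds that value to the next then (a small sketch using the same promise variable as above):
promise.then(function (data) {
    // returning a plain value is fine; it becomes the argument of the next then
    return data.length;
}).then(function (length) {
    console.log('received a plain value, not a promise:', length);
});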
I rewrote your code so it was a bit easier to read. You have a few choices of what to do if you want to guarantee synchronous execution:
Use the async library. It provides some helper functions that run your code in series, particularly, this: https://github.com/caolan/async#seriestasks-callback
Use promises to avoid making callbacks, and simplify your code APIs. Promises are a new feature in Javascript, although, in my opinion, you might not want to do this right now. There is still poor library support for promises, and it's not possible to use them with a lot of popular libraries :(
Now, regarding your program: there's actually nothing wrong with your code at all right now (assuming you don't have async code in the pseudocode blocks). It will work just fine and execute as expected.
I'd recommend using async for your sequential needs at the moment, as it works both server and client side, is essentially guaranteed to work with all popular libraries, and is well used / tested.
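For a rough idea of what async.series looks like in isolation (the tasks here are just placeholders, not your models):
var async = require('async');

async.series([
    function (cb) {
        // first task; signal completion with cb(err, result)
        setTimeout(function () { cb(null, 'one'); }, 100);
    },
    function (cb) {
        // runs only after the first task has called its callback
        setTimeout(function () { cb(null, 'two'); }, 50);
    }
], function (err, results) {
    console.log(err, results); // null [ 'one', 'two' ]
});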
Cleaned up code below
User.find({}, function(err, users) {
for (var i = 0; i < users.length; i++) {
Friend.find({'user':curUser._id}, function(err, friends) {
for (var i = 0; i < friends.length; i++) {
// pseudocode
}
console.log("HERE I'm CHECKING " + curUser);
if (curUser.websiteaccount != "None") {
request.post({ url: 'blah', formData: 'blah' }, function(err, httpResponse, body) {
// pseudocode
sendMail(friendResults, curUser);
});
} else {
// pseudocode
sendMail(friendResults, curUser);
}
});
console.log("finished friend");
console.log(friendResults);
sleep.sleep(15);
console.log("finished waiting");
console.log(friendResults);
}
});
First, let's go a bit more functional:
var users = User.find({});
users.forEach(function (user) {
var friends = Friend.find({
user: user._id
});
friends.forEach(function (friend) {
if (user.websiteaccount !== 'None') {
post(friend, user);
}
sendMail(friend, user);
});
});
Then let's async that:
async.waterfall([
async.apply(Users.find, {}),
function (users, cb) {
async.each(users, function (user, cb) {
async.waterfall([
async.apply(Friends.find, { user: user._id }),
function (friends, cb) {
if (user.websiteaccount !== 'None') {
post(friends, user, function (err, data) {
if (err) {
cb(err);
} else {
sendMail(friends, user, cb);
}
});
} else {
sendMail(friends, user, cb);
}
}
], cb);
}, cb);
}
], function (err) {
if (err) {
// all the errors in one spot
throw err;
}
console.log('all done');
});
Also, this is you doing a join; SQL is really good at those.
You'll want to look into something called promises. They'll allow you to chain events and run them in order. Here's a nice tutorial on what they are and how to use them http://strongloop.com/strongblog/promises-in-node-js-with-q-an-alternative-to-callbacks/
You can also take a look at the Async JavaScript library. It provides utility functions for ordering the execution of asynchronous functions in JavaScript.
Note: I think the number of queries you are doing within a handler is a code smell. This problem is probably better solved at the query level. That said, let's proceed!
It's hard to know exactly what you want, because your pseudocode could use a cleanup IMHO, but I'm going to assume that what you want to do is this:
1. Get all users, and for each user:
a. get all the user's friends, and for each friend:
- send a post request if the user has a website account
- send an email
2. Do something after the whole process has finished
You can do this many different ways. Vanilla callbacks or async work great; I'm going to advocate for promises because they are the future, and library support is quite good. I'll use rsvp, because it is light, but any Promise/A+ compliant library will do the trick.
// helpers to simulate async calls
var User = {}, Friend = {}, request = {};
var asyncTask = User.find = Friend.find = request.post = function (cb) {
setTimeout(function () {
var result = [1, 2, 3];
cb(null, result);
}, 10);
};
User.find(function (err, usersResults) {
// we reduce over the results, creating a "chain" of promises
// that we can .then off of
var userTask = usersResults.reduce(function (outerChain, outerResult) {
return outerChain.then(function (outerValue) {
// since we do not care about the return value or order
// of the asynchronous calls here, we just nest them
// and resolve our promise when they are done
return new RSVP.Promise(function (resolveFriend, reject){
Friend.find(function (err, friendResults) {
friendResults.forEach(function (result) {
request.post(function(err, finalResult) {
resolveFriend(outerValue + '\n finished user' + outerResult);
}, true);
});
});
});
});
}, RSVP.Promise.resolve(''));
// handle success
userTask.then(function (res) {
document.body.textContent = res;
});
// handle errors
userTask.catch(function (err) {
console.log(err);
});
});
jsbin
I have 2 functions that I'm running asynchronously. I'd like to write them using the waterfall model. The thing is, I don't know how.
Here is my code :
var fs = require('fs');
function updateJson(ticker, value) {
//var stocksJson = JSON.parse(fs.readFileSync("stocktest.json"));
fs.readFile('stocktest.json', function(error, file) {
var stocksJson = JSON.parse(file);
if (stocksJson[ticker]!=null) {
console.log(ticker+" price : " + stocksJson[ticker].price);
console.log("changing the value...")
stocksJson[ticker].price = value;
console.log("Price after the change has been made -- " + stocksJson[ticker].price);
console.log("printing the the Json.stringify")
console.log(JSON.stringify(stocksJson, null, 4));
fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function(err) {
if(!err) {
console.log("File successfully written");
}
if (err) {
console.error(err);
}
}); //end of writeFile
} else {
console.log(ticker + " doesn't exist on the json");
}
});
} // end of updateJson
Any idea how I can write it using waterfall, so I'll be able to control this? Please show me some examples because I'm new to node.js.
First identify the steps and write them as asynchronous functions (taking a callback argument)
read the file
function readFile(readFileCallback) {
fs.readFile('stocktest.json', function (error, file) {
if (error) {
readFileCallback(error);
} else {
readFileCallback(null, file);
}
});
}
process the file (I removed most of the console.log in the examples)
function processFile(file, processFileCallback) {
var stocksJson = JSON.parse(file);
if (stocksJson[ticker] != null) {
stocksJson[ticker].price = value;
fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function (error) {
if (error) {
processFileCallback(error);
} else {
console.log("File successfully written");
processFileCallback(null);
}
});
}
else {
console.log(ticker + " doesn't exist on the json");
processFileCallback(null); //callback should always be called once (and only one time)
}
}
Note that I did no specific error handling here, I'll take benefit of async.waterfall to centralize error handling at the same place.
Also be careful that if you have branches (if/else/switch/...) in an asynchronous function, every code path calls the callback once (and only once).
Plug everything with async.waterfall
async.waterfall([
readFile,
processFile
], function (error) {
if (error) {
//handle readFile error or processFile error here
}
});
Clean example
The previous code was excessively verbose to make the explanations clearer. Here is a full cleaned example:
async.waterfall([
function readFile(readFileCallback) {
fs.readFile('stocktest.json', readFileCallback);
},
function processFile(file, processFileCallback) {
var stocksJson = JSON.parse(file);
if (stocksJson[ticker] != null) {
stocksJson[ticker].price = value;
fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function (error) {
if (!error) {
console.log("File successfully written");
}
processFileCallback(error);
});
}
else {
console.log(ticker + " doesn't exist on the json");
processFileCallback(null);
}
}
], function (error) {
if (error) {
//handle readFile error or processFile error here
}
});
I left the function names because it helps readability and helps debugging with tools like chrome debugger.
If you use underscore (on npm), you can also replace the first function with _.partial(fs.readFile, 'stocktest.json')
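That substitution would look roughly like this (assuming underscore is installed and processFile is pulled out as a standalone function; otherwise it behaves exactly like the named readFile step above):
var _ = require('underscore');

async.waterfall([
    _.partial(fs.readFile, 'stocktest.json'), // same as the readFile step above
    processFile
], function (error) {
    if (error) {
        // handle readFile error or processFile error here
    }
});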
First and foremost, make sure you read the documentation regarding async.waterfall.
Now, there are couple key parts about the waterfall control flow:
The control flow is specified by an array of functions for invocation as the first argument, and a "complete" callback when the flow is finished as the second argument.
The array of functions are invoked in series (as opposed to parallel).
If an error (usually named err) is encountered at any operation in the flow array, it will short-circuit and immediately invoke the "complete"/"finish"/"done" callback.
Arguments from the previously executed function are applied to the next function in the control flow, in order, and an "intermediate" callback is supplied as the last argument. Note: The first function only has this "intermediate" callback, and the "complete" callback will have the arguments of the last invoked function in the control flow (with consideration to any errors) but with an err argument prepended instead of an "intermediate" callback that is appended.
The callbacks for each individual operation (I call this cbAsync in my examples) should be invoked when you're ready to move on: The first parameter will be an error, if any, and the second (third, fourth... etc.) parameter will be any data you want to pass to the subsequent operation.
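A toy example may make the argument passing concrete (the step names are made up and have nothing to do with the question):
async.waterfall([
    function stepOne(cbAsync) {
        cbAsync(null, 2);            // stepTwo receives 2
    },
    function stepTwo(two, cbAsync) {
        cbAsync(null, two, two * 3); // stepThree receives 2 and 6
    },
    function stepThree(two, six, cbAsync) {
        cbAsync(null, two + six);    // the "complete" callback receives 8
    }
], function complete(err, sum) {
    console.log(err, sum);           // null 8
});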
The first goal is to get your code working almost verbatim alongside the introduction of async.waterfall. I decided to remove all your console.log statements and simplified your error handling. Here is the first iteration (untested code):
var fs = require('fs'),
async = require('async');
function updateJson(ticker,value) {
async.waterfall([ // the series operation list of `async.waterfall`
// waterfall operation 1, invoke cbAsync when done
function getTicker(cbAsync) {
fs.readFile('stocktest.json',function(err,file) {
if ( err ) {
// if there was an error, let async know and bail
cbAsync(err);
return; // bail
}
var stocksJson = JSON.parse(file);
if ( stocksJson[ticker] == null ) {
// if we don't have the ticker, let "complete" know and bail
cbAsync(new Error('Missing ticker property in JSON.'));
return; // bail
}
stocksJson[ticker].price = value;
// err = null (no error), jsonString = JSON.stringify(...)
cbAsync(null,JSON.stringify(stocksJson,null,4));
});
},
function writeTicker(jsonString,cbAsync) {
fs.writeFile('stocktest.json',jsonString,function(err) {
cbAsync(err); // err will be null if the operation was successful
});
}
],function asyncComplete(err) { // the "complete" callback of `async.waterfall`
if ( err ) { // there was an error with either `getTicker` or `writeTicker`
console.warn('Error updating stock ticker JSON.',err);
} else {
console.info('Successfully completed operation.');
}
});
}
The second iteration divides up the operation flow a bit more. It puts it into smaller single-operation oriented chunks of code. I'm not going to comment it, it speaks for itself (again, untested):
var fs = require('fs'),
async = require('async');
function updateJson(ticker,value,callback) { // introduced a main callback
var stockTestFile = 'stocktest.json';
async.waterfall([
function getTicker(cbAsync) {
fs.readFile(stockTestFile,function(err,file) {
cbAsync(err,file);
});
},
function parseAndPrepareStockTicker(file,cbAsync) {
var stocksJson = JSON.parse(file);
if ( stocksJson[ticker] == null ) {
cbAsync(new Error('Missing ticker property in JSON.'));
return;
}
stocksJson[ticker].price = value;
cbAsync(null,JSON.stringify(stocksJson,null,4));
},
function writeTicker(jsonString,cbAsync) {
fs.writeFile(stockTestFile,jsonString,function(err) {
cbAsync(err);
});
}
],function asyncComplete(err) {
if ( err ) {
console.warn('Error updating stock ticker JSON.',err);
}
callback(err);
});
}
The last iteration short-hands a lot of this with the use of some bind tricks to decrease the call stack and increase readability (IMO), also untested:
var fs = require('fs'),
async = require('async');
function updateJson(ticker,value,callback) {
var stockTestFile = 'stocktest.json';
async.waterfall([
fs.readFile.bind(fs,stockTestFile),
function parseStockTicker(file,cbAsync) {
var stocksJson = JSON.parse(file);
if ( stocksJson[ticker] == null ) {
cbAsync(new Error('Missing ticker property in JSON.'));
return;
}
cbAsync(null,stocksJson);
},
function prepareStockTicker(stocksJson,cbAsync) {
stocksJson[ticker].price = value;
cbAsync(null,JSON.stringify(stocksJson,null,4));
},
fs.writeFile.bind(fs,stockTestFile)
],function asyncComplete(err) {
if ( err ) {
console.warn('Error updating stock ticker JSON.',err);
}
callback(err);
});
}
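A hypothetical call site for the second or third version (which accept a main callback; the ticker and value here are made up):
updateJson('GOOG', 123.45, function (err) {
    if (err) {
        console.error('update failed', err);
        return;
    }
    console.log('stocktest.json updated');
});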
Basically, Node.js (and, more generally, JavaScript) functions that require some time to execute (whether for I/O or CPU processing) are typically asynchronous, so the event loop (to keep it simple, a loop that continuously checks for tasks to execute) can invoke the function right below the current one without blocking while waiting for a response. If you are familiar with other languages like C or Java, you can think of an asynchronous function as a function that runs on another thread (this isn't necessarily true in JavaScript, but the programmer shouldn't have to care about it): when the execution terminates, that thread notifies the main one (the event loop) that the job is done and hands back the results.
As said, once the first function has finished its job it must be able to signal that it is done, and it does so by invoking the callback function you pass to it. To make an example:
var callback = function(data, err)
{
if(!err)
{
// do something with the received data
}
else
{
// something went wrong
}
}
asyncFunction1(someparams, callback);
asyncFunction2(someotherparams);
The execution flow would call asyncFunction1, then asyncFunction2 and every function below it without waiting; only when asyncFunction1 ends is the callback function, passed as its last parameter, called to do something with the data (if no errors occurred).
So, to make 2 or more asynchronous functions execute one after another, only when the previous one has ended, you have to call them inside their callback functions:
asyncTask1(data, function(result1, err)
{
if(!err)
{
asyncTask2(data, function(result2, err2)
{
if(!err2)
{
// call maybe a third async function here
}
else
{
console.log(err2);
}
});
}
else
{
console.log(err);
}
});
result1 is the value produced by asyncTask1 and result2 is the value produced by asyncTask2. You can nest as many asynchronous functions as you want this way.
In your case if you want another function to be called after updateJson() you must call it after this line:
console.log("File successfully written");
So I have a CSV file containing my information, and I need to do a mass add/update.
exports.add_questions_from_file = function (file_path, surveyid, callback)
{
var U = [{}];
fs.readFile(file_path, 'utf8', function(err, data){
if (err){
console.log(err);
callback(err,null);
}else{
console.log(data);
d = data.split(/\r\n|\n/);
for (x=0;x <d.length;x++)
{
line = d[x].split(',');
if (line[0] == "") {return};
RQuestion.add_by_line (line,function (err, question)
{
U.push({id:question.id});
console.log(U);
});
}
}
});
Survey.update({_id:surveyid},{$push:{"SurveyQuestions":U}},function (err,numAffected, rawResponse) {
console.log(rawResponse);
RET = {"module":"survey","operation": "add", "status":"OK"};
callback(RET);
});
};
But even though I'm using callback functions, the update always seems to happen with the same object; even the console.log here
U.push({id:question.id});
console.log(U);
logs the same object (even though all the others were created).
Am I doing something wrong?
I see a few issues.
First for:
if (line[0] == "") {return};
Don't you mean to use a break or continue instead? Otherwise the entire function will quit if there is a blank line anywhere in the file. This is very important because Survey.update won't get called either.
Second: I assumed that RQuestion.add_by_line and Survey.update are doing something async like updating a database. Your code needs to be restructured to wait for those async items to complete before moving on to the next step. I'd recommend an npm package named async for that.
fs.readFile(file_path, 'utf8', function(err, data){
if (err){
console.log(err);
callback(err,null);
}else{
var d = data.split(/\r\n|\n/);
async.map(d, function(line, callback) {
//this function is called for each line
RQuestion.add_by_line(line.split(','), function (err, question)
{
callback(err,{id:question.id});
});
}, function(err, results) {
//this function is called when all of the items are done
console.log("done with async");
console.dir(results);
Survey.update({_id:surveyid},{$push:{"SurveyQuestions":results}},function (err,numAffected, rawResponse) {
console.log(rawResponse);
RET = {"module":"survey","operation": "add", "status":"OK"};
callback(RET);
});
});
}
});
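To also cover the first point, you could drop blank lines before handing the array to async.map instead of returning early, roughly like this:
var lines = data.split(/\r\n|\n/).filter(function (line) {
    return line.trim() !== ''; // skip blank lines instead of aborting the whole function
});
async.map(lines, function (line, cb) {
    RQuestion.add_by_line(line.split(','), function (err, question) {
        cb(err, { id: question.id });
    });
}, function (err, results) {
    // continue with Survey.update as above
});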