Calling JavaScript async function in for loop - javascript

I have a for loop and I want to call an async function on each iteration, but I am getting a JS stack trace error. Below is a prototype of my code. I have also tried the IIFE pattern, but it's not working.
for (let i = 0; i < 99999; i++) {
  getData(i, function(err, result) {
    if (err) return err;
    else {
      console.log(result);
    }
  });
}

function getData(number, callback) {
  request('http://someapiurl' + number, function(err, response) {
    if (err) callback(err, null);
    else {
      callback(null, response);
    }
  });
}

You're trying to make 99999 requests at the same time; this just won't work in most browsers.
Use promises instead: push them into an array, then use Promise.all to let the browser handle the requests, and simply handle the final resolved response.
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
var p1 = Promise.resolve(3);
var p2 = 1337;
var p3 = new Promise((resolve, reject) => {
  setTimeout(resolve, 100, 'foo');
});

Promise.all([p1, p2, p3]).then(values => {
  console.log(values); // [3, 1337, "foo"]
});
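Applied to the question's code, here is a minimal sketch (assuming the same callback-style getData; the batch size of 100 is an arbitrary assumption, not a recommendation) that wraps getData in a promise and awaits each batch with Promise.all instead of firing all 99999 requests at once:
// Sketch only: promisify the question's getData and fetch in small batches.
function getDataAsync(number) {
  return new Promise((resolve, reject) => {
    getData(number, (err, result) => err ? reject(err) : resolve(result));
  });
}

async function fetchAll(total, batchSize) {
  const all = [];
  for (let start = 0; start < total; start += batchSize) {
    const batch = [];
    for (let i = start; i < Math.min(start + batchSize, total); i++) {
      batch.push(getDataAsync(i));
    }
    all.push(...await Promise.all(batch)); // wait for this batch before starting the next
  }
  return all;
}

fetchAll(99999, 100).then(results => console.log(results.length));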

First of all, I highly suggest that you debug your code to see exactly what happens with the for loop.
It's very likely that the loop runs to completion before its contents (the getData function) execute, because fetching data with a request is asynchronous. If you tried using the IIFE pattern, you'd at least have wrapped the contents of your for loop in a closure so that each iteration keeps its own value of i. An example implementation looks like this:
for (let i = 0; i < 99999; i++) {
  (function(i) { // <-- IIFE used to create a closure over i.
    getData(i, function(err, result) {
      if (err) return err;
      else {
        console.log(result);
      }
    });
  })(i);
}
I'll leave the subject of promises to mika, whose answer you can read above; just keep the promise anti-patterns in mind and you'll be fine. They are especially tricky if you are not used to promises and want to use them in a for loop.

Related

Can this recursive code be written as an iterative code using while/for loop in NodeJS/Javascript

I've written a piece of code which takes two arguments: the first is a URL and the second is an integer for how many times the URL must be downloaded (I know there is no point downloading the same URL again and again, but this code is just a sample; in the actual code the URL is picked randomly from a database table). As of now the code is written as a recursive function. Here is what my current code looks like:
const request = require("request");

function downloadUrl(url, numTimes) {
  if (numTimes > 0) {
    console.log(url, numTimes);
    request.get(url, function (err, resp, buffer) {
      if (err) {
        return err;
      }
      console.log(`MimeType: ${resp.headers['content-type']}, Size: ${buffer.length}, numTimes: ${numTimes}`);
      downloadUrl(url, --numTimes);
    });
  }
}

function main() {
  downloadUrl('http://somerandomurl', 5); // the URL here might get picked randomly from an array or a table
}

main();
What I want to know is, can this recursive code be written as iterative code using a while or a for loop? I've tried writing the following code,
function downloadUrl(url, numTimes) {
  for (let i = 0; i < numTimes; i++) {
    request.get(url, function (err, resp, buffer) {
      if (err) {
        return err;
      }
      console.log(`MimeType: ${resp.headers['content-type']}, Size: ${buffer.length}, numTimes: ${numTimes}`);
    });
  }
}
But this code seems to get executed in parallel, which obviously it will, because in Node.js asynchronous code doesn't wait for a statement to complete before proceeding to the next one, unlike a language such as Java.
My question is: is there a way I can write iterative code that behaves exactly like my recursive code? My recursive code executes sequentially, where the numTimes variable is decremented by one and gets printed sequentially from 5 to 1.
I've tried my best to keep my question clear, but in case something is not clear or confusing, please feel free to ask.
I guess that you want each HTTP request to finish before making the next one (correct me if I'm wrong); you can use await in your method.
const request = require('request');

async function downloadUrl(url, numTimes) {
  for (let i = 0; i < numTimes; i++) {
    const objToResolve = await doDownload(url);
    if (objToResolve.err) {
      console.log(`Error: ${objToResolve.err}, try: ${i}`);
    } else {
      console.log(`Size: ${objToResolve.buffer.length}, try: ${i}`);
    }
  }
}

// wrap a request in a promise
function doDownload(url) {
  return new Promise((resolve, reject) => {
    request(url, (err, resp, buffer) => {
      if (err) {
        reject({err});
      } else {
        resolve({err, resp, buffer});
      }
    });
  });
}

// now to program the "usual" way
// all you need to do is use async functions and await
// for functions returning promises
function main() {
  console.log('main called');
  downloadUrl('http://www.macoratti.net/11/05/c_aspn3c.htm', 5);
}

main();
EDIT:
By adding a timeout you can handle your requests better:
const request = require('request');

async function downloadUrl(url, numTimes) {
  for (let i = 0; i < numTimes; i++) {
    try {
      const objToResolve = await doDownload(url);
      if (objToResolve.err) {
        console.log(`Error: ${objToResolve.err}, try: ${i}`);
      } else {
        console.log(`Size: ${objToResolve.buffer.length}, try: ${i}`);
      }
    } catch (timeout) {
      console.log(`Error: ${timeout}, try: ${i}`);
    }
  }
}

// wrap a request in a promise and race it against a timeout
function doDownload(url) {
  const timeout = new Promise((resolve, reject) => {
    setTimeout(() => {
      reject(new Error('timeout'));
    }, 300);
  });
  const requestPromise = new Promise((resolve, reject) => {
    request({uri: url, timeout: 3000}, (err, resp, buffer) => {
      if (err) {
        reject({err});
      } else {
        resolve({err, resp, buffer});
      }
    });
  });
  return Promise.race([timeout, requestPromise]);
}

// now to program the "usual" way
// all you need to do is use async functions and await
// for functions returning promises
function main() {
  console.log('main called');
  downloadUrl('http://www.macoratti.net/11/05/c_aspn3c.htm', 5);
}

// run your async function
main();
Reference: Synchronous Requests in Node.js
Every recursive piece of code can be transformed into a non-recursive one :) So what is the recursive magic? It just uses the call stack as a store for partial results. In fact you can build your own stack, and JavaScript makes this very easy.
You can use an array to store your partial results:
Use shift() to remove the first item of an array.
Use pop() to remove the last element of an array.
Use push() to add to the end of an array.
Use unshift() to add to the beginning of an array.
Use splice() to add elements within an array.
With those it is very simple to build your own "URL" stack;
push and pop will be your best friends.
Instead of recursing, just push the URL onto the array as long as you cannot download it yet;
once you can download it, pop the URL from the array.
The length of the array gives you the stack counter at any time.
The job is done when your array has a length of 0 :)
So in simple words: if you recognize that the "mess" to clean up gets deeper, push it onto the array; and if you can remove some "mess", do that small job and pop it from the array.
That's nothing other than what recursion does, but without needing to bother the OS or the interpreter. In the good old days such call stacks were very limited, so building your own stack breaks those limits. It can also be much more memory-efficient, because you only store what is really needed.
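A minimal sketch of that idea, assuming the same callback-style request module as in the question: the URLs sit in a plain array, and the next one is only popped off once the current download has finished.
// Sketch only: an explicit array acts as the "own stack" instead of the call stack.
const request = require("request");

function downloadAll(url, numTimes, done) {
  const stack = [];
  for (let i = 0; i < numTimes; i++) stack.push(url); // fill the stack

  function next() {
    if (stack.length === 0) return done(); // stack empty: job finished
    const current = stack.pop();
    request.get(current, function (err, resp, buffer) {
      if (err) return done(err);
      console.log(`Size: ${buffer.length}, remaining: ${stack.length}`);
      next(); // only start the next download once this one has finished
    });
  }
  next();
}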
I get what you're asking for - I think you're looking for a generator. Basically you just want a controlled loop where you don't iterate to the next item until the first one has totally finished doing its business.
I mean, behind the scenes it basically is still just a recursive-ish function - it just wraps things up to act like a sequential, controlled loop.
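A hedged sketch of that generator idea: a tiny runner drives the generator and only resumes it after each yielded promise settles (doDownload here is assumed to be the promise wrapper from the earlier answer).
// Sketch only: run a generator step by step, waiting on each yielded promise.
function run(genFn) {
  const gen = genFn();
  function step(prev) {
    const { value, done } = gen.next(prev);
    if (done) return;
    Promise.resolve(value).then(step, err => gen.throw(err)); // forward errors into the generator
  }
  step();
}

run(function* () {
  for (let i = 0; i < 5; i++) {
    const { buffer } = yield doDownload('http://somerandomurl'); // sequential, one at a time
    console.log(`Size: ${buffer.length}, try: ${i}`);
  }
});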

Return value from a mongodb query from nodejs

EDIT
OK, I read here: "You can't usefully return with asynchronous functions. You'll have to work with the result within the callback. This is due to the nature of asynchronous programming: exit immediately, setting up a callback function to be called sometime in the future. And, at least with the current standard of ECMAScript 5, you can't get around this. As JavaScript is single-threaded, any attempt to wait for the callback will only lock up the single thread, keeping the callback, and the return it would provide, forever pending in the event queue."
Is this still the case today?
ORIGINAL QUESTION
I have a problem accessing my variable outside the function in my node.js application.
const url = "mongodb://localhost:27017/";

getAllSampleTypes();
// I would like to have the variable "requested" accessible here

function getAllSampleTypes() {
  MongoClient.connect(url, function (err, db) {
    var dbo = db.db("myDb");
    dbo.collection("data").distinct("sample_type", {}, function (err, requested) {
      // variable "requested" is accessible here
    });
  });
}
I tried with async/await but I still have the same problem.
function getTypes() {
  MongoClient.connect(url, async function (err, db) {
    let dbo = db.db("myDb");
    return await dbo.collection("data").distinct("sample_type", {});
  });
}

console.log(getTypes()); // Promise { undefined }
I don't think you are going to be able to achieve what you are looking for. Async/await only works once you are inside the scope of an async function. Your top-level calls are not inside an async function, so you are forced to handle the returned Promise or use a callback.
e.g. getAllSampleTypes().then(function(response){});
Here are a couple of samples that are similar to what you want, but either way, the top level call into an async function will have to handle the response as a Promise.
const url = "mongodb://localhost:27017/";

getAllSampleTypes().then(function(sample_types) {
  // Do something here.
});

async function getAllSampleTypes() {
  var db = await mongo.connect(url);
  var dbo = db.db("myDb");
  return await dbo.collection("data").distinct("sample_type", {});
}
It's important to understand that async/await really isn't anything magical; behind the scenes it's translated to Promises. That's why your top-level call into an async function can handle the response with a .then(). It's just much cleaner to read. The code above would roughly get translated and executed as:
const url = "mongodb://localhost:27017/";

getAllSampleTypes().then(function(sample_types) {
  // Do something here.
});

function getAllSampleTypes() {
  return new Promise(function(resolve, reject) {
    mongo.connect(url).then(function(db) {
      var dbo = db.db("myDb");
      dbo.collection("data").distinct("sample_type", {}).then(function(results) {
        resolve(results);
      });
    });
  });
}
getTypes doesn't return anything. You've gotta pass it up.
If you're gonna use async/await, try something like:
async function getTypes() {
  const db = await MongoClient.connect(url);
  const dbo = db.db("myDb");
  return await dbo.collection("data").distinct("sample_type", {});
}

console.log(await getTypes()); // note: `await` here only works inside another async function (or an ES module)
These might be helpful:
How can I use async-await with mongoclient and how-to-use-mongodb-with-promises-in-node-js
Also, you should probably close the connection with db.close() somewhere
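For instance, a minimal sketch (reusing the mongo/url names from above) that closes the connection in a finally block so it is released whether or not the query succeeds:
async function getAllSampleTypes() {
  const db = await mongo.connect(url);
  try {
    const dbo = db.db("myDb");
    return await dbo.collection("data").distinct("sample_type", {});
  } finally {
    db.close(); // release the connection in both the success and error paths
  }
}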

How to assign a variable in callback function in a callback function in javascript

So I have found this question, which seems pretty similar, but I do not understand the answer at all; I tried to implement it but I do not recognize the patterns of the answer in my code: similar question.
Now here is my problem, I have this piece of code :
var fs = require('fs');
var index = JSON.parse(fs.readFileSync('../data/7XXX7/index.json', 'utf8'));
window = {};
var indicators = require('./indicators');
var parser = new window.patient.Indicator('tes', 'test');
var i = 0;

function create_indicators() {
  var result = [];
  fs.readdirSync('../data/7XXX7/files/').forEach(file => {
    fs.readFile('../data/7XXX7/files/' + file, 'utf8', function (err, data) {
      if (err)
        throw err;
      let $ = {};
      $.poids = parser.poids(data);
      $.taille = parser.taille(data);
      $.temperature = parser.temperature(data);
      $.tension = parser.tension(data);
      $.pouls = parser.pouls(data);
      $.ps = parser.ps(data);
      $.saturation = parser.saturation(data);
      for (var j in index.files) {
        if (index.files[j].name === file) {
          $.id = index.files[j].name;
          $.date = index.files[j].date;
          $.name = index.files[j].IntituleSession;
          break;
        }
      }
      if ($.poids || $.taille || $.temperature || $.tension || $.pouls || $.ps || $.saturation) {
        result.push($);
        console.log(result); // print the actual state of result
        // console.log(i); prints 0 then 1 then ...
        i++;
      }
    });
    console.log(i); // prints 0
  });
  console.log(result); // prints []
  return result;
}

let result = create_indicators();
console.log(result); // prints []
And it displays:
[]
Why does the callback function in readFile have its own variables? Is it because it's asynchronous? But when I use readFileSync it doesn't work either.
How do I make result get all the values I push into it? When I console.log result right after result.push($); it works, so it's not my parser; i is also properly incremented each time.
Your code doesn't wait for the files to get read and have the result pushed to result before moving on. Where you're doing asynchronous operations on items in an array, I would recommend using promises and using Promise.all() to wait for each file to get read and processed before you try using the result. You could do something like this:
function create_indicators() {
  const result = fs.readdirSync('../data/7XXX7/files/').map(file =>
    new Promise((resolve, reject) => {
      fs.readFile('../data/7XXX7/files/' + file, 'utf8', (err, data) => {
        if (err) return reject(err);
        // do whatever
        if ($.poids || /* ... */ $.saturation) {
          // ...
          resolve($); // instead of `result.push($);`
        } else {
          resolve(); // can't reject here, or `Promise.all()` would fail
        }
      });
    }));
  return Promise.all(result).then(items => items.filter(item => item));
}

create_indicators().then(indicators => {
  // do something with your list of indicators
}).catch(err => {
  // handle error
});
It creates a promise for each file in your directory that resolves when the file has been processed. It resolves with the item if there is one or nothing if your condition is not met, rejecting if there's an error (promise equivalent to throw). Since you only want the items that meet your condition, you can then do a filter on the result of Promise.all() to get rid of any undefined in the array (you could also get rid of the condition checking in the fs.readFile callback and do it instead in the filter if you'd like). This returns a promise that resolves with your filtered list.
Here's your problem:
fs.readFileSync('../data/7XXX7/files/' + file, 'utf8', function (err, data) {
The readFileSync doesn't take a callback as an argument. It returns the data or raises an exception. It is synchronous (as the "Sync" in the name suggests) and you're using it as if it was asynchronous.
See the docs:
https://nodejs.org/api/fs.html
readFileSync doesn't take a callback; it is synchronous.
Use fs.readdir to get the list of files you want to read. See How do you get a list of the names of all files present in a directory in Node.js?
You need to understand how callbacks work.
readFileSync doesn't take a callback. It might be helpful to explain how callbacks work in the asynchronous fs.readFile and fs.readdir.
When you are doing an asynchronous operation, because you don't know when it is going to finish, you pass a function (the callback) as a parameter, and it runs at the end of the operation.
fs.readFile('/etc/passwd', function (err, data) {
  if (err) throw err;
  console.log(data);
});
fs.readFile in the above code will run the function (err, data) when it finishes executing and pass in the data as the second parameter. If an error occurs, it will pass in the error as the first parameter.
Your function can also take a callback defining what to do when the parsing is over. The callback will need to take an error and a result (if you need the error).
Read:
http://fredkschott.com/post/2014/03/understanding-error-first-callbacks-in-node-js/
So your create_indicators function should take a callback function.
fs = require("fs");

function create_indicators(folderPath, callback) {
  let result = [];
  fs.readdir(folderPath, (err, files) => {
    if (err)
      callback(err, null); // pass the error to callback if there is any
    else {
      files.forEach((file, index, filesArray) => {
        fs.readFile(folderPath + '/' + file, (err, data) => {
          if (err)
            callback(err, null); // pass the error to callback if there is any
          else {
            // .....parse....
            result.push(data);
            // pass data to callback function when it is the last result
            if (result.length == filesArray.length)
              callback(null, result);
          }
        });
      });
    }
  });
}
When you call it, pass in what you want to do with the result and error as a function.
create_indicators(".", function(err, result) {
  if (err)
    console.error("Got error:", err);
  else
    console.log("Got result:", result);
  // do what you want with the final result
});
Once you've got the callback working, look into Promises, which will make this procedure cleaner and easier. Read: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
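As a rough sketch of that cleaner direction (assuming Node 8+ for util.promisify), the same directory walk can be expressed with promises and Promise.all:
const fs = require('fs');
const util = require('util');
const readdir = util.promisify(fs.readdir);   // turn error-first callbacks
const readFile = util.promisify(fs.readFile); // into promise-returning functions

async function create_indicators(folderPath) {
  const files = await readdir(folderPath);
  // read every file in parallel and resolve with the array of contents
  return Promise.all(files.map(file => readFile(folderPath + '/' + file, 'utf8')));
}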

create a json object with responses from different async functions

My goal is to create a JSON object from a paragraph of text that I can then insert as a document into MongoDB. I'm using Node.js and wanted to go for the async approach.
My JSON has parameters like so:
{
  height: height,
  weight: weight
}
My logic is this:
create a module with async functions that parse the text and extract weight and height using regex.
but then how would I combine all the responses from these functions into one JSON that I can insert at once?
I'm thinking something like this
var get_height = require().height;
var get_weight = require().weight;

exports.contr = function() {
  var height,
      weight;
  get_height(text, function(err, res) {
    if (err)
      throw(err);
    height = res;
  });
  get_weight(text, function(err, res) {
    if (err)
      throw(err);
    weight = res;
  });
  // All other async functions
  combine_json(height, weight, ... , function(err, res) {
    if (err)
      throw(err);
    console.log(res); // the json was successfully inserted into mongoDB
  });
};
I find async confusing, and in the above example I'm not sure about two things:
wouldn't combine_json run without waiting for the data from the previous two functions (weight, height)?
what is the best practice to handle such cases? Should I just use sync functions and wait top-to-bottom for each one to do its thing and then run the final one, or can I leverage async?
The simplest way to wait for the results of two independent asynchronous functions is to use promises and Promise.all. For this we'll assume get_height and get_weight return a Promise and can be used as such:
get_height().then(function (height) { console.log(height); });
Then it's trivial to combine two of those promises:
Promise.all([get_height(), get_weight()]).then(function (results) {
combine_json(results[0], results[1]);
});
See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise for documentation and details.
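If get_height and get_weight are callback-style as in the question, a thin wrapper (sketch only) can turn them into promise-returning functions first:
function promisify(fn) {
  return text => new Promise((resolve, reject) => {
    fn(text, (err, res) => err ? reject(err) : resolve(res)); // error-first callback -> promise
  });
}

var get_height_p = promisify(get_height);
var get_weight_p = promisify(get_weight);

Promise.all([get_height_p(text), get_weight_p(text)]).then(function (results) {
  combine_json(results[0], results[1]);
});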
If you don't know anything about Promises, you should first understand how callbacks work. If you don't want the elegant Promise.all() solution and just want your code working, you need nested functions: inside the get_height callback you call get_weight, and inside the get_weight callback you call combine_json(). The only drawback is that get_weight has to wait for get_height; that is what Promise.all() solves, as mentioned above.
get_height(text, function(err, height) {
  if (err)
    throw(err);
  get_weight(text, function(err, weight) {
    if (err)
      throw(err);
    // All other async functions
    combine_json(height, weight, ... , function(err, res) {
      if (err)
        throw(err);
      console.log(res); // the json was successfully inserted into mongoDB
    });
  });
});
Promises are your best bet, but if you don't want to use them for some reason and prefer the callback style, then
function get_height_and_weight(text, callback) {
  var have_height = false;
  var have_weight = false;
  var result = {};
  get_height(text, function(err, height) {
    if (err) return callback(err);
    have_height = true;
    result.height = height;
    if (have_weight) callback(null, result);
  });
  get_weight(text, function(err, weight) {
    if (err) return callback(err);
    have_weight = true;
    result.weight = weight;
    if (have_height) callback(null, result);
  });
}
This is a special case of the parallel async call case which could be handled better by async.parallel.
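A brief sketch of that async.parallel variant (assuming the "async" npm package), which runs both tasks concurrently and hands a combined results object to a single callback:
const async = require('async');

function get_height_and_weight(text, callback) {
  async.parallel({
    height: cb => get_height(text, cb), // each task gets its own error-first callback
    weight: cb => get_weight(text, cb)
  }, callback); // called as callback(err, { height, weight })
}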

Lodash: _.forEach with function

I am trying to use the lodash forEach method with a nested function that calls a mongo database.
var jobs = [];
_.forEach(ids, function(id) {
  JobRequest.findByJobId(id, function(err, result) {
    if (err) callback(err);
    jobs.push(result);
  });
});
callback(null, jobs);
I am having problems because the forEach loop and the final callback run through before the inner callbacks are ever called. How can I resolve this?
I want the callback to be called after the forEach and the inner functions have completed.
One more approach is to wrap everything into promises; in this case the job results will be pushed into the array in the correct order:
var promises = ids.map(function(id) {
  return new Promise(function(resolve, reject) {
    JobRequest.findByJobId(id, function (err, result) {
      if (err) reject(err);
      resolve(result);
    });
  });
});

Promise.all(promises).then(function(jobs) {
  callback(null, jobs);
}, callback);
// or shorter: Promise.all(promises).then(callback.bind(null, null), callback);
Note that you also need to handle the potential situation where a JobRequest.findByJobId request fails; with promises that's very easy: just pass callback as the error callback to Promise.all.
JobRequest.findByJobId is an asynchronous operation. You cannot block while waiting for asynchronous operations in JavaScript, so you'll need to synchronize manually by counting. Example (error handling omitted for the sake of brevity):
var results = [];
var pendingJobCount = ids.length;

_.forEach(ids, function(id) {
  JobRequest.findByJobId(id, function(err, result) {
    results.push(result);
    if (--pendingJobCount === 0) callback(null, results);
  });
});
There are, of course, wrapper constructs for doing stuff like this, but I prefer to explain how it actually works. Check out dfsq's answer for more details on one of those wrappers, called promises.
Also note that asynchronous operations may complete out of order. The order in the results array will not necessarily match the order of the ids array. If you need that information connected, you'll need to track it yourself, for example by collecting the results in a map instead of an array:
var results = {};
var pendingJobCount = ids.length;

_.forEach(ids, function(id) {
  JobRequest.findByJobId(id, function(err, result) {
    results[id] = result;
    if (--pendingJobCount === 0) callback(null, results);
  });
});
This example assumes that there are no duplicates in your ids array. Results for duplicate keys would be overridden.
Error handling would work similarly, by inserting additional information into your result. Another example:
results.push({id: id, error: null, value: result});
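For example, a small sketch of the counting approach with per-job error capture, where each entry records its id and either an error or a value:
var results = [];
var pendingJobCount = ids.length;

_.forEach(ids, function(id) {
  JobRequest.findByJobId(id, function(err, result) {
    // keep the id with each entry so results can be matched back to inputs
    results.push({ id: id, error: err || null, value: err ? null : result });
    if (--pendingJobCount === 0) callback(null, results);
  });
});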
