Maybe this is a general issue, and I need a solution for my case: because of the non-blocking nature of JavaScript, I can't figure out how to execute my function for every iteration of a for loop. Here is my example:
var text_list = [];
for (var i = 0; i < 10; i++) {
    tesseract.process("img" + i + ".jpg", options, function (err, text) {
        if (err) {
            return console.log("An error occurred: ", err);
        }
        text_list.push(text);
    });
}
console.log(text_list);
And the result is as if I did:
tesseract.process("img"+9+".jpg"...
tesseract.process("img"+9+".jpg"...
tesseract.process("img"+9+".jpg"...
.
.
.
and what I need is:
tesseract.process("img"+0+".jpg"...
tesseract.process("img"+1+".jpg"...
tesseract.process("img"+2+".jpg"...
.
.
.
Your question does not really explain what result you are getting, and your code looks like it's missing some parts. So all I can really do here is explain generically (using your code where possible) how to solve this class of problem.
If you are ending up with a lot of results that all reference the last value of i in your loop, then you are probably referencing i in an async callback; because the callback is called some time later, the for loop has already finished long before the callback executes, so your value of i is sitting on the last value it had in the loop. But your question doesn't actually show code that does that, so this is just a guess based on the limited result you describe. To solve that type of issue, you have to make sure you're separately keeping track of i for each iteration of the loop. There are many ways to do that. In ES6, using let in the for loop definition will solve that entire issue for you. You can also construct a closure, use .forEach(), etc.
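For illustration, here is a minimal sketch of the pre-ES6 closure approach just mentioned (an IIFE that captures i per iteration), assuming the same tesseract.process callback API as in the question:

var text_list = [];
for (var i = 0; i < 10; i++) {
    // the IIFE gives each iteration its own copy of i (named index here)
    (function (index) {
        tesseract.process("img" + index + ".jpg", options, function (err, text) {
            if (err) {
                return console.log("An error occurred: ", err);
            }
            text_list[index] = text;   // index stays fixed for this callback
        });
    })(i);
}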
Async operations with a loop require extra work and coding to deal with. The modern solution is to convert your async operations to use promises and then use features such as Promise.all() to both tell you when all the async operations are done and to keep the results in order for you.
You can also code it manually without promises. Here's a manual version:
const len = 10;
let text_list = new Array(len);
let doneCnt = 0;
let errFlag = false;

// using let here so each invocation of the loop gets its own value of i
for (let i = 0; i < len; i++) {
    tesseract.process("img" + i + ".jpg", options, function (err, text) {
        if (err) {
            console.log("An error occurred: ", err);
            // make sure err is wrapped in an Error object
            // so you can tell errors in the text_list array from values
            if (!(err instanceof Error)) {
                err = new Error(err);
            }
            text_list[i] = err;
            errFlag = true;
        } else {
            text_list[i] = text;
        }
        // see if we're done with all the requests
        if (++doneCnt === len) {
            if (errFlag) {
                // deal with the situation where there were some errors
            } else {
                // put code here to process the finished text_list array
            }
        }
    });
}
// you can't process results here because the async operations are not
// done yet when this code runs
Or, using promises, you can make a "promisified" version of tesseract.process() and then use promise functionality to track multiple async operations:
// make a promisified version of tesseract.process()
tesseract.processP = function (img, options) {
    return new Promise(function (resolve, reject) {
        tesseract.process(img, options, function (err, text) {
            if (err) {
                reject(err);
            } else {
                resolve(text);
            }
        });
    });
};

const len = 10;
let promises = [];
for (let i = 0; i < len; i++) {
    promises.push(tesseract.processP("img" + i + ".jpg", options));
}
Promise.all(promises).then(function (results) {
    // process the results array here (in order)
}).catch(function (err) {
    // handle error here
});
Related
I've written a piece of code that takes two arguments: the first is a URL, and the second is an integer for how many times the URL must be downloaded (I know there is no point downloading the same URL again and again, but this code is just a sample; in the actual code the URL is picked randomly from a database table). As of now the code is written as a recursive function. Here is what my current code looks like:
const request = require("request");

function downloadUrl(url, numTimes) {
    if (numTimes > 0) {
        console.log(url, numTimes);
        request.get(url, function (err, resp, buffer) {
            if (err) {
                return err;
            }
            console.log(`MimeType: ${resp.headers['content-type']}, Size: ${buffer.length}, numTimes: ${numTimes}`);
            downloadUrl(url, --numTimes);
        });
    }
}

function main() {
    downloadUrl('http://somerandomurl', 5); // the URL here might get picked randomly from an array or a table
}

main();
What I want to know is: can this recursive code be written as iterative code using a while or a for loop? I've tried writing the following code:
function downloadUrl(url, numTimes) {
    for (let i = 0; i < numTimes; i++) {
        request.get(url, function (err, resp, buffer) {
            if (err) {
                return err;
            }
            console.log(`MimeType: ${resp.headers['content-type']}, Size: ${buffer.length}, numTimes: ${numTimes}`);
        });
    }
}
But this code seems to execute in parallel, which it obviously will, because in Node.js async code doesn't wait for a statement to complete before proceeding to the next one, unlike a language such as Java.
My question is: is there a way I can write iterative code that behaves exactly like my recursive code? The recursive version executes sequentially, decrementing the numTimes variable by one so it gets printed sequentially from 5 down to 1.
I've tried my best to keep my question clear but in case something is not clear or confusing, please feel free to ask.
I guess you want each HTTP request to finish before making another one; correct me if I'm wrong, but you can use await in your method.
const request = require('request');

async function downloadUrl(url, numTimes) {
    for (let i = 0; i < numTimes; i++) {
        const objToResolve = await doDownload(url);
        if (objToResolve.err) {
            console.log(`Error: ${objToResolve.err}, try: ${i}`);
        } else {
            console.log(`Size: ${objToResolve.buffer.length}, try: ${i}`);
        }
    }
}

// wrap a request in a promise
function doDownload(url) {
    return new Promise((resolve, reject) => {
        request(url, (err, resp, buffer) => {
            if (err) {
                reject({err});
            } else {
                resolve({err, resp, buffer});
            }
        });
    });
}

// now you can program the "usual" way:
// all you need to do is use async functions and await
// for functions returning promises
function main() {
    console.log('main called');
    downloadUrl('http://www.macoratti.net/11/05/c_aspn3c.htm', 5);
}

main();
EDIT:
By adding a timeout you can handle your requests better:
const request = require('request');

async function downloadUrl(url, numTimes) {
    for (let i = 0; i < numTimes; i++) {
        try {
            const objToResolve = await doDownload(url);
            if (objToResolve.err) {
                console.log(`Error: ${objToResolve.err}, try: ${i}`);
            } else {
                console.log(`Size: ${objToResolve.buffer.length}, try: ${i}`);
            }
        } catch (timeout) {
            console.log(`Error: ${timeout}, try: ${i}`);
        }
    }
}

// wrap a request in a promise
function doDownload(url) {
    const timeout = new Promise((resolve, reject) => {
        setTimeout(() => {
            reject(new Error('timeout'));
        }, 300);
    });
    const requestPromise = new Promise((resolve, reject) => {
        request({uri: url, timeout: 3000}, (err, resp, buffer) => {
            if (err) {
                reject({err});
            } else {
                resolve({err, resp, buffer});
            }
        });
    });
    return Promise.race([timeout, requestPromise]);
}

// now you can program the "usual" way:
// all you need to do is use async functions and await
// for functions returning promises
function main() {
    console.log('main called');
    downloadUrl('http://www.macoratti.net/11/05/c_aspn3c.htm', 5);
}

// run your async function
main();
Reference: Synchronous Requests in Node.js
Every recursive piece of code can be transformed into a non-recursive one :) So what does the recursion magic actually do? It just abuses the call stack as a store for partial results. In fact you can build your own stack, and JavaScript makes this very easy.
You can use an array to store your partial results:
Using shift() to remove the first item of an array
Using pop() to remove the last element of an array
Using push() to add to the end of an array
Using unshift() to add to the beginning of an array
Using splice() to add elements within an array
So with those it's very simple to build your own "url" stack.
push and pop will be your best friends.
Instead of recursing, just push the url onto the array as long as you cannot download it yet;
if you can download it, pop the url from the array.
The length of the array gives you the stack counter at any time.
The job is done once your array has a length of 0 :)
So in simple words: if you recognize that the "mess" to clean up becomes deeper, push it to the array; and whenever you can remove some "mess", do that tiny job and pop it from the array.
That's nothing else than what the recursion does, but without the need to annoy the OS or the interpreter. In the good old days such call stacks were very limited, so building your own stack breaks those limits. It can also be much more memory efficient, because you only store what's really needed.
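A rough sketch of this idea, using the request module from the question; the stack array replaces the call stack, and the next() helper name is purely illustrative:

const request = require("request");

function downloadUrl(url, numTimes) {
    const stack = [];                       // our own stack instead of the call stack
    for (let i = 0; i < numTimes; i++) {
        stack.push(url);                    // one entry per pending download
    }
    (function next() {
        if (stack.length === 0) {           // length 0 => the job is done
            return console.log('All downloads finished');
        }
        const current = stack.pop();        // take the next piece of "mess"
        request.get(current, function (err, resp, buffer) {
            if (err) {
                console.log(err);
            } else {
                console.log(`Size: ${buffer.length}, remaining: ${stack.length}`);
            }
            next();                         // only now start the next download
        });
    })();
}

downloadUrl('http://somerandomurl', 5);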
I get what you're asking for - I think you're looking for a generator. Basically you just want a controlled loop where you don't iterate to the next item until the first is totally complete doing its business.
I mean behind the scenes it basically is still just a recursive-ish function - it just wraps it up to act like a sequential, controlled loop.
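Since the answer above stays abstract, here is a rough sketch of what a generator-driven sequential loop can look like with the callback-style request module from the question; the run and download helpers are illustrative, not part of any library, and error handling is kept minimal:

const request = require("request");

// a tiny runner: it resumes the generator only after each request finishes
function run(genFn) {
    const gen = genFn();
    (function step(err, result) {
        // an error is thrown into the generator; a real runner would handle it more carefully
        const next = err ? gen.throw(err) : gen.next(result);
        if (next.done) return;
        // each yielded value is a "thunk": a function taking a node-style callback
        next.value(step);
    })();
}

// wrap one download as a thunk
function download(url) {
    return cb => request.get(url, (err, resp, buffer) => cb(err, buffer));
}

run(function* () {
    for (let i = 0; i < 5; i++) {
        const buffer = yield download('http://somerandomurl');
        console.log(`try: ${i}, size: ${buffer.length}`);
    }
});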
Actually I'm not sure that the title of my question is 'correct'; if you have a better idea, you could leave a comment and I'll rename it.
I am trying to rewrite my old function which makes HTTP requests and inserts many objects into MongoDB via Mongoose. I already have a working version of it, but I face a problem while using it: when I try to insertMany 20 arrays from 20+ requests, with ~50,000 elements per request, it causes a huge memory leak, even with MongoDB optimization.
Logic of my code:
function main() {
    server.find({locale: "en_GB"}).exec(function (err, server) {
        for (let i = 0; i < server.length; i++) { // for example 20 servers
            rp({url: server[i].slug}).then(response => {
                auctions.count({
                    server: server[i].name,
                    lastModified: {$gte: response.data.files[0].lastModified}
                }).then(function (docs) {
                    if (docs < 0) {
                        // We don't insert data if it is already up-to-date
                    } else {
                        // I needed response.data.files[0].url and server[i].name from the prev. block
                        // And here is my problem:
                        // requests & insertMany and then => loop main()
                    }
                });
            }).catch(function (error) {
                console.log(error);
            });
        }
    });
}
main()
main()
Actually I have already tried many different things to fix it. First of all, I tried to add a setTimeout after the else block, like this:
setTimeout(function () {
    // request every server with an interval, instead of all at once
}, 1000 * (i + 1));
but I created another problem for myself, because I needed to call my main() function recursively right after. So I can't use if (i === server.length - 1) to call the garbage collector or to restart main(), because not all servers skip the count validation.
Or let's see another example of mine:
I changed for (let i = 0; i < server.length; i++) on the 3rd line to .map and moved it down next to the else block, but setTimeout doesn't work with the .map version and, as you may already understand, the script loses the correct order and I can't add a delay with it.
Actually I already understand how to fix it in principle: just re-create the array via let array_new = [] and array_new.push(response.data.files[0].url), with the use of async/await. But I'm not a big expert in it, so I have already wasted a couple of hours. So the only problem for now is that I don't know how to return values from the else block.
For now I'm trying to form an array inside the else block:
function main() {
--added let array_new = [];
[v1]array_new.url += response.data.files[0].url;
[v2]array_new.push(response.data.files[0].url);
return array_new
and then consume the array_new array via .then, but neither of these works for now. So maybe someone can give me a tip or point me to an already answered Stack Overflow question that could be useful in my situation.
Since you are essentially dealing with promises, you can refactor your function logic to use async/await as follows:
async function main() {
    try {
        const servers = await server.find({locale: "en_GB"}).exec()
        const data = await Promise.all(servers.map(async ({ name, slug }) => {
            const response = await rp({ url: slug })
            const { lastModified, url } = response.data.files[0]
            const count = await auctions.count({
                server: name,
                lastModified: { $gte: lastModified }
            })
            let result = {}
            if (count > 0) result = { name, url }
            return result
        }))
        const toInsert = data.filter(d => Object.keys(d).length > 0)
        Model.insertMany(toInsert)
    } catch (err) {
        console.error(err)
    }
}
Your problem is logic obscured by your promises. Your main function recursively calls itself N times, where N is the number of servers. This builds up exponentially, eating memory both in the node process and in MongoDB, which has to handle all the requests.
Instead of jumping into async/await, start by using the promises you already have and wait for the batch of N queries to complete before starting another batch. You can use Promise.all for this.
function main() {
    server.find({locale: "en_GB"}).exec(function (err, server) {
        // need to keep track of each promise for each server
        let promises = []
        for (let i = 0; i < server.length; i++) {
            let promise = rp({
                url: server[i].slug
            }).then(function (response) {
                // instead of nesting promises, return the promise so it is handled by
                // the next then in the chain.
                return auctions.count({
                    server: server[i].name,
                    lastModified: {
                        $gte: response.data.files[0].lastModified
                    }
                });
            }).then(function (docs) {
                if (docs > 0) {
                    // do whatever you need to here regarding making requests and
                    // inserting into DB, but don't call main() here.
                    return requestAndInsert();
                }
            }).catch(function (error) {
                console.log(error);
            })
            // add the above promise to our list.
            promises.push(promise)
        }
        // register a new promise to run once all of the above promises generated
        // by the loop have been completed
        Promise.all(promises).then(function () {
            // now you can call main again, optionally in a setTimeout so it waits a
            // few seconds before fetching more data.
            setTimeout(main, 5000);
        })
    })
}
main()
I have a loop like this:
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    var docs = collection.find({ id: { "$in": temparray } }).toArray();
    docs.then(function (singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function (err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
}
The loop iterates two times. In the first iteration it gets 200 elements; in the second, it gets 130 elements. And when I open the .txt file, I see only 130 names. I guess that because of the async nature of Node.js, only the second part of the array is processed. What should I do to get all parts of the array processed? Thanks in advance.
EDIT: I finally turned the code into this:
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}
async.each(generalArr, function (item, callback) {
    var docs = collection.find({ id: { "$in": item } }).toArray();
    docs.then(function (singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function (err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
    callback(null);
})
When I change this line:
var docs = collection.find({ id: { "$in": item}}).toArray();
To this line:
var docs = collection.find({ id: { "$in": item}}).project({ name: 1 }).toArray();
it works; I'm able to print all names. I guess there is a memory problem when I try it without .project(). How can I make this work without using .project()? Should I change some memory limits? Thanks in advance.
I think your code is unnecessarily complicated, and appending to a file in a loop is very expensive compared to in-memory computation. A better way would be to write to the file just once.
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}

const queryPromises = [];
generalArr.forEach((item, index) => {
    queryPromises.push(collection.find({ id: { "$in": item } }).toArray());
});

let stringToWrite = '';
Promise.all(queryPromises).then((result) => {
    result.forEach((item) => {
        item.forEach((element) => {
            // create a single string which you want to write
            stringToWrite = stringToWrite + "\n" + element.name;
        });
    });
    fs.appendFile("C:/Users/x/Desktop/names.txt", stringToWrite, function (err) {
        if (err) {
            return console.log(err);
        } else {
            // call your callback or return
        }
    });
});
In the code above, I do the following:
Wait for all the DB queries to finish
Iterate over the results and build the single string we need to write to the file
Write to the file
Once you go asynchronous you cannot go back - all your code needs to be asynchronous. In Node 8 you handle this with the async and await keywords. In older versions you can use Promise - async/await is just syntactic sugar for it anyway.
However, most of the APIs in Node are older than Promise, so they use callbacks instead. There is a promisify function to convert callback-style functions into promise-returning ones.
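As a minimal sketch of that, Node 8+ ships util.promisify, which turns a node-style callback API into one that returns a promise (shown here with fs.appendFile, which this answer uses later; the appendFileP name is just illustrative):

const util = require('util');
const fs = require('fs');

// appendFileP(path, data) now returns a promise instead of taking a callback
const appendFileP = util.promisify(fs.appendFile);

appendFileP("C:/Users/x/Desktop/names.txt", "hello\n")
    .then(() => console.log('written'))
    .catch(err => console.error(err));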
There are two ways to handle this, you can let all the asynchronous actions happen at the same time, or you can chain them one after another (which preserves order but takes longer).
So, collection.find is asynchronous: it either takes a callback function or returns a Promise. I'm going to assume the API you're using does the latter, but your problem could be the former (in which case, look up promisify).
var findPromise = collection.find({ id: { "$in": item}});
Now, at this point findPromise holds the running find action. We say this is a promise that resolves (completes successfully) or rejects (throws an error). We want to queue up an action to do once it completes, and we do that with then:
// The result of collection.find is the collection of matches
findPromise.then(function (docs) {
    // Any code we run here happens asynchronously
});

// Code here will run first
Inside the promise we can return further promises (allowing them to be chained - complete one async operation, then the next, then fire the final resolve once all are done) or use Promise.all to let them all happen in parallel and resolve once done:
var p = new Promise(function (resolve, reject) {
    var findPromise = collection.find({ id: { "$in": item } });
    findPromise.then(function (docs) {
        var singleDocNames = [];
        for (var i = 0; i < docs.length; i++) {
            var singleDoc = docs[i];
            if (!singleDoc)
                continue;
            singleDocNames.push(singleDoc.name);
        }
        // Resolve the outer promise with the final result
        resolve(singleDocNames);
    });
});

// When the promise finishes log it to the console
p.then(console.log);

// Code inline here will fire before the promise
This is much easier in node 8 with async/await:
async function p() {
    // Await puts the rest of this function in the .then() of the promise
    const docs = await collection.find({ id: { "$in": item } });
    const singleDocNames = [];
    for (var i = 0; i < docs.length; i++) {
        // ... synchronous code unchanged ...
    }
    // Resolve the outer promise with the final result
    return singleDocNames;
}

// async functions can be treated like promises
p().then(console.log);
If you need to write the results to a text file asynchronously there are a couple of ways to do it - you can wait until the end and write all of them, or chain a promise to write them after each find, though I find parallel IO operations tend to be at more risk of deadlocks.
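As a rough sketch of the "wait until the end" option, assuming a promisified appendFile such as the appendFileP shown earlier (an illustrative name, e.g. produced via util.promisify):

p().then(function (singleDocNames) {
    // one append at the end instead of one appendFile call per name
    return appendFileP("C:/Users/x/Desktop/names.txt", singleDocNames.join("\n") + "\n");
}).then(function () {
    console.log('All done');
}).catch(function (err) {
    console.log(err);
});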
The code above has multiple issues with asynchronous control flow. Similar code could work, but only if ES7 async/await operators were used for all the async operations.
Of course, you can easily achieve a solution with a promise sequence. Solution:
let flowPromise = Promise.resolve();
const chunk = 200;
for (let i = 0, j = document.mainarray.length; i < j; i += chunk) {
    flowPromise = flowPromise.then(() => {
        const temparray = document.mainarray.slice(i, i + chunk);
        const docs = collection.find({ id: { "$in": temparray } }).toArray();
        return docs.then((singleDoc) => {
            let innerFlowPromise = Promise.resolve();
            if (singleDoc) {
                console.log("single doc length : " + singleDoc.length);
                for (let t = 0, len = singleDoc.length; t < len; t++) {
                    innerFlowPromise = innerFlowPromise.then(() => new Promise((resolve, reject) =>
                        fs.appendFile(
                            "C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n",
                            err => (err ? reject(err) : resolve())
                        )
                    ));
                }
            }
            return innerFlowPromise;
        });
    });
}
flowPromise.then(() => {
    console.log('Done');
}).catch((err) => {
    console.log('Error: ', err);
})
When using async-style control flow based on Promises, always remember that loops and function call sequences will not pause execution until the async operation is done, so you have to wire up all the then sequences manually. Or use async/await syntax.
Which version of Node.js are you using? You should use the native async/await support that is built into newer versions of Node.js (no libraries required). Also note that fs.appendFile is asynchronous, so you need to either use something like promisify to transform the callback into a promise, or just use appendFileSync and suffer the blocking IO (which might be okay for you, depending on the use case).
(async function () {
    // ...
    for (var item of generalArr) {
        var singleDoc = await collection.find({ id: { "$in": item } }).toArray();
        // if (singleDoc) { ... } won't do anything here, since collection.find
        // will always return something, even if it's just an empty array
        console.log("single doc length : " + singleDoc.length);
        for (var t = 0, len = singleDoc.length; t < len; t++) {
            fs.appendFileSync("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n");
        }
    }
})();
var docs = collection.find({ id: { "$in": document.mainarray } }), // returns a cursor
    doc,
    names = [],
    toInsert;

function saveToFile(cb) {
    toInsert = names.splice(0, 100);
    if (!toInsert.length) return cb();
    fs.appendFile("C:/Users/x/Desktop/names.txt", toInsert.join("\n"), cb);
}

(function process() {
    if (docs.hasNext()) {
        doc = docs.next();
        doc.forEach(function (d) {
            names.push(d.name);
        });
        if (names.length === 100) {
            // save when we have 100 names in memory and clear the memory
            saveToFile(function (err) {
                process();
            });
        } else {
            process();
        }
    } else {
        saveToFile(function () {
            console.log('All done');
        });
    }
}()); // invoke the function
If you can't solve your issue using core modules and basic Node.js, there is most likely a lack of understanding of how things work or insufficient knowledge about a library (in this case the fs module).
Here is how you can solve your issue without third-party libraries and such.
'use strict';

const fs = require('fs');

let chunk = 200;
// How many rounds of array chunking we expect
let rounds = Math.ceil(mainArray.length / chunk);
// copy to temp (for the counter)
let tempRounds = rounds;
// set the file name
let filePath = './names.txt';
// Open a writable stream
let myFileStream = fs.createWriteStream(filePath);

// from round 0 to ${rounds}
for (let i = 0; i < rounds; i++) {
    // assume the array has ${chunk} elements left in this round
    let tempChunk = chunk;
    // if the last round would run past the end of mainArray,
    // adjust tempChunk to just "the leftovers"
    if (mainArray.length < (i + 1) * chunk) tempChunk = mainArray.length - i * chunk;
    // slice it for this round
    let tempArray = mainArray.slice(i * chunk, i * chunk + tempChunk);
    // get stuff from the DB
    let docs = collection.find({ id: { "$in": tempArray } }).toArray();
    docs.then(function (singleDoc) {
        // for each name in the doc
        for (let j = 0; j < singleDoc.length; j++) {
            // write to the stream
            myFileStream.write(singleDoc[j].name + "\n");
        }
        // declare this round done (reduce tempRounds) and check if it hits 0
        if (!--tempRounds) {
            // if all rounds are done, end the stream
            myFileStream.end();
            // BAM! you're done
            console.log("Done");
        }
    });
}
The key is to use fs.WriteStream (via fs.createWriteStream) :)
See the Node.js fs documentation for details.
I am trying to implement a for loop that iterates through a list and subsequently calls two functions, the second only if the first function's results are found.
The issue is that the second function (search.similar) might take longer to fetch its results.
With the code below, all of the output from search.locate is correct, but only the last element's results from myList are stored by the search.similar function,
i.e. all_results = [[cat_res1,mouse_res2],[dog_res1,mouse_res2],[mouse_res1,mouse_res2]]
How do I fix this to append the right results in the right order?
i.e. all_results = [[cat_res1,cat_res2],[dog_res1,dog_res2],[mouse_res1,mouse_res2]]
var search = require('./search');
var myList = ['cat', 'dog', 'mouse'];
var all_results = [];

for (i = 0; i < myList.length; i++) {
    /* locate function */
    search.locate(myList[i], function (err, searchResult) {
        if (err) {
            console.log("Error");
            return;
        }
        if (!searchResult) {
            console.log("Cannot find it");
            return;
        }
        /* similarity function */
        /* seems to take longer */
        search.similar(myList[i], function (err, similarResult) {
            if (err) {
                return;
            }
            if (!similarResult) {
                return;
            }
            var res1 = searchResult.data;
            var res2 = similarResult.data;
            /* append results to array */
            all_results.push([res1, res2]);
        });
    });
}
JavaScript can be thought of as asynchronous, in that the execution of particular functions does not necessarily happen synchronously; however, "describing JavaScript as asynchronous is perhaps misleading. It's more accurate to say that JavaScript is synchronous and single-threaded with various callback mechanisms."
In order to accomplish your goal (though you may still get some ordering issues with the top-level array), you will need to wrap your .similar() call in another function that takes both arguments, because your reference to the "item" in the top-level search is changing:
function searchNestedSimilar(item, topRes) {
    search.similar(item, function (err, similarResult) {
        if (err) {
            return;
        }
        if (!topRes) {
            return;
        }
        var res1 = topRes.data;
        var res2 = similarResult.data;
        // append results to array
        all_results.push([res1, res2]);
    });
}

function searchLocate(item) {
    search.locate(item, function (err, searchResult) {
        if (err) {
            console.log("Error");
            return;
        }
        if (!searchResult) {
            console.log("Cannot find it");
            return;
        }
        searchNestedSimilar(item, searchResult);
    });
}
I encapsulated both calls to keep it modular, but since "item" is in the closure, you really only need the searchLocate() function to capture your item reference during iteration.
This is a good case for Promises (see Bluebird JS, for example: http://bluebirdjs.com/docs/getting-started.html), or you could do it with async.map().
This page talks about it well, too. http://promise-nuggets.github.io/articles/14-map-in-parallel.html
There are many Stack Overflows discussing Promises as well. Understanding JS Promises for example.
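For completeness, here is a sketch of the async.map() route mentioned above (it assumes the async package is installed and the same search.locate/search.similar callback APIs as in the question); results come back in the same order as myList:

const async = require('async');

async.map(myList, function (item, done) {
    search.locate(item, function (err, searchResult) {
        if (err || !searchResult) return done(err || new Error("Cannot find it"));
        search.similar(item, function (err2, similarResult) {
            if (err2 || !similarResult) return done(err2 || new Error("No similar result"));
            // hand back one [res1, res2] pair per item
            done(null, [searchResult.data, similarResult.data]);
        });
    });
}, function (err, all_results) {
    if (err) return console.log("Error", err);
    // [[cat_res1,cat_res2],[dog_res1,dog_res2],[mouse_res1,mouse_res2]]
    console.log(all_results);
});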
A rough example of how to write this with a Promise:
var search = require('./search');
var myList = ['cat', 'dog', 'mouse'];
var all_results = [];
var Promise = require('bluebird');

var locate = Promise.promisify(search.locate);
var similar = Promise.promisify(search.similar);

for (let i = 0; i < myList.length; i++) {
    // locate function
    locate(myList[i]).then(function (searchResult) {
        if (!searchResult) {
            console.log("Cannot find it");
            return;
        }
        // similarity function
        return similar(myList[i]).then(function (similarResult) {
            if (!similarResult) {
                return;
            }
            var res1 = searchResult.data;
            var res2 = similarResult.data;
            // append results to array
            all_results.push([res1, res2]);
        });
    }).catch(function (err) {
        console.log("Error", err);
    }).finally(function () {
        // NOP
    });
}
I have an async operation in a loop which fetches a result and pushes it to an array, like this:
arr = []
while (some_limit_reaches) {
    async_operation(arg, function (err, data) {
        if (!err)
            arr.push(data)
    })
}
// now arr is not completely filled until all async
// operations are finished in the loop above
The problem is that the array is not completely filled until all async operations are done. How can I have a fully filled array after the loop is over, without using setTimeout?
You're trying to make an asynchronous operation synchronous. You need to check whether your desired state has been reached inside your asynchronous callback. Try something like this:
while (some_limit_reaches) {
    async_operation(arg, function (err, data) {
        if (!err)
            arr.push(data);
        if (check_if_the_array_is_full) {
            // Call some function that continues your operation
        }
    })
}
This way your processing won't continue until the array is full.
This solution is a little bit verbose, but it respects the separation of tasks. Its scheme is similar to kriskowal's Q.all.
var arr = [], limit = 10, i = 0, terminated_operations = 0;

while (i < limit) {
    i++;
    async_operation(arg, function (err, data) {
        if (!err) {
            arr.push(data);
            operationTerminated(arr);
        }
    });
}

function operationTerminated(data) {
    terminated_operations++;
    if (terminated_operations === limit) {
        doStuff(data);
        terminated_operations = 0;
    }
}

function doStuff(data) {
    console.log('all data returned', data);
}
The first snippet represents the core logic. The second function is only a trigger of the action declared in the third one.
Edit:
In order to answer the original question in the title,
"How can I make sure that an async operation does not keep the array in my code empty?",
I recommend returning data = undefined in case of async_operation failure, so you can also accept [] as a valid return value and keep more control in the core logic. This way you can rewrite the loop as:
while (i < limit) {
    i++;
    async_operation(arg, function (err, data) {
        if (err) {
            console.error('error occurred', err);
            return;
        }
        if (data) {
            arr.push(data);
            operationTerminated(arr);
        }
    });
}