Resolving promises inside for loop - javascript

I am trying to read a JSON object using a for loop, format the data, and send it back to the client by putting the formatted response into a model object.
Inside the for loop I am dealing with two promises, chosen based on a few conditions. There are two functions, each returning a promise. How can I get my final data after all the promises are resolved? Thanks in advance.
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        getSomeData().then(function(data) {
            // some operation using data
        });
    }
    if (someOtherCondition) {
        getSomeOtherData().then(function(data) {
            // some operation using data
        });
    }
}

Use Promise.all([promise1, promise2]) (see Promise.all() on MDN) with standard JS Promises (ES2015+). It returns a new promise that resolves once all of the passed promises resolve. But be aware - it rejects immediately as soon as any one promise rejects (it won't wait for the others).
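For instance, a minimal standalone sketch of that fail-fast behaviour:
var ok = new Promise(resolve => setTimeout(() => resolve('ok'), 1000));
var bad = Promise.reject(new Error('boom'));

Promise.all([ok, bad])
    .then(values => console.log(values))      // never runs
    .catch(err => console.log(err.message));  // logs "boom" immediately, without waiting for `ok`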

You might do as follows:
var promises = [],
    JSONData_1 = ["chunk_11", "chunk_12", "chunk_13"],
    JSONData_2 = ["chunk_21", "chunk_22", "chunk_23"],
    getJSONData = (b, i) => new Promise((resolve, reject) =>
        setTimeout(_ => b ? resolve(JSONData_1[i]) : resolve(JSONData_2[i]), 1000));
for (var i = 0; i < JSONData_1.length; i++) {
    if (Math.random() < 0.5) promises.push(getJSONData(true, i));
    else promises.push(getJSONData(false, i));
}
Promise.all(promises)
    .then(a => console.log(a));

You can use jQuery.when().
Note that jQuery.when() takes individual deferreds rather than an array, so apply is needed:
var deferredList = [];
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        deferredList.push(getSomeData().then(function(data) {
            // some operation using data
        }));
    }
    if (someOtherCondition) {
        deferredList.push(getSomeOtherData().then(function(data) {
            // some operation using data
        }));
    }
}
jQuery.when.apply(jQuery, deferredList).done(function() {
    // final to do..
}).fail(function() {
    // fires even if a single one fails! be aware of this
});
See the jQuery.when() documentation.

You can do it in multiple ways. You can also use a for...of loop with async/await to get each result sequentially while looping, if that is a requirement. Something like this:
function downloadPage(url) {
    return Promise.resolve('some value');
}
async function processUrls(urls) {
    for (let url of urls) {
        let result = await downloadPage(url);
        // Process the result
        console.log(result);
    }
}
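Hypothetical usage, assuming you have an array of URLs (the URLs here are just for illustration):
processUrls(['https://example.com/a', 'https://example.com/b'])
    .then(() => console.log('all pages processed'));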

You could do something like this:
var arr = [], arr2 = [];
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        // push inputs for getSomeData() onto the array
        arr.push(jsonData[i]);
    }
    if (someOtherCondition) {
        arr2.push(jsonData[i]);
    }
}
processArr(0);
processArr2(0);
function processArr(idx) {
    if (idx >= arr.length) {
        // done
    } else {
        getSomeData().then(function(data) {
            // some operation using data
            // optionally store in a results array
            // recurse
            processArr(idx + 1);
        });
    }
}
function processArr2(idx) {
    if (idx >= arr2.length) {
        // done
    } else {
        getSomeOtherData().then(function(data) {
            // some operation using data
            // recurse
            processArr2(idx + 1);
        });
    }
}

Related

Looping through promise and waiting?

I made a function that connects to an API which returns an array of 100 objects at a time. Without the loop, the getPageData() function works when you pass it an int; however, when I try to iterate through pages, I get back nothing. A setTimeout doesn't seem to help the loop. Please help:
async function updateWholeDB() {
    var results = [];
    for (let i = 0; i < getPages(); i++) {
        setTimeout(function() {
            getPageData(i).then((data) => {
                console.log(data);
                results.push(data);
            });
        }, 2000);
    }
    return Promise.all(results);
}
Promise.all() does its work properly when you pass it an array of promises, not an array of results. It then returns a promise that resolves to an array of results. So, you're not using it properly.
In addition, there doesn't appear to be any reason for the setTimeout() here if things are coded properly.
If what you're trying to do is to end up with an array of results from calling getPageData() a bunch of times, you can do that like this:
function updateWholeDB() {
    let promises = [];
    for (let i = 0; i < getPages(); i++) {
        promises.push(getPageData(i));
    }
    return Promise.all(promises);
}
// usage
updateWholeDB().then(results => {
    console.log(results);
}).catch(err => {
    console.log(err);
});
This assumes that getPageData() returns a promise that resolves properly with the data you are interested in. If that's not the case, then you will have to show us that code too so we can offer advice on how to fix it.
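For reference, a hypothetical promise-returning getPageData() might look like the sketch below; the endpoint URL and the use of a fetch-style HTTP client (built into newer Node versions, or via node-fetch) are assumptions, since the real implementation wasn't shown:
function getPageData(page) {
    // hypothetical endpoint; returns a promise that resolves with the parsed JSON
    return fetch('https://api.example.com/data?page=' + page)
        .then(response => response.json());
}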

Make synchronous API calls inside a for loop

I have an array of objects/strings:
this.addtenant().subscribe(r => {
    let a = ['plant1', 'plant2'];
    for (let b of a) {
        this.myService.saveAllData(b).subscribe(r => {
            console.log("result", r);
        });
    }
});
and saveAllData is in the myService file and returns an Observable:
saveAllData(b) {
    return this.http.post(url, b);
}
The problem is that since I am using subscribe, the calls are asynchronous. I want the first "plant1" POST call to finish before the call for plant2 is made - in simple words, a synchronous (sequential) call.
I think you should use async/await for sequential calls.
Here is one tutorial:
https://www.techiediaries.com/javascript-async-await-tutorial/
A sample code demo would be something like:
responseArr: any[] = [];
func1() {
    let x = [1, 2, 3];
    this.func2(x);
}
async func2(arr) { // make the function async
    for (let i = 0; i < arr.length; i++) {
        const response: any = await this.myService(arr[i]); // wait on each service call; this gives sequential behaviour
        this.handleResponse(response); // handle each response in one place if there is common functionality
    }
    return this.responseArr; // return the final response
}
handleResponse(response) {
    this.responseArr.push(response);
}
I found a solution myself. You can use async/await inside a plain for loop; forEach is not promise-aware, so it cannot be used with await. Here is the final code:
this.addtenant().subscribe(async r => {
    let a = ['plant1', 'plant2'];
    for (let index = 0; index < a.length; index++) {
        await this.myService.saveAllData(a[index]).toPromise().then(r => {
            console.log("result", r);
        });
    }
});
As await works on Promises, toPromise() replaces subscribe().
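For reference, a minimal sketch of the difference between the two styles, assuming saveAllData() returns an RxJS Observable as in the question (both lines belong inside the async callback above):
// fire-and-forget: subscribe() does not pause the loop
this.myService.saveAllData(value).subscribe(r => console.log('result', r));

// sequential: await only works on Promises, so convert the Observable first
const r = await this.myService.saveAllData(value).toPromise();
console.log('result', r);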

Resolving Promises sequentially not working

I have code that should:
Read the last three entries from Firebase
Iterate over each retrieved entry
Push a Promise-returning function expression into an array of Promises, to be processed sequentially later
Process said array sequentially
Code:
firebase.database().ref('someRef').limitToLast(3).on('value', snapshot => {
    let promiseArray = [];
    snapshot.forEach(e => {
        promiseArray.push(() => {
            firebase.storage().ref(e.key).getDownloadURL().then(url => {
                // Do something with URL
                // In this case, I print out the url to see the order of URL retrieved
                // Unfortunately, the order was incorrect
                return 'Resolved, please continue'; // Return something to resolve my Promise
            });
        });
    });
    let result = Promise.resolve([]);
    promiseArray.forEach(promise => {
        result = result.then(promise);
    });
});
I think it should already be correct, but the result I get is wrong. What did I miss?
EDIT
I seem to have missed a point: in my Promise array, I want the first function to resolve the Promise it returns before continuing to the second function.
You should use reduce. A very good example can be found here: https://decembersoft.com/posts/promises-in-serial-with-array-reduce/
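A minimal sketch of that reduce pattern, assuming promiseFactories is an array of functions that each return a Promise (like the question's promiseArray):
promiseFactories.reduce(
    (chain, factory) => chain.then(results =>
        factory().then(result => results.concat(result)) // run the next factory only after the chain so far resolves
    ),
    Promise.resolve([])
).then(allResults => console.log(allResults));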
forEach is a synchronous method. You can use map to create the array of promises and then use Promise.all:
firebase.database().ref('someRef').limitToLast(3).on('value', snapshot => {
    const promiseArray = snapshot.map(e => firebase.storage().ref(e.key).getDownloadURL());
    Promise.all(promiseArray).then((resultArr) => {
        // Do anything with your result array
    });
});
For sequential consumption of the promises you can use async/await:
firebase.database().ref('someRef').limitToLast(3).on('value', async (snapshot) => {
    const promiseArray = snapshot.map(e => firebase.storage().ref(e.key).getDownloadURL());
    let result;
    for (let i = 0; i < promiseArray.length; i++) {
        result = await promiseArray[i];
    }
});
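Note that mapping first starts all the getDownloadURL() calls in parallel; the loop above only consumes their results in order. To actually run them one at a time, create each promise inside the loop. A sketch (Firebase's DataSnapshot has forEach but no map, so the keys are collected first):
firebase.database().ref('someRef').limitToLast(3).on('value', async (snapshot) => {
    const keys = [];
    snapshot.forEach(e => keys.push(e.key));
    for (const key of keys) {
        const url = await firebase.storage().ref(key).getDownloadURL();
        // Do something with url; the next download starts only after this one finishes
    }
});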
I figured it out: apparently, my function was not returning a Promise. Because of that, when I chained the thens, nothing waited for my Promise to resolve, since it was never returned in the first place. I was essentially returning void, so the functions continued without waiting for the previous Promise. Simply adding return fixes the problem:
firebase.database().ref('someRef').limitToLast(3).on('value', snapshot => {
    let promiseArray = [];
    snapshot.forEach(e => {
        promiseArray.push(() => {
            return firebase.storage().ref(e.key).getDownloadURL().then(url => { // Add return here
                // Do something with URL
                // In this case, I print out the url to see the order of URL retrieved
                return 'Resolved, please continue'; // Return something to resolve my Promise
            });
        });
    });
    let result = Promise.resolve([]);
    promiseArray.forEach(promise => {
        result = result.then(promise);
    });
});

Call function after Promise.all inside a loop

So I wrote some JavaScript using promises. I created two promises inside a for loop, like this:
for (let i = 0; i < globalName.length; i++) {
    let debug = globalName[i];
    var promise1 = new Promise(function(resolve, reject) {
        var j = searchStart(startT, debug);
        resolve(j);
    }).then(function(result) {
        sxx = result;
    });
    var promise2 = new Promise(function(resolve, reject) {
        var k = searchEnd(endT, debug);
        resolve(k);
    }).then(function(result) {
        syy = result;
    });
    Promise.all([promise1, promise2]).then(function(values) {
        let localed = [];
        entry[i] = sxx;
        exit[i] = syy;
        localed.push({
            "name": debug,
            "first": entry[i],
            "last": exit[i]
        });
        xtable.rows.add(localed).draw();
    });
}
In each promise I call the functions searchStart(startT, debug) and searchEnd(endT, debug); within each function I also wrote promise code that returns a value from an API (a device API that returns JSON data when called). The JSON data works fine, and I can access it in my functions and return the intended values.
In the Promise.all callback, when my functions return values, I write the data into a table provided by DataTables. But because that callback runs each time the two promises above resolve, it writes to my table only one row at a time.
Now, what I want to ask is: can I somehow collect all the data first, and after the data is complete, call another function to write it all to the table?
You can .map each debug to its associated Promise.all, so that you have an array of Promise.alls. Then, after calling Promise.all on that array, you can add all rows at once.
Note that since searchStart and searchEnd look to already return Promises, there's no need for the explicit Promise constructor antipattern - simply use the existing Promise alone. Also, by returning a value inside a .then, you can avoid having to use outer variables like sxx, syy, entry[i], and exit[i]:
const promiseAlls = globalName.map((debug, i) => {
    return Promise.all([
        debug, // see below for note
        searchStart(startT, debug),
        searchEnd(endT, debug)
    ]);
});
Promise.all(promiseAlls).then((allArrs) => {
    allArrs.forEach(([
        name,  // this is the same as the "debug" variable above
        first, // this is the same as `entry[i]`, or `sxx`, in your original code
        last   // this is the same as `exit[i]`, or `syy`, in your original code
    ]) => {
        const localed = [{ name, first, last }];
        xtable.rows.add(localed).draw();
    });
});
The debug is used in the initial Promise.all even though it's not a Promise so that it can be passed along and used with its other associated values, once they've been resolved.
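As a quick standalone illustration of that pass-through behaviour (not from the question itself):
Promise.all(['plain value', Promise.resolve(42)])
    .then(([a, b]) => console.log(a, b)); // logs: plain value 42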
I am not clear about what you want, but I have two answers which may help you.
Solution 1: resolve each promise one at a time, then proceed to the next:
function searchStartAndEnd(flag = false, date, debug) {
    return new Promise((resolve, reject) => {
        var j;
        if (flag) {
            j = searchStart(date, debug);
        } else {
            j = searchEnd(date, debug);
        }
        resolve(j);
    });
}
// inside an async function:
for (let i = 0; i < globalName.length; i++) {
    let debug = globalName[i];
    sxx = await searchStartAndEnd(true, startT, debug);
    syy = await searchStartAndEnd(false, endT, debug);
    localed.push({
        "name": debug,
        "first": sxx,
        "last": syy
    });
    xtable.rows.add(localed).draw();
}
Solution 2: resolve the promises in parallel, then move to the next task and the next iteration:
function searchStartAndEnd(flag = false, date, debug) {
    return new Promise((resolve, reject) => {
        var j;
        if (flag) {
            j = searchStart(date, debug);
        } else {
            j = searchEnd(date, debug);
        }
        resolve(j);
    });
}
// inside an async function:
for (let i = 0; i < globalName.length; i++) {
    let debug = globalName[i];
    [sxx, syy] = await Promise.all([
        searchStartAndEnd(true, startT, debug),
        searchStartAndEnd(false, endT, debug)
    ]);
    localed.push({
        "name": debug,
        "first": sxx,
        "last": syy
    });
    xtable.rows.add(localed).draw();
}
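Note that await is only valid inside an async function, so either loop above needs a wrapper along these lines (processAll is a hypothetical name):
async function processAll() {
    // ... either loop from the two solutions above ...
}
processAll().catch(err => console.log(err));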

How to handle async Node.js in a loop

I have a loop like this:
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    var docs = collection.find({ id: { "$in": temparray } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
}
The loop iterates two times. In the first iteration it gets 200 elements; in the second, it gets 130. But when I open the .txt file, I see only 130 names. I guess that because of the async nature of Node.js, only the second part of the array is processed. What should I do to get all parts of the array processed? Thanks in advance.
EDIT: I finally turned the code into this:
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}
async.each(generalArr, function(item, callback) {
    var docs = collection.find({ id: { "$in": item } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
    callback(null);
});
When I change this line:
var docs = collection.find({ id: { "$in": item}}).toArray();
to this:
var docs = collection.find({ id: { "$in": item}}).project({ name: 1 }).toArray();
it works, and I'm able to print all the names. I guess there is a memory problem when I try it without .project(). How can I make this work without using project? Should I change some memory limits? Thanks in advance.
I think your code is unnecessarily complicated, and appending to a file in a loop is very expensive compared to in-memory computation. A better way is to write to the file just once.
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}
const queryPromises = [];
generalArr.forEach((item, index) => {
    queryPromises.push(collection.find({ id: { "$in": item } }).toArray());
});
let stringToWrite = '';
Promise.all(queryPromises).then((result) => {
    result.forEach((item) => {
        item.forEach((element) => {
            // build the single string which we want to write
            stringToWrite = stringToWrite + "\n" + element.name;
        });
    });
    fs.appendFile("C:/Users/x/Desktop/names.txt", stringToWrite, function (err) {
        if (err) {
            return console.log(err);
        } else {
            // call your callback or return
        }
    });
});
In the code above, I do the following:
1. Wait for all the DB queries to finish
2. Iterate over the results and build the single string we need to write
3. Write to the file
Once you go asynchronous you cannot go back - all your code needs to be asynchronous. In Node 8 you handle this with the async and await keywords. In older versions you can use Promises - async/await is just syntactic sugar for them anyway.
However, most of the APIs in Node are older than Promise, so they use callbacks instead. There is a promisify function to convert callback-style functions into promise-returning ones.
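For example, a sketch using Node's built-in util.promisify (available since Node 8) to turn the callback-based fs.appendFile into a promise-returning function:
const util = require('util');
const fs = require('fs');
const appendFileAsync = util.promisify(fs.appendFile);

appendFileAsync('names.txt', 'some name\n')
    .then(() => console.log('written'))
    .catch(err => console.log(err));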
There are two ways to handle this: you can let all the asynchronous actions happen at the same time, or you can chain them one after another (which preserves order but takes longer).
So, collection.find is asynchronous; it either takes a callback function or returns a Promise. I'm going to assume that the API you're using does the latter, but your problem could be the former (in which case look up promisify).
var findPromise = collection.find({ id: { "$in": item}});
Now, at this point findPromise holds the running find action. We say this is a promise that resolves (completes successfully) or rejects (throws an error). We want to queue up an action to do once it completes, and we do that with then:
// The result of collection.find is the collection of matches
findPromise.then(function(docs) {
// Any code we run here happens asynchronously
});
// Code here will run first
Inside the promise we can return further promises (allowing them to be chained - complete one async, then complete the next, then fire the final resolve once all done) or use Promise.all to let them all happen in parallel and resolve once done:
var p = new Promise(function(resolve, reject) {
    var findPromise = collection.find({ id: { "$in": item } });
    findPromise.then(function(docs) {
        var singleDocNames = [];
        for (var i = 0; i < docs.length; i++) {
            var singleDoc = docs[i];
            if (!singleDoc)
                continue;
            for (var t = 0; t < singleDoc.length; t++)
                singleDocNames.push(singleDoc[t].name);
        }
        // Resolve the outer promise with the final result
        resolve(singleDocNames);
    });
});
// When the promise finishes, log it to the console
p.then(console.log);
// Code inline here will fire before the promise
This is much easier in Node 8 with async/await:
async function p() {
    // Await puts the rest of this function in the .then() of the promise
    const docs = await collection.find({ id: { "$in": item } });
    const singleDocNames = [];
    for (var i = 0; i < docs.length; i++) {
        // ... synchronous code unchanged ...
    }
    // Returning resolves the promise with the final result
    return singleDocNames;
}
// async functions can be treated like promises
p().then(console.log);
If you need to write the results to a text file asynchronously there are a couple of ways to do it - you can wait until the end and write all of them, or chain a promise to write them after each find, though I find parallel IO operations tend to be at more risk of deadlocks.
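For instance, a sketch of the wait-until-the-end approach, using the promise-based fs API (fs.promises, available in newer Node versions) together with the p() function from the sketch above:
const fsp = require('fs').promises;

async function saveNames() {
    const names = await p(); // resolves with singleDocNames
    await fsp.appendFile('names.txt', names.join('\n') + '\n');
}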
The code above has multiple issues with asynchronous control flow. Similar code can work, but only if you use the ES7 async/await operators on every async operation.
Of course, you can also achieve a solution with a promise sequence. Solution:
let flowPromise = Promise.resolve();
const chunk = 200;
for (let i = 0, j = document.mainarray.length; i < j; i += chunk) {
    flowPromise = flowPromise.then(() => {
        const temparray = document.mainarray.slice(i, i + chunk);
        const docs = collection.find({ id: { "$in": temparray } }).toArray();
        return docs.then((singleDoc) => {
            let innerFlowPromise = Promise.resolve();
            if (singleDoc) {
                console.log("single doc length : " + singleDoc.length);
                for (let t = 0, len = singleDoc.length; t < len; t++) {
                    innerFlowPromise = innerFlowPromise.then(() => new Promise((resolve, reject) =>
                        fs.appendFile(
                            "C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n",
                            err => (err ? reject(err) : resolve())
                        )
                    ));
                }
            }
            return innerFlowPromise;
        });
    });
}
flowPromise.then(() => {
    console.log('Done');
}).catch((err) => {
    console.log('Error: ', err);
});
When using Promise-based control flow, always remember that loops and function-call sequences do not pause execution until the async operation is done, so include all the then sequences manually - or use async/await syntax.
Which version of Node.js are you using? You should use the native async/await support built into newer versions of Node.js (no libraries required). Also note that fs.appendFile is asynchronous, so you need to either promisify it to get a promise back, or just use appendFileSync and accept the blocking IO (which might be okay for you, depending on the use case):
async function writeNames() {
    // ...
    for (var item of generalArr) {
        var singleDoc = await collection.find({ id: { "$in": item } }).toArray();
        // if(singleDoc) { — this won't do anything, since collection.find will always return something, even if it's just an empty array
        console.log("single doc length : " + singleDoc.length);
        var t;
        for (t = 0, len = singleDoc.length; t < len; t++) {
            fs.appendFileSync("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n");
        }
    }
}
Another approach: iterate the cursor and save the names in batches of 100:
var docs = collection.find({ id: { "$in": document.mainarray } }), // returns a cursor
    doc,
    names = [],
    toInsert;
function saveToFile(cb) {
    toInsert = names.splice(0, 100);
    if (!toInsert.length) return cb();
    fs.appendFile("C:/Users/x/Desktop/names.txt", toInsert.join("\n"), cb);
}
(function process() {
    if (docs.hasNext()) {
        doc = docs.next();
        names.push(doc.name);
        if (names.length === 100) {
            // save when we have 100 names in memory and clear the memory
            saveToFile(function(err) {
                process();
            });
        } else {
            process();
        }
    } else {
        saveToFile(function() {
            console.log('All done');
        });
    }
}()); // invoke the function
If you can't solve your issue using core modules and basic Node.js, there is most likely a lack of understanding of how things work, or insufficient knowledge about a library (in this case the FileSystem module).
Here is how you can solve the issue without third-party libraries:
'use strict';
const fs = require('fs');
let chunk = 200;
// How many rounds of array chunking we expect
let rounds = Math.ceil(mainArray.length / chunk);
// copy to temp (for the counter)
let tempRounds = rounds;
// set file name
let filePath = './names.txt';
// Open a writable Stream
let myFileStream = fs.createWriteStream(filePath);
// for round: 0-${rounds}
for (let i = 0; i < rounds; i++) {
    // assume the array has ${chunk} elements left in this round
    let tempChunk = chunk;
    // if ${chunk} is too big, i.e. i=3 -> chunk = 600, but mainArray.length = 512,
    // adjust the last round for "the leftovers"
    if (mainArray.length < i * chunk) tempChunk = Math.abs(mainArray.length - i * chunk);
    // slice it for this round
    let tempArray = mainArray.slice(i * chunk, i * chunk + tempChunk);
    // get stuff from DB
    let docs = collection.find({ id: { "$in": tempArray } }).toArray();
    docs.then(function(singleDoc) {
        // for each name in the doc
        for (let j = 0; j < singleDoc.length; j++) {
            // write to stream
            myFileStream.write(singleDoc[j].name + "\n");
        }
        // declare the round done (decrement tempRounds) and check if it hits 0
        if (!--tempRounds) {
            // if all rounds are done, end the stream
            myFileStream.end();
            // BAM! you're done
            console.log("Done");
        }
    });
}
The key is to use fs writable streams :)
See the Node.js fs.createWriteStream documentation.
