Trouble understanding async/await nodejs - javascript

OK, so I'm having trouble understanding how async/await, Promises, etc. work in Node.js; this is my first time programming in an asynchronous language.
What I'm trying to do here is basically select one random entry from the mongoose model "SecSolution". Currently, arr is empty when it is returned, and the debug message at the bottom gets printed before the one at the top.
I just want the function to return "arr" after it gets its value.
async function getRandomCardIds(deckIdentifier, cardCount) {
    let arr;
    switch (deckIdentifier) {
        case 102:
            await SecSolution.count().exec(async function (err, count) {
                let promises = [];
                var random = Math.floor(Math.random() * count);
                for (let i = 0; i < 2; i++) {
                    promises.push((await SecSolution.findOne().skip(random).lean())._id);
                }
                arr = await Promise.all(promises);
                debug("This gets printed second" + arr);
            });
            break;
    }
    debug("this gets printed first");
    return arr;
}
Thanks in advance!

Do not use callbacks when working with async/await. (And when working with plain promises, use only then callbacks.) Also, you shouldn't use await on a promise that you still need as a promise object in order to pass it to Promise.all. Your code should be:
async function getRandomCardIds(deckIdentifier, cardCount) {
    switch (deckIdentifier) {
        case 102:
            const count = await SecSolution.count(); // a promise(like object)
            let promises = [];
            var random = Math.floor(Math.random() * count);
            for (let i = 0; i < 2; i++) {
                promises.push(SecSolution.findOne().skip(random).lean());
            }
            let arr = await Promise.all(promises);
            debug("This gets printed second" + arr);
            return [arr[0]._id, arr[1]._id];
    }
    debug("this gets printed first");
    return undefined;
}
Instead of accessing the _ids on the objects in the result array, you could also have transformed the promises directly (similar to what you tried with the await):
promises.push(SecSolution.findOne().skip(random).lean().then(card => card._id));

Well, basically you have to keep in mind that the engine keeps running the rest of the code; code that needs to wait for something to resolve won't stop the rest of the code from running.
Thus, looking at your code, we can see the following:
1) arr is defined as undefined, then execution enters the switch.
2) In the switch statement we have an await, so that expression waits (but it won't stop the other code from running, because it is not in the same statement); it will resolve later.
3) The debug message is printed.
4) undefined is returned, because the work inside the switch has not resolved yet.
5) Eventually the statement resolves, but by then there is nothing you can do with the result.
An example could be the following:
function resolveAfter2Seconds() {
    return new Promise(resolve => {
        setTimeout(() => {
            resolve('resolved');
        }, 2000);
    });
}

async function asyncCall() {
    console.log('calling');
    var result = await resolveAfter2Seconds();
    console.log(result);
    // expected output: "resolved"
}

asyncCall();
So what you can do in your case is the following:
function resolveInnerFunct() {
    return new Promise(resolve => {
        let promises = [];
        var random = Math.floor(Math.random() * count); // count needs to be in scope here
        for (let i = 0; i < 2; i++) {
            promises.push(SecSolution.findOne().skip(random).lean());
        }
        Promise.all(promises).then(values => resolve(values));
    });
}
async function asyncCall() {
    console.log('calling');
    let arr;
    switch (deckIdentifier) {
        case 102:
            // this will wait until the resolveInnerFunct resolves.
            arr = await resolveInnerFunct();
            debug("this wont get printed before the other message")
            break;
    }
    debug("this gets printed first");
    return arr;
}

asyncCall();

Related

How to return array at end of loop (Newbie question)

The following code (which I've simplified for clarity) loops through and returns cardsToInsert before each part of the loop finishes, so the array doesn't get built properly.
The loop does build the array correctly, but the result gets returned near the beginning, not after the array has been built.
How do I get it to finish all the loops before returning the array?
async function loopCards(cardsToGet) {
    for (let cardToGet of cardsToGet) {
        getDataFunctionWhichReturnsAPromise()
            .then(async (card) => {
                writeCard = await buildingCard(card)
                cardsToInsert.push(writeCard)
            })
    }
    return cardsToInsert
}
thanks
Full Code Added as Requested
// wixData.get() returns a promise
async function loopCards(cardsToGet) {
    let writeCard
    let buildCard
    for (let index = 0; index < cardsToGet.length; index++) {
        const cardToGet = cardsToGet[index].card
        buildCard = wixData.get("Card", cardToGet)
            .then(async (card) => {
                writeCard = await buildingCard(card)
                cardsToInsert.push(writeCard)
            })
            .catch((err) => {
                let errorMsg = err;
                console.log("getcard error: " + errorMsg);
                return errorMsg
            });
    }
    return cardsToInsert
}
Here is a detailed explanation:
Loops are synchronous.
Promises are asynchronous.
To get the data out of a promise you need to wait for it to finish, using a callback, async/await, or a promise chain.
In your code, you use .then to access the result of wixData, but the whole wixData.get("Card", cardToGet).then(async (card) => {}) expression runs asynchronously relative to loopCards(cardsToGet); because of this, your loop (and the function) finishes before the callbacks run, so the result array is still empty. The sketch below illustrates this.
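Here is a minimal sketch of that timing (getData is a hypothetical stand-in for wixData.get that simply resolves after a short delay):
function getData(i) {
    // hypothetical stand-in for wixData.get: resolves after 10 ms
    return new Promise(resolve => setTimeout(() => resolve(i), 10));
}

function loopWithThen() {
    const results = [];
    for (let i = 0; i < 3; i++) {
        getData(i).then(value => results.push(value)); // callback runs later
    }
    return results; // returned before any .then callback has fired
}

console.log(loopWithThen()); // [] - same effect as your empty cardsToInsert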
Solution -
Just wait for wixData.get("Card", cardToGet) to finish and then do the manipulations. Here is a solution using async/await.
async function loopCards(cardsToGet) {
    let cardsToInsert = [];
    for (let index = 0; index < cardsToGet.length; index++) {
        const cardToGet = cardsToGet[index].card
        let card = await wixData.get("Card", cardToGet)
        let writeCard = await buildingCard(card)
        cardsToInsert.push(writeCard)
    }
    return cardsToInsert
}
In the above code I wait for wixData.get("Card", cardToGet) to finish and store the result in card. This should fix your problem.
However, this code is not optimal performance-wise, since you wait for each network call one after another. What you can do instead is start all the promises at once and wait for them together using Promise.all.
Here is the code for that with error handling -
async function loopCards(cardsToGet) {
    try {
        return await Promise.all(cardsToGet.map(cardToGet => buildCard(cardToGet.card)));
    }
    catch (error) {
        console.log(error);
    }
}

async function buildCard(cardToGet) {
    try {
        let card = await wixData.get("Card", cardToGet);
        let writeCard = await buildingCard(card);
        return writeCard;
    }
    catch (error) {
        console.log("getcard error: " + error);
        return error;
    }
}
The above code might have some errors since I haven't tested it, but I hope you get the approach.
You should wrap the loop inside the promise's .then instead. Also, because you are using await inside the callback, each item is awaited one after the other:
getDataFunctionWhichReturnsAPromise()
    .then(async (card) => {
        for (let cardToGet of cardsToGet) {
            writeCard = await buildingCard(card)
            cardsToInsert.push(writeCard)
        }
    })

How to make JS wait for previous lines of code to finish first?

So I have an issue with JS and Node.js: it runs the whole code at the same time and doesn't wait for a previous function to finish its work (compared to Python). How do I make it first finish the function, push the results to the array, and only then print the whole array to the console? await doesn't seem to work in any kind of for loop.
const fetch = require('node-fetch')

const fetchlink = async (i) => {
    let url = `http://linktofetch`
    let response = await fetch(url, {
        method: 'GET'
    })
    const answer = await response.json()
    return answer
}

const arr = []

let pushtoarr = async (value) => {
    arr.push(value)
}

let main = async () => {
    for (let i = 1; i < 10; i++) {
        const answer = fetchlink(i).then((response) => {
            response.data.items.forEach(el => {
                pushtoarr(el.name)
            })
        })
    }
    console.log(arr)
}

main()
When doing foo.then(bar), bar doesn't execute immediately; you're just registering a callback that will execute bar later on. You should instead be doing const baz = await foo; bar(baz).
So, in your example, you should rewrite your code as:
const fetch = require('node-fetch')

const fetchlink = async (i) => {
    let url = `http://linktofetch`;
    let response = await fetch(url, { method: 'GET' });
    const answer = await response.json();
    return answer;
};

(async () => {
    const arr = [];
    for (let i = 1; i < 10; i++) {
        const response = await fetchlink(i);
        for (const el of response.data.items) {
            arr.push(el.name);
        }
    }
    console.log(arr);
})();
I didn't test this, but it will look something like this:
const fetch = require('node-fetch');

(async () => {
    const arr = []
    for (let i = 1; i < 10; i++) {
        const response = await fetchlink(i)
        const answer = response.data.items.forEach(el => arr.push(el))
    }
    console.log(arr)
})()

async function fetchlink (i) {
    let url = `http://linktofetch`
    let response = await fetch(url, {
        method: 'GET'
    })
    return response.json()
}
The problem is that you're trying to do an asynchronous task synchronously. There are generally two ways you can go about executing an async function and which one you use depends on what you need from the function.
Non-Blocking
In general, an async function will return a Promise. In order to get the results of a promise you have to unwrap it like so,
asyncFunction(args).then((promiseResult) => { doStuff(promiseResult); });
The key part is that you unwrap the promise using then, which will only trigger after the original promise has finished. This means that code execution will not wait for the promise to be unwrapped before executing the lines after it. For example:
asyncFunction(args).then((promiseResult) => { doStuff(promiseResult); });
console.log('done');
In this case the log function will generally happen before the doStuff function gets called.
Blocking
In the event that you want to block or wait for a promise to unwrap, you need to use the await keyword like so,
const promiseResult = await asyncFunction(args);
doStuff(promiseResult);
console.log('done');
In this example, no code after the await line will get executed until asyncFunction resolves. The important thing to understand is that this is only true within the scope of code you are in: if a non-blocking async call is started inside asyncFunction without being awaited, the outer await will not wait for that inner work to finish before moving on to doStuff.
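A minimal sketch of that, where the setTimeout inside inner stands in for a non-awaited async operation:
async function inner() {
    // started but NOT awaited, so nothing waits for this timer
    setTimeout(() => console.log('inner work done'), 100);
}

async function outer() {
    await inner();              // only waits for inner's own promise
    console.log('after inner'); // logs before 'inner work done'
}

outer();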
I will omit the actual modification to fix your code as it seems a few other people have beat me to that, however, I hope that explanation helps.
Use a Promise. Example usage below:
'use strict';

var promiseCount = 0;

function testPromise() {
    let thisPromiseCount = ++promiseCount;
    let log = document.getElementById('log');
    log.insertAdjacentHTML('beforeend', thisPromiseCount +
        ') Started (<small>Sync code started</small>)<br/>');

    // We make a new promise: we promise a numeric count of this promise, starting from 1 (after waiting 3s)
    let p1 = new Promise(
        // The executor function is called with the ability to resolve or
        // reject the promise
        (resolve, reject) => {
            log.insertAdjacentHTML('beforeend', thisPromiseCount +
                ') Promise started (<small>Async code started</small>)<br/>');
            // This is only an example to create asynchronism
            window.setTimeout(
                function() {
                    // We fulfill the promise !
                    resolve(thisPromiseCount);
                }, Math.random() * 2000 + 1000);
        }
    );

    // We define what to do when the promise is resolved with the then() call,
    // and what to do when the promise is rejected with the catch() call
    p1.then(
        // Log the fulfillment value
        function(val) {
            log.insertAdjacentHTML('beforeend', val +
                ') Promise fulfilled (<small>Async code terminated</small>)<br/>');
        }).catch(
        // Log the rejection reason
        (reason) => {
            console.log('Handle rejected promise (' + reason + ') here.');
        });

    log.insertAdjacentHTML('beforeend', thisPromiseCount +
        ') Promise made (<small>Sync code terminated</small>)<br/>');
}
reference:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise

JavaScript recursion returning a promise never resolving

In JavaScript, I have an array of objects representing some tasks to do. I iterate through this array with a for loop with await, calling a function doOneTask which returns a Promise.
That works pretty well as long as the code inside doOneTask works as expected. However, those things often fail, and trying again helps almost all the time. So, I'd like to implement a procedure for auto-retrying inside the JavaScript code.
My idea was a recursive function: in case of a failure, doOneTask calls itself until the promise is finally resolved.
My code looks like this:
var tasks = [{label: 'task0'}, {label: 'task1'}, {label: 'task2'}];

async function mainFunction() {
    for (let k = 0; k < tasks.length; k++) {
        await doOneTask(tasks[k]);
        console.log("doOneTask done for index " + k);
    }
    console.log("End reached!");
}

function doOneTask(task) {
    return new Promise(async function (resolve, reject) {
        console.log("Starting with: " + task.label);
        let checkIfDoeSomeStuffWorked = await doSomeAsyncStuff();
        if (checkIfDoeSomeStuffWorked == false) {
            console.log(task.label + ": FAILED");
            return doOneTask(task);
        }
        else {
            console.log(task.label + ": SUCCESS");
            resolve(true);
        }
    });
}

function doSomeAsyncStuff() {
    return new Promise(function (resolve, reject) {
        var myRandom = Math.random();
        if (myRandom < 0.3) {
            resolve(true);
        }
        else {
            resolve(false);
        }
    });
}

mainFunction();
(In real life, doSomeAsyncStuff is a backend call which often fails. The random() part is just for demonstration. In reality, I also limit the number of trials before stopping the script.)
However, it doesn't work. After a failure, the script stops once the retried task reaches the SUCCESS console log. I never get back to the loop, and the next items in the loop never get executed.
You have no use for the q library dependency. async functions always return a Promise, so you can simplify your code quite a bit -
async function doOneTask (task) {
    const result = await doSomeAsyncStuff()
    if (result === false) {
        console.log(`${task} failed`)
        return doOneTask(task)
    }
    else {
        console.log(`${task} passed`)
        return true
    }
}
Your fake function doSomeAsyncStuff can be cleaned up too -
async function doSomeAsyncStuff () {
    return Math.random() < 0.3
}
But let's add a fake delay of 1 second so that we can show things working 100% -
async function doSomeAsyncStuff () {
    return new Promise(resolve =>
        setTimeout(resolve, 1000, Math.random() < 0.3)
    )
}
Last, your main function uses a really old looping convention. As you're using modern JavaScript, you might as well use for-of syntax -
async function main (tasks = []) {
    for (const t of tasks) {
        await doOneTask(t)
    }
    return "done"
}
Finally we run the program -
const tasks =
    [ 'task0', 'task1', 'task2' ]

main(tasks).then(console.log, console.error)
// task0 failed
// task0 passed
// task1 failed
// task1 failed
// task1 passed
// task2 passed
// done
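The question also mentions limiting the number of trials before giving up; a hedged variant of the same function, with an illustrative attempt/maxRetries pair, could look like this -
async function doOneTask (task, attempt = 1, maxRetries = 20) {
    const result = await doSomeAsyncStuff()
    if (result === false) {
        if (attempt >= maxRetries) {
            throw new Error(`${task} failed after ${maxRetries} attempts`)
        }
        console.log(`${task} failed, retrying (${attempt}/${maxRetries})`)
        return doOneTask(task, attempt + 1, maxRetries)
    }
    console.log(`${task} passed`)
    return true
}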
After having completed the question, but just before posting, something came to mind: in the setup above, I don't resolve the very same promise object when I finally reach success; instead, a new promise object is generated for each function call. My solution/workaround is quite simple: use the q promise library and pass the deferred from one function call to the next:
var q = require('q');

async function doOneTask(task, promiseObj) {
    if (!promiseObj) {
        var promiseObj = q.defer();
    }
    console.log("Starting with: " + task.label);
    let checkIfDoeSomeStuffWorked = await doSomeAsyncStuff();
    if (checkIfDoeSomeStuffWorked == false) {
        console.log(task.label + ": FAILED");
        return doOneTask(task, promiseObj);
    }
    else {
        console.log(task.label + ": SUCCESS");
        promiseObj.resolve(true);
    }
    return promiseObj.promise;
}
That way, we make sure that the very same promise object that was generated on the first call of doOneTask is the one resolved in the end - even after the 20th execution.

How to handle async Node.js in a loop

I have a loop like this:
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    var docs = collection.find({ id: { "$in": temparray } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
}
The loop iterates two times. In the first iteration it gets 200 elements; in the second, it gets 130 elements. And when I open the .txt file, I only see 130 names. I guess that because of the async nature of Node.js, only the second part of the array is processed. What should I do to get all parts of the array processed? Thanks in advance.
EDIT: I finally changed the code to this:
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}

async.each(generalArr, function(item, callback) {
    var docs = collection.find({ id: { "$in": item } }).toArray();
    docs.then(function(singleDoc) {
        if (singleDoc) {
            console.log("single doc length : " + singleDoc.length);
            var t;
            for (t = 0, len = singleDoc.length; t < len; t++) {
                fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
                    if (err) {
                        return console.log(err);
                    }
                });
            }
        }
    });
    callback(null);
})
When I change this line:
var docs = collection.find({ id: { "$in": item}}).toArray();
to this line:
var docs = collection.find({ id: { "$in": item}}).project({ name: 1 }).toArray();
it works; I'm able to print all names. I guess there is a memory problem when I try it without .project(). How can I make this work without using project? Should I change some memory limits? Thanks in advance.
I think your code is unnecessarily complicated, and appending to a file in a loop is very expensive compared to in-memory computation. A better way would be to write to the file just once.
var i, j, temparray, chunk = 200;
var generalArr = [];
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
    temparray = document.mainarray.slice(i, i + chunk);
    generalArr.push(temparray);
}

const queryPromises = [];
generalArr.forEach((item, index) => {
    queryPromises.push(collection.find({ id: { "$in": item } }).toArray());
});

let stringToWrite = '';

Promise.all(queryPromises).then((result) => {
    result.forEach((item) => {
        item.forEach((element) => {
            // create a single string which you want to write
            stringToWrite = stringToWrite + "\n" + element.name;
        });
    });
    fs.appendFile("C:/Users/x/Desktop/names.txt", stringToWrite, function (err) {
        if (err) {
            return console.log(err);
        } else {
            // call your callback or return
        }
    });
});
In the code above, I do the following:
1) Wait for all the DB queries to finish.
2) Iterate over the result list and build the single string that we need to write to the file.
3) Write to the file.
Once you go asynchronous you cannot go back - all your code needs to be asynchronous. In Node 8 you handle this with the async and await keywords. In older versions you can use Promise - async/await is just syntax sugar for it anyway.
However, most of the APIs in Node are older than Promise, and so they use callbacks instead. There is a promisify function (util.promisify) to convert callback-style functions into promise-returning ones, as sketched below.
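A minimal sketch, assuming Node 8+ where util.promisify is available:
const util = require('util');
const fs = require('fs');

// fs.appendFile normally takes a callback; promisify gives us a promise-returning version
const appendFile = util.promisify(fs.appendFile);

appendFile('./names.txt', 'some name\n')
    .then(() => console.log('written'))
    .catch(err => console.error(err));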
There are two ways to handle this, you can let all the asynchronous actions happen at the same time, or you can chain them one after another (which preserves order but takes longer).
So, collection.find is asynchronous, it either takes a callback function or returns a Promise. I'm going to assume that the API you're using does the latter, but your problem could be the former (in which case look up promisify).
var findPromise = collection.find({ id: { "$in": item}});
Now, at this point findPromise holds the running find action. We say this is a promise that resolves (completes successfully) or rejects (throws an error). We want to queue up an action to do once it completes, and we do that with then:
// The result of collection.find is the collection of matches
findPromise.then(function(docs) {
    // Any code we run here happens asynchronously
});
// Code here will run first
Inside the promise we can return further promises (allowing them to be chained - complete one async, then complete the next, then fire the final resolve once all done) or use Promise.all to let them all happen in parallel and resolve once done:
var p = new Promise(function(resolve, reject) {
    var findPromise = collection.find({ id: { "$in": item } });
    findPromise.then(function(docs) {
        var singleDocNames = [];
        for (var i = 0; i < docs.length; i++) {
            var singleDoc = docs[i];
            if (!singleDoc)
                continue;
            for (var t = 0; t < singleDoc.length; t++)
                singleDocNames.push(singleDoc[t].name);
        }
        // Resolve the outer promise with the final result
        resolve(singleDocNames);
    });
});

// When the promise finishes log it to the console
p.then(console.log);
// Code inline here will fire before the promise
This is much easier in node 8 with async/await:
async function p() {
    // Await puts the rest of this function in the .then() of the promise
    const docs = await collection.find({ id: { "$in": item}});
    const singleDocNames = [];
    for (var i = 0; i < docs.length; i++) {
        // ... synchronous code unchanged ...
    }
    // Resolve the outer promise with the final result
    return singleDocNames;
}

// async functions can be treated like promises
p().then(console.log);
If you need to write the results to a text file asynchronously there are a couple of ways to do it - you can wait until the end and write all of them, or chain a promise to write them after each find, though I find parallel IO operations tend to be at more risk of deadlocks.
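As a rough sketch of the "wait until the end" option (assuming Node 8+ and that allNames has already been collected from the finds, e.g. the singleDocNames arrays concatenated):
const util = require('util');
const fs = require('fs');
const appendFile = util.promisify(fs.appendFile);

async function writeNames(allNames) {
    // one asynchronous write at the end instead of one appendFile call per name
    await appendFile("C:/Users/x/Desktop/names.txt", allNames.join("\n") + "\n");
}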
The code above has multiple issues with asynchronous control flow. Similar code could work, but only if ES2017 async/await operators were used for every async operation.
Of course, you can also achieve a solution with a sequence of promises. Solution:
let flowPromise = Promise.resolve();
const chunk = 200;
for (let i = 0, j = document.mainarray.length; i < j; i += chunk) {
    flowPromise = flowPromise.then(() => {
        const temparray = document.mainarray.slice(i, i + chunk);
        const docs = collection.find({ id: { "$in": temparray } }).toArray();
        return docs.then((singleDoc) => {
            let innerFlowPromise = Promise.resolve();
            if (singleDoc) {
                console.log("single doc length : " + singleDoc.length);
                for (let t = 0, len = singleDoc.length; t < len; t++) {
                    innerFlowPromise = innerFlowPromise.then(() => new Promise((resolve, reject) =>
                        fs.appendFile(
                            "C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n",
                            err => (err ? reject(err) : resolve())
                        )
                    ));
                }
            }
            return innerFlowPromise;
        });
    });
}

flowPromise.then(() => {
    console.log('Done');
}).catch((err) => {
    console.log('Error: ', err);
})
When using Promise-based control flow, always remember that loops and function-call sequences will not pause execution until an async operation is done, so you have to chain all the then sequences manually, or use async/await syntax.
Which version of Node.js are you using? You should use the native async/await support that is built into newer versions of Node.js (no libraries required). Also note that fs.appendFile is asynchronous, so you need to either promisify it (e.g. with util.promisify) to turn the callback into a promise, or just use appendFileSync and accept the blocking IO (which might be okay for you, depending on the use case).
async function(){
    ...
    for (var item of generalArr) {
        var singleDoc = await collection.find({ id: { "$in": item } }).toArray();
        // if(singleDoc) { this won't do anything, since collection.find will always return something even if its just an empty array
        console.log("single doc length : " + singleDoc.length);
        var t;
        for (t = 0, len = singleDoc.length; t < len; t++) {
            fs.appendFileSync("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n");
        }
    }
}
var docs = collection.find({ id: { "$in": document.mainarray } }), // returns a cursor
    doc,
    names = [],
    toInsert;

function saveToFile(cb) {
    toInsert = names.splice(0, 100);
    if (!toInsert.length) return cb();
    fs.appendFile("C:/Users/x/Desktop/names.txt", toInsert.join("\n"), cb);
}

(function process() {
    if (docs.hasNext()) {
        doc = docs.next();
        doc.forEach(function(d) {
            names.push(d.name);
        });
        if (names.length === 100) {
            // save when we have 100 names in memory and clear the memory
            saveToFile(function(err) {
                process();
            });
        } else {
            process();
        }
    } else {
        saveToFile(function() {
            console.log('All done');
        });
    }
}()); // invoke the function
If you can't solve your issue using core modules and basic Node.js, there is most likely a lack of understanding of how things work or insufficient knowledge of a library (in this case the FileSystem module).
Here is how you can solve your issue without third-party libraries and such.
'use strict';

const fs = require('fs');

let chunk = 200;
// How many rounds of array chunking we expect
let rounds = Math.ceil(mainArray.length / chunk);
// copy to temp (for the counter)
let tempRounds = rounds;
// set file name
let filePath = './names.txt'
// Open writable Stream
let myFileStream = fs.createWriteStream(filePath);

// from round: 0-${rounds}
for (let i = 0; i < rounds; i++) {
    // assume array has ${chunk} elements left in this round
    let tempChunk = chunk;
    // if ${chunk} is too big, i.e. i=3 -> i*chunk = 600, but mainArray.length = 512
    // This way we adjust the last round for "the leftovers"
    if (mainArray.length < i * chunk) tempChunk = Math.abs(mainArray.length - i * chunk);
    // slice it for this round
    let tempArray = mainArray.slice(i * chunk, i * chunk + tempChunk);
    // get stuff from DB
    let docs = collection.find({ id: { "$in": tempArray } }).toArray();
    docs.then(function(singleDoc) {
        // for each name in the doc
        for (let j = 0; j < singleDoc.length; j++) {
            // write to stream
            myFileStream.write(singleDoc[j].name + "\n");
        }
        // declare round done (reduce tempRounds) and check if it hits 0
        if (!--tempRounds) {
            // if all rounds are done, end the stream
            myFileStream.end();
            // BAM! you done
            console.log("Done")
        }
    });
}
The key is to use fs write streams (fs.createWriteStream) :) - see the Node.js fs documentation for details.

Resolving promises inside for loop

I am trying to read a JSON object using a for loop to format the JSON data and send it back to the client by putting the formatted response into a model object.
Inside the for loop, I am dealing with two promises based upon a few conditions. There are two functions, each returning a promise. How can I get my final data after all the promises are resolved? Thanks in advance.
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        getSomeData().then(function(data) {
            //some operation using data
        })
    }
    if (someOtherCondition) {
        getSomeOtherData().then(function(data) {
            //some operation using data
        })
    }
}
Use Promise.all([ promise1, promise2 ]) (see Promise.all() on MDN) in the case of standard JS Promises (ES2015+). It returns a new promise, which gets resolved once all passed promises have resolved. But be aware - it gets rejected immediately as soon as any one promise rejects (it won't wait for the other promises).
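For example, a minimal sketch of that fail-fast behaviour:
const ok = new Promise(resolve => setTimeout(resolve, 100, 'ok'));
const bad = Promise.reject(new Error('boom'));

Promise.all([ok, bad])
    .then(values => console.log(values))     // never reached
    .catch(err => console.log(err.message)); // "boom" - logged before the 100 ms timer fires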
You might do as follows:
var promises = [],
    JSONData_1 = ["chunk_11", "chunk_12", "chunk_13"],
    JSONData_2 = ["chunk_21", "chunk_22", "chunk_23"],
    getJSONData = (b, i) => new Promise((resolve, reject) => setTimeout(_ => b ? resolve(JSONData_1[i])
                                                                               : resolve(JSONData_2[i]), 1000));

for (var i = 0; i < JSONData_1.length; i++) {
    if (Math.random() < 0.5) promises.push(getJSONData(true, i));
    else promises.push(getJSONData(false, i));
}

Promise.all(promises)
    .then(a => console.log(a));
You can use jQuery.when().
var deferredList = [];
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        deferredList.push(getSomeData().then(function(data) {
            //some operation using data
        }))
    }
    if (someOtherCondition) {
        deferredList.push(getSomeOtherData().then(function(data) {
            //some operation using data
        }))
    }
}

jQuery.when.apply(jQuery, deferredList).done(function() {
    // final to do..
}).fail(function() {
    // even if a single one fails! be aware of this
});
jQuery.when() documentation
You can do it in multiple ways. We can also use a for...of loop with async/await to process each result sequentially while looping, if that is a requirement. Something like this:
function downloadPage(url) {
    return Promise.resolve('some value');
}

(async () => {
    for (let url of urls) {
        let result = await downloadPage(url);
        // Process the result
        console.log(result);
    }
})();
You could do something like this:
var arr = [], arr2 = [];
for (var i = 0; i < jsonData.length; i++) {
    if (someCondition) {
        //push onto the array inputs for getSomeData()
        arr.push(jsonData[i]);
    }
    if (someOtherCondition) {
        arr2.push(jsonData[i]);
    }
}

processArr(0);
processArr2(0);

function processArr(idx) {
    if (idx >= arr.length) {
        //done
    }
    else {
        getSomeData().then(function(data) {
            // some operation using data
            // optionally store in a results array
            // recurse
            processArr(idx + 1)
        })
    }
}

function processArr2(idx) {
    if (idx >= arr2.length) {
        //done
    }
    else {
        getSomeOtherData().then(function(data) {
            // some operation using data
            // recurse
            processArr2(idx + 1)
        })
    }
}
