How to insert data into a db with the index as the title? I think this is a typical async question, but I can't solve it. The order in which i is inserted is not sequential.
// Question code: creates 50 jobs. NOTE(review): `axios` is never required in
// this snippet, and lodash `times` invokes its callback synchronously — all 50
// createJob calls start at once, so the POSTs complete (and insert) in whatever
// order the server answers. That is the out-of-order behavior described above.
const { times } = require('lodash')
module.exports = (async () => {
try {
// Posts one job; resolves after the server replies.
const createJob = async (i) => {
console.log(i) //executed first 1 - 50 first
const { data } = await axios
.post('http://localhost:3000/job/create', {
"title": i,
"created_at": Date.now()
})
if(data) {
console.log('job created. ', data)
}
}
// Fire-and-forget: the returned promises are discarded, so the try/catch
// above can never observe a failed request either.
times(50, (i) => {
createJob(++i)
});
} catch(e) {
console.log('Error creating ad. ', e)
}
})()
You can chain your promises so each createJob is called after the previous one is finished. You can create an array of indexes and use array.reduce to do the chaining. In the code below, I replace your axios call with a new Promise(...), just for simulating:
// Simulated job creation: logs the index, "posts" it with a 1-second fake
// request, logs the created job, and resolves with the returned data.
var createJob = async (index) => {
  console.log(index) //executed first 1 - 50 first
  const simulatedCall = new Promise((resolve, reject) => {
    setTimeout(() => resolve({ data: index }), 1000);
  });
  const { data } = await simulatedCall;
  if (data) {
    console.log('job created. ', data);
  }
  return data;
}
// Build [0..9] and chain one createJob per index so each waits for the last.
var arr = Array.from({ length: 10 }, (unused, index) => index);
arr.reduce(
  (chain, jobId) => chain.then(() => createJob(jobId)),
  Promise.resolve()
);
You can also use Promise.all to solve this. Essentially you save all the promises of the request in an array. Once, all the requests are complete, you can iterate over them.
const axios = require('axios');
module.exports = (async () => {
try {
const createJob = async (i, url) => {
console.log(i, url) //executed first 1 - 50 first
return axios.get(url);
}
const a = ['http://httpbin.org/anything/123', 'http://httpbin.org/anything/456']
const promiseArray = [];
for (let j = 0; j < 4; j++) {
promiseArray.push(createJob(j, a[j % 2]));
}
Promise.all(promiseArray).then((result) => {
console.log('result', typeof (result));
for (let i = 0; i < result.length; i++) {
console.log(result[i].data.url);
}
});
} catch (e) {
console.log('Error creating ad. ', e)
}
})()
I have used httpbin for making actual calls and random payload to make sure the execution order is always the same.
Related
I have a function that reads files in a directory asynchronously (readdir) and filters for csv files. I also have an async function that calls readdir filtered for csv files and then iterates through them with fast-csv. Logging to the console the list and its length within the .on('end') function, I can see that they produce the desired results. however, my async call only resolves the first iteration.
const fs = require(`fs`);
const path = require(`path`);
const csv = require(`fast-csv`);
// Shared accumulator — presumably populated inside the csv `data` handlers
// below ("Doing stuff"); verify against the omitted code.
var ofsActivities = [];
// NOTE(review): the path is concatenated *before* being handed to path.join,
// which defeats join's purpose. The trailing slash is load-bearing: filenames
// are appended with plain string concatenation below — confirm before "fixing".
const currDir = path.join(__dirname + `/../Downloads/`);
// Lists the entries of a directory, resolving with the filenames or rejecting
// with the underlying fs error — same contract as the original.
const readdir = async dirname => {
  // IDIOM: use the built-in promise API instead of hand-promisifying the
  // callback form of fs.readdir.
  return fs.promises.readdir(dirname);
};
// True when the filename has a .csv extension.
const filtercsvFiles = (filename) => {
  // BUG FIX: the original tested split(`.`)[1] == `csv`, which rejects names
  // with extra dots ("report.v2.csv") and wrongly accepts names like
  // "data.csv.bak". Check the actual extension instead.
  return filename.endsWith(`.csv`);
};
// Question code: parses every csv in currDir. BUG (explained in the answer
// below): `resolve(ofsActivities)` runs on the FIRST file's `end` event, so the
// awaited promise settles after one file even though the other parses keep
// running in the background.
const ofsDataObjectArray = async () => {
return readdir(currDir).then(async filenames => {
return await new Promise((resolve, reject) => {
filenames = filenames.filter(filtercsvFiles);
for (let i = 0; i < filenames.length; i++) {
let currFilePath = currDir + filenames[i];
console.log(`Reading File: ${filenames[i]}`);
csv
.parseFile(currFilePath)
.on(`data`, (data) => {
//Doing stuff
})
.on(`error`, error => reject(error))
.on(`end`, () => resolve(ofsActivities)); //Inserting a console.log(ofsActivities.length) logs the correct and expected length on the last iteration
}
});
});
};
(async function main() {
  const list = await ofsDataObjectArray(); // This seems to only resolve the first iteration within the promise
  console.log(list.length);
})();
You need to call resolve() only when the LAST csv.parseFile() is done. You're calling it when the FIRST one is done, thus the promise doesn't wait for all the others to complete. I'd suggest you promisify csv.parseFile() by itself and then await that inside the loop or accumulate all the promises from csv.parseFile() and use Promise.all() with all of them.
Here's using await on each csv.parseFile():
// Serial answer: parse the csv files one at a time, awaiting each file's `end`
// event before starting the next, then resolve with the shared accumulator.
const ofsDataObjectArray = async () => {
  // IDIOM FIX: the original mixed .then() with an async callback; pure
  // async/await keeps the control flow linear with identical behavior.
  const filenames = (await readdir(currDir)).filter(filtercsvFiles);
  for (let i = 0; i < filenames.length; i++) {
    let currFilePath = currDir + filenames[i];
    console.log(`Reading File: ${filenames[i]}`);
    // One promise per file; it settles only when this file finishes parsing.
    await new Promise((resolve, reject) => {
      csv.parseFile(currFilePath)
        .on(`data`, (data) => {
          //Doing stuff
        })
        .on(`error`, reject)
        .on(`end`, () => resolve(ofsActivities));
    });
  }
  return ofsActivities;
};
Or, here's running them in parallel with Promise.all():
// Parallel answer: start a parse for every csv file at once and wait for all
// of them via Promise.all.
const ofsDataObjectArray = async () => {
  // IDIOM FIX: async/await instead of a .then() chain; also adds the statement
  // terminator the original omitted after Promise.all(...).
  const filenames = (await readdir(currDir)).filter(filtercsvFiles);
  return Promise.all(filenames.map(file => {
    let currFilePath = currDir + file;
    console.log(`Reading File: ${file}`);
    return new Promise((resolve, reject) => {
      csv.parseFile(currFilePath)
        .on(`data`, (data) => {
          //Doing stuff
        })
        .on(`error`, error => reject(error))
        .on(`end`, () => resolve(ofsActivities));
    });
  }));
};
P.S. It's unclear from your question what final result you're trying to accumulate (you have left that out) so you will have to add that to this code in the "doing stuff" code or by modifying the resolve(something) code.
I have a button; when the user clicks on it, I send a request and receive an answer. If the user clicks this button 100 times, I want to send 100 requests to the server, each request sent after the previous one completes, because I need the previous response in the next request.
example:
<button #click="sendRequest">send</button>
// NOTE(review): each click fires a POST immediately — nothing serializes the
// requests, so 100 fast clicks produce 100 overlapping requests, and the
// this.lastUpdateKey read below may be stale by the time later requests fire
// (exactly the problem described above).
methods:{
sendRequest:function(){
// NOTE(review): 'https:/url/...' has a single slash — presumably a
// placeholder; a real URL needs 'https://'.
axios.post('https:/url/store-project-item', {
'id': this.project.id,
"items": this.lists,
'labels': this.labels,
'last_update_key': this.lastUpdateKey,
'debug': 'hYjis6kwW',
}).then((r) => {
if (r.data.status) {
this.change = false
this.lastUpdateKey = r.data.lastUpdateKey;
this.showAlert('success')
} else {
if (r.data.state == "refresh") {
// presumably the server signals stale state here, so the project is
// refetched — confirm against the backend contract
this.showAlert('error')
this.getProject()
} else {
this.showAlert('error')
}
}
}).catch(() => {
this.showAlert('error')
})
}}
I keep a higher-order function (i.e. a function that returns a function) withMaxDOP (DOP = degrees-of-parallelism) handy for this kind of thing:
// Wrap f so that at most maxDop invocations can be in flight at once.
const withMaxDOP = (f, maxDop) => {
  const [returnToken, takeToken] = createAsyncStack();
  // Seed the stack with maxDop tokens; each in-flight call holds exactly one.
  let seeded = 0;
  while (seeded < maxDop) {
    returnToken({});
    seeded += 1;
  }
  return async (...callArgs) => {
    const token = await takeToken();
    try {
      return await f(...callArgs);
    } finally {
      returnToken(token);
    }
  };
};
The function makes use of an async stack data structure (implementation is in the attached demo), where the pop function is async and will only resolve when an item is available to be consumed. maxDop tokens are placed in the stack. Before invoking the supplied function, a token is popped from the stack, sometimes waiting if no token is immediately available. When the supplied function completes, the token is returned to the stack. This has the effect of limiting concurrent calls to the supplied function to the number of tokens placed in the stack.
You can use the function to wrap a promise-returning (i.e. async) function and use it to limit re-entrancy into that function.
In your case, it could be used as follows:
sendRequest: withMaxDOP(async function(){ /*await axios.post...*/ }, 1)
to ensure that no call to this function ever overlaps another.
Demo:
// Async LIFO: pop() resolves immediately when a value is stored, otherwise it
// waits until the next push() hands one over.
const createAsyncStack = () => {
  const available = [];
  const pendingPops = [];
  const push = (value) => {
    // Hand the value straight to the oldest waiting consumer, if any.
    const resolver = pendingPops.length > 0 ? pendingPops.shift() : null;
    if (resolver) {
      resolver(value);
    } else {
      available.push(value);
    }
  };
  const pop = () => {
    if (available.length === 0) {
      // Nothing stored: park this consumer until a push arrives.
      return new Promise((resolve) => pendingPops.push(resolve));
    }
    const value = available.pop();
    // Pushing `undefined` is treated as a programming error, as before.
    return typeof value === 'undefined'
      ? Promise.reject(Error('unexpected'))
      : Promise.resolve(value);
  };
  return [push, pop];
};
// Limit concurrent invocations of f to maxDop (demo copy).
const withMaxDOP = (f, maxDop) => {
  const [push, pop] = createAsyncStack();
  for (let remaining = maxDop; remaining > 0; --remaining) push({});
  // Acquire a token (waiting if none is free), run f, always release.
  return async (...args) => {
    const token = await pop();
    try {
      return await f(...args);
    } finally {
      push(token);
    }
  };
};
// example usage
// Resolves (with undefined) after `duration` milliseconds.
const delay = (duration) => new Promise((resolve) => setTimeout(resolve, duration));
// Demo worker: waits one second (simulated IO), then greets `name`.
async function doSomething(name) {
  console.log("starting");
  await delay(1000); // simulate async IO
  const greeting = `hello ${name}`;
  console.log(`returning: ${greeting}`);
  return greeting;
}
const limitedDoSomething = withMaxDOP(doSomething, 1);
// call limitedDoSomething 5 times
const promises = Array.from({ length: 5 }, (_, i) => limitedDoSomething(`person${i}`));
// collect the resolved values and log
Promise.all(promises).then((values) => console.log(values));
I know that old school for loop works in the traditional way - that it waits for the await to finish getting results.
But in my use case, I need to read a file from local/s3 and process it line by line, and for each line I need to call an External API.
Generally I use await inside the loop because everything runs inside a lambda and I don't want to use up all the memory by running it in parallel.
Here I am reading the file using a stream.on() method, and in order to use await inside that, I need to add async in read method, like so:
// NOTE(review): `data = stream.read() !== null` parses as
// `data = (stream.read() !== null)` — data becomes a *boolean*, not the chunk.
// It needs parentheses: `while ((data = stream.read()) !== null)`.
stream.on('data',async () =>{
while(data=stream.read()!==null){
console.log('line');
const requests = getRequests(); // sync code,no pblms
for(let i=0;i<requests.length;i++){
// NOTE(review): `request[i)` is a typo (mismatched bracket, and the array is
// named `requests`) — presumably meant `requests[i]`.
const result = await apiCall(request[i);
console.log('result from api')
const finalResult = await anotherapiCall(result.data);
}
}
});
This is working but order in which the lines are processed is not guaranteed. I need all in a sync manner. Any help?
Complete Code
// Reads an SOI file line-by-line (via byline) and, for each non-UHL/UTL record,
// builds XML requests and posts them to LOST_URL, tallying success/failure.
// NOTE(review): as written this cannot run — the 'readable' handler below is
// NOT async, yet its body uses `await` (a SyntaxError). Making the handler
// async "compiles" but then the stream does not wait for the returned
// promises, which is the ordering problem described above.
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
// Header/trailer records (UHL/UTL) are skipped below.
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++; // NOTE(review): the counters are not declared in this snippet
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
console.log('Read line');
console.log(id);
try {
for (let i = 0;i < XMLRequests.length;i++) {
totalRequestsCount++;
console.log('Sending request');
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log("got response");
const responseObj = await xml2js.
parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
failedRequestsCount++;
} else {
successRequestsCount++;
console.log('Response from the Lost Server');
// NOTE(review): `response` is a single response object, not an array —
// `response[i].data` looks like it should be `response.data`.
console.log(response[i].data);
}
}
} catch (err) {
console.log(err);
}
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
});
}
// Posts `data` to `url` using the axios instance carried in options; resolves
// with the axios response, or with null (after logging) when no url is given.
async function sendLoSTRequest (options, params) {
  const { axios } = options;
  const { url, data } = params;
  if (!url) {
    console.log('URL is not found');
    return null;
  }
  return axios.post(url, data);
}
Code needs to flow like so:
read a line in a sync way
process the line and transform the line into an array of two members
for every member call API and do stuff
once line is complete, look for another line, all done in order
UPDATE: I got a workaround — but it fires the stream's end handler without waiting for the stream's queued work to finish.
// UPDATE variant: serializes the per-record request batches through `queue`
// (a self-chaining promise) so requests run in file order. NOTE(review): the
// 'end' handler below still fires as soon as the file is *read*, not when the
// queued requests have finished, so the summary counters it prints can be lower
// than the final totals (the behavior complained about above).
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const { ERR_DIR, fs, xml2js, LOST_URL, byline, event } = options;
const { key } = params;
const responseObject = {};
let totalRecordsCount = 0;
let totalRequestsCount = 0;
let failedRequestsCount = 0;
let successRequestsCount = 0;
let dataObject = {};
// Serializer: each enqueued task starts only after the previous one settles.
const queue = (() => {
let q = Promise.resolve();
return fn => (q = q.then(fn));
})();
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
// Skip UHL/UTL header and trailer records.
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line); // soiParser/createLoSTRequestXML/generateKey defined elsewhere
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
// eslint-disable-next-line no-loop-func
queue(async () => {
try {
for (let i = 0;i < XMLRequests.length;i++) {
console.log('Sending request');
console.log(id);
totalRequestsCount++;
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log('got response');
const responseObj = await xml2js.
parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
// console.log('Response have the error:');
// await handleError(options, { err: responseObj, id });
failedRequestsCount++;
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
} else {
console.log('Response from the Lost Server');
// NOTE(review): `response` is a single axios response — `response[i].data`
// is presumably meant to be `response.data`.
console.log(response[i].data);
successRequestsCount++;
}
}
} catch (err) {
console.log(err);
}
});
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
Object.assign(responseObject, {
failedRequestsCount,
successRequestsCount,
totalRecordsCount,
totalRequestsCount
});
});
}
Thank You
The sample code at the top of your question could be rewritten like
// Self-chaining serializer: every enqueued fn runs after the previous settles.
const queue = (() => {
  let tail = Promise.resolve();
  return (fn) => {
    tail = tail.then(fn);
    return tail;
  };
})();
stream.on('data', async () => {
  // BUG FIX: the original condition `data = stream.read() !== null` assigned
  // the *boolean* comparison result to data; the assignment must be
  // parenthesized so data holds the chunk. Also `request[i]` referred to an
  // undeclared name — the array is `requests`.
  while ((data = stream.read()) !== null) {
    console.log('line');
    const requests = getRequests(); // sync code,no pblms
    // Queue the batch so request order follows line order across chunks.
    queue(async () => {
      for (let i = 0; i < requests.length; i++) {
        const result = await apiCall(requests[i]);
        console.log('result from api');
        const finalResult = await anotherapiCall(result.data);
      }
    });
  }
});
Hopefully that will be useful for the complete code
If anyone wants a solution for processing the file synchronously — i.e., reading line by line and executing some async call per line — it's recommended to use the built-in stream Transform. There you can create a transform function and invoke its callback when the async work finishes.
That will help anyone who faces this issue.
Through2 is a small npm library that also can be used for the same.
By following a good answer by T.J. Crowder to a SO Thread, I managed to combine a loop of async tasks with Promise.all. The actual problem is, first I want to read one excel file in a Promisified function and a list of image files in the second Promisified function.
Here is the code functions performing files reading.
import { User } from "./types";
import * as XLSX from "xlsx";
// Loading users data from Excel Data... Id,Name,CardNo
// Loads users (Id, Name, CardNo) from the first sheet of an Excel file.
// Resolves with the parsed User[]; BUG FIX: now also rejects if the file
// cannot be read — the original promise could never settle on a read error.
export async function loadUsersData(usersFile: File) {
  const result_users: User[] = await new Promise<User[]>((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => reject(reader.error); // surface read failures
    reader.onload = function (e) {
      const data = e.target.result;
      const readedData = XLSX.read(data, { type: 'binary' });
      const wsname = readedData.SheetNames[0];
      const ws = readedData.Sheets[wsname];
      /* Convert array to json */
      const parsedData = XLSX.utils.sheet_to_json(ws, { header: 1, blankrows: false });
      parsedData.shift(); // drop the header row
      const users: User[] = parsedData.map((item: any) => {
        const id = item[0].toString().trim();
        const name = item[1].toString().trim();
        const cardNo = item[2].toString().trim();
        const user: User = { id, name, cardNo };
        return user;
      });
      resolve(users);
    };
    // NOTE(review): readAsBinaryString is deprecated; readAsArrayBuffer with
    // XLSX type 'array' is the modern equivalent — left as-is to avoid changing
    // the XLSX.read options here.
    reader.readAsBinaryString(usersFile);
  });
  return result_users;
}
//Loading Images of Users Faces to display in material table along with other user info
export async function loadUsersFaces(users: User[], facesList: FileList) {
const facesArray = Array.from(facesList)
const promises=facesArray.map(async face=>{
return await readFace(face, users);
})
let result_users: any=await Promise.all(promises);
return result_users
}
// Resolves with a copy of the matching user enriched with the image's Base64
// data URL. BUG FIX: the original looped with users.map and only called
// resolve() for a match, so the promise hung forever when no user matched the
// file name; it now resolves with null in that case so the Promise.all in
// loadUsersFaces can still settle.
function readFace(face: File, users: User[]) {
  return new Promise((resolve) => {
    const reader = new FileReader();
    reader.onload = function (e) {
      let faceBase64String = e.target.result; //getting Base64String of image to render as custom column in material-table as https://material-table.com/#/docs/features/custom-column-rendering
      // First matching user wins, mirroring the original's first resolve().
      const match = users.find(
        (user) => face.name.includes(user.id) && face.name.includes(user.name)
      );
      if (match) {
        let newUser = { ...match, face: faceBase64String };
        console.log(`Resoling ${JSON.stringify(newUser)}`);
        resolve(newUser);
      } else {
        resolve(null); // no user matches this image file
      }
    };
    reader.readAsDataURL(face);
  });
}
And here is the code of Actions performing files reading one after the other.
//Here is usersFile is an excel file Blob and FileList contain list of image files
export const loadUsers = (usersFile: File,faces: FileList) => (dispatch:Dispatch) => {
dispatch(actions.startCall({ callType: callTypes.list }));
usersService.loadUsersData(usersFile).then((users:any)=>{ // Don't know how to tell compiler that it's User[]
usersService.loadUsersFaces(users,faces).then((users:any)=>{
console.log(users); // Here I should have users including Base64 Strings of face images in face property
dispatch(actions.usersFetched({ totalCount:users.length, entities:users }));
})
})
};
My answer to this other question comes close to answering this, but I'm not sure it completely does.
Since you want to use the first operation's result in the second, and the second operation's result in the third, etc., you can't run the asynchronous actions in parallel. So you have to run them in series.
If you can use an async function (well supported these days), you'd do that like something this:
// Template — not runnable as-is: replace the placeholder comment below with a
// real seed value (as written, `let lastResult = ;` is a syntax error). Runs
// the async ops strictly in series, feeding each result into the next call.
async function doSeriesOfThings() {
let lastResult = /* the first value to pass, perhaps `undefined` or `null` */;
for (const obj of arrayofObjs) {
lastResult = await doSomeAsyncStuff(obj, lastResult);
}
return lastResult;
}
Live Example:
const arrayofObjs = [{ value: 1 }, { value: 2 }, { value: 3 }];

// Simulated async op: resolves with obj.value + value after a random delay.
function doSomeAsyncStuff(obj, value) {
  console.log(`doSomeAsyncStuff(${JSON.stringify(obj)}, ${value})`);
  return new Promise((resolve) => {
    const wait = Math.random() * 500;
    setTimeout(() => resolve(obj.value + value), wait);
  });
}

// Runs the async ops strictly in series, threading the result through.
async function doSeriesOfThings() {
  let carry = 0;
  for (const entry of arrayofObjs) {
    carry = await doSomeAsyncStuff(entry, carry);
  }
  return carry;
}

doSeriesOfThings()
  .then((result) => console.log(`Final result: ${result}`))
  .catch((error) => console.error(`Error: ${error.message || String(error)}`));
If you also need an array of results, just build it up in the function:
// Template — not runnable as-is: fill in the placeholder seed value below
// (as written, `let lastResult = ;` is a syntax error). Same series pattern as
// above, but additionally collects every intermediate result.
async function doSeriesOfThings() {
const results = [];
let lastResult = /* the first value to pass, perhaps `undefined` or `null` */;
for (const obj of arrayofObjs) {
lastResult = await doSomeAsyncStuff(obj, lastResult)
results.push(lastResult);
}
return results;
}
Live Example:
const arrayofObjs = [{ value: 1 }, { value: 2 }, { value: 3 }];

// Simulated async op: resolves with obj.value + value after a random delay.
function doSomeAsyncStuff(obj, value) {
  console.log(`doSomeAsyncStuff(${JSON.stringify(obj)}, ${value})`);
  return new Promise((resolve) => {
    const wait = Math.random() * 500;
    setTimeout(() => resolve(obj.value + value), wait);
  });
}

// Series runner that also records every intermediate result.
async function doSeriesOfThings() {
  const results = [];
  let carry = 0;
  for (const entry of arrayofObjs) {
    carry = await doSomeAsyncStuff(entry, carry);
    results.push(carry);
  }
  return results;
}

doSeriesOfThings()
  .then((result) => console.log(`Final result: ${JSON.stringify(result)}`))
  .catch((error) => console.error(`Error: ${error.message || String(error)}`));
If you can't use an async function, it's fairly similar, but you build up a promise chain:
// Promise-chain template (no async/await): each link feeds its result onward.
function doSeriesOfThings() {
  let chain = Promise.resolve(/* the first value to pass, perhaps `undefined` or `null` */);
  for (const obj of arrayofObjs) {
    chain = chain.then((result) => doSomeAsyncStuff(obj, result));
  }
  return chain;
}
Live Example:
const arrayofObjs = [{ value: 1 }, { value: 2 }, { value: 3 }];

// Simulated async op: resolves with obj.value + value after a random delay.
function doSomeAsyncStuff(obj, value) {
  console.log(`doSomeAsyncStuff(${JSON.stringify(obj)}, ${value})`);
  return new Promise((resolve) => {
    const wait = Math.random() * 500;
    setTimeout(() => resolve(obj.value + value), wait);
  });
}

// No async/await: fold the array into one sequential promise chain.
function doSeriesOfThings() {
  return arrayofObjs.reduce(
    (chain, obj) => chain.then((result) => doSomeAsyncStuff(obj, result)),
    Promise.resolve(0)
  );
}

doSeriesOfThings()
  .then((result) => console.log(`Final result: ${result}`))
  .catch((error) => console.error(`Error: ${error.message || String(error)}`));
And again, if you need an array of results, you can do that too:
// Promise-chain template that also records every intermediate result.
function doSeriesOfThings() {
  const results = [];
  let chain = Promise.resolve(/* the first value to pass, perhaps `undefined` or `null` */);
  for (const obj of arrayofObjs) {
    chain = chain.then((previous) =>
      doSomeAsyncStuff(obj, previous).then((current) => {
        results.push(current);
        return current;
      })
    );
  }
  return chain.then(() => results);
}
Live Example:
const arrayofObjs = [{ value: 1 }, { value: 2 }, { value: 3 }];

// Simulated async op: resolves with obj.value + value after a random delay.
function doSomeAsyncStuff(obj, value) {
  console.log(`doSomeAsyncStuff(${JSON.stringify(obj)}, ${value})`);
  return new Promise((resolve) => {
    const wait = Math.random() * 500;
    setTimeout(() => resolve(obj.value + value), wait);
  });
}

// Promise-chain version that also records every intermediate result.
function doSeriesOfThings() {
  const results = [];
  let chain = Promise.resolve(0);
  for (const obj of arrayofObjs) {
    chain = chain.then((previous) =>
      doSomeAsyncStuff(obj, previous).then((current) => {
        results.push(current);
        return current;
      })
    );
  }
  return chain.then(() => results);
}

doSeriesOfThings()
  .then((result) => console.log(`Final result: ${JSON.stringify(result)}`))
  .catch((error) => console.error(`Error: ${error.message || String(error)}`));
If you want to use loops to chain promises you will need async & await
// Apply the promise-returning function n times in series, threading the
// (possibly modified) data through each iteration.
async function chainPromiseNTimes(function_returning_promise, n, data) {
  let current = data;
  for (let step = 0; step < n; step++) {
    current = await function_returning_promise(current);
  }
  return current;
}
// Example usage — the top-level `await` must sit inside an async function or an
// ES module; doSomeAsyncStuff/arrayofObjs are from the snippets above.
let result = await chainPromiseNTimes(doSomeAsyncStuff, 5, arrayofObjs)
You could try pushing functions returning promises instead of promises onto your array. This way you could simply call them when the data from the last promise is actually available.
// Increment every element of the array, wrapped in a promise.
function doSomeAsyncStuff(arrayofObjs) {
  // this is not rly asynchronous but for the purpose of example will do
  // IDIOM FIX: the original mapped with `++obj`, which mutates the callback's
  // parameter; `obj + 1` yields the same values without the side effect.
  return Promise.resolve(arrayofObjs.map((obj) => obj + 1));
}
// Run each callback in order, feeding the previous result into the next one.
async function waitForPromiseChain(initialData, functionCallbacks) {
  let current = initialData;
  for (const callback of functionCallbacks) {
    current = await callback(current);
  }
  return current;
}
const promises = [];
// BUG FIX: the loop counter was assigned without a declaration, creating an
// implicit global (and a ReferenceError in strict mode / ES modules).
for (let i = 0; i < 5; i++) {
  promises.push((arrayofObjs) => doSomeAsyncStuff(arrayofObjs));
}
waitForPromiseChain([0, 0, 0, 0, 0], promises)
  .then(console.log);
In the example above I tried to keep the code as close to your original as possible. However, I took the liberty of redesigning the function callbacks to accept any function in a chain instead of a single one.
If you are opposed to using async/await the same effect can be achieved with usage of normal then, even if with some difficulty.
// Increment every element of the array, wrapped in a promise.
function doSomeAsyncStuff(arrayofObjs) {
  // this is not rly asynchronous but for the purpose of example will do
  // IDIOM FIX: `++obj` mutated the map callback's parameter; `obj + 1`
  // computes the same values without the side effect.
  return Promise.resolve(arrayofObjs.map((obj) => obj + 1));
}
// then()-only version: chain every callback onto an initial resolved promise.
function waitForPromiseChain(initialData, functionCallbacks) {
  return functionCallbacks.reduce(
    (chain, callback) => chain.then((data) => callback(data)),
    Promise.resolve(initialData)
  );
}
const promises = [];
// BUG FIX: declare the loop counter — the original `for (i = 0; ...)` created
// an implicit global (and throws in strict mode / ES modules).
for (let i = 0; i < 5; i++) {
  promises.push((arrayofObjs) => doSomeAsyncStuff(arrayofObjs));
}
waitForPromiseChain([0, 0, 0, 0, 0], promises)
  .then(console.log);
Don't know if the terminology is correct, but I have an array of objects, which also has other arrays in it. I need to go through each of these items. If the operation wasn't async it would look something like this:
myArray.forEach(x => {
x.otherArray.forEach(y => {
doSomething(y)
})
})
However the doSomething function is async, and unfortunately I am well aware that during these iterations I can't simply throw in a couple of asyncs and awaits to make it work.
Usually, when I need to do promises during a iteration, I do the following:
await myArray.reduce((p, item) => {
return p.then(() => {
return doAsyncSomething(item)
})
}, Promise.resolve())
But because I am doing two iterations at once, this becomes a bit more complicated, so how do I go about it?
I currently have something like this, but it doesn't seem to be the right way:
await myArray.reduce((p, item) => {
return item.someArray.reduce((promise, it, index) => {
return promise.then(() => {
return doAsyncSomething()
})
}, Promise.resolve())
}, Promise.resolve())
I know I could just organize my objects into an array through the two forEach and then do the reduce with the doSomething in it, but I doubt it's the most efficient or elegant way of getting it done. So how could I do this?
try this:
let objArray = [{ otherArray: [1, 2] }, { otherArray: [3, 4] }, { otherArray: [5, 6] }];

// Stand-in for a real async operation: echoes its input.
function doAsyncSomething(item) {
  return Promise.resolve(item);
}

// Walk both levels sequentially, awaiting each item, and sum the results.
async function doit() {
  let total = 0;
  for (const outer of objArray) {
    for (const inner of outer.otherArray) {
      total += await doAsyncSomething(inner);
    }
  }
  return total;
}

doit().then((v) => {
  console.log(v);
});
or try recurcive call like this:
let objArray = [{ otherArray: [1, 2] }, { otherArray: [3, 4] }, { otherArray: [5, 6] }];
let index = 0;    // cursor into objArray
let subIndex = 0; // cursor into the current otherArray

// Stand-in async op: logs and echoes the item.
function doAsyncSomething(item) {
  return new Promise((resolve) => {
    console.log("proc item", item);
    resolve(item);
  });
}

// Process whichever item the cursors currently point at.
async function doit() {
  const current = objArray[index].otherArray[subIndex];
  return doAsyncSomething(current);
}

// Recursive driver: after each item, advance the cursors and recurse until
// every inner array has been visited.
function go() {
  doit().then((v) => {
    console.log(v);
    subIndex += 1;
    if (subIndex >= objArray[index].otherArray.length) {
      subIndex = 0;
      index += 1;
    }
    if (index < objArray.length) {
      go();
    }
  });
}
Assuming you want all operations to happen in parallel, you can use Promise.all():
// Sketch only — a function *statement* must have a name, so this block is not
// valid standalone JS; it shows the shape of code pasted into an existing
// async function. Every doSomething call starts immediately (parallel), then
// Promise.all waits for the whole batch.
async function () { // I assume you already have this
// ...
let asyncOps = [];
myArray.forEach(x => {
x.otherArray.forEach(y => {
asyncOps.push(doSomething(y));
})
})
await Promise.all(asyncOps);
}
// Resolves (with undefined) after ~10ms, logging its argument just before.
function doSomething (x) {
  return new Promise((ok, fail) => {
    setTimeout(() => {
      console.log(x);
      ok();
    }, 10);
  });
}

let foo = [[1,2,3,4],[5,6,7,8]];

// Start every doSomething in parallel, then wait for the whole batch.
async function test() {
  const asyncOps = [];
  for (const row of foo) {
    for (const item of row) {
      asyncOps.push(doSomething(item));
    }
  }
  await Promise.all(asyncOps);
}

test();
If you want to do the async operations sequentially it's even simpler:
// Sketch only — anonymous function statement, not valid standalone JS.
// Sequential version: each doSomething finishes before the next one starts.
async function () { // I assume you already have this
// ...
for (let i=0; i<myArray.length; i++) {
let x = myArray[i];
for (let j=0; j<x.length; j++) {
let y = x[j];
await doSomething(y);
}
}
}
Pass on the promise into the inner loop when reducing:
// The outer reduce's accumulator `p` is passed as the *seed* of the inner
// reduce, so each inner chain starts only after everything queued so far —
// one flat sequential chain across both levels. The `await` requires an
// enclosing async function or ES-module top level.
await myArray.reduce((p, item) =>
item.someArray.reduce((p, it, index) =>
p.then(() => doAsyncSomething(it)),
p // <<<
),
Promise.resolve()
)
Or I'd prefer:
// Same sequencing with plain loops — must appear inside an async function
// (or at ES-module top level) for `await` to be legal.
for(const { someArray } of myArray) {
for(const it of someArray) {
await doSomethingAsync(it);
}
}
If you want to run the tasks in parallel:
// Run everything in parallel. BUG FIX: `doSomethingAsnyc` was a typo for
// doSomethingAsync (used just above), and passing the function straight to
// .map would also forward map's extra (index, array) arguments — wrap it so
// only the item is passed.
await Promise.all(
  myArray.flatMap((item) => item.someArray.map((it) => doSomethingAsync(it)))
);