Why does this recursive function not run asynchronously? - javascript

I have a start(node, array) function that should perform a DFS by traversing an object tree via recursive calls to an API through callMsGraph(token, end) until image properties are found at the end of the tree, at which point they are pushed to array. The function seems like it works, but I can't get the output unless I wrap it in a 2-second setTimeout, which indicates the recursion is not being waited on to complete. I would like to use async/await more, but the call isn't at the top level.
I'm not sure whether the nextNode.then is doing anything, or maybe callMsGraph() needs to be awaited differently than I know how. A solution would be much appreciated.
shelfdb.data = async (accessToken) => {
  const token = accessToken;
  const endpoint = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3/children'

  function start(node, array) {
    if (node.value.length > 0) {
      node.value.forEach(function(child) {
        var end = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/' + child.id + '/children';
        var nextNode = callMsGraph(token, end);
        nextNode.then(function(currResult) {
          if (currResult.value.length > 0) {
            if ('image' in currResult.value[0]) {
              currResult.value.forEach(function(imgChild) {
                let img = {
                  'name': imgChild.name,
                  'job': imgChild.parentReference.path.split("/")[6],
                  'path': imgChild.webUrl,
                  'id': imgChild.id
                }
                array.push(img);
              })
              // complete storing images at tail object, go one level up after loop
              return;
            }
            // if no 'image' or value, go into child
            start(currResult, array);
          }
        }).catch(function(e) {
          console.error(e.message);
        })
      })
    }
    return array;
  }

  var res = await callMsGraph(token, endpoint); // start recursion
  var output = start(res, []);
  console.log(output); // only displays value if wrapped in setTimeout
  return output; // empty []
}
Each query to the API via callMsGraph() returns an object like this, where subsequent queries are made with the id of each object/folder in value (as the new endpoint) until an object with an image property is found. The MS Graph API requires that folders are expanded at each level to access their children.
{
  id: '01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3',
  value: [
    {
      id: '01GNYB5KJMH5T4GXADUVFZRSITWZWNQROS',
      name: 'Folder1',
    },
    {
      id: '01GNYB5KMJKILOFDZ6PZBZYMXY4BGOI463',
      name: 'Folder2',
    }
  ]
}
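For reference, a leaf-level response (where the recursion should stop) looks roughly like the following; the values here are made up for illustration, but the fields are the ones start() reads:
{
  value: [
    {
      id: '01GNYB5K...',
      name: 'photo1.jpg',
      image: { },
      webUrl: 'https://webgroup.sharepoint.com/.../photo1.jpg',
      parentReference: { path: '/drive/root:/Folder1/SubA/Job7' }
    }
  ]
}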
This is the callMsGraph() helper:
function callMsGraph(accessToken, graphEndpoint) {
  const headers = new Headers();
  const bearer = `Bearer ${accessToken}`;
  headers.append("Authorization", bearer);

  const options = {
    method: "GET",
    headers: headers
  };

  return fetch(graphEndpoint, options)
    .then(response => response.json())
    .catch(error => {
      console.log(error);
      throw error;
    });
}

The rule with promises is that once you opt into one (more likely, are forced into it by a library), all code that needs to block for a result anywhere after it also has to await. You can't "go back" to sync and if even a single piece of the promise chain between where the promise starts and where you want its result isn't awaited, the result will be unreachable*.
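As a minimal sketch of that rule (getValueLater is a made-up stand-in for any promise-returning call):
function getValueLater() {                    // hypothetical async source
  return Promise.resolve(42);
}

function broken() {
  let later;
  getValueLater().then(v => { later = v; }); // promise not awaited
  return later;                              // always undefined here
}

async function fixed() {
  return await getValueLater();              // awaited all the way up the chain
}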
Taking a snippet of the code:
function start(node, array) { // not async!
  // ..
  node.value.forEach(function(child) { // doesn't await!
    // ..
    nextNode.then(function(currResult) {
      // this promise is not hooked up to anything!
      start(...) // recurse without await!
There's no await in front of then, start doesn't return a promise and isn't awaited recursively, and forEach has no way to await its callback's asynchronous results, so each promise in the nextNode.then chain is orphaned into the void forever*.
The solution is a structure like this:
async function start(node, array) {
  // ..
  for (const child of node.value) {
    // ..
    const currResult = await callMsGraph(token, end);
    // ..
    await start(...);
    array.push(currResult);
  }
  // returns a promise implicitly
}

// ..
await start(...);
// `array` is populated here
Or Promise.all, which runs in parallel and returns an array (which could replace the parameter array):
function start(node, array) {
  return Promise.all(node.value.map(async child => {
    const currResult = await callMsGraph(token, end);
    // ..
    await start(...);
    return currResult;
  }));
}
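A possible follow-up to the Promise.all version (just a sketch, since the exact result shape depends on how you wire up the recursion): because each level resolves to its own array, the top-level result is nested and can be flattened once at the end.
// Sketch: flatten the per-folder arrays into one list of results.
const nested = await start(res, []);   // array of arrays (one per folder level)
const images = nested.flat(Infinity);  // single flat list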
I'd be happy to provide a minimal, runnable example, but the code you've provided isn't runnable, so you'll have to massage this a bit to work for you. If you make sure to await everything, you're good to go (and generally avoid mixing .then and async/await--the latter seems easier for this use case).
* (for all practical intents and purposes)

There are a few places where you are not handling the promises returned in your code. nextNode.then in your forEach loop is just "called"; the next line of code will not wait for it to complete, and the forEach loop will finish executing before the then callbacks are called.
I changed your code a bit, but I have no way to check whether it works correctly since I would need to populate dummy data for callMsGraph. If you encounter any problems, tell me and I'll modify the answer.
shelfdb.data = async (accessToken) => {
  const token = accessToken;
  const endpoint = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3/children'
  const images = [];

  async function start(node, array) {
    if (node.value.length <= 0) return array; // or === 0 or whatever

    for (const child of node.value) {
      const end = `https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/${child.id}/children`;
      const nextNode = await callMsGraph(token, end);

      if (nextNode.value.length > 0) {
        if ('image' in nextNode.value[0]) {
          const mapped = nextNode.value.map(imgChild => {
            return {
              'name': imgChild.name,
              'job': imgChild.parentReference.path.split("/")[6],
              'path': imgChild.webUrl,
              'id': imgChild.id
            }
          });
          array.push(...mapped);
        }
        // if no 'image' or value, go into child
        await start(nextNode, array);
      }
    }

    return array;
  }

  var res = await callMsGraph(token, endpoint);
  var output = await start(res, []);
  console.log(output);
  return output;
}
Also, please feel free to add try {} catch {} blocks wherever you need them; I skipped them.
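For instance, one place a try/catch could go (just a sketch of the idea, not tested against real Graph responses) is around each per-child request, so a single failing folder doesn't abort the whole traversal:
for (const child of node.value) {
  const end = `https://graph.microsoft.com/v1.0/sites/.../drive/items/${child.id}/children`; // shortened here
  try {
    const nextNode = await callMsGraph(token, end);
    // ...same handling as above...
  } catch (err) {
    console.error(err.message); // skip this folder, keep traversing the rest
  }
}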

Related

Asynchronicity in a reduce() function WITHOUT using async/await

I am patching the exec() function to allow subpopulating in Mongoose, which is why I am not able to use async/await here: my function will be chained off a db call, so there is no opportunity to call await on it, and within the submodule itself I can't add await outside of an async function.
With that out of the way, let's look at what I'm trying to do. I have two separate arrays (matchingMealPlanFoods and matchingMealPlanRecipeFoods) full of IDs that I need to populate. Both of them reside on the same array, foods. They each require a db call with aggregation, and the problem in my current scenario is that only one of the arrays populates because they are happening asynchronously.
What I am trying to do now is use the reduce function to return the updated foods array to the next run of reduce, so that when the final result is returned I can replace the entire foods array once on my doc. The problem, of course, is that my aggregate/exec has not yet returned a value by the time the reduce function goes into its next run. Is there a way I can achieve this without async/await here? I'm including the high-level structure here so you can see what needs to happen, and why using .then() is probably not viable.
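For concreteness, the usual way to thread a value through sequential async steps with reduce, where each step waits on the previous step's promise, looks roughly like this (generic placeholder names, not my real code); this is the behaviour I'm after:
items.reduce(
  (chain, item) =>
    chain.then(accumulated =>
      doAsyncThing(item).then(result => accumulated.concat(result))
    ),
  Promise.resolve([])
).then(finalArray => {
  // finalArray has every result, in order
});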
EDIT: Updating code with async suggestion
function execute(model, docs, options, lean, cb) {
  options = formatOptions(options);
  let resolvedCount = 0;
  let error = false;

  (async () => {
    for (let doc of docs) {
      let newFoodsArray = [...doc.foods];

      for (let option of options) {
        const path = option.path.split(".");

        // ... various things happen here to prep the data

        const aggregationOptions = [
          // $match, then $unwind, then $replaceRoot
        ];

        await rootRefModel
          .aggregate(aggregationOptions)
          .exec((err, refSubDocuments) => {
            // more stuff happens
            console.log('newFoodsArray', newFoodsArray); // this is to check whether the second iteration is using the updated newFoodsArray
            const arrToReturn = newFoodsArray.map((food) => {
              const newMatchingArray = food[nests[1]].map((matchingFood) => {
                // more stuff
                return matchingFood;
              });
              const updatedFood = food;
              updatedFood[`${nests[1]}`] = newMatchingArray;
              return updatedFood;
            });
            console.log('arrToReturn', arrToReturn);
            newFoodsArray = [...arrToReturn];
          });
      }

      console.log('finalNewFoods', newFoodsArray); // this should log after the other two, but it is logging first.
      const document = doc.toObject();
      document.foods = newFoodsArray;
      if (resolvedCount === options.length) cb(null, [document]);
    }
  })()
}
EDIT: Since it seems it will help, here is what is calling the execute function excerpted above.
/**
 * This will populate sub refs
 * @param {import('mongoose').ModelPopulateOptions[]|
 *         import('mongoose').ModelPopulateOptions|String[]|String} options
 * @returns {Promise}
 */
schema.methods.subPopulate = function (options = null) {
  const model = this.constructor;
  if (options) {
    return new Promise((resolve, reject) => execute(model, [this], options, false, (err, docs) => {
      if (err) return reject(err);
      return resolve(docs[0]);
    }));
  }
  return Promise.resolve();
};
We can use async/await just fine here, as long as we remember that async is the same as "returning a Promise" and await corresponds to handling a Promise's .then or .catch.
So let's turn all those "synchronous but callback-based" calls into awaitables: your outer code has to keep obeying the API contract, but since it's not meant to return a value, we can safely mark our own version of it as async, and then we can use await in combination with promises around any other callback-based function calls in our own code just fine:
async function execute(model, docs, options, lean, andThenContinueToThis) {
  options = formatOptions(options);
  let option, resolvedCount = 0;

  for (let doc of docs) {
    let newFoodsArray = [...doc.foods];

    for (option of options) {
      // ...things happen here...
      const aggregationOptions = [/*...data...*/];

      try {
        const refSubDocuments = await new Promise((resolve, reject) => rootRefModel
          .aggregate(aggregationOptions)
          .exec((err, result) => err ? reject(err) : resolve(result)));
        // ...do some work based on refSubDocuments...
      }
      // remember to forward errors and then stop:
      catch (err) {
        return andThenContinueToThis(err);
      }
    }

    // remember: bind newFoodsArray somewhere so it doesn't get lost next iteration
  }

  // As our absolutely last action, when all went well, we trigger the call forwarding:
  andThenContinueToThis(null, dataToForward);
}
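As a side note, depending on the Mongoose version, aggregate(...).exec() called without a callback already returns a promise, in which case the manual new Promise wrapper above could be dropped:
// Only if your Mongoose version returns a promise from exec() when no callback is passed;
// otherwise keep the explicit wrapper shown above.
const refSubDocuments = await rootRefModel
  .aggregate(aggregationOptions)
  .exec();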

How to use async/await for a 'for statement' only

I have an async function submitToTheOthers, and I want everything from let items = [] onward to run after submitToTheOthers is done. But it doesn't wait until submitToTheOthers is done, probably because there is no await inside submitToTheOthers. Therefore, I want to make the for statement in submitToTheOthers awaitable, but I don't know how.
I could use await on updateGeneralInfoToOther, but then I couldn't run
updateGeneralInfoToOther for all servers at almost the same time, and if one of the servers has an error, the remaining servers in the for statement would not run. I want to use await or a promise around the for statement or a slightly larger range.
What would be a good way to make it wait for submitToTheOthers?
P.S. I would also like to know the older syntax, because I will likely need to support Internet Explorer.
Settings.vue
<template>
  <div>
    <button class="point" @click="submitTotalServer()">submitTotalServer</button>
  </div>
</template>

<script>
import {
  updateGeneralInfoToOther,
} from '@/api/settings'

export default {
  data() {
    return {
      serverNames: ['a server', 'b server'],
      idxs: [0, 1],
      serverFullAddress: ['http://localhost:12345', 'http://randomUrl:12345'],
      serverResCheck: [],
    }
  },
  methods: {
    async submitTotalServer() {
      await this.submitToTheOthers() // this function
      let items = [] // here
      for (let i = 0; i < this.idxs.length; i++) {
        if (!this.serverResCheck[i]) {
          items.push(this.serverNames[i])
        }
      }
      if (items.length === 0) {
        alert('saved in all servers')
      } else {
        alert(`[${items}] Error`)
      }
    },
    async submitToTheOthers() {
      let data = {
        data: 'test',
      }
      for (let i = 0; i < this.idxs.length; i++) {
        updateGeneralInfoToOther(1, data, this.serverFullAddress[i]).then((res) => { // axios function
          if (res.status == 200) {
            this.serverResCheck[i] = true
          } else {
            this.serverResCheck[i] = false
          }
        })
      }
    }
  },
}
</script>
api/settings.js
// ...
export function updateGeneralInfoToOther(id, data, serverAddress) {
  axios.defaults.timeout = 5000;
  data.serverAddress = serverAddress
  return axios.post(`/api/settings/generalToOther/${id}`, data)
}
// ...
The crux of the problem is that your for loop is not waiting for the promise returned by updateGeneralInfoToOther() to resolve. A quick fix is to force the loop to await each call:
for (let i = 0; i < this.idxs.length; i++) {
  await updateGeneralInfoToOther(1, data, this.serverFullAddress[i]).then((res) => {
    if (res.status == 200) {
      this.serverResCheck[i] = true
    } else {
      this.serverResCheck[i] = false
    }
  })
}
However, this solution is not ideal, in the sense that you have to await each request individually, and there is really no reason to do that. A better solution is to start all these async operations together and then wait for all of them to resolve. Your submitToTheOthers() function would instead return an array of promises:
return this.idxs.map((_entry, i) => {
  return updateGeneralInfoToOther(1, data, this.serverFullAddress[i]).then((res) => {
    if (res.status == 200) {
      this.serverResCheck[i] = true
    } else {
      this.serverResCheck[i] = false
    }
  })
});
Then, it is just a matter of using Promise.all() to wait for this array of promises to be resolved:
async submitTotalServer() {
  await Promise.all(this.submitToTheOthers());
  // Rest of the logic
  // ...
}
Maybe this is what you need: for await.
async function test() {
  let array = [1, 2, 3, 4, 5]
  for await (const elem of array) {
    console.log(elem);
  }
}
By marking a function async you're telling it: if you run into an await, do not proceed to the rest of the code until the awaited promise has settled (resolved or rejected). But inside your submitToTheOthers function you're not using await at all, so the requests are made all at once and the function returns without waiting for them.
Although placing an await in front of each call would solve your problem, inside the loop each iteration would wait for the previous one to finish, which will surely take a lot longer than if you sent all the requests at once. From what you've shown, it looks like they could run in parallel.
For this, you could use Promise.all(), which is designed to handle multiple separate promises in parallel. The following should work, without the need to make any other change to your code:
submitToTheOthers() {
  return Promise.all(
    this.idxs.map((_, i) => updateGeneralInfoToOther(
      1,
      { data: 'test' },
      this.serverFullAddress[i]
    )
      .then(r => {
        this.serverResCheck[i] = r.status === 200;
        return r;
      })
      .catch(e => {
        this.serverResCheck[i] = false;
        return e;
      })
    )
  );
}
Even though you've marked submitToTheOthers as async, you aren't actually making it behave like an async function.
Inside that function, I assume it's this line that's async: updateGeneralInfoToOther(1, data, this.serverFullAddress[i]). You call this function in a loop, but the problem is that you don't await it, nor do you return the promise.
You'll need to put await in front of it, but that will make it wait for each call to finish before processing the next one in the loop. The alternative is storing each promise in an array and then doing await Promise.all(promises) at the end of the function, as sketched below.
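That alternative would look roughly like this (a sketch that keeps your existing result handling):
async submitToTheOthers() {
  const data = { data: 'test' }
  const promises = []
  for (let i = 0; i < this.idxs.length; i++) {
    promises.push(
      updateGeneralInfoToOther(1, data, this.serverFullAddress[i])
        .then((res) => { this.serverResCheck[i] = res.status == 200 })
        .catch(() => { this.serverResCheck[i] = false }) // keep one failing server from rejecting Promise.all
    )
  }
  await Promise.all(promises)
}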

Pushing elements into the array works only inside the loop

I got some data which I'm fetching from an API, and I am using axios for that. When the data is retrieved, I dump it into a function called "RefractorData()" just to organize it a bit, then I push it onto an existing array. The problem is, my array gets populated inside forEach and I can console.log my data there, but once I exit the loop, my array is empty.
let matches: any = new Array();
const player = new Player();

data.forEach(
  async (match: any) => {
    try {
      const result = await API.httpRequest(
        `https://APILink.com/matches/${match.id}`,
        false
      );
      if (!result) console.log("No match info");
      const refractored = player.RefractorMatch(result.data);
      matches.push({ match: refractored });
      console.log(matches);
    } catch (err) {
      throw err;
    }
  }
);
console.log(matches);
The first console.log inside forEach displays the data properly; the second one after forEach shows an empty array.
Managed to do it with Promise.all() and Array.prototype.map().
const player = new Player();
const matches = result.data;

const promises = matches.map(async (match: any) => {
  const response: any = await API.httpRequest(
    `https://API/matches/${match.id}`,
    false
  );
  let data = response.data;
  return {
    data: player.RefractorMatch(data)
  };
});

const response: any = await Promise.all(promises);
You must understand that async functions almost always finish later, because they depend on some external input like an HTTP response, so the second console.log runs before the first.
There are a few ways to solve this. The ugliest but easiest to follow is to create an external promise that you resolve once all HTTP requests are done.
let matches = [];
let promise = new Promise((resolve) => {
  let complete = 0;
  data.forEach((match: any) => {
    API.httpRequest(...).then((result) => {
      // Your logic here
      matches.push(yourLogicResult);
      complete++;
      if (complete === data.length) {
        resolve();
      }
    });
  });
});

console.log(matches); // still logs empty array
promise.then(() => console.log(matches)); // now logs the right array
You can solve this using other methods, for example Promise.all().
One very helpful way to solve it is using RxJs Observables. See https://www.learnrxjs.io/
Hope I helped you!

Array of queries for `for await` loop for postgresql transaction helper

I made a transaction function that simplifies this action for me, like this (it works):
export async function transaction(queriesRaw) {
  let allResults = []
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    var queries = queriesRaw.map(q => {
      return client.query(q[0], q[1])
    })
    for await (const oneResult of queries) {
      allResults.push(oneResult)
    }
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
  } finally {
    client.release()
    return allResults
  }
}
And do transactions like this:
let results = await transaction([
  ['UPDATE readers SET cookies=5 WHERE id=$1;', [1]],
  ['INSERT INTO rewards (id) VALUES ($1);', [3]]
])
The transaction should run the queries one at a time in array index order (so rollback to previous values works correctly) and return results in the same order (sometimes I need the return values from some queries).
As I understand it, the queries already start inside the map function. In the for await loop I just wait for their results, and the second query may complete faster than the previous one.
So how can I fix this?
P.S. Maybe something like new Promise() instead of map is the right way?
Change this:
var queries = queriesRaw.map(q => {
  return client.query(q[0], q[1])
})

for await (const oneResult of queries) {
  allResults.push(oneResult)
}
To:
for (const q of queriesRaw) {
  let result = await client.query(q[0], q[1]);
  allResults.push(result);
}
If I got you correctly, just use a for loop with a proper await, instead of a callback-style loop.
That way the function does not return until everything has executed in order, and with some thinking you could easily add a revoke() function or something similar.
...
export async function transaction(queriesRaw) {
  let allResults = []
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    for (var i = 0; i < queriesRaw.length; i++) {
      var res = await client.query(queriesRaw[i][0], queriesRaw[i][1])
      allResults.push(res)
    }
    await client.query('COMMIT')
  } catch (err) {
    await client.query('ROLLBACK')
    // do you maybe want to push errors to the results too?
    // allResults.push(err)
  } finally {
    client.release()
    return allResults
  }
}
Info: have a look at, for example, the async module or something similar, so you will not have to think about things like this.
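For instance, with the async package, mapSeries runs the queries one at a time and collects the results in order; a rough sketch (error handling and the BEGIN/COMMIT wrapper omitted, and assuming node-postgres' callback form of client.query):
const async = require('async');

async.mapSeries(
  queriesRaw,
  (q, cb) => client.query(q[0], q[1], cb), // node-postgres accepts a trailing callback
  (err, allResults) => {
    // allResults is in the same order as queriesRaw
  }
);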

generators for asynchronous iteration of large file

Say I have a function called openShapeFile, which reads a file and produces a Promise wrapping a source object. That source object has a read function which returns a Promise wrapping the next value in the shapefile, along with a .done boolean that can be used to tell if the end of the file has been reached.
In effect, the shapefile.open from here:
https://www.npmjs.com/package/shapefile
If I now want to read a file into a database, I can say:
openShapeFile(`shapefile.shp`).then((source) => source.read()
  .then(function log(result) {
    if (result.done) {
      return
    } else {
      let query = `INSERT INTO geodata(geometry, id, featcode) VALUES(ST_GeomFromGeoJSON('${
        JSON.stringify(Object.assign({}, result.value.geometry, {coordinates: result.value.geometry.coordinates.map(JSON.stringify)}))
      }'), '${
        result.value.properties.ID
      }', ${
        result.value.properties.FEATCODE
      });`
      query = query.split('"[[').join('[[').split(']]"').join(']]')
      return pool.query(query).then((result) => {
        return source.read().then(log)
      })
    }
  })
).then(() => console.log(dirCount)).catch(err => 'Problem here')
This just about works, but it relies on a recursive Promise (weird).
So as an exercise, and/or to see if it would produce more clarity, I decided to rewrite it with generators, producing something like this:
function *insertQuery(query) {
  const result = pool.query(query)
  return result
}

const shapeFileGenerator = co.wrap(function* (source) {
  while (true) {
    const result = yield source.read()
    if (result.done) {
      return yield {}
    } else {
      let query = `INSERT INTO geodata(geometry, id, featcode) VALUES(ST_GeomFromGeoJSON('${
        JSON.stringify(Object.assign({}, result.value.geometry, {coordinates: result.value.geometry.coordinates.map(JSON.stringify)}))
      }'), '${
        result.value.properties.ID
      }', ${
        result.value.properties.FEATCODE
      });`
      query = query.split('"[[').join('[[').split(']]"').join(']]')
      yield* insertQuery(query)
    }
  }
})

openShapeFile(`shapefile.shp`).then((source) => {
  const s = shapeFileGenerator(source)
})
Now this works! It reads all of the data!
However, I kind of hate the infinite loop, and I never call .next directly. How can I rework this? What would be an idiomatic way to do something like this with generators? It seems like I should be able to write a proper generator with s.next() resulting in a source.read()?
I would write
async function readFileToDB(filename) {
  const source = await openShapeFile(filename);
  for (let {value, done} = await source.read(); !done; {value, done} = await source.read()) {
    const query = `INSERT INTO geodata(geometry, id, featcode) VALUES(ST_GeomFromGeoJSON('${
      JSON.stringify(value.geometry)
    }'), '${
      value.properties.ID
    }', ${
      value.properties.FEATCODE
    });`
    const result = await pool.query(query);
  }
  console.log(dirCount);
}

readFileToDB(`shapefile.shp`).catch(err => console.error('Problem here', err));
though I don't think there's anything wrong with the recursive solution either.
It seems like I should be able to write a proper generator with s.next() resulting in a source.read()?
No, generators are synchronous. You might want to have a look at the async iteration proposal though.
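That proposal has since shipped as async generators and for await...of, so (assuming the same openShapeFile / source.read() / pool.query shapes as above) the loop could be sketched like this; buildInsertQuery is a hypothetical helper wrapping the query-string building shown earlier:
// Sketch: an async generator yields each record until source.read() reports done.
async function* readRecords(filename) {
  const source = await openShapeFile(filename);
  let result = await source.read();
  while (!result.done) {
    yield result.value;
    result = await source.read();
  }
}

async function readFileToDB(filename) {
  for await (const value of readRecords(filename)) {
    await pool.query(buildInsertQuery(value)); // hypothetical helper, see query string above
  }
}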
You can code your logic as if it were synchronous, and execute it via the sequential executor nsynjs. Below is a slightly modified working example tested on this file:
main.js:
var nsynjs = require('nsynjs');
var shapefile = require('shapefile');

function synchronousCode(shapefile /*, pool */) {
  var source = shapefile.open('UScounties.shp').data;
  var result = source.read().data;
  while (result && !result.done) {
    var query = "INSERT INTO geodata(geometry, id, featcode) VALUES('" +
      JSON.stringify(Object.assign({}, result.value.geometry, {coordinates: result.value.geometry.coordinates.map(JSON.stringify)})) +
      "'), '" +
      result.value.properties.ID +
      "'," +
      result.value.properties.FEATCODE +
      "')";
    console.log(query.length);
    // uncomment line below to sequentially insert to the DB
    // var queryRes = pool.query(query).data;
    result = source.read().data;
  }
}

nsynjs.run(synchronousCode, null, shapefile /*, pool */, function () {
  console.log('all done');
})
nsynjs will automatically detect whether a function call returns a promise. If so, it will wait for the promise to resolve, put its result into the data property, and only then continue to the next expression.
