Async/await with Jasmine - JavaScript

I am using Protractor and Jasmine for testing.
Say I have an async it() function.
it('something', async function() {
  for (var k = 0; k < 5; k++) {
    (function(jj) {
      // ...
      await something == whatever
      // ...
    })(k);
  }
})
I want to use await inside the for loop but I am unable to do so.
Also, how can I use async/await to make the code cleaner inside a Protractor each() function, like:
it('something', async function() {
  element.locator.each((x) => {
    await element.locator.sendKeys('something')
  })
})
If anyone can help, that would be awesome. Right now my whole test spec is a big mess of promises and callbacks.
it('description', async function() {
  common.separator()
  console.log('***************************************')
  console.log('something');
  console.log('***************************************')
  common.separator();
  finished_ispresent = await common.finished.isPresent();
  if (finished_ispresent == true) {
    console.log('finished is present = ' + finished_ispresent);
    common.separator();
  }
  common.somebutton.click();
  await browser.sleep(1000);
  // array declaration for storing boolean results
  some_array = [];
  some_array1 = [];
  some_array2 = [];
  some_array3 = [];
  some_array4 = [];
  // the outer for loop
  for (var k = 0; k < 5; k++) {
    browser.wait(EC.visibilityOf(common.something), 2000);
    common.something.click();
    Something_V = await common.something.get(k).getText();
    browser.wait(EC.visibilityOf(common.something), 5000);
    common.something.click();
    common.datepicker(k);
  }
});
What happens is that everything executes so quickly that nothing works, and I get a slew of errors.
I also have one more doubt regarding .each() functions: how do you handle them when using async/await?
it('something', function() {
  element.all.locator.each((element_each) => {
    console.log(element_each);
  })
});
How do you handle this with each()?

If you use async/await, you don't need the closure, and you don't need to nest await inside each().
Let's say there are 5 input boxes on the page and we need to type a value into each box. I will show examples with and without async/await.
// code without async/await
let values = ['a', 'b', 'c', 'd', 'e'];
it('something', function() {
  for (let i = 0; i < 5; i++) {
    (function(j) { // we have to use a JavaScript closure here
      element.all(by.css('input')).get(j).sendKeys(values[j])
    })(i);
  }
})
// code using async/await
it('something', async function() {
  // you can use count() to get the total number of input boxes, rather than hardcoding 5
  // let cnt = await element.all(by.css('input')).count();
  let cnt = 5;
  for (let i = 0; i < cnt; i++) {
    await element.all(by.css('input')).get(i).sendKeys(values[i])
  }
})
There are two points to pay attention to when using async/await.
1) Disable Protractor's promise management (control flow) in conf.js:
// protractor conf.js
exports.config = {
  SELENIUM_PROMISE_MANAGER: false,
}
2) Any line of code that returns a promise needs an await in front of it; otherwise the script execution order becomes a mess.
it('description', async function() {
  common.separator()
  console.log('***************************************')
  console.log('something');
  console.log('***************************************')
  common.separator();
  finished_ispresent = await common.finished.isPresent();
  if (finished_ispresent == true) {
    console.log('finished is present = ' + finished_ispresent);
    common.separator();
  }
  await common.somebutton.click();
  await browser.sleep(1000);
  // array declaration for storing boolean results
  some_array = [];
  some_array1 = [];
  some_array2 = [];
  some_array3 = [];
  some_array4 = [];
  // the outer for loop
  for (var k = 0; k < 5; k++) {
    await browser.wait(EC.visibilityOf(common.something), 2000);
    await common.something.click();
    Something_V = await common.something.get(k).getText();
    await browser.wait(EC.visibilityOf(common.something), 5000);
    await common.something.click();
    await common.datepicker(k);
  }
});
Using await in .each():
it('using await in each()', async function() {
  await browser.get("https://www.npmjs.com/");
  // each() itself returns a promise, so it should be awaited as well
  await element.all(by.css('nav > ul > li > a')).each(async function(item) {
    var txt = await item.getText();
    console.log(txt);
  })
})
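If you prefer to skip each() altogether, here is a minimal sketch of an alternative using the same page and selector as above: getText() called on the whole element.all(...) collection resolves to an array of strings, which you can await and loop over normally.
it('collecting texts without each()', async function() {
  await browser.get("https://www.npmjs.com/");
  // getText() on element.all(...) resolves to an array of strings
  let texts = await element.all(by.css('nav > ul > li > a')).getText();
  for (let txt of texts) {
    console.log(txt);
  }
})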

Related

Promise.all with a for loop in Node.js

I collect information from pages 1 to 10 of an API in Node.js.
Currently I use this code:
async function myWork() {
  let results = []
  let tmp
  let param
  for (let i = 1; i < 11; i++) {
    param = {'page': i}
    tmp = await callMyApi(param) // returns a list
    results.push(...tmp)
  }
  return results
}
In this case, each callMyApi call behaves synchronously: the next request only starts after the previous one finishes.
But I don't care about page order.
So, to speed things up, I want to use something like Promise.all to process the pages in parallel.
How can I use Promise.all with a for loop in this case?
You can use Promise.all() with concat().
async function myWork() {
  let results = [];
  let promises = [];
  for (let i = 1; i < 11; i++) {
    let param = {'page': i};
    let tmpPromise = callMyApi(param);
    promises.push(tmpPromise);
  }
  // promises is now an array of promises, and can be passed to Promise.all()
  let resolved = await Promise.all(promises);
  // resolved is an array of resolved values, one per async call
  resolved.forEach(a => results = results.concat(a));
  // for each item in the resolved array, push it into the final results;
  // forEach works here, but you could use a different looping construct
  return results;
}
Example below:
async function myWork() {
  let results = [];
  let param;
  for (let i = 1; i < 11; i++) {
    param = { page: i };
    results.push(callMyApi(param));
  }
  const res = await Promise.all(results);
  return res.flat();
}
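To try either version end to end, here is a sketch with a stubbed callMyApi; the stub is hypothetical and only stands in for the real API call that returns a list per page.
// hypothetical stub standing in for the real API call
function callMyApi(param) {
  return new Promise(resolve =>
    setTimeout(() => resolve(['page' + param.page + '-a', 'page' + param.page + '-b']), 10)
  );
}

myWork().then(results => console.log(results.length)); // 20 items; Promise.all preserves the page order of the promises array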

How to make an API call inside a for loop without using async/await

I have a for loop inside a promise callback. How can I get the response from the getData API without using async/await? The parameters used inside getData come from the for loop.
var res = EService.webApi.get.GetActiveData(formModel.project.EID);
res.then(
  async function (result) {
    // success
    var data = result.data;
    var eList = data.BodyData;
    var jList = [];
    for (var i = 0; i < eList.length; i++) {
      let entity = await getData(eList[i].EntityID);
      if (eList[i].typeID !== 16) {
        jList.push({
          Name: eList[i].Name + " - " + eList[i].typeName + " - " + entity.Name,
          EID: eList[i].EntityID,
          model: eList[i],
        });
      }
    }
  }
);
If I understand what you're asking, it sounds like you want to invoke all of the async requests right away but have each one await its result, rather than invoking them in serial where each one awaits the previous operation. Maybe something like this:
for (var i = 0; i < eList.length; i++) {
  (async (j) => {
    // The rest of your logic, but using the passed `j` instead of `i`
  })(i);
}
The anonymous async function isn't awaited, but internally each call to that function can await the call to getData to use its result.
Though if you want to do something with jList or any other result/side-effect afterward then you'd need to await the whole thing. Maybe put all of the promises into an array and await the array. Perhaps something like:
let promises = [];
for (var i = 0; i < eList.length; i++) {
  promises.push((async (j) => {
    // The rest of your logic, but using the passed `j` instead of `i`
  })(i));
}
Promise.all(promises).then(() => ...);
The overall goal being that all of the operations run in parallel, rather than in serial like in the original loop.
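For a concrete version of that idea, here is a sketch that starts every getData call at once and only builds jList after all of them resolve. It assumes, as the question's code does, that getData(id) returns a promise, and it reuses the question's variable names.
res.then(async function (result) {
  var eList = result.data.BodyData;
  // start all requests immediately and wait for them together
  var entities = await Promise.all(eList.map(e => getData(e.EntityID)));
  var jList = eList
    .map((e, i) => ({ e: e, entity: entities[i] })) // keep each item paired with its own result
    .filter(x => x.e.typeID !== 16)
    .map(x => ({
      Name: x.e.Name + " - " + x.e.typeName + " - " + x.entity.Name,
      EID: x.e.EntityID,
      model: x.e,
    }));
  // jList is ready to use here
});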

JavaScript await inside a loop

I am trying to work with an API where I have to send a request for each item in a list.
However, the loop doesn't seem to wait for each request, i.e. the loop doesn't work as expected. Here's the code:
getInfo = async () => {
  const mylist = ["item1","item2","item3","item4","item5","item6","item7"]
  const responses = []
  const len = mylist.length
  for (let i = 0; i < len; i++) {
    //console.log("inside loop")
    await axios.get("some_url/" + mylist[i])
      .then(res => {
        responses.push(res.data)
      })
  }
}
When I run the program, all the console.log("inside loop") calls execute immediately, without waiting for the requests to complete.
How can I modify the code so that each response completes before the loop counter advances?
You could try re-arranging the code to something like this, although using Promise.all with Array.prototype.map would be a more idiomatic solution to the problem.
Await the async call (removing the unnecessary .then call) and then console.log:
getInfo = async () => {
  const mylist = ["item1","item2","item3","item4","item5","item6","item7"]
  const responses = []
  const len = mylist.length
  for (let i = 0; i < len; i++) {
    responses.push((await axios.get("some_url/" + mylist[i])).data)
    console.log("inside loop")
  }
}
Internally, await is translated into a Promise chain. Since the for loop can't be transformed into a Promise-continuation, you'll need to convert it to a Promise-based construct.
Depending on what you want to achieve there are multiple ways to go about it.
Constructing the responses array can be done with a map call.
const promises = mylist.map(item => {
  return axios.get("some_url/" + item).then(res => { return res.data; })
});
const data = await Promise.all(promises);
No manual pushing items around or fiddling with the array length.
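Applied to the getInfo function from the question (same mylist and URL pattern), a parallel sketch could look like this:
getInfo = async () => {
  const mylist = ["item1","item2","item3","item4","item5","item6","item7"]
  // start all requests at once; Promise.all resolves when every request has finished
  const responses = await Promise.all(
    mylist.map(item => axios.get("some_url/" + item).then(res => res.data))
  )
  return responses
}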

Make synchronous API calls inside a for loop

I have an array of objects/strings.
this.addtenant().subscribe(r => {
  let a = ['plant1', 'plant2'];
  for (let b of a) {
    this.myService.saveAllData(b).subscribe(r => {
      console.log("result", r);
    });
  }
});
and saveAllData is in my service file and returns an Observable:
saveAllData(b) {
  return this.http.post(url, b);
}
The problem is that since I am using subscribe, the calls run asynchronously. I want the POST call for 'plant1' to finish first, and only then make the call for 'plant2'. In simple words, sequential (synchronous-style) calls.
I think you should use async/await to make the calls sequential.
Here is a tutorial:
https://www.techiediaries.com/javascript-async-await-tutorial/
A sample code demo would be something like:
responseArr: any[] = [];

func1() {
  let x = [1, 2, 3];
  this.func2(x);
}

async func2(arr) { // make the function async
  for (let i = 0; i < arr.length; i++) {
    // wait on each service call; this gives sequential behaviour
    const response: any = await this.myService(arr[i]);
    // handle the response in a separate function if each response needs the same treatment
    this.handleResponse(response);
  }
  return this.responseArr; // return the final responses
}

handleResponse(response) {
  this.responseArr.push(response);
}
I found a solution myself. You can use async/await inside a for loop; forEach is not promise-aware and cannot be used with await. Here is the final code:
this.addtenant().subscribe(async r => {
  let a = ['plant1', 'plant2'];
  for (let index = 0; index < a.length; index++) {
    await this.myService.saveAllData(a[index]).toPromise().then(r => {
      console.log("result", r);
    });
  }
});
Since await works with Promises, toPromise() replaces subscribe().
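A slightly cleaner sketch of the same sequential flow, still using toPromise() and the service and plant names from the question, with a for...of loop instead of an index:
this.addtenant().subscribe(async r => {
  const plants = ['plant1', 'plant2'];
  for (const plant of plants) {
    // each POST finishes before the next one starts
    const result = await this.myService.saveAllData(plant).toPromise();
    console.log('result', result);
  }
});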

How to handle async Node.js in a loop

I have a loop like this:
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
  temparray = document.mainarray.slice(i, i + chunk);
  var docs = collection.find({ id: { "$in": temparray } }).toArray();
  docs.then(function(singleDoc) {
    if (singleDoc) {
      console.log("single doc length : " + singleDoc.length);
      var t;
      for (t = 0, len = singleDoc.length; t < len; t++) {
        fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
          if (err) {
            return console.log(err);
          }
        });
      }
    }
  });
}
The loop iterates twice. In the first iteration it gets 200 elements; in the second, it gets 130. But when I open the .txt file, I see only 130 names. I guess that because of the async nature of Node.js, only the second part of the array is processed. What should I do so that all parts of the array are processed? Thanks in advance.
EDIT: I finally changed the code to this:
var generalArr = [];
var i, j, temparray, chunk = 200;
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
  temparray = document.mainarray.slice(i, i + chunk);
  generalArr.push(temparray);
}
async.each(generalArr, function(item, callback) {
  var docs = collection.find({ id: { "$in": item } }).toArray();
  docs.then(function(singleDoc) {
    if (singleDoc) {
      console.log("single doc length : " + singleDoc.length);
      var t;
      for (t = 0, len = singleDoc.length; t < len; t++) {
        fs.appendFile("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n", function(err) {
          if (err) {
            return console.log(err);
          }
        });
      }
    }
  });
  callback(null);
})
When I change this line:
var docs = collection.find({ id: { "$in": item}}).toArray();
to this line:
var docs = collection.find({ id: { "$in": item}}).project({ name: 1 }).toArray();
it works and I am able to print all names. I guess there is a memory problem when I try it without .project(). How can I make this work without using .project()? Should I change some memory limits? Thanks in advance.
I think your code is unnecessarily complicated, and appending to a file in a loop is very expensive compared to in-memory work. A better way would be to write to the file just once.
var i, j, temparray, chunk = 200;
var generalArr = [];
for (i = 0, j = document.mainarray.length; i < j; i += chunk) {
  temparray = document.mainarray.slice(i, i + chunk);
  generalArr.push(temparray);
}
const queryPromises = [];
generalArr.forEach((item, index) => {
  queryPromises.push(collection.find({ id: { "$in": item } }).toArray());
});
let stringToWrite = '';
Promise.all(queryPromises).then((result) => {
  result.forEach((item) => {
    item.forEach((element) => {
      // build the single string that you want to write
      stringToWrite = stringToWrite + "\n" + element.name;
    });
  });
  fs.appendFile("C:/Users/x/Desktop/names.txt", stringToWrite, function (err) {
    if (err) {
      return console.log(err);
    } else {
      // call your callback or return
    }
  });
});
In the code above, I do the following:
1) Wait for all the DB queries to finish.
2) Iterate over the results and build the single string we need to write to the file.
3) Write it to the file once.
Once you go asynchronous you cannot go back - all your code needs to be asynchronous. In Node 8 you handle this with the async and await keywords. In older versions you can use Promises - async/await is just syntactic sugar for them anyway.
However, most of the APIs in Node are older than Promise, and so they use callbacks instead. There is a promisify function (util.promisify) to turn callback-style functions into promise-returning ones.
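For example, fs.appendFile (used throughout this question) is callback-based; a minimal sketch of wrapping it with util.promisify so it can be awaited or chained (the appendFileAsync name is just illustrative):
const fs = require('fs');
const { promisify } = require('util');

// appendFileAsync(path, data) returns a promise instead of taking a callback
const appendFileAsync = promisify(fs.appendFile);

appendFileAsync("C:/Users/x/Desktop/names.txt", "some name\n")
  .then(() => console.log("appended"))
  .catch(err => console.log(err));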
There are two ways to handle this: you can let all the asynchronous actions happen at the same time, or you can chain them one after another (which preserves order but takes longer).
So, collection.find is asynchronous: it either takes a callback function or returns a Promise. I'm going to assume that the API you're using does the latter, but your problem could be the former (in which case look up promisify).
var findPromise = collection.find({ id: { "$in": item}});
Now, at this point findPromise holds the running find action. We say this is a promise that resolves (completes successfully) or rejects (throws an error). We want to queue up an action to do once it completes, and we do that with then:
// The result of collection.find is the collection of matches
findPromise.then(function(docs) {
  // Any code we run here happens asynchronously
});
// Code here will run first
Inside the promise we can return further promises (allowing them to be chained - complete one async, then complete the next, then fire the final resolve once all done) or use Promise.all to let them all happen in parallel and resolve once done:
var p = new Promise(function(resolve, reject) {
  var findPromise = collection.find({ id: { "$in": item } });
  findPromise.then(function(docs) {
    var singleDocNames = [];
    for (var i = 0; i < docs.length; i++) {
      var singleDoc = docs[i];
      if (!singleDoc)
        continue;
      for (var t = 0; t < singleDoc.length; t++)
        singleDocNames.push(singleDoc[t].name);
    }
    // Resolve the outer promise with the final result
    resolve(singleDocNames);
  });
});
// When the promise finishes log it to the console
p.then(console.log);
// Code inline here will fire before the promise
This is much easier in node 8 with async/await:
async function p() {
  // Await puts the rest of this function in the .then() of the promise
  const docs = await collection.find({ id: { "$in": item } });
  const singleDocNames = [];
  for (var i = 0; i < docs.length; i++) {
    // ... synchronous code unchanged ...
  }
  // Resolve the outer promise with the final result
  return singleDocNames;
}
// async functions can be treated like promises
p().then(console.log);
If you need to write the results to a text file asynchronously there are a couple of ways to do it - you can wait until the end and write all of them, or chain a promise to write them after each find, though I find parallel IO operations tend to be at more risk of deadlocks.
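For the "wait until the end" option, a minimal sketch assuming Node 10+ (so fs.promises is available); the findAndSave name is only for illustration:
const fs = require('fs');

// hypothetical helper, shown only to illustrate the write-at-the-end pattern
async function findAndSave(item) {
  const docs = await collection.find({ id: { "$in": item } }).toArray();
  const names = docs.map(d => d.name).join("\n");
  // a single asynchronous append once every name has been collected
  await fs.promises.appendFile("C:/Users/x/Desktop/names.txt", names + "\n");
}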
The code above has multiple issues with asynchronous control flow. Similar code can work, but only if you use ES2017 async/await operators on every async operation.
Of course, you can also achieve this with a sequence of promises. Solution:
let flowPromise = Promise.resolve();
const chunk = 200;
for (let i = 0, j = document.mainarray.length; i < j; i += chunk) {
  flowPromise = flowPromise.then(() => {
    const temparray = document.mainarray.slice(i, i + chunk);
    const docs = collection.find({ id: { "$in": temparray } }).toArray();
    return docs.then((singleDoc) => {
      let innerFlowPromise = Promise.resolve();
      if (singleDoc) {
        console.log("single doc length : " + singleDoc.length);
        for (let t = 0, len = singleDoc.length; t < len; t++) {
          innerFlowPromise = innerFlowPromise.then(() => new Promise((resolve, reject) =>
            fs.appendFile(
              "C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n",
              err => (err ? reject(err) : resolve())
            )
          ));
        }
      }
      return innerFlowPromise;
    });
  });
}
flowPromise.then(() => {
  console.log('Done');
}).catch((err) => {
  console.log('Error: ', err);
});
When you use async control flow based on Promises, always remember that loops and sequences of function calls will not pause execution until the async operation is done, so you have to wire up all of the then sequences manually - or use async/await syntax.
Which version of Node.js are you using? You should use the native async/await support that is built into newer versions of Node.js (no libraries required). Also note that fs.appendFile is asynchronous, so you need to either use util.promisify to transform the callback into a promise, or just use appendFileSync and accept the blocking IO (which might be okay for you, depending on the use case).
async function() {
  ...
  for (var item of generalArr) {
    var singleDoc = await collection.find({ id: { "$in": item } }).toArray();
    // if(singleDoc) { -- this check won't do anything, since collection.find will always return something, even if it's just an empty array
    console.log("single doc length : " + singleDoc.length);
    var t;
    for (t = 0, len = singleDoc.length; t < len; t++) {
      fs.appendFileSync("C:/Users/x/Desktop/names.txt", singleDoc[t].name + "\n");
    }
  }
}
var docs = collection.find({ id: { "$in": document.mainarray } }), // returns a cursor
    doc,
    names = [],
    toInsert;

function saveToFile(cb) {
  toInsert = names.splice(0, 100);
  if (!toInsert.length) return cb();
  fs.appendFile("C:/Users/x/Desktop/names.txt", toInsert.join("\n"), cb);
}

(function process() {
  if (docs.hasNext()) {
    doc = docs.next();
    doc.forEach(function(d) {
      names.push(d.name);
    });
    if (names.length === 100) {
      // save when we have 100 names in memory and clear the memory
      saveToFile(function(err) {
        process();
      });
    } else {
      process();
    }
  } else {
    saveToFile(function() {
      console.log('All done');
    });
  }
}()); // invoke the function
If you can't solve your issue using core modules and basic Node.js, there is most likely a lack of understanding of how things work, or insufficient knowledge about a library (in this case the FileSystem module).
Here is how you can solve your issue without third-party libraries:
'use strict';

const fs = require('fs');

let chunk = 200;
// How many rounds of array chunking we expect
let rounds = Math.ceil(mainArray.length / chunk);
// copy to temp (for the counter)
let tempRounds = rounds;
// set the file name
let filePath = './names.txt';
// Open a writable stream
let myFileStream = fs.createWriteStream(filePath);

// from round 0 to ${rounds}
for (let i = 0; i < rounds; i++) {
  // assume the array has ${chunk} elements left in this round
  let tempChunk = chunk;
  // if ${chunk} is too big, e.g. i=3 -> chunk = 600 but mainArray.length = 512,
  // adjust the last round for "the leftovers"
  if (mainArray.length < i * chunk) tempChunk = Math.abs(mainArray.length - i * chunk);
  // slice it for this round
  let tempArray = mainArray.slice(i * chunk, i * chunk + tempChunk);
  // get stuff from the DB
  let docs = collection.find({ id: { "$in": tempArray } }).toArray();
  docs.then(function(singleDoc) {
    // for each name in the doc
    for (let j = 0; j < singleDoc.length; j++) {
      // write to the stream
      myFileStream.write(singleDoc[j].name + "\n");
    }
    // declare this round done (reduce tempRounds) and check if it hits 0
    if (!--tempRounds) {
      // if all rounds are done, end the stream
      myFileStream.end();
      // BAM! you're done
      console.log("Done");
    }
  });
}
The key is to use writable streams (fs.createWriteStream); see the Node.js fs documentation for details.
