Here is my code:
let loadInitialImages = ($) => {
    let html = "";
    let images = new Array();
    const APIURL = "https://api.shutterstock.com/v2/images/licenses";
    const request = async () => {
        const response = await fetch(APIURL, { headers: auth_header() });
        const json = await response.json();
        json.data.map((v) => images.push(v.image.id)); // this is where the problem is
    };
    request();
    // I can see the contents of the array when I log it.
    console.log(images);
    // But I can't see any elements when logging this way:
    images.map((id) => console.log(id));
};
Everything is working fine here, but when I push the elements into the array they end up outside the array braces []. Below is a screenshot of my array:
I'm not able to loop through the array here.
This is how a usual array looks in the console:
See the array braces here. The elements appear inside them: [1, 2, 3]
Since your request function is async, you need to treat its result as a Promise.
This is also the reason why you see it represented differently in the Chrome console. An empty array gets printed, but the references in the console are updated dynamically, so you can still expand it and see the contents.
If you want to log the contents of the array statically, you could use something like JSON.stringify to print it. This will print a string representation of the exact state of the array at the time of logging.
// You will need to check the output in the browser console.
// Your code could be reduced to this:
const a = [];
setTimeout(() => a.push(1, 2), 100);
console.log('a:', a);
// A filled array logs differently:
const b = [1, 2];
console.log('b:', b);
// Stringify gives you a fixed state:
const c = [];
setTimeout(() => c.push(1, 2), 100);
console.log('c:', JSON.stringify(c));
Regarding your code: on top of waiting for request(), if you are using map you should take advantage of how it works. You can use it to generate your entire array without push, for example. If you still want to keep your array and push() to it, use json.data.forEach instead of json.data.map, since forEach doesn't create a copy of the array.
// Making your function `async` so you can `await` the `request()`
let loadInitialImages = async ($) => {
    let html = "";
    const APIURL = "https://api.shutterstock.com/v2/images/licenses";
    const request = async () => {
        const response = await fetch(APIURL, { headers: auth_header() });
        const json = await response.json();
        // Array.map returns a new array with the results of applying
        // the given function to the original array; you can use that as
        // an easy way to build your desired array.
        return json.data.map((v) => v.image.id);
    };
    // Since request() is async, you need to wait for it to complete.
    const images = await request();
    // Array.forEach lets you iterate over an array without generating a
    // copy. If you used map here, you would be making an unneeded copy
    // of your images array.
    images.forEach((i) => console.log(i));
};
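Since loadInitialImages is now async itself, whoever calls it also needs to wait for it. A minimal sketch (the $ argument comes from your code; the async wrapper and error handling are just assumptions about how you call it):
// Sketch: awaiting the now-async loadInitialImages from an async wrapper.
(async () => {
    try {
        await loadInitialImages($); // `$` is whatever you were already passing in
    } catch (err) {
        console.error("loading initial images failed:", err);
    }
})();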
The snippet below demonstrates your issue (your case is arr1; you want arr2).
In case loadInitialImages can't be async, use the arr3 scenario.
async function main() {
    let arr1 = [], arr2 = [], arr3 = [];
    const getArray = () => new Promise((resolve) => setTimeout(() => { resolve([1, 2, 3]); }, 1000));
    async function request(arr, number) {
        const result = await getArray();
        result.forEach((el) => arr.push(el));
        console.log(`inner${number}`, arr);
        return result;
    }
    request(arr1, 1);
    console.log("outer1", arr1);
    await request(arr2, 2);
    console.log("outer2", arr2);
    request(arr3, 3).then(() => {
        console.log("then3", arr3);
    });
    console.log("outer3", arr3);
}
main();
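Applied to your code, the arr3 (.then) style would look roughly like this (a sketch; fetch, auth_header() and the response shape are taken from your question):
// Sketch of the arr3 approach: loadInitialImages stays non-async and the
// work that needs the images happens inside .then().
let loadInitialImages = ($) => {
    const APIURL = "https://api.shutterstock.com/v2/images/licenses";
    const request = async () => {
        const response = await fetch(APIURL, { headers: auth_header() });
        const json = await response.json();
        return json.data.map((v) => v.image.id);
    };
    request().then((images) => {
        // The array is only guaranteed to be filled inside this callback.
        images.forEach((id) => console.log(id));
    });
};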
I think the problem is that the console.log() is fired before the array is populated, and because console.log works with a reference, it prints both states of the array (when it's empty, and after populating it with .map).
Can you test this code? The console.log is placed directly after the loop:
let loadInitialImages = ($) => {
    let html = "";
    let images = new Array();
    const APIURL = "https://api.shutterstock.com/v2/images/licenses";
    const request = async () => {
        const response = await fetch(APIURL, { headers: auth_header() });
        const json = await response.json();
        json.data.map((v) => images.push(v.image.id)); // this is where the problem is
        console.log(images);
    };
    request();
};
loadInitialImages();
Related
I am fairly new to JavaScript and I understand that it executes asynchronously. I tried using the callback method to fetch the secret values and then execute the next block of code, but it is not waiting.
This is the function that fetches the Key Vault secret values:
function getsecret_values(client, secret_name, callback) {
    let val = [];
    for (let i = 0; i < secret_name.length; i++) {
        client.getSecret(secret_name[i]).then((latestSecret) => {
            val[i] = latestSecret.value;
        });
    }
    callback(val);
}
I am calling the getsecret_values function from the main block:
let vaultName = result.database;
const url = `https://${vaultName}.vault.azure.net`;
const credential = new ClientSecretCredential(result.host, result.user, result.password);
const client = new SecretClient(url, credential);
let secret_values = [];
getsecret_values(client, secrets, function (result) {
    secret_values = result;
    console.log(secret_values);
});
console.log(secret_values);
// next code block
Both console.log calls print an empty array.
I want my code to wait until the secret values are fetched and put into the secret_values array, and then proceed to the next block of code. How do I achieve this?
The easiest way is to use the async/await pattern, which uses promises in the background. Trying not to change your code much:
async function getsecret_values(client, secret_name) {
    let val = [];
    for (let i = 0; i < secret_name.length; i++) {
        const latestSecret = await client.getSecret(secret_name[i]);
        val[i] = latestSecret.value;
    }
    return val;
}
In your main block:
getsecret_values(client, secrets).then(function (result) {
    secret_values = result;
    console.log(secret_values);
});
console.log(secret_values); // will still be an empty array, as the then callback has not executed yet
My approach would be:
async function getsecret_values(client, secret_name) {
    let val = [];
    for (let i = 0; i < secret_name.length; i++) {
        const latestSecret = await client.getSecret(secret_name[i]);
        val[i] = latestSecret.value;
    }
    return val;
}

// main:
async function main() {
    let vaultName = result.database;
    const url = `https://${vaultName}.vault.azure.net`;
    const credential = new ClientSecretCredential(result.host, result.user, result.password);
    const client = new SecretClient(url, credential);
    const secret_values = await getsecret_values(client, secrets);
    console.log(secret_values);
}
main();
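If the order in which the secrets are fetched doesn't matter, they can also be requested in parallel with Promise.all (a sketch under the assumption that concurrent client.getSecret calls are acceptable):
// Sketch: fire all getSecret calls at once and wait for all of them.
async function getsecret_values(client, secret_name) {
    const results = await Promise.all(
        secret_name.map((name) => client.getSecret(name))
    );
    return results.map((s) => s.value);
}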
I made this custom function and put it outside, globally, which normally would work. I also tried moving it inside the main async Puppeteer function, but that doesn't work either. It's a simple function. In each page.evaluate callback I call this function and pass it a selector. But it's saying the function is not defined, along with a promise rejection, which is weird because the function isn't a promise. Please help.
const grabDomConvertNodlistToArray = (grabDomHtmlPath) => {
    // grabbing node list from html selector all
    const nList = document.querySelectorAll(grabDomHtmlPath);
    // converting nodelist to array to be returned
    const array = Array.from(nList);
    return array;
};
I tried turning the function into an async function and adding a new parameter, page. I then added async to my evaluate callback and passed the Puppeteer page as an argument, but it still errors and doesn't work.
const grabDomConvertNodlistToArray = async (page, grabDomHtmlPath) => {
    try {
        // grabbing node list from html selector all
        const nList = await page.document.querySelectorAll(grabDomHtmlPath);
        // converting nodelist to array to be returned
        const array = Array.from(nList);
        return array;
    } catch (error) {
        console.log(error);
    }
};
So I have your typical Puppeteer setup where you await browser.newPage(), then goto(url). Then I added this:
await page.exposeFunction("grabDomConvertNodlistToArray", grabDomConvertNodlistToArray);
I added async to my evaluate callback, i.e. async () => {}. But calling my custom function inside that evaluate callback still doesn't work for some reason.
I found a solution, but it doesn't work for me. I'm getting "array.forEach is not a function", which indicates to me that inside my grabDomConvertNodlistToArray function it's not grabbing the NodeList or converting it into an array. If it did, forEach would be a function.
Solution 3
const browser = await puppeteer.launch();
const page = await browser.newPage();
await page.goto(someURL);

var functionToInject = function () {
    return 1 + 1;
};
var otherFunctionToInject = function (input) {
    return 6;
};
await page.exposeFunction("functionToInject", functionToInject);
await page.exposeFunction("otherFunctionToInject", otherFunctionToInject);

var data = await page.evaluate(async function () {
    console.log('woo I run inside a browser');
    return await functionToInject() + await otherFunctionToInject();
});
return data;
So I erased the two functions above and converted it to use my function below.
const grabDomConvertNodlistToArray = (grabDomHtmlPath) => {
    // grabbing node list from html selector all
    const nList = document.querySelectorAll(grabDomHtmlPath);
    // converting nodelist to array to be returned
    const array = Array.from(nList);
    return array;
};
Running my JS file results in an error that array.forEach isn't a function, which is weird, because if the function worked as intended, the const array inside my evaluate callback would be an array, since it is set to the return value of the function above, which returns an array. So I don't know what's going on; I think it has something to do with the document.querySelectorAll() line.
const rlData = async () => {
    const browser = await puppeteer.launch(
        {
            headless: true,
        },
        {
            args: ["--flag-switches-begin", "--disable-features=OutOfBlinkCors", "--flag-switches-end"],
        }
    );
    const pageBodies = await browser.newPage();
    await pageBodies.goto("https://test.com/bodies", {
        waitUntil: "load",
    });
    const grabDomConvertNodlistToArray = (grabDomHtmlPath) => {
        // grabbing node list from html selector all
        const nList = document.querySelectorAll(grabDomHtmlPath);
        // converting nodelist to array to be returned
        const array = Array.from(nList);
        return array;
    };
    await pageBodies.exposeFunction("grabDomConvertNodlistToArray", grabDomConvertNodlistToArray);
    const rlBodyNames = await pageBodies.evaluate(async () => {
        // grabs all elements in html to make nodelist & converts it to an array
        const array = grabDomConvertNodlistToArray(".testbodies > div > h1");
        // push the data collected from array into data array and returned
        const data = [];
        array.forEach((element) => {
            data.push(element.textContent);
        });
        return data;
    });
};
rlData();
I guess I'm going to have to move the document.querySelectorAll functionality out of the custom function and back into the evaluate callback. However, the whole reason for making that custom function was to avoid repeating the same code, since my overall crawler is 238 lines long with a lot of repetition. Not being able to call custom functions like mine is horrible for refactoring repeated code.
I gave up trying to get this to work and decided just to do it this way. Yes, it makes your code repetitive if you have more pages to scrape, so you will be using the same code many times, which is what I was trying to avoid, but Puppeteer is the worst for refactoring your code. Maybe down the line the developers of the package will add the ability to easily use custom functions like I was trying to.
const testNames = await pageBodies.evaluate(() => {
    const nodeList = document.querySelectorAll(".test > div h2");
    const array = Array.from(nodeList);
    const data = [];
    array.forEach((element) => {
        data.push(element.textContent);
    });
    return data;
});
exposeFunction() is not suitable for your case: an exposed function is intended to transfer data between the browser and Node.js contexts, so under the hood it is wrapped in code that serializes and deserializes its arguments and return value, and unserializable data (such as DOM elements) can be lost. Try this instead:
const rlData = async () => {
    const browser = await puppeteer.launch(
        {
            headless: true,
        },
        {
            args: ["--flag-switches-begin", "--disable-features=OutOfBlinkCors", "--flag-switches-end"],
        }
    );
    const pageBodies = await browser.newPage();
    await pageBodies.evaluateOnNewDocument(() => {
        window.grabDomConvertNodlistToArray = function grabDomConvertNodlistToArray(grabDomHtmlPath) {
            // grabbing node list from html selector all
            const nList = document.querySelectorAll(grabDomHtmlPath);
            // converting nodelist to array to be returned
            const array = Array.from(nList);
            return array;
        };
    });
    await pageBodies.goto("https://test.com/bodies", {
        waitUntil: "load",
    });
    const rlBodyNames = await pageBodies.evaluate(() => {
        // grabs all elements in html to make nodelist & converts it to an array
        const array = grabDomConvertNodlistToArray(".testbodies > div > h1");
        // push the data collected from array into data array and returned
        const data = [];
        array.forEach((element) => {
            data.push(element.textContent);
        });
        return data;
    });
};
rlData();
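If the main goal is just to avoid repeating the evaluate body, another option (not from the answer above, just a hedged sketch) is to keep a helper on the Node.js side and pass the selector into page.evaluate as an argument:
// Sketch: a Node-side helper; the selector is passed into page.evaluate,
// so the DOM work stays in the browser context where querySelectorAll exists.
const grabTextContents = (page, selector) =>
    page.evaluate((sel) => {
        return Array.from(document.querySelectorAll(sel)).map((el) => el.textContent);
    }, selector);

// Usage with the page and selector from the question:
// const rlBodyNames = await grabTextContents(pageBodies, ".testbodies > div > h1");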
I have some data which I fetch from an API using axios. When the data is retrieved, I pass it through a function called "RefractorData()" just to organize it a bit, then I push it onto an existing array. The problem is that my array gets populated inside forEach, and I can console.log my data there, but once I exit the loop my array is empty.
let matches: any = new Array();
const player = new Player();
data.forEach(async (match: any) => {
    try {
        const result = await API.httpRequest(
            `https://APILink.com/matches/${match.id}`,
            false
        );
        if (!result) console.log("No match info");
        const refractored = player.RefractorMatch(result.data);
        matches.push({ match: refractored });
        console.log(matches);
    } catch (err) {
        throw err;
    }
});
console.log(matches);
The first console.log inside forEach displays the data properly; the second one, after forEach, shows an empty array.
Managed to do it with Promise.all() and Array.prototype.map():
const player = new Player();
const matches = result.data;
const promises = matches.map(async (match: any) => {
    const response: any = await API.httpRequest(
        `https://API/matches/${match.id}`,
        false
    );
    let data = response.data;
    return {
        data: player.RefractorMatch(data)
    };
});
const response: any = await Promise.all(promises);
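Since Promise.all resolves to an array in the same order as the promises passed in, the refractored matches can then be pulled out directly (a small sketch reusing the names above; not part of the original answer):
// Sketch: `response` keeps the order of `promises`, so each entry's
// `data` field is the RefractorMatch result for the matching match id.
const refractoredMatches = response.map((r) => r.data);
console.log(refractoredMatches);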
You must understand that async functions almost always finish later, because they depend on some external input like an HTTP response; so the second console.log runs before the first.
There are a few ways to solve this. The ugliest, but easiest to reason about, is to create an external promise that you resolve once all HTTP requests are done.
let matches = [];
let promise = new Promise((resolve) => {
    let complete = 0;
    data.forEach((match: any) => {
        API.httpRequest(...).then((result) => {
            // Your logic here
            matches.push(yourLogicResult);
            complete++;
            if (complete === data.length) {
                resolve();
            }
        });
    });
});
console.log(matches); // still logs empty array
promise.then(() => console.log(matches)); // now logs the right array
You can solve this using other methods, for example Promise.all().
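For instance, a sketch reusing the names from your snippet (assuming it runs inside an async function; the per-match object shape is the same as in your code):
// Sketch of the Promise.all variant: map each match to a request promise
// and wait for all of them before logging.
const matches = await Promise.all(
    data.map(async (match) => {
        const result = await API.httpRequest(`https://APILink.com/matches/${match.id}`, false);
        return { match: player.RefractorMatch(result.data) };
    })
);
console.log(matches);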
One very helpful way to solve it is using RxJs Observables. See https://www.learnrxjs.io/
Hope I helped you!
I have an array which is passed to a function in JavaScript. I can't see anything wrong with the code, but it doesn't pass the first array over correctly so it can be parsed.
The idea is that the first array has 56 items; it then calls the parseData function, which is supposed to split this array into chunks of 7.
Here are the two functions:
static async validateRowValues() {
    let data = [];
    await cy.get('tr > td > div.dlCell')
        .each(function (row) {
            let d = row.get(0).innerText;
            data.push(d);
        });
    console.log(data);
    let response = await this.parseData(data);
    console.log({ response });
}

static async parseData(tData) {
    console.log(tData);
    let array = [];
    let coll_array = [];
    debugger;
    await tData.forEach(async (v, index) => {
        await array.push(v);
        if (index % 6 === 0 && index !== 0) {
            await coll_array.push(array);
            array = [];
        }
    });
    return coll_array;
}
The first console.log within parseData does show the 56 items; however, by the time it reaches tData.forEach it has completely lost its data, and the parsing returns an empty array when it returns coll_array.
Does anyone have any ideas?
As of now I will take it that you are getting your data fine into an array,
e.g. arr = [1, 2, 3, ....., 58].
Use the code below to split it into chunks of 7:
arr = arr.reduce((acc, data, index) => {
    if (index == 0 || index % 7 == 0) acc.push([]);
    acc[acc.length - 1].push(data);
    return acc;
}, []);
The above code will return
arr = [[1, ..., 7], [8, ..., 14], ...]
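A quick way to sanity-check the chunking (a sketch with made-up data: 56 numbers, matching the 56 cells from the question):
// Sketch: run the same reduce over 56 numbers and check the chunk sizes.
let arr = Array.from({ length: 56 }, (_, i) => i + 1);
arr = arr.reduce((acc, data, index) => {
    if (index == 0 || index % 7 == 0) acc.push([]);
    acc[acc.length - 1].push(data);
    return acc;
}, []);
console.log(arr.length); // 8 chunks
console.log(arr[0]);     // [1, 2, 3, 4, 5, 6, 7]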
We have resolved this.
It turns out everything in Cypress is a promise, so the first function needed to have a .then:
static async validateRowValues() {
    let data = [];
    await cy.get('tr > td > div.dlCell')
        .each(function (row) {
            let d = row.get(0).innerText;
            data.push(d);
        }).then(() => {
            this.parseData(data);
        });
}
The console.log at the end prints an empty array.
The console.log runs before the ids.map function finishes.
var ids = [];
var allLync = [];
var user = await User.findOne(args.user);
ids.push(user._id);
user.following.map(x => {
    ids.push(x);
});
ids.map(async x => {
    var lync = await Lync.find({ "author": x });
    lync.map(u => {
        allLync.push[u];
    });
});
console.log(allLync);
What am I doing wrong?
The .map code isn't awaited, so the console.log happens before the mapping happens.
If you want to wait for a map, you can use Promise.all with await:
var ids = [];
var allLync = [];
var user = await User.findOne(args.user);
ids.push(user._id);
user.following.map(x => {
    ids.push(x);
});
// note the await
await Promise.all(ids.map(async x => {
    var lync = await Lync.find({ "author": x });
    lync.map(u => {
        allLync.push(u); // you had a typo there
    });
}));
console.log(allLync);
Note, though, that since you're using .map you can shorten the code significantly:
const user = await User.findOne(args.user);
const ids = user.following.concat(user._id);
const allLync = await Promise.all(ids.map(id => Lync.find({ "author": id })));
console.log(allLync);
Promise.map() is now an option that would be a tiny bit more succinct, if you don't mind using Bluebird.
It could look something like:
const user = await User.findOne(args.user);
const ids = user.following.concat(user._id);
const allLync = await Promise.map(ids, (id) => Lync.find({ "author": id }));
console.log(allLync);
http://bluebirdjs.com/docs/api/promise.map.html. I have really enjoyed using it.
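For example, a sketch of the same call with Bluebird's optional concurrency limit (the cap of 3 is just an arbitrary example value):
// Sketch: same Promise.map call as above, but with at most 3 Lync.find
// queries running at a time (Bluebird's documented concurrency option).
const allLync = await Promise.map(ids, (id) => Lync.find({ "author": id }), { concurrency: 3 });
console.log(allLync);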