I've tried going through this question and also this, but I can't seem to figure out how to make my requirements work.
I want to call https://reqres.in/api/users a number of times over a loop. This AJAX call returns only the first page of dummy users. After I get the first page, I want to call the next pages.
Here's my code:
$(document).ready(function() {
    function getMoreUsers() {
        var def = $.Deferred();
        var requests = [];
        for (var j = 2; j <= 4; j++) {
            console.log("getting info for page # " + j);
            requests.push(
                $.ajax("https://reqres.in/api/users?page=" + j).done(function() {
                    console.log("got info for page # " + j);
                    def.resolve();
                })
            );
        }
        return def.promise();
    }

    function getAllUsers() {
        var def = $.Deferred();
        var requests = [];
        for (var i = 0; i < 2; i++) {
            console.log("iteration # " + i);
            requests.push(
                $.ajax("https://reqres.in/api/users").done(function(data) {
                    console.log("got first page info");
                    getMoreUsers();
                    def.resolve();
                })
            );
        }
        return def.promise();
    }

    getAllUsers().done(function() {
        console.log("all completed");
    });
});
The output that I get is this:
iteration # 0
iteration # 1
got first page info
getting info for page # 2
getting info for page # 3
getting info for page # 4
all completed
got first page info
getting info for page # 2
getting info for page # 3
getting info for page # 4
got info for page # 5
However, I want this:
iteration # 0
got first page info
getting info for page # 2
got info for page # 2
getting info for page # 3
got info for page # 3
getting info for page # 4
got info for page # 4
iteration # 1
got first page info
getting info for page # 2
got info for page # 2
getting info for page # 3
got info for page # 3
getting info for page # 4
got info for page # 4
all completed
I don't even understand how page # 5 got into the output when I'm only looping till 4, and it appeared 6 times.
The page # 5 lines come from the classic var closure problem: every done() callback shares the same loop variable j, which has already been incremented to 5 by the time any response arrives, and since getMoreUsers() runs twice with three requests each, you see it six times. Your outer def.resolve() also fires as soon as the first request completes, which is why "all completed" shows up too early. Why not keep it simple?
var getUsers = function(i) {
    $.ajax("https://reqres.in/api/users?page=" + i).done(function() {
        if (i < 5) {
            getUsers(i + 1);
        } else {
            // done!
        }
    });
};

getUsers(1);
Update:
Thanks, recursion does seem to work, but if I attach a done() to getUsers() like so - getUsers(1).done(function() { console.log("all done");}); it doesn't fire. I don't understand. I thought $.ajax() returned a deferred object on its own.
My code was just a hint at how you can resolve your issue; anyway, let me help you further.
There is a simple way:
$.ajax("https://reqres.in/api/users?page=" + i).done(function() {
// use data, store it in array outside or draw HTML
if (i < 5) {
getUsers(i + 1);
}else{
//done! do something when finished
// iAmDoneHere()
}
});
But if you want to use Deferred: yes, $.ajax returns a Deferred. Recursion works well, but I guess you want to execute a final "downloaded all!" function. In that case you need to improve the code a bit.
var pages = [];

var getUsers = function(maxPage, currentPage, deferred) {
    if (!currentPage) {
        // this is the top-level call, not a recursive one,
        // so create the deferred that every recursion will share
        currentPage = 1;
        deferred = $.Deferred();
    }
    $.ajax("https://reqres.in/api/users?page=" + currentPage).done(function(data) {
        // we got the page info, great! what next?
        pages.push(data);
        // if there is more to fetch, do it
        if (currentPage < maxPage) {
            // pass maxPage, the next page number, and the top deferred
            getUsers(maxPage, currentPage + 1, deferred);
        } else {
            // we downloaded the final page,
            // so now we can finally resolve the top deferred,
            // which was passed through every recursion
            deferred.resolve();
        }
    });
    return deferred;
};

getUsers(10).done(function() {
    // executed when all pages are done
    // results are stored in pages[]
});
The worst part is that I've already written a lot, and this could still be improved (the pages[] variable should live in a parent/global scope).
I want to say that managing asynchronous callbacks is really easy, but it's more advanced than making a simple callback.
If you work on a bigger project you'll probably write or use some class that does all of it for you without you worrying about anything, for example:
var getUsers = function(maxPages) {
    var d = $.Deferred();
    var pages = [];
    var queue = new Queue();
    for (var i = 1; i <= maxPages; i++) {
        queue.push(function(page) {
            return $.ajax("https://reqres.in/api/users?page=" + page).done(function(data) {
                pages.push(data);
            });
        }, i);
    }
    queue.run(function() {
        d.resolve(pages);
    });
    return d;
};

getUsers(10).done(function(pages) { /* all pages in "pages" */ });
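For reference, the Queue class above is hypothetical, not a jQuery or npm API; a minimal sketch of one with this push/run interface could look like this:
// Minimal sketch of the hypothetical Queue used above: it runs the
// queued tasks one at a time and fires a callback when all are done.
function Queue() {
    this.tasks = [];
}

Queue.prototype.push = function (fn, arg) {
    this.tasks.push({ fn: fn, arg: arg });
};

Queue.prototype.run = function (onDone) {
    var tasks = this.tasks;
    var index = 0;
    function next() {
        if (index >= tasks.length) {
            onDone();
            return;
        }
        var task = tasks[index++];
        // each task returns a jQuery promise; wait for it before continuing
        task.fn(task.arg).always(next);
    }
    next();
};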
this is done the right way, and you won't repeat your code if you will want to use queue in other place. also there ale plenty ready npm packages out there
also I need to mention I can see you really want to stick to deferred white lot of people just use callbacks instead deferred or promises for simple tasks.
// Deferred way
function doJob(arg1, arg2) {
    var d = $.Deferred();
    window.setTimeout(function() {
        d.resolve();
    }, 100);
    return d;
}

// Callback way
function doJob(arg1, arg2, callback) {
    window.setTimeout(function() {
        callback();
    }, 100);
}
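For comparison, here is how each version would be called (a small sketch; arg1/arg2 are just the placeholder parameters from the snippets above):
// Deferred way: the caller attaches its continuation to the returned Deferred
doJob(1, 2).done(function () {
    console.log("job finished");
});

// Callback way: the caller passes the continuation in directly
doJob(1, 2, function () {
    console.log("job finished");
});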
The callback way saves a bit of code and complexity, but offers fewer layers and options for the developer. Both methods are fine. I am saying all of this to let you know that there are many methods and there is no definitive answer to your question.
I would go with some queue; the callback solution is the simplest, and the Deferred/promise + recursion solution is OK.
On second thought, you don't need the deferred. Recursion is better.
$(document).ready(function () {
    var apiUrl = "https://reqres.in/api/users";
    var baseAjaxConfig = {
        method: "GET",
        url: apiUrl
    };
    var page = 1;
    var maxUsers = 5; // CHANGE THIS ACCORDING TO WHAT YOU WANT

    function getUser(page) {
        var ajaxConfig = $.extend({}, baseAjaxConfig, {data: {page: page}});
        $.ajax(ajaxConfig).done(function () {
            (page < maxUsers) && getUser(page + 1);
        }).fail(function () {
            (page < maxUsers) && getUser(page + 1);
        });
    }

    getUser(page);
});
here's a fiddle ==> https://jsfiddle.net/tonysamperi/5j8166be/
Related
I've spent the last few days trying to tackle this issue and have read all sorts of solutions on StackOverflow and other sites.
I'm building a site that grabs XML data from an outside source and then gets more XML depending on the results to build a network graph. The problem is that I have to essentially wait until this loop of AJAX calls (which may loop into more AJAX calls) is finished before drawing.
I don't know if this just has an especially high cognitive load, but it really has me stumped.
My Code:
function cont(outerNodes) {
    for (var i = 0; i < outerNodes.length; i++) {
        var node = outerNodes.pop();
        getXML(node["label"], node["id"]);
    }
    // I want the code to wait until the loop is done, and then draw.
    draw(nodes, edges);
}
function getXML(term, fromId) {
    var url = someURL;
    $.ajax({
        url: url,
        dataType: "xml",
        success: function(result) {
            var outerNodes = process(result, fromId, term);
            cont(outerNodes);
        }
    });
}
Note: I understand I may be completely misunderstanding JavaScript synchronicity here, and I very likely am. I have used callbacks and promises successfully in the past, I just can't seem to wrap my head around this one.
If I have not been totally clear, please let me know.
I did try implementing a counter of sorts that is incremented in the process() function, like so:
if (processCount < 15) {
    for (var i = 0; i < outerNodes.length; i++) {
        var node = outerNodes.pop();
        getXML(node["label"], node["id"]);
    }
} else {
    draw(nodes, edges);
}
However, this ended up with several draw() calls which made my performance abysmal.
There are nice new well-supported APIs and language constructs we can use: the Fetch API, await, and for...of loops.
The Fetch API uses Promises. Promises can be awaited. The for...of loop is aware of await and won't continue the loop until the await has passed.
// Loop through, one at a time (note: this must run inside an async function)
for (const node of outerNodes) {
    // Make the HTTP request
    const res = await fetch(someUrl);
    // Do something with the response here...
}
Don't forget a try/catch (which also works with await), and check res.ok.
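Putting that together, a sketch of the whole thing might look like this (outerNodes, someUrl, nodes, and edges are placeholders from the question, and this only shows one level of fetching; fetchAllNodes is my hypothetical name):
async function fetchAllNodes(outerNodes) {
    for (const node of outerNodes) {
        try {
            const res = await fetch(someUrl);
            if (!res.ok) {
                throw new Error("HTTP " + res.status);
            }
            const xmlText = await res.text();
            // Do something with the response here...
        } catch (err) {
            console.error("Request failed:", err);
        }
    }
    // Every request above has completed, so it is safe to draw now
    draw(nodes, edges);
}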
Brad's answer changes the code to be synchronous, and to me that defeats the purpose. If you are constantly waiting on all requests to finish, it could take a while, whereas normal browsers can handle multiple requests in parallel.
The problem you have in your original question is with scope. Since each call to cont(outerNodes) triggers its own scope, it has no idea what the other calls are doing. So basically if you call cont(outerNodes) twice, each call will handle its own list of outerNodes and then call draw.
The solution is to share information between the different scopes, which can be done with one, but preferably two, global variables: one to track active processes and one to track errors.
var inProcess = 0;
var nrErrors = 0;

function cont(outerNodes) {
    // make sure you have outerNodes before you call outerNodes.length
    if (outerNodes) {
        for (var i = 0; i < outerNodes.length; i++) {
            var node = outerNodes.pop();
            inProcess++; // one more in process
            getXML(node["label"], node["id"]);
        }
    }
    // only trigger when nothing is in process
    if (inProcess == 0) {
        // I want the code to wait until the loop is done, and then draw.
        draw(nodes, edges);
    }
}

function getXML(term, fromId) {
    var url = someURL;
    $.ajax({
        url: url,
        dataType: "xml",
        success: function(result) {
            var outerNodes = process(result, fromId, term);
            inProcess--; // one is done
            cont(outerNodes);
        },
        error: function() {
            inProcess--; // one is done
            nrErrors++; // one more error
            cont(null); // run without new outerNodes, to trigger a possible draw
        }
    });
}
Please note that I track nrErrors but don't do anything with it. You could use it to stop further processing altogether, or to warn the user that the draw is not complete.
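For example, the draw check in cont() could be extended like this (just a sketch of one option):
// only trigger when nothing is in process
if (inProcess == 0) {
    if (nrErrors > 0) {
        // warn that the graph may be incomplete
        console.warn(nrErrors + " request(s) failed; the draw may be incomplete");
    }
    draw(nodes, edges);
}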
[important] Keep in mind that this works in JavaScript because, at best, it only mimics multithreading. That means that the call to inProcess--; and the cont(outerNodes); right after it are always executed directly after each other.
If you were to port this to a true multithreading environment, it could very well be that another scope/version of cont(null); would cut in between the two lines, and there would still be multiple draws.
The best way to solve this should be using either a promise or a callback.
If you really want to avoid promises or callbacks (although I don't know why...),
you can try a counter.
let processCount = 0;

// Increase processCount in the getXML callback
function getXML(term, fromId) {
    var url = someURL;
    $.ajax({
        url: url,
        dataType: "xml",
        success: function(result) {
            processCount++;
            var outerNodes = process(result, fromId, term);
            cont(outerNodes);
        }
    });
}

for (var i = 0; i < outerNodes.length; i++) {
    var node = outerNodes.pop();
    getXML(node["label"], node["id"]);
}

while (processCount < outerNodes.length) {
    // do nothing, just wait
}

draw(nodes, edges);
If after testing it many times, you know that it will never take more than say 5 seconds... you can use a setTimeout.
function cont(outerNodes) {
    for (var i = 0; i < outerNodes.length; i++) {
        var node = outerNodes.pop();
        getXML(node["label"], node["id"]);
    }
    // Display a 5 second progress bar here
    setTimeout(function() { draw(nodes, edges); }, 5000);
}
Well, my code is something like this, just a few lines:
var a = "http://lnmtl.com/chapter/renegade-immortal-chapter-";
var b = 558;
var d = "rennegrade_ch";
var f = ".png";
var page = require('webpage').create();
var i = 0;
for (i = b; i < 560; i++) {
var c = a + i;
console.log(c);
page.open(c, function () {
var e = d + i + f;
console.log(e);
page.render(e);
});
}
phantom.exit();
The webpage can be rendered individually, but once I put it inside the for loop, all it does is print the first console output properly; the second one it skips. I guess it's not entering the page.open() callback; then the loop value increases and the same thing happens again. I have no idea why it's not reaching the render function. I also tried putting var page = require('webpage').create(); inside the for loop, but still no change.
UPDATE: On another question (stackoverflow.com/questions/31621577/png-is-not-being-rendered-using-phantomjs-with-multiple-attempts-in-a-loop?rq=1) it was pointed out that this method won't work because of the async nature of the function, but the example code provided there isn't helpful enough. Can anyone explain? I also tried setTimeout as suggested there, but the same thing happens. Any other ideas?
Your phantom.exit() call kills the PhantomJS browser before you do any rendering. You have to wait for the rendering to end before you can exit(). You need to have some mechanism to say when the rendering is done. I'd suggest wrapping each of your renders in a Promise. Then using a Promise.all() to wait for all the render promises to resolve. After they resolve, exit PhantomJS.
Right now, you have what is below, which does not respect the asynchronous nature of page.open():
for (...) {
    // I probably won't finish because phantom dies almost immediately
    page.open(c, function () {
        // I won't finish running since phantom dies
        page.render(e);
    });
}

// I'm going to kill the phantom almost immediately
phantom.exit();
You want something like the code below, which will wait for all the renders to finish. This will put renders of each of the sites we provide in a subdirectory "renders".
Note: You will need to install the es6-promise shim for this to work, since PhantomJS does not yet support Promises. Thanks for the comment about that, Artjon B.
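If you are using npm, the shim can be installed with:
npm install es6-promise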
/*jslint node:true*/
/*globals phantom*/
"use strict";

var Promise = require("es6-promise").Promise;

// Array of URLs that we want to render
var urlArr = [
    {
        name: "google",
        url: "http://www.google.com"
    },
    {
        name: "yahoo",
        url: "http://www.yahoo.com"
    },
    {
        name: "bing",
        url: "http://www.bing.com"
    }
];

// Map URLs to promises
var proms = urlArr.map(function (url) {
    // Return a promise for each URL
    return new Promise(function (resolve, reject) {
        // Make a page
        var page = require("webpage").create();
        // Open the URL
        console.log("opening: " + url.name);
        page.open(url.url, function () {
            // Render the page
            page.render("render/" + url.name + ".png");
            console.log("done rendering: " + url.name);
            // Say that we are done with rendering
            resolve();
        });
    });
});

// Wait for all rendering to finish
Promise.all(proms).then(function () {
    console.log("closing phantom");
    // Exit phantom
    phantom.exit();
});
For async requests inside a loop you should use an asynchronous-control library, so you can debug your code and avoid memory leak issues.
async-js will be good in your case:
npm install async
var async = require('async');
var a = "http://lnmtl.com/chapter/renegade-immortal-chapter-";
var b = 558;
var d = "rennegrade_ch";
var f = ".png";
var page = require('webpage').create();
var i = b;

async.whilst(
    function() {
        return i < 560;
    },
    function(callback) {
        var c = a + i;
        console.log(c);
        page.open(c, function() {
            var e = d + i + f;
            console.log(e);
            page.render(e);
            i++;
            callback(null, i);
        });
    },
    function(err, n) {
        if (err) console.log(err);
        phantom.exit();
    }
);
I have an array and I need to send the values of the array to a web service through HTTP POST requests, one by one. For Node.js I'm using the "async" package to do that (for example, async.eachSeries does it well). How can I do the same thing in AngularJS? Here is my normal async code:
// This code sends all the queries in the array at the same time (maybe 5,000 requests at once, which is hard on the web service) and waits for all the responses.
// It works, but actually each response should wait for the ones before it; the loop should run one by one,
// like the async.eachSeries module!
for (var i = 0; i < myArr.length; i++) {
    (function (i) {
        var data = {
            "myQuery": myArr[i].query
        };
        $http.post("/myServiceUrl", data).success(function (result) {
            console.log(result);
        });
    })(i);
}
Both Matt Way's and Chris L's answers are correct; you can look into Chris's answer to understand how to make async calls run sequentially in for loops.
You can use $q to create a similar requirement by chaining promises together. For example:
var chain = $q.when();
angular.forEach(myArr, function(item) {
    chain = chain.then(function() {
        var data = {
            myQuery: item.query
        };
        return $http.post('/myServiceUrl', data).success(function(result) {
            console.log(result);
        });
    });
});

// the final chain object will resolve once all the posts have completed.
chain.then(function() {
    console.log('all done!');
});
Essentially you are just running the next promise once the previous one has completed. Emphasis here on the fact that each request will wait until the previous one has completed, as per your question.
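Note that if any post in the chain is rejected, the remaining then() callbacks are skipped, so a rejection handler at the end can be useful; a small sketch:
chain.then(function () {
    console.log('all done!');
}, function (err) {
    // the first failed request lands here and stops the chain
    console.log('a request failed', err);
});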
function logResultFromWebService(value) {
    $http.post("/myServiceUrl", value).success(console.log);
}

angular.forEach(myArray, logResultFromWebService);
If I understand your question correctly, you want to run a for loop in a synchronized manner, such that the next iteration only occurs once the previous iteration has completed. For that, you can use a synchronized loop with callbacks, especially if the order matters.
var syncLoop = function (iterations, process, exit) {
var index = 0,
done = false,
shouldExit = false;
var loop = {
next: function () {
if (done) {
if (shouldExit && exit) {
return exit(); // Exit if we're done
}
}
// If we're not finished
if (index < iterations) {
index++; // Increment our index
process(loop); // Run our process, pass in the loop
// Otherwise we're done
} else {
done = true; // Make sure we say we're done
if (exit) exit(); // Call the callback on exit
}
},
iteration: function () {
return index - 1; // Return the loop number we're on
},
break: function (end) {
done = true; // End the loop
shouldExit = end; // Passing end as true means we still call the exit callback
}
};
console.log('running first time');
loop.next();
return loop;
}
For your particular implementation:
syncLoop(myArray.length, function (loop) {
var index = loop.iteration();
var data = {
"myQuery": myArray[index].query
};
$http.post("/myServiceUrl", data).success(function (result) {
console.log(result);
loop.next();
});
}, function () {
console.log('done');
});
If you intend to do something with the data once returned (such as performing calculations), you can do so with this method, because you will receive the data in a specified order.
I implemented something similar in a statistical calculation web app I built.
EDIT:
To illustrate the problem I had when using $q.when I have set up a fiddle. Hopefully this will help illustrate why I did this the way I did.
https://jsfiddle.net/chrislewispac/6atp3w8o/
Using the following code from Matt's answer:
var chain = $q.when(promise.getResult());
angular.forEach(myArr, function (item) {
    chain = chain.then(function () {
        $rootScope.status = item;
        console.log(item);
    });
});

// the final chain object will resolve once all the posts have completed.
chain.then(function () {
    console.log('all done!');
});
And this fiddle is an example of my solution:
https://jsfiddle.net/chrislewispac/Lgwteone/3/
Compare the $q version to my version. View the console and imagine those being delivered to the user interface for user intervention in the process and/or performing statistical operations on the sequential returns.
You will see that Matt's answer does not sequentially give the numbers 1, 2, 3, 4, etc., either in the console or in the view. It 'batches' the responses and then returns them. Therefore, if step 3 should not run depending on the response in step 2, there is no way, at least in the answer provided, to break out or explicitly control the synchronous operation. This presents a significant problem when attempting to perform sequential calculations and/or allow the user to control break points.
Now, I am digging through both the $q library and the Q library to see if there is a more elegant solution for this problem. However, my solution does work as requested and is very explicit, which allows me to place the function in a service and adapt it to certain use cases at will, because I completely understand what it is doing. For me, that is more important than using a library (at least at this stage in my development as a programmer, and I am sure there are lots of other people at the same stage on StackOverflow as well).
If the order in which they are sent doesn't matter:
var items = [/* your array */];
var promises = [];

angular.forEach(items, function(value, key) {
    var promise = $http.post("/myServiceUrl", { "myQuery": value.query });
    promises.push(promise);
});

return $q.all(promises);
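As a usage sketch (sendAll is my hypothetical name for a function wrapping the snippet above): $q.all resolves with an array of responses in the same order as the promises array, regardless of which request finished first.
sendAll().then(function (results) {
    angular.forEach(results, function (result) {
        console.log(result.data);
    });
});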
I have a list of 600 URLs that I want to loop over requests to.
Would it be possible to console.log the content of each one as I receive it, as opposed to waiting for all 600 to finish and then returning it all as one?
Sorry if this seems a bit vague; I'm not sure of the correct terms to use to describe this.
Meteor.methods({
    getNations: function () {
        this.unblock();
        var result = Meteor.http.get('https://www.easports.com/uk/fifa/ultimate-team/api/fut/item?jsonParamObject=%7B%22page%22:1%7D');
        var totalPages = JSON.parse(result.content).totalPages;
        for (var i = 1; i < totalPages; i++) {
            try {
                var page = Meteor.http.get('https://www.easports.com/uk/fifa/ultimate-team/api/fut/item?jsonParamObject=%7B%22page%22:' + i + '%7D');
                console.log(JSON.parse(page.content));
            } catch (e) {
                console.log('Cannot get page', e);
            }
        }
        return result;
    }
});
This will get all the pages, insert them into a collection, and make them available on the client. There are a few caveats, though. If you call the method multiple times you will get duplicates in the database; not sure if that's what you intended. Also, the error logging only happens on the server and is never displayed to the client; don't know if that's what you want either. Note that the second Meteor.http.get is passed a callback, which makes it run async.
// Shared code
Nations = new Mongo.Collection('nations');

// Server
Meteor.methods({
    getNations: function () {
        this.unblock();
        var result = Meteor.http.get('https://www.easports.com/uk/fifa/ultimate-team/api/fut/item?jsonParamObject=%7B%22page%22:1%7D');
        var totalPages = JSON.parse(result.content).totalPages;
        var callback = function(err, page) {
            if (err)
                console.log('Cannot get page', err);
            else
                Nations.insert(page);
        };
        for (var i = 1; i < totalPages; i++) {
            Meteor.http.get('https://www.easports.com/uk/fifa/ultimate-team/api/fut/item?jsonParamObject=%7B%22page%22:' + i + '%7D', callback);
        }
        return result;
    }
});

Meteor.publish('nations', function() {
    return Nations.find();
});

// Client
Meteor.subscribe('nations');
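On the client, once subscribed, the inserted pages can be read reactively and logged as they arrive; a minimal sketch (the autorun usage is my assumption, not part of the answer above):
// Client: log the pages as they arrive from the publication
Tracker.autorun(function () {
    var nations = Nations.find().fetch();
    console.log(nations.length + ' pages loaded so far');
});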
I have done a lot of reading around this, but ultimately the tutorials and guides I have found differ too much for me to get a decent grasp on this concept.
This is what I want to achieve:
1) Simple http request from our server [Any API for demonstration]
2) Run a function with data from (1). [Remove a property from the object]
3) Use result and length of (2) to run a loop of $http requests to our server. [Or any server]
4) This will result in 6 different objects. Run a function on these 6 objects. [Add a property]
5) Once ALL of this is done, run a separate function [Log "finished"]
How can this be achieved using promises? How do I pass data from (1) via a promise to (2)? Is this the right way to achieve what I need to do?
If anyone can show me how this should be structured it would be immensely helpful; I have kept the functions as simple as possible for this question.
Yes, promises are very nice to structure solutions for this kind of problems.
Simplified solution (more or less pseudo-code):
$http(...)
    .then(function(response) {
        // do something with response, for example:
        var list = response.data.list;
        // return it so that you can use it in the next 'then'.
        return list;
    })
    .then(function(list) {
        var promises = [];
        angular.forEach(list, function(item) {
            // perform a request for each item
            var promise = $http(...).then(function(itemResponse) {
                itemResponse.extraProperty = true;
                return itemResponse;
            });
            // we make an array of promises
            promises.push(promise);
        });
        // combine all promises into one and return it for the next then()
        return $q.all(promises);
    })
    .then(function(itemsList) {
        // itemsList is now an array of all parsed item responses.
        console.log(itemsList);
    });
(Hopefully this is right; I did not test it.)
As you can see, you can return values in a callback to pass it to the next then(), or you can pass a promise, and this will result in calling the next callback when it resolves. $q.all() is used to combine multiple promises into one and resolve if all are resolved.
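A tiny illustration of that rule (a sketch using $timeout, which also returns a promise):
$q.when(1)
    .then(function (x) {
        return x + 1; // plain value: the next then() runs right away with 2
    })
    .then(function (x) {
        return $timeout(function () {
            return x * 10; // promise: the next then() waits for it to resolve
        }, 100);
    })
    .then(function (x) {
        console.log(x); // logs 20 after roughly 100ms
    });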
Edit: I realised that you can optionally leave out these three lines:
return list;
})
.then(function(list) {
But it is nice syntax, because the separation of tasks is more visible.
Check the code below; it could contain syntax errors, but the important thing is the structure. Step3 contains multiple (6) $http requests; it waits until the last request responds, then returns a single response object (an array) containing the response of each $http request.
// Step 1
var Step1 = function () {
    $http.get('api/controller').success(function (resp) {
        var object1 = resp;
        Step2(object1);
        Step3(object1).then(function (resp) {
            // resp is an array containing the response of each $http request
            Step4(resp);
            Step5();
        });
    });
};

// Step 2
var Step2 = function (obj) {
    // do whatever with the object
};

// Step 3
var Step3 = function (object1) {
    var call = $q.defer();
    var get1 = $http.get(object1[0].url);
    var get2 = $http.get(object1[1].url);
    // ...
    var get6 = $http.get(object1[5].url);
    $q.all([get1, get2, /* ... */ get6]).then(function (resp) {
        call.resolve(resp);
    });
    return call.promise;
};

// Step 4
var Step4 = function (resp) {
    for (var i = 0; i < resp.length; i++) {
        DoWhatEver(resp[i].data);
    }
};

// Step 5
var Step5 = function () {
    alert("Finished");
};

Step1(); // Call Step1 function
Don't know why you have difficulty implementing this, but maybe $q.all() is what you're missing:
var config1 = {method: 'GET', url: '/api/...'};

$http(config1).success(function(resultsFrom1) {
    functionForResultsOf1(resultsFrom1);
});

var functionForResultsOf1 = function(resultsOf1) {
    // remove something from the result, assuming this is a synchronous operation
    resultsOf1.splice()...;
    var promises = makePromises(/* pass whatever you want */);
    $q.all(promises).then(function(aggregateOfAllCallsToServer) {
        angular.forEach(aggregateOfAllCallsToServer, function(data) {
            // do something to data from each call to the server
        });
        console.log("finished");
    });
};

var makePromises = function(serverUrls) {
    var promises = [];
    angular.forEach(serverUrls, function(url) {
        var promise = $http({
            url: '/api/' + url,
            method: 'GET'
        });
        promises.push(promise);
    });
    return promises;
};