I have an array of Facebook group IDs that I want to check in order, and if one fails, skip to the next. When they have all finished executing, I want to send all the results to my server.
The code below shows the gist of what I want to accomplish (of course, it does not work):
var groups = [1111, 2222, 3333];
var feed_collection = [];

// Unfortunately this for loop does not wait for the FB api calls to finish before looping to next
for (var x = 0; x < groups.length; x++) {
    FB.api("/" + groups[x] + "/feed", function (response, err) {
        feed_collection += response;
    });
}

// Send the feed_collection to server
feed_collection.sendToServer();
How can I get my for loop to wait? I know I can use $q.all(), but I am stuck on how to generate the promises and save them to a promise array. This code is on the client side and I am using AngularJS 1, but I am open to any approach. Many thanks!
function getGroupFeed(groupId) {
    var deferred = $q.defer();
    FB.api('/' + groupId + '/feed', function (response, err) {
        if (err) return deferred.reject(err);
        return deferred.resolve(response);
    });
    return deferred.promise;
}
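As an aside, if I remember the Facebook JS SDK correctly, FB.api passes a single response object to its callback and reports failures via response.error rather than a separate err argument. If that is the case for your SDK version, the factory would look roughly like this (a sketch under that assumption):

function getGroupFeed(groupId) {
    var deferred = $q.defer();
    FB.api('/' + groupId + '/feed', function (response) {
        // assumes the standard FB JS SDK shape: errors arrive as response.error
        if (!response || response.error) {
            deferred.reject(response ? response.error : 'Unknown error');
        } else {
            deferred.resolve(response);
        }
    });
    return deferred.promise;
}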
Either way, that is how you can quickly generate a promise in Angular. Now you can create an array of promises:
var groupPromises = groups.map(function (groupId) {
    return getGroupFeed(groupId);
});
Or if you insist on a for loop:
var groupPromises = [];
for (var i = 0, len = groups.length; i < len; i++) {
    groupPromises.push(getGroupFeed(groups[i]));
}
This array of promises can then be used in $q.all, which is only resolved once all promises in the array are resolved.
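Since the original post also wants failed groups to be skipped rather than abort the whole batch, here is a sketch of the final step. Note that $q.all rejects as soon as any promise rejects, so failures are converted to null first; sendToServer is a hypothetical stand-in for whatever actually posts the data to your server:

var safePromises = groupPromises.map(function (p) {
    // convert a failed feed request into a null result so the batch continues
    return p.catch(function () { return null; });
});

$q.all(safePromises).then(function (feeds) {
    // feeds is in the same order as the groups array; failed groups are null
    sendToServer(feeds.filter(function (f) { return f !== null; }));
});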
Related
I'm pretty new here and I have a problem in my Node.js backend.
I have a list of objects that help me find cars in the database, so I prepare the promises to get the data one by one and pass the array to a Promise.all to trigger them.
The getCar function works every time when I send the requests one by one, but when I run Promise.all with the array I get a connection pool error. What is the problem?
function requestData(listOfCar) {
    const promiseList = [];
    for (let i = 0; i < listOfCar.length; i++) {
        promiseList.push(getCar(listOfCar[i]));
    }
    return Promise.all(promiseList); // crashes, but sending the promises one by one works
}
function getCar(carFinder) {
    // does things and returns a Sequelize find query for a car,
    // so it returns a promise
}
Promises start executing as soon as they are created; they do not wait for Promise.all to trigger them.
So your problem is that you are most likely firing too many requests at the database at once, and the connection pool cannot accept any more.
To fix that you can increase the size of the connection pool, or you can process the promises in small chunks of 5 and await each Promise.all:
async function requestData(listOfCar) {
    let carLists = [];
    for (let i = 0; i < listOfCar.length; i += 5) {
        // build a chunk of at most 5 promises
        const tmpListOfPromise = [];
        for (let j = i; j < i + 5 && j < listOfCar.length; j++) {
            tmpListOfPromise.push(getCar(listOfCar[j]));
        }
        // wait for this chunk to finish before starting the next one
        const responses = await Promise.all(tmpListOfPromise);
        carLists = carLists.concat(responses); // this pushes every response into the list
    }
    return carLists;
}
function getCar(carFinder) {
    // does things and returns a Sequelize find query for a car,
    // so it returns a promise
}
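For the first option (a larger connection pool), the pool size is set when the Sequelize instance is created. A minimal sketch, assuming a MySQL database and placeholder credentials; the right numbers depend on what your database server can handle:

const Sequelize = require('sequelize');

const sequelize = new Sequelize('database', 'username', 'password', {
    host: 'localhost',
    dialect: 'mysql',
    pool: {
        max: 20,        // maximum number of open connections
        min: 0,         // minimum number of open connections
        acquire: 30000, // ms to wait for a free connection before erroring
        idle: 10000     // ms a connection may sit idle before being released
    }
});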
I've got an array of names which I need to retrieve data from, and I'm currently doing this with $.getJSON inside a loop. It works and I can retrieve the data, but to output the correct value I need to use setTimeout or similar. I'm wondering if there's a more refined way of doing what I'm looking to achieve.
Here's what I've got.
var names = ["riotgames", "example"];
var online = [];

for (var i = 0; i < names.length; i++) {
    $.getJSON('https://api.twitch.tv/kraken/streams/' + names[i], function (data) {
        if (data.stream != null) {
            online.push(data.stream.channel.display_name);
        }
    });
}

console.log(online); // outputs []

setTimeout(function () {
    console.log(online); // outputs correctly
}, 1000);
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
When you call $.getJSON, you are triggering asynchronous requests. They run in the background; you do not wait for them to finish, and their callbacks fire (like an event) once each request is done.
This means you cannot rely on online being populated outside the callback(s) until all the requests have completed.
If you want to "wait" for all the requests to finish, I suggest using promises. You can use $.when to combine all the requests into one promise, then run a callback once everything is done.
var names = ["riotgames", "example"];
var promises = [];

for (var i = 0; i < names.length; i++) {
    // $.getJSON returns a promise
    promises.push($.getJSON('https://api.twitch.tv/kraken/streams/' + names[i]));
}
// Combine all promises
// and run a callback
$.when.apply($, promises).then(function () {
    var online = [];
    // This callback will be passed the result of each AJAX call as a parameter
    for (var i = 0; i < arguments.length; i++) {
        // arguments[i][0] is needed because each argument
        // is an array of 3 elements:
        // the data, the status, and the jqXHR object
        var data = arguments[i][0];
        if (data.stream != null) { // keep the original null check for offline channels
            online.push(data.stream.channel.display_name);
        }
    }
    console.log(online);
});
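One caveat worth adding (a jQuery quirk, not something covered in the original answer): when the promises array contains exactly one request, $.when passes (data, status, jqXHR) to the callback directly instead of one array per request, so the indexing above breaks. A sketch of a guard for that case:

$.when.apply($, promises).then(function () {
    // with a single promise the callback gets the flat (data, status, jqXHR)
    // arguments, so wrap them to keep results[i][0] working in both cases
    var results = promises.length === 1 ? [arguments] : arguments;
    var online = [];
    for (var i = 0; i < results.length; i++) {
        var data = results[i][0];
        if (data.stream != null) {
            online.push(data.stream.channel.display_name);
        }
    }
    console.log(online);
});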
I've searched high and low, but I just can't seem to wrap my head around $q.defer() and creating my own promise.
I have a service, getDataService, which does exactly that: it $http.gets data from a REST server. However, only one of each variable can be sent at a time, so if the user wants to query the server for two entities and return all the associated data, they must send two requests. Because of this I use a method that keeps i as the actual count (a closure) and runs my get-data function the appropriate number of times:
keepICorrect: function (security) {
    var self = this;

    for (var i = 0; i < entity.length; i++) {
        self.getDataFromREST(security, i);
    }
},
I call this from my main controller as a promise:
$scope.apply = function (security) {
    var myDataPromise = getDataService.keepICorrect(security);

    myDataPromise.then(function () {
        //DO STUFF
    }, function (error) {
        alert("Error Retrieving Data");
        return $q.reject(error);
    });
};
This worked when using .getDataFromREST() directly, but obviously doesn't now that I have to route through my new loop function, keepICorrect().
My question is: how on earth do I create a promise that spans from my service to my controller and, on top of that, waits to resolve or reject depending on whether all i requests have completed?
You need to create an array of promises:
keepICorrect: function (security) {
    var self = this;
    var promises = [];

    for (var i = 0; i < entity.length; i++) {
        promises.push(self.getDataFromREST(security, i));
    }

    return promises;
},
And then wait for all of them to complete using the $q library in Angular:
$q.all(getDataService.keepICorrect(security))
.then(....
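Put together with the controller from the question, that would look roughly like this (a sketch; the error handling is carried over from the original $scope.apply):

$scope.apply = function (security) {
    $q.all(getDataService.keepICorrect(security))
        .then(function (results) {
            // results is an array with one entry per getDataFromREST call
            //DO STUFF
        }, function (error) {
            alert("Error Retrieving Data");
            return $q.reject(error);
        });
};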
I have the following loop which I have to make against my client API. On each iteration of the loop I must add the data returned from the API call as an object to an object array, and at the end of the loop I need to display the contents of the object array.
Due to the asynchronous nature of JS code execution, displaying the object array's contents always returns undefined, so I was wondering if someone can please help me with a solution to this problem. Thanks.
var invoiceObj = {};
var invoiceObjArray = [];

for (var i = 0; i < 5; i++) {
    //getAllInvoices returns a promise from $http.GET calls...
    ClientInvoiceService.getAllInvoices(i).then(function (invoice) {
        invoiceObj = { invoiceNum: invoice.Result[0].id,
                       clientName: invoice.Result[0].clientName };
        invoiceObjArray.push(invoiceObj);
    }, function (status) {
        console.log(status);
    });
}

console.log(invoiceObjArray[0]); //return undefined
console.log(invoiceObjArray[1]); //return undefined
What you'll need to do is store all the promises and then pass them to $q.all, which will wrap them in one big promise that is only resolved once all of them are resolved.
Update your code to:
var invoiceObj = {};
var invoiceObjArray = [];
var promises = [];

for (var i = 0; i < 5; i++) {
    //getAllInvoices returns a promise...
    promises.push(ClientInvoiceService.getAllInvoices(i).then(function (invoice) {
        invoiceObj = { invoiceNum: invoice.Result[0].id,
                       clientName: invoice.Result[0].clientName };
        invoiceObjArray.push(invoiceObj);
    }, function (status) {
        console.log(status);
    }));
}

$q.all(promises).then(function () {
    console.log(invoiceObjArray[0]);
});
This Egghead video is a nice tutorial on using $q.all.
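As a side note, a slightly cleaner variant of the same idea (a sketch, not part of the original answer) is to have each promise resolve to its invoice object and use the results array that $q.all passes to .then; that avoids the shared invoiceObj variable and keeps the results in request order:

var promises = [];

for (var i = 0; i < 5; i++) {
    promises.push(ClientInvoiceService.getAllInvoices(i).then(function (invoice) {
        // resolve to the object we care about instead of pushing into a shared array
        return { invoiceNum: invoice.Result[0].id,
                 clientName: invoice.Result[0].clientName };
    }, function (status) {
        console.log(status);
    }));
}

$q.all(promises).then(function (invoiceObjArray) {
    // invoiceObjArray[i] corresponds to getAllInvoices(i)
    console.log(invoiceObjArray[0]);
    console.log(invoiceObjArray[1]);
});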
I have a service SQLService in my PhoneGap/AngularJS app that runs when the app is loading. It iterates through a long array of Guidelines and makes a DB transaction for each one. How can I signal that the final transaction has completed?
What I want to have happen is something like:
1. In the controller, call `SQLService.parseJSON`
2. `parseJSON` calls `generateIntersectionSnippets`
3. `generateIntersectionSnippets` makes multiple calls to `getKeywordInCategory`
4. When the final call to `getKeywordInCategory` completes, resolve the whole chain
5. `SQLService.parseJSON` is complete, fire `.then`
I really don't understand how to combine the multiple asynchronous calls here. ParseJSON returns a promise which will be resolved when generateIntersectionSnippets() is completed, but generateIntersectionSnippets() makes multiple calls to getKeywordInCategory which also returns promises.
Here's a simplified version of what's not working (apologies for any misplaced brackets, this is very stripped down).
What I want to happen is for $scope.ready = 2 to run at the completion of all of the transactions. Right now, it runs as soon as the program has looped through generateIntersectionSnippets once.
In the controller:
SQLService.parseJSON().then(function (d) {
    console.log("finished parsing JSON");
    $scope.ready = 2;
});
Service:
.factory('SQLService', ['$q',
    function ($q) {

        function parseJSON() {
            var deferred = $q.defer();

            function generateIntersectionSnippets(guideline, index) {
                var snippet_self, snippet_other;

                for (var i = 0; i < guideline.intersections.length; i++) {
                    snippet_self = getKeywordInCategory(guideline.line_id, snippets.keyword).then(function () {
                        //Should something go here?
                    });
                    snippet_other = getKeywordInCategory(guideline.intersections[i].line_id, snippets.keyword).then(function () {
                        //Should something go here?
                    });
                }
            }

            generateIntersectionSnippets();
            deferred.resolve(); //Is fired before the transactions above are complete

            return deferred.promise;
        } //End parseJSON
        function getKeywordInCategory(keyword, category) {
            var deferred = $q.defer();
            var query = "SELECT category, id, chapter, header, snippet(guidelines, '<b>', '</b>', '...', '-1', '-24' ) AS snip FROM guidelines WHERE content MATCH '" + keyword + "' AND id='" + category + "';",
                results = [];

            db.transaction(function (transaction) {
                transaction.executeSql(query, [],
                    function (transaction, result) {
                        if (result != null && result.rows != null) {
                            for (var i = 0; i < result.rows.length; i++) {
                                var row = result.rows.item(i);
                                results.push(row);
                            }
                        }
                    }, defaultErrorHandler);

                deferred.resolve(results); //resolves before the executeSql success callback has run
            }, defaultErrorHandler, defaultNullHandler);

            return deferred.promise;
        }

        return {
            parseJSON: parseJSON
        };
    }]);
I'd appreciate any guidance on the correct model for a chain of promises that includes iterating across multiple async transactions. I know that what I have right now is not correct at all.
You can use $q.all() to wait for a list of promises to be resolved.
function parseJSON() {
    var deferred = $q.defer();
    var promiseList = [];

    for (var i = 0; i < guideline.intersections.length; i++) {
        promiseList.push(getKeywordInCategory(guideline.line_id, snippets.keyword));
        promiseList.push(getKeywordInCategory(guideline.intersections[i].line_id, snippets.keyword));
    }

    $q.all(promiseList).then(function () {
        deferred.resolve();
    });

    return deferred.promise;
} //End parseJSON
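Two follow-up points worth noting. First, since $q.all itself returns a promise, the extra deferred isn't strictly needed; parseJSON could simply return $q.all(promiseList). Second, for this to work, each getKeywordInCategory promise has to resolve only after its query's success callback has run; in the code from the question the resolve happens too early. A sketch of that change, keeping the question's query and handlers:

function getKeywordInCategory(keyword, category) {
    var deferred = $q.defer();
    var query = "...", // same query string as in the question
        results = [];

    db.transaction(function (transaction) {
        transaction.executeSql(query, [],
            function (transaction, result) {
                if (result != null && result.rows != null) {
                    for (var i = 0; i < result.rows.length; i++) {
                        results.push(result.rows.item(i));
                    }
                }
                // resolve only once the rows have actually been collected
                deferred.resolve(results);
            }, defaultErrorHandler);
    }, defaultErrorHandler, defaultNullHandler);

    return deferred.promise;
}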