Know when jqXHRs of an array are all completed - javascript

I'm trying to run some code once all the jqXHR elements of an array are completed (have either succeeded or failed).
You can see the full code here: http://jsfiddle.net/Lkjcrdtz/4/
Basically I'm expecting the always hook from here:
$.when
.apply(undefined, reqs)
.always(function(data) {
console.log('ALL ALWAYS', data);
});
to run when all the requests that were piled up there have either succeeded or failed. Currently, you can observe in the console that ALL ALWAYS is logged before all of the requests have finished.

The always callback fires early because $.when rejects its combined promise as soon as any one of the aggregated requests fails, without waiting for the rest. A simple solution for modern browsers would be to use the newer fetch() API along with Promise.all():
var makeReq = function(url, pos) {
var finalUrl = url + pos;
// intentionally make this request a failed one
if (pos % 2 === 0) {
finalUrl = "https://jsonplaceholder.typicode.com/423423rfvzdsv";
}
return fetch(finalUrl).then(function(resp) {
console.log('Request for user #', pos);
// if successful request return data promise otherwise return something
// that can be used to filter out in final results
return resp.ok ? resp.json() : {error: true, status: resp.status, id: pos }
})
};
// mock API
var theUrl = "https://jsonplaceholder.typicode.com/users/";
var reqs = [];
for (var i = 1; i <= 5; i++) {
reqs.push(makeReq(theUrl, i));
}
Promise.all(reqs).then(function(results) {
console.log('---- ALL DONE ----')
// filter out good requests
results.forEach(function(o) {
if (o.error) {
console.log(`No results for user #${o.id}`);
} else {
console.log(`User #${o.id} name = ${o.name}`);
}
})
})

Related

Checking if image exists produces 403 error

I have a range of hosted images, but their number and URLs can change. However, they always follow the same incrementing naming convention. In order to store the images that exist, I loop through the available images until I encounter an error. At that point I store the results and continue with the operations.
function checkForExistingImage(u, r = [], i = 0){
var url = u + i;
$.get(url).done(function() {
r.push(url);
checkForExistingImage(u, r, i + 1);
}).fail(function(){
//continue working with the 'r' array
});
}
However, this will always result in a 403 (Image not found) error in the console, because the last image checked will never exist.
How can I avoid triggering this error, or suppress it if needed?
I would definitely rewrite this in a more civilised manner. The function below does not log any errors other than those it explicitly logs itself (which you can remove).
It should also be safer to use, since it does not bombard the server with too many requests per second, but you can reduce or remove the timeout if that is not a concern.
function Timeout(time) {
return new Promise(function(resolve) {setTimeout(resolve, time);});
}
async function checkForExistingImages(baseUrl, maxImages, startImage = 0) {
const results = [];
// Some sanity check on params
if(maxImages < startImage) {
let tmp = maxImages;
maxImages = startImage + 1;
startImage = tmp;
}
// from i to max
for(let i=startImage; i<maxImages; ++i) {
// Create image URL, change this as needed
const imageURL = baseUrl + i + ".png";
// `fetch` does not throw usually, but we wanted to avoid errors in console
try {
// Response will have 200/404/403 or something else
const response = await fetch(imageURL);
if(response.status == 200) {
results.push(imageURL);
}
else {
console.info("Image",imageURL,"was removed.");
// stop loading
break;
}
}
// complete failure, some major error occurred
catch(e) {
console.warn("Image",imageURL, "failed to send request!");
break;
}
// If you're getting throttled by the server, use timeout
await Timeout(200);
}
return results;
}

How to adjust variable based on fetch result

I am working on a script that will fetch posts from a paginated REST API provided by a website managed by WordPress.
What I realized is that the WordPress REST API is paginated and has a cap of 100 objects per request.
Within the code below, I am trying to fetch posts page by page, with 20 posts per page. In the end, I am joining all the fetched objects into one big array.
My problem is that the fetch fails with an HTTP 404 response, since the last request contains fewer than 20 posts.
I would like to adjust the variable named 'limitPerPage' if the fetch returns a 404, decrementing the variable until I get a 200 HTTP response.
My challenge is that I am not experienced in working with fetch promises.
Please see my current script below:
console.log('REST API is this: ' + apiUrl);
const getPosts = async function(pageNo = 1) {
let limitPerPage = 20;
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
let apiResults = await fetch(requestUrl)
.then(function(response){
return response.json();
})
.catch(function(error){
console.log(error.status);
});
return apiResults;
}
const getEntirePostList = async function(pageNo = 1) {
const results = await getPosts(pageNo);
console.log('Retreiving data from API for page : ' + pageNo);
if (results.length > 0) {
return results.concat(await getEntirePostList(pageNo+1));
} else {
return results;
}
};( async () => {
const entireList = await getEntirePostList();
console.log(entireList);
})
();
I expect the code to decrement the variable 'limitPerPage' by 1 if the fetch returns a 404 HTTP response.
I am not necessarily asking for a final solution to my problem. I would appreciate a suggestion for another way to structure my code to get the result I need.
Thanks!
I think the following code should work. Always use a try...catch block inside async functions:
let entireList = [];
let finishEvent = new Event('finished');
document.addEventListener('finished', function (e) {
console.log(entireList);
}, false);
const getPosts = function (pageNo = 1) {
let limitPerPage = 20;
let requestUrl = `${apiUrl}?page=${pageNo}&per_page=${limitPerPage}`;
return fetch(requestUrl)
.then(function (response) {
return response.json();
})
.catch(function (error) {
console.log(error.status);
return false;
});
}
const getEntirePostList = async function (pageNo = 1) {
try {
const results = await getPosts(pageNo);
console.log('Retreiving data from API for page : ' + pageNo);
if (results && (results.length > 0)) {
entireList = entireList.concat(results);
getEntirePostList(pageNo + 1);
} else {
document.dispatchEvent(finishEvent);
}
return;
} catch(e) {
console.log(e)
}
};
getEntirePostList();
I managed to solve the issue myself with the help of the suggestions above. Please find the solution here:
// Constant variable with the assigned value of a joined string containing the base URL structure of the REST API request.
const apiUrl = websiteUrl + '/wp-json/wp/v2/punkt/';
// Logging out the base URL of the REST API.
console.log('REST API is this: ' + apiUrl);
// Local variable with the default value of true assigned. Variable is used to control the paginated fetch of the API.
let keepfetchingPosts = true;
// Local variable that contains the limit of posts per request.
let limitPerPage = 20;
// Constant variable, which is assigned a function as the value. The function is async and will return a promise.
// The function takes one argument. The argument holds the value of the page number.
const getPosts = async function(pageNo = 1) {
// Local variable assigned with the base URL string of the REST API request. Additional queries are added to the end of the string.
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
// Logging out the REST API request
console.log('URL is this: ' + requestUrl);
// Logging out the argument 'pageNo'
console.log('Retreiving data from API for page : ' + pageNo);
// Local variable assigned with a fetch function that returns a promise. The URL request are used as the function argument.
let apiResults = await fetch(requestUrl)
// If request is success, then log and return the following to the local variable.
.then(function(response){
// Logging out the status code of the response.
console.log('HTTP response is: ' + response.status);
// Parse the JSON body and return it together with the status code of the response
return response.json().then(function(json){
return {
data: json,
status: response.status
}
})
})
// Catch the error and log it out within the console.
.catch(function(error){
console.log('HTTP response is: ' + error.status)
});
// If the number of returned posts is less than the limitPerPage variable and the status code is 200, then...
if (apiResults.data.length < limitPerPage && apiResults.status === 200){
// Set the boolean to false
keepfetchingPosts = false;
// Return the JSON of the successful response.
return apiResults.data;
} else if (apiResults.status === 200) {
// If the status code is 200, then return the JSON of the successful response
return apiResults.data;
} else {
// Otherwise, set the boolean to false
keepfetchingPosts = false;
}
}
// Defining a constant variable that holds an async function. An async function will always return a promise.
// The function takes one argument, which is set to 1 by default.
const getEntirePostList = async function(pageNo = 1) {
// Try and catch statement used to handle the errors that might occur.
try {
// Constant variable which is set to the return value of the function getPosts(). getPosts returns the successful paginated response of the request.
const results = await getPosts(pageNo);
// Logging out a string including the length of the array.
console.log('Current array contain ' + results.length + ' items...');
// Conditional statement that checks if the length of the array named 'results' is less than the variable named limitPerPage, and that the boolean is still true.
// If the conditions are met, the code will join the arrays into one big array.
if (results.length < limitPerPage && keepfetchingPosts === true) {
// Logging out a string that indicates an attempt to combine that last array to the existing array.
console.log('Combining last array!');
// Return the combined array.
return results;
} else if (keepfetchingPosts === true) {
// Logging out a string that indicates an attempt to combine the recent fetched array to the existing array.
console.log('Combining arrays!');
// Returning the new combined array and increments the pageNo variable with 1.
return results.concat(await getEntirePostList(pageNo+1));
} else {
// Logging out a string that indicates the script will stop fetching more posts from the REST API.
console.log('Stop fetching posts and return results');
// Returning the complete array.
return results;
}
// Catch statement that takes the argument of the error that occured.
} catch(error) {
// Logging out the error.
console.log(error);
}
};( async () => {
// Constant variable with the assigned value received from the function
const entireList = await getEntirePostList();
// Logging out the entire list of results collected from the REST API
console.log(entireList);
})
();
The code above returns a complete array of the JSON responses from all paginated REST API calls.
You could use a while loop, and decrement limitPerPage if the status code isn't 200:
console.log('REST API is this: ' + apiUrl);
const getPosts = async pageNo => {
let limitPerPage = 20;
let res = { status: 0 }
while (res.status !== 200) {
// rebuild the URL on every attempt so the decremented limitPerPage is actually used
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
res = await fetch(requestUrl)
.then(async r => ({ data: await r.json(), status: r.status }))
.catch(() => ({ status: 0 })); // keep looping on request errors instead of crashing
limitPerPage--
}
return res.data
}
const getEntirePostList = async (pageNo = 1) => {
const results = await getPosts(pageNo);
console.log('Retreiving data from API for page : ' + pageNo);
return results.length > 0
? results.concat(await getEntirePostList(pageNo + 1))
: results;
}
(async () => {
const entireList = await getEntirePostList();
console.log(entireList);
})()

Issue with mongoose.save never returning inside of promise

Update !!
I fixed my initial issue with the help of Dacre Denny's answer below. However, when writing tests for my code it turned out that the changes were not being saved before the server responded, therefore the company collection in my test database was empty. I fixed this issue with the following code:
Companies.find({ company_name: company.company_name }).then(found => {
if (found.length !== 0) {
return res.status(400).json({ error: "Company already exists" });
}
var userForms = company.users;
company.users = [];
const finalCompany = new Companies(company);
console.log(finalCompany);
var userPromises = [];
for (var x = 0; x < userForms.length; x++) {
var user = userForms[x].user;
user.company = finalCompany._id;
userPromises.push(userCreation(user));
}
return Promise.all(userPromises).then(responses => {
for (var x in responses) {
if (!responses[x].errors) {
finalCompany.addUser(responses[x]._id);
} else {
res.status(400).json(responses[x]);
}
}
return finalCompany;
});
})
// I moved the save in here !!!
.then((finalCompany) => {
finalCompany.save().then(()=>{
res.status(200).json({signup:"Successful"});
})
},(err) => {
res.json({error: err});
});
});
Original Issue
I am trying to create a mongoose document to represent a company. This code saves the model in my db, however it does not seem to respond with a status code or reply to Postman when I make a request.
I've used a debugger to step through the code, but I am very rusty on my JS and I am afraid I've gone into deep water with promises that's gone over my head.
router.post('/c_signup', auth.optional, (req, res, next) => {
const { body: { company } } = req;
var error_json = cbc(company);
if( error_json.errors.length > 0 ){
return res.status(422).json(error_json);
}
Companies.find({company_name: company.company_name})
.then((found) => {
if (found.length !== 0) {
return res.status(400).json({error: "Company already exists"});
}
var userForms = company.users;
company.users = [];
const finalCompany = new Companies(company);
var userPromises = [];
for (var x =0; x < userForms.length; x ++) {
var user = userForms[x].user;
user.company = finalCompany._id;
userPromises.push(userCreation(user));
}
Promise.all(userPromises).then((responses) => {
for (var x in responses){
if (!responses[x].errors){
finalCompany.addUser(responses[x]._id);
}
else {
res.status(400).json(responses[x]);
}
}
console.log("h2");
finalCompany.save(function () {
console.log("h3");
return res.status(200);
});
})
});
return res.status(404);
});
This is the output from the debugger, but the execution is hanging here:
h2
h3
There are a few issues here:
First, the save() function is asynchronous. You'll need to account for that by ensuring the promise that save() returns is returned to the handler that it is called in.
The same is true with the call to Promise.all() - you'll need to add that promise to the respective promise chain by returning that promise to the enclosing handler (see notes below).
Also, make sure the request handler returns a response either via res.json(), res.send(), etc, or by simply calling res.end(). That signals that the request has completed and should address the "hanging behaviour".
Although your code includes res.json(), there are many cases where it's not guaranteed to be called. In such cases, the hanging behaviour would result. One way to address this would be to add res.end() to the end of your promise chain as shown below:
Companies.find({ company_name: company.company_name }).then(found => {
if (found.length !== 0) {
return res.status(400).json({ error: "Company already exists" });
}
var userForms = company.users;
company.users = [];
const finalCompany = new Companies(company);
var userPromises = [];
for (var x = 0; x < userForms.length; x++) {
var user = userForms[x].user;
user.company = finalCompany._id;
userPromises.push(userCreation(user));
}
/* Add return, ensure that the enclosing then() only resolves
after "all promises" here have completed */
return Promise.all(userPromises).then(responses => {
for (var x in responses) {
if (!responses[x].errors) {
finalCompany.addUser(responses[x]._id);
} else {
res.status(400).json(responses[x]);
}
}
console.log("h2");
/* Add return, ensure that the enclosing then() only resolves
after the async "save" has completed */
return finalCompany.save(function() {
console.log("h3");
return res.status(200);
});
});
})
.then(() => {
res.end();
},(err) => {
console.error("Error:",err);
res.end();
});

Calling URLs iteratively using http.get() and resolving using $q.all()

I am implementing a scenario where I have to fetch data from multiple URLs iteratively, process it with some business logic, and display it on screen. I am implementing this in the controller as it is a requirement. All is well until Part 1, and I am getting the 6 promise objects in the promises array. But I am not getting the data into metricData; I see null in the console while running in the browser. I am sure that the data is coming in the URL response. I feel I am doing something silly in the $q.all method. Is this correct?
var calculateMutationsInDepth = function(){
//Part-1
var promises=[];
var metricData=[];
for(var depth=0 ; depth<6 ; depth++){
var resourceUrl = urlService(depth);
promises.push($http.get(resourceUrl)
.then(function(response){
return response.data;
},function(status){
return status;
}));
}
//Part-2 Resolving the promise array below
$q.all(promises).then(function(data){
for(var eachResult=0; eachResult < data.length; eachResult++){
if(null != data[eachResult]){
var eachDataObject = data[eachResult];
//For debugging console.log(eachDataObject);
for(var objCount=0; objCount < eachDataObject.length; objCount++){
if(eachDataObject[objCount].scope === "PRJ" || eachDataObject[objCount].scope === "FIL")
metricData.push(eachDataObject[objCount]);
}
}
}
});
if(metricData != null){
analyzeMutationData(metricData); //Calling a function with the aggregated data array where business logic is present
}
};
calculateMutationsInDepth(); //Calling the above function
Yes, something silly.
As written, analyzeMutationData(metricData) is called synchronously whereas metricData is populated asynchronously inside the $q.all(promises).then() callback.
Also, as written, the error handler function(status){ return status; } is inappropriate. Either:
omit the error handler entirely and allow any single $http error to prevent further processing in Part 2, or
return null, allowing processing to continue in Part 2, where the if(dataObject != null) test filters out any such error.
Here's the revised code with a few other tidies and a demonstration of what can be done if calculateMutationsInDepth() returns a promise.
var calculateMutationsInDepth = function() {
//Part-1
var depth, promises = [];
for(depth=0; depth<6; depth++) {
promises.push($http.get(urlService(depth))
.then(function(response) {
return response.data;
}, function(error) {
return null; // error recovery - `dataObject` below will be null
}));
}
//Part-2 Aggregate the promises, extract metric data and apply business logic
return $q.all(promises).then(function(data) { // note `return` here
var dataObject, i, j, metricData = [];
for(i=0; i<data.length; i++) {
dataObject = data[i];
if(dataObject != null) {
for(j=0; j<dataObject.length; j++) {
if(dataObject[j].scope === "PRJ" || dataObject[j].scope === "FIL") {
metricData.push(dataObject[j]);
}
}
}
}
// Analyse here, inside the .then()
if(metricData.length > 0) { // metricData is an array and will never be null, therefore test metricData.length.
return analyzeMutationData(metricData);
}
return null;
});
};
calculateMutationsInDepth().then(function(analysis) {
// all complete
// `analysis` is either null or whatever `analyzeMutationData(metricData)` returned.
}).catch(function(error) {
console.log(error);
});
Hope this helps you out! Let me know if it doesn't.

How to reorder execution of asynchronous functions?

if (option == 'Follow All') {
for (var i = 0; i < userArray.length; i++) {
followUser(params..);
}
// How to get this part to execute after followUser is done? (Basically when the for loop finishes)
alert("There was a problem processing your request on Twitter to follow the following users: " + $('#errored-users').val());
$('#errored-users').val('');
}
How can I call this first multiple times and wait for it to finish?
var followUser = function(params..) {
$.post('/api/1.0/followUser.php', {
'user_to_follow': username,
'username': user
}).done(function(data) {
if (!is_batch) {
alert("There was a problem processing your request on Twitter to follow #" + username);
} else {
//This currently gets executed last?
var names = $('#errored-users').val();
if (names == "") {
names = '#' + username + ', ';
} else {
names += '#' + username + ', ';
}
$('#errored-users').val(names);
}
});
};
Since you are already using jQuery, you can easily use the AJAX requests/promises and wait for all of them to complete. $.when can help you a lot with this:
var followUser = function(params..) {
// return the promise!
return $.post('/api/1.0/followUser.php', { ... });
};
if (option == 'Follow All') {
var promises = [];
for (var i = 0; i < userArray.length; i++) {
promises.push(followUser(...));
}
$.when.apply(null, promises)
.done(function() {
// all users were successfully followed
})
.fail(function() {
// at least one follow failed; no information about the others
alert("There was a problem processing your request...");
$('#errored-users').val('');
});
}
This will call the .done handler when all requests have completed, but it will call the .fail handler as soon as just one has failed.
If instead you want some handler to run after all requests have completed (either success or failure) you'd need to do it more manually, for example:
var followUser = function(params..) {
// return the promise!
return $.post('/api/1.0/followUser.php', { ... });
};
if (option == 'Follow All') {
var outcomes = { done: [], failed: [] };
var total = userArray.length;
function allFinished() {
return outcomes.done.length + outcomes.failed.length == total;
}
for (var i = 0; i < total; i++) {
followUser(...)
.done(function() {
outcomes.done.push(username);
})
.fail(function() {
outcomes.failed.push(username);
})
// this must come last
.always(function() {
if (allFinished()) {
// outcomes contains the results
}
});
}
}
This will still use jQuery's notion of a request having succeeded or failed, which is based on Twitter's HTTP response codes. If you want to customize this behavior you can amend followUser as such:
var followUser = function(params..) {
return $.post('/api/1.0/followUser.php', { ... })
.then(
// first argument handles HTTP response successes, but you can
// convert them to failures here:
function(data) {
if (convertSuccessToFailure) {
return $.Deferred().reject(data);
}
});
};
As of jQuery 1.5, any of the $.ajax family of functions returns a promise - and you can combine multiple promises into a new promise that will be resolved when all the child promises are resolved, using $.when:
function followUser(/* params */) {
return $.post('/api/1.0/followUser.php', {
user_to_follow: username,
username: user
});
}
var userRequests = [];
for (var i = 0, l = userArray.length; i < l; i++) {
userRequests.push(followUser(/* params */));
}
$.when.apply($, userRequests).then(function(data) { /* etc. */ });
You could define a global variable which holds the number of calls to followUser:
if (option == 'Follow All') {
var countUsers = userArray.length;
for (var i = 0; i < countUsers; i++) {
followUser(params..);
}
}
Then you change the anonymous function to count backwards and execute your last statement if all users are done:
function(data) {
if (!is_batch) {
alert("There was a problem processing your request on Twitter to follow #" + username);
} else {
(...)
}
countUsers--;
if(countUsers == 0){
alert("There was a problem processing your request on Twitter to follow the following users: " + $('#errored-users').val());
$('#errored-users').val('');
}
};
A potential solution for this is to use Promises (see here for an in-depth explanation). They provide a style of coding in JavaScript that effectively lets you write asynchronous code as if it were synchronous. (This is a big simplification of Promises - see here for an article explaining it a little bit more.)
There are various implementations which you could use. The one I use most is found here: https://github.com/cujojs/when. The examples provided within its wiki demonstrate the power of promises (see here).
The basic outline for your code using when.js would look and read something like this:
if (option == 'Follow All') {
var deferreds = [];
for (var i = 0; i < userArray.length; i++) {
deferreds.push(followUser(params..));
}
when.all(deferreds).then(function everythingWasFine(succeededUsernames) {
//do something with the responses e.g.
alert(succeededUsernames.length + ' users were followed');
},
function somethingWentWrong(failedUsernames) {
alert("There was a problem processing your request on Twitter to follow the following users: " + failedUsernames.join(','));
});
}
var followUser = function(params..) {
var defer = when.defer();
$.post('/api/1.0/followUser.php', {
'user_to_follow': username,
'username': user
}).done(function(data) {
if (failure) {
defer.reject(username);
} else {
defer.resolve(username);
}
});
return defer.promise;
};
