How to adjust variable based on fetch result - javascript

I am working on a script that fetches posts from a paginated REST API provided by a website managed by WordPress.
What I realized is that the WordPress REST API is paginated and has a cap of 100 objects per request.
In the code below, I am trying to fetch posts page by page, 20 posts per page. At the end, I join all the fetched objects into one big array.
My problem is that the fetch fails with an HTTP 404 response, since the last request contains fewer than 20 posts.
I would like to adjust the variable named 'limitPerPage' if the fetch returns a 404, decrementing the variable until I get a 200 HTTP response.
My challenge is that I am not experienced in working with fetch promises.
Please see my current script below:
console.log('REST API is this: ' + apiUrl);
const getPosts = async function(pageNo = 1) {
let limitPerPage = 20;
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
let apiResults = await fetch(requestUrl)
.then(function(response){
return response.json();
})
.catch(function(error){
console.log(error.status);
});
return apiResults;
}
const getEntirePostList = async function(pageNo = 1) {
const results = await getPosts(pageNo);
console.log('Retrieving data from API for page : ' + pageNo);
if (results.length > 0) {
return results.concat(await getEntirePostList(pageNo+1));
} else {
return results;
}
};( async () => {
const entireList = await getEntirePostList();
console.log(entireList);
})
();
I expect the code to decrement the variable 'limitPerPage' by 1 if the fetch returns a 404 HTTP response.
I am not necessarily asking for a final solution to my problem. I would appreciate a suggestion for another way to structure my code to get the result I need.
Thanks!

I think the following code should work. Always use a try...catch block inside async functions:
let entireList = [];
let finishEvent = new Event('finished');
document.addEventListener('finished', function (e) {
console.log(entireList);
}, false);
const getPosts = function (pageNo = 1) {
let limitPerPage = 20;
let requestUrl = `${apiUrl}?page=${pageNo}&per_page=${limitPerPage}`;
return fetch(requestUrl)
.then(function (response) {
return response.json();
})
.catch(function (error) {
console.log(error.status);
return false;
});
}
const getEntirePostList = async function (pageNo = 1) {
try {
const results = await getPosts(pageNo);
console.log('Retrieving data from API for page : ' + pageNo);
if (results && (results.length > 0)) {
// concat returns a new array, so reassign the result instead of discarding it
entireList = entireList.concat(results);
await getEntirePostList(pageNo + 1);
} else {
document.dispatchEvent(finishEvent);
}
return;
} catch(e) {
console.log(e)
}
};
getEntirePostList();
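This version accumulates into a module-level array and signals completion through a custom DOM event, which only works in a browser. If you are running outside the browser, or simply prefer a return value, the same flow can be expressed with an ordinary promise. A minimal sketch that reuses the getPosts function above (collectAllPosts is a hypothetical helper, not part of the original answer):
const collectAllPosts = async function () {
    // hypothetical helper: loop page by page until getPosts returns an empty or falsy result
    let pageNo = 1;
    let allPosts = [];
    let results = await getPosts(pageNo);
    while (results && results.length > 0) {
        allPosts = allPosts.concat(results);
        pageNo++;
        results = await getPosts(pageNo);
    }
    return allPosts;
};

collectAllPosts().then(function (posts) {
    console.log(posts);
});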

I managed to solve the issue myself with help from the suggestions above. Please find the solution here:
// Constant variable with the assigned value of a joined string containing the base URL structure of the REST API request.
const apiUrl = websiteUrl + '/wp-json/wp/v2/punkt/';
// Logging out the base URL of the REST API.
console.log('REST API is this: ' + apiUrl);
// Local variable with the default value of true assigned. Variable is used to control the paginated fetch of the API.
let keepfetchingPosts = true;
// Local variable that contains the limit of posts per request.
let limitPerPage = 20;
// Constant variable, which is assigned a function as the value. The function is async and will return a promise.
// The function takes one argument. The argument holds the value of the page number.
const getPosts = async function(pageNo = 1) {
// Local variable assigned with the base URL string of the REST API request. Additional queries are added to the end of the string.
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
// Logging out the REST API request
console.log('URL is this: ' + requestUrl);
// Logging out the argument 'pageNo'
console.log('Retrieving data from API for page : ' + pageNo);
// Local variable assigned the result of a fetch call that returns a promise. The request URL is used as the function argument.
let apiResults = await fetch(requestUrl)
// If the request succeeds, then log and return the following to the local variable.
.then(async function(response){
// Logging out the status code of the response.
console.log('HTTP response is: ' + response.status);
// Return the parsed JSON (awaited, since response.json() returns a promise) and the status code of the request.
return {
data: await response.json(),
status: response.status
}
})
// Catch the error and log it out within the console.
.catch(function(error){
console.log('HTTP response is: ' + error.status)
});
// If the number of returned posts is less than the limitPerPage variable and the status code is 200, then...
if (apiResults.data.length < limitPerPage && apiResults.status === 200){
// Set the boolean to false
keepfetchingPosts = false;
// Return the JSON of the successful response.
return apiResults.data;
} else if (apiResults.status === 200) {
// If the status code is 200, then return the JSON of the successful response
return apiResults.data;
} else {
// Otherwise, set the boolean to false
keepfetchingPosts = false;
}
}
// Defining a constant variable that holds an async function. An async function will always return a promise.
// The function takes one argument, which is set to 1 by default.
const getEntirePostList = async function(pageNo = 1) {
// Try and catch statement used to handle the errors that might occur.
try {
// Constant variable which is set to the return value of the function getPosts(). getPosts returns the successful paginated response of the request.
const results = await getPosts(pageNo);
// Logging out a string including the length of the array.
console.log('Current array contains ' + results.length + ' items...');
// Conditional statement that checks if the length of the array named 'results' is less than the variable named limitPerPage, and whether the boolean is true.
// If the conditions are met, the code will join the arrays into one big array.
if (results.length < limitPerPage && keepfetchingPosts === true) {
// Logging out a string that indicates an attempt to combine that last array to the existing array.
console.log('Combining last array!');
// Return the combined array.
return results;
} else if (keepfetchingPosts === true) {
// Logging out a string that indicates an attempt to combine the recent fetched array to the existing array.
console.log('Combining arrays!');
// Returning the new combined array, incrementing the pageNo variable by 1.
return results.concat(await getEntirePostList(pageNo+1));
} else {
// Logging out a string that indicates the script will stop fetching more posts from the REST API.
console.log('Stop fetching posts and return results');
// Returning the complete array.
return results;
}
// Catch statement that takes the argument of the error that occurred.
} catch(error) {
// Logging out the error.
console.log(error);
}
};( async () => {
// Constant variable with the assigned value received from the function
const entireList = await getEntirePostList();
// Logging out the entire list of results collected from the REST API
console.log(entireList);
})
();
The code above returns a complete array of the JSON response from all paginated REST API calls.

You could use a while loop, and decrement limitPerPage if the status code isn't 200:
console.log('REST API is this: ' + apiUrl);
const getPosts = async pageNo => {
let limitPerPage = 20;
let res = { status: 0 }
while (res.status !== 200) {
// rebuild the URL on each pass so the decremented limitPerPage actually takes effect
let requestUrl = apiUrl + `?page=${pageNo}&per_page=${limitPerPage}`;
// fetch only rejects on network errors; an HTTP 404 still resolves, so read the status from the response
res = await fetch(requestUrl)
.then(async r => ({ data: r.ok ? await r.json() : null, status: r.status }))
.catch(err => { console.log(err); return { status: 0 } })
limitPerPage--
}
return res.data
}
const getEntirePostList = async (pageNo = 1) => {
const results = await getPosts(pageNo);
console.log('Retrieving data from API for page : ' + pageNo);
return results.length > 0
? results.concat(await getEntirePostList(pageNo + 1))
: results;
}
(async () => {
const entireList = await getEntirePostList();
console.log(entireList);
})()

Related

Why does this recursive function not run asynchronously?

I have a start(node, array) function that should perform a DFS by traversing an object tree via recursive calls to an API through callMsGraph(token, end) until image properties are found at the end of the tree, at which point they are pushed to array. The function seems like it works, but I can't get the output unless I wrap it in a 2 second setTimeout which indicates the recursion is not being waited on to complete. I would want to play around with async/await more, but it's not at the top-level.
I'm not sure if the nextNode.then is doing anything, or whether callMsGraph() needs to be awaited differently from how I'm doing it. A solution would be much appreciated.
shelfdb.data = async (accessToken) => {
const token = accessToken;
const endpoint = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3/children'
function start(node, array) {
if(node.value.length > 0) {
node.value.forEach(function(child) {
var end = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/' + child.id + '/children';
var nextNode = callMsGraph(token, end);
nextNode.then(function(currResult) {
if (currResult.value.length > 0) {
if ('image' in currResult.value[0]) {
currResult.value.forEach(function(imgChild) {
let img = {
'name': imgChild.name,
'job': imgChild.parentReference.path.split("/")[6],
'path': imgChild.webUrl,
'id': imgChild.id
}
array.push(img);
})
// complete storing images at tail object, go one level up after loop
return;
}
// if no 'image' or value, go into child
start(currResult, array);
}
}).catch(function(e) {
console.error(e.message);
})
})
}
return array;
}
var res = await callMsGraph(token, endpoint); // start recursion
var output = start(res, []);
console.log(output); // only displays value if wrapped in setTimeout
return output; // empty []
}
Each query to the API via callMsGraph() returns an object like this, where subsequent queries are made with the id of each object/folder (as a new endpoint) in value until an object with an image property is found. The MS Graph API requires that folders are expanded at each level to access their children.
{
id: '01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3',
value: [
{
id: '01GNYB5KJMH5T4GXADUVFZRSITWZWNQROS',
name: 'Folder1',
},
{
id: '01GNYB5KMJKILOFDZ6PZBZYMXY4BGOI463',
name: 'Folder2',
}
]
}
This is the callMsGraph() helper:
function callMsGraph(accessToken, graphEndpoint) {
const headers = new Headers();
const bearer = `Bearer ${accessToken}`;
headers.append("Authorization", bearer);
const options = {
method: "GET",
headers: headers
};
return fetch(graphEndpoint, options)
.then(response => response.json())
.catch(error => {
console.log(error);
throw error;
});
}
The rule with promises is that once you opt into one (more likely, are forced into it by a library), all code that needs to block for a result anywhere after it also has to await. You can't "go back" to sync and if even a single piece of the promise chain between where the promise starts and where you want its result isn't awaited, the result will be unreachable*.
Taking a snippet of the code:
function start(node, array) { // not async!
// ..
node.value.forEach(function(child) { // doesn't await!
// ..
nextNode.then(function(currResult) {
// this promise is not hooked up to anything!
start(...) // recurse without await!
There's no await in front of then, start doesn't return a promise and isn't awaited recursively, and forEach has no way to await its callback's asynchronous results, so each promise in the nextNode.then chain is orphaned into the void forever*.
The solution is a structure like this:
async function start(node, array) {
// ..
for (const child of node.value) {
// ..
const currResult = await callMsGraph(token, end);
// ..
await start(...);
array.push(currResult);
}
// returns a promise implicitly
}
// ..
await start(...);
// `array` is populated here
Or Promise.all, which runs in parallel and returns an array (which could replace the parameter array):
function start(node, array) {
return Promise.all(node.value.map(async child => {
const currResult = await callMsGraph(token, end);
// ..
await start(...);
return currResult;
}));
}
I'd be happy to provide a minimal, runnable example, but the code you've provided isn't runnable, so you'll have to massage this a bit to work for you. If you make sure to await everything, you're good to go (and generally avoid mixing .then and async/await; the latter seems easier for this use case).
* (for all practical intents and purposes)
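As a side note on the advice above about not mixing .then and async/await: the callMsGraph helper from the question could be written in a single style. A sketch that keeps the same behavior:
async function callMsGraph(accessToken, graphEndpoint) {
    const headers = new Headers();
    headers.append("Authorization", `Bearer ${accessToken}`);
    try {
        // await the response and its JSON body instead of chaining .then
        const response = await fetch(graphEndpoint, { method: "GET", headers: headers });
        return await response.json();
    } catch (error) {
        console.log(error);
        throw error;
    }
}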
There are a few places where you are not handling the promises returned in your code. The nextNode.then in your forEach loop is just "called"; the next line of code does not wait for it to complete, and the forEach loop finishes executing before the then callbacks run.
I changed your code a bit, but I have no way to check whether it works correctly, since I would need to populate dummy data for callMsGraph. If you encounter any issues, tell me and I'll modify the answer.
shelfdb.data = async (accessToken) => {
const token = accessToken;
const endpoint = 'https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/01GNYB5KPQ57RHLPZCJFE2QMVKT5U3NYY3/children'
const images = [];
async function start(node, array) {
if (node.value.length <= 0) return array; // or === 0 or whatever
for (const child of node.value) {
const end = `https://graph.microsoft.com/v1.0/sites/webgroup.sharepoint.com,23e7ef7a-a529-4dde-81ba-67afb4f44401,0fa8e0f7-1c76-4ad0-9b6e-a485f9bfd63c/drive/items/${child.id}/children`;
const nextNode = await callMsGraph(token, end);
if (nextNode.value.length > 0) {
if ('image' in nextNode.value[0]) {
const mapped = nextNode.value.map(imgChild => {
return {
'name': imgChild.name,
'job': imgChild.parentReference.path.split("/")[6],
'path': imgChild.webUrl,
'id': imgChild.id
}
});
array.push(...mapped);
}
// if no 'image' or value, go into child
await start(nextNode, array);
}
}
return array;
}
var res = await callMsGraph(token, endpoint);
var output = await start(res, []);
console.log(output);
return output;
}
Also, please feel free to add try{} catch{} blocks anywhere you need them; I skipped them.
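For example, the per-child request inside the loop could be wrapped like this (just a sketch of where a try/catch might go):
let nextNode;
try {
    nextNode = await callMsGraph(token, end);
} catch (e) {
    // log the failure and skip this child rather than aborting the whole traversal
    console.error('Request for ' + end + ' failed: ' + e.message);
    continue;
}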

How to set new variable value within a while loop in Node.js?

I am new to node.js and am trying to get an initial page key from an api response header and loop through the pages until the page key header no longer exists, meaning I've reached the last page and the loop can end.
For some reason the pageKey never gets changed in the while loop the way I have it set up:
async function doThis() {
let gotAllPages = false;
let pageKey;
var response = await got(url, options);
try {
pageKey = response.headers['next-page'];
} catch (e) {
gotAllPages = true;
}
while (!gotAllPages) {
var pageOptions = {
method: 'GET',
headers: {
Authorization: `xxx`,
nextPage: pageKey
}
};
response = await got(url, pageOptions);
try {
pageKey = response.headers['next-page'];
console.log(pageKey);
} catch (e) {
console.log("No header value found, loop is done.");
gotAllPages = true;
break;
}
}
}
This ends up going in an infinite loop because the pageKey never gets changed from the first response so it just keeps trying to grab the same page.
Being new to Node.js, I don't really understand if there's a scoping issue with how I've set the variables up or what, so any advice would be appreciated!
A try-catch block does not catch anything when a variable is merely assigned undefined or null, because no error is thrown. Instead of using a try-catch block, use an if statement:
pageKey = response.headers['next-page'];
if (!pageKey) {
console.log("No header value found, loop is done.");
gotAllPages = true;
break;
}
The key access response.headers['next-page']; isn't throwing anything, so the catch where gotAllPages is set never runs.
We can fix this, and tidy up a bit...
async function getAllPages() {
let results = [];
let nextPage = 0;
let options = {
method: 'GET',
headers: { Authorization: `xxx` }
};
while (nextPage !== undefined) {
options.headers.nextPage = nextPage;
let response = await got(url, options);
results.push(response);
nextPage = response.headers['next-page'];
}
return results;
}
This assumes a couple of things: (1) that the API expects nextPage === 0 on the first request, (2) that the API signals no more pages by leaving the next-page header undefined, (3) that the caller wants the full response, rather than some part like response.data or response.body, (4) that the caller catches errors thrown here and handles them.
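A minimal usage sketch under those assumptions (got and url are in scope, and the caller handles errors):
(async () => {
    try {
        const pages = await getAllPages();
        console.log(`Fetched ${pages.length} pages`);
    } catch (err) {
        // per assumption (4), errors thrown by got() are handled by the caller
        console.error('Failed to fetch all pages:', err);
    }
})();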

Checking if image exists produces 403 error

I have a range of hosted images, but their number and URLs can change. However, they always follow the same incrementing naming convention. In order to store the images that exist, I loop through the available images until I encounter an error. At that point I store the results and continue with the operations.
function checkForExistingImage(u, r = [], i = 0){
var url = u + i;
$.get(url).done(function() {
r.push(url);
checkForExistingImage(u, r, i + 1);
}).fail(function(){
//continue working with the 'r' array
});
}
However, this will always result in a 403 (Image not found) error in the console, because the last image checked will never exist.
How can I not trigger this error, or maybe suppress it if needed?
I would definitely rewrite this in a more civilised manner. This function does not log any errors outside of those that are explicitly logged by console.log (which you can remove).
It should be safer to use this as it does not bombard the server with too many requests per second, but you can reduce or remove the timeout if that's not a problem.
function Timeout(time) {
return new Promise(function(resolve) {setTimeout(resolve, time);});
}
async function checkForExistingImages(baseUrl, maxImages, startImage = 0) {
const results = [];
// Some sanity check on params
if(maxImages < startImage) {
// swap the bounds so the loop still runs from the lower to the higher value
let tmp = maxImages;
maxImages = startImage;
startImage = tmp;
}
// from i to max
for(let i=startImage; i<maxImages; ++i) {
// Create image URL, change this as needed
const imageURL = baseUrl + i + ".png";
// `fetch` does not throw usually, but we wanted to avoid errors in console
try {
// Response will have 200/404/403 or something else
const response = await fetch(imageURL);
if(response.status == 200) {
results.push(imageURL);
}
else {
console.info("Image",imageURL,"was removed.");
// stop loading
break;
}
}
// complete failure, some major error occured
catch(e) {
console.warn("Image",imageURL, "failed to send request!");
break;
}
// If you're getting throttled by the server, use timeout
await Timeout(200);
}
return results;
}
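Hypothetical usage, assuming the images are named 0.png, 1.png, and so on under the base URL:
checkForExistingImages('https://example.com/gallery/', 100)
    .then(function (existingImages) {
        console.log('Found ' + existingImages.length + ' images:', existingImages);
    });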

How to make my function wait before it returns another call

I have a service that returns some translations. It makes an apex call, which is async in nature, and returns the translations. Every time I get a set of results I store the translations in session storage, and the next time a call comes in I first check the cached translations and, if they are available, return them. The problem comes when the user sends multiple requests for translations for the same set of values. For that I am keeping track of the apex call already in progress. However, I am not sure how to make my request wait for the previous apex call to end and return the results.
Here is the service code.
let labelQueue = [];
let labelMap;
let requestCustomLabels = function(labelsArr, language) {
return new Promise(function(resolve, reject) {
let labelsArray = [...labelsArr];
let returnedLabels = {};
let cachedLabels = JSON.parse(sessionStorage.getItem("customLabels")) || {};
labelMap = { ...cachedLabels };
if (Object.keys(labelMap).length > 0) {
labelsArray.forEach(item => {
if (Object.keys(labelMap).indexOf(item) > -1) {
labelsArray = labelsArray.filter(label => label !== item);
// push the available labels into returnedLabels
returnedLabels[item] = labelMap[item];
}
});
}
if (Object.keys(returnedLabels).length === labelsArr.length) {
resolve(returnedLabels);
}
if (labelQueue.length && labelsArray.length) {
labelsArray.forEach(item => {
if (labelQueue.indexOf(item) > -1) {
labelsArray = labelsArray.filter(label => label !== item);
}
});
}
if (labelsArray.length) {
// store labels to the queue
labelsArray.forEach(label => {
labelQueue.push(label);
});
// then make a call to apex for the result, store it inside returnedLabels & labelMap, and remove the labels from labelQueue
fetchLabels(labelsArray, returnedLabels, language).then(
labelsReturned => {
returnedLabels = { ...returnedLabels, ...labelsReturned };
resolve(returnedLabels);
}
);
} else {
// wait for apex to return the call;
}
});
};
let fetchLabels = function(labelsArray, returnedLabels, language) {
return new Promise(function(resolve, reject) {
getUserProfile().then(function(userData) {
language = language || userData.language;
getCustomLabel(labelsArray, language)
.then(res => {
let labels = JSON.parse(res).data;
for (const key in labels) {
if (key !== "language" && key !== "totalSize") {
//when the apex call returns remove the labels from the label Queue.
labelQueue = labelQueue.filter(label => label !== key);
returnedLabels[key] = labels[key];
labelMap[key] = labels[key];
}
}
// store labelMap to session storage
sessionStorage.setItem("customLabels", JSON.stringify(labelMap));
resolve(returnedLabels);
})
.catch(function(error) {
console.error("error : ", error);
reject(error);
});
});
});
};
I actually scripted this, mostly for fetching language files that are called from multiple elements on the page in a shadow dom app.
You need to:
Check if the promise is stored.
Save the ajax promise as a variable.
Store the variable, if it's not stored.
Fetch the promise, or returned the stored promise.
Remove the promise.
var ajaxQueue = {};
/**
* @description Avoids unnecessary calls to server
* @param {String} url The api call
* @param {String} method 'GET', 'POST', 'PUT', 'DELETE'
* @param {String} data If sending data through POST/PUT
*/
function queueAJAXCalls(url, method, data) {
var property = getQueueProperty(url, method); // helper defined below
if (firstTimeRequesting(property)) { // 1
var ajaxPromise = someGeneralCodeForFetchingAjaxData(url, method, data) // 2
.then(function(serverData) {
removeFromAJAXQueue(property); // 5
return serverData;
}).catch(function(error) {
removeFromAJAXQueue(property); // 5
console.warn(error);
return false;
});
storeInAJAXQueue(property, ajaxPromise); // 3
}
return getAJAXFromQueue(property); // 4
}
function getQueueProperty(url, method) {
return encodeURIComponent(method + url);
}
function firstTimeRequesting(property) {
return !ajaxQueue[property];
}
function removeFromAJAXQueue(property) {
delete ajaxQueue[property];
}
function storeInAJAXQueue(property, ajaxPromise) {
ajaxQueue[property] = ajaxPromise;
}
function getAJAXFromQueue(property) {
return ajaxQueue[property];
}
Example:
queueAJAXCalls('http://somedomain.com/api/fetchstuff', 'GET')
.then(function(serverResponse) {
// do stuff, like using JSON.parse(serverResponse)
})
Be aware of the danger of parsing the JSON data in one place, namely in someGeneralCodeForFetchingAjaxData, and returning arrays and objects, because that means changing the array/object in one place will change it in all places.
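One way to reduce that risk, sketched here on the assumption that serverData is plain JSON-serializable data, is to hand each caller its own copy instead of the shared cached object:
function getAJAXFromQueue(property) {
    return ajaxQueue[property].then(function (serverData) {
        // a JSON round-trip (or structuredClone) gives every caller an independent copy
        return (serverData && typeof serverData === 'object')
            ? JSON.parse(JSON.stringify(serverData))
            : serverData;
    });
}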
Instead of implementing this, I guess you could also use a Service Worker.

Know when jqXHRs of an array are all completed

I'm trying to run some code once all the jqXHR elements of an array are completed (have either succeeded or failed).
You can see the full code here: http://jsfiddle.net/Lkjcrdtz/4/
Basically I'm expecting the always hook from here:
$.when
.apply(undefined, reqs)
.always(function(data) {
console.log('ALL ALWAYS', data);
});
to run when all the requests that were piled up there have either succeeded or failed. Currently, you can observe in the console that ALL ALWAYS is logged earlier.
That happens because $.when() is fail-fast: as soon as one of the deferreds is rejected, the aggregate rejects and its always handler runs without waiting for the remaining requests. A simple solution for modern browsers would be to use the newer fetch() API along with Promise.all():
var makeReq = function(url, pos) {
var finalUrl = url + pos;
// intentionally make this request a failed one
if (pos % 2 === 0) {
finalUrl = "https://jsonplaceholder.typicode.com/423423rfvzdsv";
}
return fetch(finalUrl).then(function(resp) {
console.log('Request for user #', pos);
// if successful request return data promise otherwise return something
// that can be used to filter out in final results
return resp.ok ? resp.json() : {error: true, status: resp.status, id: pos }
})
};
// mock API
var theUrl = "https://jsonplaceholder.typicode.com/users/";
var reqs = [];
for (var i = 1; i <= 5; i++) {
reqs.push(makeReq(theUrl, i));
}
Promise.all(reqs).then(function(results) {
console.log('---- ALL DONE ----')
// filter out good requests
results.forEach(function(o) {
if (o.error) {
console.log(`No results for user #${o.id}`);
} else {
console.log(`User #${o.id} name = ${o.name}`);
}
})
})
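A related option: Promise.allSettled() (in browsers that support it) waits for every promise regardless of whether some reject, which avoids the fail-fast behavior of Promise.all() and $.when(). A minimal sketch using the same reqs array:
Promise.allSettled(reqs).then(function (outcomes) {
    console.log('---- ALL SETTLED ----');
    outcomes.forEach(function (outcome, i) {
        if (outcome.status === 'fulfilled') {
            console.log('Request #' + (i + 1) + ' resolved with', outcome.value);
        } else {
            console.log('Request #' + (i + 1) + ' failed:', outcome.reason);
        }
    });
});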
