Execute a function when all XMLHttpRequests are complete - javascript

BRIEF
I have a website which gets data from an API, so it makes some POST and GET requests to a server once the website is opened. Let us assume it makes 5 different XMLHttpRequests as soon as the page loads, but we cannot know exactly how long each call takes from start to finish.
FOR EXAMPLE
A call to one endpoint takes 2 seconds to complete and a call to another takes 1 second, so after 2 seconds both calls are finished.
TO-DO
I want to execute a function after all the XMLHttpRequests are complete, because I am using window.performance.getEntries() to collect all the requests initiated by the webpage and send them to my server. I am working on a web analytics project in which I need the time taken by each network request so I can turn the data into a chart.
EXTRA-QUESTION OR HINT
Is there any event that can be attached to the window, as shown below, that listens for all the network calls and executes my piece of code when all of them are finished?
window.addEventListener("load", function (event) {
  // execute some code here
});
Inside the load event I am not able to capture all the requests with performance.getEntries(), because load fires before the AJAX calls are finished.
So, as asked above: is there any trick, event, or anything else in JavaScript that lets us wait until all the XMLHttpRequests are finished and then execute some code?

WORKING SOLUTION
We can track the browser's AJAX calls using the code below:
const ajaxCallStatus = () => {
  window.ajaxCalls = 0;      // number of AJAX calls currently in flight
  window.ajaxEndpoints = []; // keeps track of all AJAX call URLs (endpoints)
  const origOpen = XMLHttpRequest.prototype.open;
  XMLHttpRequest.prototype.open = function() {
    window.ajaxCalls++;
    this.addEventListener('load', (event) => {
      if (!window.ajaxEndpoints.includes(event.currentTarget.responseURL)) {
        window.ajaxEndpoints.push(event.currentTarget.responseURL);
      }
      window.ajaxCalls--;
      if (window.ajaxCalls === 0) { // all tracked AJAX calls have finished
        addData(window.ajaxEndpoints, origOpen);
      }
    });
    origOpen.apply(this, arguments);
  };
};
ADD DATA FUNCTION - Used for sending data to some backend.
function addData(arr, origOpen) {
  XMLHttpRequest.prototype.open = origOpen; // restore the original open() so further calls are not tracked
  axios.post('url', arr) // replace 'url' with your backend endpoint
    .then((res) => console.log(res))
    .catch((err) => console.log(err));
}
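For completeness, a rough usage sketch, assuming ajaxCallStatus() is installed before the page's own requests start; filtering performance.getEntries() by the collected endpoints is one way to build the analytics payload the question describes:
// Install the interceptor as early as possible, e.g. in a <script> tag in <head>.
ajaxCallStatus();

// Inside addData you can then read the timing entries the question is after:
// performance entries carry the request URL in their "name" property, so the
// tracked endpoints can be filtered out of window.performance.getEntries().
const ajaxTimings = window.performance.getEntries()
  .filter((entry) => window.ajaxEndpoints.includes(entry.name));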

I would suggest wrapping your requests in promises and then using Promise.all(...), like so:
const makeRequest = () => {
  return new Promise((resolve, reject) => {
    const xhttp = new XMLHttpRequest();
    xhttp.onreadystatechange = function() {
      if (this.readyState === 4) {      // request finished
        if (this.status === 200) {
          resolve(xhttp.responseText);  // on success, resolve the promise
        } else {
          reject(new Error('Request failed with status ' + this.status)); // on failure, reject it
        }
      }
    };
    xhttp.open("GET", "filename", true); // adjust to your needs, this is just an example
    xhttp.send();
  });
};

// This waits until all promises are resolved (must run inside an async function)
const [result1, result2] = await Promise.all([makeRequest(), makeRequest()]);
// do stuff here with your results
console.log(result1);
console.log(result2);
PS: The basic AJAX example is from here; just swap it for your own and add parameters or similar to make the requests you actually need.
OR
You could use a library like Axios for Ajax requests, which already returns Promises by default. Check it out here: https://github.com/axios/axios
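As a rough sketch of the same idea with axios (the endpoints /api/a and /api/b are placeholders):
// axios.get() already returns a promise, so the requests can be combined directly.
Promise.all([axios.get('/api/a'), axios.get('/api/b')])
  .then(([resA, resB]) => {
    // all requests are done here; e.g. read window.performance.getEntries() now
    console.log(resA.data, resB.data);
  })
  .catch((err) => console.log(err));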

Related

Firebase: Calling Cloud Function From Cloud Function

I am running into an issue with Firebase Cloud Functions. I have an onWrite cloud function that triggers a sequence of events. I have a path for requests that the onWrite cloud function is tied to. When that cloud function executes, it deletes the new request from the requests path and pushes the request into a render path/queue that will be used client side for rendering UI elements/data. Once the data has been written to the render path, I call a vanilla JavaScript function that is not tied to any cloud events. The vanilla JavaScript function is supposed to reach out to an external API and fetch some data to later be updated on the render object that was pushed into the render path.
The problem is that the vanilla JavaScript function never executes. I have been looking all over the web to figure out why this is happening but can't seem to find the reason. I am on the Flame plan, so outbound API requests should be allowed to my knowledge. Here is an example of my code:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const request = require('request');

admin.initializeApp();

exports.requestModule = functions.database.ref('/requests').onWrite((change, context) => {
  // Create reference to database
  let db = admin.database();
  if (context && context.auth && context.auth.uid) {
    const afterData = change.after.val();
    let uid = context.auth.uid;
    let cleanData = afterData[uid];
    cleanData.status = "loading";
    // Remove the requested module from the requests path
    let cleansePath = db.ref('/requests/' + uid);
    cleansePath.remove().then((snapshot) => {
      return true;
    }).catch((error) => {
      console.log(error);
      return false;
    });
    // Add requested module to the render path
    let renderPath = db.ref('/render/' + uid);
    renderPath.push(cleanData).then((snapshot) => {
      let val = snapshot.val();
      let key = snapshot.key;
      // Trigger the get weather api call
      getWeather(uid, key, val);
      return true;
    }).catch((error) => {
      console.log(error);
      return false;
    });
  }
});

// Fetches data from external api
function getWeather (uid, key, obj) {
  console.log('Fetching weather!');
  let db = admin.database();
  request('https://api.someweathersite.net/forecast/', (error, response, body) => {
    if (!error && Number(response.statusCode) === 200) {
      console.log('error:', error);
      console.log('statusCode:', response && response.statusCode);
      console.log('body:', body);
      obj.data = body;
      obj.status = 'loaded';
      // Set data from api response in render object to be shown client side
      let render = db.ref('/render/' + uid + '/' + key);
      render.set(obj).then(() => {
        return true;
      }).catch((error) => {
        console.log(error)
        return false;
      });
    }
  });
}
The console.log message at the top of the getWeather function never appears, so I don't think getWeather ever executes.
If I put the API call directly in the onWrite requestModule function, the API call works. However, when requestModule calls an external function, that function never runs. I basically want requestModule to handle all requests, and I plan to have a module dispatcher that decides which module function/API the data should be fetched from; that's why I don't want to keep the API call inside requestModule. Any idea why this is happening or how I can get this working?
getWeather is performing asynchronous work to fetch some data, but it's not returning a promise to indicate when that work is complete. In fact, none of the async work you're performing here is correctly using the promises returned by the various API calls. It's not sufficient to simply use then() on each promise.
You need to keep track of all of the async work, and return a single promise that resolves only after all the work is complete. Otherwise, Cloud Functions may terminate and clean up your function before the work is complete. (Note that it's not deterministic which work may or may not actually complete before forced termination, but the only way to ensure that all work completes is through that single promise you return.)
You may want to watch my tutorials on using promises in Cloud Functions to get a better handle on what you need to do to make your functions work correctly: https://firebase.google.com/docs/functions/video-series/
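For illustration, a rough sketch of the shape this could take (not the asker's exact code): getWeather wraps the request callback in a promise that resolves after the database write, and requestModule returns a single promise covering both the removal and the render/fetch chain.
// Sketch only: getWeather now returns a promise that resolves once the
// weather data has been written back under /render/{uid}/{key}.
function getWeather(uid, key, obj) {
  return new Promise((resolve, reject) => {
    request('https://api.someweathersite.net/forecast/', (error, response, body) => {
      if (error || Number(response.statusCode) !== 200) {
        return reject(error || new Error('Unexpected status: ' + response.statusCode));
      }
      obj.data = body;
      obj.status = 'loaded';
      resolve(admin.database().ref('/render/' + uid + '/' + key).set(obj));
    });
  });
}

exports.requestModule = functions.database.ref('/requests').onWrite((change, context) => {
  if (!(context && context.auth && context.auth.uid)) {
    return null; // nothing to do
  }
  const db = admin.database();
  const uid = context.auth.uid;
  const cleanData = change.after.val()[uid];
  cleanData.status = 'loading';

  const removal = db.ref('/requests/' + uid).remove();
  const renderAndFetch = db.ref('/render/' + uid).push(cleanData)
    .then((ref) => getWeather(uid, ref.key, cleanData));

  // Returning this single promise tells Cloud Functions to wait for all the work.
  return Promise.all([removal, renderAndFetch]);
});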

Requests through service-worker are done twice

I've written a simple service worker to defer requests that fail for my JS application (following this example), and it works well.
But I still have a problem when requests succeed: the requests are made twice, once normally and once by the service worker, due to the fetch() call I guess.
It's a real problem, because when the client wants to save data, it is saved twice...
Here is the code:
const queue = new workbox.backgroundSync.Queue('deferredRequestsQueue');

const requestsToDefer = [
  { urlPattern: /\/sf\/observation$/, method: 'POST' }
]

function isRequestAllowedToBeDeferred (request) {
  for (let i = 0; i < requestsToDefer.length; i++) {
    if (request.method && request.method.toLowerCase() === requestsToDefer[i].method.toLowerCase()
        && requestsToDefer[i].urlPattern.test(request.url)) {
      return true
    }
  }
  return false
}

self.addEventListener('fetch', (event) => {
  if (isRequestAllowedToBeDeferred(event.request)) {
    const requestClone = event.request.clone()
    const promiseChain = fetch(requestClone)
      .catch((err) => {
        console.log(`Request added to queue: ${event.request.url}`)
        queue.addRequest(event.request)
        event.respondWith(new Response({ deferred: true, request: requestClone }))
      })
    event.waitUntil(promiseChain)
  }
})
How can I do this properly?
EDIT:
I think I should not re-fetch() the request (because THIS is the cause of the second request), but instead wait for the response of the initial request that triggered the fetch event; however, I have no idea how to do that. The fetch event seems to offer no way to wait for (and read) the response.
Am I on the right track? How can I know when the request that triggered the fetch event has a response?
You're calling event.respondWith(...) asynchronously, inside of promiseChain.
You need to call event.respondWith() synchronously, during the initial execution of the fetch event handler. That's the "signal" to the service worker that it's your fetch handler, and not another registered fetch handler (or the browser default) that will provide the response to the incoming request.
(While you're calling event.waitUntil(promiseChain) synchronously during the initial execution, that doesn't actually do anything with regards to responding to the request—it just ensures that the service worker isn't automatically killed while promiseChain is executing.)
Taking a step back, I think you might have better luck accomplishing what you're trying to do if you use the workbox.backgroundSync.Plugin along with workbox.routing.registerRoute(), following the example from the docs:
workbox.routing.registerRoute(
  /\/sf\/observation$/,
  workbox.strategies.networkOnly({
    plugins: [new workbox.backgroundSync.Plugin('deferredRequestsQueue')]
  }),
  'POST'
);
That will tell Workbox to intercept any POST requests that match your RegExp, attempt to make those requests using the network, and if it fails, to automatically queue up and retry them via the Background Sync API.
Piggybacking on Jeff Posnick's answer, you need to call event.respondWith() and include the fetch() call inside its async function().
For example:
self.addEventListener('fetch', function(event) {
  if (isRequestAllowedToBeDeferred(event.request)) {
    event.respondWith(async function() {
      const promiseChain = fetch(event.request.clone())
        .catch(function(err) {
          return queue.addRequest(event.request);
        });
      event.waitUntil(promiseChain);
      return promiseChain;
    }());
  }
});
This will avoid the issue you're having with the second ajax call.

Multiple JavaScript Ajax requests seem to run synchronously

This is the first part of my code. Here I have a list of all the API calls I need to make to retrieve data from the MySQL server.
function ProcessUrls() {
  requests = [];
  var urls = ['url1', 'url2', 'url3', 'url4'];
  for (i = 0; i < urls.length; i++) {
    requests.push(new ProcessUrl(urls[i]));
    console.log('Invoking the functions');
  }
}
The 'Invoking the functions' log in the code above gets printed 4 times almost instantly, as you would expect.
Here I process the URLs and make the actual API calls:
function ProcessUrl(url) {
  var http = new XMLHttpRequest();
  http.open("GET", url, true);
  http.send(null);
  console.log('Function being called');
  http.onreadystatechange = function() {
    if (http.readyState == 4 && http.status == 200) {
      console.log('Done doing the API call');
    }
  }
};
The "function being called" log gets called almost instantly 4 times as well, the problem is that the 'Done doing the API call' seems to run synchronous since the logs appears slowly one a time in a constant rhythm of ~0.5 seconds.
Is there a way to run these onreadystatechange parallel? This would speed up the process greatly.
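One way to check where the serialisation happens is to timestamp when each request is sent and when its response arrives: if all four go out together but the completions come back one by one, the delay is on the server side rather than in the browser. A small diagnostic sketch, using the same placeholder URLs:
// Sketch: log send and completion times to see whether the requests really go out in parallel.
// 'url1'..'url4' are placeholders, as in the question.
['url1', 'url2', 'url3', 'url4'].forEach(function (url) {
  var sentAt = performance.now();
  var http = new XMLHttpRequest();
  http.onreadystatechange = function () {
    if (http.readyState === 4) {
      console.log(url, 'sent at', sentAt.toFixed(0) + 'ms,',
                  'finished at', performance.now().toFixed(0) + 'ms');
    }
  };
  http.open("GET", url, true);
  http.send(null);
});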

Async calls using AngularJS

We are making multiple HTTP requests using Angular:
$scope.GetTest1 = function () {
  $http.get("/test/GetTest1/").success(function (response) {
    $scope.res = response.aaData;
  });
}

$scope.GetTest2 = function () {
  $http.get("/test/GetTest2/").success(function (response) {
    $scope.res = response.aaData;
  });
}

$scope.GetTest3 = function () {
  $http.get("/test/GetTest3/").success(function (response) {
    $scope.res = response.aaData;
  });
}

// This is called from an onclick of a button
$scope.LoadAll = function () {
  $scope.GetTest1();
  $scope.GetTest2();
  $scope.GetTest3();
}
We assumed these were all called asynchronously. However, we have log4net enabled and log the datetime at which each GET is received on the server, and the times for all 3 are:
19:05:26
19:05:27
19:05:28
This was an unexpected surprise, as we assumed all the times would fall within the same second, i.e. async.
Not sure if we're missing something.
Sorry, the question is: how do we make these calls asynchronous?
I suppose the reason is on the server side. I had almost the same result when the server could serve only one request from a given client at a time. If the server takes about one second to fulfil each of your $http requests, that would produce exactly this pattern. Check your network statistics: if the requests were issued simultaneously but were not served immediately, then it is a server-side problem.
You can easily track this in the browser devtools' network timeline.
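If the goal is also to run some code once all three responses are back, a rough sketch using AngularJS's $q.all (reusing the same endpoints, and assuming $q is injected into the controller) could look like this:
// Sketch: issue the three GETs together and react once all have responded.
$scope.LoadAll = function () {
  $q.all([
    $http.get("/test/GetTest1/"),
    $http.get("/test/GetTest2/"),
    $http.get("/test/GetTest3/")
  ]).then(function (responses) {
    // responses are in the same order as the requests;
    // each body is available as responses[i].data.aaData
    $scope.res = responses[0].data.aaData;
  });
};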

Abort elasticsearch request using elastic.js

Is there a way to cancel requests/queries to Elasticsearch using elasticjs? The web app I am working on performs a request/query every 5 seconds, but I would like to cancel the request if for some reason the response doesn't show up in 5 seconds (so the browser doesn't pile up a bunch of requests that are unnecessary since the queries are happening repeatedly). I understand this would not prevent Elasticsearch from completing the query, but I would like to at least cancel the request in the browser.
Example:
var request = ejs.Request().doSearch();
var dataFromElasticsearch;

request.then(function (data) {
  dataFromElasticsearch = data;
});

setTimeout(function () {
  if (!dataFromElasticsearch) {
    // do something here to cancel request
  }
}, 5000)
Per documentation for elasticsearch.js (3.1, at the time of writing):
...calling the API will return an object (either a promise or just a plain object) which has an abort() method. Calling that abort method ends the HTTP request, but it will not end the work Elasticsearch is doing.
Specifically for your example:
setTimeout(function () {
  if (!dataFromElasticsearch) {
    request.abort();
  }
}, 5000)
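To round out the pattern, a small sketch (an addition, not from the documentation) that also clears the timer once data arrives, so abort() only fires when the response really is late:
var request = ejs.Request().doSearch();
var dataFromElasticsearch;

// Cancel the browser-side request if nothing has come back after 5 seconds.
var abortTimer = setTimeout(function () {
  if (!dataFromElasticsearch) {
    request.abort(); // ends the HTTP request, but not the work Elasticsearch is doing
  }
}, 5000);

request.then(function (data) {
  dataFromElasticsearch = data;
  clearTimeout(abortTimer); // the response arrived in time, no need to abort
});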
