I am trying to achieve the following functionality:
execute call back
resolve promise
check output
if not correct execute again
I have 'mimicked' the scenario with a timer, this reruns a script that makes a call to backend database for some information:
// Kicks off a polling timer that re-runs the DB status check every msTime ms,
// and returns a jQuery promise that resolves once the check reports finished.
// NOTE(review): the closing "}" of this function is missing in the quoted snippet.
_runCheckScript: function(bStart, bPreScript){
// Capture the controller; "this" is different inside the setInterval callback.
var oController = this;
// Settled by _checkScript (resolve) or by the 2-minute timeout below (reject).
var scriptTimerdeferred = $.Deferred();
var promise = scriptTimerdeferred.promise();
// Default both flags to true when the caller omits them.
if(typeof(bStart) === "undefined"){
bStart = true;
}
if(typeof(bPreScript) === "undefined"){
bPreScript = true;
}
// if the HANA DB is not stopped or started, i.e. it is still starting up or shutting down
// check the status again every x number of seconds as per the function
var msTime = 10000;
if(!bPreScript){
this._pushTextIntoConsoleModel("output", {"text":"The instance will be 'pinged' every " + msTime/1000 + " seconds for 2 minutes to monitor for status changes. After this, the script will be terminated."});
}
if(bPreScript){
var timesRun = 0;
var commandTimer = setInterval( function () {
timesRun += 1;
// Give up after 12 ticks (12 * 10 s = 2 minutes) and stop polling.
if(timesRun === 12){
scriptTimerdeferred.reject();
clearInterval(commandTimer);
}
// send the deferred to the next function so it can be resolved when finished
// NOTE(review): a new check is fired on every tick regardless of whether the
// previous one has answered, so several backend calls can be in flight at
// once — the overlap the question is asking how to avoid. The interval is
// also never cleared when scriptTimerdeferred resolves; confirm intent.
oController._checkScript(scriptTimerdeferred, bStart, bPreScript);
}, msTime);
}
// Wrap the timer promise in a second deferred that the caller waits on.
// NOTE(review): only .done is forwarded — a rejection (the 2-minute timeout)
// never settles this outer deferred.
return $.Deferred(function() {
// Inside $.Deferred(fn), "this" is the deferred being constructed.
var dbcheckDeffered = this;
promise.done(function () {
dbcheckDeffered.resolve();
console.log('Check finished');
oController._pushTextIntoConsoleModel("output", {"text":"Check finished."});
});
});
The script it calls has its own promise, as it calls another function:
// Runs one status check against the backend and resolves scriptTimerdeferred
// when the reported status is acceptable (or unconditionally for a pre-script check).
_checkScript: function(scriptTimerdeferred, bStart, bPreScript){
var oProperties = this.getView().getModel("configModel");
var oParams = oProperties.getProperty("/oConfig/oParams");
// Settled by _sendAWSCommand when the backend call completes.
var deferred = $.Deferred();
var promise = deferred.promise();
// Statuses that mean "still in progress" — do not resolve the timer yet.
var sCompareStatus1 = "inProg";
var sCompareStatus2 = this._returnHanaCompareStatus(bStart, bPreScript);
var sCompareStatus3 = this._returnHanaCompareStatus3(bStart, bPreScript);
// NOTE(review): as quoted, the "//some params" comment swallows the closing
// "};" on this line — params were elided for the post.
var params = {//some params};
// Send the command
this._sendAWSCommand(params, deferred);
// When command is sent
// NOTE(review): _sendAWSCommand apparently resolves the deferred with the
// controller as its value — confirm against its implementation.
promise.done(function (oController) {
console.log('back to db check script');
var oCommandOutputModel = oController.getView().getModel("commandOutput");
var sStatus = oCommandOutputModel.Status;
// check that it's not in the wrong status for a start/stop
// or if it's a pre script check -> pre script checks always resolve first time
// NOTE(review): "&&" binds tighter than "||", so bPreScript alone satisfies
// the whole condition — presumably intended.
if(sStatus !== sCompareStatus1 && sStatus !== sCompareStatus2 && sStatus !==sCompareStatus3|| bPreScript){
scriptTimerdeferred.resolve();
}
});
},
This works, however what it does is:
set a timer to call the first script every x seconds (as the data is currently changing - a server is coming online)
the script runs and calls another function to get some data from the DB
when the call for data is resolved (complete) it comes back to 'promise.done' on the checkScript and only resolves the timer promise if it meets certain criteria
all the while, the initial timer is resending the call as eventually the DB will come online and the status will change
I am wondering if there is a better way to do this as currently I could have, for example, 3 calls to the DB that go unresolved then all resolve at the same time. I would prefer to run a command, wait for it to resolve, check the output, if it is not right then run command again.
Thanks!
I think what you want to do can be achieved carefully reading what explained in these links:
Promise Retry Design Patterns
In javascript, a function which returns promise and retries the inner async process best practice
See this jsfiddle
// Retry pattern: build a chain of up to "max" attempts. Starting from a
// rejected promise, each .catch(attempt) performs a (re)try and each
// .then(test) validates the value, throwing to force another round.
var max = 5;
var p = Promise.reject();
for(var i=0; i<max; i++) {
p = p.catch(attempt).then(test);
}
// After the retries are exhausted, handle the final outcome either way.
p = p.then(processResult).catch(errorHandler);
// Simulated flaky operation: draws a random number in [0, 1) and "fails"
// (throws the draw) 80% of the time; otherwise returns it.
function attempt() {
  const draw = Math.random();
  if (draw >= 0.8) return draw;
  throw draw;
}
// Validation step: accepts values of at least 0.9, rejects (throws) anything
// smaller so the retry chain tries again.
function test(val) {
  if (val >= 0.9) {
    return val;
  }
  throw val;
}
// Success handler for the retry chain: logs the value that passed validation.
function processResult(value) {
  console.log(value);
}
// Failure handler: logs the last rejected value once all retries are exhausted.
function errorHandler(failure) {
  console.error(failure);
}
It retries the promise repeatedly for as long as the condition is not satisfied. Your condition is the point where you said "check the output": if the check fails, retry the promise. Be careful to set a limit, though — promises consume memory. If your API/service/server is down and you don't set a threshold, you could build up an endless chain of promises that never stops.
Related
I have an array of webworkers, called workers. I'm initiating them all in a single function, called activate. The problem is, I want to have the activate return the values that are posted by the worker. I either want it to return a promise of some kind or wait until they are all done.
So the code could be:
// the web workers add stuff in this array with onmessage()
// the web workers add stuff in this array with onmessage()
var globalArray = [];
// Desired API: kick off every worker and get back something awaitable that
// yields all of their results.
function activate(){
for(var i = 0; i < workers.length; i++){
workers[i].postMessage('do something');
}
// NOTE(review): placeholder, not real code — producing this value is the
// question being asked.
return // Promise or filled globalArray;
}
So I could use it like this:
var values = await activate();
I don't want the workers to call a separate function once the last worker is finished. Is there any way I can achieve this?
What you want to do is to create the Promise, and inside of the function of the Promise, initiate all the workers and check when the last ends to call the resolve function of the promise, and return this promise in your activate function.
Would be something like this:
// the web workers add stuff in this array with onmessage()
var globalArray = [];

// Starts every worker and returns a Promise that resolves — with the array of
// worker results — only after the last worker has posted its message back.
function activate() {
  return new Promise(function (resolve, reject) {
    var received = 0;
    var results = [];
    var onResult = function (message) {
      received++;
      // Mirror each result into the shared global list…
      globalArray.push(message.data);
      // …and into the local array the promise resolves with.
      results.push(message.data);
      // Once every worker has reported, settle the promise with the results.
      if (received >= workers.length) {
        resolve(results);
      }
    };
    workers.forEach(function (worker) {
      worker.onmessage = onResult;
      worker.postMessage('do something');
    });
  });
}
The code has not been tested for now, but hope you get the idea.
one way to do this is to wrap everything in a promise,
const workers = [ new Worker("./worker1.js"), new Worker("./worker2.js")];
// Starts every worker and resolves with the array of their results only once
// ALL of them have replied.
// Fix: the original called resolve(result) synchronously at the end of the
// executor, so the promise fulfilled before any worker had responded and
// awaiters saw an (initially) empty array. We now count replies and resolve
// on the last one. The handler is also attached BEFORE postMessage so a fast
// reply cannot be missed.
const activate = () => {
  return new Promise((resolve, reject) => {
    let result = [];
    let settledCount = 0;
    for (let i = 0; i < workers.length; i++) {
      workers[i].onmessage = function (e) {
        result.push(e.data);
        settledCount += 1;
        // Last worker just reported — hand back the complete result set.
        if (settledCount === workers.length) {
          resolve(result);
        }
      };
      workers[i].postMessage("do something");
    }
  });
};
// Demo driver: waits for every worker result, then prints the collected array.
async function f() {
  const collected = await activate();
  console.log(collected);
}
f();
I'm totally new to JS having jumped in a few days ago to try make a chrome extension, so sorry if this is a simple problem, but I can't seem to figure it out.
My original function was to simply download an image and increment the stored count by 1 and add on the file size. However on a page of images it hit the write limits of chrome so I decided to count the values and write them at the end.
Initially the return value happened much later than when the function was executed (so it returned nothing), so I looked up how to fix it and got it working with a callback. However, although it waits for the callbacks, the code just continues past the callbacks and the part afterwards is executed before anything else, meaning the final count will always be 0.
// Loop through all urls
// NOTE(review): the loop starts at index 1, skipping all_urls[0] — presumably
// deliberate (the first entry may not be an image); confirm against the caller.
var approx_filesize = 0;
for(var i = 1; i < all_urls.length; i++){
var image_url = all_urls[i];
// _download_image is asynchronous: each completion callback fires later,
// after this loop — and the alert below — have already run.
_download_image(image_url, folder_name, function(item){
approx_filesize += parseInt(item);
});
}
// This happens before any _download_image functions complete
alert('end' + approx_filesize);
// Write to storage
// NOTE(review): approx_filesize is still 0 here for the same reason — the
// totals are read before any download callback has added to them.
chrome.storage.sync.get({
download_count: 0,
download_size: 0
}, function(items) {
chrome.storage.sync.set({
download_count: parseInt(items.download_count) + all_images_data.length - 1,
download_size: parseInt(items.download_size) + approx_filesize
}, function() {
});
});
I just tried moving the loop into its own callback function and still had no luck, the alert runs before the first function completes.
// Attempted fix: wrap the loop in a function and invoke a callback at the end.
// NOTE(review): this does not help — callback(approx_filesize) still runs
// synchronously, before any _download_image completion has fired, so it still
// reports 0.
function image_url_loop_callback(callback, folder_name, all_urls){
var approx_filesize = 0;
for(var i = 1; i < all_urls.length; i++){
var image_url = all_urls[i];
_download_image(image_url, folder_name, function(filesize){
approx_filesize += parseInt(filesize);
});
}
// Runs immediately after scheduling the downloads, not after they finish.
callback(approx_filesize);
}
image_url_loop_callback(function(approx_filesize){
alert(approx_filesize);
}, folder_name, all_urls);
How do I make it so that the loop completes before anything else is done?
Edit: Got it working with promise, here's the adjusted code:
// Working version: a Promise that resolves only after every download callback
// has reported in, deferring the storage write until then.
new Promise( function(resolve, reject) {
// NOTE(review): count starts at 1 and the loop starts at i = 1, so the
// promise resolves after all_urls.length - 1 callbacks. With fewer than two
// urls it never resolves, and reject is never used — a failed download would
// stall the chain. Worth confirming those cases cannot occur.
var count = 1;
var num_items = all_urls.length;
var approx_filesize = 0;
for(var i = 1; i < num_items; i++){
var image_url = all_urls[i];
_download_image(image_url, folder_name, function(item){
approx_filesize += parseInt(item);
count ++;
// Last download accounted for — hand both totals to .then().
if(count == num_items){
resolve([num_items, approx_filesize]);
}
});
}
}).then( function(stuff) {
var num_items = stuff[0];
var approx_filesize = stuff[1];
// Single read-modify-write of the running totals, done once rather than per
// image, to stay under chrome.storage.sync write-rate limits.
chrome.storage.sync.get({
download_count: 0,
download_size: 0
}, function(items) {
chrome.storage.sync.set({
download_count: parseInt(items.download_count) + num_items,
download_size: parseInt(items.download_size) + approx_filesize
}, function() {
});
});
});
Basically, you have to handle the asynchronous aspect of JavaScript.
To do so, you have to use a Promise.
This works this way:
// Illustrative skeleton only.
// NOTE(review): the executor must call the resolve/reject functions it
// receives as parameters, otherwise the .then() handler never fires.
new Promise( () => {
// My asynchronous code
}).then( () => {
// My code which need to wait for the promise resolution.
});
If you are working with only the latest versions of browsers, you can also have a look at async/await keywords which make asynchronous handling much easier than regular promises (but still are promises).
EDIT: As this answer required further explanation and proper code snippets, I edited it to answer a comment.
This example maybe easier to understand:
// Demonstrates that a promise executor runs synchronously while .then()
// callbacks run later: prints "Hello" immediately and "World!" ~4 s after.
let myFoo = "Hello";
// Fix: the original assigned to an undeclared "test", creating an implicit
// global (and a ReferenceError in strict/module code). Declared with const.
const test = new Promise( (resolve) => {
  // Executor body runs right away, before the timer.
  console.log(myFoo);
  myFoo = "World!";
  setTimeout(() => {
    resolve(); // settle the promise after 4 seconds
  }, 4000);
}).then( () => {
  console.log(myFoo); // by now myFoo has been reassigned to "World!"
});
This will print "Hello" immediately, and "World!" 4 seconds after.
This is how you work with promises. You can perfectly edit variables which are defined in a scope outside of the promise. Please don't use var, just stick to let and define a decent scope.
Due to javascript's async nature you have to use promises:
https://developers.google.com/web/fundamentals/getting-started/primers/promises
I have an application that is sending a http request that returns a promise everytime the user types. I have it debouncing every 500ms. Sometimes the api I am requesting takes a long time to respond. For example, I make a search request for a that takes a long time to respond but then the user continues typing to complete the query of a+x which resolves almost immediately but the results of a+x get overridden by the previous request of just a.
TL;DR: if new promise is called before current resolves, how to cancel current
Create a variable that counts your requests:
// Monotonically increasing id of the most recently started request.
var effectiveRequestNumber = 0;

// Fires doSomething() and applies its response only if no newer request has
// been started in the meantime — stale responses are silently dropped.
function asyncRequest() {
  var myTicket = ++effectiveRequestNumber; // this call's own request number
  doSomething().then(function (response) {
    // A later call bumped the counter past our ticket → this result is stale.
    var isLatest = effectiveRequestNumber === myTicket;
    if (isLatest) {
      applyResponse(response);
    }
  });
}
The way I usually handle overlapping queries where I only want the results of the last one is to remember something that I can check in the callback.
You haven't quoted any code which makes it tricky to help, but here's an example:
"use strict";
// NOTE: Will only run if your browser supports promises.
// Scoping function to avoid globals
(function() {
// We keep track of the most recent promise
var lastSomethingRequest = null;
// Our function that does something async
function doSomething(value) {
console.log("doing request for " + value);
// Start the async, remember the promise
// The random timeout (0–499 ms) makes requests complete out of order on purpose.
var p = new Promise(function(resolve) {
setTimeout(function() {
resolve("resolve for " + value);
}, Math.floor(Math.random() * 500));
});
// Remember that as the most recent one
lastSomethingRequest = p;
p.then(function(result) {
// Use the result only if it's the most recent
// Identity check: p is still "current" only if no later call overwrote the slot.
if (lastSomethingRequest === p) {
console.log("Use this result: " + result);
lastSomethingRequest = null; // Release the promise object
} else {
console.log("Disregard outdated result: " + result);
}
});
}
// Generate 5 requests in a row that will complete in varying
// amounts of time, where we only want the result of the last one
for (var n = 0; n < 5; ++n) {
doSomething(n);
}
})();
I've searched high and low but I just can't seem to wrap my head around q.defer() and creating my own promise.
I have a service getDataService which does exactly that - $http.gets data from a REST server. However only one of each variable can be sent at a time, so if user wants to query server for two entities and return the entire associated data they must send two requests. Because of this I had to use a method which kept i as the actual count (closure) which then runs my get data function the appropriate amount of times:
// Fires one getDataFromREST call per entity, passing the loop index so each
// request carries its own "i".
// NOTE(review): nothing is returned, so callers receive undefined — the root
// cause of the .then() failure in the controller code below.
keepICorrect: function (security) {
var self = this;
for (var i = 0 ; i < entity.length; i++) {
self.getDataFromREST(security, i);
}
},
I call this from my main controller as a promise :
// Controller-side usage — fails because keepICorrect returns undefined, so
// myDataPromise.then(...) throws.
// NOTE(review): the braces in this quoted snippet are mismatched — the
// "}, function (error) {" belongs to a .then(success, error) pair that is no
// longer syntactically intact as posted.
$scope.apply = function (security) {
var myDataPromise = getDataService.keepICorrect(security);
myDataPromise.then(function () {
//DO STUFF
}, 1);
}, function (error) {
alert("Error Retrieving Data");
return $q.reject(error);
});
}
This worked when using .getDataFromREST() but obviously doesn't now as I have to route through my new loop function, keepICorrect().
My question is how on earth do I create a promise which spans from my service to my controller, but not only that, also waits to resolve or fail depending on whether the i amount of requests have completed?
You need to create an array of promises
keepICorrect: function (security) {
var self = this;
var promises = [];
for (var i = 0 ; i < entity.length; i++) {
promises.push(self.getDataFromREST(security, i));
}
return promises;
},
And then wait for all of them to complete using the $q library in Angular
$q.all(getDataService.keepICorrect(security))
.then(....
I have a request-promise function that makes a request to an API. I'm rate-limited by this API and I keep getting the error message:
Exceeded 2 calls per second for api client. Reduce request rates to resume uninterrupted service.
I'm running a couple of Promise.each loops in parallel which is causing the issue, if I run just one instance of Promise.each everything runs fine. Within these Promise.each calls they lead to the same function a with a request-promise call. I want to wrap this function with another queue function and set the interval to 500 milliseconds so that a request isn't made after one another, or parallel, but set to that time, on queue. The thing is I still need these promises to get their contents even if it takes a rather long time to get a response.
Is there anything that will do this for me? Something I can wrap a function in and it will respond at a set interval and not in parallel or fire functions one after another?
Update: Perhaps it does need to be promise specific, I tried to use underscore's throttle function
var debug = require("debug")("throttle")
var _ = require("underscore")
var request = require("request-promise")
// NOTE(review): this attempt misuses _.throttle — every requestSite() call
// builds a NEW throttled wrapper and returns it without ever invoking it, so
// no request is throttled (or even started); only the debug line runs, which
// is exactly what the log output below shows.
function requestSite(){
debug("request started")
function throttleRequest(){
return request({
"url": "https://www.google.com"
}).then(function(response){
debug("request finished")
})
}
return _.throttle(throttleRequest, 100)
}
requestSite()
requestSite()
requestSite()
And all I got back was this:
$ DEBUG=* node throttle.js
throttle request started +0ms
throttle request started +2ms
throttle request started +0ms
Update
The last answer was wrong, this works but I still think I can do better:
// call fn at most count times per delay.
// Queue-based limiter: up to "count" calls may be "working" at once; each
// slot frees itself "delay" ms after it was claimed and then services the
// next queued call.
// NOTE(review): Promise.delay and .tap are Bluebird extensions, not standard
// ES promises — this block assumes Bluebird is the ambient Promise.
const debounce = function (fn, delay, count) {
let working = 0, queue = [];
function work() {
// Nothing queued, or every slot busy — a later delay tick will retry.
if ((queue.length === 0) || (working === count)) return;
working++;
// Free this slot after "delay" ms, then pull from the queue again.
Promise.delay(delay).tap(() => working--).then(work);
let {context, args, resolve} = queue.shift();
// Resolve the caller's promise with fn's result (itself possibly a promise).
resolve(fn.apply(context, args));
}
return function debounced() {
return new Promise(resolve => {
queue.push({context: this, args: arguments, resolve});
if (working < count) work();
});
};
};
// Stand-in for the real API call: logs, then "responds" after 0–100 ms.
// NOTE(review): Promise.delay is Bluebird-specific.
function mockRequest() {
console.log("making request");
return Promise.delay(Math.random() * 100);
}
// Demo: 5 immediate calls fill all 5 slots; the 20 calls queued 2 s later
// drain through at most 5 at a time per 800 ms window.
var bounced = debounce(mockRequest, 800, 5);
for (var i = 0; i < 5; i++) bounced();
setTimeout(function(){
for (var i = 0; i < 20; i++) bounced();
},2000);
So you need to make the requests throttle function-wide - that's fine. Promises have queueing pretty much built in.
var p = Promise.resolve(); // our queue
// Serializes requests: each call chains onto the shared promise "p", and a
// 1000 ms delay is appended after every request so successive callers start
// at least one second apart.
// NOTE(review): .delay() here is Bluebird's promise method, not standard.
function makeRequest(){
p = p.then(function(){ // queue the promise, wait for the queue
return request("http://www.google.com");
});
var p2 = p; // get a local reference to the promise
// add 1000 ms delay to queue so the next caller has to wait
p = p.delay(1000);
return p2;
};
Now makeRequest calls will be at least 1000ms apart.
jfriend has pointed out that you need two requests per second and not a single one - this is just as easily solvable with a second queue:
var p = Promise.resolve(1); // our first queue
var p2 = Promise.resolve(2); // our second queue
// Two-lane variant: allows two requests per second by racing the two queue
// promises and re-arming whichever lane turned out to be free.
// NOTE(review): Promise.any, .return and .delay are Bluebird APIs here —
// Bluebird's any() fulfils with the first fulfilled value of the array.
// Also, .delay(1, 1000) passes two arguments while Bluebird's .delay takes
// only a ms count — confirm the intended signature.
function makeRequest(){
var turn = Promise.any([p, p2]).then(function(val){
// add 1000 ms delay to queue so the next caller has to wait
// here we wait for the request too although that's not really needed,
// check both options out and decide which works better in your case
if(val === 1){
// Lane 1 was free: occupy it until this turn's request + delay completes.
p = p.return(turn).delay(1, 1000);
} else {
p2 = p2.return(turn).delay(1, 1000);
}
return request("http://www.google.com");
});
return turn; // return the actual promise
};
This can be generalized to n promises using an array similarly