JavaScript: Asynchronous method in while loop

I'm tackling a project that requires me to use JavaScript with an API method call. I'm a Java programmer who has never done web development before so I'm having some trouble with it.
This API method is asynchronous and it's in a while loop. If it returns an empty array, the while loop finishes. Otherwise, it loops. Code:
var done = true;
do
{
    async_api_call(
        "method.name",
        {
            // Do stuff.
        },
        function(result)
        {
            if(result.error())
            {
                console.error(result.error());
            }
            else
            {
                // Sets the boolean to true if the returned array is empty, or false otherwise.
                done = (result.data().length === 0) ? true : false;
            }
        }
    );
} while (!done);
This doesn't work. The loop ends before the value of "done" is updated. I've done some reading up on the subject and it appears I need to use promises or callbacks because the API call is asynchronous, but I can't understand how to apply them to the code I have above.
Help would be appreciated!

edit: see the bottom, there is the real answer.
I encourage you to use the Promise API. Your problem can be solved using a Promise.all call:
let promises = [];
while(something){
    promises.push(new Promise((r, j) => {
        YourAsyncCall(() => r());
    }));
}
//Then this returns a promise that will resolve when ALL of them are resolved.
Promise.all(promises).then(() => {
    //All operations done
});
The syntax above is ES6; here is the ES5 equivalent (the Promise API may need to be included externally as a polyfill):
var promises = [];
while(something){
    promises.push(new Promise(function(r, j){
        YourAsyncCall(function(){ r(); });
    }));
}
//Then this returns a promise that will resolve when ALL of them are resolved.
Promise.all(promises).then(function(){
    //All operations done
});
You can also make your API call return the promise and push it directly to the promise array.
If you don't want to edit the API call method, you can always wrap your code in a new promise and call resolve when it finishes.
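A minimal sketch of such a wrapper (assuming the async_api_call signature from the question; apiCallAsPromise is a name introduced here for illustration):
// Wrap the callback-style API in a promise so it can be pushed to the array.
function apiCallAsPromise(method, params) {
    return new Promise(function(resolve, reject) {
        async_api_call(method, params, function(result) {
            if (result.error()) {
                reject(result.error());
            } else {
                resolve(result.data());
            }
        });
    });
}
// Usage: promises.push(apiCallAsPromise("method.name", {}));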
edit: I now see the point of your code, sorry. I've just realized that Promise.all will not solve the problem.
You should put what you posted (excluding the while loop and the control variable) inside a function and, depending on the condition, call it again.
Then everything can be wrapped inside a promise in order to make the external code aware of this asynchronous execution. I'll post some sample code later from my PC.
So, the good answer:
You can use a promise to control the flow of your application and use recursion instead of the while loop:
function asyncOp(resolve, reject) {
    //If you're using NodeJS you can use ES6 syntax:
    async_api_call("method.name", {}, (result) => {
        if (result.error()) {
            console.error(result.error());
            reject(result.error()); //You can reject the promise; this is optional.
        } else {
            //An empty array means the operation is finished, so resolve and don't call again.
            if (result.data().length === 0) {
                resolve(result); //Resolve the promise, pass the result.
            } else {
                asyncOp(resolve, reject); //Otherwise, try again.
            }
        }
    });
}

new Promise((r, j) => {
    asyncOp(r, j);
}).then((result) => {
    //This will be called if your algorithm succeeds!
});

/*
 * Please note that "(...) => {}" is equivalent to "function(...){}"
 */

sigmasoldier's solution is correct; I just wanted to share the ES6 version with async / await:
const asyncFunction = (t) => new Promise(resolve => setTimeout(resolve, t));

const getData = async (resolve, reject, count) => {
    console.log('waiting');
    await asyncFunction(3000);
    console.log('finished waiting');
    count++;
    if (count < 2) {
        getData(resolve, reject, count);
    } else {
        return resolve();
    }
}

const runScript = async () => {
    await new Promise((r, j) => getData(r, j, 0));
    console.log('finished');
};

runScript();

If you don't want to use recursion, you can change your while loop into a for...of loop and use a generator function to maintain the done state. Here's a simple example where the for...of loop awaits the async function until we've had 5 iterations, at which point done is flipped to true. You should be able to adapt this concept to set your done variable to true when your web service calls have buffered all of your data rows.
let done = false;
let count = 0;

const whileGenerator = function* () {
    while (!done) {
        yield count;
    }
};

const asyncFunction = async function(){
    await new Promise(resolve => { setTimeout(resolve); });
};

const main = new Promise(async (resolve)=>{
    for (let i of whileGenerator()){
        console.log(i);
        await asyncFunction();
        count++;
        if (count === 5){
            done = true;
        }
    }
    resolve();
});

main.then(()=>{
    console.log('all done!');
});
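A hedged sketch of how this might be adapted to the question's API (async_api_call and result.data() are taken from the question; callOnce is a name introduced here for illustration):
// Promisified single call to the API from the question.
const callOnce = () => new Promise((resolve, reject) => {
    async_api_call("method.name", {}, (result) => {
        if (result.error()) reject(result.error());
        else resolve(result.data());
    });
});

// Drive the generator loop with the real API: an empty array ends the loop.
const mainApi = new Promise(async (resolve) => {
    for (let i of whileGenerator()) {
        const data = await callOnce();
        if (data.length === 0) {
            done = true;
        }
    }
    resolve();
});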

You may also try a recursive solution.
function asyncCall(cb) {
    // Some async operation
}

function responseHandler(result) {
    if (result.error()) {
        console.error(result.error());
    } else if (result.data() && result.data().length) {
        asyncCall(responseHandler);
    }
}

asyncCall(responseHandler);
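A minimal sketch of what asyncCall might wrap, assuming the async_api_call signature from the question:
function asyncCall(cb) {
    // Forward the callback to the API call from the question.
    async_api_call("method.name", {}, cb);
}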

Here is a solution I came up with. Place this in an async function.
let finished = false;
const loop = async () => {
    return new Promise(async (resolve, reject) => {
        const inner = async () => {
            if (!finished) {
                //insert loop code here
                if (xxx is done) { //insert this in your loop code after the task is complete
                    finished = true;
                    resolve();
                } else {
                    return inner();
                }
            }
        }
        await inner();
    })
}
await loop();
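A minimal usage sketch, since the snippet above has to live inside an async function:
// Wrap the whole thing in an async IIFE so that "await loop();" is legal.
(async () => {
    // ...declare finished and loop as above...
    await loop();
    console.log('loop finished');
})();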

If you don't want to use Promises you can restructure your code like so:
var tasks = [];
var index = 0;

function processNextTask()
{
    if (index >= tasks.length)
    {
        // no more tasks
        return;
    }

    async_api_call(
        "method.name",
        {
            // Do stuff.
        },
        function(result)
        {
            if (result.error())
            {
                console.error(result.error());
            }
            else
            {
                // process data for tasks[index], then move on to the next one
                index++;
                setTimeout(processNextTask);
            }
        }
    );
}
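A minimal usage sketch: fill the tasks array with your work items, then call processNextTask() once to start the chain; each completed call schedules the next one.
tasks = ["row1", "row2", "row3"]; // placeholder items for illustration
processNextTask();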

Your loop won't work because it is synchronous while your task is asynchronous, so the loop will finish before the async task can even respond. I'd recommend you use Promises to manage async tasks:
//first wrapping your API into a promise
var async_api_call_promise = function(methodName, someObject){
    return new Promise((resolve, reject) => {
        async_api_call(methodName, someObject, function(result){
            if(result.error()){
                reject( result.error() )
            }else{
                resolve( result.data() )
            }
        });
    })
}
now to your polling code:
//a local utility because I don't want to repeat myself
var poll = () => async_api_call_promise("method.name", {/*Do stuff.*/});

//your polling operation: keep polling until an empty array comes back
var pollUntilDone = () => poll().then(
    data => data.length === 0 || pollUntilDone(), //true || tryAgain
    err => {
        console.error(err);
        return pollUntilDone();
    }
);

pollUntilDone().then((done) => {
    //done === true
    //here you put the code that has to wait for your "loop" to finish
});
Why Promises? Because they handle the state management of async operations. Why implement that yourself?

// Chains chunked work onto an already-resolved promise.
// index, start, end, thisPartSize, filesize, this.totalPieces and this.worker
// are assumed to be defined by the surrounding code.
let taskPool = new Promise(function(resolve, reject) {
    resolve("Success!");
});
let that = this;
while (index < this.totalPieces) {
    end = start + thisPartSize;
    if (end > filesize) {
        end = filesize;
        thisPartSize = filesize - start;
    }
    taskPool.then(() => {
        that.worker(start, end, index, thisPartSize);
    });
    index++;
    start = end;
}

Related

Closure not updating inside a while loop in Javascript

I'm trying to get a closure to return a value that is supposed to be updated once a promise is resolved (or rejected).
The following code works. Initially the internal variable from within the closure returns NONE as expected.
Then the first Promise is launched, and once that is resolved, the internal variable is updated to FAIL.
The second Promise is a deliberate delay, just so that we can observe the change of the closed-over variable.
However, once the while loop is added to the equation, by uncommenting that loop(x) section, the update is not observable within the while loop.
I would expect to see this:
...
9963000000 NONE
9964000000 NONE
9965000000 NONE
9966000000 NONE
9967000000 NONE
9968000000 FAIL
9969000000 FAIL
9970000000 FAIL
9971000000 FAIL
9972000000 FAIL
9973000000 FAIL
9974000000 FAIL
...
I know it might be due to the single threaded blocking, but, is there a way to observe a dynamic external variable from within the while loop?
let sleep = async (ms) => new Promise ((resolve, reject) => setTimeout (resolve, ms));

let task = async (ms) => new Promise (function(resolve, reject) {
    setTimeout (function(){
        const error = true;
        let result;
        if(error){
            result = '_NO_';
            reject({'state': false, 'response': result});
        }else{
            result = '_YES_';
            resolve({'state': true, 'response': result});
        }
    }, ms);
});

let loop = async (cb) => {
    let i = 0;
    while(i<10000000000){
        const value = cb.getResponse();
        (function() {
            if(i%1000000==0){ console.log(i, value) };
            i += 1;
        })(i, value);
    }
}

const linkResponse = (function(){
    let response = 'NONE';
    function setResponse(value) {response = value; return response};
    function getResponse() {return response};
    return { 'setResponse': setResponse, 'getResponse': getResponse };
});

const x = linkResponse();
console.log(x.getResponse());

(async () => {
    task(3000)
        .then(function(res){
            console.log('__OK__', res);
            let response = 'SUCCESS';
            x.setResponse(response)
        })
        .catch(function(err){
            console.log('error', err);
            let response = 'FAIL';
            x.setResponse(response)
        });
    sleep(6000)
        .then(function(res){
            console.log(x.getResponse())
        });
    //loop(x);
})();
Well, thanks for the help. Just as I suspected, it is indeed a blocked-thread issue. I solved the problem with a recursive function. I just needed to have a long process running in the background and naively thought that an infinite loop would do the job.
let loop2 = function(i, cb) {
    if(i>100000){
        return
    }
    console.log(i, cb.getResponse());
    i += 1;
    sleep(0)
        .then(function(res){
            loop2(i, cb);
        });
}
And then calling:
loop2(0, x);

Chain execution of array of promises in javascript

I am trying to create a chain of promises where each promise waits for the previous promise before getting executed.
const syncStatusChanges = () => {
    return new Promise((resolve, reject) => {
        console.log("in")
        setTimeout(() => {
            console.log("done")
            resolve({ done: true })
        }, 2000);
    });
}

const run = () => {
    const promises = [syncStatusChanges(), syncStatusChanges()]
    promises[0].then(res => {
        console.log("done 1")
        promises[1].then(res => {
            console.log("done 2")
        })
    })
}

run()
In this example the output is:
in
in
done
done 1
done
done 2
But I want it to be:
in
done
done 1
in
done
done 2
I also want it to work for any number n of functions. I saw this answer, but the output is the same!
var promise = statusChangeCalls[0];
for (var i = 1; i < statusChangeCalls.length; i++)
    promise = promise.then(statusChangeCalls[i]);
As was written in the comments, you are executing the functions when you build the array itself. From what I understood of your desired output, the run function below can help you.
const run = () => {
    const promise = syncStatusChanges();
    promise.then(res => {
        console.log("done 1")
        syncStatusChanges().then(res => {
            console.log("done 2")
        })
    })
}
Promises execute eagerly; they do not wait for the then function to be registered. You can look into Observables, which are lazy in execution: basically, they wait until you subscribe to them.
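A minimal way to get similar laziness without a library (a sketch, not from the original answer) is to store functions that create the promises, so nothing starts until you call them:
// Promise factories instead of promises: each call starts a new run.
const lazyTasks = [() => syncStatusChanges(), () => syncStatusChanges()];

const runLazy = async () => {
    for (const makeTask of lazyTasks) {
        await makeTask(); // the promise is created, and the work starts, here
    }
};

runLazy();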
For your second question about the loop, you can use the async/await keywords to achieve chaining. Just pass a number as a parameter to the runInLoop function to execute the promise that many times.
const runInLoop = async (numberOfPromisesCall) => {
    for (let i = 0; i < numberOfPromisesCall; i++){
        await syncStatusChanges();
        console.log(`done ${i + 1}`);
    }
}

runInLoop(5)

JavaScript recursion returning a promise never resolving

In JavaScript, I have an array of objects representing some tasks to do. I iterate through this array with a for loop with await, calling a function doOneTask which returns a Promise.
That works pretty well as long as the code inside doOneTask works as expected. However, those things often fail, and trying again helps almost all the time. So, I'd like to implement a procedure for auto-retrying inside the JavaScript code.
My idea was a recursive function: in case of a failure, doOneTask calls itself until the promise is finally resolved.
My code looks like this:
var tasks = [{label: 'task0'},{label: 'task1'},{label: 'task2'}];

async function mainFunction() {
    for(let k = 0; k < tasks.length; k++) {
        await doOneTask(tasks[k]);
        console.log("doOneTask done for index " + k);
    }
    console.log("End reached!");
}

function doOneTask(task) {
    return new Promise(async function (resolve,reject) {
        console.log("Starting with: " + task.label);
        let checkIfDoeSomeStuffWorked = await doSomeAsyncStuff();
        if(checkIfDoeSomeStuffWorked == false) {
            console.log(task.label + ": FAILED");
            return doOneTask(task);
        }
        else {
            console.log(task.label + ": SUCCESS");
            resolve(true);
        }
    });
}

function doSomeAsyncStuff() {
    return new Promise(function (resolve,reject) {
        var myRandom = Math.random();
        if(myRandom < 0.3) {
            resolve(true);
        }
        else {
            resolve(false);
        }
    });
}

mainFunction();
(In real life, doSomeAsyncStuff is a backend call which often fails. The random() part is just for demonstration. In reality, I also limit the number of trials before stopping the script.)
However, it doesn't work. In case of a failure, the script stops after having reached the SUCCESS console log. I never get back to the loop and the next items in the loop never get executed.
You have no use for the q library dependency. async functions always return a Promise, so you can simplify your code quite a bit -
async function doOneTask (task) {
    const result = await doSomeAsyncStuff()
    if (result === false) {
        console.log(`${task} failed`)
        return doOneTask(task)
    }
    else {
        console.log(`${task} passed`)
        return true
    }
}
Your fake function doSomeAsyncStuff can be cleaned up too -
async function doSomeAsyncStuff () {
    return Math.random() < 0.3
}
But let's add a fake delay of 1 second so that we can show things working 100% -
async function doSomeAsyncStuff () {
    return new Promise(resolve =>
        setTimeout(resolve, 1000, Math.random() < 0.3)
    )
}
Last, your main function uses a really old looping convention. As you're using modern JavaScript, you might as well use for-of syntax -
async function main (tasks = []) {
    for (const t of tasks) {
        await doOneTask(t)
    }
    return "done"
}
Finally we run the program -
const tasks =
    [ 'task0', 'task1', 'task2' ]

main(tasks).then(console.log, console.error)
// task0 failed
// task0 passed
// task1 failed
// task1 failed
// task1 passed
// task2 passed
// done
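The question also mentions limiting the number of trials before giving up. A hedged variant of doOneTask with a retry cap (the retriesLeft parameter and the doOneTaskWithLimit name are additions for illustration, not part of the original answer):
async function doOneTaskWithLimit (task, retriesLeft = 5) {
    const result = await doSomeAsyncStuff()
    if (result === false) {
        if (retriesLeft === 0) throw new Error(`${task} failed too many times`)
        console.log(`${task} failed, ${retriesLeft} retries left`)
        return doOneTaskWithLimit(task, retriesLeft - 1)
    }
    console.log(`${task} passed`)
    return true
}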
After having completed the question, but just before posting, something came to mind: in the setup above, I don't resolve the very same promise object when I finally reach success; instead, a new promise object is generated for each function call. My solution/workaround is quite simple: use the q promise library, passing the deferred promise from one function call to the next:
var q = require('q');

async function doOneTask(task, promiseObj) {
    if (!promiseObj) {
        promiseObj = q.defer();
    }
    console.log("Starting with: " + task.label);
    let checkIfDoeSomeStuffWorked = await doSomeAsyncStuff();
    if (checkIfDoeSomeStuffWorked == false) {
        console.log(task.label + ": FAILED");
        return doOneTask(task, promiseObj);
    }
    else {
        console.log(task.label + ": SUCCESS");
        promiseObj.resolve(true);
    }
    return promiseObj.promise;
}
That way, we make sure that the very same promise object which is generated at the first call of doOneTask is resolved in the end, even after the 20th execution.

Synchronize multiple Promises while allowing multiple retries

I am trying to build a downloader that automatically retries downloading. Basically, a task queue which retries tasks a certain number of times. I first tried using Promise.all(), but the "trick" to circumvent the fail-on-first-reject described here did not help (and is an anti-pattern, as described further down in that thread).
So I got a version working which seems to somewhat do what I want. At least the results it prints are correct. But it still throws several uncaught exception test X errors/warnings and I don't know what to do about that.
The Code:
asd = async () => {
    // Function simulating tasks which might fail.
    function wait(ms, data) {
        return new Promise( (resolve, reject) => setTimeout(() => {
            if (Math.random() > 0.5){
                resolve(data);
            } else {
                reject(data);
            }
        }, ms) );
    }

    let tasks = [];
    const results = [];

    // start the tasks
    for ( let i = 0; i < 20; i++) {
        const prom = wait(100 * i, 'test ' + i);
        tasks.push([i, prom]);
    }

    // collect results and handle retries.
    for ( let tries = 0; tries < 10; tries++){
        failedTasks = [];
        for ( let i = 0; i < tasks.length; i++) {
            const task_idx = tasks[i][0];
            // Wait for the task and check whether they failed or not.
            // Any pointers on how to improve the readability of the next 6 lines appreciated.
            await tasks[i][1].then(result => {
                results.push([task_idx, result])
            }).catch(err => {
                const prom = wait(100 * task_idx, 'test ' + task_idx);
                failedTasks.push([task_idx, prom])
            });
        }

        // Retry the tasks which failed.
        if (failedTasks.length === 0){
            break;
        } else {
            tasks = failedTasks;
        }
        console.log('try ', tries);
    }
    console.log(results);
}
In the end, the results array contains (unless a task failed 10 times) all the results. But still uncaught exceptions fly around.
As not all rejected promises result in uncaught exceptions, my suspicion is, that starting the tasks first and applying then()/catch() later is causing some timing issues here.
Any improvements or better solutions to my problems are appreciated. E.g. my solution only allows retries "in waves". If anyone comes up with a better continuous solution, that would be much appreciated as well.
Using await and async allows you to solve this in a much clearer way.
You pass an array of tasks (functions that, when executed, start the given task) to execute_tasks. For each of those tasks, this function calls execute_task, passing the task function to it; execute_task returns a Promise containing the information about whether the task was successful or not.
execute_task has a loop that runs until the async task succeeds or the maximum number of retries is reached.
Because each of the tasks has its own retry loop, you can avoid those waves. Each task queues itself for a new execution as it fails. Using await this way creates a kind of cooperative multitasking. And all errors are handled, because the task is executed in a try/catch block.
function wait(ms, data) {
    return new Promise((resolve, reject) => setTimeout(() => {
        if (Math.random() > 0.5) {
            resolve(data);
        } else {
            reject(new Error());
        }
    }, ms));
}

async function execute_task(task) {
    let result, lastError;
    let i = 0

    //loop until a result was found or the retry count is larger than 10
    while (!result && i < 10) {
        try {
            result = await task()
        } catch (err) {
            lastError = err
            // maybe sleep/wait before retry
        }
        i++
    }

    if (result) {
        return { success: true, data: result }
    } else {
        return { success: false, err: lastError }
    }
}

async function execute_tasks(taskList) {
    var taskPromises = taskList.map(task => execute_task(task))
    // the result could be sorted into failed and not failed tasks before returning
    return await Promise.all(taskPromises)
}

var taskList = []
for (let i = 0; i < 10; i++) {
    taskList.push(() => {
        return wait(500, {
            foo: i
        })
    })
}

execute_tasks(taskList)
    .then(result => {
        console.dir(result)
    })

JavaScript checking if resource is reachable with fetch

I'm basically just trying to verify if a resource is reachable from the executing client. I cannot use XHR, because the target resource doesn't allow that.
I'm pretty new to JS and am currently working with this (executable here):
var done = false;
var i = 1;
var t = "https://i.stack.imgur.com/Ya15i.jpg";

while(!done && i < 4)
{
    console.log("try "+i);
    done = chk(t);
    sleep(1000);
    i = i+1;
    if (done)
    {
        console.log("Reachable!");
        break;
    }
    else
    {
        console.log("Unreachable.");
    }
}

function chk(target)
{
    console.log("checking "+target)
    fetch(target, {mode: 'no-cors'}).then(r=>{
        return true;
    })
    .catch(e=>{
        return false;
    });
}

// busy fake sleep
function sleep(s)
{
    var now = new Date().getTime();
    while(new Date().getTime() < now + s){ /* busy sleep */ }
}
I was expecting this code to check for the resource, print the result, then wait for a sec. Repeat this until 3 tries were unsuccessful or one of them was successful.
Instead the execution blocks for a while, then prints all of the console.logs at once and the resource is never reachable (which it is).
I do know that the fetch operation is asynchronous, but I figured if I previously declare done and implement a sleep it should work. In the worst case, the while loop would use the previously declared done.
How do I achieve the described behavior? Any advice is welcome.
Your sleep function is blocking; what you really want is a recursive function that returns a promise after checking the URL up to n times with a given delay between tries.
Something like this
function chk(target, times, delay) {
    return new Promise((res, rej) => {                     // return a promise
        (function rec(i) {                                 // recursive IIFE
            fetch(target, {mode: 'no-cors'}).then((r) => { // fetch the resource
                res(r);                                    // resolve promise if success
            }).catch( err => {
                if (times === 0)                           // if number of tries reached
                    return rej(err);                       // don't try again
                setTimeout(() => rec(--times), delay )     // otherwise, wait and try
            });                                            // again until no more tries
        })(times);
    });
}
To be used like this
var t = "https://i.stack.imgur.com/Ya15i.jpg";
chk(t, 3, 1000).then( image => {
console.log('success')
}).catch( err => {
console.log('error')
});
And note that this does not fail on 404 or 500; any response counts as a successful request.
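For targets that do allow CORS, a hedged sketch of a stricter check that treats 404/500 as unreachable (chkStrict is a name introduced here for illustration):
// A stricter check: only 2xx responses count as reachable.
// Requires a CORS-enabled target, since opaque (no-cors) responses hide the status.
const chkStrict = target =>
    fetch(target)
        .then(r => r.ok)      // false for 404/500
        .catch(() => false);  // network errors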
The main problem is that you are trying to return from a callback; that doesn't work.
But fetch is Promise-based, so you can use Promises to simulate the delays as well.
Something like this should do the trick
// promise based delay
const delay = timeout => new Promise(resolve => setTimeout(resolve, timeout))

// check if target can be fetched
const check = target => fetch(target, {...})
    .then(response => response.ok)

const ping = (target, times = 3, timeout = 1000) => check(target)
    .then(found => {
        if(!found && times) { // still can check
            // wait then ping one more time
            return delay(timeout).then(() => ping(target, times - 1, timeout))
        }
        return found
    })

ping('https://i.stack.imgur.com/Ya15i.jpg')
    .then(found => {
        console.log(found ? 'Reachable': 'Unreachable')
    })
Your chk function returns undefined; you are returning true/false from the promise callbacks, not from the containing function.
You should use recursion and a timeout in the catch callback.
It will be something like this:
var i = 0;
var done = false;
var t = "https://i.stack.imgur.com/Ya15i.jpg";

(function chk(target){
    console.log("checking "+target)
    fetch(target, {mode: 'no-cors'}).then(r=>{
        done = true;
        console.log("Reachable!");
    })
    .catch(e=>{
        console.log("Unreachable.");
        i++; // count this failed attempt so we eventually stop retrying
        if(i<4){
            setTimeout(function(){
                chk(target)
            },1000)
        }
    });
})(t)
You can't return within a callback. When you do, it is the callback that is returning, not the parent function. In fact, the function chk never returns anything.
What it sounds like you are intending to do is return the promise returned by fetch, and attempt the fetch three times.
Try this:
const numberOfTries =3;
currentTry = 1;
var t = "https://i.stack.imgur.com/Ya15i.jpg";
chk(t);
function tryCheck(resource, currentTry) {
chk(resource).done(function(){
console.log("Reachable!");
}).catch(function(e) {
console.log("Unreachable.");
if (currentTry >= numberOfTries) return;
sleep(1000);
tryCheck(resource, currentTry + 1);
});
}
function chk(resource) {
console.log("checking "+target);
return fetch(target, {mode: 'no-cors'});
}
Try this; hope it works:
var myHeaders = new Headers();
myHeaders.append('Content-Type', 'image/jpeg');

var myInit = { method: 'GET',
               headers: myHeaders,
               mode: 'no-cors',
               cache: 'default' };

var myRequest = new Request('https://i.stack.imgur.com/Ya15i.jpg');

fetch(myRequest, myInit).then(function(response) {
    ...
});
