What would be the best way to implement a stoppable list of asynchronous tasks?
The number of tasks aren't fixed.
I've found a way but I'm wondering if there is a better way to implement such a thing. Also the code looks rather dirty, which makes me think that it's probably not the way to go.
The main issues are:
Detect when the end of tasks is requested.
Be able to execute a chain of asynchronous functions, with the possibility of quitting the process at one point, regardless of the remaining tasks.
For issue (2), I found a decent way, which is by using async.waterfall. Sending a value to the error parameter of the callback causes all the processes to stop.
Now comes the dirty part, for (1). To achieve this, each task has to listen for a value change in an object (let's say, the state of the sequence) in order to detect when quitting the process is required.
So, a prototype of the implementation looks like that:
// Observable stop-flag: assigning a truthy value to `stopped` fires the
// `onStop` hook, which each running task replaces with its own cancel logic.
let sequenceState = new Proxy(
    {
        stopped: false,
        onStop: () => { } // overwritten by the currently-running task
    },
    {
        set: function (target, prop, value) {
            if (prop === "stopped") {
                target.stopped = value;
                if (value)
                    target.onStop();
            } else {
                target[prop] = value;
            }
            // BUG FIX: a Proxy `set` trap must report success. Returning
            // nothing (undefined) makes every assignment through the proxy
            // throw a TypeError in strict-mode / ES-module code.
            return true;
        }
    }
);
// Request cancellation of the whole sequence; the Proxy's set trap will
// invoke whichever onStop handler the current task registered.
function stop() {
sequenceState.stopped = true;
}
// First waterfall task: completes after 2000 ms, unless the sequence is
// stopped first — then the timer is cancelled and the callback receives a
// truthy "error" so async.waterfall aborts the remaining tasks.
function task1(callback) {
    console.log("Task 1...");
    let done = false; // set once the task has completed normally
    let to = setTimeout(() => {
        done = true;
        console.log("Done after 2000 ms");
        callback();
    }, 2000);
    sequenceState.onStop = () => {
        // BUG FIX: without this guard, calling stop() after the task had
        // already finished invoked callback() a second time — waterfall
        // callbacks must fire exactly once.
        if (done) return;
        clearTimeout(to);
        callback(true);
    };
}
// Second waterfall task: completes after 3000 ms, unless stopped first —
// then the timer is cancelled and a truthy "error" aborts the chain.
function task2(callback) {
    console.log("Task 2...");
    let done = false; // set once the task has completed normally
    let to = setTimeout(() => {
        done = true;
        console.log("Done after 3000 ms");
        callback();
    }, 3000);
    sequenceState.onStop = () => {
        // BUG FIX: guard against invoking the waterfall callback twice if
        // stop() arrives after this task already completed.
        if (done) return;
        clearTimeout(to);
        callback(true);
    };
}
// Run the tasks in order; a truthy "error" from any task (sent by onStop)
// short-circuits the chain straight to the final callback.
async.waterfall(
[
task1,
task2
],
function (err, results) {
// Reached either when all tasks finish or as soon as one aborts.
console.log("End of tasks");
});
// Simulate a user-triggered stop in the middle of task2 (at 2500 ms).
setTimeout(() => {
console.log("Stopped after 2500 ms !");
stop();
}, 2500);
<script src="https://cdn.jsdelivr.net/npm/async@2.6.0/dist/async.min.js"></script>
Does anyone have a better way of implementing this ?
Related
I am trying to make a method sleep(delay) in method chaining. For this I am using setTimeout with Promise. This will require any method following the sleep to be inside the then.
Right now I am calling the function like
lazyMan("John", console.log).eat("banana").sleep(5).then(d => {d.eat("apple");});.
Here is my code
// Fluent wrapper: `eat` chains synchronously, while `sleep` breaks the
// chain by returning a Promise that resolves back to the wrapper object.
function lazyMan(name, logFn) {
    logFn(name);
    const self = {
        eat(val) {
            console.log(val);
            return self;
        },
        sleep(timer) {
            const delay = new Promise((resolve) => {
                setTimeout(() => {
                    console.log(`Sleeping for ${timer} seconds`);
                    resolve(self);
                }, timer * 1000);
            });
            // Resolve to the wrapper so callers can keep chaining in .then().
            return delay.then(() => self);
        }
    };
    return self;
}
// Usage: sleep() returns a Promise, so anything after it must live inside
// .then() — exactly the limitation the question is about.
lazyMan("John", console.log)
.eat("banana")
.sleep(5)
.then(d => {
d.eat("apple");
});
Is there a way I can modify my function to call it like lazyMan("John", console.log).eat("banana").sleep(5).eat("apple") and get the output in the same order?
I have gone through Add a sleep method in a object method chain(JS)
You can keep a promise for your "task queue", so anything that needs to be done, will be added onto there via .then(). This provides a fluent API for scheduling stuff.
// Fully-fluent variant: every action is appended to an internal Promise
// chain, so the public API stays synchronous while execution is async.
function lazyMan(name, logFn) {
    logFn(name);
    let pending = Promise.resolve();
    // Append a job to the chain; jobs run strictly in registration order.
    const enqueue = (job) => {
        pending = pending.then(job);
    };
    const api = {
        eat(val) {
            enqueue(() => console.log(`Eating [${val}]`));
            return api;
        },
        sleep(timer) {
            enqueue(() => new Promise((resolve) => {
                console.log(`Start sleeping for ${timer} seconds`);
                setTimeout(() => {
                    console.log(`End sleeping for ${timer} seconds`);
                    resolve();
                }, timer * 1000);
            }));
            return api;
        }
    };
    return api;
}
// Now the whole chain is fluent — no .then() needed at the call site.
lazyMan("John", console.log)
.eat("banana")
.sleep(5)
.eat("apple");
Note that this change means that every action is technically asynchronous. However, that's at least uniform, so it's less of a chance of a surprise when keeping it in mind.
I have an asynchronous function that performs various await tasks. I am trying to inform my UI in React when the status of the function changes or when one of the tasks is completed.
// Skeleton of the async flow in question; task1/task2/task3 are assumed to
// return Promises. The comments mark where UI events should be emitted.
const foo = async () => {
// trigger on load event
await task1();
// trigger task1 done event
await task2();
// trigger task2 done event
await task3();
// trigger on done event
}
I also want to be able to specify callbacks for each event, like so:
// Desired usage sketch (not working code).
// NOTE(review): `bar` holds foo()'s return value, so these were presumably
// meant to read `bar.on_load(...)` / `bar.on_done(...)` — confirm intent.
const bar = foo();
foo.on_load(() => {
// some code goes here
});
foo.on_done(() => {
// some code goes here
});
Another alternative would be something like this:
// Alternative desired API, modelled on Node-style event emitters.
// NOTE(review): as above, presumably `bar.on(...)` rather than `foo.on(...)`.
const bar = foo();
foo.on('status_change', status => {
// read the status here and do something depending on the status
})
I have been reading about custom events in JS but not sure how to use them for this. Or maybe there's another way to do this in React.
Any ideas would be helpful. Thanks!
EDIT
// Firebase Storage upload example (quoted from the Firebase docs): the
// returned task exposes .on('state_changed', next, error, complete) — the
// observer-style API the question wants to replicate.
var uploadTask = storageRef.child('images/rivers.jpg').put(file);
// Register three observers:
// 1. 'state_changed' observer, called any time the state changes
// 2. Error observer, called on failure
// 3. Completion observer, called on successful completion
uploadTask.on('state_changed', function(snapshot){
// Observe state change events such as progress, pause, and resume
// Get task progress, including the number of bytes uploaded and the total number of bytes to be uploaded
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log('Upload is ' + progress + '% done');
switch (snapshot.state) {
case firebase.storage.TaskState.PAUSED: // or 'paused'
console.log('Upload is paused');
break;
case firebase.storage.TaskState.RUNNING: // or 'running'
console.log('Upload is running');
break;
}
}, function(error) {
// Handle unsuccessful uploads
}, function() {
// Handle successful uploads on complete
// For instance, get the download URL: https://firebasestorage.googleapis.com/...
uploadTask.snapshot.ref.getDownloadURL().then(function(downloadURL) {
console.log('File available at', downloadURL);
});
});
I was trying to achieve something like the above code, taken from the firebase documentation on uploading files
This is where I've gotten so far:
// Minimal event registry: stores at most one callback per known event name.
class Task {
    constructor() {
        this.first = null;  // callback fired after the first async step
        this.second = null; // callback fired after the second async step
    }

    /**
     * Register a callback for a known event.
     * @param {"first"|"second"} keyword - event name
     * @param {Function} callback - handler to store
     * @throws {Error} for an unknown event name (was a silent no-op TODO)
     */
    on(keyword, callback) {
        switch (keyword) {
            case "first":
                this.first = callback;
                break;
            case "second":
                this.second = callback;
                break;
            default:
                // Implements the original "// throw new error" TODO so that
                // typos in event names fail loudly instead of silently.
                throw new Error(`Unknown event keyword: ${keyword}`);
        }
    }
}
// Promise-based wrapper around setTimeout: resolves (with undefined) after
// `time` milliseconds.
const timeout = (time) => new Promise((resolve) => setTimeout(resolve, time));
// Kicks the async work off immediately but returns the Task object
// synchronously, so callers can attach handlers before the first event
// fires 2000 ms later.
const foo = () => {
    const task = new Task();
    timeout(2000).then(async () => {
        if (task.first) task.first();
        await timeout(2000);
        if (task.second) task.second();
    });
    console.log("returning");
    return task;
};
// foo() returns synchronously, so these handlers are registered well before
// the first 2000 ms timeout fires.
const taskObject = foo();
taskObject.on("first", () => console.log("executing first callback"));
taskObject.on("second", () => console.log("executing second callback"));
Is there a better way to do this - without having the nested thens? Which approach would be better and when? EDIT - removed nested then clauses and replaced with then and await
PS: for my requirements, having callbacks would be sufficient. This is just so I can understand the concept better. Thanks!
I'm going to assume there's a reason for you not simply calling some named method after each async step has complete, i.e., you want to be able to plug in different handlers for each event. Here is one way to go about it - whether or not it's the best is hard to tell from the little context provided:
// Runs the pipeline and fires whichever optional handlers the caller
// supplied; missing handlers are simply skipped.
const foo = async (handlers) => {
    if (handlers.onLoad) handlers.onLoad();
    await task1();
    if (handlers.onTask1Complete) handlers.onTask1Complete();
    await task2();
    if (handlers.onTask2Complete) handlers.onTask2Complete();
}
// Callers provide only the handlers they care about; foo() guards each one.
const myHandlers = {
onLoad: () => {
// do stuff
},
onTask1Complete: () => {
// do other stuff
},
onTask2Complete: () => {
// etc
}
};
foo(myHandlers);
Note that it lets you specify only the handlers you need. A more flexible approach would be to a publish-subscribe model, where a subscribe method pushes a function to an array of handlers, all of which are called when the event occurs.
The best option would be to make use of promises, which means every time a promise is resolved, you will get notified and then cascading promise will get executed.
an example below of chaining promises
// Promise executor for step 3: logs that it ran and reports success; any
// synchronous failure is routed to reject().
var function3 = (resolve, reject) => {
    try {
        //do some thing
        console.log('function3 called');
        resolve('function3 success');
    } catch (err) {
        reject(err);
    }
};
// Promise executor for step 2: logs that it ran and reports success; any
// synchronous failure is routed to reject().
var function2 = (resolve, reject) => {
    try {
        //do some thing
        console.log('function2 called');
        resolve('function2 success');
    } catch (err) {
        reject(err);
    }
};
// Promise executor for step 1: logs that it ran and reports success; any
// synchronous failure is routed to reject().
var function1 = (resolve, reject) => {
    try {
        //do some thing
        console.log('function1 called');
        resolve('function1 success');
    } catch (err) {
        reject(err);
    }
};
// Chain the three steps: each fulfilled .then wraps the next step in a new
// Promise, and each link has its own rejection handler.
var promise = new Promise(function1);
promise
    .then(function(response){
        console.log(response);
        return new Promise(function2);
    }, function(error)
    {
        console.log(error);
    })
    .then(function(response)
    {
        console.log(response);
        return new Promise(function3);
    },
    function(err)
    {
        // BUG FIX: this handler logged the undefined name `error`, which
        // would raise a ReferenceError when invoked; the parameter is `err`.
        console.log(err);
    })
    .then(function(response)
    {
        console.log(response);
    },
    function(err)
    {
        // BUG FIX: same `error` -> `err` correction as above.
        console.log(err);
    });
//output
"function1 called"
"function1 success"
"function2 called"
"function2 success"
"function3 called"
"function3 success"
I'm using socket.io and mongoose in my express server.
My socket is listening for events using the following code:
// socket.io listener: doA/doB/doC are awaited in order WITHIN one event,
// but a second 'do something' arriving mid-sequence starts a concurrent
// run — the interleaving that causes the DB consistency problem below.
socket.on('do something', async () => {
try {
await doA();
doX();
await doB();
doY();
await doC();
} catch (error) {
console.log(error);
}
});
doA, doB and doC are async operations that writes on database using mongoose, but in general they can be any method returning a promise.
I want that 'do something' runs synchronously.
If the event queue processes more events at the same time I have consistency problems in my mongodb.
In other words if the server receives two 'do something' events, I want that the second event received is processed only when the first event is fully processed (after the await doC). Unfortunately the 'do something' callback is async.
How to handle this?
It's possible to implement a queue by adding the functions you want to run to an array, and then running them one by one. I've created an example below.
// Shared queue state: `queue` holds functions waiting to run; `running`
// marks whether a task is currently in flight.
let queue = [];
let running = false;

// Promise-based delay resolving after `t` ms with value `v`.
// GENERALIZED: `v` was declared but ignored (the resolved value was
// hard-coded); it now IS the resolved value, with the old string as the
// default so existing delay(t) callers see identical behavior.
const delay = (t, v = "Returned value from Promise") => {
    return new Promise((resolve) => {
        setTimeout(resolve.bind(null, v), t)
    });
}
// Entry point for each socket event: run immediately when idle, otherwise
// park the work on the queue to run after the current task drains.
const onSocketEvent = async () => {
    console.log("Got event");
    if (running) {
        console.log("Queuing item")
        queue.push(doStuff);
        return;
    }
    console.log("Nothing in queue, fire right away");
    return doStuff();
}
// Runs one unit of work, then drains the queue: each completed task kicks
// off the next, and `running` is cleared only once the queue is empty.
const doStuff = async () => {
running = true;
const promiseResult = await delay(2000);
console.log(promiseResult);
if (queue.length > 0) {
console.log("There's more in the queue, run the next one now")
// Pop the next queued function and invoke it immediately; its own
// completion continues the drain.
queue.shift()();
} else {
console.log("Queue empty!")
running = false;
}
}
// Simulate five events arriving faster than the 2000 ms processing time,
// so all but the first get queued.
onSocketEvent();
setTimeout(() => onSocketEvent(), 1000);
setTimeout(() => onSocketEvent(), 1500);
setTimeout(() => onSocketEvent(), 2000);
setTimeout(() => onSocketEvent(), 2500);
I would suggest adding a delay between each await. This will prevent deadlocks from occurring and fix your issue. For such things, I would suggest using the Caolan's async library.
Task delay example:
setTimeout(function() { your_function(); }, 5000); // 5 seconds
If your function has no parameters and no explicit receiver, you can call directly setTimeout(func, 5000)
Useful jQuery timers plugin
I am trying to take a rxjs source observable, representing a network connection that pushes me data, and reconnect (by resubscribing to the source observable) if I have not received data within a timeout period. I can certainly write this in a somewhat hacky way, but is there a good way to write this concisely with rxjs?
I ultimately wrote an operator. I think there is a better way to do this, but seeing as how no one else has an idea either, here's the pipeable operator that I wrote:
import { Observable, Subscription } from "rxjs";
/**
 * Pipeable rxjs operator: mirrors the source observable but, whenever no
 * value arrives within `timeout` ms, drops the current subscription and
 * resubscribes to the source (i.e. reconnects).
 *
 * @param timeout          inactivity window in milliseconds
 * @param allowCompletion  if false (default), completion of the source also
 *                         triggers a resubscribe instead of completing
 */
export function retryAfterTimeout<T>(timeout: number, allowCompletion = false): (obs: Observable<T>) => Observable<T> {
return source => new Observable<T>(observer => {
let sub: Subscription | undefined;
let timer: number | undefined;
// Restart the inactivity countdown; called on every emission.
function resetTimer() {
if (timer) clearTimeout(timer);
timer = window.setTimeout(() => resub(), timeout);
}
// (Re)subscribe to the source, dropping any previous subscription.
function resub() {
if (sub) sub.unsubscribe();
sub = source.subscribe({
next(x) {
resetTimer();
observer.next(x);
},
error(err) {
// Errors pass straight through — no retry on error.
observer.error(err);
},
complete() {
if (allowCompletion)
observer.complete();
else
resub();
}
});
}
resub();
resetTimer();
// Teardown: cancel both the live subscription and the pending timer.
return () => {
if (sub) sub.unsubscribe();
if (timer) window.clearTimeout(timer);
};
});
}
I've been using the chrome-promise library to wrap the Chrome extension API with a facade that returns promises instead of using callbacks. This has generally worked quite well, but I seem to be running into an issue with chrome.storage.local APIs.
My extension's event page listens for the chrome.tabs.onActivated and chrome.tabs.onRemoved events. When it gets the onActivated event, it adds the tab info to an array and calls chrome.storage.local.set(data) to store the updated array in local storage.
When it gets the onRemoved event, it calls chromepromise.storage.local.get(null).then(...) to get the list of tabs via a promise, removes the tab info from the array, and then calls chrome.storage.local.set() again to save the updated array.
The issue is that the onActivated event seems to trigger before the promise flow from the onRemoved event resolves. So the onActivated handler retrieves the old stored array, with the closed tab still in it, and then pushes the newly activated tab. So the stored tab data now includes a tab that's already been closed.
I'm assuming this is an issue with using promises instead of callbacks, but I'm wondering if anyone else has run into this problem with this library and worked around it.
Update
As wOxxOm points out, this is a generic problem with "arbitrating unpredictable asynchronous access to a single resource such as chrome.storage" and not unique to the chrome-promise library.
After researching a bit, I came up with a couple solutions, added as answers below. One uses a mutex to ensure (I think) that one promise chain's getting and setting data in chrome.storage completes before the next one starts. The other queues the whole promise chain that's created from an event and doesn't start the next one until the current one has fully completed. I'm not sure which is better, though I suppose locking for a shorter period of time is better.
Any suggestions or better answers are welcome.
Queue
This solution uses a very simple queuing mechanism. The event handlers call queue() with a function that kicks off the promise chain to handle that event. If there isn't already a promise in the queue, then the function is called immediately. Otherwise, it's pushed on the queue and will be triggered when the current promise chain finishes. This means only one event can be processed at a time, which might not be as efficient.
// FIFO of pending task-functions; slot 0 is replaced by the running Promise
// while a task is in flight.
var taskQueue = [];

// Add a task (a function returning a Promise) and start the queue if idle.
function queue(fn)
{
    taskQueue.push(fn);
    processQueue();
}

// Run the task at the head of the queue unless one is already running
// (marked by the head being a Promise rather than a function).
function processQueue()
{
    const nextTask = taskQueue[0];
    if (nextTask && !(nextTask instanceof Promise)) {
        taskQueue[0] = Promise.resolve()
            .then(nextTask)
            .then((result) => {
                console.log("RESULT", result);
            })
            // BUG FIX: a rejected task used to leave its Promise stuck at
            // the head of the queue forever, stalling all later tasks (and
            // raising an unhandled rejection). Log and keep draining.
            .catch((err) => {
                console.log("TASK FAILED", err);
            })
            .then(() => {
                taskQueue.shift();
                processQueue();
            });
    }
}
// Browser-event entry points: each wraps its work in a thunk and queues it,
// so events are handled strictly one at a time in arrival order.
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
queue(() => Promise.resolve(tabID).then(tab => addTab(tab)));
}
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
queue(() => removeTab(tabID));
}
// In-memory stand-in for chrome.storage.local.
var localData = {
    tabs: []
};

// Resolve (with undefined) after `time` ms.
function delay(time) {
    return new Promise((done) => setTimeout(done, time));
}
// Async read of the fake store; returns a deep copy so callers can't
// mutate localData in place.
function getData()
{
return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}
// Async write of the fake store; `source` only labels the log line.
function saveData(data, source)
{
return delay(0)
.then(() => {
localData = data;
console.log("save from:", source, "localData:", localData);
return Promise.resolve(localData);
});
}
// Record tabID as most-recently-activated (dedupe, then append).
function addTab(tabID)
{
return getData().then((data) => {
console.log("addTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
data.tabs.push(tabID);
return saveData(data, "addTab");
});
}
// Drop tabID from the stored list.
function removeTab(tabID)
{
return getData().then((data) => {
console.log("removeTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
return saveData(data, "removeTab");
});
}
// Scripted scenario: open tabs 1-4, reactivate 2, then close 2 (which
// activates 3). delay(0) hops keep each event in its own macrotask so the
// call stack stays shallow.
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();
Mutex
Update: I ended up using the approach below to create a module that uses a mutex to ensure gets and sets of the Chrome extension storage maintain their order. It seems to be working well so far.
This solution uses the mutex implementation from this article. addTab() and removeTab() call storageMutex.synchronize() with a function that does all the storage getting and setting. This should prevent later events from affecting the storage of earlier events.
The code below is a very simplified version of the extension, but it does run. The playNextEvent() calls at the bottom simulate opening 4 tabs, switching back to tab 2 and closing it, which then causes tab 3 to activate. setTimeout()s are used so that everything doesn't run as one long call stack.
/**
 * Simple promise-based mutex: tasks passed to synchronize() run one at a
 * time, in FIFO order.
 */
function Mutex() {
    this._busy = false; // true while a task is executing
    this._queue = [];   // waiting [task, resolve, reject] records
}
Object.assign(Mutex.prototype, {
    /**
     * Schedule `task` (a function returning a value or Promise) to run once
     * every earlier task has settled.
     * @returns {Promise} settles with the task's own outcome
     */
    synchronize: function(task) {
        var self = this;
        return new Promise(function(resolve, reject) {
            self._queue.push([task, resolve, reject]);
            if (!self._busy) {
                self._dequeue();
            }
        });
    },
    // Start the next waiting task, or mark the mutex idle.
    _dequeue: function() {
        var next = this._queue.shift();
        if (next) {
            this._busy = true;
            this._execute(next);
        } else {
            this._busy = false;
        }
    },
    _execute: function(record) {
        var task = record[0],
            resolve = record[1],
            reject = record[2],
            self = this;
        // BUG FIX: calling task() directly let a synchronously-throwing task
        // escape the Promise chain, leaving _busy stuck at true and
        // deadlocking every later synchronize() call. Promise.resolve()
        // .then(task) routes sync throws into the rejection path (and also
        // tolerates tasks that return plain values instead of Promises).
        Promise.resolve().then(task).then(resolve, reject).then(function() {
            self._dequeue();
        });
    }
});
// One shared mutex serializes every get/modify/save cycle on the store.
const storageMutex = new Mutex();
// With the mutex inside addTab/removeTab, the handlers can simply return
// their promise chains — ordering no longer depends on the caller.
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
return Promise.resolve(tabID).then(tab => addTab(tab));
}
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
return removeTab(tabID);
}
// Fake backing store standing in for chrome.storage.local.
var localData = {
    tabs: []
};

// Promisified setTimeout: resolves (with undefined) after `time` ms.
function delay(time) {
    return new Promise(function (resolve) {
        setTimeout(resolve, time);
    });
}
// Async read of the fake store; returns a deep copy so callers can't
// mutate localData in place.
function getData()
{
return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}
// Async write of the fake store; `source` only labels the log line.
function saveData(data, source)
{
return delay(0)
.then(() => {
localData = data;
console.log("save from:", source, "localData:", localData);
return Promise.resolve(localData);
});
}
// Mutex-guarded mutators: the whole read-modify-write cycle runs as one
// critical section, so concurrent events can't interleave their get/save.
function addTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("addTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
data.tabs.push(tabID);
return saveData(data, "addTab");
}));
}
function removeTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("removeTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
return saveData(data, "removeTab");
}));
}
// Same scripted scenario as above: open tabs 1-4, reactivate 2, close 2
// (which activates 3), with delay(0) hops between events.
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();