How to write a handler chain executor (like Express)? - javascript

I am learning about routing libraries like Express, and they all have a common middleware signature:
// The common Express-style middleware shape: do some work, then call
// next() to hand control to the following handler in the chain.
export const middleware = (req, res, next) => {
next()
}
So I am trying to make my own implementation of this to learn what's going on and I'm struggling a bit. Any good resources would also be appreciated
The problem I am specifically trying to solve is the following
// Goal: invoke each handler in order, passing each one a `next` callback
// that triggers the handler after it.
const executeHandlers = (handlers) => {
// Run them in sequence
}
// Expected output: 1, 2, 3 — the last handler never calls next(), which
// simply ends the chain.
executeHandlers([
next => {
console.log(1)
next()
},
next => {
console.log(2)
next()
},
next => {
console.log(3)
}
])
I assume next() is the following handler wrapped in a function, but I am struggling to get it there.

Here's a version written with well-separated parts for clarity (and assuming you want to see console.log(1) get called before console.log(2)); see the comments for details:
// Runs `handlers` in sequence. Each handler receives a `next` callback;
// invoking it starts the following handler. A handler that never calls
// `next` simply ends the chain.
const executeHandlers = (handlers) => {
  // Index of the next handler to invoke.
  let index = 0;

  // Advance the chain: pull the next handler (if any) and call it.
  const next = () => {
    const handler = handlers[index++];
    if (handler) {
      // Guard flag, private to this step, so a handler cannot
      // accidentally advance the chain more than once.
      let advanced = false;
      handler(() => {
        if (advanced) {
          return;
        }
        advanced = true;
        next();
      });
    }
  };

  // Kick off the chain with the first handler.
  next();
};
Live Example:
// Sequential handler runner: each handler gets a `next` callback that
// triggers the following handler exactly once.
const executeHandlers = (handlers) => {
  // Position of the handler to run next.
  let position = 0;
  function next() {
    const current = handlers[position];
    position += 1;
    if (!current) {
      return;
    }
    // Per-step flag so calling `next` twice from one handler is a no-op.
    let used = false;
    current(() => {
      if (!used) {
        used = true;
        next();
      }
    });
  }
  // Start with the first handler.
  next();
};
executeHandlers([
  (next) => {
    console.log(1);
    next();
  },
  (next) => {
    console.log(2);
    next();
  },
  (next) => {
    console.log(3);
  },
]);
There are various spins you could put on that, like handling errors, passing a value from one handler to the next, taking a snapshot of the handlers chain before you start, but that's the basic idea.

Related

stop current run of useEffect and start the next one

I was wondering if there is any way to break the current process of a UseEffect and have it start on the next render, like this
...
useEffect(() => {
SlowFunction(update);
}, [update]);
setUpdate(1)
// a bit of time passes but not long enough for the SlowFunction(1) to be done
setUpdate(2)
//when this is called and the useEffect runs, stop the SlowFunction(1) and run SlowFunction(2)
my updated personal function is called in the use effect like so,
// React state: `update` re-triggers the effect below; `thisConst` holds the result.
const [update, setUpdate] = useState(0);
const [thisConst, setThisConst] = useState(0);
// Cancellable worker: checks the optional AbortSignal before each iteration.
async function SlowFunction(firstParam, paramEtc, { signal } = {}) {
// NOTE(review): `while (true)` never exits normally, so the two statements
// after this loop ("Completed!" log and the return) are unreachable; the
// function can only end via the throwIfAborted() throw below.
while (true) {
//wait two seconds between each
await new Promise((r) => setTimeout(r, 2000));
// Before starting every individual "task" in the function,
// first throw if the signal has been aborted. This will stop the function
// if cancellation occurs:
signal?.throwIfAborted();
// else continue working...
console.log('working on another iteration');
}
console.log('Completed!');
return 'some value';
}
// Effect: run the worker for the current `update`; abort it on cleanup so a
// new render cancels the previous run.
useEffect(() => {
const controller = new AbortController();
const { signal } = controller;
(async () => {
try {
const result = await SlowFunction(update, 'some other value', {
signal,
});
// NOTE(review): `setConst` is not defined here — the setter declared
// above is `setThisConst`; confirm which name is intended.
setConst(result);
} catch (ex) {
console.log('EXCEPTION THROWN: ', ex);
}
})();
// Cleanup: abort the in-flight run before the next effect invocation.
return () => controller.abort(new Error('Starting next render'));
}, [update]);
The AbortSignal API is the standard method for handling cancellation.
I'll provide an example of how to use it with a function like your SlowFunction. You'll need to accept an abort signal as an optional parameter so that when the next render occurs, the function can be cancelled.
Here's an example cancellable function:
// Cancellable long-running worker. Checks the optional AbortSignal before
// every unit of work; once the signal is aborted, throwIfAborted() throws
// signal.reason and the function terminates. Resolves with 'some value'
// if it runs to completion.
async function SlowFunction (firstParam, paramEtc, {signal} = {}) {
  // One million units of simulated work.
  for (let step = 0; step < 1_000_000; step += 1) {
    // Bail out as soon as cancellation is requested.
    signal?.throwIfAborted();
    console.log('working on another iteration');
  }
  return 'some value';
}
You can use it in an effect hook like this: returning a cleanup function which invokes the abort method on the controller:
// Effect that runs the cancellable SlowFunction for the current `update`
// value; the returned cleanup aborts an unfinished run when the next
// render (or unmount) occurs.
useEffect(() => {
const controller = new AbortController();
const {signal} = controller;
(async () => {
try {
const result = await SlowFunction(update, 'some other value', {signal});
// NOTE(review): `setConst` must be a state setter defined in the
// component — confirm against the surrounding code.
setConst(result);
}
catch (ex) {
// Catch the exception thrown when the next render starts
// and the function hasn't completed yet.
// Handle the exception if you need to,
// or do nothing in this block if you don't.
}
})();
// Cleanup: abort with a descriptive reason, which becomes signal.reason.
return () => controller.abort(new Error('Starting next render'));
}, [update]);
If the function completes before the next render occurs, then the abort operation will have no effect, but if it hasn't yet, then the next time that the statement signal?.throwIfAborted(); is reached, the function will throw an exception and terminate.
Update in response to your comment:
If your JavaScript runtime is too old to support the AbortSignal.throwIfAborted() method, you can work around that by replacing that line:
signal?.throwIfAborted();
with:
if (signal?.aborted) {
throw signal?.reason ?? new Error('Operation was aborted');
}

Javascript Class - Chaining async methods and returning "this"

I'm attempting to introduce a queue-type system to a JS class to allow for Async method chaining, ideally, I'd like to perform operations on the class instance using these async methods and return "this" instance.
// Queue of chained async operations. NOTE: instances are *thenable*
// (they have a then() method), so `await queueInstance` hands control to
// that then() — and, as explained in the answer below, resolving the
// internal promise with `this` then triggers the
// "Chaining cycle detected" rejection.
export class Queue {
constructor() {
// Internal promise chain; each chained callback is appended to it.
this.queue = Promise.resolve()
this.firstRequestStarted = false
this.firstRequestStatusCode = 0
this.secondRequestStarted = false
this.secondRequestStatusCode = 0
}
// Makes the instance thenable: awaiting it invokes this with the
// internal promise chain.
then(callback) {
callback(this.queue)
}
// Append `callback` to the internal chain and remember the new tail.
chain(callback) {
return this.queue = this.queue.then(callback)
}
// Queue the first request; returns `this` so calls can be chained.
first() {
this.chain(async () => {
try {
this.firstRequestStarted = true
const response = await axios.get("https://stackoverflow.com/questions")
this.firstRequestStatusCode = response.status
return this
}
catch (e) {
const { message = "" } = e || {}
return Promise.reject({ message })
}
})
return this
}
// Queue the second request; same shape as first().
second() {
this.chain(async () => {
try {
this.secondRequestStarted = true
const response = await axios.get("https://stackoverflow.com/")
this.secondRequestStatusCode = response.status
return this
}
catch (e) {
const { message = "" } = e || {}
return Promise.reject({ message })
}
})
return this
}
}
Functions are added to the queue, and as we await them, the "then" method will handle their execution.
const x = await new Queue()
.first()
.second()
console.log(x)
The challenge I'm facing is that I can never actually get "this" (instance of Queue) back to x.
1) x === undefined
2) "Chaining cycle detected for promise #<Promise>"
or ( I haven't been able to track down where this one is coming from, node error)
3) finished with exit code 130 (interrupted by signal 2: SIGINT)
I have tried adding a "consume" method, which simply returns "this", this leads to error #2 above
// "Consume" method: appends a callback that resolves the chain with the
// instance itself. NOTE(review): because Queue instances are thenable,
// resolving the internal promise with `this` is what produces error #2
// above ("Chaining cycle detected").
me() {
this.chain( () => {
try {
return this
}
catch (e) {
const { message = "" } = e || {}
return Promise.reject({ message })
}
})
return this
}
The confusion on my part, is that if I use any value other than "this", it works as expected
// Variant returning a plain string: this works because "test" is not a
// thenable, so the promise can resolve with it directly.
me() {
this.chain( () => {
try {
return "test"
}
catch (e) {
const { message = "" } = e || {}
return Promise.reject({ message })
}
})
return this
}
x === "test"
I'm also able to return the values associated to this with something like the following
return {...this}
Ideally, I'd like to return the instance of Queue to X, as I plan on modifying the properties of the Queue instance through my async methods, await them, and be returned with an "initialized" instance of Queue.
Any input would be greatly appreciated - thank you!
The problem is that your Queue instances are thenable (have a .then() method), and the promise is tried to be resolved with itself (this.queue). See also here or there.
You have two options:
Do not resolve your promise with the instance, but write
const x = new Queue().first().second();
await x;
console.log(x);
remove the then method from your class, then call
const x = new Queue().first().second().queue;
console.log(x);
(or possibly introduce a getter method - .get(), .toPromise() - instead of directly accessing .queue)

Socket.io multiple handlers for `on` listeners, like in Express

Express lets the developer chain multiple functions as handlers to a single route. From docs:
More than one callback function can handle a route (make sure you
specify the next object). For example:
// Two chained route handlers: the first logs and defers via next(); the
// second sends the response.
app.get('/example/b', function (req, res, next) {
console.log('the response will be sent by the next function ...')
next()
}, function (req, res) {
res.send('Hello from B!')
})
This is great if the developer wants to make validations before proceeding to the final function. That's why middlewares are a thing.
Socket.io, on the other hand, only accepts a single handler.
From #types/socket.io:
on( event: string, listener: Function ): Namespace;
It means I can't have middlewares that are event-specific. I know about io.use for global middlewares, and there's an option to have a middleware per namespace too, but all I want is per-event.
My workaround variations
Option 1: try and catch in every event handler.
try {
validateCurrentPlayer(socket);
} catch (e) {
return handleFailedValidation(socket, e);
}
// ... rest of the code
Pro: readable. Con: super repetitive. It means that every relevant entry point will start with the same 5 lines of code that do exactly the same thing every time.
And if the "middleware" returns values, this is how it looks:
let foo: Something;
try {
[foo] = validateCurrentPlayer(socket);
} catch (e) {
return handleFailedValidation(socket, e);
}
// ... rest of the code, use foo
Option 2: Common validation with conditional return
const validation = validate(socket, () => validateCurrentPlayer(socket));
if (validation.error) {
return;
}
const [foo] = validation.result;
This is validate:
/**
 * Runs `func`, routing any thrown validation error to
 * `handleFailedValidation` and reporting whether it failed.
 *
 * @param socket - the socket to notify on validation failure
 * @param func - validation function to execute
 * @returns `{ error, result }` — `error` is true iff `func` threw;
 *          `result` is its return value (or null on failure)
 */
export function validate<T extends (...args: any) => any>(socket: Socket, func: T): {
  error: boolean;
  result: ReturnType<T>;
} {
  let result: ReturnType<T> = null;
  let error = false;
  try {
    result = func();
  } catch (e) {
    // BUG FIX: the original wrote `catch (error)`, shadowing the outer
    // `error` flag — so the returned `error` was always false and the
    // caller's `if (validation.error) return;` never fired.
    error = true;
    handleFailedValidation(socket, e);
  }
  return {
    result,
    error,
  };
}
As you can see it just wraps the try and catch.
Pro: non-repetitive. Con: Still X lines of code to be copy-pasted into a few handlers.
I don't like these workarounds.
I'm desperately trying to find something similar to the approach of Express, So I could just conditionally call next() if the validation succeeded.
Do you know of any way to do it?
Thanks!

Custom status change events in Javascript

I have an asynchronous function that performs various await tasks. I am trying to inform my UI in React when the status of the function changes or when one of the tasks is completed.
// Desired shape: an async pipeline that emits a status event at each
// await boundary (the comments mark where each event should fire).
const foo = async () => {
// trigger on load event
await task1();
// trigger task1 done event
await task2();
// trigger task2 done event
await task3();
// trigger on done event
}
I also want to be able to specify callbacks for each event, like so:
const bar = foo();
foo.on_load(() => {
// some code goes here
});
foo.on_done(() => {
// some code goes here
});
Another alternative would be something like this:
const bar = foo();
foo.on('status_change', status => {
// read the status here and do something depending on the status
})
I have been reading about custom events in JS but not sure how to use them for this. Or maybe there's another way to do this in React.
Any ideas would be helpful. Thanks!
EDIT
var uploadTask = storageRef.child('images/rivers.jpg').put(file);
// Register three observers:
// 1. 'state_changed' observer, called any time the state changes
// 2. Error observer, called on failure
// 3. Completion observer, called on successful completion
uploadTask.on('state_changed', function(snapshot){
// Observe state change events such as progress, pause, and resume
// Get task progress, including the number of bytes uploaded and the total number of bytes to be uploaded
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log('Upload is ' + progress + '% done');
switch (snapshot.state) {
case firebase.storage.TaskState.PAUSED: // or 'paused'
console.log('Upload is paused');
break;
case firebase.storage.TaskState.RUNNING: // or 'running'
console.log('Upload is running');
break;
}
}, function(error) {
// Handle unsuccessful uploads
}, function() {
// Handle successful uploads on complete
// For instance, get the download URL: https://firebasestorage.googleapis.com/...
uploadTask.snapshot.ref.getDownloadURL().then(function(downloadURL) {
console.log('File available at', downloadURL);
});
});
I was trying to achieve something like the above code, taken from the firebase documentation on uploading files
This is where I've gotten so far:
// Minimal event registry: stores at most one callback per known event
// name ("first" or "second"); unknown names are silently ignored.
class Task {
  constructor() {
    // Callback slots; null until .on() registers a handler.
    this.first = null;
    this.second = null;
  }

  // Register `callback` under `keyword`.
  on(keyword, callback) {
    if (keyword === "first") {
      this.first = callback;
    } else if (keyword === "second") {
      this.second = callback;
    } else {
      // throw new error
    }
  }
}
// Promise-based sleep: resolves (with undefined) after `time` ms.
const timeout = async (time) =>
  new Promise((resolve) => setTimeout(resolve, time));
// Build a Task, start the timed work in the background, and return the
// task immediately so the caller can attach callbacks before they fire.
const foo = () => {
  const task = new Task();
  timeout(2000).then(async () => {
    // Fire "first" after ~2s, then "second" ~2s later (when registered).
    task.first && task.first();
    await timeout(2000);
    task.second && task.second();
  });
  console.log("returning");
  return task;
};
const taskObject = foo();
taskObject.on("first", () => console.log("executing first callback"));
taskObject.on("second", () => console.log("executing second callback"));
Is there a better way to do this - without having the nested thens? Which approach would be better and when? EDIT - removed nested then clauses and replaced with then and await
PS: for my requirements, having callbacks would be sufficient. This is just so I can understand the concept better. Thanks!
I'm going to assume there's a reason for you not simply calling some named method after each async step has complete, i.e., you want to be able to plug in different handlers for each event. Here is one way to go about it - whether or not it's the best is hard to tell from the little context provided:
// Run the task pipeline, notifying the caller-supplied handlers (all
// optional) as each stage starts or finishes.
const foo = async (handlers) => {
  if (handlers.onLoad) {
    handlers.onLoad();
  }
  await task1();
  if (handlers.onTask1Complete) {
    handlers.onTask1Complete();
  }
  await task2();
  if (handlers.onTask2Complete) {
    handlers.onTask2Complete();
  }
}
// Only the hooks you care about need to be present.
const myHandlers = {
  onLoad: () => {
    // do stuff
  },
  onTask1Complete: () => {
    // do other stuff
  },
  onTask2Complete: () => {
    // etc
  }
};
foo(myHandlers);
Note that it lets you specify only the handlers you need. A more flexible approach would be to a publish-subscribe model, where a subscribe method pushes a function to an array of handlers, all of which are called when the event occurs.
The best option would be to make use of promises, which means every time a promise is resolved, you will get notified and then the cascading promise will get executed.
an example below of chaining promises
// Executor for step 3: logs, then resolves; rejects only if the body throws.
var function3 = function (resolve, reject) {
  try {
    //do some thing
    console.log('function3 called');
    resolve('function3 success');
  } catch (err) {
    reject(err);
  }
};

// Executor for step 2: same shape as step 3.
var function2 = function (resolve, reject) {
  try {
    //do some thing
    console.log('function2 called');
    resolve('function2 success');
  } catch (err) {
    reject(err);
  }
};

// Executor for step 1: the entry point of the chain below.
var function1 = function (resolve, reject) {
  try {
    //do some thing
    console.log('function1 called');
    resolve('function1 success');
  } catch (err) {
    reject(err);
  }
};
// Kick off the chain with function1, then run function2 and function3 in
// sequence, logging each resolution value as it arrives.
var promise = new Promise(function1);
promise
  .then(function (response) {
    console.log(response);
    return new Promise(function2);
  }, function (error) {
    console.log(error);
  })
  .then(function (response) {
    console.log(response);
    return new Promise(function3);
  }, function (err) {
    // BUG FIX: the original logged `error`, which is not defined in this
    // handler's scope and would throw a ReferenceError on rejection.
    console.log(err);
  })
  .then(function (response) {
    console.log(response);
  }, function (err) {
    // BUG FIX: same undefined-variable problem as above.
    console.log(err);
  })
//output
"function1 called"
"function1 success"
"function2 called"
"function2 success"
"function3 called"
"function3 success"

In a Chrome extension, how to ensure previous promise resolves before the next one using chrome-promise?

I've been using the chrome-promise library to wrap the Chrome extension API with a facade that returns promises instead of using callbacks. This has generally worked quite well, but I seem to be running into an issue with chrome.storage.local APIs.
My extension's event page listens for the chrome.tabs.onActivated and chrome.tabs.onRemoved events. When it gets the onActivated event, it adds the tab info to an array and calls chrome.storage.local.set(data) to store the updated array in local storage.
When it gets the onRemoved event, it calls chromepromise.storage.local.get(null).then(...) to get the list of tabs via a promise, removes the tab info from the array, and then calls chrome.storage.local.set() again to save the updated array.
The issue is that the onActivated event seems to trigger before the promise flow from the onRemoved event resolves. So the onActivated handler retrieves the old stored array, with the closed tab still in it, and then pushes the newly activated tab. So the stored tab data now includes a tab that's already been closed.
I'm assuming this is an issue with using promises instead of callbacks, but I'm wondering if anyone else has run into this problem with this library and worked around it.
Update
As wOxxOm points out, this is a generic problem with "arbitrating unpredictable asynchronous access to a single resource such as chrome.storage" and not unique to the chrome-promise library.
After researching a bit, I came up with a couple solutions, added as answers below. One uses a mutex to ensure (I think) that one promise chain's getting and setting data in chrome.storage completes before the next one starts. The other queues the whole promise chain that's created from an event and doesn't start the next one until the current one has fully completed. I'm not sure which is better, though I suppose locking for a shorter period of time is better.
Any suggestions or better answers are welcome.
Queue
This solution uses a very simple queuing mechanism. The event handlers call queue() with a function that kicks off the promise chain to handle that event. If there isn't already a promise in the queue, then the function is called immediately. Otherwise, it's pushed on the queue and will be triggered when the current promise chain finishes. This means only one event can be processed at a time, which might not be as efficient.
var taskQueue = [];

// Enqueue an event-processing function. It runs immediately when the
// queue is idle; otherwise it waits for the running chain to finish.
function queue(fn)
{
  taskQueue.push(fn);
  processQueue();
}

// Drain the queue one task at a time.
function processQueue()
{
  const head = taskQueue[0];
  // A plain function at the head means nothing is running yet; a Promise
  // there means a chain is already in flight, so leave it alone.
  if (head && !(head instanceof Promise)) {
    taskQueue[0] = head()
      .then((result) => {
        console.log("RESULT", result);
        // The finished task is removed, then the next one starts.
        taskQueue.shift();
        processQueue();
      });
  }
}
// Tab-activated event: queue a chain that records the tab.
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
queue(() => Promise.resolve(tabID).then(tab => addTab(tab)));
}
// Tab-removed event: queue the removal so it cannot interleave with adds.
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
queue(() => removeTab(tabID));
}
// In-memory stand-in for chrome.storage.local.
var localData = {
  tabs: []
};

// Promise that resolves after `time` ms — forces async boundaries.
function delay(time) {
  return new Promise((resolve) => setTimeout(resolve, time));
}

// Async read: hands back a deep copy so callers cannot mutate the store.
function getData()
{
  return delay(0).then(() => {
    const snapshot = JSON.parse(JSON.stringify(localData));
    return snapshot;
  });
}

// Async write: replaces the store and resolves with the stored value.
function saveData(data, source)
{
  return delay(0)
    .then(() => {
      localData = data;
      console.log("save from:", source, "localData:", localData);
      return localData;
    });
}
// Append tabID to the stored list, moving it to the end if already there.
function addTab(tabID)
{
  return getData().then((data) => {
    console.log("addTab", tabID, "data:", data);
    const remaining = data.tabs.filter(tab => tab != tabID);
    remaining.push(tabID);
    data.tabs = remaining;
    return saveData(data, "addTab");
  });
}

// Drop tabID from the stored list.
function removeTab(tabID)
{
  return getData().then((data) => {
    console.log("removeTab", tabID, "data:", data);
    data.tabs = data.tabs.filter(tab => tab != tabID);
    return saveData(data, "removeTab");
  });
}
// Simulated event script: open tabs 1-4, revisit tab 2, then close it
// (which activates tab 3).
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
// Replay events one per macrotask so handlers interleave realistically
// instead of running as one long call stack.
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();
Mutex
Update: I ended up using the approach below to create a module that uses a mutex to ensure gets and sets of the Chrome extension storage maintain their order. It seems to be working well so far.
This solution uses the mutex implementation from this article. addTab() and removeTab() call storageMutex.synchronize() with a function that does all the storage getting and setting. This should prevent later events from affecting the storage of earlier events.
The code below is a very simplified version of the extension, but it does run. The playNextEvent() calls at the bottom simulate opening 4 tabs, switching back to tab 2 and closing it, which then causes tab 3 to activate. setTimeout()s are used so that everything doesn't run as one long call stack.
// Promise-based mutex: synchronize(task) runs `task` only after every
// previously queued task has settled, serialising access to a shared
// resource.
function Mutex() {
  this._busy = false;
  this._queue = [];
}

Object.assign(Mutex.prototype, {
  // Queue `task` (a function returning a promise) and return a promise
  // that settles with the task's outcome.
  synchronize: function (task) {
    const self = this;
    return new Promise(function (resolve, reject) {
      self._queue.push([task, resolve, reject]);
      // Start draining right away when nothing is currently running.
      if (!self._busy) {
        self._dequeue();
      }
    });
  },

  // Pull the next queued record, or mark the mutex idle.
  _dequeue: function () {
    const pending = this._queue.shift();
    if (!pending) {
      this._busy = false;
      return;
    }
    this._busy = true;
    this._execute(pending);
  },

  // Run one record, settle its promise, then keep draining.
  _execute: function (record) {
    const [task, resolve, reject] = record;
    const self = this;
    task().then(resolve, reject).then(function () {
      self._dequeue();
    });
  }
});
// Single mutex guarding all simulated-storage access below.
const storageMutex = new Mutex();
// Tab-activated event: record the tab (serialised by the mutex inside addTab).
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
return Promise.resolve(tabID).then(tab => addTab(tab));
}
// Tab-removed event: drop the tab from storage.
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
return removeTab(tabID);
}
// In-memory stand-in for chrome.storage.local.
var localData = {
tabs: []
};
// Promise that resolves after `time` ms — forces async boundaries.
function delay(time) {
return new Promise(resolve => setTimeout(resolve, time));
}
// Async read: deep copy so callers cannot mutate the store directly.
function getData()
{
return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}
// Async write: replaces the store and resolves with the stored value.
function saveData(data, source)
{
return delay(0)
.then(() => {
localData = data;
console.log("save from:", source, "localData:", localData);
return Promise.resolve(localData);
});
}
// Get-modify-set wrapped in the mutex so concurrent events cannot
// interleave their reads and writes.
function addTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("addTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
data.tabs.push(tabID);
return saveData(data, "addTab");
}));
}
function removeTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("removeTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
return saveData(data, "removeTab");
}));
}
// Simulated event script: open tabs 1-4, revisit tab 2, then close it
// (which activates tab 3).
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
// Replay events one per macrotask so they do not run as one call stack.
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();

Categories

Resources