I have an asynchronous function that performs various await tasks. I am trying to inform my UI in React when the status of the function changes or when one of the tasks is completed.
// Sketch of the desired flow: an async pipeline that should emit an event
// before/after each awaited step. task1..task3 are placeholders — they are
// not defined anywhere in this snippet.
const foo = async () => {
// trigger on load event
await task1();
// trigger task1 done event
await task2();
// trigger task2 done event
await task3();
// trigger on done event
}
I also want to be able to specify callbacks for each event, like so:
const bar = foo();
foo.on_load(() => {
// some code goes here
});
foo.on_done(() => {
// some code goes here
});
Another alternative would be something like this:
const bar = foo();
foo.on('status_change', status => {
// read the status here and do something depending on the status
})
I have been reading about custom events in JS but not sure how to use them for this. Or maybe there's another way to do this in React.
Any ideas would be helpful. Thanks!
EDIT
// Firebase Storage upload example (quoted from the Firebase docs). The
// returned UploadTask exposes .on('state_changed', next, error, complete) —
// the event-emitter style the question wants to reproduce. Requires the
// firebase SDK plus `storageRef` and `file` in scope; not runnable here.
var uploadTask = storageRef.child('images/rivers.jpg').put(file);
// Register three observers:
// 1. 'state_changed' observer, called any time the state changes
// 2. Error observer, called on failure
// 3. Completion observer, called on successful completion
uploadTask.on('state_changed', function(snapshot){
// Observe state change events such as progress, pause, and resume
// Get task progress, including the number of bytes uploaded and the total number of bytes to be uploaded
var progress = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
console.log('Upload is ' + progress + '% done');
switch (snapshot.state) {
case firebase.storage.TaskState.PAUSED: // or 'paused'
console.log('Upload is paused');
break;
case firebase.storage.TaskState.RUNNING: // or 'running'
console.log('Upload is running');
break;
}
}, function(error) {
// Handle unsuccessful uploads
}, function() {
// Handle successful uploads on complete
// For instance, get the download URL: https://firebasestorage.googleapis.com/...
uploadTask.snapshot.ref.getDownloadURL().then(function(downloadURL) {
console.log('File available at', downloadURL);
});
});
I was trying to achieve something like the above code, taken from the firebase documentation on uploading files
This is where I've gotten so far:
/**
 * Holds up to two event callbacks ("first" and "second"), registered via
 * on(). Slots stay null until a callback is attached.
 */
class Task {
  constructor() {
    // Callback slots; consumers check these for truthiness before calling.
    this.first = null;
    this.second = null;
  }

  /**
   * Register a callback under one of the known keywords.
   * Unknown keywords are silently ignored (same as the original's
   * switch default, which only carried a "throw new error" TODO).
   */
  on(keyword, callback) {
    if (keyword === "first") {
      this.first = callback;
    } else if (keyword === "second") {
      this.second = callback;
    }
  }
}
// Promise-based sleep: resolves (with undefined) after `time` milliseconds.
const timeout = async (time) => new Promise((resolve) => {
  setTimeout(resolve, time);
});
// Returns the Task synchronously; the registered callbacks (if any) fire
// later, after ~2s and ~4s, as the background chain progresses.
const foo = () => {
  const task = new Task();
  const runInBackground = async () => {
    await timeout(2000);
    if (task.first) {
      task.first();
    }
    await timeout(2000);
    if (task.second) {
      task.second();
    }
  };
  runInBackground();
  console.log("returning");
  return task;
};
const taskObject = foo();
taskObject.on("first", () => console.log("executing first callback"));
taskObject.on("second", () => console.log("executing second callback"));
Is there a better way to do this - without having the nested thens? Which approach would be better and when? EDIT - removed nested then clauses and replaced with then and await
PS: for my requirements, having callbacks would be sufficient. This is just so I can understand the concept better. Thanks!
I'm going to assume there's a reason for you not simply calling some named method after each async step has complete, i.e., you want to be able to plug in different handlers for each event. Here is one way to go about it - whether or not it's the best is hard to tell from the little context provided:
/**
 * Runs task1 and task2 in sequence, invoking the matching handler (when
 * provided) at each stage. `handlers` may omit any of: onLoad,
 * onTask1Complete, onTask2Complete.
 *
 * Improvements over the original: a default `= {}` makes a bare `foo()`
 * call safe instead of throwing, and optional chaining replaces the
 * `h.x && h.x()` guards (the file already uses `?.` elsewhere).
 */
const foo = async (handlers = {}) => {
  handlers.onLoad?.();
  await task1();
  handlers.onTask1Complete?.();
  await task2();
  handlers.onTask2Complete?.();
}
const myHandlers = {
onLoad: () => {
// do stuff
},
onTask1Complete: () => {
// do other stuff
},
onTask2Complete: () => {
// etc
}
};
foo(myHandlers);
Note that it lets you specify only the handlers you need. A more flexible approach would be to use a publish-subscribe model, where a subscribe method pushes a function onto an array of handlers, all of which are called when the event occurs.
The best option would be to make use of promises: every time a promise is resolved, you get notified, and then the next promise in the cascade gets executed.
an example below of chaining promises
// Promise executor: logs that it ran, then fulfills with a success message.
// The try/catch forwards any synchronous failure to reject().
var function3 = function (resolve, reject) {
  try {
    // do some thing
    console.log('function3 called');
    resolve('function3 success');
  } catch (err) {
    reject(err);
  }
};
// Promise executor: logs that it ran, then fulfills with a success message.
// (An alternative would be chaining on from here: `return new Promise(function3)`.)
var function2 = function (resolve, reject) {
  try {
    // do some thing
    console.log('function2 called');
    resolve('function2 success');
  } catch (err) {
    reject(err);
  }
};
// Promise executor: logs that it ran, then fulfills with a success message.
var function1 = function (resolve, reject) {
  try {
    // do some thing
    console.log('function1 called');
    resolve('function1 success');
  } catch (err) {
    reject(err);
  }
};
// Chain function1 -> function2 -> function3: each fulfillment handler logs
// the previous step's result and returns a new Promise wrapping the next
// executor, so the steps run strictly in sequence.
var promise = new Promise(function1);
promise
  .then(function (response) {
    console.log(response);
    return new Promise(function2);
  }, function (error) {
    console.log(error);
  })
  .then(function (response) {
    console.log(response);
    return new Promise(function3);
  }, function (err) {
    // BUG FIX: the original logged the undefined identifier `error` here,
    // which would throw a ReferenceError if this handler ever ran.
    console.log(err);
  })
  .then(function (response) {
    console.log(response);
  }, function (err) {
    // BUG FIX: same `error` -> `err` correction as above.
    console.log(err);
  });
//output
"function1 called"
"function1 success"
"function2 called"
"function2 success"
"function3 called"
"function3 success"
Related
I was wondering if there is any way to break the current process of a UseEffect and have it start on the next render, like this
...
useEffect(() => {
SlowFunction(update);
}, [update]);
setUpdate(1)
// a bit of time passes but not long enough for the SlowFunction(1) to be done
setUpdate(2)
//when this is called and the useEffect runs, stop the SlowFunction(1) and run SlowFunction(2)
my updated personal function is called in the use effect like so,
const [update, setUpdate] = useState(0);
const [thisConst, setThisConst] = useState(0);
// Simulated long-running task: loops forever, pausing 2s per iteration, and
// only ever exits by throwing once the provided AbortSignal is aborted.
async function SlowFunction(firstParam, paramEtc, { signal } = {}) {
while (true) {
//wait two seconds between each
await new Promise((r) => setTimeout(r, 2000));
// Before starting every individual "task" in the function,
// first throw if the signal has been aborted. This will stop the function
// if cancellation occurs:
signal?.throwIfAborted();
// else continue working...
console.log('working on another iteration');
}
// NOTE(review): the two statements below are unreachable — `while (true)`
// has no break, so the only exit is the abort exception thrown above.
console.log('Completed!');
return 'some value';
}
// Re-runs whenever `update` changes; the returned cleanup aborts the
// in-flight SlowFunction call so the next render starts a fresh one.
useEffect(() => {
const controller = new AbortController();
const { signal } = controller;
// Fire-and-forget async IIFE: the effect callback itself cannot be async.
(async () => {
try {
const result = await SlowFunction(update, 'some other value', {
signal,
});
// NOTE(review): `setConst` is not defined in this snippet — presumably
// this should be `setThisConst` from the useState call above; confirm.
setConst(result);
} catch (ex) {
console.log('EXCEPTION THROWN: ', ex);
}
})();
// Aborting makes the next `signal?.throwIfAborted()` inside SlowFunction throw.
return () => controller.abort(new Error('Starting next render'));
}, [update]);
The AbortSignal API is the standard method for handling cancellation.
I'll provide an example of how to use it with a function like your SlowFunction. You'll need to accept an abort signal as an optional parameter so that when the next render occurs, the function can be cancelled.
Here's an example cancellable function:
/**
 * Cancellable long-running task. Does one million units of "work",
 * checking the optional AbortSignal before each unit; if the signal has
 * been aborted it throws (via throwIfAborted) and terminates early.
 * Resolves with 'some value' when all iterations complete.
 */
async function SlowFunction (firstParam, paramEtc, {signal} = {}) {
  let iteration = 0;
  while (iteration < 1_000_000) {
    // Bail out as soon as the caller aborts; otherwise keep working.
    signal?.throwIfAborted();
    console.log('working on another iteration');
    iteration += 1;
  }
  return 'some value';
}
You can use it in an effect hook like this: returning a cleanup function which invokes the abort method on the controller:
// Effect tied to `update`: starts the cancellable SlowFunction and, on
// cleanup (next render or unmount), aborts it via the AbortController.
useEffect(() => {
const controller = new AbortController();
const {signal} = controller;
// Async IIFE because the effect callback itself must not be async.
(async () => {
try {
const result = await SlowFunction(update, 'some other value', {signal});
// NOTE(review): `setConst` is not defined in this snippet — presumably a
// state setter such as `setThisConst`; confirm against the component.
setConst(result);
}
catch (ex) {
// Catch the exception thrown when the next render starts
// and the function hasn't completed yet.
// Handle the exception if you need to,
// or do nothing in this block if you don't.
// NOTE(review): this also swallows genuine failures, not just aborts;
// consider checking ex.name === 'AbortError' and rethrowing otherwise.
}
})();
return () => controller.abort(new Error('Starting next render'));
}, [update]);
If the function completes before the next render occurs, then the abort operation will have no effect, but if it hasn't yet, then the next time that the statement signal?.throwIfAborted(); is reached, the function will throw an exception and terminate.
Update in response to your comment:
If your JavaScript runtime is too old to support the AbortSignal.throwIfAborted() method, you can work around that by replacing that line:
signal?.throwIfAborted();
with:
if (signal?.aborted) {
throw signal?.reason ?? new Error('Operation was aborted');
}
I've been trying to find a wrapper that does fetch with retries, timeouts, aborts, etc. I came across https://pastebin.com/54Ct4xEh a little bit ago, and after fixing a couple of typos (missing options. and =>), it works, except... well, maybe it works, but I don't know how to use it. How do I abort a fetch with this particular wrapper? I have a fiddle, https://jsfiddle.net/1fdwb2o6/2/. With this code, how can I, say, click a button and have it abort this fetch loop? For my use case, I'm using Bootstrap, and I have a modal that, when shown, attempts to load dynamic content. If the user clicks Cancel while it's loading, I want the fetch process to stop. From what I can tell, I should be able to do it with the code below... but I'm not sure how to perform the abort. Perhaps this isn't possible, as structured, with a Promise... but I don't know enough (anything) about promises to know better, one way or the other.
// Fetch with automatic retries (question's version). Cancellation works by
// MUTATING the caller's `cancel` option: pass `cancel: myObj` and this
// function attaches `myObj.abort()` to it (re-attached on every retry so it
// always flips the in-flight attempt's local `abort` flag). The RETURN value
// is a plain Promise, so calling `.abort()` on it — as the question attempts
// with `xxx.abort()` — cannot work; only the cancel object can.
const fetchWithRetry = (userOptions) => {
let abort = false;
const options = {
url: '',
options: {},
cancel: {},
retries: 5,
retryDelay: 1000,
...userOptions
};
// Add an abort to the cancel object.
// Note: the spread above copies the caller's `cancel` by reference, so this
// assignment is visible to the caller — that is the whole cancel mechanism.
options.cancel.abort = () => {
abort = true;
};
// Abort or proceed?
return abort ? Promise.reject('aborted') : fetch(options.url).then(response => {
// Reject because of abort
return abort ? Promise.reject('aborted')
// Response is good
: response.ok ? Promise.resolve(response.text())
// Retries exceeded
: !options.retries ? Promise.reject('retries exceeded')
// Retry with one less retry
: new Promise((resolve, reject) => {
setTimeout(() => {
// We use the returned promise's resolve and reject as
// callback so that the nested call propagates backwards.
fetchWithRetry({
...options,
retries: options.retries - 1
}).then(resolve, reject);
}, options.retryDelay);
});
});
}
var xxx;
console.clear();
xxx = fetchWithRetry({
url: "some_file_that_doesnt_exist.php"
})
.then((response) => {
alert(response);
}).catch(function(err) {
// Error: response error, request timeout or runtime error
alert("Error! Cannot load folder list! Please try again!");
});
setTimeout(function() {
// somehow, abort the fetch...
// xxx.abort(); <-- no worky...
}, 1234);
As I said in my comments, the code you have in your question does not provide a cancel() function that the caller can use. It has a cancel() function internally, but that's not something the caller can use. As written that function just returns a promise so the caller has nothing they can call to cancel the retries.
So, I decided to write my own version of fetchWithRetry() that would work for your use case. This has a number of capabilities that the one in your question does not:
It returns both the promise and a cancel function so the caller can cancel the retries.
It allows you to pass the init options for fetch() so you can pass any of the various arguments that fetch() supports and are often needed such as withCredentials.
It has an option to check the response.ok boolean, so it will detect and retry more failures than it would if you required the promise to be rejected before a retry (note: fetch() doesn't reject on a 404, for example).
If there was a fetch() rejection and it was either cancelled or it ran out of retries, then it will use the newer Error class feature where it sets the cause to the actual fetch() error, so the caller can see what the original error was.
Note that this version of fetchWithRetry() returns an object containing both a promise and a cancel function. The caller uses the promise the same way they would any promise from fetch() and they can use the cancel() function to cancel any further retries.
Here's the code:
/**
 * Deferred: a Promise whose resolve/reject are exposed as methods, plus
 * bound then/catch/finally so the Deferred itself is thenable.
 * Works with or without `new` (plain calls are redirected through `new`).
 */
const Deferred = function () {
  if (!(this instanceof Deferred)) {
    return new Deferred();
  }
  let settleResolve;
  let settleReject;
  const inner = new Promise((resolve, reject) => {
    settleResolve = resolve;
    settleReject = reject;
  });
  this.promise = inner;
  this.resolve = settleResolve;
  this.reject = settleReject;
  this.then = inner.then.bind(inner);
  this.catch = inner.catch.bind(inner);
  if (inner.finally) {
    this.finally = inner.finally.bind(inner);
  }
};
/**
 * Fetch with retries AND caller-side cancellation.
 *
 * Returns { promise, cancel }: `promise` settles like a normal fetch()
 * (after up to `retries` re-attempts spaced `retryDelay` ms apart), and
 * `cancel()` stops any further retries. Cancellation takes effect at the
 * next checkpoint: either immediately (by rejecting the retry-delay
 * Deferred) or after the current fetch settles.
 *
 * Options: retries (default 3), retryDelay ms (default 1000),
 * checkResponseOk (default true — treat non-2xx as retryable failures,
 * since fetch() does not reject on e.g. 404). `init` is passed straight
 * through to fetch(). Terminal errors carry the last fetch error as `cause`.
 * Depends on the sibling Deferred helper defined above.
 */
function fetchWithRetry(url, userOptions = {}, init = {}) {
const options = {
// default options values, can be overridden by userOptions
retries: 3,
retryDelay: 1000,
checkResponseOk: true,
...userOptions
};
let cancelled = false;
let timerDeferred;
let timer;
// One attempt; recurses (after a delay) on failure until cancelled or
// retries run out.
function run() {
return fetch(url, init).then(response => {
// force retry on non 2xx responses too
if (options.checkResponseOk && !response.ok) {
throw new Error(`fetch failed with status ${response.status}`);
}
return response;
}).catch(err => {
// got error, set up retry
console.log(err);
if (cancelled) {
throw new Error("fetch cancelled", { cause: err });
}
--options.retries;
if (options.retries < 0) {
throw new Error("fetch max retries exceeded", { cause: err });
}
// create new Deferred object for use with our timer
// so it can be resolved by the timer or rejected
// by the cancel callback
timerDeferred = new Deferred();
timer = setTimeout(() => {
timerDeferred.resolve();
timer = null;
}, options.retryDelay);
return timerDeferred.then(() => {
if (cancelled) {
throw new Error("fetch cancelled", { cause: err });
}
return run();
});
});
}
return {
promise: run(),
cancel: () => {
cancelled = true;
// if currently in a timer waiting, reject immediately
if (timer) {
clearTimeout(timer);
timer = null;
}
if (timerDeferred) {
timerDeferred.reject(new Error("fetch cancelled"));
}
}
}
};
Sample usage:
const result = fetchWithRetry(someUrl);
result.promise.then(resp => {
return resp.text().then(data => {
// got final result here
console.log(data.slice(0, 100));
});
}).catch(err => {
console.log(err);
});
// simulate user cancel after 1.5 seconds
setTimeout(() => {
result.cancel();
}, 1500);
I am absolutely new to Azure Event Hub.
I checked documentation and see that messages are set to console log.
// Azure Event Hub receive sample (quoted from the docs). Requires the
// @azure/event-hubs SDK plus `consumerGroup`, `connectionString`,
// `EventHubConsumerClient` and `earliestEventPosition` in scope.
// Subscribes, logs each event, and tears everything down after 30s.
async function main() {
console.log(`Running receiveEvents sample`);
const consumerClient = new EventHubConsumerClient(consumerGroup, connectionString);
const subscription = consumerClient.subscribe(
{
// The callback where you add your code to process incoming events
processEvents: async (events, context) => {
// Note: It is possible for `events` to be an empty array.
// This can happen if there were no new events to receive
// in the `maxWaitTimeInSeconds`, which is defaulted to
// 60 seconds.
// The `maxWaitTimeInSeconds` can be changed by setting
// it in the `options` passed to `subscribe()`.
for (const event of events) {
console.log(
`Received event: '${JSON.stringify(event.body)}' from partition: '${context.partitionId}' and consumer group: '${context.consumerGroup}'`
);
}
},
processError: async (err, context) => {
console.log(`Error : ${err}`);
}
},
{ startPosition: earliestEventPosition }
);
// Wait for a bit before cleaning up the sample
setTimeout(async () => {
await subscription.close();
await consumerClient.close();
console.log(`Exiting receiveEvents sample`);
}, 30 * 1000);
}
main().catch((error) => {
console.error("Error running sample:", error);
});
I would like to store the `Received event: ${JSON.stringify(event.body)}` message in a variable and then operate on it in a thenable.
How can I do this?
Also is any possibility to filter events?
I haven't found any example.
From what I understood of your question, you want thenable (promise-based) async processing of each event. You can do something like the below.
// Answer version: same Event Hub subscription, but each event is handed to
// an async `processAsync` helper so it can be processed in a thenable chain.
// Requires the @azure/event-hubs SDK plus `consumerGroup`,
// `connectionString`, `eventHubName` and `earliestEventPosition` in scope.
async function main() {
console.log(`Running receiveEvents sample`);
const consumerClient = new EventHubConsumerClient(consumerGroup, connectionString, eventHubName);
const subscription = consumerClient.subscribe(
{
// The callback where you add your code to process incoming events
processEvents: async (events, context) => {
// Note: It is possible for `events` to be an empty array.
// This can happen if there were no new events to receive
// in the `maxWaitTimeInSeconds`, which is defaulted to
// 60 seconds.
// The `maxWaitTimeInSeconds` can be changed by setting
// it in the `options` passed to `subscribe()`.
for (const event of events) {
console.log(
`Received event: '${JSON.stringify(event.body)}' from partition: '${context.partitionId}' and consumer group: '${context.consumerGroup}'`
);
// NOTE(review): this promise is floating — there is no .catch(), so a
// rejection in processAsync becomes an unhandled rejection. Consider
// awaiting it or adding a .catch().
processAsync(event)
.then(e => {
console.log('Done processing event');
});
}
},
processError: async (err, context) => {
console.log(`Error : ${err}`);
}
},
{ startPosition: earliestEventPosition }
);
// Safe to reference from the callbacks above even though it is declared
// afterwards: events only arrive after main()'s synchronous part finishes.
const processAsync = async (event) => {
// do something
// you have access to the full event object
return event;
};
// Wait for a bit before cleaning up the sample
// NOTE(review): 86400 * 1000 ms is 24 hours, not "a bit" — confirm intended.
setTimeout(async () => {
await subscription.close();
await consumerClient.close();
console.log(`Exiting receiveEvents sample`);
}, 86400 * 1000);
}
main().catch((error) => {
console.error("Error running sample:", error);
});
I have some code that basically calls fetch in Javascript. The third party services sometimes take too long to return a response and in an attempt to be more user-friendly, I want to be able to either post a message or stop the connection from being open after N milliseconds.
I had recently come across this post:
Skip the function if executing time too long. JavaScript
But did not have much luck and had issues getting it to work with the below code. I was also hoping that there was a more modern approach to do such a task, maybe using async/await?
// Thin fetch wrapper: shallow-copies the caller's options into a fresh
// init object and forwards both to fetch(), returning its Promise.
module.exports = (url, { ...options } = {}) => fetch(url, { ...options });
You can use a combination of Promise.race and AbortController, here is an example:
// Race a fetch against a timeout. If the timer wins, the returned promise
// resolves with the string "request was not fulfilled in time" and the
// underlying request is aborted; otherwise it settles like fetch() itself.
function get(url, timeout) {
  const controller = new AbortController();
  const request = fetch(url, { signal: controller.signal });
  const timer = new Promise((resolve) => {
    setTimeout(() => {
      resolve("request was not fulfilled in time");
      controller.abort(); // no-op if the request already settled
    }, timeout);
  });
  return Promise.race([request, timer]);
}
(async() => {
const result = await get("https://example.com", 1);
console.log(result);
})();
The native Fetch API doesn't have a timeout built in like something like axios does, but you can always create a wrapper function that wraps the fetch call to implement this.
Here is an example:
/**
 * fetch() with a timeout. Resolves with the Response, or rejects with
 * Error('Request timed out') after `timeout` ms (default 5000), or rejects
 * with the fetch error.
 *
 * Fixes over the original (from davidwalsh.name):
 *  - the original returned the promise AFTER appending empty
 *    .then()/.catch() handlers, so callers always received `undefined`
 *    and errors were silently swallowed — we now return the real promise;
 *  - the URL was hardcoded as the literal string 'url'; it is now an
 *    optional third parameter (defaulting to 'url' for compatibility);
 *  - the timer is also cleared on the error path, so a failed request no
 *    longer leaves a live timeout behind.
 */
const fetchWithTimeout = (timeout, fetchConfig, url = 'url') => {
  const FETCH_TIMEOUT = timeout || 5000;
  let didTimeOut = false;
  return new Promise(function(resolve, reject) {
    const timer = setTimeout(function() {
      didTimeOut = true;
      reject(new Error('Request timed out'));
    }, FETCH_TIMEOUT);
    fetch(url, fetchConfig)
      .then(function(response) {
        // cleanup timeout
        clearTimeout(timer);
        if(!didTimeOut) {
          // fetch request was good
          resolve(response);
        }
      })
      .catch(function(err) {
        clearTimeout(timer);
        // Rejection already happened with setTimeout
        if(didTimeOut) return;
        // Reject with error
        reject(err);
      });
  });
}
from here https://davidwalsh.name/fetch-timeout
I've been using the chrome-promise library to wrap the Chrome extension API with a facade that returns promises instead of using callbacks. This has generally worked quite well, but I seem to be running into an issue with chrome.storage.local APIs.
My extension's event page listens for the chrome.tabs.onActivated and chrome.tabs.onRemoved events. When it gets the onActivated event, it adds the tab info to an array and calls chrome.storage.local.set(data) to store the updated array in local storage.
When it gets the onRemoved event, it calls chromepromise.storage.local.get(null).then(...) to get the list of tabs via a promise, removes the tab info from the array, and then calls chrome.storage.local.set() again to save the updated array.
The issue is that the onActivated event seems to trigger before the promise flow from the onRemoved event resolves. So the onActivated handler retrieves the old stored array, with the closed tab still in it, and then pushes the newly activated tab. So the stored tab data now includes a tab that's already been closed.
I'm assuming this is an issue with using promises instead of callbacks, but I'm wondering if anyone else has run into this problem with this library and worked around it.
Update
As wOxxOm points out, this is a generic problem with "arbitrating unpredictable asynchronous access to a single resource such as chrome.storage" and not unique to the chrome-promise library.
After researching a bit, I came up with a couple solutions, added as answers below. One uses a mutex to ensure (I think) that one promise chain's getting and setting data in chrome.storage completes before the next one starts. The other queues the whole promise chain that's created from an event and doesn't start the next one until the current one has fully completed. I'm not sure which is better, though I suppose locking for a shorter period of time is better.
Any suggestions or better answers are welcome.
Queue
This solution uses a very simple queuing mechanism. The event handlers call queue() with a function that kicks off the promise chain to handle that event. If there isn't already a promise in the queue, then the function is called immediately. Otherwise, it's pushed on the queue and will be triggered when the current promise chain finishes. This means only one event can be processed at a time, which might not be as efficient.
// FIFO task queue: each entry is a function returning a Promise. Only one
// task runs at a time; the head slot is swapped for the in-flight Promise
// while it runs, then shifted off when it settles.
var taskQueue = [];

// Enqueue a task and poke the queue (runs immediately if idle).
function queue(fn) {
  taskQueue.push(fn);
  processQueue();
}

function processQueue() {
  const head = taskQueue[0];
  // A Promise at the head means a task is already running; nothing queued
  // means nothing to do.
  if (!head || head instanceof Promise) {
    return;
  }
  taskQueue[0] = head().then((result) => {
    console.log("RESULT", result);
    taskQueue.shift();
    processQueue();
  });
}
// Simulated chrome.tabs.onActivated handler: queues the add so it cannot
// interleave with a pending remove.
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
queue(() => Promise.resolve(tabID).then(tab => addTab(tab)));
}
// Simulated chrome.tabs.onRemoved handler: queued for the same reason.
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
queue(() => removeTab(tabID));
}
// In-memory stand-in for chrome.storage.local used by this demo.
var localData = {
tabs: []
};
// Promise-based sleep: resolves (with undefined) after `time` ms.
function delay(time) {
  return new Promise((resolve) => {
    setTimeout(resolve, time);
  });
}
// Async read of the fake store; the JSON round-trip returns a deep copy so
// callers can mutate the result without touching localData.
function getData()
{
return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}
// Async write of the fake store; `source` is only used for logging.
function saveData(data, source)
{
return delay(0)
.then(() => {
localData = data;
console.log("save from:", source, "localData:", localData);
return Promise.resolve(localData);
});
}
// Read-modify-write: move tabID to the end of the tabs list (dedup first).
function addTab(tabID)
{
return getData().then((data) => {
console.log("addTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
data.tabs.push(tabID);
return saveData(data, "addTab");
});
}
// Read-modify-write: drop tabID from the tabs list.
function removeTab(tabID)
{
return getData().then((data) => {
console.log("removeTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
return saveData(data, "removeTab");
});
}
// Scripted event sequence: open 4 tabs, revisit tab 2, then close it (which
// activates tab 3) — the case that exposed the original race.
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
// Replays the events with delay(0) gaps so each runs in its own task and
// the whole sequence is not one long synchronous call stack.
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();
Mutex
Update: I ended up using the approach below to create a module that uses a mutex to ensure gets and sets of the Chrome extension storage maintain their order. It seems to be working well so far.
This solution uses the mutex implementation from this article. addTab() and removeTab() call storageMutex.synchronize() with a function that does all the storage getting and setting. This should prevent later events from affecting the storage of earlier events.
The code below is a very simplified version of the extension, but it does run. The playNextEvent() calls at the bottom simulate opening 4 tabs, switching back to tab 2 and closing it, which then causes tab 3 to activate. setTimeout()s are used so that everything doesn't run as one long call stack.
/**
 * Promise-based mutex. synchronize(task) queues `task` — a function
 * returning a value or a Promise — and guarantees queued tasks run one at
 * a time, in FIFO order. Returns a Promise for the task's result.
 */
function Mutex() {
  this._busy = false;   // true while a task is running
  this._queue = [];     // pending [task, resolve, reject] records
}
Object.assign(Mutex.prototype, {
  synchronize: function(task) {
    var self = this;
    return new Promise(function(resolve, reject) {
      self._queue.push([task, resolve, reject]);
      // Only kick the queue when idle; otherwise the running task's
      // completion will dequeue us in turn.
      if (!self._busy) {
        self._dequeue();
      }
    });
  },
  _dequeue: function() {
    var next = this._queue.shift();
    if (next) {
      this._busy = true;
      this._execute(next);
    } else {
      this._busy = false;
    }
  },
  _execute: function(record) {
    var task = record[0],
        resolve = record[1],
        reject = record[2],
        self = this;
    // BUG FIX: the original called task().then(...) directly, so a task that
    // threw synchronously or returned a non-Promise crashed here and wedged
    // the queue (_busy stayed true forever). Promise.resolve().then(task)
    // funnels both cases into normal rejection/fulfillment, and the final
    // .then always advances the queue.
    Promise.resolve().then(task).then(resolve, reject).then(function() {
      self._dequeue();
    });
  }
});
// Single shared mutex guarding all reads/writes of the fake storage below.
const storageMutex = new Mutex();
// Simulated chrome.tabs.onActivated handler; addTab serializes via the mutex.
function onActivated(tabID) {
console.log("EVENT onActivated", tabID);
return Promise.resolve(tabID).then(tab => addTab(tab));
}
// Simulated chrome.tabs.onRemoved handler; removeTab serializes via the mutex.
function onRemoved(tabID) {
console.log("EVENT onRemoved", tabID);
return removeTab(tabID);
}
// In-memory stand-in for chrome.storage.local (mutex variant of the demo).
var localData = {
tabs: []
};
// Promise-based sleep used to make the fake storage calls asynchronous.
function delay(time) {
return new Promise(resolve => setTimeout(resolve, time));
}
// Async read; JSON round-trip returns a deep copy of localData.
function getData()
{
return delay(0).then(() => JSON.parse(JSON.stringify(localData)));
}
// Async write; `source` is only used for logging.
function saveData(data, source)
{
return delay(0)
.then(() => {
localData = data;
console.log("save from:", source, "localData:", localData);
return Promise.resolve(localData);
});
}
// Read-modify-write under the mutex: move tabID to the end of the list.
// The whole get+save runs inside one synchronize() call, so later events
// cannot observe stale data mid-update.
function addTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("addTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
data.tabs.push(tabID);
return saveData(data, "addTab");
}));
}
// Read-modify-write under the mutex: drop tabID from the list.
function removeTab(tabID)
{
return storageMutex.synchronize(() => getData().then((data) => {
console.log("removeTab", tabID, "data:", data);
data.tabs = data.tabs.filter(tab => tab != tabID);
return saveData(data, "removeTab");
}));
}
// Same scripted sequence as the queue demo: open 4 tabs, revisit tab 2,
// close it, which activates tab 3.
const events = [
() => onActivated(1),
() => onActivated(2),
() => onActivated(3),
() => onActivated(4),
() => onActivated(2),
() => { onRemoved(2); onActivated(3) }
];
// Replays events with delay(0) gaps so they don't run as one call stack.
function playNextEvent()
{
var event = events.shift();
if (event) {
delay(0).then(() => { event(); delay(0).then(playNextEvent) });
}
}
playNextEvent();