Stop fetch when using another fetch [duplicate] - javascript

There is a new API for making requests from JavaScript: fetch(). Is there any built in mechanism for canceling these requests in-flight?

TL;DR:
fetch now supports a signal parameter as of 20 September 2017, but not
all browsers seem to support this at the moment.
2020 UPDATE: Most major browsers (Edge, Firefox, Chrome, Safari, Opera, and a few others) support the feature, which has become part of the DOM living standard (as of 5 March 2020).
So you should be able to cancel a request by using an AbortController's AbortSignal.
Long Version
How to:
The way it works is this:
Step 1: You create an AbortController:
const controller = new AbortController()
Step 2: You get the AbortController's signal like this:
const signal = controller.signal
Step 3: You pass the signal to fetch like so:
fetch(urlToFetch, {
method: 'get',
signal: signal, // <------ This is our AbortSignal
})
Step 4: Just abort whenever you need to:
controller.abort();
Here's an example of how it would work (works on Firefox 57+):
<script>
// Create an instance.
const controller = new AbortController()
const signal = controller.signal
/*
// Register a listener.
signal.addEventListener("abort", () => {
console.log("aborted!")
})
*/
function beginFetching() {
console.log('Now fetching');
var urlToFetch = "https://httpbin.org/delay/3";
fetch(urlToFetch, {
method: 'get',
signal: signal,
})
.then(function(response) {
console.log(`Fetch complete. (Not aborted)`);
}).catch(function(err) {
console.error(` Err: ${err}`);
});
}
function abortFetching() {
console.log('Now aborting');
// Abort.
controller.abort()
}
</script>
<h1>Example of fetch abort</h1>
<hr>
<button onclick="beginFetching();">
Begin
</button>
<button onclick="abortFetching();">
Abort
</button>
Sources:
The final version of AbortController has been added to the DOM specification
The corresponding PR for the fetch specification is now merged.
Browser bugs tracking the implementation of AbortController are available here: Firefox: #1378342, Chromium: #750599, WebKit: #174980, Edge: #13009916.

https://developers.google.com/web/updates/2017/09/abortable-fetch
https://dom.spec.whatwg.org/#aborting-ongoing-activities
// setup AbortController
const controller = new AbortController();
// signal to pass to fetch
const signal = controller.signal;
// fetch as usual
fetch(url, { signal }).then(response => {
...
}).catch(e => {
// catch the abort if you like
if (e.name === 'AbortError') {
...
}
});
// when you want to abort
controller.abort();
Works in Edge 16 (2017-10-17), Firefox 57 (2017-11-14), desktop Safari 11.1 (2018-03-29), iOS Safari 11.4 (2018-03-29), Chrome 67 (2018-05-29), and later.
On older browsers, you can use GitHub's whatwg-fetch polyfill and the AbortController polyfill. You can also detect older browsers and use the polyfills conditionally:
import 'abortcontroller-polyfill/dist/abortcontroller-polyfill-only'
import {fetch} from 'whatwg-fetch'
// use native browser implementation if it supports aborting
const abortableFetch = ('signal' in new Request('')) ? window.fetch : fetch

As of Feb 2018, fetch() can be cancelled with the code below on Chrome (read Using Readable Streams to enable Firefox support). No error is thrown for catch() to pick up, and this is a temporary solution until AbortController is fully adopted.
fetch('YOUR_CUSTOM_URL')
.then(response => {
if (!response.body) {
console.warn("ReadableStream is not yet supported in this browser. See https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream")
return response;
}
// get reference to ReadableStream so we can cancel/abort this fetch request.
const responseReader = response.body.getReader();
startAbortSimulation(responseReader);
// Return a new Response object that implements a custom reader.
return new Response(new ReadableStream(new ReadableStreamConfig(responseReader)));
})
.then(response => response.blob())
.then(data => console.log('Download ended. Bytes downloaded:', data.size))
.catch(error => console.error('Error during fetch()', error))
// Here's an example of how to abort request once fetch() starts
function startAbortSimulation(responseReader) {
// abort fetch() after 50ms
setTimeout(function() {
console.log('aborting fetch()...');
responseReader.cancel()
.then(function() {
console.log('fetch() aborted');
})
},50)
}
// ReadableStream constructor requires custom implementation of start() method
function ReadableStreamConfig(reader) {
return {
start(controller) {
read();
function read() {
reader.read().then(({done,value}) => {
if (done) {
controller.close();
return;
}
controller.enqueue(value);
read();
})
}
}
}
}

Let's polyfill:
if (!window.AbortController) {
class AbortController {
constructor() {
this.aborted = false;
this.signal = this.signal.bind(this);
}
signal(abortFn, scope) {
if (this.aborted) {
abortFn.call(scope, { name: 'AbortError' });
this.aborted = false;
} else {
this.abortFn = abortFn.bind(scope);
}
}
abort() {
if (this.abortFn) {
this.abortFn({ reason: 'canceled' });
this.aborted = false;
} else {
this.aborted = true;
}
}
}
const originalFetch = window.fetch;
const customFetch = (url, options) => {
const { signal } = options || {};
return new Promise((resolve, reject) => {
if (signal) {
signal(reject, this);
}
originalFetch(url, options)
.then(resolve)
.catch(reject);
});
};
window.fetch = customFetch;
window.AbortController = AbortController;
}
Please keep in mind that this code is not tested! Let me know if you have tested it and something didn't work. It may give you warnings that you are trying to overwrite the built-in 'fetch' function.
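For illustration, here is a hypothetical usage sketch of the polyfill above (untested, like the polyfill itself). Note that with this polyfill, abort() only rejects the wrapped promise; the underlying network request keeps running:
const controller = new AbortController();
fetch('https://example.com/slow-endpoint', { signal: controller.signal })
  .then((response) => console.log('completed', response))
  .catch((err) => console.log('rejected', err)); // receives { reason: 'canceled' } after abort()
controller.abort(); // rejects the pending promise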

As of now there is no proper solution, as @spro says.
However, if you have an in-flight response and are using ReadableStream, you can close the stream to cancel the request.
fetch('http://example.com').then((res) => {
const reader = res.body.getReader();
/*
* Your code for reading streams goes here
*/
// To abort/cancel HTTP request...
reader.cancel();
});

This works in the browser and Node.js (live browser demo):
const cpFetch= require('cp-fetch');
const url= 'https://run.mocky.io/v3/753aa609-65ae-4109-8f83-9cfe365290f0?mocky-delay=3s';
const chain = cpFetch(url, {timeout: 10000})
.then(response => response.json())
.then(data => console.log(`Done: `, data), err => console.log(`Error: `, err))
setTimeout(()=> chain.cancel(), 1000); // abort the request after 1000ms

Simple TypeScript version (the fetch is actually aborted):
export async function fetchWithTimeout(url: RequestInfo, options?: RequestInit, timeout?: number) {
return new Promise<Response>((resolve, reject) => {
const controller = new AbortController();
const signal = controller.signal;
const timeoutId = setTimeout(() => {
console.log('TIMEOUT');
reject('Timeout');
// Cancel fetch in progress
controller.abort();
}, timeout ?? 5 * 1000);
fetch(url, { ...options, signal })
.then((response) => {
clearTimeout(timeoutId);
resolve(response);
})
.catch((e) => reject(e));
});
}
Maybe you need a polyfill (e.g. IE11):
https://polyfill.io/v3/polyfill.min.js?features=AbortController
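A hypothetical usage sketch of the helper above (the URL and 3000 ms timeout are example values):
fetchWithTimeout('https://example.com/api/data', { method: 'GET' }, 3000)
  .then((response) => response.json())
  .then((data) => console.log(data))
  .catch((err) => console.error('Failed or timed out:', err));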

Javascript JSON-Request timeout [duplicate]

I have a fetch-api POST request:
fetch(url, {
method: 'POST',
body: formData,
credentials: 'include'
})
I want to know: what is the default timeout for this? And how can we set it to a particular value like 3 seconds, or make it indefinite?
Using a Promise.race solution will leave the request hanging: it still consumes bandwidth in the background and, while it is in flight, lowers the number of concurrent requests the browser allows.
Instead, use AbortController to actually abort the request. Here is an example:
const controller = new AbortController()
// 5 second timeout:
const timeoutId = setTimeout(() => controller.abort(), 5000)
fetch(url, { signal: controller.signal }).then(response => {
// completed request before timeout fired
// If you only wanted to timeout the request, not the response, add:
// clearTimeout(timeoutId)
})
Alternatively, you can use the newly added AbortSignal.timeout(5000). (It was poorly supported when first introduced, but all evergreen environments have it now.) You will lose control over manually closing the request, and both upload and download will have to finish within a total time of 5 s.
// a polyfill for it would be:
AbortSignal.timeout ??= function timeout(ms) {
const ctrl = new AbortController()
setTimeout(() => ctrl.abort(), ms)
return ctrl.signal
}
fetch(url, { signal: AbortSignal.timeout(5000) })
AbortController can be used for other things as well, not only fetch but also readable/writable streams. Newer functions (especially promise-based ones) will use it more and more. Node.js has also implemented AbortController in its streams and filesystem APIs, and I know Web Bluetooth is looking into it as well. It can now also be used as an addEventListener option to stop listening when the signal aborts, as shown below.
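A minimal sketch of that addEventListener usage (the 'resize' listener is just an illustration): the listener is removed automatically once the signal aborts.
const controller = new AbortController();
window.addEventListener('resize', () => console.log('resized'), {
  signal: controller.signal, // listener is removed when this signal aborts
});
controller.abort(); // stops listening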
Update: since my original answer is a bit outdated, I now recommend using an abort controller as implemented here: https://stackoverflow.com/a/57888548/1059828, or take a look at this really good post explaining AbortController with fetch: How do I cancel an HTTP fetch() request?
outdated original answer:
I really like the clean approach from this gist using Promise.race
fetchWithTimeout.js
export default function (url, options, timeout = 7000) {
return Promise.race([
fetch(url, options),
new Promise((_, reject) =>
setTimeout(() => reject(new Error('timeout')), timeout)
)
]);
}
main.js
import fetch from './fetchWithTimeout'
// call as usual or with timeout as 3rd argument
// throw after max 5 seconds timeout error
fetch('http://google.com', options, 5000)
.then((result) => {
// handle result
})
.catch((e) => {
// handle errors and timeout error
})
Edit 1
As pointed out in comments, the code in the original answer keeps running the timer even after the promise is resolved/rejected.
The code below fixes that issue.
function timeout(ms, promise) {
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(new Error('TIMEOUT'))
}, ms)
promise
.then(value => {
clearTimeout(timer)
resolve(value)
})
.catch(reason => {
clearTimeout(timer)
reject(reason)
})
})
}
Original answer
It doesn't have a specified default; the specification doesn't discuss timeouts at all.
You can implement your own timeout wrapper for promises in general:
// Rough implementation. Untested.
function timeout(ms, promise) {
return new Promise(function(resolve, reject) {
setTimeout(function() {
reject(new Error("timeout"))
}, ms)
promise.then(resolve, reject)
})
}
timeout(1000, fetch('/hello')).then(function(response) {
// process response
}).catch(function(error) {
// might be a timeout error
})
As described in https://github.com/github/fetch/issues/175
Comment by https://github.com/mislav
Building on Endless' excellent answer, I created a helpful utility function.
const fetchTimeout = (url, ms, { signal, ...options } = {}) => {
const controller = new AbortController();
const promise = fetch(url, { signal: controller.signal, ...options });
if (signal) signal.addEventListener("abort", () => controller.abort());
const timeout = setTimeout(() => controller.abort(), ms);
return promise.finally(() => clearTimeout(timeout));
};
If the timeout is reached before the resource is fetched then the fetch is aborted.
If the resource is fetched before the timeout is reached then the timeout is cleared.
If the input signal is aborted then the fetch is aborted and the timeout is cleared.
const controller = new AbortController();
document.querySelector("button.cancel").addEventListener("click", () => controller.abort());
fetchTimeout("example.json", 5000, { signal: controller.signal })
.then(response => response.json())
.then(console.log)
.catch(error => {
if (error.name === "AbortError") {
// fetch aborted either due to timeout or due to user clicking the cancel button
} else {
// network error or json parsing error
}
});
There's no timeout support in the fetch API yet, but it can be achieved by wrapping it in a promise.
For example:
function fetchWrapper(url, options, timeout) {
return new Promise((resolve, reject) => {
fetch(url, options).then(resolve, reject);
if (timeout) {
const e = new Error("Connection timed out");
setTimeout(reject, timeout, e);
}
});
}
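A hypothetical usage sketch of the wrapper above (URL and 3-second timeout are example values). Note that the underlying fetch keeps running even after the promise rejects:
fetchWrapper('/users', { method: 'GET' }, 3000)
  .then((response) => response.json())
  .then((data) => console.log(data))
  .catch((err) => console.error(err)); // "Connection timed out" or a network error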
If you haven't configured a timeout in your code, it will be the default request timeout of your browser:
1) Firefox - 90 seconds. Type about:config in the Firefox URL field and find the value for the key network.http.connection-timeout.
2) Chrome - 300 seconds (source).
EDIT: The fetch request will still be running in the background and will most likely log an error in your console.
Indeed, the Promise.race approach is better. See Promise.race() for reference.
Race means that all promises run at the same time, and the race settles as soon as one of the promises settles.
Therefore, only one value will be returned, as in the sketch below.
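A minimal sketch of those Promise.race semantics (the delays are arbitrary): whichever promise settles first decides the outcome, and the other result is ignored.
Promise.race([
  new Promise((resolve) => setTimeout(resolve, 100, 'fast')),
  new Promise((resolve) => setTimeout(resolve, 500, 'slow')),
]).then((winner) => console.log(winner)); // logs "fast"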
You could also pass a function to call if the fetch times out.
fetchWithTimeout(url, {
method: 'POST',
body: formData,
credentials: 'include',
}, 5000, () => { /* do stuff here */ });
If this piques your interest, a possible implementation would be:
function fetchWithTimeout(url, options, delay, onTimeout) {
const timer = new Promise((resolve) => {
setTimeout(resolve, delay, {
timeout: true,
});
});
return Promise.race([
fetch(url, options),
timer
]).then(response => {
if (response.timeout) {
onTimeout();
}
return response;
});
}
A cleaner way to do it is actually shown on MDN: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal#aborting_a_fetch_operation_with_a_timeout
try {
await fetch(url, { signal: AbortSignal.timeout(5000) });
} catch (e) {
if (e.name === "TimeoutError") {
console.log('5000 ms timeout');
}
}
Here's a SSCCE using NodeJS which will timeout after 1000ms:
import fetch from 'node-fetch';
const controller = new AbortController();
const timeout = setTimeout(() => {
controller.abort();
}, 1000); // will time out after 1000ms
fetch('https://www.yourexample.com', {
signal: controller.signal,
method: 'POST',
body: formData,
credentials: 'include'
}
)
.then(response => response.json())
.then(json => console.log(json))
.catch(err => {
if(err.name === 'AbortError') {
console.log('Timed out');
}}
)
.finally( () => {
clearTimeout(timeout);
});
Using AbortController and setTimeout:
const abortController = new AbortController();
let timer: number | null = null;
fetch('/get', {
signal: abortController.signal, // connect the signal to the abortController
})
.then(res => {
// response success
console.log(res);
if (timer) {
clearTimeout(timer); // clear timer
}
})
.catch(err => {
if (err instanceof DOMException && err.name === 'AbortError') {
// will return a DOMException
return;
}
// other errors
});
timer = setTimeout(() => {
abortController.abort();
}, 1000 * 10); // Abort request in 10s.
This is a fragment from #fatcherjs/middleware-aborter.
With fatcher, it is easy to abort a fetch request.
import { aborter } from '#fatcherjs/middleware-aborter';
import { fatcher, isAbortError } from 'fatcher';
fatcher({
url: '/bar/foo',
middlewares: [
aborter({
timeout: 10 * 1000, // 10s
onAbort: () => {
console.log('Request is Aborted.');
},
}),
],
})
.then(res => {
// Request success in 10s
console.log(res);
})
.catch(err => {
if (isAbortError(err)) {
//Run error when request aborted.
console.error(err);
}
// Other errors.
});
function fetchTimeout(url, options, timeout = 3000) {
  return new Promise((resolve, reject) => {
    fetch(url, options)
      .then(resolve, reject);
    setTimeout(() => reject(new Error('timeout')), timeout);
  });
}
You can create a timeoutPromise wrapper
function timeoutPromise(timeout, err, promise) {
return new Promise(function(resolve,reject) {
promise.then(resolve,reject);
setTimeout(reject.bind(null,err), timeout);
});
}
You can then wrap any promise
timeoutPromise(100, new Error('Timed Out!'), fetch(...))
.then(...)
.catch(...)
It won't actually cancel an underlying connection but will allow you to timeout a promise.
Reference: Proper error handling tips
Normal practice:
To add timeout support, most of the time it is suggested to introduce a Promise utility function like this:
function fetchWithTimeout(resource, { signal, timeout, ...options } = {}) {
  const controller = new AbortController();
  if (signal != null) signal.addEventListener("abort", () => controller.abort());
  const id = timeout != null ? setTimeout(() => controller.abort(), timeout) : undefined;
  return fetch(resource, {
    ...options,
    signal: controller.signal
  }).finally(() => {
    if (id != null) clearTimeout(id);
  });
}
Calling controller.abort or rejecting the promise inside the setTimeout callback function distorts the stack trace.
This is suboptimal, since one would have to add boilerplate error handlers with log messages in the functions calling the fetch method if post-error log analysis is required.
Good expertise:
To preserve the error along with its stack trace, one can apply the following technique:
function sleep(ms = 0, signal) {
return new Promise((resolve, reject) => {
const id = setTimeout(() => resolve(), ms);
signal?.addEventListener("abort", () => {
clearTimeout(id);
reject();
});
});
}
// assumes nodeFetch is available, e.g. const nodeFetch = require("node-fetch");
async function fetchWithTimeout(
resource,
options
) {
const { timeout, signal, ...ropts } = options ?? {};
const controller = new AbortController();
let sleepController;
try {
signal?.addEventListener("abort", () => controller.abort());
const request = nodeFetch(resource, {
...ropts,
signal: controller.signal,
});
if (timeout != null) {
sleepController = new AbortController();
const aborter = sleep(timeout, sleepController.signal);
const race = await Promise.race([aborter, request]);
if (race == null) controller.abort();
}
return request;
} finally {
sleepController?.abort();
}
}
(async () => {
try {
await fetchWithTimeout(new URL(window.location.href), { timeout: 5 });
} catch (error) {
console.error("Error in test", error);
}
})();
Using c-promise2 lib the cancellable fetch with timeout might look like this one (Live jsfiddle demo):
import CPromise from "c-promise2"; // npm package
function fetchWithTimeout(url, {timeout, ...fetchOptions}= {}) {
return new CPromise((resolve, reject, {signal}) => {
fetch(url, {...fetchOptions, signal}).then(resolve, reject)
}, timeout)
}
const chain = fetchWithTimeout("https://run.mocky.io/v3/753aa609-65ae-4109-8f83-9cfe365290f0?mocky-delay=10s", {timeout: 5000})
.then(request=> console.log('done'));
// chain.cancel(); - to abort the request before the timeout
This code as a npm package cp-fetch

Halt Execution of Network Request If It Takes Too Long?

I have some code that basically calls fetch in JavaScript. The third-party services sometimes take too long to return a response, and in an attempt to be more user-friendly I want to either post a message or close the connection after N milliseconds.
I had recently come across this post:
Skip the function if executing time too long. JavaScript
But did not have much luck and had issues getting it to work with the below code. I was also hoping that there was a more modern approach to do such a task, maybe using async/await?
module.exports = (url, { ...options } = {}) => {
return fetch(url, {
...options
})
}
You can use a combination of Promise.race and AbortController; here is an example:
function get(url, timeout) {
const controller = new AbortController();
return Promise.race([fetch(url, {
signal: controller.signal
}), new Promise(resolve => {
setTimeout(() => {
resolve("request was not fulfilled in time");
controller.abort();
}, timeout)
})]);
}
(async() => {
const result = await get("https://example.com", 1);
console.log(result);
})();
The native Fetch API doesn't have a built-in timeout the way something like axios does, but you can always create a wrapper function around the fetch call to implement one.
Here is an example:
const fetchWithTimeout = (url, timeout, fetchConfig) => {
  const FETCH_TIMEOUT = timeout || 5000;
  let didTimeOut = false;
  return new Promise(function(resolve, reject) {
    const timer = setTimeout(function() {
      didTimeOut = true;
      reject(new Error('Request timed out'));
    }, FETCH_TIMEOUT);
    fetch(url, fetchConfig)
      .then(function(response) {
        // cleanup timeout
        clearTimeout(timer);
        if (!didTimeOut) {
          // fetch request was good
          resolve(response);
        }
      })
      .catch(function(err) {
        // Rejection already happened with setTimeout
        if (didTimeOut) return;
        // Reject with error
        reject(err);
      });
  });
};
from here https://davidwalsh.name/fetch-timeout
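A hypothetical usage of the wrapper above (the URL and 8-second timeout are example values); the caller handles success and failure:
fetchWithTimeout('https://example.com/data.json', 8000, { method: 'GET' })
  .then((response) => response.json())
  .then((data) => console.log(data))
  .catch((err) => console.error(err)); // timeout or network error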

How to raise a Timeout Error in node.js if code takes long time to finish?

In Ruby I can do:
require 'timeout'
Timeout.timeout 10 do
# do smth > 10 seconds
end
It will raise a timeout error to avoid the code locking up. How do I do the same thing in Node.js? Node's setTimeout doesn't fit my need.
One case: when an http.get times out (for example, the network is unstable), I want to set a timeout and handle the failed GET request. I'd like to implement something like #timeout; how should I do it?
try {
timeout(10, function () {
http.get("example.com/prpr")
})
} catch (e) {
if (e.message == "timeout") {
// do smth
} else {
throw e
}
}
You could look into a Promise-based approach here.
Using promises you can pass a function to be executed, and then the standard catch is called if that function raises an exception.
There is a helpful promise-based timeout library on NPM (npm install promise-timeout request-promise), and you could use it in Node something along the lines of...
'use strict';
var promiseTimeout = require('promise-timeout');
var requestPromise = require('request-promise');
promiseTimeout.timeout(requestPromise("http://example.com/prpr"), 10000)
.then(function (result) {
console.log({result});
}).catch(function (err) {
if (err instanceof promiseTimeout.TimeoutError) {
console.error('HTTP get timed out');
}
});
I had a similar situation with NestJS, which is based on Node.js.
When calling an external API, the problem was that my own service slowed down if the call took too long. (If the external API is delayed, my service also ends up waiting forever.)
I figured out 2 ways.
First way:
const result = await axios({
timeout: 10000, // error: [AxiosError: timeout of 10000ms exceeded] { code: 'ECONNABORTED', ...
...
});
Second way: Promise.race()
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/race
// first function
const callAPI = axios({
method: "GET",
url: "http://yourapi",
headers: {
...
}
});
// second function
const timeoutCheck = (s) => {
return new Promise(resolve => setTimeout(resolve, s));
}
// check delay (first function VS second function)
const result = await Promise.race([
callAPI,
timeoutCheck(10000).then(() => {
throw new Error("api not responding for more than 10 seconds");
}),
]);
const { data: { resultCode, resultData } } = result;
You can try this out in your case:
var request = http.get(options, function (res) {
// other code goes here
});
request.setTimeout( 10000, function( ) {
// handle timeout here
});
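For completeness, a sketch of what "handle timeout here" might look like (assuming Node's http module): destroying the request surfaces an 'error' event that you can handle.
request.setTimeout(10000, function () {
  request.destroy(new Error('timeout')); // use request.abort() on older Node versions
});
request.on('error', function (err) {
  if (err.message === 'timeout') {
    // the request was torn down because it took too long
  }
});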

Using Promises with Await/Async Correctly

I'm having some issues understanding how the Promise functionality works. I have previously used Bluebird, but I wanted to try to learn the new async/await standard in order to improve as a programmer. I have used async/await and created promises where I feel appropriate; however, the functions are still executing out of order.
I'm running this on the latest version of Node with Webpack, and I'm not getting any meaningful errors. It runs fine, just not as expected. My output when running it is:
Searching the Web for: Test String
Web search Completed!
Promise { <pending> }
Response Handler Completed!
Ideally I'd like it to respond with:
Searching the Web for: Test String
Response Handler Completed
Web search Completed
And then return the output of my response handler.
Can anyone spot my mistake?
const https = require('https');
// Replace the subscriptionKey string value with your valid subscription key.
const subscriptionKey = '<samplekey>';
const host = 'api.cognitive.microsoft.com';
const path = '/bing/v7.0/search';
const response_handler = async (response) => {
return new Promise((resolve, reject) => {
let body = '';
response.on('data', (d) => {
body += d;
resolve(body);
});
response.on('end', () => {
console.log('\nRelevant Headers:\n');
for (const header in response.headers)
// header keys are lower-cased by Node.js
{
if (header.startsWith('bingapis-') || header.startsWith('x-msedge-')) { console.log(`${header}: ${response.headers[header]}`); }
}
body = JSON.stringify(JSON.parse(body), null, ' ');
//console.log('\nJSON Test Response:\n');
//console.log(body);
});
response.on('error', (e) => {
console.log(`Error: ${e.message}`);
});
console.log('Response Handler Completed!');
});
};
const bing_web_search = async (search) => {
return new Promise((resolve, reject) => {
console.log(`Searching the Web for: ${search}`);
const request_params = {
method: 'GET',
hostname: host,
path: `${path}?q=${encodeURIComponent(search)}&$responseFilter=${encodeURIComponent('Webpages')}&count=${50}`,
headers: {
'Ocp-Apim-Subscription-Key': subscriptionKey,
},
};
const req = https.request(request_params, response_handler);
console.log('Web search Completed!');
console.log(req.body);
req.end();
});
};
module.exports = {
search: async (search) => {
if (subscriptionKey.length === 32) {
const result = await bing_web_search(search);
console.log('Search Completed');
} else {
console.log('Invalid Bing Search API subscription key!');
console.log('Please paste yours into the source code.');
}
},
};
A bit late, but the following should set you on the way; I made changes to the code. If you have any questions, please let me know.
const https = require('https');
// Replace the subscriptionKey string value with your valid subscription key.
const subscriptionKey = '<samplekey>';
const host = 'api.cognitive.microsoft.com';
const path = '/bing/v7.0/search';
const response_handler = (resolve,reject) => (response) => { // no need for async, you return a promise
//this one does not return anything, it's the handler for the response and will resolve
// or reject accordingly
let body = '';
response.on('data', (d) => {
body += d;
//cannot resolve yet, we're not done
// you can resolve on end maybe? I don't know nodejs http
// if end event is called when request fails then end would not
// be the correct way either, better use fetch api
//resolve(body);
});
response.on('end', () => {
console.log('\nRelevant Headers:\n');
for (const header in response.headers)
// header keys are lower-cased by Node.js
{
if (header.startsWith('bingapis-') || header.startsWith('x-msedge-')) { console.log(`${header}: ${response.headers[header]}`); }
}
body = JSON.stringify(JSON.parse(body), null, ' ');
resolve(body);//resolving the promise returned by bing_web_search
//console.log('\nJSON Test Response:\n');
//console.log(body);
});
response.on('error', (e) => {
console.log(`Error: ${e.message}`);
//need to reject with the error
reject(e);
});
console.log('Response Handler Completed!');
};
//no need to specify async, you are not awaiting anything
// you are creating a promise, when using non promise asynchronous
// functions that work with callbacks or event emitting objects
// you need resolve and reject functions so you have to return
// new Promise(
// (resolve,reject)=>somecallbackNeedingFunction((err,result)=>
// err ? reject(err) : resolve(result)
// )
// )
const bing_web_search = (search) => {
return new Promise((resolve, reject) => {
console.log(`Searching the Web for: ${search}`);
const request_params = {
method: 'GET',
hostname: host,
path: `${path}?q=${encodeURIComponent(search)}&$responseFilter=${encodeURIComponent('Webpages')}&count=${50}`,
headers: {
'Ocp-Apim-Subscription-Key': subscriptionKey,
},
};
const req = https.request(
request_params,
response_handler(resolve,reject)//passing this resolve and reject
);
//no, request not completed, we just started
console.log('Web search Completed!');
// console.log(req.body); // nothing to log here
req.end();
});
};
module.exports = {
search: async (search) => {
if (subscriptionKey.length === 32) {
//did not change anything bing_web_search returns a promise
// so you can just await it
const result = await bing_web_search(search);
console.log('Search Completed');
//this will resolve with the results
return result
} else {
console.log('Invalid Bing Search API subscription key!');
console.log('Please paste yours into the source code.');
//the caller of this function can handle the rejection
return Promise.reject('Invalid Bing Search API subscription key!');
}
},
};
[update]
Your comment suggests that you do not call search correctly or do not handle the promise it returns correctly. You have no control over how long a response takes, so in a set of requests the first one issued may return last. This is why you have Promise.all:
const searchObjects = [s1,s2];
const Fail = function(reason){this.reason=reason;};
Promise.all(
searchObjects.map(
searchObject => obj.search(searchObject)
.then(
x=>[x,searchObject]//if resolve just pass result
,err =>new Fail([err,searchObject])//if reject add a Fail object with some detail
)
)
)
.then(
results => {
console.log(
"resolved results:",
results.filter((r) => (r && r.constructor) !== Fail)
);
console.log(
"failed results:",
results.filter((r) => (r && r.constructor) === Fail)
);
}
)
If you have a lot of searches, then maybe you want to throttle the number of requests within a certain time period or the number of active connections, as sketched below. Let me know if you need help with that.
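A minimal batching sketch (a hypothetical helper, reusing obj.search and Fail from above): it runs at most limit searches at a time and waits for each batch to settle before starting the next one.
async function searchAllThrottled(searchObjects, limit = 2) {
  const results = [];
  for (let i = 0; i < searchObjects.length; i += limit) {
    const batch = searchObjects.slice(i, i + limit);
    // settle the whole batch before moving on to the next one
    const settled = await Promise.all(
      batch.map(searchObject =>
        obj.search(searchObject).then(
          x => [x, searchObject],
          err => new Fail([err, searchObject])
        )
      )
    );
    results.push(...settled);
  }
  return results;
}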

fetch retry request (on failure)

I'm using browser's native fetch API for network requests. Also I am using the whatwg-fetch polyfill for unsupported browsers.
However I need to retry in case the request fails. Now there is this npm package whatwg-fetch-retry I found, but they haven't explained how to use it in their docs. Can somebody help me with this or suggest me an alternative?
From the fetch docs:
fetch('/users')
.then(checkStatus)
.then(parseJSON)
.then(function(data) {
console.log('succeeded', data)
}).catch(function(error) {
console.log('request failed', error)
})
See that catch? It will trigger when fetch fails; you can fetch again there.
Have a look at the Promise API.
Implementation example:
function wait(delay){
return new Promise((resolve) => setTimeout(resolve, delay));
}
function fetchRetry(url, delay, tries, fetchOptions = {}) {
function onError(err){
const triesLeft = tries - 1;
if(!triesLeft){
throw err;
}
return wait(delay).then(() => fetchRetry(url, delay, triesLeft, fetchOptions));
}
return fetch(url,fetchOptions).catch(onError);
}
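A hypothetical usage of the helper above (URL, delay, and retry count are example values):
fetchRetry('/users', 500, 3, { method: 'GET' })
  .then((response) => response.json())
  .then((data) => console.log('succeeded', data))
  .catch((error) => console.log('request failed after retries', error));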
Edit 1: as suggested by golopot, p-retry is a nice option.
Edit 2: simplified example code.
I recommend using some library for promise retry, for example p-retry.
Example:
const pRetry = require('p-retry')
const fetch = require('node-fetch')
async function fetchPage () {
const response = await fetch('https://stackoverflow.com')
// Abort retrying if the resource doesn't exist
if (response.status === 404) {
throw new pRetry.AbortError(response.statusText)
}
return response.blob()
}
;(async () => {
console.log(await pRetry(fetchPage, {retries: 5}))
})()
I don't like recursion unless it is really necessary, and managing an exploding number of dependencies is also an issue. Here is another alternative in TypeScript, which is easy to translate to JavaScript.
interface retryPromiseOptions<T> {
retryCatchIf?:(response:T) => boolean,
retryIf?:(response:T) => boolean,
retries?:number
}
function retryPromise<T>(promise:() => Promise<T>, options:retryPromiseOptions<T>) {
const { retryIf = (_:T) => false, retryCatchIf= (_:T) => true, retries = 1} = options
let _promise = promise();
for (var i = 1; i < retries; i++)
_promise = _promise.catch((value) => retryCatchIf(value) ? promise() : Promise.reject(value))
.then((value) => retryIf(value) ? promise() : value);
return _promise;
}
And use it this way...
retryPromise(() => fetch(url),{
retryIf: (response:Response) => true, // you could check before trying again
retries: 5
}).then( ... my favorite things ... )
I wrote this for the fetch API in the browser, which does not reject on a 500. I did not implement a wait. But, more importantly, the code shows how to use composition with promises to avoid recursion.
Javascript version:
function retryPromise(promise, options) {
const { retryIf, retryCatchIf, retries } = { retryIf: () => false, retryCatchIf: () => true, retries: 1, ...options};
let _promise = promise();
for (var i = 1; i < retries; i++)
_promise = _promise.catch((value) => retryCatchIf(value) ? promise() : Promise.reject(value))
.then((value) => retryIf(value) ? promise() : value);
return _promise;
}
Javascript usage:
retryPromise(() => fetch(url),{
retryIf: (response) => true, // you could check before trying again
retries: 5
}).then( ... my favorite things ... )
EDITS: Added js version, added retryCatchIf, fixed the loop start.
One can easily wrap fetch(...) in a loop and catch potential errors (fetch only rejects the returned promise on network errors and the like):
const RETRY_COUNT = 5;
async function fetchRetry(...args) {
let count = RETRY_COUNT;
while(count > 0) {
try {
return await fetch(...args);
} catch(error) {
// logging ?
}
// logging / waiting?
count -= 1;
}
throw new Error(`Too many retries`);
}
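A sketch of the "waiting" part mentioned in the comments above: the same loop with an exponential backoff delay between attempts (the 500 ms base delay is an example value).
async function fetchRetryWithBackoff(...args) {
  for (let attempt = 0; attempt < RETRY_COUNT; attempt++) {
    try {
      return await fetch(...args);
    } catch (error) {
      if (attempt < RETRY_COUNT - 1) {
        // wait 500 ms, 1000 ms, 2000 ms, ... before the next attempt
        await new Promise(resolve => setTimeout(resolve, 500 * 2 ** attempt));
      }
    }
  }
  throw new Error(`Too many retries`);
}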
