I wrote a web crawler with nodejs to send get requests to about 300 urls.
Here is the main loop:
// Intent: fetch forum index pages 1..300 one at a time.
// NOTE(review): getPage (below) does not return a promise, so this
// `await` is a no-op — all 300 requests are fired nearly simultaneously,
// which is the likely cause of the ETIMEDOUT errors and the shuffled
// "page N" output order.
for (let i = 1; i <= 300; i++) {
let page= `https://xxxxxxxxx/forum-103-${i}.html`
await getPage(page,(arr)=>{
// arr: thread URLs scraped from index page i
console.log(`page ${i}`)
})
}
Here is the function getPage(url,callback):
// Fetches `url`, scrapes thread links out of "#threadlisttableid tbody",
// and passes them to `callback`.
// NOTE(review): https.get is callback-based and returns a ClientRequest,
// not a promise, so `await https.get(...)` does not wait for the
// response — the function resolves immediately.
export default async function getPage(url, callback) {
await https.get(url, (res) => {
// Accumulate the response body chunk by chunk.
let html = ""
res.on("data", data => {
html += data
})
res.on("end", () => {
const $ = cheerio.load(html)
let obj = {}
let arr = []
obj = $("#threadlisttableid tbody")
for (let i in obj) {
// Keep only rows whose id starts with "normal" (presumably
// "normalthread_<id>" — TODO confirm against the site's markup).
if (obj[i].attribs?.id?.substr(0, 6) === 'normal') {
// "thread_123" -> "thread-123" -> ".../thread-123-1-1.html"
arr.push(`https://xxxxxxx/${obj[i].attribs.id.substr(6).split("_").join("-")}-1-1.html`)
}
}
callback(arr)
console.log("success!")
})
})
.on('error', (e) => {
// Network-level failures (e.g. connect ETIMEDOUT) land here.
console.log(`Got error: ${e.message}`);
})
}
I use cheerio to analyze the HTML and put all the information I need into a variable named 'arr'.
The program will report an error after running normally for a period of time, like this:
...
success!
page 121
success!
page 113
success!
page 115
success!
Got error: connect ETIMEDOUT 172.67.139.206:443
Got error: connect ETIMEDOUT 172.67.139.206:443
Got error: connect ETIMEDOUT 172.67.139.206:443
Got error: connect ETIMEDOUT 172.67.139.206:443
Got error: connect ETIMEDOUT 172.67.139.206:443
Got error: connect ETIMEDOUT 172.67.139.206:443
I have two questions:
1.What is the reason for the error? Is it because I am sending too many get requests? How can I limit the request frequency?
2. As you can see, the order in which the pages are accessed is chaotic — how can I control it?
I have tried using other modules to send get request (such as Axios) but it didn't work.
The http requests are fired simultaneously because the loop is not waiting for the previous request due to wrong use of await. Proper control of loop will limit the request frequency.
// Fetch the 300 index pages strictly one after another: getPage now
// returns a promise, so `await` really pauses the loop between requests.
// (Tidied: `const` instead of `var` — the value is never reassigned.)
for (let i = 1; i <= 300; i++) {
  const page = `https://xxxxxxxxx/forum-103-${i}.html`;
  // arr: thread URLs scraped from index page i
  const arr = await getPage(page);
  // use arr in the way you want
  console.log(`page ${i}`);
}
/**
 * Fetches `url` and resolves with the list of thread URLs scraped from
 * "#threadlisttableid tbody". Wrapping the callback-based https.get in a
 * promise is what lets callers `await getPage(...)`.
 *
 * @param {string} url - forum index page to download
 * @returns {Promise<string[]>} thread URLs; rejects on network error
 */
export default function getPage(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      // Accumulate the response body chunk by chunk.
      let html = "";
      res.on("data", (chunk) => {
        html += chunk;
      });
      res.on("end", () => {
        const $ = cheerio.load(html);
        const rows = $("#threadlisttableid tbody");
        const arr = [];
        for (const i in rows) {
          // Keep only rows whose id starts with "normal"
          // (presumably "normalthread_<id>" — TODO confirm).
          if (rows[i].attribs?.id?.substr(0, 6) === 'normal') {
            // "thread_123" -> "thread-123" -> ".../thread-123-1-1.html"
            arr.push(`https://xxxxxxx/${rows[i].attribs.id.substr(6).split("_").join("-")}-1-1.html`);
          }
        }
        resolve(arr); // Resolve with arr
        console.log("success!");
      });
    })
      .on('error', (e) => {
        console.log(`Got error: ${e.message}`);
        // BUG FIX: the original did `throw e` here. Throwing inside this
        // asynchronous callback never rejects the promise — it becomes an
        // uncaught exception and the awaiting caller hangs forever.
        reject(e);
      });
  });
}
As you can see, The order in which the pages are accessed is chaotic,how to control them?
await is meaningless unless you put a promise on the right hand side. http.get does not deal in promises.
You could wrap it in a promise but it would be easier to use an API which supports then natively such as node-fetch, axios, or Node.js's native fetch. (They all have APIs that are, IMO, easier to use than http.get in general, not just with regard to flow control.)
What is the reason for the error?
It isn't clear.
Is it because I am sending too many get requests?
That is a likely hypothesis.
How can I limit the request frequency?
Once you have your for loop working with promises so the requests are sent in serial instead of parallel, you can insert a sleep between each request.
Related
I am trying to use axios and express to get an array of links from a page, harvest data at each link, and display results to the user. The process I'd like to implement is:
Run axios.get(targetPage), harvest the links on the target page, save to an array
Run run axios.all to get a response from link
Harvest data from each response
Display to user
Below is my app.get function:
// Route: scrape links for the requested year, fetch each link, collect
// titles, render the results.
app.get('/search', function (req, res) {
var context = {};
context.resources = [];
var promises = [];
var year = req.query.year;
var targetPage = endpoint + year;
axios.get(targetPage).then(resp => {
var $ = cheerio.load(resp.data);
var pages = []
$('queryStr').each(function (i, ele) { pages.push(endpoint + $(ele).attr("href")) });
context.links = pages;
pages.forEach( link => promises.push( axios.get(link) ));
// NOTE(review): nothing is returned from this handler, so the chain
// below does not wait on anything produced here.
}).then(axios.all(promises)).then( responses => {
// NOTE(review): .then() expects a FUNCTION. axios.all(promises) is
// invoked immediately (while `promises` is still empty) and the
// resulting promise is passed to .then, which ignores non-function
// arguments — hence `responses` arrives as undefined here.
responses.forEach( resp => {
var resource = {};
resource.link = resp.url;
var $ = cheerio.load(resp.data)
resource.title = $('query').text()
context.resources.push(resource);
})
}).then( () => {
res.render('search', context);
})
})
I have verified that the urls in the pages[] array are valid. I have tried calling res.render after the first axios.get call, and it successfully rendered my response. I have separately tested the logic in the forEach and verified that it works for each url. I'm getting an error message at the then block immediately following axios.all, which states that the responses value returned by axios.all is undefined. Here is the error message:
UnhandledPromiseRejectionWarning: TypeError: Cannot read property 'forEach' of undefined
Thanks for reading.
[TLDR]
This is because the function of your first then clause doesn't have a return statement.
This means there is no successful data passed to the next then clause and so on.
then chaining requires return for each then clause.
Example
There's many problems in my original post, but the main issue was failure to return a Promise in each step of my .then chain. Below is working code, significantly refactored. I'm sure there's better ways of passing along errors and loading in parallel, so please leave answers and comments with any improvements. In addition to the resource linked to by Nazim Kerimbekov and Khoa, I recommend this post for learning how to chain promises.
// Route: fetch the initial page for the requested year, load every
// linked resource in parallel, then render the collected results.
// (Tidied: `const` instead of `var`; consistent arrow callbacks.)
app.get('/pageToRender', function (req, res) {
  const context = {};
  const page1 = req.query.year;
  getInitial(page1)
    // Fan out: one getResource() promise per scraped page.
    .then((pages) => Promise.all(pages.map((page) => getResource(page))))
    .then((resources) => {
      context.resources = resources;
      res.render('pageToRender', context);
    })
    // Any failure in getInitial or any getResource lands here.
    .catch((errors) => { console.log(errors); });
})
/**
 * Fetches `page` and resolves with the absolute URLs found in the
 * ".mw-content-ltr li a" links.
 *
 * FIX: axios.get already returns a promise, so wrapping it in
 * `new Promise(...)` (the explicit-construction anti-pattern) is
 * unnecessary — returning the chain propagates rejections automatically.
 *
 * @param {string} page - URL to scrape
 * @returns {Promise<string[]>}
 */
function getInitial(page) {
  return axios.get(page).then((resp) => {
    const pages = [];
    const $ = cheerio.load(resp.data);
    $('.mw-content-ltr li a').each(function (i, ele) {
      pages.push(endpoint + $(ele).attr("href"));
    });
    console.log(pages);
    return pages;
  });
}
/**
 * Fetches one resource page and resolves with { link, title }.
 *
 * BUG FIX: the original wrapped axios in `new Promise`, and its
 * `.catch(error => { error })` neither resolved nor rejected — so any
 * failed request left the promise pending FOREVER, hanging the
 * Promise.all in the route. Returning the axios chain directly lets
 * rejections propagate to the route's .catch.
 *
 * @param {string} page - URL to fetch
 * @returns {Promise<{link: string, title: string}>}
 */
function getResource(page) {
  return axios.get(page).then((response) => {
    const resource = {};
    resource.link = page;
    const $ = cheerio.load(response.data);
    resource.title = $('.Title').text();
    console.log("resolving resource", resource);
    return resource;
  });
}
I'm getting a "deadline-exceeded" error on the frontend when calling a firebase callable cloud function (onCall).
I know that I have to return a Promise so the function knows when to clean itself, but it is still not working.
After 60 seconds, "deadline-exceeded" is thrown to the frontend, but the function keeps running on the server and finishes with success. All batch operations are written to the firestore.
10:37:14.782 AM
syncExchangeOperations
Function execution took 319445 ms, finished with status code: 200
10:36:57.323 AM
syncExchangeOperations
Function execution started
10:36:57.124 AM
syncExchangeOperations
Function execution took 170 ms, finished with status code: 204
10:36:56.955 AM
syncExchangeOperations
Function execution started
// Syncs a user's Binance trades/deposits/withdrawals into one Firestore
// batch, runs the three collectors in parallel, then commits the batch.
// NOTE(review): the helpers presumably only STAGE writes on `batch`
// (committed once at the end) — confirm against their implementations.
async function syncBinanceOperations(
userId,
userExchange,
userExchangeLastOperations,
systemExchange
) {
try {
// Binance API client built from the user's stored credentials.
const client = Binance({
apiKey: userExchange.apiKey,
apiSecret: userExchange.privateKey
});
const batch = admin.firestore().batch();
// Normalize "no previous operations" to `false`.
const lastOperations = userExchangeLastOperations
? userExchangeLastOperations
: false;
const promises = [];
promises.push(
syncBinanceTrades(client, lastOperations, userId, systemExchange, batch)
);
promises.push(
syncBinanceDeposits(client, lastOperations, userId, systemExchange, batch)
);
promises.push(
syncBinanceWhitdraws(
client,
lastOperations,
userId,
systemExchange,
batch
)
);
// Record the sync timestamp (epoch milliseconds via moment 'x').
promises.push(
updateUserExchange(userId, userExchange.id, {
lastSync: moment().format('x')
})
);
await Promise.all(promises);
// NOTE(review): returning batch.commit() sends its WriteResult array to
// the callable's serializer; awaiting it and returning a plain object
// would be safer (see answer below).
return batch.commit();
} catch (error) {
return handleErrors(error);
}
}
// Callable entry point: authenticates the caller, checks the plan,
// resolves the exchange, and dispatches to the per-exchange sync.
exports.syncExchangeOperations = functions.https.onCall(
async (data, context) => {
try {
userAuthenthication(data.userId, context.auth);
let user = await getUser(data.userId);
if (!user.plan.benefits.syncExchanges) {
// NOTE(review): throwing a string (Portuguese: "Operation not
// authorized for the subscribed plan") loses the stack; prefer
// `throw new functions.https.HttpsError(...)` for callables.
throw 'Operação não autorizada para o plano contratado';
}
let userExchange = await getUserExchange(data.userId, data.exchangeId);
// Fetch the last-synced operations and exchange metadata in parallel.
let response = await Promise.all([
getUserLastOperations(data.userId, userExchange.exchangeId),
getSystemExchange(userExchange.exchangeId)
]);
let userExchangeLastOperations = response[0];
let systemExchange = response[1];
switch (systemExchange.id) {
case 'binance':
// NOTE(review): this long-running sync exceeds the 60s client-side
// callable timeout — see the runWith/HttpsCallableOptions answer.
return syncBinanceOperations(
user.id,
userExchange,
userExchangeLastOperations,
systemExchange
);
}
} catch (error) {
return handleErrors(error);
}
}
);
It works fine if I change this function to a HTTP request. It waits the function to finish and returns.
// HTTP-request variant of the same endpoint: verifies the bearer token,
// runs the sync, and replies 200 when done (no callable timeout applies).
exports.syncExchangeOperations = functions
.runWith(runtimeOpts)
.https.onRequest((req, res) => {
return cors(req, res, async () => {
try {
// Validate the Firebase ID token from the Authorization header.
let auth = await admin.auth().verifyIdToken(req.get('Authorization').split('Bearer ')[1]);
let userExchange = await getUserExchange(
auth.uid,
req.query.exchangeId
);
// Fetch last operations and exchange metadata in parallel.
let response = await Promise.all([
getUserLastOperations(auth.uid, userExchange.exchangeId),
getSystemExchange(userExchange.exchangeId)
]);
let userExchangeLastOperations = response[0];
let systemExchange = response[1];
switch (systemExchange.id) {
case 'binance':
await syncBinanceOperations(
auth.uid,
userExchange,
userExchangeLastOperations,
systemExchange
);
}
res.status(200).send();
} catch (error) {
// NOTE(review): 401 is returned for ALL failures here, not just
// auth failures — consider distinguishing status codes.
res.status(401).send(handleErrors(error));
}
});
});
The "deadline-exceeded" that you encountered is an error thrown by the Firebase Javascript library on the client (not the function itself). The Firebase docs are lacking documentation on how to use functions.runWithOptions() on a callable function. For some reason the functions().httpsCallable() has a built-in timeout on the client side.
So if you use this on your Node.js function:
// Raise the SERVER-side timeout for this callable to 180s (default 60s).
// The client-side httpsCallable timeout must be raised separately.
exports.testFunction = functions.runWith({ timeoutSeconds: 180 }).https.onCall(async (data, ctx) => {
// Your Function Code that takes more than 60second to run
});
You need to override the built-in JavaScript library timeout on the client like this:
let testFunction = firebase.functions().httpsCallable("testFunction", {timeout: 180000});
I don't know what is the purpose of the built in timeout on the client, for me it has no purpose since it doesn't even stop the execution of the function on the server. But it must be there for some internal reasons.
Notice the Node.js timeoutSeconds is in seconds and the timeout option on the client library is in milliseconds.
"Deadline exceeded" means that the function invocation timed out from the perspective of the client. The default is 60 seconds.
Try increasing the timeout on both the client and function so that it has time to complete before the client timeout is reached. You can do this by specifying it in an HttpsCallableOptions object.
Also try returning something other than batch.commit(). Whatever that function return will be serialized and sent to the client, which could cause problems. Instead, just await batch.commit() then return something predictable, like a plain JavaScript object.
See the API documentation for information on setting the timeout:
https://firebase.google.com/docs/reference/js/firebase.functions.Functions#https-callable
https://firebase.google.com/docs/reference/js/firebase.functions.HttpsCallableOptions.html#timeout
I am new to JavaScript and the npm world. I try to upload some data to my REST service via a REST post call. These data I fetch from a csv file. So far so good. On each fetched line I convert the data (for my needs) and call the REST API for uploading those. Since I have many lines (approx. 700) the API gets called quite often consecutively. After some calls (guess 500 or so) I get a socket error
events.js:136
throw er; // Unhandled 'error' event
^
Error: connect ECONNRESET 127.0.0.1:3000
at Object._errnoException (util.js:999:13)
at _exceptionWithHostPort (util.js:1020:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1207:14)
I guess this is because I call the REST API to often. What I don't understand is:
How should I make the call synchronously in order to avoid so many connections?
Or should't I?
What would be the proper solution in JS for this?
I have tried with Promises and so on but all this didn't helped but moved the issue some function calls priorly...
This is my code:
// Process CSV files one at a time (pop -> parse -> recurse on 'done').
readCsv()
function readCsv() {
var csvFile = csvFiles.pop()
if (csvFile) {
// NOTE(review): `await` inside this 'json' handler does NOT pause the
// csv parser — one handler is spawned per row, so hundreds of REST
// calls end up in flight at once (the likely cause of ECONNRESET).
csv({ delimiter: ";" }).fromFile(csvFile).on('json', async (csvRow) => {
if (/.*\(NX\)|.*\(NI\)|.*\(NA\)|.*\(WE\)|.*\(RA\)|.*\(MX\)/.test(csvRow["Produkt"])) {
var data = await addCallLog(
csvRow["Datum"],
csvRow["Zeit"],
csvRow["Menge-Zeit"],
csvRow["Zielrufnummer"],
csvRow["Produkt"]);
}
}).on('done', (error) => {
//console.log('end')
readCsv()
})
} else {
}
}
// Promise wrapper around the callback-style REST client call.
function addCallLog(date, time, duration, number, product) {
return new Promise(resolve => {
args.data = { number: number, name: "", timestamp: getTimestamp(date, time), duration: getDuration(duration), type: "OUTGOING" }
client.methods.addCallLog(args, (data, response) => {
// client.methods.getCallLog((data, response) => {
// console.log(data)
// })
//console.log("addCallLog resolve")
// NOTE(review): errors from the client are never surfaced — this
// promise always resolves.
resolve(data)
})
})
}
As you can see I had the same issue with reading more than one csv files in parallel. I solved this by calling recursively the readCsv function and pop the next file after the other when the file read was done.
You can't call things synchronously. But, you can sequence the async REST calls which is what I presume you mean.
A problem here is that await addCallLog() won't keep the next json events from being generated so you will end with a zillion requests in flight at the same time and apparently you have so many that you run out of resources.
One way around that is to collect the rows you want into an array and then use a regular for loop to iterate that array and you can use await sucessfully in the for loop. Here's what that would look like:
// Process CSV files one at a time; rows are first COLLECTED, then
// uploaded strictly sequentially in the 'done' handler, where `await`
// actually serializes the REST calls.
readCsv()
function readCsv() {
var csvFile = csvFiles.pop()
if (csvFile) {
let rows = [];
// Phase 1: just buffer the matching rows — no network calls here.
csv({ delimiter: ";" }).fromFile(csvFile).on('json', (csvRow) => {
if (/.*\(NX\)|.*\(NI\)|.*\(NA\)|.*\(WE\)|.*\(RA\)|.*\(MX\)/.test(csvRow["Produkt"])) {
rows.push(csvRow);
}
}).on('done', async (error) => {
// Phase 2: one request at a time — await works in a for...of loop.
for (let csvRow of rows) {
var data = await addCallLog(
csvRow["Datum"],
csvRow["Zeit"],
csvRow["Menge-Zeit"],
csvRow["Zielrufnummer"],
csvRow["Produkt"]
);
}
// Move on to the next file only after this one is fully uploaded.
readCsv();
})
} else {
}
}
// Promise wrapper around the callback-style REST client call.
// NOTE(review): still no error path — the promise always resolves.
function addCallLog(date, time, duration, number, product) {
return new Promise(resolve => {
args.data = { number: number, name: "", timestamp: getTimestamp(date, time), duration: getDuration(duration), type: "OUTGOING" }
client.methods.addCallLog(args, (data, response) => {
// client.methods.getCallLog((data, response) => {
// console.log(data)
// })
//console.log("addCallLog resolve")
resolve(data)
})
})
}
Your coding appears to be missing error handling. The client.methods.addCallLog() needs a way to communicate back an error.
You probably also need a error event handler for the csv iterator.
After filling the buffer in a prev. function I check that buffer for data and upload those one by one using the "then" callback of the promise
// Drain callLogBuffer one entry at a time: each upload's .then schedules
// the next, so requests never overlap.
var callLogBuffer = []
checkForUpload()
function checkForUpload() {
console.log("checkForUpload")
if (callLogBuffer.length > 0) {
// Recurse only after the previous upload resolves.
addCallLog(callLogBuffer.pop()).then((data) => {
checkForUpload()
})
}
}
// Promise wrapper around the callback-style REST client call.
// NOTE(review): no reject path — a failed upload stalls the drain loop
// silently.
function addCallLog(callLog) {
return new Promise(resolve => {
args.data = { number: callLog.number, name: "", timestamp: getTimestamp(callLog.date, callLog.time), duration: getDuration(callLog.duration), type: "OUTGOING" }
client.methods.addCallLog(args, (data, response) => {
// client.methods.getCallLog((data, response) => {
// console.log(data)
// })
//console.log("addCallLog resolve")
resolve(data)
})
})
}
This question already has answers here:
How do I return the response from an asynchronous call?
(41 answers)
Closed 3 months ago.
I am trying to use a Node.JS application to make and receive API requests. It does a get request to another server using Axios with data it receives from an API call it receives. The second snippet is when the script returns the data from the call in. It will actually take it and write to the console, but it won't send it back in the second API.
// NOTE(review): the `return response.data` below returns from the .then
// CALLBACK, not from axiosTest — and axiosTest itself returns nothing,
// so callers always receive undefined.
function axiosTest() {
axios.get(url)
.then(function (response) {
console.log(response.data);
// I need this data here ^^
return response.data;
})
.catch(function (error) {
console.log(error);
});
}
...
// NOTE(review): axiosTest() returns undefined (no return statement), so
// axiosTestResult is undefined and the JSON reply is sent before the GET
// completes. Also an implicit global — should be declared.
axiosTestResult = axiosTest();
response.json({message: "Request received!", data: axiosTestResult});
I'm aware this is wrong, I'm just trying to find a way to make it work. The only way I can seem to get data out of it is through console.log, which isn't helpful in my situation.
The issue is that the original axiosTest() function isn't returning the promise. Here's an extended explanation for clarity:
// Fixed version: the promise chain is RETURNED, so the caller can attach
// .then/.catch to it.
function axiosTest() {
// create a promise for the axios request
const promise = axios.get(url)
// using .then, create a new promise which extracts the data
const dataPromise = promise.then((response) => response.data)
// return it
return dataPromise
}
// now we can use that data from the outside!
axiosTest()
.then(data => {
response.json({ message: 'Request received!', data })
})
.catch(err => console.log(err))
The function can be written more succinctly:
// Same contract, spelled out with an intermediate variable: resolves
// with just the payload of the GET response.
function axiosTest() {
  const request = axios.get(url);
  return request.then((res) => res.data);
}
Or with async/await:
// async/await form: resolves with the response payload.
async function axiosTest() {
  const { data } = await axios.get(url);
  return data;
}
Guide on using promises
Info on async functions
I know this post is old. But i have seen several attempts of guys trying to answer using async and await but getting it wrong. This should clear it up for any new references
UPDATE: May 2022
This answer is still having lots of interest and have updated it to use arrow functions
// BUG FIX: `async () {` is a syntax error — an arrow function needs `=>`.
// Resolves with the response payload; on failure it logs the error and
// resolves with undefined (preserving the original author's intent).
const axiosTest = async () => {
  try {
    // use data destructuring to get data from the promise object
    const { data: response } = await axios.get(url);
    return response;
  } catch (error) {
    console.log(error);
  }
};
you can populate the data you want with a simple callback function,
let's say we have a list named lst that we want to populate,
we have a function that populates the list,
// Accumulator list plus a callback that appends incoming payloads to it.
const lst = [];
function populateData(data) {
  lst.push(data);
}
now we can pass the callback function to the function which is making the axios call, and we can populate the list when we get data from the response.
now we make our function that makes the request and pass populateData as a callback function.
// Makes the GET request and hands the payload to the injected callback
// instead of returning it.
function axiosTest (populateData) {
axios.get(url)
.then(function(response){
populateData(response.data);
})
.catch(function(error){
// NOTE(review): errors are only logged; the callback is never told.
console.log(error);
});
}
The axios library creates a Promise() object. Promise is a built-in object in JavaScript ES6. When this object is instantiated using the new keyword, it takes a function as an argument. This single function in turn takes two arguments, each of which are also functions — resolve and reject.
Promises execute the client side code and, due to cool Javascript asynchronous flow, could eventually resolve one or two things, that resolution (generally considered to be a semantically equivalent to a Promise's success), or that rejection (widely considered to be an erroneous resolution). For instance, we can hold a reference to some Promise object which comprises a function that will eventually return a response object (that would be contained in the Promise object). So one way we could use such a promise is wait for the promise to resolve to some kind of response.
You might raise we don't want to be waiting seconds or so for our API to return a call! We want our UI to be able to do things while waiting for the API response. Failing that we would have a very slow user interface. So how do we handle this problem?
Well a Promise is asynchronous. In a standard implementation of engines responsible for executing Javascript code (such as Node, or the common browser) it will resolve in another process while we don't know in advance what the result of the promise will be. A usual strategy is to then send our functions (i.e. a React setState function for a class) to the promise, resolved depending on some kind of condition (dependent on our choice of library). This will result in our local Javascript objects being updated based on promise resolution. So instead of getters and setters (in traditional OOP) you can think of functions that you might send to your asynchronous methods.
I'll use Fetch in this example so you can try to understand what's going on in the promise and see if you can replicate my ideas within your axios code. Fetch is basically similar to axios without the innate JSON conversion, and has a different flow for resolving promises (which you should refer to the axios documentation to learn).
GetCache.js
const base_endpoint = BaseEndpoint + "cache/";
// Default function is going to take a selection, date, and a callback to execute.
// We're going to call the base endpoint and selection string passed to the original function.
// This will make our endpoint.
// Contract: on success, `callback` receives the parsed JSON body; on any
// failure (HTTP 4xx/5xx, non-JSON content type, network error) the error
// is logged and `callback` is never invoked.
export default (selection, date, callback) => {
fetch(base_endpoint + selection + "/" + date)
// If the response is not within a 500 (according to Fetch docs) our promise object
// will _eventually_ resolve to a response.
.then(res => {
// Lets check the status of the response to make sure it's good.
if (res.status >= 400 && res.status < 600) {
throw new Error("Bad response");
}
// Let's also check the headers to make sure that the server "reckons" its serving
//up json
if (!res.headers.get("content-type").includes("application/json")) {
throw new TypeError("Response not JSON");
}
return res.json();
})
// Fulfilling these conditions lets return the data. But how do we get it out of the promise?
.then(data => {
// Using the function we passed to our original function silly! Since we've error
// handled above, we're ready to pass the response data as a callback.
callback(data);
})
// Fetch's promise will throw an error by default if the webserver returns a 500
// response (as notified by the response code in the HTTP header).
.catch(err => console.error(err));
};
Now we've written our GetCache method, lets see what it looks like to update a React component's state as an example...
Some React Component.jsx
// Make sure you import GetCache from GetCache.js!
// React method: builds a setState callback and hands it to GetCache, so
// the component updates itself whenever the fetched data arrives.
resolveData() {
const { mySelection, date } = this.state; // We could also use props or pass to the function to acquire our selection and date.
// setData is invoked by GetCache with the parsed JSON payload.
const setData = data => {
this.setState({
data: data,
loading: false
// We could set loading to true and display a wee spinner
// while waiting for our response data,
// or rely on the local state of data being null.
});
};
GetCache("mySelelection", date, setData);
}
Ultimately, you don't "return" data as such, I mean you can but it's more idiomatic to change your way of thinking... Now we are sending data to asynchronous methods.
Happy Coding!
axiosTest() needs to return axios.get, which in turn returns a Promise.
From there, then can be used to execute a function when said Promise resolves.
See Promise for more info.
Alternatively, await can be used from within the scope of some async function.
// Dummy Url.
const url = 'https://jsonplaceholder.typicode.com/posts/1'
// Axios Test.
// NOTE(review): aliasing the method works here — axiosTest(url) returns
// the same promise axios.get(url) would.
const axiosTest = axios.get
// Axios Test Data.
axiosTest(url).then(function(axiosTestResult) {
console.log('response.JSON:', {
message: 'Request received',
data: axiosTestResult.data
})
})
<script src="https://cdnjs.cloudflare.com/ajax/libs/axios/0.18.0/axios.js"></script>
IMO extremely important rule of thumb for your client side js code is to keep separated the data handling and ui building logic into different funcs, which is also valid for axios data fetching ... in this way your control flow and error handlings will be much more simple and easier to manage, as it could be seen from this
ok fetch
and this
NOK fetch
<script src="https://unpkg.com/axios/dist/axios.min.js"></script>
<script>
// Parses the current page's query string into a URLSearchParams object.
// Browser-only: reads window.location.
function getUrlParams (){
var url_params = new URLSearchParams();
if( window.location.toString().indexOf("?") != -1) {
var href_part = window.location.search.split('?')[1]
// Decode each key=value pair and append it (preserving repeats).
href_part.replace(/([^=&]+)=([^&]*)/g,
function(m, key, value) {
var attr = decodeURIComponent(key)
var val = decodeURIComponent(value)
url_params.append(attr,val);
});
}
// for(var pair of url_params.entries()) { console.log(pair[0]+ '->'+ pair[1]); }
return url_params ;
}
/**
 * GET `url` with the given query parameters; resolves with the axios
 * response object, or with error.response when the request fails.
 *
 * BUG FIX: the original tested `typeof url_params` — a GLOBAL that may
 * not exist — instead of the `urlParams` parameter, so the default was
 * never applied when the argument was omitted.
 *
 * @param {string} url
 * @param {URLSearchParams} [urlParams] - defaults to the page's own query string
 * @returns {Promise<object>} axios response (or error.response on failure)
 */
function getServerData(url, urlParams) {
  if (typeof urlParams === "undefined") {
    urlParams = getUrlParams();
  }
  return axios.get(url, { params: urlParams })
    .then((response) => {
      return response;
    })
    .catch(function (error) {
      console.error(error);
      // Resolve (not reject) with the server's error response so the
      // caller can branch on response.status.
      return error.response;
    });
}
// Action !!!
// Action !!!
// NOTE(review): `url` and `url_params` must be defined elsewhere on the
// page before this runs — neither is declared in this snippet.
getServerData(url , url_params)
.then( response => {
// Branch on HTTP status: 204 = no content, 4xx = caller/server error,
// anything else is treated as a data-bearing success.
if ( response.status === 204 ) {
var warningMsg = response.statusText
console.warn ( warningMsg )
return
} else if ( response.status === 404 || response.status === 400) {
var errorMsg = response.statusText // + ": " + response.data.msg // this is my api
console.error( errorMsg )
return ;
} else {
var data = response.data
var dataType = (typeof data)
if ( dataType === 'undefined' ) {
var msg = 'unexpected error occurred while fetching data !!!'
// pass here to the ui change method the msg aka
// showMyMsg ( msg , "error")
} else {
var items = data.dat // obs this is my api aka "dat" attribute - that is whatever happens to be your json key to get the data from
// call here the ui building method
// BuildList ( items )
}
return
}
})
</script>
After 6 hours of fluttering, I realized it was a one-line problem. If you are interfering with the axios life-cycle, you may have forgotten this line:
// Registers axios interceptors that flag each URL as in-flight / done.
// NOTE(review): this snippet is truncated — the closing brace of
// componentDidMount (and the `)` of the response interceptor) is missing
// in the original post.
componentDidMount() {
this.requestInterceptor = axios.interceptors.request.use((request) => {
this.updateApiCallFor(request.url, true);
return request;
});
this.responseInterceptor = axios.interceptors.response.use((response) => {
this.updateApiCallFor(response.config.url, false);
return response; // THIS LINE IS IMPORTANT ! Omitting it makes every awaited response undefined.
}, (error) => {
this.updateApiCallFor(error.config.url, false);
throw error;
});
async makes a function return a Promise
await makes a function wait for a Promise
code async/await
// https://www.npmjs.com/package/axios
const axios = require('axios')
/* --- */
// Demonstrates awaiting the promise directly: logs the full axios
// response object once it resolves.
async function axiosTest() {
let promiseAxios = axios.get( 'https://example.com' )
/* --- */
console.log( await promiseAxios )
}
/* --- */
axiosTest()
replit.com Stackoverflow - Returning data from Axios API
replit.com Stackoverflow - How to return values from async
code async/await with return
// https://www.npmjs.com/package/axios
const axios = require('axios')
/* --- */
// Same idea, with the promise produced by a named helper.
async function axiosTest() {
console.log( await promiseAxios() )
}
/* --- */
axiosTest()
/* --- */
// create function for promise axios and return it
// (the `return` here is what makes the await above meaningful)
function promiseAxios() {
return axios.get( 'https://example.com' )
}
replit.com Stackoverflow - Returning data from Axios API - return
replit.com Stackoverflow - How to return values from async - return
Try this,
// BUG FIX: the original axiosTest had no `return`, so it resolved to
// undefined and getResponse logged `undefined`. Returning the chain
// makes the awaited value the response payload (or, because of this
// snippet's .catch, the error object on failure).
function axiosTest() {
  return axios.get(url)
    .then((response) => response.data)
    .catch((error) => error);
}

async function getResponse() {
  const response = await axiosTest();
  console.log(response);
}

getResponse();
It works, but each function where you want to get the response needs to be an async function or use an additional .then() callback.
// BUG FIX: same missing `return` as above — without it, axiosTest()
// yields undefined and the .then callback receives undefined.
function axiosTest() {
  return axios.get(url)
    .then((response) => response.data)
    .catch((error) => error);
}

async function getResponse() {
  axiosTest().then((response) => {
    console.log(response);
  });
}

getResponse();
If anyone knows a way to avoid this please do tell.
Also checkout Katsiaryna (Kate) Lupachova's article on Dev.to. I think it will help.
// Awaits the wrapped request; `result` is the payload on success.
async handleResponse(){
const result = await this.axiosTest();
}
// Resolves with response.data on success.
// NOTE(review): on failure the .catch only logs, so this resolves with
// undefined instead of rejecting — callers cannot distinguish errors.
// The `return await` is also redundant; `return` alone suffices.
async axiosTest () {
return await axios.get(url)
.then(function (response) {
console.log(response.data);
return response.data;})
.catch(function (error) {
console.log(error);
});
}
You can find check https://flaviocopes.com/axios/#post-requests url and find some relevant information in the GET section of this post.
You can use Async - Await:
// BUG FIX: axios responses have no .json() method — that is the Fetch
// API; axios already parses JSON into response.data. Also return the
// payload so callers actually receive it.
async function axiosTest() {
  const response = await axios.get(url);
  return response.data;
}
I try to download some data from an external API. I would like to pipe the response of every request. The array including the request URLs looks like this :
[ 'https://scihub.copernicus.eu/dhus/odata/v1/Products(\'d98b8730-846f-46d0-a816-5ae4db9f56a7\')/$value',
'https://scihub.copernicus.eu/dhus/odata/v1/Products(\'6edaeb16-3077-45d1-b3f0-fa2d5549f64a\')/$value',
'https://scihub.copernicus.eu/dhus/odata/v1/Products(\'333db2aa-c695-4753-8bd1-e64308af26e1\')/$value',
'https://scihub.copernicus.eu/dhus/odata/v1/Products(\'052cf771-6c4e-4a3a-bc15-51c95a3f37c4\')/$value' ]
I read that request-promise does not support to pipe the request response but I have not found an alternative, that's why the function with which I am trying to get the results from looks as follows:
var fs = require('fs');
var rp = require('request-promise');
// Downloads every URL in promObj.requestURLS and pipes each response to
// disk; resolves with promObj when a download finishes.
// NOTE(review): the `{concurrency:2}` object is passed as the SECOND
// argument to Array.prototype.map (the thisArg) — native map has no
// concurrency option, so it is silently ignored. Also, rp() resolves
// with the response BODY by default, not a stream, so `body.pipe` will
// not behave as intended; and `resolve` fires on the FIRST finished
// file, not after all of them.
function downloadSentinel(promObj){
return new Promise((resolve,reject) => {
try {
var promises = promObj.requestURLS.map(url => rp(url,{auth:auth}).then(body => body.pipe(fs.createWriteStream('./test.zip'))
.on('finish', () => {
resolve(promObj);
})), {concurrency:2});
Promise.all(promises).then(results => {
console.log(results)
});
} catch (error) {
reject(error);
}
})
}
Additionally, it is only possible to download two products at the same time. I tried to achieve this with the Bluebird parameter concurrency, but it doesn't seem to work properly.
How could I solve my problem?
UPDATE
If I try it with this code:
// Updated attempt: one promise per URL, resolved when its write stream
// finishes.
// NOTE(review): rp(url).then(data => ...) hands `data` the resolved BODY
// (a string/Buffer by default), which has no .pipe — matching the
// "data.pipe is not a function" error below; a streaming client (or
// rp with resolveWithFullResponse/stream usage) is needed. As above,
// `{concurrency:2}` is ignored by Array.prototype.map.
var promises = promObj.requestURLS.map(url => rp(url,{auth:auth}).then(
data => new Promise((resolve,reject) => {
data.pipe(fs.createWriteStream('./data/' + promObj.Name[0] + ".zip"))
.on('finish', () => {
console.log('Finally finished');
resolve(promObj);
})
.on('error', () => {
reject(promObj);
})})),{concurrency:2});
Promise.all(promises).then(results => {
console.log(results)
});
I get the error UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 2): ReferenceError: data.pipe is not a function
What have I missed?
Additionally, I recognized that my data is 800MB large. Is this too large to pipe it without an error?
I get this error now:
In buffer.js:556
if (encoding === undefined) return buf.utf8Slice(start, end);
Error: "toString()" failed