Conditional Promise Chaining - javascript

I get an array of args as an argument, and then I make many server calls based on the algorithm below:
1. Post to endpoint /abc with the args array as data.
2. Iterate over the args array:
   a. Pull 3 args at a time and send 3 GET calls to endpoint /pqr.
   b. Once the 3 calls in step 2.a succeed, send 3 POST calls to endpoint /def.
   c. Collect the responses from the step 2.a server calls and push them into an array.
   d. Repeat steps a, b, c until the args array is exhausted.
The code snippet for the entire process is given below; execution starts at function execute(args).
import Promise from 'bluebird';
import request from 'superagent';
// sends a post request to server
const servercall2 = (args, response) => {
  const promise = new Promise((resolve, reject) => {
    const req = request
      .post(`${baseUrl}/def`)
      .send(args, response)
      .setAuthHeaders();
    req.endAsync()
      .then((res) => resolve(res))
      .catch((err) => reject(err));
  });
  return promise;
};
// sends a post request to server
const servercall1 = (args) => {
  const promise = new Promise((resolve, reject) => {
    const req = request
      .post(`${baseUrl}/abc`)
      .send(args)
      .setAuthHeaders();
    req.endAsync()
      .then((res) => resolve({res}))
      .catch((err) => reject(err));
  });
  return promise;
};
async function makeServerCalls(args, length) {
  // convert args to a two-dimensional array, chunks of given length [[1,2,3], [4,5,6], [7,8]]
  const batchedArgs = args.reduce((rows, key, index) => (index % length === 0 ? rows.push([key])
    : rows[rows.length - 1].push(key)) && rows, []);
  const responses = [];
  for (const batchArgs of batchedArgs) {
    responses.push(
      // wait for a chunk to complete, before firing the next chunk of calls
      await Promise.all(
        // Error, expected to return a value in arrow function???
        batchArgs.map((args) => {
          const req = request
            .get(`${baseUrl}/pqr`)
            .query(args);
          // I want to collect the response from the above req at the end of all calls.
          req.endAsync()
            .then((response) => servercall2(args, response))
            .then((res) => res);
        })
      )
    );
  }
  // wait for all calls to finish
  return Promise.all(responses);
}
export function execute(args) {
  return (dispatch) => {
    servercall1(args)
      .then(makeServerCalls(args, 3))
      .then((responses) => {
        const serverresponses = [].concat(...responses);
        console.log(serverresponses);
      });
  };
}
I am facing a couple of issues:
1. Step 2.c ("Collect responses from step '2.a' server call and push it in an array.") does not seem to work; I get "Error: expected to return a value in arrow function". What am I doing wrong here? Please note that at the end I care about the response from step 2.a only.
2. Is this the right chaining, or can it be optimized, based on the requirements mentioned above?
3. Is there any other failure handling I have to do?

You have a brick wall of text, so it's becoming a little hard to decipher what you're actually trying to achieve, but I will give my two cents on the code given.
// Both server calls can be simplified: no need to
// wrap in another promise if one is already being returned
const servercall2 = (args, response) => {
  const req = request
    .post(`${baseUrl}/def`)
    .send(args, response)
    .setAuthHeaders();
  return req.endAsync();
};
// Here... you return no value in the function passed to map, thus an
// error is being thrown. You need to return a Promise from here so that
// it can be passed into Promise.all
const allFinished = await Promise.all(
  batchArgs.map((args) => {
    const req = request
      .get(`${baseUrl}/pqr`)
      .query(args);
    // I want to collect the response from the above req at the end of all calls.
    return req.endAsync();
  })
);
// Note: allFinished was awaited, so it is already the resolved array
// of responses here; no further .then call is needed.
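Putting both fixes together, a corrected makeServerCalls might look like the sketch below (assuming the same superagent request, baseUrl, and bluebird-promisified endAsync from the question). Since only the /pqr responses matter at the end, each follow-up POST is fired but the GET response is what gets collected:
// Sketch: batches GETs to /pqr, fires a POST to /def per response,
// and collects the /pqr responses across all batches.
async function makeServerCalls(args, length) {
  // chunk args into rows of `length`, e.g. [[1,2,3], [4,5,6], [7,8]]
  const batchedArgs = args.reduce((rows, key, index) =>
    (index % length === 0 ? rows.push([key]) : rows[rows.length - 1].push(key)) && rows, []);
  const responses = [];
  for (const batch of batchedArgs) {
    // wait for the whole chunk before firing the next one
    const chunkResponses = await Promise.all(
      batch.map((arg) =>
        request.get(`${baseUrl}/pqr`).query(arg).endAsync()
          // fire the follow-up POST, but resolve to the /pqr response
          .then((response) => servercall2(arg, response).then(() => response))
      )
    );
    responses.push(...chunkResponses);
  }
  return responses;
}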

It might be this -- each item in batchArgs.map should be a Promise, I think? Then Promise.all will wait for each to finish:
batchArgs.map((args) => {
  const req = request
    .get(`${baseUrl}/pqr`)
    .query(args);
  // Return the promise here
  return req.endAsync()
    .then((response) => servercall2(args, response));
})


For loop with fetch returning empty array

I'm writing a server route that makes API calls.
I need to make two different fetch requests because I need more info than what comes in the first fetch.
The problem is that I'm declaring a variable outside of the promise scope, and for some reason my res.send is not waiting until the array gets full.
I need to iterate until result 9 (I can't use theDogApi's predefined filters to show nine results!)
if (req.query.name) {
  var myRes = [];
  fetch(`https://api.thedogapi.com/v1/breeds/search?name=${req.query.name}&apikey=${key}`)
    .then(r => r.json())
    .then(data => {
      for (let i = 0; i < 8 && i < data.length; i++) {
        fetch(`https://api.thedogapi.com/v1/images/${data[i].reference_image_id}`)
          .then(r => r.json())
          .then(datos => {
            myRes.push({ ...data[i], ...datos });
          })
      }
    })
    .then(res.send(myRes))
}
I'd appreciate the help!
You can try using Promise.all to turn your array of fetch calls into an aggregate promise that resolves to an array of responses when all have arrived. If any fail, the whole thing fails (use Promise.allSettled if you don't want all-or-nothing semantics). Don't forget to catch the error.
Although the code doesn't show it, be sure to check response.ok to make sure the request actually succeeded before calling .json(). Throwing an error if !response.ok and handling it in the .catch block is a typical strategy. Writing a wrapper on fetch is not a bad idea to avoid verbosity.
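As an aside, such a wrapper might look something like this (fetchJSON is a hypothetical name, not part of any library):
// Hypothetical helper: resolves to parsed JSON, rejects on non-2xx status.
const fetchJSON = (url) =>
  fetch(url).then((response) => {
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} for ${url}`);
    }
    return response.json();
  });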
Lastly, note that Array#slice replaces the for loop. For arrays with fewer than 8 elements, it'll slice as many as are available without issue.
// mock everything
const fetch = (() => {
  const responses = [
    {
      json: async () =>
        [...Array(10)].map((e, i) => ({reference_image_id: i}))
    },
    ...Array(10)
      .fill()
      .map((_, i) => ({json: async () => i})),
  ];
  return async () => responses.shift();
})();
const req = {query: {name: "doberman"}};
const key = "foobar";
const res = {send: response => console.log(`sent ${response}`)};
// end mocks

fetch(`https://api.thedogapi.com/v1/breeds/search?name=${req.query.name}&apikey=${key}`)
  .then(response => response.json())
  .then(data =>
    Promise.all(data.slice(0, 8).map(e =>
      fetch(`https://api.thedogapi.com/v1/images/${e.reference_image_id}`)
        .then(response => response.json())
    ))
  )
  .then(results => res.send(results))
  .catch(err => console.error(err));
Here is an example of an async function using await:
async function fun(queryName, key){
  const a = [], j = [];
  let firstWait = await fetch(`https://api.thedogapi.com/v1/breeds/search?name=${queryName}&apikey=${key}`);
  let firstJson = await firstWait.json(); // must be an Array
  for(let i = 0, n = 8, l = firstJson.length; i < n && i < l; i++){
    a.push(fetch('https://api.thedogapi.com/v1/images/' + firstJson[i].reference_image_id));
  }
  const p = await Promise.all(a);
  for(let v of p){
    j.push(v.json());
  }
  return Promise.all(j);
}
// assumes req, req.query, req.query.name, and key are already defined
fun(req.query.name, key).then(a => {
  // a is your JSON Array
});
JSON
Here's my hot take: stop using low-level functions like fetch every time you want to get JSON. It tangles fetching logic into every piece of code that needs a bit of JSON. Write getJSON once and use it wherever you need JSON -
const getJSON = s =>
  fetch(s).then(r => r.json())

const data =
  await getJSON("https://path/to/some/data.json")
// ...
URL and URLSearchParams
Another hot take: stop writing all of your URLs by hand. This tangles URL writing/rewriting with all of your API access logic. We can set up a DogApi endpoint once, with a base URL and an apikey -
const DogApi =
  withApi("https://api.thedogapi.com/v1", {apikey: "0xdeadbeef"})
And now whenever we need to touch that endpoint, the base URL and default params can be inserted for us -
const breed =
  // https://api.thedogapi.com/v1/breeds/search?apikey=0xdeadbeef&name=chihuahua
  await getJSON(DogApi("/breeds/search", {name}))
// ...
withApi has a simple implementation -
const withApi = (base, defaults) => (pathname, params) =>
{ const u = new URL(base)  // <- if you don't know it, learn URL
  u.pathname += pathname   // append to the base path, e.g. /v1 + /breeds/search
  setParams(u, defaults)
  setParams(u, params)
  return u.toString()
}

function setParams (url, params = {})
{ for (const [k,v] of Object.entries(params))
    url.searchParams.set(k, v) // <- if you don't know it, learn URLSearchParams
  return url
}
fruits of your labor
Now it's dead simple to write functions like imagesForBreed, and any other functions that touch JSON or your DogApi -
async function imagesForBreed (name = "")
{ if (name == "")
return []
const breed =
await getJSON(DogApi("/breeds/search", {name}))
const images =
data.map(v => getJSON(DogAPI(`/images/${v.reference_image_id}`))
return Promise.all(images)
}
And your entire Express handler is reduced to a single line, with no need to touch .then or other laborious API configuration -
async function fooHandler (req, res)
{ res.send(await imagesForBreed(req.query.name))
}

Having difficulty understanding this promise + async await example to retrieve multiple users info in JavaScript

I was following this tutorial (https://javascript.info/fetch) on JavaScript's promises and async/await and had trouble understanding the exercise it provided.
The exercise is about retrieving multiple users' info from GitHub, one fetch request per user.
And requests shouldn't wait for each other, so that the data arrives as soon as possible.
The solution it provided is
async function getUsers(names) {
  let jobs = [];
  for(let name of names) {
    let job = fetch(`https://api.github.com/users/${name}`).then(
      successResponse => {
        if (successResponse.status != 200) {
          return null;
        } else {
          return successResponse.json();
        }
      },
      failResponse => {
        return null;
      }
    );
    jobs.push(job);
  }
  let results = await Promise.all(jobs);
  return results;
}
My first question is: can we use await for the fetch? I.e., is the following snippet equivalent to the solution it provided?
async function getUsers2(names) {
  let jobs = [];
  for(let name of names) {
    let response;
    try {
      response = await fetch(`https://api.github.com/users/${name}`);
    } catch(e) {
      response = null;
    }
    const job = response && response.json();
    jobs.push(job);
  }
  let results = await Promise.all(jobs);
  return results;
}
Furthermore, the tutorial said
.then call is attached directly to fetch, so that when we have the response, it doesn’t wait for other fetches, but starts to read .json() immediately.
If we used await Promise.all(names.map(name => fetch(...))), and call .json() on the results, then it would wait for all fetches to respond. By adding .json() directly to each fetch, we ensure that individual fetches start reading data as JSON without waiting for each other.
Does he mean that if we write the solution this way
async function getUser(name) {
  const response = await fetch(`https://api.github.com/users/${name}`)
  return response.ok ? await response.json : null
}
async function getUsers(names) {
  return await Promise.all(names.map(name => getUser(name)))
}
we wouldn't be able to achieve the effect where requests don't wait for each other?
My first question is: can we use await for the fetch? I.e., is the following snippet equivalent to the solution it provided?
No. In the immediate body of an async function, whenever there's an await, the function completely pauses until the awaited Promise resolves. So the loop
for(let name of names) {
  let response;
  try {
    response = await fetch(`https://api.github.com/users/${name}`);
  } catch(e) {
    response = null;
  }
needs to wait serially for the headers of each response to be received before continuing on to initialize the next request.
Does he mean that if we write the solution this way
First, the syntax needs to be adjusted: .json is a method, so it needs to be called:
async function getUser(name) {
  const response = await fetch(`https://api.github.com/users/${name}`)
  return response.ok ? await response.json() : null
  //                                       ^^
}
But that's perfectly fine to do. The only await in the getUsers function waits for the whole Promise.all to resolve; the .mapping of the array to the getUser calls is carried out synchronously, so all requests are sent out at once, and none of the network requests needs to wait for any of the others in order to work.
The problem the author was referring to was calling Promise.all on an array of the fetch calls, rather than on an array of the .json() calls:
// Bad, do not use:
const getUsers = async (names) => {
  const responses = await Promise.all(names.map(
    name => fetch(`https://api.github.com/users/${name}`)
  ));
  return Promise.all(responses.map(
    response => response.ok ? response.json() : null
  ));
}
The problem with the above is that the script must wait for all of the response headers from every request to be received before the response body for any of them can start to be parsed.
Another parallel solution:
const getUsers = names => Promise.all(names.map(
  async (name) => {
    const res = await fetch(`https://api.github.com/users/${name}`);
    return res.ok ? res.json() : null;
  }
));

How to order the order of returned API calls with generators?

I'm practicing some more advanced JavaScript techniques, and came across generators and iterators as something I wanted to look into. I know that I'm doing this incorrectly, but I'm not really sure how to go about it.
The idea of my little program is this: I want to make API calls to the OpenWeather API for four (or more, but I'm testing with four) cities. The cities are stored in an array and, one by one, each city is appended to the URL and a fetch request is sent. Each response is appended to an array and the array is sent to the client.
This was my original code:
// node/express setup here
const cities = ["London%2Cuk", "New York%2Cus", "Johannesburg%2Cza", "Kingston%2Cjm"];
const url = process.env.URL_BASE;
const headers = {
  "X-RapidAPI-Host": process.env.HOST,
  "X-RapidAPI-Key": process.env.API_KEY
};
const requestInit = {
  method: 'GET',
  headers: headers
};
const fetchWeather = (ep) => {
  const appendedURL = url + ep;
  return fetch(appendedURL, requestInit)
    .then(r => r.json());
};
app.get('/', (req, res, err) => {
  const data = [];
  Promise.all(
    cities.map(async (city) => {
      await fetchWeather(city)
        .then(returns => {
          data.push(returns);
        });
    })
  )
    .then(() => {
      res.send(data);
      return data;
    })
    .catch(err => console.log(err));
});
Right? Solid, works ok. But now I'm stuck on how to order it. The way I would think to do this is to switch await fetchWeather(city) to yield fetchWeather(city) and have a generator manager that would continue calling next(city) until the array had completed, but I'm having an issue figuring out the pattern. I refactored the API call to a generator and am testing out a generator management function.
The paradigm I have based on my understanding is this:
First .next() starts the iteration
Second .next(args) passes the designated city to the first yield
Third .next() sends the yielded fetch request and should (ideally) return the response object that can be .then()'d.
Here is my tester generator code:
function *fetchWeather() {
  for (let i = 0; i < cities.length; i++){
    const appendedURL = url + (yield);
    yield fetch(appendedURL, requestInit)
      .then(r => {
        return r.json();
      });
  }
}
const generatorManager = (generator) => {
  if (!generator) {
    generator = fetchWeather();
  }
  generator.next();
  generator.next(cities[i]);
  generator.next().value.then(e =>
    console.log(e));
};
I'm getting an error: TypeError: Cannot read property 'then' of undefined, and I'm not sure where I'm going wrong here with my logic. How do I refactor this to allow me to wait for specific promises if I can't individually pass known values? I know there has to be a way, but I'm missing something.
Thanks in advance.
I don't understand what benefit you hope to get from using a generator here, but the reason you're getting that error is that you're making one too many .next() calls.
The first generator.next() runs fetchWeather until the first yield, which is the yield at the end of const appendedURL = url + (yield);. The return value of calling generator.next() in this case is { value: undefined, done: false }.
After that, generator.next(cities[i]) resumes fetchWeather, with cities[i] being the result of the previous yield. The generator continues running, calling fetch, then calling .then on that promise, and then yielding the resulting promise. So the return value that generatorManager sees from generator.next(cities[i]) is { value: /* a promise object */, done: false }.
So to fix that error, you need to reduce the number of calls you're making to generator.next:
generator.next();
generator.next(cities[i]).value.then(e =>
  console.log(e));
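For completeness, if you did want to drive the generator this way, one sketch of a driver (assuming the fetchWeather generator and cities array exactly as written above) is to advance it twice per city and await each yielded promise before feeding in the next city:
// Sketch: pulls each city through the generator sequentially.
async function runFetchWeather() {
  const generator = fetchWeather();
  const results = [];
  for (const city of cities) {
    generator.next();                       // run up to the `(yield)` that expects a city
    const { value } = generator.next(city); // resume with the city; yields the fetch promise
    results.push(await value);              // wait for this response before the next city
  }
  return results;
}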
As mentioned in the comments, the usual way I'd do this is to map the cities to promises and then use Promise.all. Promise.all also answers the ordering question: its results arrive in the same order as the input array, regardless of which request finishes first. For example:
Promise.all(
  cities.map((city) => fetchWeather(city)) // note: this is the original fetchWeather, not the generator
).then((data) => {
  res.send(data);
  return data;
})
.catch(err => console.log(err));

Struggle with chaining of promises in react application

JavaScript, React: I'm sending multiple simultaneous AJAX calls and struggling with promises. Basically I want to chain the calls: only when one server call completes should the next call start, and I want to collect the successful responses of the calls to endpoint /pqr made inside makeServerCalls.
import Promise from 'bluebird';
import request from 'superagent';
// sends a post request to server
const servercall2 = (args, response) => {
  const promise = new Promise((resolve, reject) => {
    const req = request
      .post(`${baseUrl}/def`)
      .send(args, response)
      .setAuthHeaders();
    req.endAsync()
      .then((res) => resolve(res))
      .catch((err) => reject(err));
  });
  return promise;
};
// sends a post request to server
const servercall1 = (args) => {
  const promise = new Promise((resolve, reject) => {
    const req = request
      .post(`${baseUrl}/abc`)
      .send(args)
      .setAuthHeaders();
    req.endAsync()
      .then((res) => resolve({res}))
      .catch((err) => reject(err));
  });
  return promise;
};
// function to send requests to the cgi server to execute actions from the ui
async function makeServerCalls(args, length) {
  // convert args to a two-dimensional array, chunks of given length [[1,2,3], [4,5,6], [7,8]]
  const batchedArgs = args.reduce((rows, key, index) => (index % length === 0 ? rows.push([key])
    : rows[rows.length - 1].push(key)) && rows, []);
  const responses = [];
  for (const batchArgs of batchedArgs) {
    responses.push(
      // wait for a chunk to complete, before firing the next chunk of calls
      await Promise.all(
        // Error, expected to return a value in arrow function???
        batchArgs.map((args) => {
          const req = request
            .get(`${baseUrl}/pqr`)
            .query(args);
          // I want to collect the response from the above req at the end of all calls.
          req.endAsync()
            .then((response) => servercall2(args, response))
            .then((res) => res);
        })
      )
    );
  }
  // wait for all calls to finish
  return Promise.all(responses);
}
export function execute(args) {
  return (dispatch) => {
    servercall1(args)
      .then(makeServerCalls(args, 3))
      .then((responses) => {
        const serverresponses = [].concat(...responses);
        console.log(serverresponses);
      });
  };
}
Error: expected to return a value in arrow function. What am I doing wrong here?
Is this the right chaining, or can it be optimized?
What happens if some call fails in between?
You can use the Async library for this. No need to re-invent the wheel.
There is a waterfall function that takes a list of functions that execute in series. You can pass the result of function 1 to function 2, function 2 to function 3, and so on. Once the complete waterfall executes, you get the result in the final callback. You can read more about it in the async docs.
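For reference, a minimal sketch of the waterfall shape (assuming the async package; the step functions and values here are illustrative):
const async = require('async');

async.waterfall([
  (callback) => {
    // first server call would go here; pass its result to the next step
    callback(null, 'step1Result');
  },
  (step1Result, callback) => {
    // runs only after step 1 completes, and receives its result
    callback(null, `${step1Result} -> step2Result`);
  },
], (err, finalResult) => {
  if (err) return console.error(err);
  console.log(finalResult); // 'step1Result -> step2Result'
});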

async.queue within a promise chain?

I am trying to create an async queue for an array of GET requests to an API; I am just unsure how to combine and use the responses. Maybe my implementation is wrong, since I am using async.queue inside a promise's then function?
Ultimately I would like to get results from the first promise ->
use the results of that first promise to create an array of GET requests for the async.queue ->
then combine the results of all the GET responses. I need to throttle the number of requests that go out at a time due to an API rate limit.
const rp = require("request-promise");
app.get("/", (req, res) => {
  let arr = [];
  rp.get(url)
    .then((response) => {
      let arrayID = response;
      let q = async.queue((task, callback) => {
        request({
          method: "GET",
          url: url,
          qs: {
            id: task.id
          }
        }, (error, response, body) => {
          arr.push(body);
          console.log(arr.length);
          // successfully gives me the response I want; I'm trying to push all of my responses into an array,
          // but by the time the next .then in the chain runs it is gone, and if I try to return arr I get an empty []
        });
        callback();
      }, 3);
      for(var i = 0; i < arrayID.length; i++){
        q.push({ id: arrayID[i] });
      }
      q.drain = function() {
        console.log('all items have been processed');
      };
      return arr;
    })
    .then((responseArray) => {
      // empty array, even though the length logged inside the queue said otherwise. I know it's a problem with
      // async and sync actions, but is there a way to make the promise chain and the async queue play nice?
      res.json(responseArray);
    });
});
Figured it out: I ended up having to wrap it in a promise and resolve the final array in q.drain().
const rp = require("request-promise");
app.get("/", (req, res) => {
  let arr = [];
  rp.get(url)
    .then((response) => {
      let arrayID = response;
      return new Promise((resolve, reject) => {
        var q = async.queue((task, callback) => {
          request({
            method: "GET",
            url: url,
            qs: {
              id: task.id,
            },
          }, (error, response, body) => {
            arr.push(body);
            callback();
          });
        }, 2);
        q.drain = () => resolve(arr);
        q.push(arrayID);
      });
    })
    .then((response) => res.json(response))
    .catch((error) => res.json(error));
});
To launch multiple async calls in parallel you can use Promise.all().
To launch multiple async calls sequentially (i.e. they depend on each other), you can return each promise and use its result inside a then() function.
Code below:
app.get("/", (req,res)
.then(function(firstResult)) {
//You can use result of first promise here
return Promise.all([
//Create array of get request here
//To also return firstResult just add it in the Promise.All array
]);
})
.then(function(allResults){
//You can use results of all the get requests created in the previous then()
})
.catch(function(error){
//Deal with any error that happened
});
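One caveat: Promise.all fires every request at once, while the question mentions an API rate limit. A rough sketch of a compromise (a hypothetical helper, no extra library) is to chunk the request-producing functions and await each chunk in turn:
// Sketch: run `tasks` (functions that return promises) `size` at a time.
async function runInChunks(tasks, size) {
  const results = [];
  for (let i = 0; i < tasks.length; i += size) {
    const chunk = tasks.slice(i, i + size).map((task) => task());
    results.push(...(await Promise.all(chunk)));
  }
  return results;
}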
