Calling an async function multiple times - JavaScript

So I have a method that I want to call multiple times in a loop. This is the function:
function PageSpeedCall(callback) {
var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[0]}&strategy=mobile&key=${keys.pageSpeed}`;
// second call
var results = '';
https.get(pagespeedCall, resource => {
resource.setEncoding('utf8');
resource.on('data', data => {
results += data;
});
resource.on('end', () => {
callback(null, results);
});
resource.on('error', err => {
callback(err);
});
});
// callback(null, );
}
As you see this is an async function that calls the PageSpeed API. It then gets the response thanks to the callback and renders it in the view. Now how do I get this to work in a for/while loop? For example
function PageSpeedCall(websites, i, callback) {
var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[i]}&strategy=mobile&key=${keys.pageSpeed}`;
// second call
var results = '';
https.get(pagespeedCall, resource => {
resource.setEncoding('utf8');
resource.on('data', data => {
results += data;
});
resource.on('end', () => {
callback(null, results);
});
resource.on('error', err => {
callback(err);
});
});
// callback(null, );
}
var websites = ['google.com','facebook.com','stackoverflow.com'];
for (let i = 0; i < websites.length; i++) {
PageSpeedCall(websites, i);
}
I want to get a report for each of these sites. The length of the array will change depending on what the user does.
I am using async.parallel to call the functions like this:
let freeReportCalls = [PageSpeedCall, MozCall, AlexaCall];
async.parallel(freeReportCalls, (err, results) => {
if (err) {
console.log(err);
} else {
res.render('reports/report', {
title: 'Report',
// bw: JSON.parse(results[0]),
ps: JSON.parse(results[0]),
moz: JSON.parse(results[1]),
// pst: results[0],
// mozt: results[1],
// bw: results[1],
al: JSON.parse(results[2]),
user: req.user,
});
}
});
I tried to use promise chaining, but for some reason I cannot put it together in my head. This is my attempt.
return Promise.all([PageSpeedCall,MozCall,AlexaCall]).then(([ps,mz,al]) => {
if (awaiting != null)
var areAwaiting = true;
res.render('admin/', {
title: 'Report',
// bw: JSON.parse(results[0]),
ps: JSON.parse(results[0]),
moz: JSON.parse(results[1]),
// pst: results[0],
// mozt: results[1],
// bw: results[1],
al: JSON.parse(results[2]),
user: req.user,
});
}).catch(e => {
console.error(e)
});
I tried doing this:
return Promise.all([for(let i = 0;i < websites.length;i++){PageSpeedCall(websites, i)}, MozCall, AlexaCall]).
then(([ps, mz, al]) => {
if (awaiting != null)
var areAwaiting = true;
res.render('admin/', {
title: 'Report',
// bw: JSON.parse(results[0]),
ps: JSON.parse(results[0]),
moz: JSON.parse(results[1]),
// pst: results[0],
// mozt: results[1],
// bw: results[1],
al: JSON.parse(results[2]),
user: req.user,
});
}).catch(e => {
console.error(e)
});
But Node just rejected that outright, since a for loop is a statement and can't appear inside an array literal.
And this would work if I didn't want to pass the websites and the iterator into the functions. Any idea how to solve this?
To recap. So far the functions work for single websites. I'd like them to work for an array of websites.
I'm basically not sure how to call them, and how to return the responses.

It's much easier if you use fetch and async/await
const fetch = require('node-fetch');
async function PageSpeedCall(website) {
const pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
const result = await fetch(pagespeedCall);
return await result.json();
}
async function callAllSites (websites) {
const results = [];
for (const website of websites) {
results.push(await PageSpeedCall(website));
}
return results;
}
callAllSites(['google.com','facebook.com','stackoverflow.com'])
.then(results => console.log(results))
.catch(error => console.error(error));
This is better done with Promise.all:
async function callAllSites (websites) {
return await Promise.all(websites.map(website => PageSpeedCall(website)));
}

Starting with Node 7.6.0 you can use native async/await:
async function PageSpeedCall(website) {
var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
return await promisify(pagespeedCall);
}
async function getResults(){
const websites = ['google.com','facebook.com','stackoverflow.com'];
return Promise.all(websites.map(async website => {
try {
return await PageSpeedCall(website);
}
catch (ex) {
// handle exception
}
}));
}
Node http "callback" to promise function:
function promisify(url) {
// return new pending promise
return new Promise((resolve, reject) => {
// select http or https module, depending on requested url
const lib = url.startsWith('https') ? require('https') : require('http');
const request = lib.get(url, (response) => {
// handle http errors
if (response.statusCode < 200 || response.statusCode > 299) {
return reject(new Error('Failed to load page, status code: ' + response.statusCode));
}
// temporary data holder
const body = [];
// on every content chunk, push it to the data array
response.on('data', (chunk) => body.push(chunk));
// we are done, resolve promise with those joined chunks
response.on('end', () => resolve(body.join('')));
});
// handle connection errors of the request
request.on('error', (err) => reject(err))
})
}
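A minimal usage sketch of the above:
getResults().then(results => {
// results is an array of raw response bodies (JSON strings), one per
// website, in input order (undefined for any site whose call failed)
results.forEach(result => console.log(result));
});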

Make PageSpeedCall a promise and push that promise to an array as many times as you need, e.g. myArray.push(PageSpeedCall(foo)), then myArray.push(PageSpeedCall(foo2)) and so on. Then you Promise.all the array.
If a subsequent async call requires the result of a prior async call, that is what .then is for.
Promise.all()
Promise.all([promise1, promise2, promise3]).then(function(values) {
console.log(values);
});
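Applied to the question, a minimal sketch of that idea, assuming PageSpeedCall, MozCall and AlexaCall have each been converted to return a promise (for example via a promisify helper like the one above):
const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
const myArray = websites.map(website => PageSpeedCall(website));
myArray.push(MozCall(), AlexaCall());
Promise.all(myArray).then(values => {
// the first websites.length entries are PageSpeed results, in input order
const psResults = values.slice(0, websites.length);
const [mozResult, alexaResult] = values.slice(websites.length);
});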

Related

How to achieve recursive Promise calls in Node.js

I am calling an API where I can only fetch 1000 records per request, and I was able to achieve this using recursion.
I am now trying to achieve the same using promises. I am fairly new to Node.js and JavaScript.
I tried adding the recursion code in an if/else block, but that failed:
var requestP = require('request-promise');
const option = {
url: 'rest/api/2/search',
json: true,
qs: {
//jql: "project in (FLAGPS)",
}
}
const callback = (body) => {
// some code
// ...
// saving records to file
// ...
// some code
if (totalExtractedRecords < total) {
requestP(option, callback).auth('api-reader', token, true)
.then(callback)
.catch((err) => {
console.log('Error Observed ' + err)
})
}
}
requestP(option).auth('api-reader', token, true)
.then(callback)
.catch((err) => {
console.log('Error Observed ' + err)
})
I want to execute the method using promises in a synchronous way,
i.e. I want to wait until all the records are exported to a file before continuing with my code.
I think it's better to create your own promise and simply resolve it when you're done with your recursion. Here's a simple example just so you understand the approach:
async function myRecursiveLogic(resolveMethod, ctr = 0) {
// This is where you do the logic
await new Promise((res) => setTimeout(res, 1000)); // wait - just for example
ctr++;
console.log('counter:', ctr);
if (ctr === 5) {
resolveMethod(); // Work done, resolve the promise
} else {
await myRecursiveLogic(resolveMethod, ctr); // recursion - continue work
}
}
// Run the method with a single promise
new Promise((res) => myRecursiveLogic(res)).then(r => console.log('done'));
Here's a clean and nice solution using the latest NodeJS features.
The recursive function will continue executing until a specific condition is met (in this example asynchronously getting some data).
const sleep = require('util').promisify(setTimeout)
const recursive = async () => {
await sleep(1000)
const data = await getDataViaPromise() // you can replace this with request-promise
if (!data) {
return recursive() // call the function again
}
return data // job done, return the data
}
The recursive function can be used as follows:
const main = async () => {
const data = await recursive()
// do something here with the data
}
Using your code, I refactored it as shown below. I hope it helps.
const requestP = require('request-promise');
const option = {
url: 'rest/api/2/search',
json: true,
qs: {
//jql: "project in (FLAGPS)",
}
};
/*
NOTE: Add async to the function so you can use await inside the function
*/
const callback = async (body) => {
// some code
//saving records to file
//some code
try {
const result = await requestP(option).auth('api-reader', token, true);
if (totalExtractedRecords < total) {
return callback(result);
}
return result;
} catch (error) {
console.log('Error Observed ' + error);
return error;
}
}
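For completeness, the kick-off stays the same as in the question; a minimal sketch (option and token as defined above):
requestP(option).auth('api-reader', token, true)
.then(callback)
.then(() => console.log('All records exported'))
.catch((err) => console.log('Error Observed ' + err));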
I created this code using feedback from Amir Popovich.
const rp = require('request-promise')
const fs = require('fs')
const pageSize = 200
const options = {
url: 'https://jira.xyz.com/rest/api/2/search',
json: true,
qs: {
jql: "project in (PROJECT_XYZ)",
maxResults: pageSize,
startAt: 0,
fields: '*all'
},
auth: {
user: 'api-reader',
pass: '<token>',
sendImmediately: true
}
}
const updateCSV = (element) => {
//fs.writeFileSync('issuedata.json', JSON.stringify(element.body, undefined, 4))
}
async function getPaginatedData(resolve, reject, ctr = 0) {
var total = 0
await rp(options).then((body) => {
let a = body.issues
console.log(a)
a.forEach(element => {
console.log(element)
//updateCSV(element)
});
total = body.total
}).catch((error) => {
reject(error)
return
})
ctr = ctr + pageSize
options.qs.startAt = ctr
if (ctr >= total) {
resolve();
} else {
await getPaginatedData(resolve, reject, ctr);
}
}
new Promise((resolve, reject) => getPaginatedData(resolve, reject))
.then(() => console.log('DONE'))
.catch((error) => console.log('Error observed - ' + error.name + '\n' + 'Error Code - ' + error.statusCode));

Who can explain nodejs promises in a loop to a PHP programmer

I have searched high and low but can't get my head around promises. What I do understand is how to define one promise and use its result by using .then.
What I do not understand is how I can create a loop to query the database for different blocks of records. This is needed due to a limit set on the number of records to query.
The predefined promise api call is used like this:
let getRecords = (name) => {
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, 1000, 1000, function(err, result){
// result gets processed here.
resolve(/* some variables here */);
});
});
};
Going with what I am used to, I tried:
for (let i=1; i<90000; i+=500) {
xyz.api.getRecords('james', i, 500, function(err, result){
// result gets processed here.
});
}
But then I can't access the information (which could be my own doing).
Also tried something like this:
function getRecords(name,i){
xyz.api.getRecords(name, i, 500, function(err, result){
// result gets processed here.
});
};
for (let i=1; i<90000; i+=500) {
var someThing = getRecords('james', i);
}
All tutorials only seem to use one query, and process the data.
How do I call the api function multiple times with different arguments, collect the data and process it once everything is retrieved?
The only thing I can think of is to write the info to a file, which is a terrible thought.
Using async/await
(async () => {
function getRecords(name,i){
// create new Promise so you can await for it later
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, function(err, result){
if(err) {
return reject(err);
}
resolve(result);
});
});
}
for (let i = 1; i < 90000; i += 500) {
// wait for the result in every loop iteration
const someThing = await getRecords('james', i);
}
})();
To handle errors you need to use a try/catch block:
try {
const someThing = await getRecords('james', i);
} catch(e) {
// handle somehow
}
Using only Promises
function getRecords(name, i) {
// create Promise so you can use Promise.all
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, function (err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
const results = [];
for (let i = 1; i < 90000; i += 500) {
// push Promise's to array without waiting for results
results.push(getRecords("james", i));
}
// wait for all pending Promise's
Promise.all(results).then((results) => {
console.log(results);
});
let count = 0;
function getRecords(name, i) {
return new Promise((resolve, reject) => {
setTimeout(() => {
// example results
resolve((new Array(10)).fill(0).map(() => ++count));
}, 100);
});
}
const results = [];
for (let i = 1; i < 9000; i += 500) {
results.push(getRecords("james", i));
}
Promise.all(results).then((results) => {
console.log("Results:", results);
console.log("Combined results:",[].concat(...results));
});
To handle errors you need to use a .catch() block:
Promise.all(results).then((results) => { ... }).catch((error) => {
// handle somehow
});
By returning a promise and calling your asynchronous function inside, you can resolve the result and then use it this way:
function getRecords (name, i) {
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, (err, result) => {
if (err) {
reject(err);
} else {
resolve(result);
}
});
});
}
for (let i = 1; i < 90000; i += 500) {
getRecords('james', i)
.then(result => {
// do stuff with result
})
}
Or, using async / await syntax:
async function getRecords (name, i) {
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, (err, result) => {
if (err) {
reject(err);
} else {
resolve(result);
}
});
});
}
// this needs to happen inside a function, node does not allow top level `await`
for (let i = 1; i < 90000; i += 500) {
const result = await getRecords('james', i);
// do stuff
}
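A minimal sketch of such a wrapper, using an async IIFE:
(async () => {
for (let i = 1; i < 90000; i += 500) {
const result = await getRecords('james', i);
// do stuff with result
}
})().catch(err => console.error(err));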
Get all of your records at once
const requests = [];
for (let i = 1; i < 90000; i += 500) {
requests.push(getRecords('james', i));
}
const results = await Promise.all(requests);

Iterating through a list and making sequential network calls

How do I iterate through a list and make sequential network calls using an SDK?
I am trying to use Coinbase's Node SDK to get the first 10 transactions for all accounts.
I have a list of accounts, and I'm iterating through them and calling client.account, client.transactions, and client.transactions (pagination), adding the results to an array of transactions and returning that array.
I couldn't get this to work with async/await or request-promise.
Any ideas?
https://developers.coinbase.com/api/v2#transactions
var rp = require('request-promise');
var coinbase = require('coinbase');
var client = new coinbase.Client({ 'apiKey': 'keyStuff', 'apiSecret': 'secretStuff' });
var accountList = ['acct1','acct2','acct3',];
var transactionList = [];
try {
let ps = [];
accountList.forEach(acctId => {
var account = client.getAccount(acctId, null);
ps.push(rp(account));
});
Promise.all(ps)
.then(responses => {
for (var i = 0; i < responses.length; i++) {
var result = responses[i];
rp(result.account.getTransactions(null))
.then(res => {
res.pagination = 10;
return rp(result.account.getTransactions(null, res.pagination));
}).catch(err => console.log(err))
.then(txns => {
try {
if (txns.length > 0) {
txns.forEach(function(txn) {
var transaction = {
"trade_type": "",
"price": ""
};
transaction.trade_type = txn.type;
transaction.price = txn.native_amount.amount;
transactionList.push(transaction);
});
}
}
catch (err) {
console.log(err);
}
});
}
}).catch(err => console.log(err));
return transactionList;
//-------------------------------------------------------------------
// if (accountList.length > 0) {
// for (let acctId of accountList) {
// console.log("account id: " + acctId);
// await delayTransactions(acctId);
// }
// console.log("got here last");
// return transactionList;
// }
}
catch (error) {
console.log(error);
}
The commented-out delay method has nested async calls like this:
await client.getAccount(accountId, async function(err, account) {
if (err) {
console.log(err);
}
else {
await account.getTransactions(null, async function(err, txns, pagination) {
.
.
.
Solved it by using async/await and promises. Awaiting the Coinbase methods wouldn't work because they aren't async functions (surprise!).
function delayedMethod() {
return new Promise(resolve => {
client.getAccount(accountId, async function(err, account) {
if (err) {
console.log(err);
}
else {
account.getTransactions(null, async function(err, txns, pagination) {
// ...
resolve();
});
}
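Filling in the elided parts, a hedged sketch of the full pattern (the SDK calls and the transaction mapping are taken from the question; the surrounding loop and function name are assumptions about how the pieces fit together):
async function getAllTransactions(accountList) {
const transactionList = [];
for (const acctId of accountList) {
// wrap the callback-style SDK calls so each account can be awaited in turn
await new Promise(resolve => {
client.getAccount(acctId, (err, account) => {
if (err) { console.log(err); return resolve(); }
account.getTransactions(null, (err, txns) => {
if (!err && txns) {
txns.forEach(txn => transactionList.push({
trade_type: txn.type,
price: txn.native_amount.amount
}));
}
resolve();
});
});
});
}
return transactionList;
}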

Updating many (100k+) documents in the most efficient way in MongoDB

I have a function that runs periodically and updates the item.price of some documents in my Prices collection. The Prices collection has 100k+ items. The function looks like this:
//Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
let results = {}, i = urls.length, c = 0;
const handler = (err, response, body) => {
let url = response.request.uri.href;
results[url] = { err, response, body };
if (++c === urls.length) {
cb(results);
}
};
while (i--) {
request(urls[i], handler);
}
};
// function to update the prices in our Prices collection.
const update = (cb) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, id;
for (url in responses) {
id = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${response.err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
itemid: id,
hash_name: key,
price: jsonResult[key]
}
});
Price.insertMany(allItemsArray).then(docs => {
logger.info(`Saved docs for ${id}`)
}, (e) => {
logger.error(`Error saving docs.`);
});
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
As you can see, to avoid iterating through 100k+ documents and updating each and every one of them separately, I delete them all at the beginning, then call the API that gives me these items with prices, and use insertMany to insert all of them into my Prices collection.
This updating process will happen every 30 minutes.
But I just now realised: what if some user wants to check the prices while my Prices collection is empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to avoid deleting them? (Remember, there are MANY documents to update every 30 minutes.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
gameid = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${response.err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
game_id: gameid,
market_hash_name: key,
price: jsonResult[key]
}
});
let bulk = Price.collection.initializeUnorderedBulkOp();
allItemsArray.forEach(item => {
bulk.find({market_hash_name: item.market_hash_name})
.upsert().updateOne(item);
});
bulk.execute((err, bulkers) => {
if (err) {
return logger.error(`Error bulking: ${err}`);
}
logger.info(`Updated Items for ${gameid}`)
});
// Price.insertMany(allItemsArray).then(docs => {
// logger.info(`Saved docs for ${gameid}`)
// }, (e) => {
// logger.error(`Error saving docs.`);
// });
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes everything and reinserts it, is 10x faster.
Anyone?
From my experience (updating millions of mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
do all your API calls separately and write the results as BSON into a file
invoke mongoimport and import that file into a new empty collection prices_new (JavaScript, let alone high-level OO wrappers, is just too slow for that)
rename prices_new -> prices with dropTarget=true (this is atomic, hence no downtime)
Schematically, it would look like this in JS
let fname = '/tmp/data.bson';
let apiUrls = [...];
async function doRequest(url) {
// perform a request and return an array of records
}
let responses = await Promise.all(apiUrls.map(doRequest));
// if the data too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
const update = (cb) => {
_request(urls, responses => {
let bulkUpdateOps = [], gameid;
Object.keys(responses).forEach(url => {
let response = responses[url];
gameid = url.split('/')[5].split('?')[0];
if (response.err) {
logger.error(`Error in request to ${url}: ${response.err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
Object.keys(jsonResult).forEach(key => {
bulkUpdateOps.push({
"updateOne": {
"filter": { market_hash_name: key },
"update": { "$set": {
game_id: gameid,
price: jsonResult[key]
} },
"upsert": true
}
});
});
}
if (bulkUpdateOps.length >= 1000) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
bulkUpdateOps = [];
}
});
if (bulkUpdateOps.length > 0) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
}
});
if (cb && typeof cb == 'function') {
cb();
}
}
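bulkWrite() covers the per-key upserts above; for the simpler case where every matched document receives the same change, the updateMany() mentioned earlier is enough. A minimal sketch (newPrice is hypothetical; field names are from the question):
// set the same price on every document of one game in a single round trip
Price.updateMany(
{ game_id: gameid },
{ "$set": { price: newPrice } }
).then(() => logger.info(`Updated prices for ${gameid}`))
.catch(e => logger.error(`Error updating: ${e}`));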
I have not tested anything, but you can try this; it might be helpful. I am using the Bluebird library for concurrency.
const Bluebird = require('bluebird');
const request = require('request');
let _request = (url) => {
return new Promise((resolve, reject) => {
request(url, (err, response, body) => {
if (err) {
return reject(err);
}
resolve(body);
});
});
};
const formatResponse = async (response) => {
// do stuff
return {
query: {}, // itemid: id,
body: {}
};
}
const bulkUpsert = (allItemsArray) => {
let bulk = Price.collection.initializeUnorderedBulkOp();
return new Promise((resolve, reject) => {
allItemsArray.forEach(item => {
bulk.find(item.query).upsert().updateOne(item.body);
});
bulk.execute((err, bulkers) => {
if (err) {
return reject(err);
}
return resolve(bulkers);
});
});
}
const getAndUpdateData = async (urls) => {
const allItemsArray = await Promise.all(urls.map(async (url) => {
const requestData = await _request(url); // requests within a chunk run in parallel
const formattedData = await formatResponse(requestData); // returns {query: {}, body: {}}
return formattedData;
}));
return bulkUpsert(allItemsArray);
};
};
function update() {
// split urls into as per your need 100/1000
var i, j, chunkUrls = [],
chunk = 100;
for (i = 0, j = urls.length; i < j; i += chunk) {
chunkUrls.push(getAndUpdateData(urls.slice(i, i + chunk)));
}
Bluebird.map(chunkUrls, function (chunk) {
return chunk; // each chunk is already a pending promise
}, {
concurrency: 1 // depends on concurrent request change 1 = 100 request get and insert in db at time
}).then(function () {
console.log("done");
}).catch(function () {
console.log("error");
});
}

In Node, how do I request JSON from multiple URLs using promises?

Please forgive the fairly case-specific question, though I think the general end goal could be of use to other people.
Goal: Populate a MongoDB with data requested from multiple JSON API URLs.
Short question: So far I've had some success with request-promise, which uses Bluebird:
var rp = require('request-promise');
var options = {
uri: 'http://www.bbc.co.uk/programmes/b006qsq5.json',
headers: {
'User-Agent': 'Request-Promise'
},
json: true
};
rp(options)
.then(function (body) {
// Mongoose allows us to query the db for an existing PID and upsert
var query = {pid: body.programme.pid},
update = {
name: body.programme.title,
pid: body.programme.pid,
desc: body.programme.short_synopsis
},
options = { upsert: true, new: true };
// Find the document
Programme.findOneAndUpdate(query, update, options, function(err, result) {
if (err) return res.send(500, { error: err });
return res.send("succesfully saved");
});
})
.catch(function (err) {
return res.send(err);
})
But how do I loop over an array of URLs, without the program failing if any of the promises are rejected?
Something like this for example, using Bluebird, fails if any of the URLs errors.
const urls = ['http://google.be', 'http://google.uk']
Promise.map(urls, rp)
.map((htmlOnePage, index) => {
return htmlOnePage;
})
.then(console.log)
.catch((e) => console.log('We encountered an error' + e));
As I want to write to the DB with successful requests, and ignore those that might not be responding right then, I need something that skips over rejected promises, which .all does not do.
Long question:
I've been reading up about promises all day and it's making my head hurt! But I've found some good resources, such as https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html, which mentions the use of a Promise factory. Would this work for my case? I initially thought I should make each request, process the result and add it to the DB, then move on to the next request; but having seen .all I thought I should do all the requests, save the results in an array and loop over that with my DB saving function.
Should I even be using Promises for this? Maybe I should just make use of something like async.js and run my requests in series.
Thanks very much for any help or ideas.
But how do I loop over an array of URLs, without the program failing if any of the promises are rejected?
If you return a value from .catch other than a rejected promise, you will return a resolved promise.
So, your .then for each individual request could return an object like
{
success: true,
result: whateverTheResultIs
}
and your catch returns
{
success: false,
error: whateverTheErrorIs
}
Really, you don't NEED the success property; it's a convenience though.
So the code would be - assuming process(url) returns a Promise
Promise.map(urls, url =>
process(url)
.then(result => ({result, success:true}))
.catch(error => ({error, success:false}))
)
.then(results => {
let succeeded = results.filter(result => result.success).map(result => result.result);
let failed = results.filter(result => !result.success).map(result => result.error);
});
Or, in ES5
Promise.map(urls, function (url) {
return process(url).then(function (result) {
return { result: result, success: true };
}).catch(function (error) {
return { error: error, success: false };
});
}).then(function (results) {
var succeeded = results.filter(function (result) {
return result.success;
}).map(function (result) {
return result.result;
});
var failed = results.filter(function (result) {
return !result.success;
}).map(function (result) {
return result.error;
});
});
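On Node 12.9+ this reflect pattern is built in as Promise.allSettled, which resolves with a status/value/reason object per input promise; a minimal sketch using the same process function:
Promise.allSettled(urls.map(process)).then(results => {
const succeeded = results.filter(r => r.status === 'fulfilled').map(r => r.value);
const failed = results.filter(r => r.status === 'rejected').map(r => r.reason);
});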
I don't know if this fits your case, but I think you can use a counter to check when all promises have returned, regardless of whether each one was resolved or rejected:
var heroes = [
'Superman',
'Batman',
'Spiderman',
'Capitan America',
'Ironman',
];
function getHero(hero) {
return new Promise((resolve, reject) => {
setTimeout(() => {
return Math.round(Math.random()) ? resolve(hero + ' lives') : reject(hero + ' dead');
}, Math.random() * 3000)
})
}
function checkHeroes() {
var checked = heroes.length;
heroes.forEach((hero) => {
getHero(hero)
.then((res) => {
checked --;
console.log(res);
if (!checked) done();
})
.catch((err) => {
checked --;
console.log(err);
if (!checked) done();
});
})
}
function done() {
console.log('All heroes checked');
}
checkHeroes();
I think your issue is less about the Bluebird API than about structuring your promise chain.
const reducePropsToRequests = (props) => Promise.resolve(Object
.keys(props)
.reduce((acc, key) => {
acc[key] = request(props[key]);
return acc;
}, {}));
const hashToCollection = (hash) => Promise.resolve(Object
.keys(hash)
.reduce((acc, k) => {
return [...acc, {source: k, data: hash[k]}];
}, []));
const fetchFromSources = (sources) => Promise.props(sources);
const findSeveralAndUpdate = (results) => Promise
.each(results, obj => {
// you have access to the original {a: 'site.com'}
// here, so use that 'a' prop to your advantage by abstracting out
// your db config somewhere outside your service
return Programme.findOneAndUpdate(someConfig[obj.source], obj.data);
})
const requestFromSeveralAndUpdate = (sources) => reducePropsToRequests(sources)
.then(fetchFromSources)
.then(hashToCollection)
.then(findSeveralAndUpdate)
.catch(/* some err handler */);
requestFromSeveralAndUpdate({ a: 'site.com', b: 'site.net' });
I'd just use request and write my own promise with a try/catch inside that only resolves. Pseudo-example below:
var request = require('request')
var urls = ['http://sample1.com/json', 'http://sample2.com/json']
var processUrl = (url) => {
return new Promise((resolve) => {
try {
var myRequest = {
uri: url,
method: 'GET',
header: {...}
};
request(myRequest, (err, res, body) => {
// resolve with the error instead of rejecting, so a
// Promise.all over these promises never fails
if (err) {
return resolve(err);
}
resolve(body);
})
}
catch(e) {
resolve(e)
}
})
}
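Because processUrl only ever resolves, Promise.all over the mapped URLs will always settle; a minimal usage sketch:
Promise.all(urls.map(processUrl)).then(results => {
// each entry is either a response body or an Error object - check before use
results.forEach(result => console.log(result));
});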
