I'm new to promises and I'm not sure how to write this code with Bluebird promises. Can someone explain what it should look like with promises?
for (var i = 10; i >= 0; i--) {
    var path = '127.0.0.1/getdata/' + i;
    http.get({
        path: path,
    }, (res) => {
        var data = res.resume();
    });
}
Thanks for everything
If you just want to collect the results of 10 requests that don't depend on one another, then you can launch them all in parallel and let promises collect the results for you. Since you mentioned Bluebird, you can let Bluebird create promisified versions of the request module's methods:
var Promise = require('bluebird');
var request = Promise.promisifyAll(require('request'), { multiArgs: true });

var promises = [];
for (var i = 10; i >= 0; i--) {
    var path = '127.0.0.1/getdata/' + i;
    promises.push(request.getAsync(path).spread(function(response, body) {
        return body;
    }));
}

Promise.all(promises).then(function(results) {
    // array of results here
}, function(err) {
    // error here
});
function makeRequest() {
    var items = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1];
    return Promise.map(items, function(item) {
        console.log(item);
        return get('/echo/html/');
    });
}

makeRequest().then(function(results) {
    console.log(results);
});
This approach uses Bluebird's Promise.map; see the fiddle for a runnable demo.
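The get() helper isn't shown above; it's defined in the fiddle. Here is a minimal sketch of one possible implementation that wraps Node's http.get in a promise (the host and path are just placeholders, not part of the original answer):
// Hypothetical get() helper: resolves with the response body of an HTTP GET.
var http = require('http');

function get(path) {
    return new Promise(function(resolve, reject) {
        http.get({ host: '127.0.0.1', path: path }, function(res) {
            var body = '';
            res.on('data', function(chunk) { body += chunk; });
            res.on('end', function() { resolve(body); });
        }).on('error', reject);
    });
}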
I'm trying to resolve a promise inside a for loop in Node.js. In my code I have a for loop in which I call a function findincollection that returns a promise. I then push the data into the finalresult array and resolve it once the for loop completes. The issue I'm facing is that it doesn't resolve the complete data: the for loop finishes before all the promises are resolved.
The line console.log(p1); always prints Promise { <pending> }, but the promise does eventually get resolved; as you can see in the p1.then() handler, I do get the data one item at a time. The finalresult array, however, resolves too early. I'd also like to know why I always see Promise { <pending> } even though the promises do eventually resolve.
Please have a look at my code below:
var mob = [123, 456, 789];
var list = [1, 2, 3, 4, 5, 6, 7, 8];
var res = [];
var finalresult = [];

for (y = 0; y < list.length; y++) {
    const p1 = findincollection(list[y], mob, savetofile);
    console.log(p1); // always prints Promise { <pending> } 8 times
    p1.then(function(dt) {
        finalresult.push(dt); // pushes all 3 objects one by one
        console.log(dt); // prints 3 objects one by one
        client.close();
        if (y == (collist.length)) { // check if the loop has reached the last index
            resolve(finalresult); // resolves the finalresult array with 1 object only instead of 3. I want this part to resolve the complete finalresult array, i.e. with all 3 objects.
        }
    });
}
const findincollection = function(index, mob, save) {
    return new Promise((resolve, reject) => {
        MongoClient.connect(url, function(err, client) {
            assert.equal(null, err);
            const db = client.db(dbName);
            const collection = db.collection(col);
            collection.find({ 'Numbers': { $in: mob } }).toArray(function(err, docs) {
                const c = save(index, docs);
                c.then(function(m) {
                    console.log(m); // prints 'Saved' 3 times, as the mob array length is 3
                    client.close();
                    return resolve(res);
                });
            });
        });
    });
}
const save = function(index, data) {
    return new Promise((resolve, reject) => {
        if (data.length > 0) {
            for (var k = 0; k < data.length; k++) {
                res.push(data[k]);
            }
            fs.appendFile('textlogs/' + index + '.txt', data, function(err) {
                if (err) throw err;
                resolve('Saved');
            });
        }
    });
}
I'm not able to figure out how to make the loop wait until all the promises are resolved (or otherwise make the code behave synchronously) so that the finalresult array is only resolved at the end. How do I do that?
What you need here is Promise.all(). It returns a new promise, which gets resolved once all passed promises get resolved.
You can try something similar to:
var promises = [];
for (y = 0; y < list.length; y++) {
    promises.push(findincollection(list[y], mob, savetofile));
}

Promise.all(promises)
    .then(dt => {             // <-- this runs once all your promises have resolved
        finalresult = dt;     // dt is an array of every resolved value, in order
        console.log(finalresult);
        client.close();       // close once, after everything has finished
        resolve(finalresult); // resolve your outer promise with the complete array
    });
I see that you're closing client inside each promise, and client is not a local variable, so I think you should close it only after all of your promises have completed.
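Alternatively, instead of calling resolve() from inside the .then() handler, you can return the Promise.all() chain itself. A minimal sketch (getAll is a hypothetical wrapper name; client and findincollection are assumed to be the ones from your code):
// Sketch: return the Promise.all() chain instead of resolving an outer promise by hand.
const getAll = function(list, mob, savetofile) {
    return Promise.all(list.map(item => findincollection(item, mob, savetofile)))
        .then(dt => {
            client.close();   // close once, after every request has settled
            return dt;        // becomes the resolved value of getAll()
        });
};

getAll(list, mob, savetofile).then(finalresult => {
    console.log(finalresult); // the complete array of results
});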
If I'm doing an async call like the following, how can I chain the calls with promises so that I can process the results in order? In this example, what ends up happening is that arr gets its items pushed out of order. I'd prefer an answer with promises, but anything will do as long as it works.
var fbArrOfAlbumNames = ['Profile Pictures', 'Cover Photos', 'Mobile Uploads'];
var arr = [];

for (var x = 0; x < fbArrOfAlbumNames.length; x++) {
    (function(cntr) {
        FB.api('/' + fbArrOfAlbumNames[cntr] + '/photos/', { fields: 'picture,album' }, function(response) {
            arr.push(response);
        });
    })(x);
}
Assuming your Ajax calls can actually run in parallel and you just want the results in order, you can promisify the Ajax function and use Promise.all() to collect the results in their original order:
// promisify the ajax call
function fbAPIPromise(path, args) {
    return new Promise(function(resolve, reject) {
        FB.api(path, args, function(result) {
            if (!result) return resolve(null);
            if (result.error) return reject(result.error);
            resolve(result);
        });
    });
}
var promises = [];
for (var x = 0; x < fbArrOfAlbumNames.length; x++) {
    promises.push(fbAPIPromise('/' + fbArrOfAlbumNames[x] + '/photos/', { fields: 'picture,album' }));
}

Promise.all(promises).then(function(results) {
    // results is an array of results in the original order
}).catch(function(err) {
    // an error occurred
});
If you have to loop and make a bunch of calls to a repository (or, in my case, a gateway), how do I do that asynchronously, i.e. without wrapping my async calls inside a synchronous for loop?
For example, what would be a better approach (restructuring this code) to loop through a set of ids and make the call to find() below, like I'm trying to do?
The goal: I want to take an array of ids, iterate over them, and during each iteration use the id to call find() on my gateway to fetch the object for that id, then stuff it into a final array which I'll return when all is said and done.
What I'm using:
q (for promises)
co-pg (to hit the database)
someModule.js
var _gateway = require('./database/someGateway');

var cars = [];
var car;

for (var i = 0; i < results.docs.length; i++) {
    var carId = results.docs[i].carId;

    _gateway.find(carId)
        .then(function(data) {
            console.log('data[0]: ' + data[0].id);
            cars.push(data[0]);
        })
        .done();
}

console.log("cars: " + cars.length); // length here is 0 because my async calls weren't done yet
result(cars);
someGateway.js
'use strict';

var Q = require('q');
var _carModel = require('../../models/car');

module.exports = {
    models: {
        car: _carModel
    },
    find: _find
};

function _find(carId) {
    return _carModel.find(carId);
}
carModel.js
'use strict';

var Q = require('q');
var pg = require('co-pg')(require('pg'));
var config = require('../../models/database-config');

var car = module.exports = {};

car.find = Q.async(function *(id) {
    var query = 'SELECT id, title, description FROM car WHERE id = ' + id;
    var connectionResults = yield pg.connectPromise(config.connection);
    var client = connectionResults[0];
    var done = connectionResults[1];
    var result = yield client.queryPromise(query);
    done();
    console.log("result.rows[0].id: " + result.rows[0].id);
    return result.rows;
});
So I need help understanding how to refactor my code in someModule.js to get this working properly, so that I make a call to find() for each id, stuff each found car into the array, and then return the array. The carModel code is async; it goes out to a physical database to perform the actual query lookup.
UPDATE #1
OK, after a couple more hours of trying all sorts of sh** (q.all() and a ton of other combinations of callback code, etc.), here's what I have at this point:
someModule.js
var _data;
var Q = require('q');
var _solrClient = require('../models/solr/query');
var _solrEndpoint = "q=_text&indent=true&rows=10";
var _postgreSQLGateway = require('./database/postgreSQLGateway');

module.exports = {
    data: function(data) {
        _data = data;
    },
    find: function(text, result) {
        if (!text) {
            result(null);
        }

        _solrClient.query(_solrEndpoint, function(results) {
            var carIds = [];
            var cars = [];
            var car;

            for (var i = 0; i < results.docs.length; i++) {
                carIds.push(results.docs[i].carId);
            }

            for (var i = 0; i < carIds.length; i++) {
                var car = _postgreSQLGateway.find(carIds[i], function(o) {
                    console.log("i: " + i);
                });
            }
        });
    }
};
someGateway.js
'use strict';

var Q = require('q');
var _carModel = require('../../models/postgreSQL/car');

module.exports = {
    models: {
        car: _carModel
    },
    find: _find
};

function _find(carId, foundCar) {
    console.log("CALL MADE");
    _carModel.find(carId)
        .then(function(car) {
            console.log("car: " + car[0].id);
            foundCar(car);
        });
}
carModel.js
[same code, has not changed]
Of course, I noticed that the for loop fires off all my function calls asynchronously, so when I console.log the i it's already 10 because the for loop has finished; as we know, the rest of the console.logs happen later, after the callbacks complete.
So I still can't get this working right...
Also, when I was playing around I started down this path, but it hit a brick wall:
var find = Q.async(function(carIds, cars) {
    var tasks = [];
    var foundCars = [];

    for (var i = 0; i < carIds.length; i++) {
        tasks.push(_postgreSQLGateway.find(carIds[' + i + ']));
    }

    Q.all([tasks.join()]).done(function(values) {
        for (var i = 0; i < values.length; i++) {
            console.log("VALUES: " + values[0]);
            foundCars.push(values[0]);
        }
        cars(foundCars);
    });
});
I ended up with [object Promise] every time for values[i] instead of a car.
I don't know the Q promises library, but here's a solution using generic Promises built into node.js. This runs all the requests in parallel and then when all results have been collected, it runs the final .then() handler with all the results:
var _gateway = require('./database/someGateway');

var promises = [];
for (var i = 0; i < results.docs.length; i++) {
    promises.push(_gateway.find(results.docs[i].carId).then(function(data) {
        console.log('data[0]: ' + data[0].id);
        return data[0];
    }));
}

Promise.all(promises).then(function(cars) {
    // cars will be an array of results in order
    console.log("cars: " + cars.length);
    result(cars);
});
Individual promise libraries (such as Bluebird, the one I know) have features built in that let you do this kind of thing in even less code, but I've intentionally kept this answer to standard promise features.
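For comparison, a Bluebird version using Promise.map might look like this (a sketch, assuming _gateway.find() returns a promise and that results and result are the same ones as above):
var Promise = require('bluebird');

// Map each doc to its looked-up car; Promise.map resolves with the mapped results in order.
Promise.map(results.docs, function(doc) {
    return _gateway.find(doc.carId).then(function(data) {
        return data[0];
    });
}).then(function(cars) {
    console.log("cars: " + cars.length);
    result(cars);
});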
This is potentially really easy with the vanilla Promise API from ES6 (also provided by Bluebird and other libraries). First, map the IDs to an array of promises:
var promises = results.docs.map(function(doc) {
    return _gateway.find(doc.carId);
});
Then create a promise for the aggregate result:
var allDone = Promise.all(promises);
Then, inside the .then() callback of the aggregate promise, you'll have a final array of results, with the same length and order as the carId array:
allDone.then(function(results) {
    // do something with "results"
});
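Putting those pieces together in someModule.js might look something like this (a sketch; result is the callback from the original code):
var promises = results.docs.map(function(doc) {
    return _gateway.find(doc.carId);
});

Promise.all(promises).then(function(found) {
    // each entry is the rows array returned by find(); keep the first row of each
    var cars = found.map(function(data) {
        return data[0];
    });
    result(cars);
});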
I need to go through an array of values, look each one up in Redis (to see if it exists), and then continue. For example:
var to_check = [ 1, 2, 3 ]
var found_elements = []

for (var i = 0; i < to_check.length; i++) {
    redis.EXISTS('namespace:' + to_check[i], function(err, value) {
        if (!err && value) {
            found_elements.push(to_check[i])
        }
    })
}

console.log(found_elements.join(', '))
I need the last line to execute only after all the callbacks sent to Redis have run. What would be the best way to approach this?
Use Promises to handle complex async operations; parallel execution is one of them.
var to_check = [ 1, 2, 3 ];
var found_elements = [];

Promise.all(to_check.map(function(item) {
    return new Promise(function(resolve, reject) {
        redis.EXISTS('namespace:' + item, function(err, value) {
            if (err) {
                return reject(err);
            }
            if (value) {
                found_elements.push(item);
            }
            resolve();
        })
    });
})).then(function() {
    console.log('All operations are done');
}).catch(function(err) {
    console.log(err);
});
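If you also want found_elements to come back in the same order as to_check, you could resolve each value (or null) and filter afterwards, rather than pushing from inside the callbacks. A sketch:
Promise.all(to_check.map(function(item) {
    return new Promise(function(resolve, reject) {
        redis.EXISTS('namespace:' + item, function(err, value) {
            if (err) return reject(err);
            resolve(value ? item : null);   // keep the item only if it exists
        });
    });
})).then(function(values) {
    var found_elements = values.filter(function(v) { return v !== null; });
    console.log(found_elements.join(', '));
}).catch(function(err) {
    console.log(err);
});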
I'm sure there are other ways, but this should work (not tested):
var to_check = [ 1, 2, 3 ]
var found_elements = []

for (var i = 0; i < to_check.length; i++) {
    (function(i) {
        redis.EXISTS('namespace:' + to_check[i], function(err, value) {
            if (!err && value) {
                found_elements.push(to_check[i])
            }
            if (i == (to_check.length - 1)) {
                console.log(found_elements.join(', '))
            }
        })
    })(i);
}
I'm using async with Node.js. It runs fine when I have a fixed number of functions to execute:
async.series([
    function(cb) { ...one... },
    function(cb) { ...two... },
], function(err, res) {
    ...done
});
But now I need to execute an arbitrary number of functions, depending on the values in an array, and I cannot figure out how to pass the array elements:
var values = [1, 2, 3, ... ];
var calls = [];

for (var i = 0; i < values.length; i++) {
    calls.push(function(cb) {
        // HOW TO PASS values[i] HERE?
    });
}

async.series(calls, function(err, res) {
    ...done
});
That's just the common async-in-a-loop problem. You need a closure over the value of i in which the pushed function expression is declared. This can be done either with an IIFE (immediately-invoked function expression) as your loop body, or even more easily with .forEach() or .map():
var calls = values.map(function closure(val, i) {
    return function(cb) {
        // use val and i here, which are bound to this execution of closure
    };
});
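For example, a minimal sketch of how that plugs back into async.series (here each task just echoes its value straight back to its callback):
var values = [1, 2, 3];

var calls = values.map(function closure(val, i) {
    return function(cb) {
        // do the real async work with val here; this sketch just passes it through
        cb(null, 'processed ' + val);
    };
});

async.series(calls, function(err, res) {
    // res is ['processed 1', 'processed 2', 'processed 3']
});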
You should be able to use a closure:
var values = [1, 2, 3, ... ];
var calls = [];

for (var i = 0; i < values.length; i++) {
    calls.push((function(index) {
        return function(cb) {
            // use values[index] here
        };
    })(i));
}

async.series(calls, function(err, res) {
    ...done
});