res.json should execute only after the forEach loop completes its execution
var todoarr = (req.body.data) ? req.body.data : undefined
todoarr.forEach(function(element) {
    if(element.done == true) {
        TodoService.removeTodo(element, function(success) {
        });
    }
});
res.json("success");
You can try async.js (http://caolan.github.io/async/), specifically its each method (http://caolan.github.io/async/docs.html#each), as sketched below.
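A minimal sketch of that approach, assuming TodoService.removeTodo invokes its callback once the todo has been removed:
const async = require('async');

const todoarr = (req.body.data) ? req.body.data : [];
// only the completed todos need removing
const completed = todoarr.filter(element => element.done == true);

async.each(completed, function(element, callback) {
    TodoService.removeTodo(element, function(success) {
        // tell async.each this element is done
        callback();
    });
}, function(err) {
    if (err) return res.status(500).json(err);
    // runs only after every removeTodo callback has fired
    res.json("success");
});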
Or you can try to use Promise.all.
For example:
let promiseArr = [];
todoarr.forEach(function(element) {
    if(element.done == true) {
        promiseArr.push(somePromiseMethod(element));
    }
});

// now execute Promise.all
Promise.all(promiseArr)
    .then((result) => res.send("success"))
    .catch((err) => res.send(err));
More info here: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
An example of such a promise method:
function somePromiseMethod(element) {
    return new Promise((resolve, reject) => {
        TodoService.removeTodo(element, function(success) {
            resolve();
        });
    });
}
Hope this helps.
You can't send multiple responses for a single request; the only thing you can do is send a single response with the array of results:
Example with async:
const async = require('async')

// FIX ME: this isn't correctly handled!
const todoarr = (req.body.data) ? req.body.data : undefined

let results = []
async.each(todoarr, function(element, callback) {
    console.log('Processing todo ' + element)
    if(element.done == true) {
        TodoService.removeTodo(element, function(err, success) {
            if(err) {
                callback(err)
            } else {
                results.push(success)
                callback(null, success)
            }
        })
    } else {
        // nothing to remove, but async.each still needs to be told this element is done
        callback()
    }
}, function(err) {
    if(err) {
        console.log('An element failed to process', err)
        res.status(500).json(err)
    } else {
        console.log('All elements have been processed successfully')
        // array with the results of each removeTodo job
        res.status(200).json(results)
    }
})
You can send the response inside the callback function of forEach.
Modify your function so that it will call res.json() on the last iteration only.
Example:
var todoarr = (req.body.data) ? req.body.data : undefined
todoarr.forEach(function(element, index) {
    if(element.done == true) {
        TodoService.removeTodo(element, function(success) {
        });
    }
    if(index == todoarr.length-1) {
        res.json("success");
    }
});
However, it may not follow coding standards, but it can definitely solve the problem.
Related
I try to do multiple requests in a forEach loop and push each response to an array, but I always get only the first item
//Get all roles
router.get('/', async (req, res) => {
    try {
        knex.from(roles_table)
            .then(roles => {
                let rolePermissions = [];
                roles.forEach(role => {
                    knex.select(permissions_table + '.*').from(permissions_table)
                        .innerJoin(permissions_roles_table, permissions_table + '.id', permissions_roles_table + '.id_permission')
                        .where(permissions_roles_table + '.id_role', '=', role.id)
                        .then(rows => {
                            role.permissions = rows;
                            rolePermissions.push(role)
                        });
                });
                res.status(200).json(rolePermissions);
            });
    } catch (err) {
        res.status(500).json({message: err});
    }
});
You must learn about Promises and asynchronous code if you want to write modern JS.
Something like this could be what you are looking for:
// This const will store a Promise of an array of Promises
const allRows = knex.from(roles_table)
    .then(roles => {
        return roles.map(role => {
            return knex.select(permissions_table + '.*').[...]
        });
    });

// Promise.all will wait for every promise to succeed, or for the first one to fail
return allRows
    .then(rows => Promise.all(rows))
    .then(results => {
        // results is now an array containing all the rows
        res.status(200).json(results);
    })
    .catch(err => {
        res.status(500).json({message: err});
    });
By the way, you don't need try/catch around a Promise; there is the .catch method.
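For instance, a minimal sketch of the same route without the try/catch (reusing the knex query from the question):
router.get('/', (req, res) => {
    knex.from(roles_table)
        .then(roles => { /* build and send the response as above */ })
        // any error thrown anywhere in the chain lands here
        .catch(err => res.status(500).json({ message: err }));
});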
spliced.forEach((v) => {
const val = v.split(',') //.slice(0, -2)
formData.devID = val[0];
formData.mobileno = val[1].slice(0, -1);
//const cleanVal = [val[0], val[1].slice(0, -1)];
req.body = formData;
device.validate(formData, req.user, req.app.db.models, (error, result) => {
let errorArr, resultArr = [];
if (error) {
errorArr.push(error)
};
if (result) {
resultArr.push(result)
};
if (errorArr) {
res.status(200).send({
errors: [error]
});
} else {
const formDataFull = new req.app.db.models.Device(formData);
req.app.db.models.Device.bulkWrite([{
insertOne: {
formDataFull
}
},
{
ordered: false
}
])
.then(function(device) {
console.log("created device err,", device);
workflow.emit('response');
res.status(200).send({
success: true
});
})
.catch(function(err) {
console.log("in catch err", err);
if (err.code == 11000) {
workflow.outcome.errors.push("Device Id already exists.");
return workflow.emit('response');
} else return workflow.emit('response', err);
});
}
})
})
spliced is an array of CSV values. So in the above code, only the last object, i.e. {formData}, is saved. How should I modify my code so that forEach saves every element and not just the last one?
Okay, so this problem was solved simply by using a for loop. Some people answered using loops and map, but sadly none of it worked for me. Maybe my implementation was wrong. Nevertheless, a simple for loop saved the day. Thank you all.
PS: This can also work if you use a counter to track the callbacks and do the final computation once the counter equals the number of expected callbacks, as sketched below.
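A minimal sketch of that counter approach (doAsyncWork is a hypothetical stand-in for the per-element validate-and-save step above):
let pending = spliced.length;
let errors = [];

spliced.forEach((v) => {
    // doAsyncWork is a placeholder for the validate/save logic for one element
    doAsyncWork(v, (err) => {
        if (err) errors.push(err);
        pending--;
        // only the last callback to finish sends the response
        if (pending === 0) {
            if (errors.length) return res.status(200).send({ errors });
            res.status(200).send({ success: true });
        }
    });
});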
So I have a method, which I want to call multiple times in a loop. This is the function:
function PageSpeedCall(callback) {
    var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[0]}&strategy=mobile&key=${keys.pageSpeed}`;

    // second call
    var results = '';
    https.get(pagespeedCall, resource => {
        resource.setEncoding('utf8');
        resource.on('data', data => {
            results += data;
        });
        resource.on('end', () => {
            callback(null, results);
        });
        resource.on('error', err => {
            callback(err);
        });
    });
    // callback(null, );
}
As you see, this is an async function that calls the PageSpeed API. It then gets the response thanks to the callback and renders it in the view. Now how do I get this to work in a for/while loop? For example:
function PageSpeedCall(websites, i, callback) {
    var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${websites[i]}&strategy=mobile&key=${keys.pageSpeed}`;

    // second call
    var results = '';
    https.get(pagespeedCall, resource => {
        resource.setEncoding('utf8');
        resource.on('data', data => {
            results += data;
        });
        resource.on('end', () => {
            callback(null, results);
        });
        resource.on('error', err => {
            callback(err);
        });
    });
    // callback(null, );
}
var websites = ['google.com','facebook.com','stackoverflow.com'];

for (let i = 0; i < websites.length; i++) {
    PageSpeedCall(websites, i);
}
I want to get a report for each of these sites. The length of the array will change depending on what the user does.
I am using async.parallel to call the functions like this:
let freeReportCalls = [PageSpeedCall, MozCall, AlexaCall];

async.parallel(freeReportCalls, (err, results) => {
    if (err) {
        console.log(err);
    } else {
        res.render('reports/report', {
            title: 'Report',
            // bw: JSON.parse(results[0]),
            ps: JSON.parse(results[0]),
            moz: JSON.parse(results[1]),
            // pst: results[0],
            // mozt: results[1],
            // bw: results[1],
            al: JSON.parse(results[2]),
            user: req.user,
        });
    }
});
I tried to use promise chaining, but for some reason I cannot put it together in my head. This is my attempt.
return Promise.all([PageSpeedCall,MozCall,AlexaCall]).then(([ps,mz,al]) => {
if (awaiting != null)
var areAwaiting = true;
res.render('admin/', {
title: 'Report',
// bw: JSON.parse(results[0]),
ps: JSON.parse(results[0]),
moz: JSON.parse(results[1]),
// pst: results[0],
// mozt: results[1],
// bw: results[1],
al: JSON.parse(results[2]),
user: req.user,
});
}).catch(e => {
console.error(e)
});
I tried doing this:
return Promise.all([for(let i = 0;i < websites.length;i++){PageSpeedCall(websites, i)}, MozCall, AlexaCall]).
then(([ps, mz, al]) => {
if (awaiting != null)
var areAwaiting = true;
res.render('admin/', {
title: 'Report',
// bw: JSON.parse(results[0]),
ps: JSON.parse(results[0]),
moz: JSON.parse(results[1]),
// pst: results[0],
// mozt: results[1],
// bw: results[1],
al: JSON.parse(results[2]),
user: req.user,
});
}).catch(e => {
console.error(e)
});
But node just said it's stupid.
And this would work if I didn't want to pass the websites and the iterator into the functions. Any idea how to solve this?
To recap. So far the functions work for single websites. I'd like them to work for an array of websites.
I'm basically not sure how to call them, and how to return the responses.
It's much easier if you use fetch and async/await:
const fetch = require('node-fetch');

async function PageSpeedCall(website) {
    const pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
    const result = await fetch(pagespeedCall);
    return await result.json();
}

async function callAllSites(websites) {
    const results = [];
    for (const website of websites) {
        results.push(await PageSpeedCall(website));
    }
    return results;
}

callAllSites(['google.com', 'facebook.com', 'stackoverflow.com'])
    .then(results => console.log(results))
    .catch(error => console.error(error));
Which is even better with Promise.all:
async function callAllSites(websites) {
    return await Promise.all(websites.map(website => PageSpeedCall(website)));
}
Starting with Node 7.5.0 you can use native async/await:
async function PageSpeedCall(website) {
    var pagespeedCall = `https://www.googleapis.com/pagespeedonline/v4/runPagespeed?url=https://${website}&strategy=mobile&key=${keys.pageSpeed}`;
    return await promisify(pagespeedCall);
}

async function getResults() {
    const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
    // the map callback must be async so await is allowed inside it;
    // Promise.all collects the per-site results
    return Promise.all(websites.map(async website => {
        try {
            return await PageSpeedCall(website);
        }
        catch (ex) {
            // handle exception
        }
    }));
}
Node http "callback" to promise function:
function promisify(url) {
    // return new pending promise
    return new Promise((resolve, reject) => {
        // select http or https module, depending on requested url
        const lib = url.startsWith('https') ? require('https') : require('http');
        const request = lib.get(url, (response) => {
            // handle http errors
            if (response.statusCode < 200 || response.statusCode > 299) {
                reject(new Error('Failed to load page, status code: ' + response.statusCode));
            }
            // temporary data holder
            const body = [];
            // on every content chunk, push it to the data array
            response.on('data', (chunk) => body.push(chunk));
            // we are done, resolve promise with those joined chunks
            response.on('end', () => resolve(body.join('')));
        });
        // handle connection errors of the request
        request.on('error', (err) => reject(err))
    })
}
Make PageSpeedCall a promise and push that promise to an array as many times as you need, e.g. myArray.push(PageSpeedCall(foo)) then myArray.push(PageSpeedCall(foo2)) and so on. Then you Promise.all the array.
If subsequent async calls require the result of a prior async call, that is what .then is for.
Promise.all()
Promise.all([promise1, promise2, promise3]).then(function(values) {
    console.log(values);
});
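A minimal sketch of that approach for this question, assuming PageSpeedCall(website) has been rewritten to return a Promise (as in the answers above); .map is just a compact way of pushing one promise per site into an array:
const websites = ['google.com', 'facebook.com', 'stackoverflow.com'];
// build one promise per site, then wait for all of them
const calls = websites.map(website => PageSpeedCall(website));

Promise.all(calls)
    .then(results => {
        // results[i] corresponds to websites[i]
        console.log(results);
    })
    .catch(err => console.error(err));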
Please forgive the fairly case-specific question, though I think the general end goal could be of use to other people.
Goal: Populate a MongoDB with data requested from multiple JSON API URLs.
Short question: So far I've had some success with request-promise, which uses Bluebird:
var rp = require('request-promise');

var options = {
    uri: 'http://www.bbc.co.uk/programmes/b006qsq5.json',
    headers: {
        'User-Agent': 'Request-Promise'
    },
    json: true
};

rp(options)
    .then(function (body) {
        // Mongoose allows us to query the db for an existing PID and upsert
        var query = {pid: body.programme.pid},
            update = {
                name: body.programme.title,
                pid: body.programme.pid,
                desc: body.programme.short_synopsis
            },
            options = { upsert: true, new: true };

        // Find the document
        Programme.findOneAndUpdate(query, update, options, function(err, result) {
            if (err) return res.send(500, { error: err });
            return res.send("successfully saved");
        });
    })
    .catch(function (err) {
        return res.send(err);
    })
But how do I loop over an array of URLs, without the program failing if any of the promises are rejected?
Something like this for example, using Bluebird, fails if any of the URLs errors.
const urls = ['http://google.be', 'http://google.uk']

Promise.map(urls, rp)
    .map((htmlOnePage, index) => {
        return htmlOnePage;
    })
    .then(console.log)
    .catch((e) => console.log('We encountered an error' + e));
As I want to write to the DB with successful requests, and ignore those that might not be responding right then, I need something that skips over rejected promises, which .all does not do.
Long question:
I've been reading up about promises all day and it's making my head hurt! But I've found some good resources, such as https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html, which mentions the use of a Promise factory. Would this work for my case? I initially thought I should make each request, process the result and add it to the DB, then move on to the next request; but having seen .all I thought I should do all the requests, save the results in an array and loop over that with my DB saving function.
Should I even be using Promises for this? Maybe I should just make use of something like async.js and run my requests in series.
Thanks very much for any help or ideas.
But how do I loop over an array of URLs, without the program failing if any of the promises are rejected?
If you return a value from .catch other than a rejected promise, you will return a resolved promise.
So, your .then for each individual request could return an object like
{
    success: true,
    result: whateverTheResultIs
}
and your catch returns
{
    success: false,
    error: whateverTheErrorIs
}
Really you don't NEED the success property; it's a convenience, though.
So the code would be the following, assuming process(url) returns a Promise:
Promise.map(urls, url =>
    process(url)
        .then(result => ({result, success: true}))
        .catch(error => ({error, success: false}))
)
.then(results => {
    let succeeded = results.filter(result => result.success).map(result => result.result);
    let failed = results.filter(result => !result.success).map(result => result.error);
});
Or, in ES5
Promise.map(urls, function (url) {
    return process(url).then(function (result) {
        return { result: result, success: true };
    }).catch(function (error) {
        return { error: error, success: false };
    });
}).then(function (results) {
    var succeeded = results.filter(function (result) {
        return result.success;
    }).map(function (result) {
        return result.result;
    });
    var failed = results.filter(function (result) {
        return !result.success;
    }).map(function (result) {
        return result.error;
    });
});
I don't know if this fits your case, but I think you can use a counter to check when all promises have returned, regardless of whether each one was resolved or rejected.
var heroes = [
    'Superman',
    'Batman',
    'Spiderman',
    'Capitan America',
    'Ironman',
];

function getHero(hero) {
    return new Promise((resolve, reject) => {
        setTimeout(() => {
            return Math.round(Math.random()) ? resolve(hero + ' lives') : reject(hero + ' dead');
        }, Math.random() * 3000)
    })
}

function checkHeroes() {
    var checked = heroes.length;
    heroes.forEach((hero) => {
        getHero(hero)
            .then((res) => {
                checked--;
                console.log(res);
                if (!checked) done();
            })
            .catch((err) => {
                checked--;
                console.log(err);
                if (!checked) done();
            });
    })
}

function done() {
    console.log('All heroes checked');
}

checkHeroes();
I think your issue is less about the Bluebird API than about structuring your promise chain.
const reducePropsToRequests = (props) => Promise.resolve(Object
    .keys(props)
    .reduce((acc, key) => {
        acc[key] = request(props[key]);
        return acc;
    }, {}));

const hashToCollection = (hash) => Promise.resolve(Object
    .keys(hash)
    .reduce((acc, k) => {
        return [...acc, {source: k, data: hash[k]}];
    }, []));

const fetchFromSources = (sources) => Promise.props(sources);

const findSeveralAndUpdate = (results) => Promise
    .each(results, obj => {
        // you have access to the original {a: 'site.com'} key
        // here, so use that 'a' prop to your advantage by abstracting out
        // your db config somewhere outside your service
        return Programme.findOneAndUpdate(someConfig[obj.source], obj.data);
    });

const requestFromSeveralAndUpdate = (sources) => reducePropsToRequests(sources)
    .then(fetchFromSources)
    .then(hashToCollection)
    .then(findSeveralAndUpdate)
    .catch(/* some err handler */);

requestFromSeveralAndUpdate({ a: 'site.com', b: 'site.net' });
I'd just use request and write my own promise, with a try/catch inside, that only ever resolves. Pseudo-example below:
var request = require('request')

var urls = ['http://sample1.com/json', 'http://sample2.com/json']

var processUrl = (url) => {
    return new Promise((resolve, reject) => {
        try {
            var myRequest = {
                uri: url,
                method: 'GET',
                headers: {...}
            };
            // request's callback signature is (err, response, body)
            request(myRequest, (err, res, body) => {
                // resolve with either the error or the body, never reject
                if (err) {
                    return resolve(err);
                }
                resolve(body);
            });
        }
        catch (e) {
            // synchronous errors (e.g. bad options) also resolve
            resolve(e);
        }
    })
}
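A usage sketch: because processUrl always resolves, Promise.all over these promises never short-circuits on a failed request:
Promise.all(urls.map(processUrl))
    .then(results => {
        // results holds bodies for successful requests and error objects for failed ones
        console.log(results)
    })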
var data = [10,21,33,40,50,69];
var i = 0;
var dataSeq = [];

while(i < data.length){
    if(data[i] % 2 == 0){
        store.findOne({'visibility': true}, function(err, data){
            dataSeq.push(i)
            i++;
        });
    }
    else{
        dataSeq.push(i)
        i++;
    }
}

if(i == data.length-1){
    console.log(dataSeq) // Should Print [1,2,3,4,5]
    return res.status(200).send({ message: 'Task Completed'})
}
I want to collect data as the loop executes.
I am aware of how to handle async calls in Node.js. But I want the callbacks in sequence.
E.g. though there is an async call in the if condition, I want to halt the loop so that I can push the value of i into dataSeq and it will result in the [1,2,3,4,5] array. I want that sequence because my post operations depend on it.
I think asyncjs#eachSeries has what you need.
Your code would become something like this:
async.eachSeries(data, (item, callback) => {
    if(item % 2 == 0){
        store.findOne({'visibility': true}, function(err, data){
            dataSeq.push(i)
            i++;
            // tell eachSeries this item is done so the next one can start
            callback(err)
        });
    }
    else{
        dataSeq.push(i)
        i++;
        callback()
    }
}, (err) => {
    // if any of the callbacks produced an error, err would equal that error
});
You can use something like async#eachOf
var async = require('async');

var data = [10,21,33,40,50,69];
var dataSeq = [];

async.eachOf(data, function(value, key, cb) {
    if (value % 2 == 0) {
        store.findOne({ 'visibility': true })
            .then(function(doc) {
                dataSeq.push(key);
                // signal that this element is done
                return cb();
            })
            .catch(function(err) {
                return cb(err);
            });
    } else {
        dataSeq.push(key);
        cb();
    }
}, function(err) {
    if (err) {
        console.error(err)
        return res.status(500).send(); // handle the error as you want
    }
    return res.status(200).send({ message: 'Task Completed'})
})
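Since the question explicitly asks for the callbacks to run in sequence, a sketch of the same logic with async.eachOfSeries (which waits for each cb before moving to the next element) may be closer to what is wanted; it reuses data, dataSeq, store and res from the question's code:
async.eachOfSeries(data, function(value, key, cb) {
    if (value % 2 == 0) {
        // the next element is not started until this cb fires
        store.findOne({ 'visibility': true })
            .then(function(doc) {
                dataSeq.push(key);
                cb();
            })
            .catch(cb);
    } else {
        dataSeq.push(key);
        cb();
    }
}, function(err) {
    if (err) return res.status(500).send();
    // dataSeq now holds the indices in their original order
    return res.status(200).send({ message: 'Task Completed' })
})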