I would like to make HTTP calls sequentially using Q promises. I have 100 students, and for each of them I need to fetch some data from another platform; I tried doing this with Q promises, but the calls do not seem to run one after the other.
How do I make sure that the next call is not made until the previous one has finished parsing its response and inserting it into MongoDB?
My code so far looks like this:
var startDate = new Date("February 20, 2016 00:00:00"); //Start from February
var from = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 30);
var to = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(from, to);
function iterateThruAllStudents(from, to) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
async.eachSeries(students, function iteratee(student, callback) {
if (student.worksnap.user != null) {
var worksnapOptions = {
hostname: 'worksnaps.com',
path: '/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic xxxx='
},
method: 'GET'
};
promisedRequest(worksnapOptions)
.then(function (response) { //callback invoked on deferred.resolve
parser.parseString(response, function (err, results) {
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
_.forEach(timeEntries, function (timeEntry) {
_.forEach(timeEntry, function (item) {
saveTimeEntry(item);
});
});
});
callback();
}, function (newsError) { //callback invoked on deferred.reject
console.log(newsError);
});
}
});
});
}
function saveTimeEntry(item) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
throw err;
}
student.timeEntries.push(item);
student.save(function (err) {
if (err) {
console.log(err);
} else {
console.log('item inserted...');
}
});
});
}
function promisedRequest(requestOptions) {
//create a deferred object from Q
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
var deferred = Q.defer();
var req = http.request(requestOptions, function (response) {
//set the response encoding to parse json string
response.setEncoding('utf8');
var responseData = '';
//append data to responseData variable on the 'data' event emission
response.on('data', function (data) {
responseData += data;
});
//listen to the 'end' event
response.on('end', function () {
//resolve the deferred object with the response
console.log('http call finished');
deferred.resolve(responseData);
});
});
//listen to the 'error' event
req.on('error', function (err) {
//if an error occurs reject the deferred
deferred.reject(err);
});
req.end();
//we are returning a promise object
//if we returned the deferred object
//deferred object reject and resolve could potentially be modified
//violating the expected behavior of this function
return deferred.promise;
}
Could anyone tell me what I need to do to achieve this?
Is it also possible to know when all of the HTTP calls have finished and the insertion is done for all of them?
I would abandon your current approach and use the npm module request-promise.
https://www.npmjs.com/package/request-promise
It's very popular and mature.
rp('http://your/url1').then(function (response1) {
// response1 access here
return rp('http://your/url2')
}).then(function (response2) {
// response2 access here
return rp('http://your/url3')
}).then(function (response3) {
// response3 access here
}).catch(function (err) {
});
Now you just need to convert this to some kind of iteration for the 100 requests you want and the job will be done.
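To run the requests one after the other and know when the whole batch is done, you can chain one promise per student with Array.prototype.reduce. A minimal sketch, assuming a students array and a hypothetical buildUrl() helper that produces the time_entries URL for one student:
var Q = require('q');
var rp = require('request-promise');
// Chain one request per student so each call starts only after the previous one has finished.
function fetchAllStudents(students) {
    return students.reduce(function (chain, student) {
        return chain.then(function () {
            // buildUrl() is a hypothetical helper building the time_entries URL for this student.
            return rp(buildUrl(student)).then(function (response) {
                // parse the XML response and save to MongoDB here; return a promise if the save is async
            });
        });
    }, Q());
}
fetchAllStudents(students).then(function () {
    // runs once every request (and whatever its then handler returned) has completed
    console.log('all http calls finished');
});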
Related
I would like to call a function which fetches data for every student for a specific time interval, and repeat the same process until it reaches some condition.
The current code seems to make parallel calls to the function iterateThruAllStudents().
My code looks like this:
var startDate = new Date("March 16, 2016 00:00:00"); //Start from February
var fromTimestamp = null;
var toTimestamp = null;
var today = new Date();
var todayTimestamp = new Date(today).getTime() / 1000;
async.whilst(
function () {
return fromTimestamp < todayTimestamp;
},
function (callback) {
console.log(startDate);
fromTimestamp = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 5);
toTimestamp = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(fromTimestamp, toTimestamp);
callback(null, startDate);
},
function (err, n) {
console.log('finished for ' + n);
}
);
function iterateThruAllStudents(from, to) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
var counter = 0;
async.eachSeries(students, function iteratee(student, callback) {
if (student.worksnap.user != null) {
var worksnapOptions = {
hostname: 'api.worksnaps.com',
path: '/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic ' + auth_hash
},
method: 'GET'
};
getTimeEntries(worksnapOptions)
.then(function (response) { //callback invoked on deferred.resolve
return convertXMLToJson(response);
}).then(function (timeEntries) {
console.log('convert xml to json');
var isEmpty = _.isEmpty(timeEntries); // true
if (isEmpty) {
callback(null);
}
return saveTimeEntry(timeEntries);
}).then(function (response) {
counter++;
console.log('all timeEntries for one student finished....Student: ' + student.worksnap.user.user_id + ' Student Counter: ' + counter);
callback(null);
});
} else {
callback(null);
}
});
});
}
function convertXMLToJson(response) {
var deferred = Q.defer();
parser.parseString(response, function (err, results) {
if (err) {
deferred.reject(err);
}
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
deferred.resolve(timeEntries);
});
return deferred.promise;
}
function saveTimeEntry(timeEntries) {
var deferred = Q.defer();
_.forEach(timeEntries.time_entry, function (item) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
deferred.reject(err);
}
student.worksnap.timeEntries.push(item);
student.save(function (err) {
if (err) {
deferred.reject(err);
} else {
//console.log(item);
}
});
});
});
deferred.resolve('finished saving timeEntries for one student...');
return deferred.promise;
}
function getTimeEntries(requestOptions) {
//create a deferred object from Q
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
var deferred = Q.defer();
var req = http.request(requestOptions, function (response) {
//set the response encoding to parse json string
response.setEncoding('utf8');
var responseData = '';
//append data to responseData variable on the 'data' event emission
response.on('data', function (data) {
responseData += data;
});
//listen to the 'end' event
response.on('end', function () {
//resolve the deferred object with the response
console.log('http call finished');
deferred.resolve(responseData);
});
});
//listen to the 'error' event
req.on('error', function (err) {
//if an error occurs reject the deferred
console.log('inside On error.');
console.log(err);
deferred.reject(err);
});
req.end();
//we are returning a promise object
//if we returned the deferred object
//deferred object reject and resolve could potentially be modified
//violating the expected behavior of this function
return deferred.promise;
}
Does anyone have an idea how to achieve this, so that I can grab the data for all students sequentially for each time interval?
Take a look at the docs for async.eachSeries(). You'll want to supply a third argument (a completion callback) and, from it, invoke the callback provided by the second function passed to async.whilst().
I think the following modifications will do what you need. Look for the callback I named done, specifically:
var startDate = new Date("March 16, 2016 00:00:00"); //Start from February
var fromTimestamp = null;
var toTimestamp = null;
var today = new Date();
var todayTimestamp = new Date(today).getTime() / 1000;
async.whilst(
function () {
return fromTimestamp < todayTimestamp;
},
function (callback) {
console.log(startDate);
fromTimestamp = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 5);
toTimestamp = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(fromTimestamp, toTimestamp, callback);
},
function (err, n) {
console.log('finished for ' + n);
}
);
function iterateThruAllStudents(from, to, done) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
var counter = 0;
async.eachSeries(students, function iteratee(student, callback) {
if (student.worksnap.user != null) {
var worksnapOptions = {
hostname: 'api.worksnaps.com',
path: '/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic ' + auth_hash
},
method: 'GET'
};
getTimeEntries(worksnapOptions)
.then(function (response) { //callback invoked on deferred.resolve
return convertXMLToJson(response);
}).then(function (timeEntries) {
console.log('convert xml to json');
var isEmpty = _.isEmpty(timeEntries); // true
if (isEmpty) {
callback(null);
}
return saveTimeEntry(timeEntries);
}).then(function (response) {
counter++;
console.log('all timeEntries for one student finished....Student: ' + student.worksnap.user.user_id + ' Student Counter: ' + counter);
callback(null);
});
} else {
callback(null);
}
}, function eachSeriesFinished(err) {
if (err)
return done(err);
return done(null, to);
});
});
}
function convertXMLToJson(response) {
var deferred = Q.defer();
parser.parseString(response, function (err, results) {
if (err) {
deferred.reject(err);
}
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
deferred.resolve(timeEntries);
});
return deferred.promise;
}
function saveTimeEntry(timeEntries) {
var deferred = Q.defer();
_.forEach(timeEntries.time_entry, function (item) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
deferred.reject(err);
}
student.worksnap.timeEntries.push(item);
student.save(function (err) {
if (err) {
deferred.reject(err);
} else {
//console.log(item);
}
});
});
});
deferred.resolve('finished saving timeEntries for one student...');
return deferred.promise;
}
function getTimeEntries(requestOptions) {
//create a deferred object from Q
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
var deferred = Q.defer();
var req = http.request(requestOptions, function (response) {
//set the response encoding to parse json string
response.setEncoding('utf8');
var responseData = '';
//append data to responseData variable on the 'data' event emission
response.on('data', function (data) {
responseData += data;
});
//listen to the 'end' event
response.on('end', function () {
//resolve the deferred object with the response
console.log('http call finished');
deferred.resolve(responseData);
});
});
//listen to the 'error' event
req.on('error', function (err) {
//if an error occurs reject the deferred
console.log('inside On error.');
console.log(err);
deferred.reject(err);
});
req.end();
//we are returning a promise object
//if we returned the deferred object
//deferred object reject and resolve could potentially be modified
//violating the expected behavior of this function
return deferred.promise;
}
Side note: if you were open to it, using something like co or async/await would simplify this code a lot, in my opinion.
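For illustration, here is a rough sketch of the per-student loop written with async/await (natively available in newer Node versions, or via Babel/co); it assumes the same promise-returning getTimeEntries(), convertXMLToJson() and saveTimeEntry() helpers from the code above, plus a hypothetical buildWorksnapOptions() helper:
// Sketch only: sequential per-student processing with async/await.
async function iterateThruAllStudents(from, to) {
    const students = await Student.find({status: 'student'}).populate('user').exec();
    for (const student of students) {
        if (!student.worksnap.user) continue;
        const options = buildWorksnapOptions(student, from, to); // hypothetical helper
        const response = await getTimeEntries(options);
        const timeEntries = await convertXMLToJson(response);
        if (!_.isEmpty(timeEntries)) {
            await saveTimeEntry(timeEntries);
        }
    }
}
The outer date loop then becomes a plain while over the timestamps, awaiting iterateThruAllStudents(from, to) on each pass.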
I want to make HTTP requests to an API to collect each user's data and insert it into MongoDB.
The problem I am having is that it makes all the requests at once, and it seems to get stuck somewhere and I don't know what is going on.
Although I am using the async library and calling request() inside each iteration, I don't know if this is the right way. Here is the code:
function iterateThruAllStudents(from, to) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
async.forEach(students, function iteratee(student, callback) {
if (student.worksnap.user != null) {
var options = {
url: 'https://api.worksnaps.com/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic bGhNSVJkVUFwOE1DS2loOFVyZkFyOENEZEhPSXdCdUlHdElWMHo0czo='
}
};
request(options, getTimeEntriesFromWorksnap);
}
callback(); // tell async that the iterator has completed
}, function (err) {
console.log('iterating done');
});
});
}
function getTimeEntriesFromWorksnap(error, response, body) {
console.log(response.statusCode);
if (!error && response.statusCode == 200) {
parser.parseString(body, function (err, results) {
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
_.forEach(timeEntries, function (timeEntry) {
_.forEach(timeEntry, function (item) {
saveTimeEntry(item);
});
});
});
}
}
function saveTimeEntry(item) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
throw err;
}
student.timeEntries.push(item);
student.save(function (err) {
if (err) {
console.log(err);
} else {
console.log('item inserted...');
}
});
});
}
var from = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 30);
var to = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(from, to);
I am new to JavaScript, especially when dealing with asynchronous code.
Any help?
Use async.eachLimit() to make batched requests to the API. Try this iterateThruAllStudents() function.
I already had the same question before, here.
See the tutorial on limiting here.
Though I am setting the limit to 5, you can use whatever you want (10, 50, etc.).
function iterateThruAllStudents(from, to) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
async.eachLimit(students,5,function iteratee(student, callback) {
if (student.worksnap.user != null) {
var options = {
url: 'https://api.worksnaps.com/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic bGhNSVJkVUFwOE1DS2loOFVyZkFyOENEZEhPSXdCdUlHdElWMHo0czo='
}
};
request(options,getTimeEntriesFromWorksnap(callback));
} else {
callback();
}
}, function (err) {
console.log(err);
console.log('iterating done');
});
});
}
function getTimeEntriesFromWorksnap(cb) {
return function(error, response, body){
console.log(response.statusCode);
if (!error && response.statusCode == 200) {
parser.parseString(body, function (err, results) {
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
async.each(timeEntries,function(timeEntry,cb1){
async.each(timeEntry,function(item,cb2){
saveTimeEntry(item,cb2);
},function(err){
if(err)
cb1(err);
else
cb1();
})
},function(err){
if(err)
cb(err);
else
cb();
});
//_.forEach(timeEntries, function (timeEntry) {
// _.forEach(timeEntry, function (item) {
// saveTimeEntry(item);
// });
//});
});
} else {
cb(null);
}
}
}
function saveTimeEntry(item,cb2) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
return cb2(err);
}
student.timeEntries.push(item);
student.save(function (err) {
if (err) {
console.log(err);
//return cb2(err);//Do it if you wanna throw an error.
} else {
console.log('item inserted...');
}
cb2();
});
});
}
var from = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 30);
var to = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(from, to);
In your example you missed the iteratee param in async's each method: iteratee(item, callback). Look at the example here.
You need to call the callback each time inside your iteratee function to tell async to continue processing.
each(collection, iteratee, [callback])
collection - the collection to iterate over.
iteratee(item, callback) - a function to apply to each item in the collection. The iteratee is passed a callback(err) which must be called once it has completed. If no error has occurred, the callback should be run without arguments or with an explicit null argument. The array index is not passed to the iteratee; if you need the index, use forEachOf.
callback(err) - an optional callback which is called when all iteratee functions have finished, or an error occurs.
If you need sequential behavior, no problem: there is also an eachSeries method with the same signature, except every collection item is processed in series.
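As a tiny illustration of that contract (a sketch with placeholder names), each item's callback must be invoked exactly once before async moves on or finishes:
var async = require('async');
// eachSeries processes one item at a time; each() has the same contract but runs items in parallel.
async.eachSeries(['a', 'b', 'c'], function (item, callback) {
    doSomethingAsync(item, function (err) { // doSomethingAsync is a placeholder for your async work
        callback(err); // null/undefined continues, an error aborts the iteration
    });
}, function (err) {
    if (err) return console.error('stopped early:', err);
    console.log('all items processed');
});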
UPDATE:
The following changes should be made:
Pass the async callback through:
request(options, getTimeEntriesFromWorksnap(callback));
Return the actual callback function that request needs:
function getTimeEntriesFromWorksnap(callback) {
return function(error, response, body) {
// ...
saveTimeEntry(item, callback);
// ...
}
}
Call the callback only after the record has been saved in the database:
function saveTimeEntry(item, callback) {
// ..
student.save(callback);
// ..
}
Refactor the nested loops (I'm not sure exactly what timeEntries and timeEntry are, so use the appropriate async method to iterate over these data structures):
async.each(timeEntries, function (timeEntry, callback) {
async.each(timeEntry, function (item, callback) {
saveTimeEntry(item, callback);
}, callback);
}, callback);
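Putting those pieces together, a rough sketch of the resulting flow (it assumes request, the xml2js parser, the per-student options object and the Student model from the question, and that saveTimeEntry forwards its callback to student.save as in point 3):
async.each(students, function (student, callback) {
    if (!student.worksnap.user) return callback();
    request(options, getTimeEntriesFromWorksnap(callback)); // options built per student as in the question
}, function (err) {
    if (err) console.log(err);
    console.log('iterating done');
});
function getTimeEntriesFromWorksnap(callback) {
    return function (error, response, body) {
        if (error || response.statusCode !== 200) return callback(error);
        parser.parseString(body, function (err, results) {
            if (err) return callback(err);
            var timeEntries = JSON.parse(JSON.stringify(results.time_entries));
            async.each(timeEntries, function (timeEntry, cb) {
                async.each(timeEntry, function (item, cb2) {
                    saveTimeEntry(item, cb2);
                }, cb);
            }, callback);
        });
    };
}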
I have a db.js with database-related functions. I want to make a call to db.js and wait until it returns the query result.
But the result only becomes available after the db call has already returned. Can anyone please help me solve this?
Code sample:
var Q = require('q');
db= require("./dbaccess.js");
function waitfor(ms){
var deferred = Q.defer();
setTimeout(function() {
deferred.resolve(db);
}, 5000);
return deferred.promise;
}
waitfor(2000).done(function(dbcall) {
console.log('contrived example '+ dbcall.query1());
});
dbaccess.js:
var sql = require('mssql');
var config = {
user: 'xx',
password: 'xxx',
server: 'aaa',
database: 'RequestCenter',
stream: true,
}
this.query1=function() {
sql.connect(config, function(err) {
var result;
var request = new sql.Request();
request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
request.on('row', function(row) {
console.log(row.Name);
result = row.Name;
});
request.on('error', function(err) {
console.log("err : "+err);
});
request.on('done', function(returnValue) {
console.log("done");
});
return result;
});
sql.on('error', function(err) {
console.log("sql err : "+err);
});
}
Output:
contrived example undefined
in db: AutomationClient
Expected output:
in db: AutomationClient
contrived example AutomationClient
I'm not sure why your main code passes in 2000 for the ms argument and then does a 5000 ms timeout. In fact, why are you doing a timeout at all? If it was an attempt to wait for the db function to complete, you don't need it.
If you must use promises (personally I'd use a simple callback for code this simple), I get that you want to learn how to use promises, so here goes.
Your original code looked like it was attempting to return the value of the LAST row.Name.
This code returns an array of row.Name values.
Not knowing the kind of data you'd be getting, I don't know which is correct.
dbaccess.js
var Q = require('q');
var sql = require('mssql');
var config = {
user: 'xx',
password: 'xxx',
server: 'aaa',
database: 'RequestCenter',
stream: true,
}
this.query1 = function() {
var deferred = Q.defer();
sql.connect(config, function(err) {
var result = []; // return all rows - modify as required
var request = new sql.Request();
request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
request.on('row', function(row) {
console.log(row.Name);
result.push(row.Name);
});
request.on('error', function(err) {
console.log("err : " + err);
deferred.reject(err);
});
request.on('done', function(returnValue) {
deferred.resolve(result);
});
});
sql.on('error', function(err) {
console.log("sql err : " + err);
deferred.reject(err);
});
return deferred.promise;
}
Code sample:
db = require("./dbaccess.js");
db.query1().then(function(result) {
console.log('contrived example ' + result);
});
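Since the rewritten query1() can also reject (on a connection or request error), you would normally attach a rejection handler too; a minimal sketch:
db.query1()
    .then(function (result) {
        console.log('contrived example ' + result);
    })
    .catch(function (err) {
        // fires if sql.connect or the request emitted an error
        console.log('query failed: ' + err);
    })
    .done(); // Q idiom: rethrow anything left unhandled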
Is there a better way to call the same function from the client and from another Node.js module, without having two separate functions whose only difference is how the value is returned?
exports.getFiles = function(req,res){
var globPattern = req.body.globPattern;
var globOptions =req.body.globOptions;
glob(globPattern, globOptions, function (err, files) {
if(err)
{
res.status(400);
winston.log('error', err);
return res.send({success:false,reason: err});
}
res.send({success:true,data:files});
});
};
exports.getFilesFunc = function(payload){
var deferred = q.defer();
var globPattern = payload.globPattern;
var globOptions = payload.globOptions;
glob(globPattern, globOptions, function (err, files) {
if(err)
deferred.resolve({success:false,reason: err});
deferred.resolve({success:true,data: files});
});
return deferred.promise;
};
You can basically call getFilesFunc from getFiles:
exports.getFilesFunc = function(payload){
return Q.nfcall(glob, payload.globPattern, payload.globOptions);
};
exports.getFiles = function(req,res){
exports.getFilesFunc(req.body).then(function(files) {
res.send({success:true, data:files});
}, function(err) {
res.status(400);
winston.log('error', err);
return res.send({success:false, reason: err});
});
};
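From another Node module you would then consume the promise directly; a rough sketch (the require path is made up):
var files = require('./files-controller'); // hypothetical path to the module above
files.getFilesFunc({ globPattern: '**/*.js', globOptions: { cwd: '/tmp' } })
    .then(function (matched) {
        console.log('found ' + matched.length + ' files');
    }, function (err) {
        console.error('glob failed:', err);
    });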
Since you're using Promises, you can shorten your code to the following:
var Q = require('q'),
glob = Q.denodeify(require('glob'));
exports.getFiles = function(pattern, options) {
return glob(pattern, options);
};
Then in your controller, or wherever you're calling the function, you decide what to do with the returned data:
module
.getFiles(req.body.globPattern, req.body.globOptions)
//or .getFiles(payload.globPattern, payload.globOptions)
.then(function (files) {
}, function (error) {
});
In "view" method within my controller was previously using node-async but I wanted to try out using q.
I'm currently trying to convert this
exports.view = function (req, res) {
var category = req.params.category,
id = req.params.id,
ip = req.connection.remoteAddress,
slug = req.params.slug,
submission,
userId = typeof req.session.user !== 'undefined' && req.session.user.id ? req.session.user.id : null,
views;
var getSubmission = function (submissionId, callback) {
Submission.getSubmission({
id: submissionId
}, function (err, submission) {
if (err) {
callback(err);
} else if (submission) {
callback(null, submission);
} else {
callback(err);
}
});
};
async.waterfall([
function (callback) {
getSubmission(id, callback);
},
function (submission, callback) {
res.render('submission', {
title: submission.title + ' -',
submission: submission
});
}]);
To convert it to Q, I started doing something like this:
var getSubmission = function(id) {
return Submission.getSubmission({
id : submissionId
}).then(function(submission) {
return submission;
});
};
q.fcall(getSubmission).then(function(submission) {
console.log(submission);
});
But it's not quite working as I intended. Am I doing something wrong here? How can I do this?
Is Submission.getSubmission a call to a database? Then you can't "chain" promises onto it directly. You'll have to use the deferred method:
var getSubmission = function(id) {
var deferred = Q.defer();
Submission.getSubmission({
id: id
}, function(err, data){
if (err) {
deferred.reject(err);
} else {
deferred.resolve(data);
}
});
return deferred.promise;
}
getSubmission(some_id).then(successCallback, failureCallback);
You can also use Q#denodeify to convert a function that uses Node.js-style callbacks (function(err, data)) into a promise-based function. Thus, the above can also be achieved with the following:
getSubmissionPromise = Q.denodeify(Submission.getSubmission);
getSubmissionPromise({id: some_id}).then(successCallback, failureCallback);
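Either way, your view handler can then consume the promise and reproduce the original waterfall; a minimal sketch, assuming the getSubmission() wrapper from above and the same req/res:
exports.view = function (req, res) {
    getSubmission(req.params.id)
        .then(function (submission) {
            res.render('submission', {
                title: submission.title + ' -',
                submission: submission
            });
        }, function (err) {
            // handle the failure, e.g. render an error page instead
            res.status(500).send('could not load submission');
        });
};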