Meteor HTTP.call on the server side: am I using it correctly? - javascript

Sorry for my English.
I'm a beginner JS developer and I need help with Meteor.
I'm trying to write an RSS aggregator.
Can you tell me why this code does not work correctly for me?
rssContent is always undefined.
But if I put console.log(result.content) inside the HTTP.call callback, I do see the RSS data.
I need to pass rssContent to another function in this file to parse the XML, but I'm having trouble with HTTP.call.
server.js code:
var rssSources = ['http://news.yandex.ru/auto.rss'],
    parsedRss = [];
var rssContent;

for (var i = 0; i < rssSources.length; i++) {
    HTTP.call('GET', rssSources[i], function (error, result) {
        try {
            rssContent = result.content;
        } catch (e) {
            console.log(e);
        }
    });
}

console.log(rssContent);

Your rssContent variable is always undefined because you assign it inside a callback, i.e. in an asynchronous (non-blocking) call, so console.log(rssContent) runs before the callback has fired. Instead of the callback form, you have to use the synchronous (blocking) form.
I would also suggest not assigning the value to a global variable; insert the data directly into the database whenever you get a response to your HTTP request (see the sketch after the non-blocking example below).
Blocking Example:-
var rssSources = ['http://news.yandex.ru/auto.rss'],
    parsedRss = [];
var rssContent = "";

for (var i = 0; i < rssSources.length; i++) {
    var result = Meteor.http.call("GET", rssSources[i]);
    if (result.statusCode === 200 && result.content) {
        rssContent += result.content;
    }
}
console.log(rssContent);
Non-Blocking Example:-
var rssSources = ['http://news.yandex.ru/auto.rss'];

for (var i = 0; i < rssSources.length; i++) {
    HTTP.call('GET', rssSources[i], function (error, result) {
        try {
            // Insert your data into the database here, instead of
            // assigning the value to a global variable.
        } catch (e) {
            console.log(e);
        }
    });
}
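For instance, a minimal sketch of that insert-on-response approach, assuming a hypothetical RssItems collection defined elsewhere on the server (if your Meteor version complains about Fibers inside the callback, wrap it in Meteor.bindEnvironment):
var rssSources = ['http://news.yandex.ru/auto.rss'];

rssSources.forEach(function (source) {
    HTTP.call('GET', source, function (error, result) {
        if (!error && result && result.content) {
            // RssItems is a hypothetical collection; store the feed as soon
            // as the response arrives instead of stashing it in a global.
            RssItems.insert({
                source: source,
                content: result.content,
                fetchedAt: new Date()
            });
        }
    });
});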

There are two ways of running an HTTP request in Meteor. It runs synchronously by default. If you pass a callback, it runs asynchronously instead. In this case, assuming this is server-side code, it's easier to omit the callback and treat it as synchronous (much less headache IMO).
var rssContent = [];
for (var i = 0; i < rssSources.length; i++) {
    var result = HTTP.call('GET', rssSources[i]);
    // error handling
    rssContent.push(result);
}
I'm not familiar with handling errors for the synchronous version. You can head to their docs for more details.
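If it helps, my understanding is that the synchronous form throws on failure (including, as I recall, responses with an error status code), so a try/catch around each call is enough; a rough sketch reusing the question's rssSources:
var rssContent = [];
for (var i = 0; i < rssSources.length; i++) {
    try {
        var result = HTTP.call('GET', rssSources[i]);
        rssContent.push(result.content);
    } catch (e) {
        // e.response should carry the HTTP response when the server answered
        // with an error status; otherwise it is a network-level failure.
        console.log('Failed to fetch ' + rssSources[i] + ': ' + e.message);
    }
}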

Related

How to avoid creating a function inside a loop?

I am working on gathering some data using REST calls.
I have the following function, which calls a "directory" endpoint that returns a list of people.
To get their personal information, such as how many kids they have, I then have to hit each person's individual API.
var listOfPeople = [];
var numberOfKids = [];

// using superagent
var req = request.get(ApiPrefix + "/directory");
req.end(function (err, res) {
    if (err || !res.ok) {
        console.log("Error obtaining directory");
    } else {
        listOfPeople.push(res.body);
        // loop to retrieve all events.
        for (var i = 0; i < res.body.length; i++) {
            var personID = res.body[i].id;
            $.getJSON('/directory/person', {
                id: personID
            },
            function (result) {
                numberOfKids.push(result);
            });
        }
    }
});
While the above code works perfectly fine, I am getting an error from Gulp watch:
line 67, col 30, Don't make functions within a loop. (W083)
1 error
So, how do I decouple the loop and the AJAX call while expecting the exact same behavior?
I think you can remove the for loop and use Array.forEach instead:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/forEach
It is supported in all the latest browsers.
In your case, your for loop would become:
res.body.forEach(function (person) {
    var personId = person.id;
    // your getJSON call goes here, e.g.:
    $.getJSON('/directory/person', { id: personId }, function (result) {
        numberOfKids.push(result);
    });
});
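Another option that silences W083, sketched against the code in the question, is to hoist the AJAX call into a named helper declared outside the loop (fetchKids is just an illustrative name):
function fetchKids(personID) {
    $.getJSON('/directory/person', { id: personID }, function (result) {
        numberOfKids.push(result);
    });
}

for (var i = 0; i < res.body.length; i++) {
    fetchKids(res.body[i].id);
}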

Why are my async callback functions not returning the data requests in order?

I'm learning Node.js and am working through the Learnyounode course from NodeSchool, which is amazing, although it is quite a challenge and I'm a bit confused...
The question I'm stuck on is Exercise 9 - Juggling Async...
I'm trying to solve the problem:
"You must collect the complete content provided to you by each of the URLs and print it to the console (stdout). You don't need to print out the length, just the data as a String; one line per URL. The catch is that you must print them out in the same order as the URLs are provided to you as command-line arguments."
I've tried counting the callbacks in a variable called 'waiting' and then calling a complete() function when it has counted 3 callbacks, but the data still always comes back in the wrong order and I don't know why....
var http = require('http');
var bl = require('bl');

var url = [];
url[0] = process.argv[2];
url[1] = process.argv[3];
url[2] = process.argv[4];

var waiting = 0;

for (i = 0; i < url.length; i++) {
    var output = [];
    http.get(url[i], function (res) {
        res.setEncoding('utf8');
        res.pipe(bl(function (err, data) {
            output.push(data.toString());
            waiting++;
            if (waiting == 3) {
                complete(output);
            }
        }));
    });
}

var complete = function (output) {
    for (i = 0; i < output.length; i++) {
        console.log(output[i]);
    }
};
The async callbacks are not guaranteed to occur in the order that the HTTP requests are made.
You could maintain the order by changing:
output.push(data.toString());
to:
output[i] = data.toString();
(note that the index has to be captured per request, e.g. with a closure; otherwise every callback sees the final value of i, as the last answer below points out).
It's because your http.get requests are asynchronous.
Instead of pushing to the output array, do this:
output[i] = data.toString();
Save the value of the i index for each of your requests, either with a closure or by adding it as a property to the request. You can then use that index to order your responses.
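A minimal sketch of that closure approach, reusing the question's own setup (forEach hands each request its own index, and waiting is compared against url.length instead of a hard-coded 3):
var http = require('http');
var bl = require('bl');

var url = process.argv.slice(2);
var output = [];
var waiting = 0;

url.forEach(function (u, i) {
    http.get(u, function (res) {
        res.pipe(bl(function (err, data) {
            if (err) return console.error(err);
            output[i] = data.toString();   // `i` is captured per request
            waiting++;
            if (waiting === url.length) {
                output.forEach(function (line) {
                    console.log(line);
                });
            }
        }));
    });
});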

Wait for all $http requests to complete in Angular JS

I have a page that can make a different number of $http requests depending on the length of a variable, and then I want to send the data to the scope only when all the requests have finished. For this project I do not want to use jQuery, so please do not include jQuery in your answer. At the moment, the data is sent to the scope as each request finishes, which isn't what I want to happen.
Here is part of the code I have so far.
for (var a = 0; a < subs.length; a++) {
    $http.get(url).success(function (data) {
        for (var i = 0; i < data.children.length; i++) {
            rData[data.children[i].name] = data.children[i].age;
        }
    });
}
Here is the part that I am sceptical about, because something needs to be an argument for $q.all(), but it is not mentioned on the docs for Angular and I am unsure what it is meant to be.
$q.all().then(function () {
$scope.rData = rData;
});
Thanks for any help.
An $http call always returns a promise, which can be used with the $q.all function.
var one = $http.get(...);
var two = $http.get(...);
$q.all([one, two]).then(...);
You can find more details about this behaviour in the documentation:
all(promises)
promises - An array or hash of promises.
In your case you need to create an array and push all the calls into it in the loop. This way, you can use $q.all(…) on your array the same way as in the example above:
var arr = [];
for (var a = 0; a < subs.length; ++a) {
    arr.push($http.get(url));
}
$q.all(arr).then(function (ret) {
    // ret[0] contains the response of the first call
    // ret[1] contains the second response
    // etc.
});
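Tied back to the question's own loop, that might look roughly like this (note that with .then the body is on response.data, whereas .success hands you the data directly):
var promises = [];
for (var a = 0; a < subs.length; a++) {
    promises.push($http.get(url));
}

$q.all(promises).then(function (responses) {
    responses.forEach(function (response) {
        var data = response.data;
        for (var i = 0; i < data.children.length; i++) {
            rData[data.children[i].name] = data.children[i].age;
        }
    });
    $scope.rData = rData;   // assigned once, after every request has finished
});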

Handling multiple call asynchronous callbacks

I am learning node.js with learnyounode.
I am having a problem with JUGGLING ASYNC.
The problem is described as follows:
You are given three URLs as command-line arguments. You are supposed to make http.get() calls to fetch data from these URLs and then print it in the same order as the URLs appear in the argument list.
Here is my code:
var http = require('http')

var truecount = 0;
var printlist = []

for (var i = 2; i < process.argv.length; i++) {
    http.get(process.argv[i], function (response) {
        var printdata = "";
        response.setEncoding('utf8');
        response.on('data', function (data) {
            printdata += data;
        })
        response.on('end', function () {
            truecount += 1
            printlist.push(printdata)
            if (truecount == 3) {
                printlist.forEach(function (item) {
                    console.log(item)
                })
            }
        })
    })
}
Here is the question I do not understand:
I am trying to store the completed data for each url in response.on('end', function(){}) using a dictionary. However, I do not know how to get the url for that http.get(). If I could use a local variable inside http.get() that would be great, but I think whenever I declare a variable as var url, it always ends up pointing to the last url, since it is shared and keeps being updated through the loop. What is the best way for me to store the completed data as the value, with the key equal to the url?
This is how I would go about solving the problem.
#!/usr/bin/env node
var http = require('http');

var argv = process.argv.splice(2),
    truecount = argv.length,
    pages = [];

function printUrls() {
    if (--truecount > 0)
        return;
    for (i = 0; i < pages.length; i++) {
        console.log(pages[i].data + '\n\n');
    }
}

function HTMLPage(url) {
    var _page = this;
    _page.data = '### [URL](' + url + ')\n';
    http.get(url, function (res) {
        res.setEncoding('utf8');
        res.on('data', function (data) {
            _page.data += data;
        });
        res.on('end', printUrls);
    });
}

for (var i = 0; i < argv.length; i++)
    pages.push(new HTMLPage(argv[i]));
It adds each request to an array at the start, so that once everything is done I can iterate nicely through the responses, knowing that they are in the correct order.
When dealing with asynchronous processing, I find it much easier to think about each process as something with a concrete beginning and end. If you require the order of the requests to be preserved, then the entry must be made on creation of each process, and then you refer back to that record on completion. Only then can you guarantee that you have things in the right order.
If you were desperate to use your above method, then you could define a variable inside your get callback closure and use that to store the urls; that way you wouldn't end up with the last url overwriting your variables. If you go this way, though, you'll dramatically increase your overhead when you have to use your urls from process.argv to access each response in that order. I wouldn't advise it.
I went about this challenge a little differently. I'm creating an array of functions that call http.get, and immediately invoking them with their specific context. The streams write to an object where the key is the port of the server that the stream belongs to. When the end event is triggered, it adds that server to the completed array; when that array is full, it iterates through and echoes the responses in the original order the servers were given.
There's no right way, but there are probably a dozen or more ways. Wanted to share mine.
var http = require('http'),
    request = [],
    dataStrings = {},
    orderOfServerInputs = [];

var completeResponses = [];

for (server in process.argv) {
    if (server >= 2) {
        orderOfServerInputs[orderOfServerInputs.length] = process.argv[server].substr(-4);
        request[request.length] = function (thisServer) {
            http.get(process.argv[server], function (response) {
                response.on("data", function (data) {
                    dataStrings[thisServer.substr(-4)] = dataStrings[thisServer.substr(-4)] ? dataStrings[thisServer.substr(-4)] : ''; // if not set, set to ''
                    dataStrings[thisServer.substr(-4)] += data.toString();
                });
                response.on("end", function (data) {
                    completeResponses[completeResponses.length] = true;
                    if (completeResponses.length > 2) {
                        for (item in orderOfServerInputs) {
                            serverNo = orderOfServerInputs[item].substr(-4)
                            console.log(dataStrings[serverNo]);
                        }
                    }
                });
            });
        }(process.argv[server]);
    }
}
An Immediately-Invoked Function Expression (IIFE) could be a solution to your problem. It lets us bind a specific value to a function; in your case, the url that gets the response. In the code below, I bind the variable i to index, so whichever url gets the response, the corresponding index of printlist is updated. For more information, refer to this website
var http = require('http')

var truecount = 0;
var printlist = [];

for (var i = 2; i < process.argv.length; i++) {
    (function (index) {
        http.get(process.argv[index], function (response) {
            response.setEncoding('utf8');
            response.on('data', function (data) {
                if (printlist[index] == undefined)
                    printlist[index] = data;
                else
                    printlist[index] += data;
            })
            response.on('end', function () {
                truecount += 1
                if (truecount == 3) {
                    printlist.forEach(function (item) {
                        console.log(item)
                    })
                }
            })
        })
    })(i)
}
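And if you specifically want the dictionary keyed by URL, as the question asks, the same trick works by capturing the url itself instead of (or as well as) the index; a small sketch:
var http = require('http');

var urls = process.argv.slice(2);
var byUrl = {};       // completed body, keyed by url
var truecount = 0;

urls.forEach(function (url) {
    var printdata = '';
    http.get(url, function (response) {
        response.setEncoding('utf8');
        response.on('data', function (data) {
            printdata += data;
        });
        response.on('end', function () {
            byUrl[url] = printdata;   // `url` is captured per request
            truecount += 1;
            if (truecount === urls.length) {
                urls.forEach(function (u) {
                    console.log(byUrl[u]);   // print in argument order
                });
            }
        });
    });
});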

How to return this value?

I am building a mobile app using PhoneGap, jQuery and jQuery Mobile. I want to use an SQLite database in my app for storing some user information. (I can't use local storage; I want to do search/sort operations on the data.)
This is the code I am working on to get this done:
function getAccountInformation() {
    var accounts = {};
    db.transaction(function (transaction) {
        transaction.executeSql('SELECT * FROM account;', [],
            function (transaction, result) {
                if (result != null && result.rows != null) {
                    for (var i = 0; i < result.rows.length; i++) {
                        var item = {};
                        var row = result.rows.item(i);
                        for (var prop in row) {
                            item[prop] = row[prop];
                        }
                        accounts[i] = item;
                    }
                }
            }, errorHandler
        );
    }, errorHandler, nullHandler);
    console.log(JSON.stringify(accounts));
}
If I put console.log(JSON.stringify(accounts)); right after the closing } of the for loop, it shows the proper output.
But if I put it where it is right now, {} is printed as the output.
How can I make the getAccountInformation() function return that accounts object to my other function, where I will use jQuery to render the output?
What I want to do is return this accounts object simply by writing
return accounts;
Because the SQLite functions are asynchronous, you cannot just return the value.
I would make getAccountInformation receive a callback, as below:
function getAccountInformation(callbackfn) {
    db.transaction(function (transaction) {
        transaction.executeSql('SELECT * FROM account;', [],
            function (transaction, result) {
                if (result != null) {
                    callbackfn(result);
                }
            }, errorHandler
        );
    }, errorHandler, nullHandler);
}
That way your function gets called when the db request has executed.
That depends on how the function runs. When the work happens asynchronously (like an AJAX request), you're out of luck: you cannot simply return the result. In that case I suggest you read about jQuery deferreds.
A code snippet based on deferreds could look like this:
var deferred = new jQuery.Deferred();
var accounts = {};

deferred.done(function (data) {
    // process your data here
});

db.transaction(function (transaction) {
    transaction.executeSql('SELECT * FROM account;', [],
        function (transaction, result) {
            if (result != null && result.rows != null) {
                for (var i = 0; i < result.rows.length; i++) {
                    var item = {};
                    var row = result.rows.item(i);
                    for (var prop in row) {
                        item[prop] = row[prop];
                    }
                    accounts[i] = item;
                }
                deferred.resolve(accounts); // let the "done" function get called
            } else {
                deferred.reject();
            }
        }, errorHandler
    );
}, errorHandler, nullHandler);
db.transaction and transaction.executeSql produce their results asynchronously through the use of callback functions. This means that your getAccountInformation function will return immediately and will not wait for the transaction to complete (as in a synchronous call).
It's probably easier to simply pass a callback function as an argument to your getAccountInformation and run that function once the accounts object is populated. Therefore, change your function signature to getAccountInformation(callback) and replace the executeSql callback with:
function (transaction, result) {
    if (result != null && result.rows != null) {
        for (var i = 0; i < result.rows.length; i++) {
            var item = {};
            var row = result.rows.item(i);
            for (var prop in row) {
                item[prop] = row[prop];
            }
            accounts[i] = item;
        }
        callback(accounts); // Call the callback function with the populated accounts
    }
}
You can then call this function with:
getAccountInformation(function(accounts) {
// Do something with the accounts
});
There are fancier ways, such as jQuery Deferreds and Promises, which make working with asynchronous functions easier, but you still need to understand why this exists: by using asynchronous actions, your application stays responsive while waiting for results.
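For completeness, a rough sketch of how the Deferred version could be wired into the question's own function so callers consume it as a promise (errorHandler and nullHandler are assumed to exist as in the question):
function getAccountInformation() {
    var deferred = new jQuery.Deferred();
    var accounts = {};
    db.transaction(function (transaction) {
        transaction.executeSql('SELECT * FROM account;', [],
            function (transaction, result) {
                for (var i = 0; i < result.rows.length; i++) {
                    var item = {};
                    var row = result.rows.item(i);
                    for (var prop in row) {
                        item[prop] = row[prop];
                    }
                    accounts[i] = item;
                }
                deferred.resolve(accounts);
            },
            function () {
                deferred.reject();
            }
        );
    }, errorHandler, nullHandler);
    return deferred.promise();
}

getAccountInformation().done(function (accounts) {
    console.log(JSON.stringify(accounts)); // now runs after the query finishes
});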
