Node.js http request - javascript

I am a beginner with Node.js, and I can't seem to get the following code to work.
function sleep(milliSeconds) {
    var startTime = new Date().getTime();
    while (new Date().getTime() < startTime + milliSeconds);
}

var isRequestComplete = false;

while (isRequestComplete == false) {
    console.log("in make request");

    var querystring = require('querystring');
    var data = querystring.stringify({
        username: 'username',
        password: 'password',
        action: 'convert',
        voice: 'engfemale1',
        text: 'stuff and things, this should take longer than one request.'
    });

    var options = {
        host: 'ws.ispeech.org',
        port: 80,
        path: '/api/rest/1.5',
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': data.length
        }
    };

    var http = require('http');
    var req = http.request(options, function(res) {
        console.log("got response");
        res.setEncoding('utf8');
        res.on('data', function(chunk) {
            console.log("body: " + chunk);
            if (chunk.indexOf("finished") != -1) {
                isRequestComplete = true;
            }
        });
    });

    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });

    req.write(data);
    req.end();

    console.log("completed");
    sleep(5000);
}
For whatever reason the HTTP request never sends a response back until the code has fully finished, so inside the while loop I never get a response and the loop never ends. The username and password are filled in in my actual program; they are left out here for confidentiality. Thanks for reading.

This is NOT the way to get your code to sleep! A while loop isn't "sleeping"; it's processing as fast as it can. In your case it's grabbing date after date after date trying to reach your target time, and because Node.js runs your code on a single thread, that busy loop blocks the event loop, so the response callbacks never get a chance to run.
Take a look here to see how this should work:
http://nodejs.org/api/http.html
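As a rough sketch of the non-blocking approach (the makeRequest name and the 5-second retry interval are illustrative assumptions, not part of the original code): instead of spinning in a while loop, let the event loop run and schedule the next attempt with setTimeout once the previous response has been inspected.

var http = require('http');
var querystring = require('querystring');

// Sketch only: poll the API every 5 seconds without blocking the event loop.
// The field values below are placeholders for the ones in the question.
function makeRequest() {
    var data = querystring.stringify({
        username: 'username',
        password: 'password',
        action: 'convert',
        voice: 'engfemale1',
        text: 'stuff and things, this should take longer than one request.'
    });

    var options = {
        host: 'ws.ispeech.org',
        port: 80,
        path: '/api/rest/1.5',
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': Buffer.byteLength(data)
        }
    };

    var req = http.request(options, function(res) {
        var body = '';
        res.setEncoding('utf8');
        res.on('data', function(chunk) { body += chunk; });
        res.on('end', function() {
            if (body.indexOf('finished') !== -1) {
                console.log('completed');
            } else {
                // Not done yet: schedule another attempt instead of busy-waiting.
                setTimeout(makeRequest, 5000);
            }
        });
    });

    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });

    req.write(data);
    req.end();
}

makeRequest();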

Have a look at the very first HTTP server example on http://nodejs.org/.
You have to create an HTTP server that listens for requests from browsers arriving at the specified IP-Address:Port. Once a request arrives, the server sends the specified response back to the browser.
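For reference, a minimal sketch in the style of that front-page example (the port number and the response text are placeholders):

var http = require('http');

// Minimal HTTP server: respond to every request with a plain-text body.
http.createServer(function(req, res) {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('Hello World\n');
}).listen(1337, '127.0.0.1');

console.log('Server running at http://127.0.0.1:1337/');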

Related

Get gender and age with facebook graph api with node

I am trying to get the user's gender and birthday using the Facebook Graph API with Node.js. My problem is that only the user's id and name are returned and I don't know why.
This is my code so far:
Facebook.js
var https = require('https');

exports.getFbData = function(accessToken, apiPath, callback) {
    var options = {
        host: 'graph.facebook.com',
        port: 443,
        path: apiPath + '?access_token=' + accessToken, // apiPath example: '/me/friends'
        method: 'GET'
    };

    var buffer = ''; // this buffer will be populated with the chunks of the data received from facebook
    var request = https.get(options, function(result) {
        result.setEncoding('utf8');
        result.on('data', function(chunk) {
            buffer += chunk;
        });
        result.on('end', function() {
            callback(buffer);
        });
    });

    request.on('error', function(e) {
        console.log('error from facebook.getFbData: ' + e.message);
    });

    request.end();
};
And this is how I call it:
facebook.getFbData(access_token, '/me/friends', function(data) {
    // ... use data here
});
This code returns only the user's id and name; how can I get his gender and age?
You need to change your method around a bit to pass in some more arguments. The path you want to request is:
https://graph.facebook.com/me?access_token=access_token&fields=id,name,birthday,gender
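One way to do that with the helper above is to let it append a fields parameter to the query string. This is only a sketch: the extra fields argument is an assumption, not part of the original helper, and birthday and gender are only returned if the access token has the corresponding permissions.

var https = require('https');

// Sketch: same helper as above, plus an optional "fields" argument (an assumption).
exports.getFbData = function(accessToken, apiPath, fields, callback) {
    var options = {
        host: 'graph.facebook.com',
        port: 443,
        path: apiPath + '?access_token=' + accessToken +
              (fields ? '&fields=' + fields : ''),
        method: 'GET'
    };

    var buffer = ''; // collects the chunks of data received from Facebook
    var request = https.get(options, function(result) {
        result.setEncoding('utf8');
        result.on('data', function(chunk) {
            buffer += chunk;
        });
        result.on('end', function() {
            callback(buffer);
        });
    });

    request.on('error', function(e) {
        console.log('error from facebook.getFbData: ' + e.message);
    });

    request.end();
};

// Called like this:
facebook.getFbData(access_token, '/me', 'id,name,birthday,gender', function(data) {
    console.log(data);
});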

Read raw http message in Nodejs

I'm sending an HTTP request using the http.request function, and I would like to read the whole HTTP response as text; that is, the raw HTTP protocol text. Is that possible? I've written the code below, but it's not working.
// Set up the request
console.log('Sending request');
var post_req = http.request(post_options, function(res) {
    res.setEncoding('utf8');
    console.log('Response statusCode: ' + res.statusCode);
    // res.on('data', function (chunk) {
    //     console.log('Response: ' + chunk);
    // });
    // res.on('end', function() {});
});

post_req.on('socket', function (socket) {
    var response = "";
    socket.on('data', function(chunk) {
        console.log(chunk);
    });
});

// post the data
post_req.write(post_data);
post_req.end();
If you want access to the raw http message, I'd suggest using the net module instead, and writing the request yourself. Something like this for a simple GET request:
var net = require('net');

var host = 'stackoverflow.com',
    port = 80,
    socket = net.connect(port, host, function() {
        var request = "GET / HTTP/1.1\r\nHost: " + host + "\r\n\r\n",
            rawResponse = "";

        // send http request:
        socket.end(request);

        // assume utf-8 encoding:
        socket.setEncoding('utf-8');

        // collect raw http message:
        socket.on('data', function(chunk) {
            rawResponse += chunk;
        });

        socket.on('end', function() {
            console.log(rawResponse);
        });
    });
For a POST request sending application/x-www-form-urlencoded data, you could write the request using something like:
function writePOSTRequest(data, host, path) {
    return "POST " + path + " HTTP/1.1\r\n" +
           "Host: " + host + "\r\n" +
           "Content-Type: application/x-www-form-urlencoded\r\n" +
           "Content-Length: " + Buffer.byteLength(data) + "\r\n\r\n" +
           data + "\r\n\r\n";
}

var data = "name1=value1&name2=value2",
    request = writePOSTRequest(data, host, "/path/to/resource");
where I'm using Buffer.byteLength because Content-Length requires the length in bytes, not in characters. Also, remember that data must be URL encoded.
If you don't know much about the format of HTTP messages, then this is a decent place to start:
http://jmarshall.com/easy/http/
Also, if you don't know what the encoding of the response will be then you'll have to parse the headers first to find out, but UTF-8 is by far the most common so it's a pretty safe bet.
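To illustrate the byte-count point and the URL encoding, here is a standalone sketch with made-up values:

var querystring = require('querystring');

// querystring.stringify URL-encodes the values for you:
var data = querystring.stringify({ name1: 'value with spaces', name2: 'héllo' });

// For multi-byte characters the byte length differs from the character count;
// Content-Length must be the byte count:
console.log(data.length);             // number of characters
console.log(Buffer.byteLength(data)); // number of bytes - use this for Content-Length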
Streams2 and Streams1 are not always able to interoperate well; see "problem: streams1 and streams2 duality" in this video.
I tried listening for data at a slightly lower level than streams, and this code prints the raw HTTP response, including headers, for me:
var http = require('http');
var raw = '';

console.log('Sending request');

var req = http.request({host: 'stackoverflow.com'}, function(res) {
    // watch(res, 'res'); // debugging helper, not defined in this snippet
    res.on('end', function() {
        console.log(raw);
    });
    res.on('data', function(data) {
        // if we don't attach a 'data' handler here, 'end' is not called
    });
});

req.on('socket', function (socket) {
    socket.resume();
    var oldOndata = socket.ondata;
    socket.ondata = function(buf, start, end) {
        raw += buf.slice(start, end).toString();
        oldOndata.call(socket, buf, start, end);
    };
});

req.end();
Assuming these kinds of tools are allowed in your environment, you could run an HTTP debugging proxy such as Fiddler (http://www.fiddler2.com/), which lets you inspect the HTTP calls and responses.
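If you go that route, one way to send a Node.js request through the proxy is to address the proxy directly and put the absolute URL in the path. This is only a sketch; 127.0.0.1:8888 is Fiddler's default listening address and may differ in your setup.

var http = require('http');

// Sketch: send the request to the local debugging proxy and let it forward
// to the real host, which is named by the absolute URL in `path`.
var options = {
    host: '127.0.0.1',   // proxy address (assumed Fiddler default)
    port: 8888,          // proxy port (assumed Fiddler default)
    path: 'http://stackoverflow.com/',
    method: 'GET',
    headers: { Host: 'stackoverflow.com' }
};

var req = http.request(options, function(res) {
    console.log('status: ' + res.statusCode);
});
req.on('error', function(e) {
    console.log('proxy request failed: ' + e.message);
});
req.end();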

Node.js sending http request in a loop

I'm facing a problem with my JavaScript code executed with Node.js.
I need to send HTTP requests in a loop to a remote server (I set www.google.ca in the code).
Here is my code:
var http = require('http');

var options = {
    hostname: 'www.google.ca',
    port: 80,
    path: '/',
    method: 'GET'
};

function sendRequest(options) {
    console.log('hello');
    var start = new Date();
    var req = http.request(options, function(res) {
        console.log('Request took:', new Date() - start, 'ms');
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });
    req.end();
}

for (var i = 0; i < 10; i++) {
    sendRequest(options);
}
The problem I have is that, no matter how many times I go through my loop, I get a response for only the first 5 of them. For the rest of the requests, the function sendRequest() is called but I never get a response, nor an error message, and then the program terminates.
However, it works fine when I set localhost as the host.
Does anyone have a solution to this problem?
Thanks in advance!
Perhaps either your machine or the remote machine is getting overwhelmed by the 10 simultaneous requests you make. Try sending them one at a time; you will have to wait until each request completes before continuing. One easy way to do so is with async.timesSeries:
var http = require('http');
var async = require('async');

var options = {
    hostname: 'www.google.ca',
    port: 80,
    path: '/',
    method: 'GET'
};

function sendRequestWrapper(n, done) {
    console.log('Calling sendRequest', n);
    sendRequest(options, function(err) {
        done(err);
    });
}

function sendRequest(options, callback) {
    //console.log('hello');
    var start = new Date();
    var req = http.request(options, function(res) {
        // I don't know if this callback is called for error responses.
        // I have only used the `request` library, which slightly simplifies this.
        // Under some circumstances you can accidentally cause problems by calling
        // your callback function more than once (e.g. both here and in on('error')).
        console.log('Request took:', new Date() - start, 'ms');
        callback(null);
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        callback(e);
    });
    req.end();
}

async.timesSeries(10, sendRequestWrapper);

NodeJS HTTP request POST ERROR socket hang up

Hi, I'm having problems performing an HTTP request in Node.js with a large array of JSON objects. The request works fine with a small array of JSON objects. However, if I increase the size of the JSON array, I receive Error: socket hang up {"error":{"code":"ECONNRESET"}}. Do I need to perform multiple writes? Or is something going wrong at the other end?
Thanks in advance for taking your time here!
// data is a json object
var post_data = JSON.stringify(data);
var buf = new Buffer(post_data);
var len = buf.length;

var options = {
    hostname: address,
    port: port,
    path: pathName,
    method: 'PUT',
    headers: {
        'Content-Type': 'application/json',
        'Content-Length': len,
        'Transfer-Encoding': 'chunked'
    }
};

// http call to REST API server
var req = restHttp.request(options, function(res) {
    console.log('server PUT response received.');
    var resData = '';
    res.on('data', function(replyData) {
        // Check reply data for error.
        console.log(replyData.toString('utf8'));
        if (replyData !== 'undefined')
            resData += replyData;
    });
    res.on('end', function() {
        callback(JSON.parse(resData));
    });
});

req.write(buf);
req.end();
You can stream the request body.
If the data in buf was in a readable stream, then you could just do buf.pipe(req).
For example, if the current directory contains a file data.json with the JSON, you can do
var buf = fs.createReadStream(__dirname + '/data.json');
to create a ReadStream object. Then you can pipe this to your req:
buf.pipe(req);
The pipe command will call req.end once it's done streaming.
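Putting those pieces together, a sketch of the streaming version (it assumes options and restHttp are set up as in the question and that data.json holds the payload):

var fs = require('fs');

// Sketch: stream the request body from a file instead of buffering it all in memory.
var req = restHttp.request(options, function(res) {
    var resData = '';
    res.on('data', function(chunk) {
        resData += chunk;
    });
    res.on('end', function() {
        console.log('server replied: ' + resData);
    });
});

req.on('error', function(e) {
    console.log('problem with request: ' + e.message);
});

// pipe() will call req.end() when the file has been fully streamed.
fs.createReadStream(__dirname + '/data.json').pipe(req);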

Node.js script would break when requesting HTTP responses from a site that does not exist

Using Node.js, when one requests an HTTP response, in optimal circumstances the request comes back with an HTTP response.
However, sometimes the request breaks because the site, for example, has a 404 code, or the site does not exist at all. When requesting a batch of URLs, if there is a 404 code on, say, the 200th URL out of 1000 URLs requested, the entire script breaks. Here is my code:
var hostNames = ['www.gizmodo.com', 'www.sitethatdoesnotexist123.com', 'www.google.com'];

for (i; i < hostNames.length; i++) {
    var options = {
        host: hostNames[i],
        path: '/'
    };
    (function (i) {
        http.get(options, function(res) {
            var obj = {};
            obj.url = hostNames[i];
            obj.statusCode = res.statusCode;
            obj.headers = res.headers;
            db.scrape.save(obj);
        }).on('error', function(e) {
            console.log("Error Site: " + hostNames[i]);
        });
    })(i);
}
Is there a way that, if the site does not exist, for example, I can simply skip to the next URL instead of having the script break?
EDIT: Fixed. Thanks user DavidKemp
Use a try/catch block to catch any errors that might occur, and then continue on from there.
For example:
var hostNames = ['www.gizmodo.com', 'www.sitethatdoesnotexist123.com', 'www.google.com'];

// moved the function out so we do not have to keep redefining it:
var get_url = function (i) {
    http.get(options, function(res) {
        var obj = {};
        obj.url = hostNames[i];
        obj.statusCode = res.statusCode;
        obj.headers = res.headers;
        console.log(JSON.stringify(obj, null, 4));
    });
};

for (i; i < hostNames.length; i++) {
    var options = {
        host: hostNames[i],
        path: '/'
    };
    try {
        get_url(i);
    }
    catch (err) {
        // do something with err
    }
}
You need to bind an error handler to your request. I also cleaned up the code a bit.
hostNames.forEach(function(hostName) {
    var req = http.get({host: hostName}, function(res) {
        var obj = {
            url: hostName,
            statusCode: res.statusCode,
            headers: res.headers
        };
        console.log(JSON.stringify(obj, null, 4));
    });
    req.on('error', function(err) {
        console.log('Failed to fetch', hostName);
    });
});
You can use the uncaughtException event; this lets the script keep running even after an exception.
process.on('uncaughtException', function(err) {
    console.log('Caught exception: ' + err);
});

var hostNames = ['www.gizmodo.com', 'www.sitethatdoesnotexist123.com', 'www.google.com'];

for (i; i < hostNames.length; i++) {
    var options = {
        host: hostNames[i],
        path: '/'
    };
    (function (i) {
        http.get(options, function(res) {
            var obj = {};
            obj.url = hostNames[i];
            obj.statusCode = res.statusCode;
            obj.headers = res.headers;
            db.scrape.save(obj);
        }).on('error', function(e) {
            console.log("Error Site: " + hostNames[i]);
        });
    })(i);
}
This adds a callback for when there's an error and logs the site that returned the error to the console. The error is usually triggered by a 404 or a request that takes too long.
The full docs are at http://nodejs.org/api/http.html#http_http_get_options_callback at the time of writing. loganfsmyth's answer provides a useful example.
