HTTP request document head - JavaScript

I need to get the document title, so I try to send a request and parse the response HTML to get the title.
Example (via the Node.js request module):
request.get("http://www.google.com", function(err, res, body) {
    var title = body.match(/<title>(.*?)<\/title>/)[1]; // escape the slash; without /g, [1] is the capture group
});
But when the document is particularly large, the request is slow.
Is there a way to get the document title quickly? Please suggest. Thanks.

Request can give you a stream of decompressed data as it is received: http://github.com/request/request#examples (2nd example)
You could keep appending the received data to a buffer and checking whether it contains your desired content yet ("</title>"). As soon as it does, you can extract your title and ignore the rest of the stream.
var request = require('request');
var buffer = '';
var flag = 0;
request({
    method: 'GET',
    uri: 'http://www.google.com',
    gzip: true
}).on('data', function(data) {
    buffer += data; // append first, so the chunk containing </title> is checked too
    if (buffer.indexOf('</title>') !== -1) done();
});
function done() {
    if (flag) return; // extract the title only once
    flag++;
    var title = buffer.match(/<title>(.*?)<\/title>/)[1];
    console.log(title);
}
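Note that this still downloads the whole body on a large document, just without waiting for it to finish. A minimal sketch (assuming the same request module) that also aborts the transfer once the title has been found:
var request = require('request');
var buffer = '';
var req = request({ method: 'GET', uri: 'http://www.google.com', gzip: true });
req.on('data', function(data) {
    buffer += data;
    var m = buffer.match(/<title>(.*?)<\/title>/);
    if (m) {
        console.log(m[1]);
        req.abort(); // stop receiving the rest of the document
    }
});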


Delay in socket.io event?

I created an AJAX call to the IMDb database:
// API Key
var key = "4dba72b2-7558-4c0f-bd18-9ffcb0999c4e";
// Url
var mainUrl = "http://api.myapifilms.com/imdb/top?token=" + key + "&format=json&data=0&start=1&end=250";
// API Call
var request = require('request');
request(mainUrl, function (error, response, body) {
    if (!error && response.statusCode == 200) {
        // Storing data in an object
        var obj = JSON.parse(body),             // JSON parser
            movieArray = obj.data.movies,       // Creating array
            item = movieArray[randomMovieRank], // Setting random movie variable
            itermArray = [item.ranking, item.title, item.year];
        console.log(itermArray);
        io.sockets.emit("serverAnswer", {ranking: itermArray[0], title: itermArray[1], year: itermArray[2]});
    }
});
return false;
Followed up by:
socket.on("serverAnswer", function(data){
console.log(data.title);
});
The socket.on is called on the client side. The problem I am having is that the data pulls through very slowly, if at all. The API is working, as it console logs correctly in the terminal, but on the client side it sometimes pulls through and sometimes doesn't. Is there something I am doing wrong?
EDIT:
Added pastebin: http://pastebin.com/TYHsqBmK
When you invoke the emit method, your client is not guaranteed to be connected. You can trigger the AJAX request after the client connects, or emit the message only once the data is ready. For example:
the server:
io.on('connection', function(socket) {
    if (movies !== null) {
        socket.emit("serverAnswer", {movies: movies});
    } else {
        // 1. request the IMDb resource
        // 2. set the movies variable
        // 3. emit the message
    }
});
the client:
socket.on("serverAnswer", function(data){
console.log(data);
});
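Filling in those three steps, a sketch of the server side might look like this (mainUrl as defined in the question; movies acts as a simple in-memory cache so the API is only hit once):
var request = require('request');
var movies = null;
io.on('connection', function(socket) {
    if (movies !== null) {
        socket.emit("serverAnswer", {movies: movies});
    } else {
        request(mainUrl, function(error, response, body) {
            if (!error && response.statusCode == 200) {
                movies = JSON.parse(body).data.movies;             // cache for later connections
                socket.emit("serverAnswer", {movies: movies});     // emit only once the data is ready
            }
        });
    }
});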

Node.js HTTP response truncated when response length exceeds 16101

I am using the Node.js HTTP request. When my response length exceeds 16101, it truncates my response, and I receive a limited response like this:
{"id_user":"133696"},{"id_u
This is not a chunked response; it only arrives once. I want to receive the whole response instead of a truncated one.
My Node version is v0.10.36.
Here is my code:
var https = require('https');
var querystring = require('querystring');

postData.format = 'json';
postData.apikey = 'abcd';
jsonObject = querystring.stringify(postData);

var postheaders = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': Buffer.byteLength(jsonObject, 'utf8')
};

if (callMethod == undefined) {
    callMethod = 'POST';
}

var optionspost = {
    host: this.host,
    port: this.port,
    path: this.path,
    method: callMethod,
    headers: postheaders
};

var reqPost = https.request(optionspost, function(res) {
    res.setEncoding('utf-8');
    res.on('data', function(responseData) {
        // ---->>> responseData contains a truncated response
        if (callFunction != undefined && callFunction != null && callFunction != '') {
            callFunction(responseData, relatedData); // ****** calling success function ******
        }
    });
    res.on('end', function() {
    });
});
reqPost.write(jsonObject);
reqPost.end();
reqPost.on('error', function(e) {
    console.error(e);
});
Your code is expecting the data event only once, but Node can fire it more than once. In fact, it can fire it as many times as it damn pleases. :) Every time a data event is emitted, another part of the data is provided to you. You know that there is no more data to be consumed when the end event is fired - that's where you should process the data and/or call your callbacks.
Since the response is basically a readable stream, have a look at the data event for Readable Stream.
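Applied to the code above, that means buffering the chunks in the data handler and only invoking the callback from end. A sketch of just the response handler, assuming the same surrounding variables:
var reqPost = https.request(optionspost, function(res) {
    res.setEncoding('utf-8');
    var responseData = '';
    res.on('data', function(chunk) {
        responseData += chunk; // accumulate every chunk
    });
    res.on('end', function() {
        // the full body has arrived; now it is safe to hand it off
        if (callFunction != undefined && callFunction != null && callFunction != '') {
            callFunction(responseData, relatedData);
        }
    });
});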

Node.js where to put the response.end()

I'm developing a simple Node.js application. First I create an httpServer using the http module, then I route the request to the requestHandlers.js page. The 'response' parameter comes from the creation of the httpServer. process1, process2 and process3 should each write their respective text to the page; that is the objective of this app.
requestHandlers.js
var process1 = require("./process1");
var process2 = require("./process2");
var process3 = require("./process3");
function iniciar(response) {
    console.log("Request handler 'iniciar' was called.");
    response.writeHead(200, {"Content-Type": "text/html"});
    process1.fc1(response);
    process2.fc2(response);
    process3.fc3(response);
    //response.end() // WHERE DO I PLACE IT?
}
As you can see, the response parameter is passed to process1.js, which, after parsing some data, should echo some information.
process1.js
var request = require('request');

function fc1(response) {
    var url = 'http://webpagethatreturnsJSONfile.com/';
    // Download webpage data and parse it
    request(url, function(err, resp, body) {
        if (err)
            throw err;
        var jsonResult = JSON.parse(body);
        response.write("Number:" + jsonResult.number + '');
        //response.end() // WHERE DO I PLACE IT?
    });
}
exports.fc1 = fc1;
The point is that I don't know where to put response.end(). Each process takes some time, and I want to end the response only when all processes have echoed their text.
How could I do it?
I don't know if the code I've attached is enough to go on.
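One common pattern (a sketch, not from the original thread; it assumes each fcN is changed to accept a completion callback) is to count the writers that have finished and call response.end() when the last one reports in.
requestHandlers.js:
function iniciar(response) {
    response.writeHead(200, {"Content-Type": "text/html"});
    var pending = 3; // one per process
    function finished() {
        pending--;
        if (pending === 0) response.end(); // last writer closes the response
    }
    process1.fc1(response, finished);
    process2.fc2(response, finished);
    process3.fc3(response, finished);
}
process1.js would then call the callback after writing:
function fc1(response, finished) {
    request(url, function(err, resp, body) {
        if (err) throw err;
        var jsonResult = JSON.parse(body);
        response.write("Number:" + jsonResult.number + '');
        finished(); // this write is done; the handler decides when to end
    });
}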

NodeJS HTTP request POST ERROR socket hang up

Hi, I'm having problems performing an HTTP request from Node.js given a large array of JSON objects. The request works fine given a small array, but if I increase the size of the array I receive Error: socket hang up {"error":{"code":"ECONNRESET"}}. Is it required to perform multiple writes? Or is something wrong going on at the other end?
Thanks in advance for taking your time here!
// data is a JSON object
var post_data = JSON.stringify(data);
var buf = new Buffer(post_data);
var len = buf.length;

var options = {
    hostname: address,
    port: port,
    path: pathName,
    method: 'PUT',
    headers: {
        'Content-Type': 'application/json',
        'Content-Length': len,
        'Transfer-Encoding': 'chunked'
    }
};

// HTTP call to REST API server
var req = restHttp.request(options, function(res) {
    console.log('server PUT response received.');
    var resData = '';
    res.on('data', function(replyData) {
        // Check reply data for error.
        console.log(replyData.toString('utf8'));
        if (replyData !== 'undefined')
            resData += replyData;
    });
    res.on('end', function() {
        callback(JSON.parse(resData));
    });
});
req.write(buf);
req.end();
You can stream the request body.
If the data in buf were in a readable stream, you could just do buf.pipe(req).
For example, if the current directory contains a file data.json with the JSON, you can do
var fs = require('fs');
var buf = fs.createReadStream(__dirname + '/data.json');
to create a ReadStream object. Then you can pipe this to your req:
buf.pipe(req);
The pipe command will call req.end once it's done streaming.
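Put together, a minimal sketch of the streaming variant (reusing address, port, pathName and callback from the question, and using Node's https module in place of the question's restHttp; Content-Length is dropped because a stream's length is not known up front):
var fs = require('fs');
var https = require('https');

var options = {
    hostname: address,
    port: port,
    path: pathName,
    method: 'PUT',
    headers: {
        'Content-Type': 'application/json',
        'Transfer-Encoding': 'chunked' // length unknown when streaming
    }
};

var req = https.request(options, function(res) {
    var resData = '';
    res.on('data', function(chunk) { resData += chunk; });
    res.on('end', function() { callback(JSON.parse(resData)); });
});
req.on('error', console.error);

fs.createReadStream(__dirname + '/data.json').pipe(req); // pipe calls req.end() when finished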

Phantomjs script stops execution after json object parsing

I'm trying to pass POST data to the following PhantomJS script (using PHP/cURL):
server.listen(port, function(request, response) {
    // Print some information, just for debug
    console.log("request method: ", request.method); // request.method POST or GET
    if (request.method == 'POST') {
        console.log("POST params should be next: ");
        console.log(request.headers);
        code = response.statusCode = 200;
        response.write(code);
        console.log("POST params: ", request.postRaw);
        console.log("POST params: ", JSON.stringify(request.postRaw));
        var json = request.postRaw;
        obj = JSON.parse(json);
        console.log(obj.email);
        console.log(obj.pass);
        var userName = json.stringify(obj.email);
        var userPass = json.stringify(obj.pass);
        console.log("I'm here");
I am trying to parse out the username and password from the POST request. I notice that if I leave in:
var userName = json.stringify(obj.email);
var userPass = json.stringify(obj.pass);
the script will hang after:
console.log("I'm here");
If I remove these 2 lines, the entire script will execute normally. Why is this happening? How can I fix this so that I can parse the JSON object (obj) without the script hanging?
In regular JavaScript, JSON is capitalized (and case-sensitive). Not sure how PhantomJS' subset of JavaScript lines up with regular JavaScript, but my guess is that you should use capital-case JSON there too.
So for example:
JSON.stringify(obj.email);
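Applied to the two lines in question, that would be:
var userName = JSON.stringify(obj.email);
var userPass = JSON.stringify(obj.pass);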
