Client request to server - javascript

A client function requests data from a server. I imported this function into another file where I need to use it, and it works fine. But I don't understand why I am seeing "undefined" on the command prompt. I have commented out all the console.log calls, but it still appears. Could the export/import be the problem?
Here is my code:
// tstReq.js
function getData(iccid) {
  toString(iccid);
  var http = require('http');
  var jasParseData;
  var options = {
    host: 'demo8620001.mockable.io',
    port: 80,
    path: '/Api3',
    method: 'get'
  };
  http.request(options, function(res) {
    //console.log('STATUS: ' + res.statusCode);
    //console.log('HEADERS: ' + JSON.stringify(res.headers));
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
      //console.log('BODY: ' + chunk);
      josParseData = JSON.parse(chunk);
      for (i = 0, len = Object.keys(josParseData.iccid[i]).length; i <= len; i++) {
        //console.log('JSON.parse:', josParseData.iccid[i]);
        //console.log("iccid: ", iccid);
        if (josParseData.iccid[i] === iccid) { // Only print matched iccid
          console.log('JSON.parse:', josParseData.iccid[i]);
          console.log("iccid: ", iccid);
        }
        if (josParseData.iccid[i] === iccid) {
          console.log("Valid Jasper", i + 1);
          console.log('\n');
        }
        else {
          // console.log("Invalid Jasper");
        }
        //console.log('\n');
      }
      //console.log('\n');
    });
  }).end();
};
module.exports = getData;
Here is the code where I use the exported function:
const fs = require('fs');
var parse = require('csv-parse');
var validateICCID = require('./funcValidateId.js');
var getData = require('./tstReq.js');
fs.createReadStream('iccid2.csv')
  .pipe(parse({delimiter: ':'}))
  .on('data', function(csv) {
    csvrow = csv.toString();
    console.log('\n');
    console.log(getData(csvrow));
    console.log('\n');
  });

You're probably getting undefined because the function getData doesn't have a return statement. That doesn't mean your function is broken; it just isn't returning a result directly.
If you want to log the result of getData, you can't simply return it from the function, because http.request is asynchronous; you need to hand the result back through a callback (or a promise) once the response has arrived.
Also, I noticed you declared var jasParseData; you probably meant josParseData.
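For example, here is a minimal sketch of getData reworked to take a callback (the structure is illustrative, not taken from the question, and assumes the whole response body is collected before parsing):

// tstReq.js -- sketch: deliver the parsed data through a callback
var http = require('http');

function getData(iccid, callback) {
  var options = {
    host: 'demo8620001.mockable.io',
    port: 80,
    path: '/Api3',
    method: 'GET'
  };
  http.request(options, function(res) {
    var body = '';
    res.setEncoding('utf8');
    res.on('data', function (chunk) { body += chunk; }); // collect the chunks
    res.on('end', function () {
      callback(null, JSON.parse(body));                  // hand back the parsed result
    });
  }).on('error', function (err) {
    callback(err);                                       // report request errors the same way
  }).end();
}

module.exports = getData;

The caller then logs inside the callback instead of logging the return value, e.g. getData(csvrow, function (err, data) { console.log(err || data); });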

Related

Node.js Http.Request Callback - unable to pass data to parent function

I'm fairly new to Node and Javascript and I'm struggling with my first Node module. What I'm trying to do is export functions for specific API calls and I'd like to reuse my https.request function rather than duplicating the code in each function. For some reason I'm failing to wrap my head around how to pass the data back to my original function. Here's an abbreviated version - the listStuff function would be one of many to handle various api request actions.
'use strict';

const https = require('https');
const _ = require('underscore');

const hostStr = 'api.server.net';

function listStuff(){
  var pathStr = '/release/api/stuff';
  _apiCall(pathStr);
  //Would like to handle the https response data here
};

function _apiCall(pathStr){
  var options = {
    host: hostStr,
    path: pathStr
  };
  var req = https.get(options, function(res) {
    console.log("statusCode: ", res.statusCode);
    console.log("headers: ", res.headers);
    var responseString = '';
    res.on('data', function(d){
      responseString += d;
    });
    res.on('end', function(){
      var responseObject = JSON.parse(responseString);
    });
  });
  req.end();
  req.on('error', function(e){
    console.log(e);
  });
};

module.exports = {
  listStuff: listStuff
};
Register a callback in the _apiCall function, and then check the callback parameters for error handling. Just make sure you invoke (and return) the callback when you want to end the call, either in the 'end' handler or in the error handler. Hope this helps.
function listStuff(){
  var pathStr = '/release/api/stuff';
  _apiCall(pathStr, function(err, data) {
    if (err) { /* handle err */ }
    // handle data
  });
};

function _apiCall(pathStr, callback){
  var options = {
    host: hostStr,
    path: pathStr
  };
  var req = https.get(options, function(res) {
    console.log("statusCode: ", res.statusCode);
    console.log("headers: ", res.headers);
    var responseString = '';
    res.on('data', function(d){
      responseString += d;
    });
    res.on('end', function(){
      var responseObject = JSON.parse(responseString);
      return callback(null, responseObject);
    });
  });
  req.end();
  req.on('error', function(e){
    console.log(e);
    return callback(e);
  });
};
A slightly different approach, using Promise objects. Note that I looked into this as a learning exercise and hope it helps; I have not written all the code for you, and the debugging is all yours!
First, have listStuff consume the promise object that _apiCall will return:
function listStuff()
{
  var pathStr = '/release/api/stuff';
  var promise = _apiCall(pathStr);
  promise.then( function( responseObject){
    // handle response object data
  });
  promise.catch( function( error){
    console.log( error.message); // report error
  });
}
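As an aside, the two handlers can also be chained directly on the promise; a small, equivalent sketch:

_apiCall(pathStr)
  .then( function( responseObject){
    // handle response object data
  })
  .catch( function( error){
    console.log( error.message); // report error
  });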
The next step is to make _apiCall return a promise object for the HTTPS request it initiates inside the promise executor.
function _apiCall(pathStr)
{
  var options = {
    host: hostStr,
    path: pathStr
  };
  function beginGet( worked, failed)
  {
    // see below
  }
  return new Promise( beginGet);
}
Lastly, write beginGet to initiate the request and call the success or failure function depending on the outcome of the GET request.
function beginGet( worked, failed)
{
  var req;
  var responseObj;
  function getCallBack( res)
  {
    // all your get request handling code
    // on error, call failed(error)
    // on successful completion, call worked(responseObj)
  }
  req = https.get(options, getCallBack);
}
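For completeness, here is one way the body of getCallBack might be filled in (my own sketch, not part of the original answer; options is assumed to come from the enclosing _apiCall scope):

function beginGet( worked, failed)
{
  function getCallBack( res)
  {
    var responseString = '';
    res.on('data', function (d) {
      responseString += d;                  // accumulate the body chunks
    });
    res.on('end', function () {
      try {
        worked(JSON.parse(responseString)); // resolve the promise with the parsed JSON
      } catch (e) {
        failed(e);                          // reject if the body was not valid JSON
      }
    });
  }
  var req = https.get(options, getCallBack);
  req.on('error', failed);                  // reject on network errors
}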
Also, please check the https.get documentation - I think it calls req.end() for you. All the other errors are mine :-)

send binary response in node from phantomjs child process

I have created a node endpoint to create rasterised versions of my svg charts.
app.post('/dxexport', function(req, res){
  node2Phantom.createPhantomProcess(req, res);
});
My node-to-phantom function uses spawn to run phantomjs:
var spawn = require('child_process').spawn;
exports.createPhantomProcess = function(req, res){
  var userRequest = JSON.stringify(req.body);
  var bin = "node_modules/.bin/phantomjs"
  var args = ['./dxexport/exporter-server.js', userRequest, res];
  var cspr = spawn(bin, args);

  cspr.stdout.on('data', function (data) {
    var buff = new Buffer(data);
    res.send(data);
  });

  cspr.stderr.on('data', function (data) {
    data += '';
    console.log(data.replace("\n", "\nstderr: "));
  });

  cspr.on('exit', function (code) {
    console.log('child process exited with code ' + code);
    process.exit(code);
  });
};
When rendering is completed and the file has been created successfully, I call the renderCompleted function inside phantomjs:
var renderCompleted = function (parameters) {
  var exportFile = fileSystem.open(parameters.exportFileName, "rb"),
      exportFileContent = exportFile.read();
  parameters.response.statusCode = 200;
  parameters.response.headers = {
    "Access-Control-Allow-Origin": parameters.url,
    "Content-Type": contentTypes[parameters.format],
    "Content-Disposition": "attachment; fileName=" + parameters.fileName + "." + parameters.format,
    "Content-Length": exportFileContent.length
  };
  parameters.response.setEncoding("binary");
  parameters.response.write(exportFileContent);
  /////// somehow send exportFileContent as node res object for download \\\\\\\\
  exportFile.close();
  parameters.format !== "svg" && fileSystem.remove(parameters.exportFileName);
  for (var i = 0; i < parameters.filesPath.length; i++)
    fileSystem.remove(parameters.filesPath[i]);
  parameters.filesPath = [];
  parameters.response.close()
};
This response object is passed in from nodejs, but apparently this code is calling phantomjs methods on it, so I get errors like:
TypeError: 'undefined' is not a function (evaluating 'parameters.response.setEncoding("binary")')
How can I get the binary file content back to the node function so that my node server can send it to the user?
Any help is appreciated.
OK, after some struggle, here is the working solution in case someone stumbles on this post.
As Artjom B. mentioned, I found that the easiest way was to delegate the rendering and file creation of the visualisation to phantomjs, and then send all the parameters related to those operations through the console once done.
I also updated the answer based on @ArtjomB.'s advice to wrap the console message in unique beginning and end strings, so that other output cannot be mistaken for the intended rendered-file object.
var renderCompleted = function (parameters) {
  console.log("STRTORNDRD" + JSON.stringify(parameters) + "ENDORNDRD");
};
This then gets picked up on stdout and can be used like this:
exports.exportVisual = function (req, res) {
  var userRequest = JSON.stringify(req.body);
  var bin = "node_modules/.bin/phantomjs"
  var args = ['./dxexport/exporter-server.js', userRequest, res];
  var cspr = spawn(bin, args);
  var contentTypes = {
    pdf: "application/pdf",
    svg: "image/svg+xml",
    png: "image/png"
  };

  cspr.stdout.on('data', function (data) {
    var buff = new Buffer(data).toString('utf8');
    var strData = buff.match(new RegExp("STRTORNDRD" + "(.*)" + "ENDORNDRD"));
    if (strData) {
      var parameters = JSON.parse(strData[1]);
      var img = fs.readFileSync(parameters.exportFileName);
      var headers = {
        "Access-Control-Allow-Origin": parameters.url,
        "Content-Type": contentTypes[parameters.format],
        "Content-Disposition": "attachment; fileName=" + parameters.fileName + "." + parameters.format,
        "Content-Length": img.length
      };
      res.writeHead(200, headers);
      res.end(img, 'binary');
      // delete files after done
      if (parameters.format != "svg") {
        fs.unlink(parameters.exportFileName);
      }
      for (var i = 0; i < parameters.filesPath.length; i++)
        fs.unlink(parameters.filesPath[i]);
      // done. kill it
      cspr.kill('SIGINT');
    }
  });

  cspr.stderr.on('data', function (data) {
    data += '';
    console.log(data.replace("\n", "\nstderr: "));
  });

  cspr.on('exit', function (code) {
    console.log('child process exited with code ' + code);
    process.exit(code);
  });
};
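One small note that is not part of the original answer: on newer Node versions the Buffer constructor is deprecated, so the stdout handler would typically build the string with Buffer.from instead:

// Buffer.from() replaces the deprecated new Buffer() constructor
var buff = Buffer.from(data).toString('utf8');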

With node.js how do I set a var to the response from an HTTP client?

I know this is probably Asynchronous Javascript 101 and I do have some books on the Kindle I could consult, but I am nowhere near my device.
I have a node app with a variable being assigned to a module that I'm loading. The module has one function that downloads a string of data from a URL.
The problem is, how do I not set the variable until the request has returned?
My code looks like this:
Downloader.js:
var http = require('http');

exports.downloadString = function(str) {
  console.log("Downloading from " + str);
  http.get(str, function(res) {
    var data = [];
    console.log("Got response: " + res.statusCode);
    res.on('data', function (chunk) {
      data.push(chunk);
    });
    res.on('end', function() {
      return data.join('');
    });
  }).on('error', function(e) {
    console.log("Got error: " + e.message);
  });
}
app.js:
var downloader = require('./lib/Downloader')
  , dateParser = require('./lib/DateParser')
  , eventIdGenerator = require('./lib/EventIdGenerator');

var report = downloader.downloadString("http://exampleapi.com");
console.log(report);
I need to wait until the variable named "report" is populated.
Obviously this means my Mocha tests are also failing as I'm still unsure of how to tell the test to wait until the variable is filled.
I'm sure this is extremely simple, but I am drawing a blank!
Thanks!
Node.js is (mostly) asynchronous, so you'd need to pass a callback function to your module:
Downloader.js:
var http = require('http');

exports.downloadString = function(str, callback) {
  console.log("Downloading from " + str);
  http.get(str, function(res) {
    var data = [];
    console.log("Got response: " + res.statusCode);
    res.on('data', function (chunk) {
      data.push(chunk);
    });
    res.on('end', function() {
      callback(data.join(''));
    });
  }).on('error', function(e) {
    console.log("Got error: " + e.message);
  });
};
app.js:
var downloader = require('./lib/Downloader')
  , dateParser = require('./lib/DateParser')
  , eventIdGenerator = require('./lib/EventIdGenerator');

downloader.downloadString("http://exampleapi.com", function(report) {
  console.log(report);
});
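For reference, the same idea can also be expressed with a Promise instead of a plain callback; this is only a sketch of an alternative, not part of the original answer:

// Downloader.js (promise-based variant)
var http = require('http');

exports.downloadString = function (str) {
  return new Promise(function (resolve, reject) {
    http.get(str, function (res) {
      var data = [];
      res.on('data', function (chunk) { data.push(chunk); });   // collect chunks
      res.on('end', function () { resolve(data.join('')); });   // settle once the body is complete
    }).on('error', reject);                                     // reject on request errors
  });
};

// app.js
downloader.downloadString("http://exampleapi.com")
  .then(function (report) { console.log(report); })
  .catch(function (err) { console.error(err); });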

Node.js script would break when requesting HTTP responses from a site that does not exist

Using Node.js, when one makes an HTTP request, in optimal circumstances the request comes back with an HTTP response.
However, sometimes the request breaks because the site, for example, has a 404 code, or the site does not exist at all. When requesting a batch of URLs, if there is a 404 code on, say, the 200th URL out of 1000 URLs requested, the entire script breaks. Here is my code:
var hostNames = ['www.gizmodo.com','www.sitethatdoesnotexist123.com','www.google.com'];

for (i; i < hostNames.length; i++){
  var options = {
    host: hostNames[i],
    path: '/'
  };
  (function (i){
    http.get(options, function(res) {
      var obj = {};
      obj.url = hostNames[i];
      obj.statusCode = res.statusCode;
      obj.headers = res.headers;
      db.scrape.save(obj);
    }).on('error', function(e){
      console.log("Error Site: " + hostNames[i]);
    });
  })(i);
};
Is there a way that, if the site does not exist, for example, I can simply skip to the next URL instead of having the script break?
EDIT: Fixed. Thanks user DavidKemp
Use a try/catch block to catch any errors that might occur, and then continue on from there.
For example:
var hostNames = ['www.gizmodo.com','www.sitethatdoesnotexist123.com','www.google.com'];

//moved the function out so we do not have to keep redefining it:
var get_url = function (i){
  http.get(options, function(res) {
    var obj = {};
    obj.url = hostNames[i];
    obj.statusCode = res.statusCode;
    obj.headers = res.headers;
    console.log(JSON.stringify(obj, null, 4));
  })
};

for (i; i < hostNames.length; i++){
  var options = {
    host: hostNames[i],
    path: '/'
  };
  try {
    get_url(i);
  }
  catch(err){
    //do something with err
  }
};
You need to bind an error handler to your request. I also cleaned up the code a bit.
hostNames.forEach(function(hostName) {
  var req = http.get({host: hostName}, function(res) {
    var obj = {
      url: hostName,
      statusCode: res.statusCode,
      headers: res.headers
    };
    console.log(JSON.stringify(obj, null, 4));
  });
  req.on('error', function(err){
    console.log('Failed to fetch', hostName);
  });
});
You can use the uncaughtException event; this lets the script keep running even after an exception (link).
process.on('uncaughtException', function(err) {
  console.log('Caught exception: ' + err);
});

var hostNames = ['www.gizmodo.com','www.sitethatdoesnotexist123.com','www.google.com'];

for (i; i < hostNames.length; i++){
  var options = {
    host: hostNames[i],
    path: '/'
  };
  (function (i){
    http.get(options, function(res) {
      var obj = {};
      obj.url = hostNames[i];
      obj.statusCode = res.statusCode;
      obj.headers = res.headers;
      db.scrape.save(obj);
    }).on('error', function(e){
      console.log("Error Site: " + hostNames[i]);
    });
  })(i);
};
Added a callback for when there's an error; it logs the site that returned the error to the console. This error is usually triggered by a 404 or by the request taking too long.
The full docs are at http://nodejs.org/api/http.html#http_http_get_options_callback at the time of writing. loganfsmyth's answer provides a useful example.

Node.js http.get as a function

I am trying to make a function that returns the content of a webpage, and this is what I have so far:
var get_contents = function() {
  var httpRequestParams =
  {
    host: "google.com",
    port: 80,
    path: "/?"
  };
  var req = http.get(httpRequestParams, function(res)
  {
    var data = '';
    res.on('data', function(chunk) {
      data += chunk.toString();
    });
    //console.log(data);
  }).end();
  return req;
}
When I run this code, I see the html contents when the console logging is turned on, but when I try to return the output, it just never works.
I can't figure out a way to return get_contents() anywhere. On the console, it just doesn't respond.
Thanks
Something like this (don't forget to handle errors and timeouts):
var on_contents = function(cb) {
  var httpRequestParams =
  {
    host: "google.com",
    port: 80,
    path: "/?"
  };
  var req = http.get(httpRequestParams, function(res)
  {
    var data = '';
    res.on('data', function(chunk) {
      data += chunk.toString();
    });
    res.on('end', function(){
      cb(data);
    });
    //console.log(data);
  }).end();
}

function onFinish(data) {
  console.log(data);
}

on_contents(onFinish)
The short answer is: You can't return the data from that function. http.get is asynchronous, so it doesn't actually start running the callback until after your function ends. You'll need to have your get_contents function take a callback itself, check in the http.get handler whether you're done loading and, if you are, call the get_contents callback.
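A minimal sketch of that suggestion (assuming http has been required; the names mirror the question):

var http = require('http');

var get_contents = function (callback) {
  var httpRequestParams = {
    host: "google.com",
    port: 80,
    path: "/?"
  };
  http.get(httpRequestParams, function (res) {
    var data = '';
    res.on('data', function (chunk) {
      data += chunk.toString();   // keep accumulating until the response ends
    });
    res.on('end', function () {
      callback(data);             // hand the finished page back to the caller
    });
  });
};

// usage: the result arrives in the callback, not as a return value
get_contents(function (html) { console.log(html); });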
There is an awesome module, request, available in node.js.
var request = require('request'),
    url = require('url'),
    http = require('http'); // http is needed for createServer below

var server = http.createServer(function (request, response) {
  // note: getPage is not defined in this snippet; presumably a helper built on top of the request module
  getPage("http://isohunt.com/torrents/?iht=-1&ihq=life+is+beautiful", function (body) {
    console.log(body);
  })
});
server.listen(3000);
More information can be found on http://www.catonmat.net/blog/nodejs-modules-request/
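For what it's worth, the request module's basic call pattern looks roughly like this (a sketch, reusing the URL from the snippet above):

var request = require('request');

// request fetches the page and hands over the full body once the response has ended
request("http://isohunt.com/torrents/?iht=-1&ihq=life+is+beautiful", function (error, response, body) {
  if (error) {
    return console.error(error);
  }
  console.log(response.statusCode);
  console.log(body);
});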
