A Node.js/Express.js app makes a RESTful call to another app and receives JSON in response. But the JSON response is not being parsed into new variables. What specific changes need to be made to the code below, so that the JSON body can be successfully parsed into new variables that the receiving Node.js/Express.js app can use for further processing?
Here is the Node.js/Express.js code which is currently receiving the JSON body response:
var url = require('url');
var request = require('request');

app.get('/user**', function(req, res) {
    console.log("You Hit The User Route TOP");
    request.get(authServer + '/uaa/user', function (error, response, body) {
        if (error) { console.log('ERROR with user request.') }
        if (!error) { // && response.statusCode == 200) {
            console.log(response.statusCode); console.log(body);
            response.on('data', function(chunk) {
                console.log('inside response.on(data...)');
                body += chunk;
            });
            response.on('end', function() {
                console.log('inside response.on(end...)');
                body = JSON.parse(body);
                var text = '';
                for (var key in body) {
                    text += 'Index is: ' + key +
                        '\nDescription is: ' + body[key]
                }
                // The Description is: "descriptive string"
                console.log("Got a response: ", text);
                res.send(text);
            });
            res.send(body);
        };
    }).auth(null, null, true, bearerToken); // this inserts the bearer token in the GET request
    console.log("You Hit The User Route BOTTOM");
});
Here are the nodemon logs for the GET shown in the code. Note that the response.on() blocks are never called, because their console.log output never prints:
You Hit The User Route TOP
You Hit The User Route BOTTOM
200
{ long JSON string, which is formatted and truncated below for easier reading }
GET /user 200 182.862 ms - 1296
And here is the formatted and truncated JSON body, which illustrates the format of the data that needs to be parsed into Node.js/Express.js JavaScript variables:
{
    "details": {
        "remoteAddress": "127.0.0.1",
        "sessionId": null,
        "tokenValue": "SomeLongTokenString",
        "tokenType": "Bearer",
        "decodedDetails": null
    },
    "authenticated": true,
    "userAuthentication": {
        "details": null,
        "authorities": [
            { "authority": "ROLE_ADMIN" },
            { "authority": "ROLE_USER" }
        ],
        "authenticated": true,
        "principal": "user",
        "credentials": "N/A",
        "name": "user"
    },
    "name": "user"
}
The problem is that you're acting as though response is a stream that's incrementally giving you the JSON, but you've already proven to yourself that isn't true: your first console.log(body) statement prints the complete body. Instead, you can parse body immediately and begin working with it. You can also simplify your request handler.
if (error) {
    console.log('ERROR with user request.');
    return res.sendStatus(500);
}

body = JSON.parse(body);
var text = '';
for (var key in body) {
    text += 'Index is: ' + key + '\nDescription is: ' + body[key];
}
// The Description is: "descriptive string"
console.log("Got a response: ", text);
res.send(text);
The following line doesn't wait for your response to be parsed:

res.send(body);

Remove it; with the simplified handler above, res.send(text) is the only place the Express response is sent.
Edit to include restructured request
I'd structure your request differently. You're not streaming the response, so there shouldn't be much reason to listen for response events. You can also get rid of the need for JSON.parse() by letting request handle that for you: indicate that the returned body is JSON.
request({
    method: 'GET',
    url: authServer + '/uaa/user',
    json: true, // indicates the returned body is JSON; no need for JSON.parse()
    auth: {
        user: null,
        password: null,
        sendImmediately: true,
        bearer: bearerToken
    }
}, function (error, response, body) {
    if (error) {
        console.log('ERROR with user request.');
        return res.sendStatus(500); // report back that an error occurred
    } else {
        console.log(response.statusCode);
        console.log(body);
        var text = '';
        for (var key in body) {
            text += 'Index is: ' + key + '\nDescription is: ' + body[key];
        }
        return res.status(200).send(text);
    }
});
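For completeness: once body is a parsed object, the nested values from the sample payload can be read directly into variables, rather than concatenated with body[key] (which prints [object Object] for nested objects). A minimal sketch against the JSON shown above:

// Assumes `body` is the already-parsed object from the sample payload.
var userName = body.name;                 // "user"
var tokenValue = body.details.tokenValue; // "SomeLongTokenString"
var authenticated = body.authenticated;   // true
var roles = body.userAuthentication.authorities.map(function (a) {
    return a.authority;
});                                       // ["ROLE_ADMIN", "ROLE_USER"]
console.log(userName, tokenValue, authenticated, roles);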
So I'm having trouble getting one JavaScript function to finish before the next one starts. I've spent quite a lot of time trying the callback approaches described in other Stack Overflow posts. I could get simple timeout-based examples to work, but I couldn't get it to work with my API request. I stumbled upon async.js and thought that using async.series would be a good way to make my two functions run one after the other. I tried that approach, but I still have the problem: the first function takes a bit longer to execute (which is fine), yet execution moves past it instead of waiting for it to end. I feel I have a misconception of some sort, since I have tried several methods to no avail.
What is strange is that when running server.js, it goes into the first function but leaves the async.series() call even before the request has finished. When I print inside tokenReq(), I can see that the request was successful, as a token code is returned, but this happens too late, since execution has already moved on. The output is shown below.
server.js:
var access_code;
async.series([
    function() {
        access_code = queries.data.tokenReq(code);
        console.log("Finished inside function 1");
    },
    function() {
        console.log("\n Starting function 2 \n");
        if (access_code === "error") {
            res.json("An error has occured");
        } else {
            var response = queries.data.messagesReq(access_code);
            res.json(response);
        }
    }
],
function(err, access_code) {
});
console.log("Outside");
console.log("Outside");
queries.js:
tokenReq: function(code) {
    var tokenUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/token";
    var form = {
        code: code,
        client_id: "__ID__",
        redirect_uri: "__Site__/",
        grant_type: "authorization_code",
        client_secret: "__Secret__",
    };
    var formData = querystring.stringify(form);
    var contentLength = formData.length;
    request({
        headers: {
            'Content-Length': contentLength,
            'Content-Type': 'application/x-www-form-urlencoded'
        },
        uri: tokenUrl,
        body: formData,
        method: 'POST'
    }, function (error, response, body) {
        if (error != "null") {
            var access_token = JSON.parse(body).access_token;
            console.log("\n INSIDE FUNCTION REQUEST, Token: " + access_token + " \n");
            return access_token;
        } else {
            console.log('error:', error); // Print the error if one occurred
            console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received
            // console.log('body:', body);
            return "error";
        }
    });
},
Output:
Finished inside function 1
Outside
INSIDE FUNCTION REQUEST, Token: 8Swhd.......
You missed a major point here. Since Node.js is asynchronous, the caller has no way of knowing when a function finishes executing unless you tell it. That is why we pass callbacks: the invoked function knows whom to call when it finishes. Once your functions take callbacks, you can enforce series/parallel/waterfall behavior with the async module.
tokenReq: function(code, cb) {
    var tokenUrl = "https://login.microsoftonline.com/common/oauth2/v2.0/token";
    var form = {
        code: code,
        client_id: "__ID__",
        redirect_uri: "__Site__/",
        grant_type: "authorization_code",
        client_secret: "__Secret__",
    };
    var formData = querystring.stringify(form);
    var contentLength = formData.length;
    request({
        headers: {
            'Content-Length': contentLength,
            'Content-Type': 'application/x-www-form-urlencoded'
        },
        uri: tokenUrl,
        body: formData,
        method: 'POST'
    }, function (error, response, body) {
        if (!error) { // note: test the error object itself, not the string "null"
            var access_token = JSON.parse(body).access_token;
            console.log("\n INSIDE FUNCTION REQUEST, Token: " + access_token + " \n");
            return cb(null, access_token); // success: hand the token to the callback
        } else {
            console.log('error:', error); // print the error if one occurred
            console.log('statusCode:', response && response.statusCode); // print the status code if a response was received
            return cb(error); // failure: propagate the error through the callback
        }
    });
},
Now you can use the callback inside server.js. Since the second function needs the result of the first, async.waterfall (which passes each task's result on to the next task) is the right fit:
async.waterfall([
    function(cb) {
        return queries.data.tokenReq(code, cb);
    },
    function(access_code, cb) {
        // waterfall hands tokenReq's result in as access_code
        console.log("\n Starting function 2 \n");
        var response = queries.data.messagesReq(access_code);
        res.json(response);
        // do whatever you want after this
        return cb();
    }
],
function(err) {
    if (err) {
        // tokenReq failures arrive here via cb(error)
        console.log(err);
        return res.json("An error has occurred");
    }
    // wrap your logic in a function and call the corresponding callback here
});
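For reference, the same sequencing can be expressed without async.js by wrapping the callback-style tokenReq above in a Promise. A sketch, assuming the same tokenReq and messagesReq signatures as in the answer:

// Hypothetical wrapper; resolves with the token or rejects with the error.
function tokenReqAsync(code) {
    return new Promise(function (resolve, reject) {
        queries.data.tokenReq(code, function (err, token) {
            if (err) { return reject(err); }
            resolve(token);
        });
    });
}

tokenReqAsync(code)
    .then(function (access_code) {
        res.json(queries.data.messagesReq(access_code));
    })
    .catch(function (err) {
        console.log(err);
        res.json("An error has occurred");
    });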
I'm having problems sending JSON that will be verified by another server, after which I should get a response saying whether the process succeeded. I'm using HTTP.call, but so far I haven't gotten it to work.
I've already tested with Postman and everything works correctly there.
Here is a copy of the code:
// client side, button click event
var jsonStr = JSON.stringify(jsonOBJ);
Meteor.call("Json", jsonStr, function(error, result) {
    if (error) {
        console.log("error", error);
    }
    if (result) {
        console.log(result);
    }
});
// server side
Json(JsonStr) {
    var options = {
        data: JsonStr,
        headers: {
            'content-type': 'application/json'
        }
    };
    try {
        var url = "https://api.xxxxxxxxx.com/xxxxxxx-api/4.0/xxxxxx.cgi";
        var result = HTTP.call('POST', url, options);
        return result;
    } catch (err) {
        console.log(err);
    }
}
// I should receive something like:
{
    "code": "SUCCESS",
    "error": null,
    "transactionResponse": {
        ....
    }
}
That's the answer I'm getting from the server:
"{"code":"ERROR","error":"Invalid request format","result":null}"
Fixed. The problem was using var str = JSON.stringify(jsonOBJ) on the client: by the time the value passed through Meteor.call() and reached the Meteor method on the server, it no longer arrived in the format the remote API expected. The solution is to pass jsonOBJ from the client without formatting it, and apply JSON.stringify(jsonOBJ) on the server side instead.
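In code, that fix looks roughly like this (a sketch; note that Meteor's HTTP.call stringifies the data option itself, so a pre-stringified body can alternatively be sent via content):

// client side: pass the raw object through Meteor.call, no JSON.stringify here
Meteor.call("Json", jsonOBJ, function (error, result) {
    if (error) { console.log("error", error); }
    if (result) { console.log(result); }
});

// server side method
Json(jsonOBJ) {
    var options = {
        data: jsonOBJ, // HTTP.call serializes `data` to JSON for you
        // or, equivalently: content: JSON.stringify(jsonOBJ),
        headers: { 'content-type': 'application/json' }
    };
    var url = "https://api.xxxxxxxxx.com/xxxxxxx-api/4.0/xxxxxx.cgi";
    return HTTP.call('POST', url, options);
}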
I use the following http.get() call to call a local endpoint:
http.get({
    host: 'localhost',
    port: 80,
    path: '/service/info?id=' + id
}, function(response) {
    console.log(response);
    response.setEncoding('utf8');
    var data = "";
    response.on('data', function(chunk) {
        data += chunk;
    });
    response.on('end', function() {
        if (data.length > 0) {
            try {
                var data_object = JSON.parse(data);
            } catch(e) {
                return;
            }
        }
    });
}).on("error", function() { console.log("GET request error"); });
However, if I send a malformed request that triggers an HTTP 400 (the request is syntactically incorrect, etc.), then even though response.statusCode inside function(response) is 400, execution ends up in the catch() inside response.on('end', ...) instead of emitting the 'error' event on http.get(). I wonder why that is, and how I can handle an HTTP 400 response as an error in Node.js.
If it gets to catch(e), it takes a long time before anything is sent back to the client, which is also weird. I want the server to respond to the client as soon as possible when it hits a 400.
Elaborating on jeremy's answer, here is an example of checking the status code that works for me:
http.get(url, function (res) {
    if (res.statusCode != 200) {
        console.log("non-200 response status code:", res.statusCode);
        console.log("for url:", url);
        return;
    }
    // do something great :-)
});
response.statusCode contains the status code. You can read it in the http.get(..., cb) callback, or you can set up a listener:

request.on('response', function (response) {});

that receives the status code. You can then destroy the request if you want to cancel the GET, or handle it however you want.
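A sketch of that pattern, replying to the client immediately on a 400 and tearing the upstream connection down (this assumes an Express res is in scope to answer the original client):

var http = require('http');

var request = http.get({
    host: 'localhost',
    port: 80,
    path: '/service/info?id=' + id
});
request.on('response', function (response) {
    if (response.statusCode === 400) {
        res.sendStatus(400); // answer the client right away
        response.destroy();  // stop consuming the upstream response
        return;
    }
    // otherwise read the body with response.on('data'/'end') as before
});
request.on('error', function (e) {
    console.log("GET request error", e);
});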
How do I unzip a gzipped body in a response from the request module?
I have tried several examples around the web but none of them appear to work.
request(url, function(err, response, body) {
    if (err) {
        handleError(err);
    } else {
        if (response.headers['content-encoding'] == 'gzip') {
            // How can I unzip the gzipped string body variable?
            // For instance, this url:
            // http://highsnobiety.com/2012/08/25/norse-projects-fall-2012-lookbook/
            // Throws error:
            // { [Error: incorrect header check] errno: -3, code: 'Z_DATA_ERROR' }
            // Yet, the browser displays the page fine, and the debugger shows it's gzipped
            // and unzipped by the browser fine...
            if (response.headers['content-encoding'] && response.headers['content-encoding'].toLowerCase().indexOf('gzip') > -1) {
                var body = response.body;
                zlib.gunzip(response.body, function(error, data) {
                    if (!error) {
                        response.body = data.toString();
                    } else {
                        console.log('Error unzipping:');
                        console.log(error);
                        response.body = body;
                    }
                });
            }
        }
    }
});
I couldn't get request to work either, so ended up using http instead.
var http = require("http"),
zlib = require("zlib");
function getGzipped(url, callback) {
// buffer to store the streamed decompression
var buffer = [];
http.get(url, function(res) {
// pipe the response into the gunzip to decompress
var gunzip = zlib.createGunzip();
res.pipe(gunzip);
gunzip.on('data', function(data) {
// decompression chunk ready, add it to the buffer
buffer.push(data.toString())
}).on("end", function() {
// response and decompression complete, join the buffer and return
callback(null, buffer.join(""));
}).on("error", function(e) {
callback(e);
})
}).on('error', function(e) {
callback(e)
});
}
getGzipped(url, function(err, data) {
console.log(data);
});
Try adding encoding: null to the options you pass to request; this will avoid converting the downloaded body to a string and keep it in a binary buffer.
Like #Iftah said, set encoding: null.
Full example (less error handling):
var request = require('request');
var zlib = require('zlib');

request(url, { encoding: null }, function(err, response, body) {
    if (response.headers['content-encoding'] == 'gzip') {
        zlib.gunzip(body, function(err, dezipped) {
            callback(dezipped.toString());
        });
    } else {
        callback(body);
    }
});
Actually, the request module can handle the gzip response for you. To tell it to decode the body argument in the callback function, set gzip to true in the options. Let me explain with an example.
Example:
var opts = {
    uri: 'some uri which returns gzip data',
    gzip: true
};
request(opts, function (err, res, body) {
    // now body and res.body will both contain the decoded content
});
Note: The data you get via the 'response' event is not decoded.
This works for me. Hope it works for you guys too.
A similar problem we often run into with the request module is JSON parsing. If you want the request module to automatically parse the body and give you JSON content in the body argument, set json to true in the options.
var opts = {
    uri: 'some uri that provides json data',
    json: true
};
request(opts, function (err, res, body) {
    // body and res.body will contain the parsed json content
});
Reference: https://www.npmjs.com/package/request#requestoptions-callback
As seen in https://gist.github.com/miguelmota/9946206:
Both request and request-promise handle it out of the box as of Dec 2017:
var request = require('request');
request({
    method: 'GET',
    uri: 'http://www.google.com',
    gzip: true
}, function (error, response, body) {
    // body is the decompressed response body
    console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'));
    console.log('the decoded data is: ' + body);
});
I have formulated a more complete answer after trying the different ways to gunzip and solving errors to do with encoding.
Hope this helps you too:
var request = require('request');
var zlib = require('zlib');

var options = {
    url: 'http://some.endpoint.com/api/',
    headers: {
        'X-some-headers': 'Some headers',
        'Accept-Encoding': 'gzip, deflate',
    },
    encoding: null
};

request.get(options, function (error, response, body) {
    if (!error && response.statusCode == 200) {
        // If the response is gzipped, unzip first
        var encoding = response.headers['content-encoding'];
        if (encoding && encoding.indexOf('gzip') >= 0) {
            zlib.gunzip(body, function(err, dezipped) {
                var json_string = dezipped.toString('utf-8');
                var json = JSON.parse(json_string);
                // Process the json...
            });
        } else {
            // Response is not gzipped
        }
    }
});
Here are my two cents. I had the same problem and found a cool library called concat-stream:
let request = require('request');
const zlib = require('zlib');
const concat = require('concat-stream');

request(url)
    .pipe(zlib.createGunzip())
    .pipe(concat(stringBuffer => {
        console.log(stringBuffer.toString());
    }));
Here's a working example (using the request module for node) that gunzips the response
function gunzipJSON(response) {
    var gunzip = zlib.createGunzip();
    var json = "";
    gunzip.on('data', function(data) {
        json += data.toString();
    });
    gunzip.on('end', function() {
        parseJSON(json);
    });
    response.pipe(gunzip);
}
Full code: https://gist.github.com/0xPr0xy/5002984
I'm using node-fetch. I was reading response.body, when what I really wanted was await response.text().
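A minimal sketch of that (node-fetch decompresses gzipped responses transparently by default):

const fetch = require('node-fetch');

async function getText(url) {
    const response = await fetch(url);  // gzip is handled for you
    const text = await response.text(); // not response.body, which is a stream
    console.log(text);
}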
With got, a request alternative, you can simply do:
got(url).then(response => {
console.log(response.body);
});
Decompression is handled automagically when needed.
I used the gunzipSync convenience method in Node.js to decompress the body. This avoids working with callbacks.
import * as zlib from "zlib";
const uncompressedBody: string = zlib.gunzipSync(body).toString("utf-8");
(in TypeScript)
I'm calling the Firebase REST API from a Node.js process. The problem I'm seeing is that POSTs fail when the post body contains non-ASCII characters. This is despite the request returning a 200 status and the name of a node (which doesn't actually get created).
I'm currently trying something like this:
function push(path, object, callback) {
    console.log("Pushing to " + path + " on: " + firebase.host);
    var fullPath = firebase.basePath + path;
    console.log("fullPath=" + fullPath);
    var body = JSON.stringify(object);
    var options = {
        host: firebase.host,
        port: 80,
        method: "POST",
        path: fullPath, // gamma.firebase.com/...
        agent: false,
        headers: {
            'content-type': 'application/json',
            'Content-Length': body.length,
        }
    };
    var req = http.request(options, function(response) {
        var result = "";
        console.dir(response.headers);
        response.on('data', function(chunk) {
            result += chunk;
        });
        response.on('end', function() {
            console.error("POST response result: " + result);
            try {
                callback(JSON.parse(result));
            } catch(e) {
                callback({ error: e });
            }
        });
        response.on('error', function(e) {
            console.error("POST response error: " + e);
            callback({ error: e });
        });
    });
    req.on('error', function(error) {
        console.error("POST request error: " + error);
    });
    req.write(body);
    req.end();
}
The contents of "object" can be as simple as:
{"text": "test\u00a0text"}
The result I get back is status 200 and a reasonable-looking child name, which doesn't actually get created.
I've tried setting content-type to a bunch of different things (adding ; charset="UTF-8", for example), and it doesn't seem to affect the results at all.
There is an error in the way we are handling certain types of input, which is yielding the erroneous 200 status. We will roll out a fix shortly. To work around the problem in the meantime, you can omit the Content-Length header. This will allow you to post both ASCII and non-ASCII data.
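A likely reason the header causes trouble: body.length counts characters (UTF-16 code units), not bytes, so Content-Length understates the body size as soon as non-ASCII characters appear. If you do want to keep sending the header, computing it as a byte length should be safe. A sketch:

// '\u00a0' is one character but two bytes in UTF-8, so these differ:
var body = JSON.stringify({ text: "test\u00a0text" });
console.log(body.length);                     // character count (too small)
console.log(Buffer.byteLength(body, 'utf8')); // actual byte count

var headers = {
    'content-type': 'application/json',
    'Content-Length': Buffer.byteLength(body, 'utf8')
};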