Why do GitHub webhooks give me circular JSON objects? - javascript

With the code below I am trying to get some basic pull request info from my GitHub repo when someone submits a pull request for it. But I get this output instead:
$ node poc2.js
sha1=fef5611617bf56a36d165f73d41e527ee2c97d49
res [Circular]
req [Circular]
Since the secret hash is printed, the webhook is received. I can also verify this by running nc -l 8080 instead of my Node.js app; there I see a large JSON object, which is where I got the JSON structure used in the console.log calls below.
Question
Can anyone figure out why I get a "circular" JSON object from both req and res?
And how can I get the values I console.log?
const secret = "sdfsfsfsfwerwergdgv";
const http = require('http');
const crypto = require('crypto');

http.createServer(function (req, res) {
  req.on('data', function (chunk) {
    let sig = "sha1=" + crypto.createHmac('sha1', secret).update(chunk.toString()).digest('hex');
    console.log(sig);
    if (req.headers['x-hub-signature'] == sig) {
      console.log("res %j", res);
      console.log("req %j", req);
      if (res.action == 'opened') {
        console.log('PR for: ' + res.repository.html_url);
        console.log('PR for: ' + res.repository.full_name);
        console.log('PR: ' + res.pull_request.html_url);
        console.log('PR title: ' + res.pull_request.title);
        console.log('PR description' + res.pull_request.description);
        console.log('PR by user: ' + res.pull_request.user.login);
      };
    }
  });
  res.end();
}).listen(8080);

The req is circular because it's not a JSON object; it's the incoming request object (an http.IncomingMessage), which includes a lot of internal machinery, some of which is circularly linked. The same goes for res, which is the server's response object.
The actual incoming JSON document would be in req.body, or wherever your particular JSON body parser puts it; in this example you don't have a body parser yet, so that's something you should fix.
Tip: you may want to use Express instead of the core Node http module; it's much more capable.
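For reference, a minimal sketch that stays with the core http module: buffer the whole payload, verify the signature over the complete body, then JSON.parse it (the field names come from the question's console.log calls):

const http = require('http');
const crypto = require('crypto');

const secret = "sdfsfsfsfwerwergdgv";

http.createServer(function (req, res) {
  let body = '';
  req.on('data', function (chunk) { body += chunk; });
  req.on('end', function () {
    const sig = 'sha1=' + crypto.createHmac('sha1', secret).update(body).digest('hex');
    if (req.headers['x-hub-signature'] === sig) {
      const payload = JSON.parse(body); // the webhook JSON lives in the request body, not in req or res
      if (payload.action === 'opened') {
        console.log('PR for: ' + payload.repository.html_url);
        console.log('PR: ' + payload.pull_request.html_url);
        console.log('PR title: ' + payload.pull_request.title);
        console.log('PR by user: ' + payload.pull_request.user.login);
      }
    }
    res.end();
  });
}).listen(8080);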

Related

Cannot get objects' values from request body while trying to POST [Node.js, MySQL]

I am working on a management system and am currently creating the POST requests for the API. I need the values from the request's body to post a new city to the database; the values are used as parameters for a stored procedure. Instead of the key values I entered, I get "undefined", and sometimes "[object Object]".
I am using a MySQL server hosted on Google Cloud. The backend is built with Node.js, with routing handled by Express. None of my attempts to fix the bug have worked.
What I've tried so far:
- Parsing each key's value from the body with .toString() and JSON.stringify()
- Converting the body to JSON/string, then back to a JavaScript object
- Getting the response's request's body (res.req.body)
- Collecting the body's values in an array, then pushing the next element after it has been passed as a parameter to the stored procedure
- Using the 'request' npm package to supply the values I need when calling the POST method
- Storing the values in the URL parameters instead of the body
- Sending the body's values as form-data, a JSON file, and an HTML page
Controller method in cityController.js:
exports.city_post = (req, res, next) => {
  poolDb.getConnection(function (err, connection) {
    if (!err) {
      const sql = 'CALL createNewCity(?,?)';
      var zipReq = req.params.zip;
      var nameReq = req.params.name;
      var reqBody = JSON.stringify(req.res.body);
      connection.query(sql, [zipReq, nameReq], (err, rows) => {
        if (!err) {
          return res.status(201).json({Message: 'City with name: ' + nameReq + ' and zip code: ' + zipReq + ' created successfully\n', rows});
        }
        else {
          return res.status(404).json({errorMessage: err})
        }
      });
    }
    else {
      return res.status(500).json({errorMessage: "server error: " + this.err});
    }
    console.log("\nZip Code: " + zipReq + "\nName: " + nameReq); //for testing
    console.log("\nrequest body: " + reqBody); //for testing
  });
}
City route:
const express = require('express');
const router = express.Router();
const CityController = require('../controllers/cityController.js');
router.get('/', CityController.city_getAll);
router.get('/:cityzip', CityController.city_getbyzip);
router.post('/add', CityController.city_post);
...
module.exports = router;
Expected: a new row posted in the city table, status code 201.
Actual: status code 404, no new insertion in the DB; the body, req.body.zip and req.body.name are all "undefined".
Screenshots:
-Terminal output: https://imgur.com/a/brqKZlP
-Postman request: https://imgur.com/a/ZfFcX8Z
Express doesn't parse the POST body by default (for some reason). You can try the popular body-parser npm package, or collect the raw body data and parse it from a string yourself if you don't want to add a whole new package. Here it is as Express middleware:
app.use(function (req, res, next) {
  var data = "";
  req.on('data', function (chunk) { data += chunk; });
  req.on('end', function () {
    req.rawBody = data;
    req.body = JSON.parse(req.rawBody); // assuming valid JSON; ideally check Content-Type first
    next();
  });
});
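For reference, a minimal sketch using the body-parser package (or the built-in express.json() on Express 4.16+); the /city mount path and the controller lines are reused from the question as an illustration, and the router path is an assumption:

const express = require('express');
const bodyParser = require('body-parser');

const app = express();
app.use(bodyParser.json()); // parses application/json request bodies into req.body

app.use('/city', require('./routes/city')); // path to the city router is an assumption

// In city_post, read from req.body instead of req.params:
//   var zipReq = req.body.zip;
//   var nameReq = req.body.name;

app.listen(3000); // port is arbitrary for this sketch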

Writing an image to file, received over an HTTP request in Node

I'm certain I'm missing something obvious, but the gist of the problem is I'm receiving a PNG from a Mapbox call with the intent of writing it to the file system and serving it to the client. I've successfully relayed the call, received a response of raw data and written a file. The problem is that my file ends up truncated no matter what path I take, and I've exhausted the answers I've found skirting the subject. I've dumped the raw response to the log, and it's robust, but any file I make tends to be about a chunk's worth of unreadable data.
Here's the code I've got at present for writing the file. I tried this buffer conversion as a last-ditch effort after several failed and comparably fruitless iterations. Any help would be greatly appreciated.
module.exports = function(req, res, cb) {
  var cartography = function() {
    return https.get({
      hostname: 'api.mapbox.com',
      path: '/v4/mapbox.wheatpaste/' + req.body[0] + ',' + req.body[1] + ',6/750x350.png?access_token=' + process.env.MAPBOX_API
    }, function(res) {
      var body = '';
      res.on('data', function(chunk) {
        body += chunk;
      });
      res.on('end', function() {
        var mapPath = 'map' + req.body[0] + req.body[1] + '.png';
        var map = new Buffer(body, 'base64');
        fs.writeFile(__dirname + '/client/images/maps/' + mapPath, map, 'base64', function(err) {
          if (err) throw err;
          cb(mapPath);
        })
      })
    });
  };
  cartography();
};
It is possible to rewrite your code as a more compact subroutine:
const fs = require('fs');
const https = require('https');

https.get(url, (response) => { //request itself
  if (response) {
    let imageName = 'image.png'; // for this purpose I usually use crypto
    response.pipe(              //pipe response to a write stream (file)
      fs.createWriteStream(     //create write stream
        './public/' + imageName //create a file with name image.png
      )
    );
    return imageName; //if public folder is set as default in app.js
  } else {
    return false;
  }
});
You could get the original name and extension from the URL, but it's safer to generate a new name with crypto and get the file extension, as I said, from the URL or with the read-chunk and file-type modules.
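If you want to keep the original module.exports/callback shape from the question, a sketch along these lines (hostname, path, coordinates and the MAPBOX_API variable all taken from the question) pipes the binary response to disk and calls cb once the write stream has finished:

const fs = require('fs');
const https = require('https');
const path = require('path');

module.exports = function (req, res, cb) {
  const mapPath = 'map' + req.body[0] + req.body[1] + '.png';
  const dest = path.join(__dirname, 'client/images/maps', mapPath);

  https.get({
    hostname: 'api.mapbox.com',
    path: '/v4/mapbox.wheatpaste/' + req.body[0] + ',' + req.body[1] +
          ',6/750x350.png?access_token=' + process.env.MAPBOX_API
  }, function (response) {
    // Pipe the raw binary response straight to disk; no string concatenation,
    // so the image data never gets mangled by an encoding conversion.
    response.pipe(fs.createWriteStream(dest))
      .on('finish', function () { cb(mapPath); })
      .on('error', function (err) { throw err; });
  });
};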

GiantBomb API request getting HTML instead of JSON, Nodejs Request Module

Hey, I'm trying to make a query to the Giant Bomb API, but for some reason I am getting back a bunch of HTML/JS instead of a JSON object. When I enter the query in the browser I get the JSON as expected.
var giantBombAPI = 'http://www.giantbomb.com/api';
var searchString = giantBombAPI + '/search?api_key=' + apiKey +
    '&format=json' + '&query=' + searchTerms + "&resources=game";

//Make our request to the API
request.get({uri: searchString}, function (err, res, body) {
  jsonRes = JSON.parse(body);
});
Not sure what I'm missing. Also, it worked yesterday :P.
I am plugging in "warcraft" for searchTerms to test.
I am using the Node.js request module.
Thanks.
Solved it: the API now requires a custom User-Agent header:
request.get({uri: searchString, headers: {'user-agent': '<CUSTOM>'}},
  function (err, res, body) {
    //....
  }
);
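Put together with the search code from the question, a minimal sketch of the working call (the '<CUSTOM>' value is just a placeholder; apiKey and searchTerms are assumed to be defined as before):

var request = require('request');

var giantBombAPI = 'http://www.giantbomb.com/api';
var searchString = giantBombAPI + '/search?api_key=' + apiKey +
    '&format=json' + '&query=' + searchTerms + '&resources=game';

request.get({
  uri: searchString,
  headers: {'user-agent': '<CUSTOM>'} // replace with a string identifying your app
}, function (err, res, body) {
  if (err) throw err;
  var jsonRes = JSON.parse(body); // now JSON, not the HTML error page
  console.log(jsonRes);
});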

nodejs: node-http-proxy and harmon: rewriting the HTML response from the endpoint instead of the 302 redirect response

I'm using Node.js with node-http-proxy along with harmon. I am using harmon to rewrite the proxied response to include a JavaScript file and a CSS file. When I set the target of the proxy to http://nodejs.org or anything other than localhost, I receive a 301 or 302 redirect. The script is rewriting the 301/302 response instead of the fully proxied response. How can I use harmon to rewrite the final response instead of the redirect response?
Here is the example of the script I am running from the harmon example folder:
var http = require('http');
var connect = require('connect');
var httpProxy = require('http-proxy');

var selects = [];
var simpleselect = {};

//<img id="logo" src="/images/logo.svg" alt="node.js">
simpleselect.query = 'img';
simpleselect.func = function (node) {
  //Create a read/write stream with the "outer" option
  //so we get the full tag and we can replace it
  var stm = node.createStream({ "outer" : true });

  //variable to hold all the info from the data events
  var tag = '';

  //collect all the data in the stream
  stm.on('data', function(data) {
    tag += data;
  });

  //When the read side of the stream has ended..
  stm.on('end', function() {
    //Print out the tag; you can also parse it or regex it if you want
    process.stdout.write('tag: ' + tag + '\n');
    process.stdout.write('end: ' + node.name + '\n');

    //Now on the write side of the stream write some data using .end()
    //N.B. if end isn't called it will just hang.
    stm.end('<img id="logo" src="http://i.imgur.com/LKShxfc.gif" alt="node.js">');
  });
}
selects.push(simpleselect);

//
// Basic Connect App
//
var app = connect();

var proxy = httpProxy.createProxyServer({
  target: 'http://nodejs.org'
})

app.use(require('../')([], selects, true));
app.use(
  function (req, res) {
    proxy.web(req, res);
  }
);
The problem is that a lot of sites are now redirecting HTTP to HTTPS.
nodejs.org is one of those.
I have updated the sample https://github.com/No9/harmon/blob/master/examples/doge.js to show how the http-proxy needs to be configured to deal with HTTPS.
If you still have problems with other arbitrary redirects, please log an issue on harmon.
Thanks
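The gist of that change is to point http-proxy at the HTTPS origin directly, so the upstream never answers with the redirect that harmon then rewrites. A minimal sketch (the option names are standard node-http-proxy options; this is not the exact contents of the updated doge.js example):

// Proxy straight to the HTTPS origin so the upstream no longer answers
// with a 301/302 that harmon then rewrites.
var proxy = httpProxy.createProxyServer({
  target: 'https://nodejs.org',
  changeOrigin: true, // rewrite the Host header to match the target
  secure: true        // verify the upstream TLS certificate
});

app.use(require('harmon')([], selects, true));
app.use(function (req, res) {
  proxy.web(req, res);
});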

Creating a map of ids to sockets and vice versa in Node.js

I'm trying to manage a bunch of socket connections. My app is basically an http server that receives posts and passes these along to a socket. When clients open a socket connection, they send a connect message with an id:
{"m":"connect","id":"1"}
The app then saves this id and socket in the id2socket and socket2id maps. On disconnect, the socket/id pair is deleted from the maps.
A post will also contain an id, which indicates the post data should be sent to the socket with that id.
That's great, and this works fine for a single open socket. However, when I have more than one socket open and then close one of them, that disconnect wipes everything from the maps. I think my understanding of sockets in Node is incomplete: is there only a single socket object used in the callback? Is there a better way to manage my open socket connections and ids?
start server:
>>node server.js
TCP server listening on 127.0.0.1:5280
HTTP server listening on 127.0.0.1:9002
telnet in:
>>telnet localhost 5280
Trying 127.0.0.1...
Connected to localhost.
Escape character is '^]'.
{"m":"connect","id":"123"}
{"m":"connect","id":"123","success":"true"}
server after connection:
>>Connection from 127.0.0.1:57572
received data: {"m":"connect","id":"123"}
id: 1
m: connect
associating uid 1 with socket [object Object]
do a post:
python post.py {"foo":"bar"}
So this works fine for several open sockets (as long as one device has id 123; the server has this hardwired for now). However, as soon as you close one connection, all the socket connections are removed from the map.
Here's my code:
python script to do post:
import sys
import json
import httplib, urllib, urllib2
values = json.loads('{"foo":"bar"}')
headers = {"Content-type": "application/json"}
conn = httplib.HTTPConnection('127.0.0.1', 9002)
headers = {"Content-type": "application/json"}
conn.request("POST", "", json.dumps(values), headers)
response = conn.getresponse()
print "response.status: "+response.status
print "response.reason: "+response.reason
print "response.read: "+response.read()
conn.close()
node server (http and tcp), hardwired to send data to device '123' on post:
var net = require('net');        // tcp-server
var http = require("http");      // http-server
var qs = require('querystring'); // http-post

// Map of sockets to devices
var id2socket = new Object;
var socket2id = new Object;

// Setup a tcp server
var server_plug = net.createServer(function(socket) {
  // Event handlers
  socket.addListener("connect", function(conn) {
    console.log("Connection from " + socket.remoteAddress + ":" + socket.remotePort);
  });
  socket.addListener("data", function(data) {
    console.log("received data: " + data);
    try {
      request = JSON.parse(data);
      response = request;
      if(request.m !== undefined && request['id'] !== undefined){ // hack on 'id', id is js obj property
        console.log("id: "+request['id']);
        console.log("m: "+request.m);
        if(request.m == 'connect'){
          console.log("associating uid " + request['id'] + " with socket " + socket);
          id2socket[request['id']] = socket;
          socket2id[socket] = request['id'];
          response.success = 'true';
        } else {
          response.success = 'true';
        }
      }
      socket.write(JSON.stringify(response));
    } catch (SyntaxError) {
      console.log('Invalid JSON:' + data);
      socket.write('{"success":"false","response":"invalid JSON"}');
    }
  });
  socket.on('end', function() {
    id = socket2id[socket]
    console.log("socket disconnect by id " + id);
    // wipe out the stored info
    console.log("removing from map socket:"+socket+" id:"+id);
    delete id2socket[id];
    delete socket2id[socket];
  });
  socket.on('timeout', function() {
    console.log('socket timeout');
  });
});

// Setup http server
var server_http = http.createServer(
  // Function to handle http:post requests, need two parts to it
  // http://jnjnjn.com/113/node-js-for-noobs-grabbing-post-content/
  function onRequest(request, response) {
    request.setEncoding("utf8");
    request.addListener("data", function(chunk) {
      request.content += chunk;
    });
    request.addListener("end", function() {
      console.log("post received!");
      //console.log("Request received: "+request.content);
      if (request.method == 'POST') {
        //var json = qs.parse(request.content);
        //console.log("Post: "+json);
        // HACK TO TEST STUFF:
        // send a message to one of the open sockets
        try {
          var socket = id2socket['123']; //hardwired
          socket.write('{"m":"post"}');
        } catch (Error) {
          console.log("Cannot find socket with id "+'123');
        }
      }
    });
  }
);

// Fire up the servers
var HOST = '127.0.0.1';
var PORT = 5280;
var PORT2 = 9002;

server_plug.listen(PORT, HOST);
console.log("TCP server listening on "+HOST+":"+PORT);
server_http.listen(PORT2);
console.log("HTTP server listening on "+HOST+":"+PORT2);
Objects only take strings as keys for their properties. As your log shows, a socket object is converted into the string "[object Object]", so every socket comes down to the same key. Socket #2 therefore overwrites the id stored for socket #1, and there is only ever one property in the object. When you try to remove the id for socket #2, that single property is deleted and the object is empty.
You seem to want a custom property for each separate socket when used as a key. You can use WeakMaps for this. WeakMaps do allow objects as keys (as opposed to string-only keys), but as they're relatively new they may contain bugs at the moment.
(Note that the id2socket map can just be a plain object, because numbers are converted into strings just fine, and each number has its own, distinct string representation*.)
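A quick illustration of that coercion:

var a = {};
var b = {};
var map = {};

map[a] = 1;                    // the key becomes the string "[object Object]"
map[b] = 2;                    // same string key, so this overwrites the first entry
console.log(Object.keys(map)); // [ '[object Object]' ]
console.log(map[a]);           // 2, because both objects resolve to the same slot
delete map[b];                 // deletes the only property; map is now empty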
Using WeakMaps is as follows:
var socket2id = new WeakMap; // as if you were doing: var socket2id = {};
socket2id.set(socket, id); // as if you were doing: socket2id[socket] = id;
socket2id.get(socket); // as if you were doing: socket2id[socket];
socket2id.delete(socket); // as if you were doing: delete socket2id[socket];
Make sure to run with node --harmony (>= 0.7) or node --harmony_weakmaps (<= 0.6).
* 0 and -0 are exceptions, but you shouldn't be using -0 anyway because 0 === -0, so it's difficult to differentiate between them.
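Applied to the TCP server from the question, a trimmed-down sketch of the bookkeeping (message shape and port taken from the question, error handling reduced to the essentials):

var net = require('net');

var id2socket = {};            // id -> socket: a plain object is fine, ids stringify uniquely
var socket2id = new WeakMap(); // socket -> id: object keys need a WeakMap

net.createServer(function (socket) {
  socket.on('data', function (data) {
    try {
      var request = JSON.parse(data);
      if (request.m === 'connect' && request.id !== undefined) {
        id2socket[request.id] = socket;
        socket2id.set(socket, request.id);
        socket.write(JSON.stringify({ m: 'connect', id: request.id, success: 'true' }));
      }
    } catch (e) {
      socket.write('{"success":"false","response":"invalid JSON"}');
    }
  });

  socket.on('end', function () {
    var id = socket2id.get(socket);
    console.log('socket disconnect by id ' + id);
    delete id2socket[id];      // only this connection's entry goes away now
    socket2id.delete(socket);
  });
}).listen(5280, '127.0.0.1');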
