Node.js: Transform Request Options into Final URL - javascript

If I'm using Node.js, is there a way I can automatically turn a set of options for the request function into the final URL that Node.js will use for its HTTP request?
That is, if I have a set of options that I use like this:
var http = require('http');

var options = {
  host: 'www.random.org',
  path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};

var callback = function (response) {
  var str = '';

  // another chunk of data has been received, so append it to `str`
  response.on('data', function (chunk) {
    str += chunk;
  });

  // the whole response has been received, so we just print it out here
  response.on('end', function () {
    console.log(str);
  });
};

http.request(options, callback).end();
Is there a way for me to transform
var options = {
  host: 'www.random.org',
  path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};
Into the string
www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new
I realize for the above case this would be a trivial string concatenation.
const url = 'http://' + options.host + options.path;
What I'm interested in is code that can transform any options object into its final URL. If I look at the manual, there are twenty-one possible options for a request. Some might impact the final URL; some might not. I'm hoping Node.js or npm has a built-in way of turning those options into a URL, saving me the tedious work of doing it myself.

Node.js originally offered the querystring module, which has functions that seem to do what you need. For instance, the stringify function:
https://nodejs.org/dist/latest-v15.x/docs/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options
querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' });
// Returns 'foo=bar&baz=qux&baz=quux&corge='
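As a rough sketch, the query portion of your path could be rebuilt from a plain object like this (the parameter names and values are taken from your example):
const querystring = require('querystring');

// Rebuild the query string of the example path from a plain object
const query = querystring.stringify({
  num: 1, min: 1, max: 10, col: 1, base: 10, format: 'plain', rnd: 'new'
});
console.log('/integers/?' + query);
// Prints /integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new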
More recently, objects like URLSearchParams were introduced in the url module to better align with the WHATWG spec and therefore be more in line with the APIs available in browsers:
https://nodejs.org/dist/latest-v15.x/docs/api/url.html#url_class_urlsearchparams
const myURL = new URL('https://example.org/?abc=123');
console.log(myURL.searchParams.get('abc'));
// Prints 123
myURL.searchParams.append('abc', 'xyz');
console.log(myURL.href);
// Prints https://example.org/?abc=123&abc=xyz
The approach you choose in the end depends on your specific needs.
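If the handful of options that actually shape the URL is all you care about, here is a minimal sketch that leans on the WHATWG URL class (optionsToUrl is a made-up helper name, and it only considers protocol, hostname/host, port and path, not all twenty-one request options):
const { URL } = require('url');

// Minimal sketch: only protocol, hostname/host, port and path are considered
function optionsToUrl(options) {
  const protocol = options.protocol || 'http:';
  const host = options.hostname || options.host || 'localhost';
  const port = options.port ? ':' + options.port : '';
  const path = options.path || '/';
  return new URL(path, protocol + '//' + host + port).href;
}

console.log(optionsToUrl({
  host: 'www.random.org',
  path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
}));
// Prints http://www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new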

Related

Node.js request - handling multiple POST requests

I use the request library to communicate with other servers via their APIs. But now I need to send multiple (10 or more) POST requests at the same time and move on only if all responses are correct. Usually the syntax looks a bit like this:
var options = {
  url: "",
  method: "POST",
  headers: {...},
  body: {...}
};

request(options, function (err, response, body) {
  // handle response
});
But now I've got an array of objects instead of a single options variable. Is there a way to do this? Or maybe there is another library able to handle the issue.
EDIT:
var arrayOfIds = [];
const requests = [];

for (var i in range) {
  var options = {}; // here goes all bodies and headers to send
  requests.push( // push a request to array dynamically
    request(options, function (err, response, body) {
      if (!err && response.statusCode == 201) {
        arrayOfIds.push(body.id);
      }
    })
  );
}

Promise.all(requests)
  .then(function (res) {
    console.log(arrayOfIds); // this is empty
  });
There are several approaches to solve this:
async library, method parallel
Promise.all
To switch your requests to promises, use the request-promise module in addition to request. In code it will look like this:
const request = require('request-promise');

// Note, you don't assign callback here
const promises = [
  request.post({...}),
  request.post({...}),
  request.post({...})
];

// And then you simply do Promise.all
Promise.all(promises).then(console.log);
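If you build the options objects dynamically (as in your edit), you can map them to promises and let Promise.all hand you the bodies in order. A sketch, assuming each response body is JSON containing an id (arrayOfOptions stands in for whatever array you build):
const request = require('request-promise');

// arrayOfOptions stands in for your dynamically built array of {url, headers, body, ...} objects
const promises = arrayOfOptions.map(function (options) {
  return request.post(Object.assign({ json: true }, options));
});

Promise.all(promises)
  .then(function (bodies) {
    // Only reached if every request succeeded (request-promise rejects on non-2xx by default)
    const arrayOfIds = bodies.map(function (body) { return body.id; });
    console.log(arrayOfIds);
  })
  .catch(function (err) {
    // At least one request failed
    console.error(err);
  });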

Not understanding Cookies in Node.js

I am making a program in Node.js that involves cookies. I don't want to use a library like express. I found the following code online for using cookies, but I am not exactly sure how it works. Could somebody break it down for me? Also, I am not sure which part of the code reads cookies in the system and which part writes them. Could you clarify that as well?
Thank you:
Here is the code:
var http = require('http');

function parseCookies(cookie) {
  return cookie.split(';').reduce(
    function (prev, curr) {
      var m = / *([^=]+)=(.*)/.exec(curr);
      var key = m[1];
      var value = decodeURIComponent(m[2]);
      prev[key] = value;
      return prev;
    },
    { }
  );
}

function stringifyCookies(cookies) {
  var list = [ ];
  for (var key in cookies) {
    list.push(key + '=' + encodeURIComponent(cookies[key]));
  }
  return list.join('; ');
}

http.createServer(function (request, response) {
  var cookies = parseCookies(request.headers.cookie);
  console.log('Input cookies: ', cookies);

  cookies.search = 'google';
  if (cookies.counter)
    cookies.counter++;
  else
    cookies.counter = 1;
  console.log('Output cookies: ', cookies);

  response.writeHead(200, {
    'Set-Cookie': stringifyCookies(cookies),
    'Content-Type': 'text/plain'
  });
  response.end('Hello World\n');
}).listen(1234);
Cookies are just a piece of text that's sent as a header when the browser sends a request to a server. The server can then modify the cookies if it wants, and send them back as a header to the browser. In this code, reading happens where request.headers.cookie is parsed, and writing happens where the Set-Cookie response header is set.
The convention for cookies is that they are key=value pairs separated by semicolons, with each value percent-encoded much like in a URL query string (which is why decodeURIComponent and encodeURIComponent work in your example!).
parseCookies reads the cookie string into an object representing your cookies. E.g.
// Input
"foo=bar; baz=42"
// Output
{ foo: "bar", baz: "42" }
stringifyCookies takes that cookie object and converts it back into a cookie string:
// Input
{ foo: "bar", baz: "42" }
// Output
"foo=bar; baz=42"
Does that make sense?

Request Stream Get + Post edited JSON body in Node.js

I'm new to Node.js and am working on a project and I'd like to use Request to stream from one endpoint to another. My goal is to use Request to get and post an edited JSON body using a pipe stream. I know that when doing so, content-type and content-length will be preserved in the POST headers. However, I would like to apply .forEach to all JSON objects in the body from the first url, and post them to the second url.
I'm not sure about the correct format, and I'd appreciate some clarification.
I know the basic syntax is this:
request.get('URL').pipe(request.post('URL'));
And so far my best guess is something like this:
request('FIRST_URL', function (error, response, body) {
  body = JSON.parse(body);
  body.forEach(function (arg) {
    // return edited body
  });
}).pipe(request.post('SECOND_URL'));
Am I missing something? Is there a better way to do this?
You could write your own transform stream. For example:
var Transform = require('stream').Transform;
var inherits = require('util').inherits;

function JSONTransform() {
  Transform.call(this);
  this._buffer = '';
}
inherits(JSONTransform, Transform);

JSONTransform.prototype._transform = function (chunk, enc, cb) {
  this._buffer += chunk;
  cb();
};

JSONTransform.prototype._flush = function (cb) {
  try {
    var result = JSON.parse(this._buffer);
    this._buffer = null;
    // Do whatever transformations
    // ...
    this.push(JSON.stringify(result));
    cb();
  } catch (ex) {
    cb(ex);
  }
};

// Then just pipe
request.get('FIRST_URL')
  .pipe(new JSONTransform())
  .pipe(request.post('SECOND_URL'));
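The forEach edit you described would go where the '// Do whatever transformations' comment sits. A sketch of that _flush, assuming the first endpoint returns a JSON array (the edited property is just a placeholder):
JSONTransform.prototype._flush = function (cb) {
  try {
    var result = JSON.parse(this._buffer);
    this._buffer = null;
    result.forEach(function (item) {
      item.edited = true; // placeholder: apply whatever per-object changes you need
    });
    this.push(JSON.stringify(result));
    cb();
  } catch (ex) {
    cb(ex);
  }
};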
One other slightly different solution that may be worth considering would be to use a third-party streaming JSON parser module, which may or may not work for your use case.

Express.js proxy pipe translate XML to JSON

For my front-end (angular) app, I need to connect to an external API, which does not support CORS.
So my way around this is to have a simple proxy in Node.JS / Express.JS to pass the requests. The additional benefit is that I can set my api-credentials at proxy level, and don't have to pass them to the front-end where the user might steal/abuse them.
This is all working perfectly.
Here's the code, for the record:
var request = require('request');
var config = require('./config');

var url = config.api.endpoint;
var uname = config.api.uname;
var pword = config.api.pword;

var headers = {
  "Authorization": 'Basic ' + new Buffer(uname + ':' + pword).toString('base64'),
  "Accept": "application/json"
};

exports.get = function (req, res) {
  var api_url = url + req.url;
  var r = request({url: api_url, headers: headers});
  req.pipe(r).pipe(res);
};
The API endpoint I have to use outputs XML only. So I use xml2js on the front-end to convert the XML response to JSON.
This is also working great, but I would like to lighten the load for the client, and do the XML -> JSON parsing step on the server.
I assume I will have to create something like:
req.pipe(r).pipe(<convert_xml_to_json>).pipe(res);
But I don't have any idea how do create something like that.
So basically I'm looking to create an XML to JSON proxy as a layer on top of an already existing API.
There are a lot of questions on SO regarding "how do I make a proxy" and "how do I convert XML to JSON" but I couldn't find any that combine the two.
You need to use a transform stream, and for the XML to JSON conversion you need a library; I use xml2json.
Then you use it like this (simplified, but it should work with request too):
var http = require('http');
var fs = require('fs');
var parser = require('xml2json');
var Transform = require('stream').Transform;

function xmlParser() {
  var transform = new Transform();

  transform._transform = function (chunk, encoding, done) {
    chunk = parser.toJson(chunk.toString());
    console.log(chunk);
    this.push(chunk);
    done();
  };

  transform.on('error', function (err) {
    console.log(err);
  });

  return transform;
}

var server = http.createServer(function (req, res) {
  var stream = fs.createReadStream(__dirname + '/data.xml');
  stream.pipe(xmlParser()).pipe(res);
});
server.listen(8000);
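Applied to your existing proxy handler, that would look roughly like this. One caveat (an assumption worth testing): parser.toJson is called per chunk, so this only works reliably if the upstream XML arrives in a single chunk; otherwise buffer the chunks in the transform first.
exports.get = function (req, res) {
  var api_url = url + req.url;
  var r = request({url: api_url, headers: headers});
  req.pipe(r).pipe(xmlParser()).pipe(res);
};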

Node.js minimal function for parsing route

I have a Node.js / Express app working, that receives routes like so:
app.get('/resource/:res', someFunction);
app.get('/foo/bar/:id', someOtherFunction);
This is great and works fine.
I am also using Socket.IO, and want to have some server calls use websockets instead of traditional RESTful calls. However, I want to make it very clean and use almost the same syntax, preferably:
app.sio.get('/resource/:res', someFunction);
This will give a synthetic 'REST' interface to Socket.IO, where, from the programmer's perspective, he isn't doing anything different. Just flagging websockets: true from the client.
I can deal with all the details, such as a custom way to pass in the request verbs and parse them and so on; I don't have a problem with this. The only thing I am looking for is some function that can parse routes like Express does, and route them properly. For example,
// I don't know how to read the ':bar',
'foo/:bar'
// Or handle all complex routings, such as
'foo/:bar/and/:so/on'
I could dig in real deep and try to code this myself, or try to read through all of express' source code and find where they do it, but I am sure it exists by itself. Just don't know where to find it.
UPDATE
robertklep provided a great answer which totally solved this for me. I adapted it into a full solution, which I posted in an answer below.
You can use the Express router class to do the heavy lifting:
var io = require('socket.io').listen(...);
var express = require('express');

var sioRouter = new express.Router();

sioRouter.get('/foo/:bar', function (socket, params) {
  socket.emit('response', 'hello from /foo/' + params.bar);
});

io.sockets.on('connection', function (socket) {
  socket.on('GET', function (url) {
    // see if sioRouter has a route for this url:
    var route = sioRouter.match('GET', url);
    // if so, call its (first) callback (the route handler):
    if (route && route.callbacks.length) {
      route.callbacks[0](socket, route.params);
    }
  });
});

// client-side
var socket = io.connect();
socket.emit('GET', '/foo/helloworld');
You can obviously pass in extra data with the request and pass that to your route handlers as well (as an extra parameter for example).
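For example, a sketch built on the code above (the token payload is made up):
// client-side: send a payload along with the synthetic GET
socket.emit('GET', '/foo/helloworld', { token: 'abc' });

// server-side: receive it and hand it to the route handler as an extra argument
socket.on('GET', function (url, data) {
  var route = sioRouter.match('GET', url);
  if (route && route.callbacks.length) {
    route.callbacks[0](socket, route.params, data);
  }
});
The route handler registered with sioRouter.get would then take (socket, params, data).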
robertklep provided a great answer which totally solved this for me. I adapted it into a full solution, which is below in case others want to do something similar:
Node (server side):
// Extend Express' Router to a simple name
app.sio = new express.Router();
app.sio.socketio = require('socket.io').listen(server, { log: false });

// Map all sockets requests to HTTP verbs, which parse
// the request and pass it into a simple callback.
app.sio.socketio.sockets.on('connection', function (socket) {
  var verbs = ['GET', 'POST', 'PUT', 'PATCH', 'DELETE'];
  for (var i = 0; i < verbs.length; ++i) {
    var go = function (verb) {
      socket.on(verb, function (url, data) {
        var route = app.sio.match(verb, url);
        if (route && route.callbacks.length) {
          var req = { url: url, params: route.params, data: data, socket: socket };
          route.callbacks[0](req);
        }
      });
    }(verbs[i]);
  }
});

// Simplify Socket.IO's 'emit' function and liken
// it to traditional Express routing.
app.sio.end = function (req, res) {
  req.socket.emit('response', req.url, res);
};

// Here's an example of a simplified request now, which
// looks nearly just like a traditional Express request.
app.sio.get('/foo/:bar', function (req) {
  app.sio.end(req, 'You said schnazzy was ' + req.data.schnazzy);
});
Client side:
// Instantiate Socket.IO
var socket = io.connect('http://xxxxxx');
socket.callbacks = {};

// Similar to the server side, map functions
// for each 'HTTP verb' request and handle the details.
var verbs = ['get', 'post', 'put', 'patch', 'delete'];
for (var i = 0; i < verbs.length; ++i) {
  var go = function (verb) {
    socket[verb] = function (url, data, cb) {
      socket.emit(String(verb).toUpperCase(), url, data);
      if (cb !== undefined) {
        socket.callbacks[url] = cb;
      }
    };
  }(verbs[i]);
}

// All server responses funnel to this function,
// which properly routes the data to the correct
// callback function declared in the original request.
socket.on('response', function (url, data) {
  if (socket.callbacks[url] != undefined) {
    socket.callbacks[url](data);
  }
});

// Implementation example, params are:
// 1. 'REST' URL,
// 2. Data passed along,
// 3. Callback function that will trigger
//    every time this particular request URL
//    gets a response.
socket.get('/foo/bar', { schnazzy: true }, function (data) {
  console.log(data); // -> 'You said schnazzy was true'
});
Thanks for your help, robertklep!
