I use the request library to communicate with other servers via their APIs. But now I need to send multiple (10 or more) POST requests at the same time and proceed only if all the responses are correct. Usually the syntax looks a bit like this:
var options = {
    url: "",
    method: "POST",
    headers: {...},
    body: {...}
};

request(options, function(err, response, body) {
    // handle the response here
});
But now I've got an array of options objects instead of a single options variable. Is there a way to do this? Or maybe there is another library that can handle this.
EDIT:
var arrayOfIds = [];
const requests = [];

for (var i in range) {
    var options = {}; // here go all the bodies and headers to send

    // push a request to the array dynamically
    requests.push(request(options, function(err, response, body) {
        if (!err && response.statusCode == 201) {
            arrayOfIds.push(body.id);
        }
    }));
}

Promise.all(requests)
    .then(function(res) {
        console.log(arrayOfIds); // this is empty
    });
There are several approaches to solve this:
- the async library's parallel method (see the sketch right after this list)
- Promise.all
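For the first approach, here is a minimal sketch using the async module (optionsArray is a hypothetical array holding your prepared options objects):

const async = require('async');
const request = require('request');

// Wrap each POST in a task function that reports to async's callback.
const tasks = optionsArray.map(function(options) {
    return function(done) {
        request(options, function(err, response, body) {
            if (err || response.statusCode != 201) {
                return done(err || new Error('unexpected status ' + response.statusCode));
            }
            done(null, body.id);
        });
    };
});

// Run all tasks in parallel; the final callback fires once, with either
// the first error or an array of results in task order.
async.parallel(tasks, function(err, ids) {
    if (err) return console.error(err);
    console.log(ids);
});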
Note that in your EDIT, request() called with a callback returns a Request object, not a promise, so Promise.all(requests) resolves immediately, before any response has arrived; that is why arrayOfIds is still empty when it is logged. To switch your requests to promises, use the request-promise module in addition to request. In code it will look like this:
const request = require('request-promise');
// Note, you don't assign callback here
const promises = [
    request.post({...}),
    request.post({...}),
    request.post({...})
];
// And then you simply do Promise.all
Promise.all(promises).then(console.log);
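Alternatively, if you would rather not add another dependency, you can wrap each callback-style request in a native Promise yourself. A sketch, again assuming a hypothetical optionsArray:

const promises = optionsArray.map(function(options) {
    return new Promise(function(resolve, reject) {
        request(options, function(err, response, body) {
            if (err || response.statusCode != 201) {
                return reject(err || new Error('unexpected status ' + response.statusCode));
            }
            resolve(body.id);
        });
    });
});

// Resolves with the ids in request order, or rejects on the first failure.
Promise.all(promises).then(function(ids) {
    console.log(ids);
});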
If I'm using Node.js, is there a way I can automatically turn a set of options for the request function into the final URL that Node.js will use for its HTTP request?
That is, if I have a set of options that I use like this:
var http = require('http');

var options = {
    host: 'www.random.org',
    path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};

const callback = function(response) {
    var str = '';

    // another chunk of data has been received, so append it to `str`
    response.on('data', function(chunk) {
        str += chunk;
    });

    // the whole response has been received, so we just print it out here
    response.on('end', function() {
        console.log(str);
    });
};

const req = http.request(options, callback).end();
Is there a way for me to transform
var options = {
    host: 'www.random.org',
    path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};
Into the string
www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new
I realize for the above case this would be a trivial string concatenation.
const url = 'https://' + options.host + options.path;
What I'm interested in is code that can transform any options object into its final URL. If I look at the manual, there are twenty-one possible options for a request. Some might impact the final URL; some might not. I'm hoping Node.js or npm has a built-in way of turning those options into a URL, saving me the tedious work of doing it myself.
Node.js originally offered the querystring module, which has functions that seem to do what you need. For instance, the stringify function:
https://nodejs.org/dist/latest-v15.x/docs/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options
querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' });
// Returns 'foo=bar&baz=qux&baz=quux&corge='
More recently, objects like URLSearchParams were introduced in the url module to better align with the WHATWG spec and therefore be more in line with the APIs available in browsers:
https://nodejs.org/dist/latest-v15.x/docs/api/url.html#url_class_urlsearchparams
const myURL = new URL('https://example.org/?abc=123');
console.log(myURL.searchParams.get('abc'));
// Prints 123
myURL.searchParams.append('abc', 'xyz');
console.log(myURL.href);
// Prints https://example.org/?abc=123&abc=xyz
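For the question's specific case, a minimal sketch that combines the two fields with the WHATWG URL class (this assumes only host and path influence the final URL, and that the protocol is https):

const options = {
    host: 'www.random.org',
    path: '/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new'
};

// The URL constructor resolves the path against a base built from the host.
const finalUrl = new URL(options.path, 'https://' + options.host).href;
console.log(finalUrl);
// Prints https://www.random.org/integers/?num=1&min=1&max=10&col=1&base=10&format=plain&rnd=new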
The approach you'll choose in the end depends on your specific business needs.
I'm querying Solr 7.5 for some large objects and would like to render them in a browser UI as they are returned.
What are my options for reading the response bit by bit when using the select request handler?
I don't think there is anything native to Solr to do what you are asking.
One approach to handle this would be to return only the IDs of the documents that match your query's criteria (and not include the heavy part of each document), and then fetch the large part of each document asynchronously from the client.
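A minimal sketch of that approach from the browser, reusing the endpoint from the question (fl=id asks Solr to return only the id field; renderDoc is a hypothetical UI hook):

const base = 'https://my-solr7/people/select';

// 1. Fetch only the ids of the matching documents.
fetch(base + '?q=something&fl=id&wt=json')
    .then(function(res) { return res.json(); })
    .then(function(data) {
        // 2. Fetch each heavy document asynchronously and render it as it arrives.
        data.response.docs.forEach(function(doc) {
            fetch(base + '?q=id:' + encodeURIComponent(doc.id) + '&wt=json')
                .then(function(res) { return res.json(); })
                .then(function(full) { renderDoc(full.response.docs[0]); });
        });
    });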
It turns out I was looking in the wrong place; I just needed to read up on the web API's fetch().
response.json() reads the response to completion.
response.body.getReader() lets you grab the stream in chunks and decode it from there.
let test = 'https://my-solr7/people/select?q=something';
fetchStream(test);

function fetchStream(uri) {
    const options = {
        method: 'GET',
    };
    const decoder = new TextDecoder();

    fetch(uri, options)
        .then((response) => {
            let read;
            const reader = response.body.getReader();
            reader.read().then(read = (result) => {
                if (result.done) return;
                // decode this chunk; stream: true keeps the decoder's state
                // for multi-byte characters split across chunks
                let chunk = decoder.decode(result.value || new Uint8Array(), { stream: !result.done });
                console.log(chunk);
                reader.read().then(read);
            });
        });
}
I have a controller.js file that is trying to get data from my feed-service.js file. FEEDLY_USER_ID and FEEDLY_ACCESS_TOKEN are accessible and defined in a separate config.js file.
controller.js:
$scope.feedlyGlobalAll = FeedService.getGlobalAll();
feed-service.js:
var request = require('request');

var globalOptions = {
    url: 'https://cloud.feedly.com/v3/streams/contents?streamId=user/' + FEEDLY_USER_ID + '/category/global.all',
    auth: {
        'bearer': FEEDLY_ACCESS_TOKEN
    }
};

service.getGlobalAll = function() {
    request.get(globalOptions, function(error, response, body) {
        if (!error && response.statusCode == 200) {
            service.globalAll = JSON.parse(body);
            return service.globalAll;
        }
        // error code here //
    });
};
I'm using an npm package called "request" to make the GET because I couldn't get https.get() to work. This Feedly API call requires the URL, my user ID, and an access token passed in the header.
I've been reading, and apparently I'm supposed to use callbacks or promises, but I can't get either of them to work. With http.get(), I can use promises via http.get().then(yada yada), which works for the forecast.io call I'm making elsewhere. The request module apparently doesn't allow .then(); I typically run into "TypeError: .then() is not a function".
When I tried doing https.get(), here is the code I was using. I was never able to acquire a successful response.
var url = {
url: 'https://cloud.feedly.com/v3/streams/contents?streamId=user/' + FEEDLY_USER_ID + '/category/global.all',
'headers': {
'Authorization': 'Bearer ' + FEEDLY_ACCESS_TOKEN
}
};
https = require('https');
https.get(url).then(yada yada)
I tried numerous things in the url variable for the https.get() call. I tried with and without quotes, and I tried just auth: bearer token (as works with the request module), but I wasn't able to get it to work.
The solution to this issue would be either:
Callback from feed-service to controller using existing request module code.
Figuring out the correct way to form the url for the https.get request and then I can utilize its promise function.
Use a promise inside your request callback:
service.getGlobalAll = function() {
return $q(function(resolve, reject) {
request.get(globalOptions, function(error, response, body){
if(!error && response.statusCode == 200){
service.globalAll = JSON.parse(body);
resolve(service.globalAll);
} else {
reject();
}
});
});
};
$q (Angular's promise API) can be injected as a dependency in your service. The above code returns a promise that resolves when the library's AJAX call completes, so you can consume it with .then().
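In the controller, the call then becomes asynchronous, so assign the result inside .then() rather than directly:

// controller.js
FeedService.getGlobalAll().then(function(globalAll) {
    $scope.feedlyGlobalAll = globalAll;
});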
I'm new to Node.js and am working on a project and I'd like to use Request to stream from one endpoint to another. My goal is to use Request to get and post an edited JSON body using a pipe stream. I know that when doing so, content-type and content-length will be preserved in the POST headers. However, I would like to apply .forEach to all JSON objects in the body from the first url, and post them to the second url.
I'm not sure about the correct format, and I'd appreciate some clarification.
I know the basic syntax is this:
request.get('URL').pipe(request.post('URL'));
And so far my best guess is something like this:
request('FIRST_URL', function (error, response, body) {
body = JSON.parse(body);
body.forEach( function(arg) {
//return edited body
});
}).pipe(request.post('SECOND_URL'));
Am I missing something? Is there a better way to do this?
You could write your own transform stream. For example:
var Transform = require('stream').Transform;
var inherits = require('util').inherits;

function JSONTransform() {
    Transform.call(this);
    this._buffer = '';
}
inherits(JSONTransform, Transform);

// Buffer the entire response body as it streams in.
JSONTransform.prototype._transform = function(chunk, enc, cb) {
    this._buffer += chunk;
    cb();
};

// Once the upstream ends, parse, edit, and re-serialize the body.
JSONTransform.prototype._flush = function(cb) {
    try {
        var result = JSON.parse(this._buffer);
        this._buffer = null;
        // Do whatever transformations
        // ...
        this.push(JSON.stringify(result));
        cb();
    } catch (ex) {
        cb(ex);
    }
};
// Then just pipe
request.get('FIRST_URL')
    .pipe(new JSONTransform())
    .pipe(request.post('SECOND_URL'));
One other slightly different solution that may be worth considering would be to use a third-party streaming JSON parser module, which may or may not work for your use case.
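For instance, a sketch with the JSONStream and event-stream packages, assuming the response body is a top-level JSON array:

var JSONStream = require('JSONStream');
var es = require('event-stream');

request.get('FIRST_URL')
    // emit each element of the top-level array as a separate object
    .pipe(JSONStream.parse('*'))
    // edit each object as it streams through
    .pipe(es.mapSync(function(obj) {
        obj.edited = true; // whatever transformation you need
        return obj;
    }))
    // re-serialize the objects back into a JSON array
    .pipe(JSONStream.stringify())
    .pipe(request.post('SECOND_URL'));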
I have a Node.js / Express app working, that receives routes like so:
app.get('/resource/:res', someFunction);
app.get('/foo/bar/:id', someOtherFunction);
This is great and works fine.
I am also using Socket.IO, and want to have some server calls use websockets instead of traditional RESTful calls. However, I want to make it very clean and use almost the same syntax, preferably:
app.sio.get('/resource/:res', someFunction);
This will give a synthetic 'REST' interface to Socket.IO where, from the programmer's perspective, they aren't doing anything different; they just flag websockets: true from the client.
I can deal with all the details, such as a custom way to pass in the request verbs and parse them, and so on; I don't have a problem with that. The only thing I am looking for is some function that can parse routes the way Express does, and route them properly. For example:
// I don't know how to read the ':bar',
'foo/:bar'
// Or handle more complex routings, such as
'foo/:bar/and/:so/on'
I could dig in deep and try to code this myself, or read through all of Express' source code to find where they do it, but I am sure it exists as a standalone piece; I just don't know where to find it.
UPDATE
robertklep provided a great answer which totally solved this for me. I adapted it into a full solution, which I posted in an answer below.
You can use the Express router class to do the heavy lifting:
var io = require('socket.io').listen(...);
var express = require('express');
var sioRouter = new express.Router();
sioRouter.get('/foo/:bar', function(socket, params) {
    socket.emit('response', 'hello from /foo/' + params.bar);
});

io.sockets.on('connection', function(socket) {
    socket.on('GET', function(url) {
        // see if sioRouter has a route for this url:
        var route = sioRouter.match('GET', url);
        // if so, call its (first) callback (the route handler):
        if (route && route.callbacks.length) {
            route.callbacks[0](socket, route.params);
        }
    });
});
// client-side
var socket = io.connect();
socket.emit('GET', '/foo/helloworld');
You can obviously pass in extra data with the request and pass that to your route handlers as well (as an extra parameter for example).
robertklep provided a great answer which totally solved this for me. I adapted it into a full solution, which is below in case others want to do something similar:
Node (server side):
// Extend Express' Router to a simple name
app.sio = new express.Router();
app.sio.socketio = require('socket.io').listen(server, { log: false });
// Map all sockets requests to HTTP verbs, which parse
// the request and pass it into a simple callback.
app.sio.socketio.sockets.on('connection', function(socket) {
    var verbs = ['GET', 'POST', 'PUT', 'PATCH', 'DELETE'];
    for (var i = 0; i < verbs.length; ++i) {
        var go = function(verb) {
            // bind the verb in a closure so each handler sees its own copy
            socket.on(verb, function(url, data) {
                var route = app.sio.match(verb, url);
                if (route && route.callbacks.length) {
                    var req = { url: url, params: route.params, data: data, socket: socket };
                    route.callbacks[0](req);
                }
            });
        }(verbs[i]);
    }
});
// Simplify Socket.IO's 'emit' function and liken
// it traditional Express routing.
app.sio.end = function(req, res) {
    req.socket.emit('response', req.url, res);
};
// Here's an example of a simplified request now, which
// looks nearly just like a traditional Express request.
app.sio.get('/foo/:bar', function(req) {
    app.sio.end(req, 'You said schnazzy was ' + req.data.schnazzy);
});
Client side:
// Instantiate Socket.IO
var socket = io.connect('http://xxxxxx');
socket.callbacks = {};
// Similar to the server side, map functions
// for each 'HTTP verb' request and handle the details.
var verbs = ['get', 'post', 'put', 'patch', 'delete'];
for (var i = 0; i < verbs.length; ++i) {
    var go = function(verb) {
        socket[verb] = function(url, data, cb) {
            socket.emit(String(verb).toUpperCase(), url, data);
            if (cb !== undefined) {
                socket.callbacks[url] = cb;
            }
        };
    }(verbs[i]);
}
// All server responses funnel to this function,
// which properly routes the data to the correct
// callback function declared in the original request.
socket.on('response', function(url, data) {
    if (socket.callbacks[url] != undefined) {
        socket.callbacks[url](data);
    }
});
// Implementation example, params are:
// 1. 'REST' URL,
// 2. Data passed along,
// 3. Callback function that will trigger
// every time this particular request URL
// gets a response.
socket.get('/foo/bar', { schnazzy: true }, function(data) {
    console.log(data); // -> 'You said schnazzy was true'
});
Thanks for your help, robertklep!