I'm trying to write a script to download images using node.js. This is what I have so far:
var maxLength = 10 // 10mb
var download = function(uri, callback) {
http.request(uri)
.on('response', function(res) {
if (res.headers['content-length'] > maxLength*1024*1024) {
callback(new Error('Image too large.'))
} else if (!~[200, 304].indexOf(res.statusCode)) {
callback(new Error('Received an invalid status code.'))
} else if (!res.headers['content-type'].match(/image/)) {
callback(new Error('Not an image.'))
} else {
var body = ''
res.setEncoding('binary')
res
.on('error', function(err) {
callback(err)
})
.on('data', function(chunk) {
body += chunk
})
.on('end', function() {
// What about Windows?!
var path = '/tmp/' + Math.random().toString().split('.').pop()
fs.writeFile(path, body, 'binary', function(err) {
callback(err, path)
})
})
}
})
.on('error', function(err) {
callback(err)
})
.end();
}
However, I want to make this more robust:
Are there libraries that do this and do this better?
Is there a chance that response headers lie (about length, about content type)?
Are there any other status codes I should care about? Should I bother with redirects?
I think I read somewhere that binary encoding is going to be deprecated. What do I do then?
How can I get this to work on Windows?
Any other ways you can make this script better?
Why: this is for a feature similar to Imgur, where users give me a URL, I download that image, and I rehost it in multiple sizes.
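Regarding the Windows question: one cross-platform option is to build the temp path with os.tmpdir() and path.join instead of hard-coding '/tmp/'. A minimal sketch of how the temp path from the snippet above could be built (names are illustrative):
var os = require('os');
var path = require('path');

// Build a temp-file path that works on both Linux and Windows
var tmpName = Math.random().toString().split('.').pop();
var tmpPath = path.join(os.tmpdir(), tmpName);
console.log(tmpPath); // e.g. /tmp/1234... on Linux, C:\Users\...\Temp\1234... on Windows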
I'd suggest using the request module. Downloading a file is as simple as the following code:
var fs = require('fs'),
request = require('request');
var download = function(uri, filename, callback){
request.head(uri, function(err, res, body){
console.log('content-type:', res.headers['content-type']);
console.log('content-length:', res.headers['content-length']);
request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
});
};
download('https://www.google.com/images/srpr/logo3w.png', 'google.png', function(){
console.log('done');
});
I ran into this problem a few days ago. For a pure Node.js answer, I would suggest using a Transform stream to merge the chunks together.
var http = require('http'),
Stream = require('stream').Transform,
fs = require('fs');
var url = 'http://www.google.com/images/srpr/logo11w.png';
http.request(url, function(response) {
var data = new Stream();
response.on('data', function(chunk) {
data.push(chunk);
});
response.on('end', function() {
fs.writeFileSync('image.png', data.read());
});
}).end();
The newest Node versions don't handle binary strings well, so merging chunks as strings is not a good idea when working with binary data.
Just be careful when using data.read(): it empties the stream for the next read() operation. If you want to use the result more than once, store it somewhere.
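If you prefer not to rely on the Transform stream as a buffer, a common alternative is to collect the raw Buffer chunks in an array and join them with Buffer.concat once the response ends. A minimal sketch (same Google logo URL as above):
var http = require('http'),
  fs = require('fs');

http.get('http://www.google.com/images/srpr/logo11w.png', function (response) {
  var chunks = [];
  response.on('data', function (chunk) {
    chunks.push(chunk); // each chunk is a Buffer; no string conversion involved
  });
  response.on('end', function () {
    fs.writeFile('image.png', Buffer.concat(chunks), function (err) {
      if (err) console.error(err);
    });
  });
}).on('error', function (err) {
  console.error(err);
});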
You can use Axios (a promise-based HTTP client for Node.js) to download images in the order of your choosing in an asynchronous environment:
npm i axios
Then, you can use the following basic example to begin downloading images:
const fs = require('fs');
const axios = require('axios');
/* ============================================================
Function: Download Image
============================================================ */
const download_image = (url, image_path) =>
axios({
url,
responseType: 'stream',
}).then(
response =>
new Promise((resolve, reject) => {
response.data
.pipe(fs.createWriteStream(image_path))
.on('finish', () => resolve())
.on('error', e => reject(e));
}),
)
// Resolve with a small result object so the status/error checks below work
.then(() => ({ status: true, error: '' }))
.catch(e => ({ status: false, error: e.message }));
/* ============================================================
Download Images in Order
============================================================ */
(async () => {
let example_image_1 = await download_image('https://example.com/test-1.png', 'example-1.png');
console.log(example_image_1.status); // true
console.log(example_image_1.error); // ''
let example_image_2 = await download_image('https://example.com/does-not-exist.png', 'example-2.png');
console.log(example_image_2.status); // false
console.log(example_image_2.error); // e.g. 'Request failed with status code 404'
let example_image_3 = await download_image('https://example.com/test-3.png', 'example-3.png');
console.log(example_image_3.status); // true
console.log(example_image_3.error); // ''
})();
var fs = require('fs'),
http = require('http'),
https = require('https');
var Stream = require('stream').Transform;
var downloadImageToUrl = (url, filename, callback) => {
var client = http;
if (url.toString().indexOf("https") === 0){
client = https;
}
client.request(url, function(response) {
var data = new Stream();
response.on('data', function(chunk) {
data.push(chunk);
});
response.on('end', function() {
fs.writeFileSync(filename, data.read());
if (callback) callback();
});
}).end();
};
downloadImageToUrl('https://www.google.com/images/srpr/logo11w.png', 'public/uploads/users/abc.jpg');
If you want to show download progress, try this:
var fs = require('fs');
var request = require('request');
var progress = require('request-progress');
module.exports = function (uri, path, onProgress, onResponse, onError, onEnd) {
progress(request(uri))
.on('progress', onProgress)
.on('response', onResponse)
.on('error', onError)
.on('end', onEnd)
.pipe(fs.createWriteStream(path))
};
How to use it:
var download = require('../lib/download');
download("https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_150x54dp.png", "~/download/logo.png", function (state) {
console.log("progress", state);
}, function (response) {
console.log("status code", response.statusCode);
}, function (error) {
console.log("error", error);
}, function () {
console.log("done");
});
Note: you should install both the request and request-progress modules first:
npm install request request-progress --save
This is an extension of Cezary's answer. If you want to download the image to a specific directory, use this. Also, use const instead of var; it's safer that way.
const fs = require('fs');
const request = require('request');
var download = function(uri, filename, callback){
request.head(uri, function(err, res, body){
request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
});
};
download('https://www.google.com/images/srpr/logo3w.png', './images/google.png', function(){
console.log('done');
});
Building on the above, if anyone needs to handle errors in the write/read streams, I used this version. Note the stream.read() call in the write-error handler; it's required so we can finish reading and trigger 'close' on the read stream.
var download = function(uri, filename, callback){
request.head(uri, function(err, res, body){
if (err) callback(err, filename);
else {
var stream = request(uri);
stream.pipe(
fs.createWriteStream(filename)
.on('error', function(err){
callback(err, filename);
stream.read();
})
)
.on('close', function() {
callback(null, filename);
});
}
});
};
I'm trying to upload images from a URL to the server safely.
var http = require('http');
var fs = require('fs');
var path = require('path')
var download = function(url, dest, filename, cb) {
var file = fs.createWriteStream(dest + "/" + filename + path.extname(url));
var request = http.get(url, function(response) {
response.pipe(file);
file.on('error', function(err) {
console.log(err.message);
file.end();
});
file.on('finish', function() {
file.close(cb);
});
}).on('error', function(err) { // Handle errors
fs.unlink(dest);
if (cb) cb(err.message);
});
}
var url = "https://vignette.wikia.nocookie.net/spongebob/images/d/d7/SpongeBob_stock_art.png";
download(url, 'downloadedAssets/imgs', 'spongebob', function onComplete(err) {
if (err) {
console.log(err.message);
} else {
console.log('image uploaded to server');
}
});
This crashes the server with:
TypeError [ERR_INVALID_PROTOCOL]: Protocol "https:" not supported. Expected "http:"
I understand I can't fetch HTTPS URLs this way, but why does it crash the server instead of executing file.on('error') and then ending the file? I also tried try/catch and got the same result.
It crashes at http.get(url, function(){...}) because you are using the http module. You need to use the https module to make a GET request to an https URL.
const https = require('https');
https.get('https://encrypted.google.com/', (res) => {
console.log('statusCode:', res.statusCode);
console.log('headers:', res.headers);
res.on('data', (d) => {
process.stdout.write(d);
});
}).on('error', (e) => {
console.error(e);
});
You can use the request module, which supports both HTTP and HTTPS. From the docs:
Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
const request = require('request');
const options = {
url: 'https://api.github.com/repos/request/request',
headers: {
'User-Agent': 'request'
}
};
function callback(error, response, body) {
if (!error && response.statusCode == 200) {
const info = JSON.parse(body);
console.log(info.stargazers_count + " Stars");
console.log(info.forks_count + " Forks");
}
}
request(options, callback);
Or to use Promises out of the box, use axios.
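A minimal axios sketch of the same GitHub call (the repo URL and header mirror the example above; this is only an illustration, not the full axios API):
const axios = require('axios');

axios.get('https://api.github.com/repos/request/request', {
  headers: { 'User-Agent': 'request' }
}).then((response) => {
  // axios parses the JSON body for you
  console.log(response.data.stargazers_count + ' Stars');
  console.log(response.data.forks_count + ' Forks');
}).catch((error) => {
  console.error(error.message);
});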
fs.createWriteStream is working absolutely fine, so it is not the part throwing exceptions or errors.
The error comes from the http.get() call, because you are trying to access an 'https' URL with the http module. Put a debugger inside the http.get() call and you will see the error being thrown.
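A small sketch of that idea: select the client module based on the URL's protocol before calling get() (this mirrors the protocol check shown in an earlier answer; the URL is just a placeholder):
const http = require('http');
const https = require('https');

// Pick the module that matches the URL's protocol to avoid ERR_INVALID_PROTOCOL
function get(url, callback) {
  const client = url.startsWith('https') ? https : http;
  return client.get(url, callback);
}

get('https://www.google.com/images/srpr/logo11w.png', (res) => {
  console.log('statusCode:', res.statusCode);
  res.resume(); // discard the body in this sketch
}).on('error', (err) => console.error(err));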
How do I download an audio file from a URL and store it in a local directory?
I'm using Node.js and I tried the following code:
var http = require('http');
var fs = require('fs');
var dest = 'C./test'
var url= 'http://static1.grsites.com/archive/sounds/comic/comic002.wav'
function download(url, dest, callback) {
var file = fs.createWriteStream(dest);
var request = http.get(url, function (response) {
response.pipe(file);
file.on('finish', function () {
file.close(callback); // close() is async, call callback after close completes.
});
file.on('error', function (err) {
fs.unlink(dest); // Delete the file async. (But we don't check the result)
if (callback)
callback(err.message);
});
});
}
No error occurred, but the file cannot be found.
Duplicate of How to download a file with Node.js (without using third-party libraries)?, but here is the code specific to your question:
var http = require('http');
var fs = require('fs');
var file = fs.createWriteStream("file.wav");
var request = http.get("http://static1.grsites.com/archive/sounds/comic/comic002.wav", function(response) {
response.pipe(file);
});
Your code is actually fine; you just never call the download function. Try adding this to the end:
download(url, dest, function(err){
if(err){
console.error(err);
}else{
console.log("Download complete");
}
});
Also, change the value of dest to something else, like just "test.wav". 'C./test' is a bad path.
I tried it on my machine, and your code works fine after adding the call and changing dest.
Here is an example using Axios with an API that may require authorization:
const Fs = require('fs');
const Path = require('path');
const Axios = require('axios');
async function download(url) {
let filename = "filename";
const username = "user";
const password = "password"
const key = Buffer.from(username + ':' + password).toString("base64");
const path = Path.resolve(__dirname, "audio", filename)
const response = await Axios({
method: 'GET',
url: url,
responseType: 'stream',
headers: { 'Authorization': 'Basic ' + key }
})
response.data.pipe(Fs.createWriteStream(path))
return new Promise((resolve, reject) => {
response.data.on('end', () => {
resolve();
})
response.data.on('error', (err) => {
reject(err);
})
})
}
I've got an audio file which I post to a server for translation. I've managed to create the request in Postman, but I do not know how to write the file to this server. Below is the code I have so far:
var http = require("https");
var options = {}
var req = http.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function () {
var body = Buffer.concat(chunks);
console.log(body.toString());
});
});
The options object is filled with method/hostname/port, of course. In Postman I attach a "binary" file, but I cannot figure out how to write a file to the request in Node.js.
One solution that would fit into your current program without too much work is to make use of the form-data module on npm.
The form-data module makes multipart requests easy in Node. The following is a simple example of how to use it.
var http = require("https");
var FormData = require('form-data');
var fs = require('fs')
var form = new FormData();
form.append('my_field', fs.createReadStream('my_audio.file'));
var options = {
host: 'your.host',
port: 443,
method: 'POST',
// IMPORTANT!
headers: form.getHeaders()
}
var req = http.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function () {
var body = Buffer.concat(chunks);
console.log(body.toString());
});
});
// Pipe form to request
form.pipe(req);
In a "real-world" scenario you would want to do a lot more error checking. Also, there are plenty of other HTTP clients on npm that make this process easy as well (the request module uses form-data, by the way). Check out request and got if you are interested.
For sending a binary request the fundamentals are still the same, req is a writable stream. As such, you can pipe data into the stream, or write directly with req.write(data). Here's an example.
var http = require('https');
var fs = require('fs');
var options = {
// ...
headers: {
'Content-Type': 'application/octet-stream'
}
}
var req = http.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function () {
var body = Buffer.concat(chunks);
console.log(body.toString());
});
});
var audioFile = fs.createReadStream('my_audio.file', { encoding: 'binary' });
audioFile.pipe(req);
Note that if you use the write method explicitly, req.write(data), you must call req.end() yourself. Also, you may want to take a look at the encoding options for Node's Buffer (docs).
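For completeness, a rough sketch of the explicit req.write(data) variant (the host, path, and file name are placeholders, and the response body is ignored):
var https = require('https');
var fs = require('fs');

var options = {
  host: 'your.host',   // placeholder
  port: 443,
  path: '/upload',     // placeholder
  method: 'POST',
  headers: { 'Content-Type': 'application/octet-stream' }
};

var req = https.request(options, function (res) {
  res.resume(); // ignore the response body in this sketch
  res.on('end', function () {
    console.log('upload finished with status', res.statusCode);
  });
});

req.on('error', function (err) {
  console.error(err);
});

fs.readFile('my_audio.file', function (err, data) {
  if (err) throw err;
  req.write(data); // write the whole Buffer explicitly...
  req.end();       // ...and remember to end the request yourself
});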
You can use the request package on npm.
Install the request module from npm:
npm install request --save
Then use the request module to send your request.
Have a look at https://www.npmjs.com/package/request for details on implementation.
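As a hedged sketch, request exposes a formData option (backed by form-data under the hood) that covers the same multipart case; the URL, field name, and file name below are placeholders:
var fs = require('fs');
var request = require('request');

request.post({
  url: 'https://your.host/upload',   // placeholder URL
  formData: {
    my_field: fs.createReadStream('my_audio.file')   // placeholder field and file
  }
}, function (err, res, body) {
  if (err) return console.error('Upload failed:', err);
  console.log('Status:', res.statusCode);
  console.log(body);
});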
Thanks @undefined, your answer really helped me.
I am posting the solution that worked for me to send the file to another server using axios. Ignore the type annotations; I had TypeScript enabled for my project.
export const fileUpload: RequestHandler = async (req: Request, res: Response, next: NextFunction) => {
const chunks: any[] = [];
req.on('data', (chunk) => chunks.push(chunk));
req.on('end', () => {
const data = Buffer.concat(chunks);
axios.put("ANOTHER_SERVER_URL", data).then((response) => {
console.log('Success', response);
}).catch(error => {
console.log('Failure', error);
});
});
return res.status(200).json({});
};
Thanks, hope it helps!
I am trying to download a file from Google Drive using the Google SDK API with Node.js, but I am unable to write/save the file on the server side.
Code:
var GoogleTokenProvider = require("refresh-token").GoogleTokenProvider,
async = require('async'),
fs = require("fs"),
request = require('request'),
_accessToken;
var _XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var https = require('https');
const CLIENT_ID = "";
const CLIENT_SECRET = "";
const REFRESH_TOKEN = '';
const ENDPOINT_OF_GDRIVE = 'https://www.googleapis.com/drive/v2';
async.waterfall([
//-----------------------------
// Obtain a new access token
//-----------------------------
function(callback) {
var tokenProvider = new GoogleTokenProvider({
'refresh_token': REFRESH_TOKEN,
'client_id': CLIENT_ID,
'client_secret': CLIENT_SECRET
});
tokenProvider.getToken(callback);
},
//--------------------------------------------
// Retrieve the children in a specified folder
//
// ref: https://developers.google.com/drive/v2/reference/files/children/list
//-------------------------------------------
function(accessToken, callback) {
_accessToken = accessToken;
request.get({
'url': ENDPOINT_OF_GDRIVE + '/files?' + "q='root' in parents and (mimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document')",
'qs': {
'access_token': accessToken
}
}, callback);
},
//----------------------------
// Parse the response
//----------------------------
function(response, body, callback) {
var list = JSON.parse(body);
if (list.error) {
return callback(list.error);
}
callback(null, list.items[0]);
},
//-------------------------------------------
// Get the file information of the children.
//
// ref: https://developers.google.com/drive/v2/reference/files/get
//-------------------------------------------
function(children, callback) {
var xhr = new _XMLHttpRequest();
xhr.open('GET', children.downloadUrl);
xhr.setRequestHeader('Authorization', 'Bearer ' + _accessToken);
xhr.onload = function() {
console.log("xhr.responseText", xhr.responseText)
fs.writeFile("download.docx", xhr.responseText)
callback(xhr.responseText);
};
xhr.onerror = function() {
callback(null);
};
xhr.send();
}
],
function(err, results) {
if (!err) {
console.log(results);
}
});
I am getting this in the console. The content of xhr.responseText looks something like this:
��▬h��↕E6M��~��3�3∟�9�� � �►��/2�:���♂�4��]�♀I�R���►
$SB6Q���c↔��H�=;+
���►q�3Tdכ��#!T��hEl_�|�I�↨��h(�^:▬�[h̓D♠��f���♠*���ݾ��M→
�1⌂♦"N�↑�o�]�7U$��A6����♠�W��k`�f▬♫��K�Z�^‼�0{<Z�▼�]F�����
���J♥A♀��♣�a�}7�
"���H�w"�♥���☺w♫̤ھ�� �P�^����O֛���;�<♠�aYՠ؛`G�kxm��PY�[��g
Gΰino�/<���<�1��ⳆA$>"f3��\�ȾT��∟I S�������W♥����Y
Please help me understand what format the data from the Drive API is in, and how I should write it so that I get a complete .docx file.
Edit: I am open to using any method other than XMLHttpRequest if it helps me download the .docx file.
node-XMLHttpRequest, it seems, does not support binary downloads - see this issue. What you are seeing is the file's binary contents converted into a String, which, in JavaScript, is an irreversible and destructive process for binary data (meaning you cannot convert the string back to a buffer and get the same data as the original contents).
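A tiny illustration of why that conversion is destructive: bytes that are not valid UTF-8 get replaced during the string conversion, so the round trip does not reproduce the original buffer.
const original = Buffer.from([0xff, 0x00, 0x81, 0x7f]);
// Invalid UTF-8 sequences are replaced during the string conversion
const roundTripped = Buffer.from(original.toString('utf8'), 'utf8');
console.log(original.equals(roundTripped)); // false - the data was lost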
Using request, you can download a binary file this way:
var request = require('request')
, fs = require('fs')
request.get(
{ url: 'your-file-url'
, encoding: null // Force Request to return the data as Buffer
, headers:
{ Authorization: 'Bearer ' + accessTokenHere
}
}
, function done (err, res) {
// If all is well, the file will be at res.body (buffer)
fs.writeFile('./myfile.docx', res.body, function (err) {
// Handle err somehow
// Do other work necessary to finish the request
})
}
)
Note: This will buffer the whole file into memory before it can be saved to disk. For small files, this is fine, but for larger files, you might want to look into implementing this as a streamed download. This SO question already answers that, I recommend you have a look.
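A rough sketch of that streamed variant with request (same placeholder URL and access token as above), writing chunks to disk as they arrive instead of buffering the whole file:
var request = require('request');
var fs = require('fs');

request
  .get({
    url: 'your-file-url',                                   // placeholder, as above
    headers: { Authorization: 'Bearer ' + accessTokenHere }  // same placeholder token as above
  })
  .on('error', function (err) {
    console.error(err);
  })
  .pipe(fs.createWriteStream('./myfile.docx'))
  .on('close', function () {
    console.log('done');
  });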
More information about how to authorize your requests can be found on Google Developers docs.
Complete working example: downloading a file from Google Drive with the Node.js API
var GoogleTokenProvider = require("refresh-token").GoogleTokenProvider,
async = require('async'),
fs = require("fs"),
request = require('request'),
_accessToken;
const CLIENT_ID = "";
const CLIENT_SECRET = "";
const REFRESH_TOKEN = '';
const ENDPOINT_OF_GDRIVE = 'https://www.googleapis.com/drive/v2';
async.waterfall([
//-----------------------------
// Obtain a new access token
//-----------------------------
function(callback) {
var tokenProvider = new GoogleTokenProvider({
'refresh_token': REFRESH_TOKEN,
'client_id': CLIENT_ID,
'client_secret': CLIENT_SECRET
});
tokenProvider.getToken(callback);
},
//--------------------------------------------
// Retrieve the children in a specified folder
//
// ref: https://developers.google.com/drive/v2/reference/files/children/list
//-------------------------------------------
function(accessToken, callback) {
_accessToken = accessToken;
request.get({
'url': ENDPOINT_OF_GDRIVE + '/files?' + "q='root' in parents and (mimeType = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document')",
'qs': {
'access_token': accessToken
}
}, callback);
},
//----------------------------
// Parse the response
//----------------------------
function(response, body, callback) {
var list = JSON.parse(body);
if (list.error) {
return callback(list.error);
}
callback(null, list.items);
},
//-------------------------------------------
// Get the file information of the children.
//
// ref: https://developers.google.com/drive/v2/reference/files/get
//-------------------------------------------
function(children, callback) {
for (var i = 0; i < children.length; i++) {
// Download and write each file from Google Drive
(function(child) {
request.get(
{ url: child.downloadUrl
, encoding: null // Force Request to return the data as Buffer
, headers:
{ Authorization: 'Bearer ' + _accessToken
}
}
, function done (err, res) {
if (err) {
return console.log(err)
}
// If all is well, the file contents are in res.body (a Buffer)
fs.writeFile('./' + child.title, res.body, function (writeErr) {
if (!writeErr) {
console.log('done')
} else {
console.log(writeErr)
}
// Do other work necessary to finish the request
})
}
)
})(children[i])
}
}
],
function(err, results) {
if (!err) {
console.log(results);
}
});
I was just having issues with this; I've included an example of how I managed to get this working using the Google API Node.js library: https://gist.github.com/davestevens/6f376f220cc31b4a25cd
How can I make an HTTP request from within Node.js or Express.js? I need to connect to another service. I am hoping the call is asynchronous and that the callback contains the remote server's response.
Here is a snippet of some code from a sample of mine. It's asynchronous and returns a JSON object. It can do any form of GET request.
Note that there are more optimal ways (this is just a sample) - for example, instead of concatenating the chunks into a string, you could push them into an array and join them at the end (see the buffered variant sketched after the usage example below). Hopefully, it gets you started in the right direction:
const http = require('http');
const https = require('https');
/**
* getJSON: RESTful GET request returning JSON object(s)
* @param options: http options object
* @param callback: callback to pass the results JSON object(s) back
*/
module.exports.getJSON = (options, onResult) => {
console.log('rest::getJSON');
const port = options.port == 443 ? https : http;
let output = '';
const req = port.request(options, (res) => {
console.log(`${options.host} : ${res.statusCode}`);
res.setEncoding('utf8');
res.on('data', (chunk) => {
output += chunk;
});
res.on('end', () => {
let obj = JSON.parse(output);
onResult(res.statusCode, obj);
});
});
req.on('error', (err) => {
// res.send('error: ' + err.message);
});
req.end();
};
It's called by creating an options object like:
const options = {
host: 'somesite.com',
port: 443,
path: '/some/path',
method: 'GET',
headers: {
'Content-Type': 'application/json'
}
};
And providing a callback function.
For example, in a service, I require the REST module above and then do this:
rest.getJSON(options, (statusCode, result) => {
// I could work with the resulting HTML/JSON here. I could also just return it
console.log(`onResult: (${statusCode})\n\n${JSON.stringify(result)}`);
res.statusCode = statusCode;
res.send(result);
});
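As mentioned above, a slightly more efficient variant collects the response chunks in an array and joins them once at the end instead of growing a string. A sketch of that idea (getJSONBuffered is a hypothetical name; the options object has the same shape as above):
const http = require('http');
const https = require('https');

// Same idea as getJSON above, but collecting Buffer chunks and parsing once at the end
module.exports.getJSONBuffered = (options, onResult) => {
  const lib = options.port == 443 ? https : http;
  const req = lib.request(options, (res) => {
    const chunks = [];
    res.on('data', (chunk) => chunks.push(chunk));
    res.on('end', () => {
      const obj = JSON.parse(Buffer.concat(chunks).toString('utf8'));
      onResult(res.statusCode, obj);
    });
  });
  req.on('error', (err) => {
    // Surface the error however fits your application
    console.error(err);
  });
  req.end();
};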
UPDATE
If you're looking for async/await (linear, no callbacks), promises, compile-time support, and IntelliSense, we created a lightweight HTTP and REST client that fits that bill:
Microsoft typed-rest-client
Try using the simple http.get(options, callback) function in node.js:
var http = require('http');
var options = {
host: 'www.google.com',
path: '/index.html'
};
var req = http.get(options, function(res) {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
// Buffer the body entirely for processing as a whole.
var bodyChunks = [];
res.on('data', function(chunk) {
// You can process streamed parts here...
bodyChunks.push(chunk);
}).on('end', function() {
var body = Buffer.concat(bodyChunks);
console.log('BODY: ' + body);
// ...and/or process the entire body here.
})
});
req.on('error', function(e) {
console.log('ERROR: ' + e.message);
});
There is also a general http.request(options, callback) function which allows you to specify the request method and other request details.
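For example, a minimal http.request sketch for a POST with a JSON body (httpbin.org is used here only as a convenient test endpoint):
var http = require('http');

var payload = JSON.stringify({ hello: 'world' });

var options = {
  host: 'httpbin.org',
  path: '/post',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(payload)
  }
};

var req = http.request(options, function (res) {
  var bodyChunks = [];
  res.on('data', function (chunk) {
    bodyChunks.push(chunk);
  }).on('end', function () {
    console.log('STATUS: ' + res.statusCode);
    console.log('BODY: ' + Buffer.concat(bodyChunks));
  });
});

req.on('error', function (e) {
  console.log('ERROR: ' + e.message);
});

req.write(payload);
req.end();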
Request and Superagent are pretty good libraries to use.
Note: request is now deprecated; use it at your own risk!
Using request:
var request=require('request');
request.get('https://someplace',options,function(err,res,body){
if(err) //TODO: handle err
if(res.statusCode === 200 ) //etc
//TODO Do something with response
});
You can also use Requestify, a really cool and very simple HTTP client I wrote for Node.js, and it supports caching.
Just do the following for GET method request:
var requestify = require('requestify');
requestify.get('http://example.com/api/resource')
.then(function(response) {
// Get the response body (JSON parsed or jQuery object for XMLs)
response.getBody();
}
);
This version is based on the function initially proposed by bryanmac; it uses promises, has better error handling, and is rewritten in ES6.
let http = require("http"),
https = require("https");
/**
* getJSON: REST get request returning JSON object(s)
* @param options: http options object
*/
exports.getJSON = function (options) {
console.log('rest::getJSON');
let reqHandler = +options.port === 443 ? https : http;
return new Promise((resolve, reject) => {
let req = reqHandler.request(options, (res) => {
let output = '';
console.log('rest::', options.host + ':' + res.statusCode);
res.setEncoding('utf8');
res.on('data', function (chunk) {
output += chunk;
});
res.on('end', () => {
try {
let obj = JSON.parse(output);
// console.log('rest::', obj);
resolve({
statusCode: res.statusCode,
data: obj
});
}
catch (err) {
console.error('rest::end', err);
reject(err);
}
});
});
req.on('error', (err) => {
console.error('rest::request', err);
reject(err);
});
req.end();
});
};
As a result, you don't have to pass in a callback function; instead, getJSON() returns a promise. In the following example, the function is used inside an Express route handler:
router.get('/:id', (req, res, next) => {
rest.getJSON({
host: host,
path: `/posts/${req.params.id}`,
method: 'GET'
}).then(({ statusCode, data }) => {
res.json(data);
}, (error) => {
next(error);
});
});
On error it delegates the error to the server error handling middleware.
Unirest is the best library I've come across for making HTTP requests from Node. It aims to be a multiplatform framework, so learning how it works on Node will serve you well if you need to use an HTTP client on Ruby, PHP, Java, Python, Objective-C, .NET, or Windows 8 as well. As far as I can tell, the Unirest libraries are mostly backed by existing HTTP clients (e.g. on Java, the Apache HTTP client; on Node, Mikeal's request library) - Unirest just puts a nicer API on top.
Here are a couple of code examples for Node.js:
var unirest = require('unirest')
// GET a resource
unirest.get('http://httpbin.org/get')
.query({'foo': 'bar'})
.query({'stack': 'overflow'})
.end(function(res) {
if (res.error) {
console.log('GET error', res.error)
} else {
console.log('GET response', res.body)
}
})
// POST a form with an attached file
unirest.post('http://httpbin.org/post')
.field('foo', 'bar')
.field('stack', 'overflow')
.attach('myfile', 'examples.js')
.end(function(res) {
if (res.error) {
console.log('POST error', res.error)
} else {
console.log('POST response', res.body)
}
})
You can jump straight to the Node docs here
Check out shred. It's a node HTTP client created and maintained by spire.io that handles redirects, sessions, and JSON responses. It's great for interacting with rest APIs. See this blog post for more details.
Check out httpreq: it's a node library I created because I was frustrated there was no simple http GET or POST module out there ;-)
For anyone who is looking for a library to send HTTP requests in Node.js, axios is also a good choice. It supports Promises :)
Install (npm): npm install axios
Example GET request:
const axios = require('axios');
axios.get('https://google.com')
.then(function (response) {
// handle success
console.log(response);
})
.catch(function (error) {
// handle error
console.log(error);
})
Github page
Update 10/02/2022
Node.js integrated fetch in v17.5.0 as an experimental feature. Now you can use fetch to send requests just like you do on the client side. For now, it is experimental, so be careful.
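A small sketch of what that looks like (on Node 17.5+ you may need the --experimental-fetch flag; from Node 18 it is enabled by default; httpbin.org is just a test endpoint):
// fetch is global here; no require needed
fetch('https://httpbin.org/json')
  .then((res) => {
    if (!res.ok) throw new Error('HTTP ' + res.status);
    return res.json();
  })
  .then((data) => console.log(data))
  .catch((err) => console.error(err));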
If you just need to make simple GET requests and don't need support for any other HTTP methods, take a look at simple-get:
var get = require('simple-get');
get('http://example.com', function (err, res) {
if (err) throw err;
console.log(res.statusCode); // 200
res.pipe(process.stdout); // `res` is a stream
});
Use reqclient: it is not designed for scripting purposes like request or many other libraries. Reqclient lets you specify in the constructor many configurations that are useful when you need to reuse the same configuration again and again: base URL, headers, auth options, logging options, caching, etc. It also has useful features like query and URL parsing, automatic query encoding, JSON parsing, etc.
The best way to use the library is to create a module that exports the object pointing to the API along with the necessary configuration to connect:
Module client.js:
let RequestClient = require("reqclient").RequestClient
let client = new RequestClient({
baseUrl: "https://myapp.com/api/v1",
cache: true,
auth: {user: "admin", pass: "secret"}
})
module.exports = client
And in the controllers where you need to consume the API, use it like this:
let client = require('./client')
//let router = ...
router.get('/dashboard', (req, res) => {
// Simple GET with Promise handling to https://myapp.com/api/v1/reports/clients
client.get("reports/clients")
.then(response => {
console.log("Report for client", response.userId) // REST responses are parsed as JSON objects
res.render('clients/dashboard', {title: 'Customer Report', report: response})
})
.catch(err => {
console.error("Ups!", err)
res.status(400).render('error', {error: err})
})
})
router.get('/orders', (req, res, next) => {
// GET with query (https://myapp.com/api/v1/orders?state=open&limit=10)
client.get({"uri": "orders", "query": {"state": "open", "limit": 10}})
.then(orders => {
res.render('clients/orders', {title: 'Customer Orders', orders: orders})
})
.catch(err => someErrorHandler(req, res, next))
})
router.delete('/orders', (req, res, next) => {
// DELETE with params (https://myapp.com/api/v1/orders/1234/A987)
client.delete({
"uri": "orders/{client}/{id}",
"params": {"client": "A987", "id": 1234}
})
.then(resp => res.status(204))
.catch(err => someErrorHandler(req, res, next))
})
reqclient supports many features, including some that are not supported by other libraries: OAuth2 integration and logger integration with cURL syntax, and it always returns native Promise objects.
If you ever need to send a GET request to an IP address as well as a domain (other answers did not mention that you can specify a port variable), you can make use of this function:
function getCode(host, port, path, queryString) {
console.log("(" + host + ":" + port + path + ")" + "Running httpHelper.getCode()")
// Construct url and query string
const requestUrl = url.parse(url.format({
protocol: 'http',
hostname: host,
pathname: path,
port: port,
query: queryString
}));
console.log("(" + host + path + ")" + "Sending GET request")
// Send request
console.log(url.format(requestUrl))
http.get(url.format(requestUrl), (resp) => {
let data = '';
// A chunk of data has been received.
resp.on('data', (chunk) => {
console.log("GET chunk: " + chunk);
data += chunk;
});
// The whole response has been received. Print out the result.
resp.on('end', () => {
console.log("GET end of response: " + data);
});
}).on("error", (err) => {
console.log("GET Error: " + err);
});
}
Don't forget to require the modules at the top of your file:
const http = require("http");
const url = require('url');
Also bear in mind that you may use the https module for communicating over a secured network. In that case these two lines would change:
const https = require("https");
...
https.get(url.format(requestUrl), (resp) => { ......
You can use the request module and a Promise in Express to make any request:
const promise = require('promise');
const requestModule = require('request');
const curlRequest =(requestOption) =>{
return new Promise((resolve, reject)=> {
requestModule(requestOption, (error, response, body) => {
try {
if (error) {
throw error;
}
if (body) {
try {
body = (body) ? JSON.parse(body) : body;
resolve(body);
}catch(error){
resolve(body);
}
} else {
throw new Error('something wrong');
}
} catch (error) {
reject(error);
}
})
})
};
const option = {
url : uri,
method : "GET",
headers : {
}
};
curlRequest(option).then((data)=>{
}).catch((err)=>{
})