This is my entire code
const rp = require('request-promise');
(async () => {
const headers = {
Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'pt-BR,pt;q=0.8,en-US;q=0.5,en;q=0.3',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
Host: 'www.receita.fazenda.gov.br',
Pragma: 'no-cache',
'Upgrade-Insecure-Requests': 1,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:64.0) Gecko/20100101 Firefox/64.0',
};
const jar = rp.jar();
try {
const firstAccess = await rp.get('https://www.receita.fazenda.gov.br/Aplicacoes/SSL/ATCTA/CPF/ConsultaSituacao/ConsultaPublica.asp', {
headers,
gzip: true,
simple: false,
strictSSL: false,
jar,
});
console.log(firstAccess);
} catch (e) {
console.log(e);
}
})();
When i run my code i receive this error:
UnhandledPromiseRejectionWarning: RequestError: Error: Client network
socket disconnected before secure TLS connection was established
The url works in the browser, but I can not access through nodeJS, apparently it's some problem with the https certificate
Is there any way to ignore the error and access it?
You need to move your call inside a try catch block to handle the promise rejection in case the call fails:
...
try {
const firstAccess = await rp.get(...)
} catch (e) {
console.log(e)
}
...
My problem was resolved after restarting/changing the network connection. The issue is with the network most of the time.
Related
I'm trying to get the set-cookie header from the http response, but it's not showing up for most of the requests.
Using https://www.southwest.com/ as an example, you can see that https://bs.serving-sys.com/Serving/ActivityServer.bs?cn=as&ActivityID=1345510&rnd=459203.51759912557&fare%20class=[fare%20class]&business%20or%20leisure=[business%20or%20leisure]&number%20of%20passengers=[number%20of%20passengers]&date=[date]&destination=[destination]&origination=[origination] sets 3 cookies:
Puppeteer code:
const puppeteer = require('puppeteer');
async function getResponseCookies() {
function handleResponse(response) {
const url = response.url();
const headers = response.headers();
const status = response.status()
if(url.includes('https://bs.serving-sys.com/Serving/ActivityServer.bs')) {
console.log('RESPONSE URL ', url)
console.log('RESPONSE HEADERS ', headers)
console.log('RESPONSE STATUS ', status)
}
}
const browser = await puppeteer.launch({
ignoreDefaultArgs: ["--enable-automation"],
executablePath: "/usr/bin/google-chrome",
headless: true,
ignoreHTTPSErrors: true,
});
const page = await browser.newPage();
await page.setUserAgent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36");
await page.on('response', async(response) => {
await handleResponse(response);
})
const urls = ['https://www.southwest.com'];
for(let url of urls) {
await page.goto(url, { timeout: 0, waitUntil: 'networkidle0' });
}
await browser.close();
}
getResponseCookies();
The above code execution outputs the following log, without any 'set-cookie' headers in the response:
RESPONSE URL https://bs.serving-sys.com/Serving/ActivityServer.bs?cn=as&ActivityID=1345510&rnd=68456.37277058625&fare%20class=[fare%20class]&business%20or%20leisure=[business%20or%20leisure]&number%20of%20passengers=[number%20of%20passengers]&date=[date]&destination=[destination]&origination=[origination]
RESPONSE HEADERS { pragma: 'no-cache',
date: 'Mon, 03 Feb 2020 10:30:16 GMT',
'content-encoding': 'gzip',
server: 'Microsoft-IIS/7.5',
'x-powered-by': 'ASP.NET',
p3p: 'CP="NOI DEVa OUR BUS UNI"',
'access-control-allow-origin': '*',
'cache-control': 'no-cache, no-store',
'content-type': 'text/html; charset=UTF-8',
'content-length': '616',
expires: 'Sun, 05-Jun-2005 22:00:00 GMT' }
Any ideas why the Set-Cookie header is missing from the response?
*Note that those cookies are returned when using Network.getAllCookies from CDP
According to https://github.com/puppeteer/puppeteer/issues/4918, it looks like puppeteer is not listening to the Network.responseReceivedExtraInfo event that contains the raw headers. Listening to that event did the trick for me.
I am getting a Socket Hangup error when attempting to connect to an external web site, an example of a failing site given below. The code is working for other sites and I can successfully access the test site if I rewrite the code in Python. I am unable to make changes to the external site so am looking to enhance the NodeJS script to handle the connection. Any help appreciated
let request = require("request-promise");
let needle = require("needle");
//Check if can connect to site
let arrTestCases = ["https://compassandstars.com"];
for (x in arrTestCases) {
chkcon(arrTestCases[x])
}
async function chkcon(req) {
let resultJSON = {};
let data;
try {
console.log ("Validating blog " + req);
//Attempt using RequestPromise, fails with same error
// let getURL = await request({
// url: req,
// headers: {
// 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
// }
//// headers: {
//// 'User-Agent': 'ouruseragent',
//// 'Connection': 'keep-alive'
//// }
// })
needle('get', req)
.then(function(response) {
resultJSON = {ValidURL: true};
console.log("URL Validated successfully", response.body);
})
.catch(function(err) {
console.log(err);
})
} catch (e) {
console.log("Bad Blog URL ",e.message);
} finally {
console.log("Result: " + JSON.stringify(resultJSON), req);
}
The error response:
Result: {} https://compassandstars.com
{ Error: socket hang up
at TLSSocket.onConnectEnd (_tls_wrap.js:1073:19)
at Object.onceWrapper (events.js:219:13)
at TLSSocket.emit (events.js:132:15)
at endReadableNT (_stream_readable.js:1101:12)
at process._tickCallback (internal/process/next_tick.js:152:19)
code: 'ECONNRESET',
path: null,
host: 'compassandstars.com',
port: 443,
localAddress: undefined }
I can recreate the issue using NodeJS 8.10 on AWS Lambda and locally on my machine using NodeJS 9.6.1.
My research indicates it may be an error finding a compatible cipher to make the SSL connection but I'm unable to find out how to force Request to change the request to handle this.
I am using express js in backend and doing fetch in frontend. My frontend is hosted by webpack-dev-sever which is running locally on port 3000. My express js server is running locally on port 3001. I need to send some custom headers along-with my request.
My express js server is using 'morgan' for logging on terminal. The code of my server looks like this:
const express = require('express')
const bodyParser = require('body-parser')
const morgan = require('morgan')
const fs = require('fs')
const path = require('path')
const mime = require('mime')
const http = require('http')
const cors = require('cors')
const server = express();
let whitelist = [
'http://localhost:3000',
];
var corsOptions = {
origin: function(origin, callback){
var originIsWhitelisted = whitelist.indexOf(origin) !== -1;
callback(null, originIsWhitelisted);
},
credentials: true
};
server.use(cors(corsOptions));
server.use(bodyParser());
server.use(morgan('dev'));
server.use(function (req, res, next) {
res.header('Access-Control-Allow-Origin', '*');
res.header('Access-Control-Allow-Headers', 'Content-Type, x-imei, X-API-Key, requestId, Authorization');
res.header('Access-Control-Allow-Methods', '*');
next()
})
server.post('/verify/identifier', function(req, res, next) {
console.log(req.body)
console.log(req.headers)
res.send({"responseCode":"0012"});
});
server.listen(port, function() {
console.log(`server hosted on port ${port}`)
})
My frontend code is this:
export function fetchMerchantApi(url, body) {
let reqObj = {
method: 'POST',
headers: {
"Content-Type": "application/json",
"X-API-Key": secret_key,
"requestId": getUniqueId()
},
cache: 'default',
mode: 'cors'
}
try {
return new Promise((res, rej) => {
fetch(url, reqObj).then(result => {
return result.json()
}).then(data => {
res(data)
});
})
} catch(e) {
throw new Error(e);
}
}
fetchMerchantApi("http://localhost:3001/verify/identifier", reqBody).then(res => {
console.log(res);
})
All imports, syntax, etc are correct.
The chrome debugger tool's network tab is this:
The logs in terminal of server:
body
[0] { customField1: 'Y',
[0] imei: '358967064854480',
[0] isMerchantExist: 'Y',
[0] mobileNo: '9999999999',
[0] serialNumber: 'ZY22285JKV' }
headers:
[0] { host: 'localhost:3001',
[0] connection: 'keep-alive',
[0] 'content-length': '119',
[0] 'x-imei': '358967064854480',
[0] origin: 'http://localhost:3000',
[0] requestid: '9999999999300513303519185',
[0] 'content-type': 'application/json',
[0] 'user-agent': 'Mozilla/5.0 (Linux; Android
6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Mobile Safari/537.36',
[0] 'x-api-key': 'l7xx5a9a4eea46054ef38b18b5e6fdbd2c5a',
[0] accept: '/',
[0] referer: 'http://localhost:3000/auth/verifyNo',
[0] 'accept-encoding': 'gzip, deflate, br',
[0] 'accept-language': 'en-IN,en-GB;q=0.8,en-US;q=0.6,en;q=0.4' }
morgain log:
[0] POST /cr/v2/merchant/verify/identifier 200 2.432 ms - 23
After all this, I am getting no error and also no response data from backend.
Funny thing is that my rest client(Postman) works fine. I get data on postman.
This is a tricky one . It is an options request, that is a preflight request from the browser. The options request contains two http headers Access-Control-Request-Headers' andAccess-Control-Method` .
But it looks like you allowing every method in cors options.
you can enable pre flight requests like
app.options('*', cors())
I was trying to make a simple request to site. it should get html text, but it gets ' '
NPM module here: github.com/request/request
Code:
var fs = require('fs');
var request = require('request');
var options = {
url:'https://sample.site/phpLoaders/getInventory/getInventory.php',
encoding : 'utf8',
gzip : true,
forever: true,
headers: {
'Host': 'sample.site',
'Connection': 'keep-alive',
'Content-Length': '58',
'Cache-Control': 'max-age=0',
'Accept': '*/*',
'Origin': 'https://csgosell.com',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Referer': 'https://sample.site/',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4',
'Cookie': 'my-cookies from browser'
},
form: {
stage:'bot',
steamId:76561198284997423,
hasBonus:false,
coins:0
}
};
request.post(options,
function(error, response, body){
console.log(response.statusCode);
if (!error) {
fs.writeFileSync('site.html', body);
}
else{
console.log(error);
}
}
);
Chrome request: https://i.stack.imgur.com/zKQo5.png
Nodejs request:https://i.stack.imgur.com/yH9U3.png
the difference is in headers:
:authority:csgosell.com
:method:POST :path:/phpLoaders/getInventory/getInventory.php :scheme:https
after some googling, I anderstood that it is http2, and tried to put it inow another agent's options, but nothing changed.
var spdy = require('spdy');
var agent = spdy.createAgent({
host: 'sample.site',
port: 443,
spdy: {
ssl: true,
}
}).once('error', function (err) {
this.emit(err);
});
options.agent = agent;
To answer your question i will copy/paste a part of my code that enable you to receive a post request from your frontend application(angularJS) to your backend application (NodeJS), and another function that enable you to do the inverse send a post request from nodeJS to another application (that might consume it):
1) receive a request send from angularJS or whatever inside your nodeJS app
//Import the necessary libraries/declare the necessary objects
var express = require("express");
var myParser = require("body-parser");
var app = express();
// we will need the following imports for the inverse operation
var https = require('https')
var querystring = require('querystring')
// we need these variables for the post request:
var Vorname ;
var Name ;
var e_mail ;
var Strasse ;
app.use(myParser.urlencoded({extended : true}));
// the post request is send from http://localhost:8080/yourpath
app.post("/yourpath", function(request, response ) {
// test the post request
if (!request.body) return res.sendStatus(400);
// fill the variables with the user data
Vorname =request.body.Vorname;
Name =request.body.Name;
e_mail =request.body.e_mail;
Strasse =request.body.Strasse;
response.status(200).send(request.body.title);
});
2) Do the inverse send a POST request from a nodeJS application to another application
function sendPostRequest()
{
// prepare the data that we are going to send to anymotion
var jsonData = querystring.stringify({
"Land": "Land",
"Vorname": "Vorname",
"Name": "Name",
"Strasse": Strasse,
});
var post_options = {
host: 'achref.gassoumi.de',
port: '443',
method: 'POST',
path: '/api/mAPI',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': jsonData.length
}
};
// request object
var post_req = https.request(post_options, function(res) {
var result = '';
res.on('data', function (chunk) {
result += chunk;
console.log(result);
});
res.on('end', function () {
// show the result in the console : the thrown result in response of our post request
console.log(result);
});
res.on('error', function (err) {
// show possible error while receiving the result of our post request
console.log(err);
})
});
post_req.on('error', function (err) {
// show error if the post request is not succeed
console.log(err);
});
// post the data
post_req.write(jsonData);
post_req.end();
// ps : I used a https post request , you could use http if you want but you have to change the imported library and some stuffs in the code
}
So finally , I hope this answer will helps anyone who is looking on how to get a post request in node JS and how to send a Post request from nodeJS application.
For further details about how to receive a post request please read the npm documentation for body-parser library : npm official website documentation
How can i get the referrer from the request object? The variable this.request.headers['referer'] was empty.
If your page was referred by another page, the referer will be accessable through this.headers.referer.
If the page was not refered by another page (was loaded directly), this.headers.referer will be undefined.
This demo code:
'use strict'
const Koa = require('koa')
let app = new Koa()
app.use(function * () {
console.log(this.headers)
})
app.listen(8888)
Yielded this when referred by another page:
{ host: 'localhost:8888',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0',
accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'accept-language': 'en-US,en;q=0.5',
'accept-encoding': 'gzip, deflate',
referer: 'http://localhost:1111/',
connection: 'keep-alive' }
And this when loaded directly:
{ host: 'localhost:8888',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0',
accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'accept-language': 'en-US,en;q=0.5',
'accept-encoding': 'gzip, deflate',
connection: 'keep-alive' }
For newer koa versions - I currently use 2.5.1 - a warning is generated because of the generator function *(), so here's a newer example with ES6 syntax
const Koa = require('koa')
const app = new Koa()
app
.use(async (ctx) => console.log(ctx.request.headers))
app.listen(8888)
Output should be the same as described by #JoshWillik except the warning
Edit:
gif add that shows that it works
and a sample repository to try out