I'm trying to get the set-cookie header from the http response, but it's not showing up for most of the requests.
Using https://www.southwest.com/ as an example, you can see that https://bs.serving-sys.com/Serving/ActivityServer.bs?cn=as&ActivityID=1345510&rnd=459203.51759912557&fare%20class=[fare%20class]&business%20or%20leisure=[business%20or%20leisure]&number%20of%20passengers=[number%20of%20passengers]&date=[date]&destination=[destination]&origination=[origination] sets 3 cookies:
Puppeteer code:
const puppeteer = require('puppeteer');

async function getResponseCookies() {
  function handleResponse(response) {
    const url = response.url();
    const headers = response.headers();
    const status = response.status();
    if (url.includes('https://bs.serving-sys.com/Serving/ActivityServer.bs')) {
      console.log('RESPONSE URL ', url);
      console.log('RESPONSE HEADERS ', headers);
      console.log('RESPONSE STATUS ', status);
    }
  }

  const browser = await puppeteer.launch({
    ignoreDefaultArgs: ["--enable-automation"],
    executablePath: "/usr/bin/google-chrome",
    headless: true,
    ignoreHTTPSErrors: true,
  });
  const page = await browser.newPage();
  await page.setUserAgent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36");

  // page.on() registers a listener synchronously; no await needed
  page.on('response', (response) => handleResponse(response));

  const urls = ['https://www.southwest.com'];
  for (const url of urls) {
    await page.goto(url, { timeout: 0, waitUntil: 'networkidle0' });
  }
  await browser.close();
}

getResponseCookies();
Running the code above produces the following log, with no 'set-cookie' header anywhere in the response:
RESPONSE URL https://bs.serving-sys.com/Serving/ActivityServer.bs?cn=as&ActivityID=1345510&rnd=68456.37277058625&fare%20class=[fare%20class]&business%20or%20leisure=[business%20or%20leisure]&number%20of%20passengers=[number%20of%20passengers]&date=[date]&destination=[destination]&origination=[origination]
RESPONSE HEADERS { pragma: 'no-cache',
date: 'Mon, 03 Feb 2020 10:30:16 GMT',
'content-encoding': 'gzip',
server: 'Microsoft-IIS/7.5',
'x-powered-by': 'ASP.NET',
p3p: 'CP="NOI DEVa OUR BUS UNI"',
'access-control-allow-origin': '*',
'cache-control': 'no-cache, no-store',
'content-type': 'text/html; charset=UTF-8',
'content-length': '616',
expires: 'Sun, 05-Jun-2005 22:00:00 GMT' }
Any ideas why the Set-Cookie header is missing from the response?
*Note that those cookies are returned when using Network.getAllCookies from CDP
According to https://github.com/puppeteer/puppeteer/issues/4918, it looks like puppeteer is not listening to the Network.responseReceivedExtraInfo event that contains the raw headers. Listening to that event did the trick for me.
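For reference, here is a minimal sketch of that approach (assuming the page object from the code above): open a raw CDP session, enable the Network domain, and read the raw headers from Network.responseReceivedExtraInfo. Header casing on the wire can vary, so it's matched case-insensitively; Network.getAllCookies is included as well since that's the other route mentioned in the note.

const client = await page.target().createCDPSession();
await client.send('Network.enable');

// Network.responseReceivedExtraInfo carries the raw wire headers,
// including the Set-Cookie values that page.on('response') does not expose
client.on('Network.responseReceivedExtraInfo', (event) => {
  const setCookie = Object.entries(event.headers)
    .filter(([name]) => name.toLowerCase() === 'set-cookie')
    .map(([, value]) => value);
  if (setCookie.length) {
    console.log('RAW SET-COOKIE for request', event.requestId, setCookie);
  }
});

// Alternatively, after navigation, Network.getAllCookies returns every cookie
// in the browser's cookie jar, regardless of which response set it
const { cookies } = await client.send('Network.getAllCookies');

The extra-info event only carries a requestId, so correlating it with a URL means matching it against the requestId of the corresponding Network.responseReceived event, which does include the URL.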
Related
I'm trying to fetch a page after a redirect, but I'm getting a 401 status in the response.
async function getPage(cookie, jsid) {
  let params = {
    method: "GET",
    headers: {
      cookie: jsid,
      Cookie: cookie,
      "content-type": "text/html; charset=utf-8",
      "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36"
    },
    credentials: "include",
    redirect: "follow",
  };
  console.log("3333333", params);
  await fetch('https://portal.elpts.ru/portal', params)
    .then(res => { console.log("11111", res); return res.text(); })
    .then(text => console.log("22222", text))
    .catch(err => console.error("error: " + err));
}
I get the cookies and the JSESSIONID through other requests, and they come back correctly. I reproduced this request in the Insomnia application and it correctly returned 200. This is the request code generated by the app:
async function test() {
  const FormData = require('form-data');
  const fetch = require('node-fetch');
  const formData = new FormData();
  let url = 'https://portal.elpts.ru/portal';
  let options = {
    method: 'GET',
    headers: {
      Cookie: 'csrf-token-name=csrftoken; csrf-token-value=1725ec8f21b4ebe5015ce5b7c82c88bf378087f0dec427fa2dfb10d0de6ad93a74b8e3f2abb8edeb; JSESSIONID=sp-rf-app-portal-2c~D2D8E12880DD51810AB42BCAB7F4EEA5',
      'content-type': 'multipart/form-data; boundary=---011000010111000001101001',
      cookie: 'JSESSIONID=sp-rf-app-portal-2c~D2D8E12880DD51810AB42BCAB7F4EEA5; '
    }
  };
  options.body = formData;
  fetch(url, options)
    .then(res => res.json())
    .then(json => console.log(json))
    .catch(err => console.error('error:' + err));
}
And this is the response I get:
I am using isomorphic-fetch but node-fetch gives the same result.
It is necessary to separate getting the JSESSIONID from getting the page itself. The default is redirect: 'follow', so when fetching the JSESSIONID you need to specify redirect: 'manual', otherwise the 302 response (and its Set-Cookie header) is swallowed by the redirect.
// getData is assumed to be a thin wrapper around fetch that returns the Response
async function getJsid(cookies) {
  return await getData('https://portal.elpts.ru/portal/', {
    method: 'GET',
    headers: {
      'Content-Type': 'text/html; charset=utf-8',
      'Cookie': cookies,
      'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
      'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36'
    },
    redirect: 'manual',
  }).then(async res => {
    // with redirect: 'manual' the 302 itself comes back, so its Set-Cookie is readable
    return res.headers.get('set-cookie');
  });
}

async function getPage(cookies, jsId) {
  const page = await getData('https://portal.elpts.ru/portal', {
    method: "GET",
    headers: {
      'Cookie': cookies + '; ' + jsId
    },
    redirect: "follow",
  });
  return await page.text();
}
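For completeness, a rough sketch of how these two helpers might be chained (getData is assumed to be a thin wrapper around fetch, and the cookie string below is a placeholder for the csrf cookies obtained from the earlier requests):

async function loadPortal() {
  // Placeholder: csrf cookies obtained from the earlier requests
  const cookies = 'csrf-token-name=...; csrf-token-value=...';

  // First hop: capture the JSESSIONID from the 302 without following it
  const jsId = await getJsid(cookies);

  // Second hop: request the page with both cookie values attached,
  // letting fetch follow the redirect this time
  return await getPage(cookies, jsId);
}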
axios.get(downloadUrl, Object.assign({}, this.tokens[token_nr].config, {responseType: stream}))
  .then((response) => {
    console.log("HEADERS", response.headers);
    console.log(typeof response.data, "RESPONSE LENGTH", response.data.length);
    const decryptedBuffer = encryptionService.decrypt(Buffer.from(response.data), infos);
    resolve(decryptedBuffer);
  })
This axios request should return the data of an mp3 file. I previously did this with the request package, which gives a binary Buffer (using the encoding: null option) that I can pass to the encryptionService.decrypt() function.
In response.headers I can see the same content-length as with the request package, but when I print the length of response.data it is shorter. I tried both 'arraybuffer' and 'stream' as my responseType, and leaving the responseType option out does not help either. What should I do to get the full content?
Some logs: (not all headers)
HEADERS {
'accept-ranges': 'bytes',
'cache-control': 'public',
'content-type': 'audio/mpeg',
'content-length': '14175084',
connection: 'close'
}
string RESPONSE LENGTH 13495410
CONFIG HEADERS {
headers: {
Accept: 'application/json, text/plain, */*',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36',
'cache-control': 'max-age=0',
'accept-language': 'en-US,en;q=0.9,en-US;q=0.8,en;q=0.7',
'accept-charset': 'utf-8,ISO-8859-1;q=0.8,*;q=0.7',
cookie: 'arl=XXXX',
Connection: 'keep-alive',
'Keep-Alive': 'timeout=1500, max=100',
'Content-Type': 'text/plain;charset=UTF-8'
},
}
When creating the request, try passing the Connection and Keep-Alive headers. Sometimes the connection is closed before the response has been fully received.
// axios.create() is synchronous, so no await is needed; it returns a configured instance
var axioRequest = axios.create({
  baseURL: url,
  headers: {
    Connection: 'keep-alive',
    'Keep-Alive': 'timeout=1500, max=100'
  }
});
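Requests made through the instance then inherit those headers; since baseURL is already set to the target URL, only a relative path (or an empty string) is needed:

// Hypothetical usage of the instance configured above
const response = await axioRequest.get('');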
It was resolved with this answer:
https://disjoint.ca/til/2017/09/20/how-to-download-a-binary-file-using-axios/
I was missing the Content-Type header and the { responseType: 'arraybuffer' } option.
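In other words, something along these lines (a sketch reusing the names from the question; with responseType: 'arraybuffer', axios hands back the raw bytes instead of decoding them as a UTF-8 string, which is what was shortening response.data):

axios.get(downloadUrl, Object.assign({}, this.tokens[token_nr].config, { responseType: 'arraybuffer' }))
  .then((response) => {
    // response.data is now binary data matching the reported content-length,
    // so it can be passed straight to the decrypter
    const decryptedBuffer = encryptionService.decrypt(Buffer.from(response.data), infos);
    resolve(decryptedBuffer);
  });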
I'm using Nock to intercept an HTTP request.
test.js:
const nock = require('nock');
const server = require('../server');
// request(server).get() is the supertest/superagent API, not the request package
const request = require('supertest');

describe('My test', () => {
  it('Should return the customized header', async () => {
    nock('url')
      .get('/test')
      .reply(302, {
        'user-agent': 'Mozilla'
      });
    const { headers } = await request(server).get('/test');
    expect(headers['user-agent']).to.include('Mozilla');
  });
});
When I run the test, it fails, and the headers received by the request look like this:
{
'user-agent': 'node-superagent/3.8.3',
location: 'undefined',
vary: 'Accept, Accept-Encoding',
'content-type': 'text/plain; charset=utf-8',
'content-length': '31',
date: 'Fri, 24 May 2019 09:15:46 GMT',
connection: 'close'
}
Did I miss something, or is this the normal behaviour of Nock?
The issue is the way you're passing the headers to the reply function. Headers are the third argument to that method, but you're providing them as the second argument, which means the object with the user-agent key is being used as the body. Since it's a 302 and you probably want an empty body, pass an empty string as the second argument.
nock('url')
  .get('/test')
  .reply(302, '', {
    'user-agent': 'Mozilla'
  })
Related docs.
This is my entire code
const rp = require('request-promise');

(async () => {
  const headers = {
    Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'pt-BR,pt;q=0.8,en-US;q=0.5,en;q=0.3',
    'Cache-Control': 'no-cache',
    Connection: 'keep-alive',
    Host: 'www.receita.fazenda.gov.br',
    Pragma: 'no-cache',
    'Upgrade-Insecure-Requests': 1,
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:64.0) Gecko/20100101 Firefox/64.0',
  };
  const jar = rp.jar();
  try {
    const firstAccess = await rp.get('https://www.receita.fazenda.gov.br/Aplicacoes/SSL/ATCTA/CPF/ConsultaSituacao/ConsultaPublica.asp', {
      headers,
      gzip: true,
      simple: false,
      strictSSL: false,
      jar,
    });
    console.log(firstAccess);
  } catch (e) {
    console.log(e);
  }
})();
When I run my code I receive this error:
UnhandledPromiseRejectionWarning: RequestError: Error: Client network
socket disconnected before secure TLS connection was established
The URL works in the browser, but I cannot access it through Node.js; apparently it's some problem with the HTTPS certificate.
Is there any way to ignore the error and access it?
You need to move your call inside a try catch block to handle the promise rejection in case the call fails:
...
try {
  const firstAccess = await rp.get(...)
} catch (e) {
  console.log(e)
}
...
My problem was resolved after restarting/changing the network connection. The issue is with the network most of the time.
I am using Express in the backend and fetch in the frontend. My frontend is hosted by webpack-dev-server, which is running locally on port 3000. My Express server is running locally on port 3001. I need to send some custom headers along with my request.
My express js server is using 'morgan' for logging on terminal. The code of my server looks like this:
const express = require('express');
const bodyParser = require('body-parser');
const morgan = require('morgan');
const fs = require('fs');
const path = require('path');
const mime = require('mime');
const http = require('http');
const cors = require('cors');

const server = express();
const port = 3001; // the server runs locally on port 3001

let whitelist = [
  'http://localhost:3000',
];

var corsOptions = {
  origin: function (origin, callback) {
    var originIsWhitelisted = whitelist.indexOf(origin) !== -1;
    callback(null, originIsWhitelisted);
  },
  credentials: true
};

server.use(cors(corsOptions));
server.use(bodyParser());
server.use(morgan('dev'));

server.use(function (req, res, next) {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Headers', 'Content-Type, x-imei, X-API-Key, requestId, Authorization');
  res.header('Access-Control-Allow-Methods', '*');
  next();
});

server.post('/verify/identifier', function (req, res, next) {
  console.log(req.body);
  console.log(req.headers);
  res.send({ "responseCode": "0012" });
});

server.listen(port, function () {
  console.log(`server hosted on port ${port}`);
});
My frontend code is this:
export function fetchMerchantApi(url, body) {
  let reqObj = {
    method: 'POST',
    headers: {
      "Content-Type": "application/json",
      "X-API-Key": secret_key,
      "requestId": getUniqueId()
    },
    body: JSON.stringify(body), // send the request body as JSON
    cache: 'default',
    mode: 'cors'
  };
  try {
    return new Promise((res, rej) => {
      fetch(url, reqObj).then(result => {
        return result.json();
      }).then(data => {
        res(data);
      });
    });
  } catch (e) {
    throw new Error(e);
  }
}

fetchMerchantApi("http://localhost:3001/verify/identifier", reqBody).then(res => {
  console.log(res);
});
All imports, syntax, etc are correct.
The Chrome debugger tool's network tab shows this:
The logs in the server terminal:
body
[0] { customField1: 'Y',
[0] imei: '358967064854480',
[0] isMerchantExist: 'Y',
[0] mobileNo: '9999999999',
[0] serialNumber: 'ZY22285JKV' }
headers:
[0] { host: 'localhost:3001',
[0] connection: 'keep-alive',
[0] 'content-length': '119',
[0] 'x-imei': '358967064854480',
[0] origin: 'http://localhost:3000',
[0] requestid: '9999999999300513303519185',
[0] 'content-type': 'application/json',
[0] 'user-agent': 'Mozilla/5.0 (Linux; Android
6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Mobile Safari/537.36',
[0] 'x-api-key': 'l7xx5a9a4eea46054ef38b18b5e6fdbd2c5a',
[0] accept: '/',
[0] referer: 'http://localhost:3000/auth/verifyNo',
[0] 'accept-encoding': 'gzip, deflate, br',
[0] 'accept-language': 'en-IN,en-GB;q=0.8,en-US;q=0.6,en;q=0.4' }
morgan log:
[0] POST /cr/v2/merchant/verify/identifier 200 2.432 ms - 23
After all this, I get no error but also no response data from the backend.
The funny thing is that my REST client (Postman) works fine; I get data in Postman.
This is a tricky one. It is an OPTIONS request, that is, a preflight request from the browser. The preflight contains two HTTP headers, Access-Control-Request-Headers and Access-Control-Request-Method.
But it looks like you are allowing every method in your CORS options.
You can enable preflight requests like this:
app.options('*', cors())
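With the code from the question that would look roughly like this; reusing the existing corsOptions keeps the whitelist and credentials settings on the preflight response instead of the wide-open defaults of cors() with no arguments:

// Register an explicit handler for the OPTIONS preflight on every route,
// placed before the route definitions so it runs first
server.options('*', cors(corsOptions));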