my application: When you send a request from a browser to my node server, my node server will request an origin website, download all of its static files (including code) and serve them back to the user. The next time you visit my node server, it will serve all the content back from node instead of requesting the origin.
When i make a request for a font awesome file from node
http://example.com/modules/megamenu/fonts/fontawesome-webfont.woff?v=4.2.0
The file's content is different from when i request the same url with cUrl.
This is causing this error in the browser when i return the file from node back to the browser:
Failed to decode downloaded font: http://nodeDomain.test/modules/megamenu/fonts/fontawesome-webfont.woff?v=4.2.0
If i copy and paste the content from the file i requested via curl into the file stored on my node server, the error disappears and all the font awesome stuff works.
Here are the headers I am sending with the request to the origin server from node.
{
connection: 'keep-alive',
pragma: 'no-cache',
'cache-control': 'no-cache',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
accept: '*/*',
referer: 'http://example.com/modules/megamenu/css/font-awesome.min.css',
'accept-language': 'en-US,en;q=0.8',
cookie: 'PrestaShop-a30a9934ef476d11b.....'
}
I tried to see what headers were being sent when doing the curl request from the command line, but I cannot figure out how to do it.
______Node code used to fetch file_______
Url: in options is the one stated above
headers: are the browsers request headers
// Fetch the origin URL with the browser's forwarded headers and buffer the
// response body before persisting it.
// NOTE(review): `rquest` is presumably the npm `request` module instance —
// confirm the variable name upstream; it is not defined in this snippet.
var options = {
url: originRequestPath,
headers: requestHeaders
}
var originPage = rquest(options);
// BUG: initialising the accumulator to '' means every Buffer chunk appended
// below is coerced to a UTF-8 string, which corrupts binary payloads such as
// .woff fonts (this is the defect diagnosed in the answer further down —
// collect Buffer chunks and Buffer.concat them instead).
var responseBody = '';
var resHeads = '';
originPage.on('response', function(res)
{
//store response headers locally
}
// BUG: the .on('response', ...) call above is never closed with ')' — this
// snippet does not parse as posted.
originPage.on('data', function(chunk)
{
responseBody += chunk;
});
originPage.on('end', function()
{
storeData.storePageData(storeFilePath, responseBody);
});
__________Store Function below________________
// Persist fetched page/asset content to disk at storePath.
// NOTE(review): `fs.outputFile` is the fs-extra API (it creates missing parent
// directories, unlike core fs). Writing the UTF-8 string accumulated above is
// what corrupts binary files; passing a Buffer here would be lossless.
exp.storePageData = function(storePath, pageContent)
{
fs.outputFile(storePath, pageContent, function(err) {
// best-effort write: errors are only logged, never propagated to the caller
if(err){ console.log(err)}
});
}
I believe the problem with your code is that you are converting your buffer output to a UTF-8 string. Since you are appending each buffer chunk to an (initially empty) string with responseBody += chunk, every chunk is converted to a UTF-8 string. Thus you are losing some data for binary files. Try it this way:
// Corrected fetch: accumulate raw Buffer chunks and reassemble them with
// Buffer.concat so binary payloads (fonts, images, ...) survive intact.
// NOTE(review): `rquest` is the question's request() instance (name preserved).
var originPage = rquest(options);
var chunks = [];

originPage.on('response', function (res) {
  //store response headers locally
}); // FIX: the original snippet never closed this .on(...) call — syntax error

originPage.on('data', function (chunk) {
  // Keep each chunk as a Buffer; concatenating onto a string would decode
  // the bytes as UTF-8 and corrupt binary data.
  chunks.push(chunk);
});

originPage.on('end', function () {
  // Losslessly reassemble the full binary payload.
  var data = Buffer.concat(chunks);
  //send data to browser and store content locally
});
Related
I'm wondering what I'm doing wrong with this lambda function.
Goal:
Send http options to fetch an PDF and forward it to consumer from Lambda service.
Current code:
"use strict";
const http = require("http");
/**
 * Fetch a document over HTTP and resolve with a Lambda-style response whose
 * body is the Base64-encoded payload.
 *
 * @param {object} options - Node http.request options (hostname, port, path, method, headers).
 * @param {object} event - The Lambda event (unused here; kept for interface compatibility).
 * @returns {Promise<{body: string, isBase64Encoded: boolean, statusCode: number, headers: object}>}
 *
 * Fixes over the original:
 *  - .on('error')/.end() were chained onto `res` (the IncomingMessage), so
 *    req.end() was never called, the request was never sent, and the Lambda
 *    timed out.
 *  - `body += chunk` decoded binary chunks as UTF-8 and corrupted the PDF;
 *    we now collect Buffers and Buffer.concat them.
 *  - The body is Base64-encoded, which is what the consumer needs.
 * NOTE(review): if the origin honours `Accept-encoding: gzip`, the payload
 * arrives compressed and would still need decompression — confirm the
 * request headers used in production.
 */
function getPDF(options, event) {
  console.log(options);
  return new Promise((resolve, reject) => {
    const req = http.request(options, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        const payload = Buffer.concat(chunks);
        console.log(res.statusCode, res.headers, payload.length);
        resolve({
          body: payload.toString("base64"),
          isBase64Encoded: true,
          statusCode: res.statusCode,
          headers: {
            ...res.headers,
            //'Content-type': 'application/pdf',
            //'content-disposition': 'attachment; filename=test.pdf'
          }
        });
      });
      res.on("error", reject);
    });
    req.on("error", reject);
    // FIX: end() must be called on the ClientRequest to actually send it.
    req.end();
  });
}
exports.handler = async (event) => {
try {
const response = await getPDF(event.options, event);
return response;
} catch (error) {
console.error(error);
return {
statusCode: 500,
body: JSON.stringify(error),
headers: {}
};
}
};
Whatever I've tried, it either times out or does not produce the actually needed response of a Base64-encoded PDF.
Params for testing would look something like this:
{
"options": {
"hostname": "www.africau.edu",
"port": 80,
"path": "images/default/sample.pdf",
"method": "GET",
"headers": {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36",
"Accept": "application/pdf",
"Accept-encoding": "gzip, deflate, br"
}
}
}
Current logs -
Function Logs
START RequestId: 8d6be86c-788d-4f49-8305-8caf377cd32e Version: $LATEST
2021-09-28T09:01:21.507Z 8d6be86c-788d-4f49-8305-8caf377cd32e INFO {
hostname: 'www.africau.edu',
port: 80,
path: 'images/default/sample.pdf',
method: 'GET',
headers: {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36',
Accept: 'application/pdf',
'Accept-encoding': 'gzip, deflate, br'
}
}
END RequestId: 8d6be86c-788d-4f49-8305-8caf377cd32e
REPORT RequestId: 8d6be86c-788d-4f49-8305-8caf377cd32e Duration: 11011.54 ms Billed Duration: 11000 ms Memory Size: 128 MB Max Memory Used: 54 MB Init Duration: 152.43 ms
2021-09-28T09:01:32.494Z 8d6be86c-788d-4f49-8305-8caf377cd32e Task timed out after 11.01 seconds
Your approach has an underlying conceptual problem - it may take time to execute. The time that you don't have when you run things in lambda. Your lambda "technically" has the maximum of 15 minutes to finish the execution (although you explicitly have to configure it. I think by default it's 10s), but if you trigger it from AWS API Gateway, that goes down to 30 seconds and this is not a limit you can configure. It's the total max. Moreover your lambda response cannot be larger than 6MB and it is normally supposed to be JSON, so you would have to convert your file to Base64, but again, if you serve that file via api gateway even that limit goes down once again... What you're trying just cannot be done reliably with lambda in this way. There is a different way however that would actually be recommended by AWS.
You send a request to API Gateway that triggers a lambda
Lambda looks up if the requested file already exists in S3
If it doesn't exist:
The lambda downloads a file and puts it into S3. Note that you can now set up S3 bucket policy so that file stays in S3 only for certain amount of time. You probably don't want to keep it there forever, but it's nice to keep it cached for a while in case the user tries to re-download your PDF. This way they will be able to get the response much faster
The lambda then generates a pre-signed S3 URL to the freshly downloaded file (a special URL that you can request from S3 that will be valid for another few minutes only) and returns it in the response
If it already exists:
the lambda just generates the pre-signed S3 URL and returns it in the response
Your client (UI application I presume) has to generate a consecutive request to the pre-signed url received in the response (so it talks directly to S3). This way, even if your user has slow internet connection and they need 20 minutes to download the file, you don't get any timeouts... well you will still get some if the file is really large and the lambda cannot download it quickly enough, but that would require a longer discussion. In this case I'm assuming your file is under 15MB.
I'm currently learning Node and I'm looking for an HTTP library that would allow me to send a GET request without downloading the server response content (body).
I need to send very large amount of http requests every minute. However i do not need to read their content (also to save bandwidth). I can't use HEAD for this purpose.
Is there any way to avoid downloading response body using node-requests, or perhaps any other library - could be used?
My sample code using node-request:
// Fire a GET and report only host + status code; the body is ignored.
const options = {
  url: "https://google.com",
  headers: {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36'
  }
}
//How to avoid downloading a whole response?
function callback(err, response, body) {
  // FIX: on a network error `response` is undefined; the original
  // dereferenced it unconditionally and crashed instead of reporting.
  if (err) {
    console.error(err.message);
    return;
  }
  console.log(response.request.uri.host + ' - ' + response.statusCode);
}
request(options, callback);
An HTTP GET by standard fetches the file content; you cannot avoid downloading (receiving) the response, but you can ignore it — which is basically what you are doing.
// Let the request library drain the response internally; the callback simply
// ignores its arguments, so no body processing (or memory retention) happens.
request(options, (err, response, body)=>{
//just return from here don't need to process anything
});
EDIT1:
To just use some bytes of the response, you can use http.get and get the data using the data event. From the doc:
// Quoted from the Node.js docs: the body arrives incrementally via 'data'
// events. NOTE(review): to read only a few bytes you could res.destroy()
// inside the 'data' handler once enough has arrived — confirm against the
// http module docs before relying on it.
http.get('http://nodejs.org/dist/index.json', (res) => {
res.setEncoding('utf8');
let rawData = '';
res.on('data', (chunk) => { rawData += chunk; });
res.on('end', () => {
//this is when the response will end
});
}).on('error', (e) => {
console.error(`Got error: ${e.message}`);
});
I'm having an issue with express app. I'm using multer to upload a file, then using res.download to send the file back. This seems to work with text files but images are not working. When I send the file to the client, the file size is actually a little bit smaller than what is on the server. It seems as is the full file isn't being transferred.
I'm not doing anything fancy with the response I'm just using res.download. I've researched basically every article I can find and it seems like this works for everyone else.
Only text files are working. Word, excel, pdfs are all saying they're corrupted when downloaded.
EDIT: Here is the function that runs res.download. Its passed the file path, mimetype etc.
// Express handler: stream the file at req.body.path back as an attachment.
// The server side sends the raw bytes correctly via res.download; if the
// client receives a smaller, corrupted file, the mangling usually happens on
// the client (e.g. axios decoding a binary body as text) — see the thread.
function downloadFile(req, res) {
let fpath = req.body.path;
let originalName = req.body.originalName;
// NOTE(review): mimetype and filename are read but never used in this handler
let mimetype = req.body.mimetype;
let filename = req.body.filename;
res.download(fpath, originalName, function(err) {
if (err) {
// headers may already be partially sent here, so only log the failure
console.log(err);
}
});
}
EDIT: Here is my redux thunk that makes the request and triggers the file download. The download function comes from the downloadjs library.
// Redux thunk: request the file from the API and hand it to downloadjs.
// FIX: axios' default responseType ('json'/text) decodes binary response
// bodies as UTF-8 text, corrupting them and producing a file smaller than
// the one on the server — exactly the symptom described above. Requesting
// the body as a Blob keeps the bytes intact.
export const downloadFile = (path, originalName, mimetype, filename) => {
  return dispatch => {
    return axios.post('/api/v1/quotes/downloadFile',
      { path: path, originalName: originalName, mimetype: mimetype, filename: filename },
      { responseType: 'blob' })
      .then(res => {
        if (res.status !== 200) {
          ErrorHandler.logError(res);
        }
        else {
          // res.data is already a Blob; pass it straight to downloadjs.
          download(res.data, originalName, mimetype);
        }
      }).catch(function(error) {
        ErrorHandler.logError(error);
      });
  }
}
EDIT: Here is a small sample of what I see in the network tab. It seems like its the image contents, but the size is smaller than what is on the server and when I try to open it I get an unsupported file type error.
PNG
IHDR{>õIÖÕsRGB®ÎégAMA±üa pHYsÃÃÇo¨d+{IDATx^íÝml\×ßq¾jº]´Mv´¤ÛÅvÛnÛEßt4/vQ[äÅ¢¯òb>-
él²æJv$Ǧ(ѦDÉR$R
¥V-Q6mÅ4kF¶®,U%ÊYS¶åDr¼5ÿ=ÿ{Ï9sîÌ!Gßp#Î}¾çÞ9÷7÷Þ¹Ó!¸o/ÛÚaï>MOJ4µ¸aíÐF{÷ég?ùó?µÚa a=öFØHa a=öFØHa
Request Header
Accept: application/json, text/plain, */*
Accept-Encoding: gzip, deflate, br
Accept-Language: en-US,en;q=0.9
Connection: keep-alive
Content-Length: 160
Content-Type: application/json;charset=UTF-8
Host: localhost:3000
Origin: http://localhost:3000
Referer: http://localhost:3000/Quote
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36
Response Header
accept-ranges: bytes
cache-control: public, max-age=0
connection: close
content-disposition: attachment; filename="sSubLineConfigIds.PNG"
content-length: 11238
content-type: application/octet-stream
date: Wed, 17 Jul 2019 19:03:54 GMT
etag: W/"2be6-16c0151b84a"
last-modified: Wed, 17 Jul 2019 19:03:48 GMT
x-powered-by: Express
I was able to figure this out. What I ended up doing is converting the file into base64 and setting the download link to that base64 string.
The node function that gets hit to build the base64 string.
// Read the file at req.body.path and respond with a data: URI containing its
// Base64-encoded contents, so the client can use it directly as a link href.
// FIX: the original did not return after sending the 500, so on a read error
// it fell through, crashed on Buffer.from(undefined), and attempted to send
// a second response.
function downloadFile(req, res) {
  let fpath = req.body.path;
  let mimetype = req.body.mimetype;
  fs.readFile(fpath, function (err, data) {
    if (err) return res.status(500).send('File could not be downloaded');
    var base64 = Buffer.from(data).toString('base64');
    base64 = 'data:' + mimetype + ';base64,' + base64;
    res.send(base64);
  });
}
Here is the client side code that builds a link, simulates click, and sets the source link equal to the base64 string.
// Redux thunk: fetch the server-built data: URI and trigger a browser
// download by clicking a synthetic anchor pointing at it.
export const downloadFile = (path, originalName, mimetype, filename) => {
  return dispatch => {
    const payload = { path: path, originalName: originalName, mimetype: mimetype, filename: filename };
    return axios.post('/api/v1/quotes/downloadFile', payload)
      .then(res => {
        if (res.status !== 200) {
          ErrorHandler.logError(res);
          return;
        }
        // The response body is a data: URI; use it as the link source.
        const anchor = document.createElement("a");
        anchor.href = res.data;
        anchor.download = originalName;
        anchor.click();
      }).catch(function (error) {
        ErrorHandler.logError(error);
      });
  }
}
Things looks fine as per the code shared.
It seems this request is initiated through XHR from your front end side than you have to write the download logic to convert the response to blob and then create a file for download as mentioned how-to-create-a-dynamic-file-link-for-download-in-javascript
I was trying to make a simple request to site. it should get html text, but it gets ' '
NPM module here: github.com/request/request
Code:
// POST a form payload to the inventory endpoint, mimicking the browser's
// headers, and save the HTML response to disk.
var fs = require('fs');
var request = require('request');
var options = {
url:'https://sample.site/phpLoaders/getInventory/getInventory.php',
encoding : 'utf8',
gzip : true,
forever: true,
headers: {
'Host': 'sample.site',
'Connection': 'keep-alive',
// BUG(review): hard-coding Content-Length is fragile — `request` computes the
// real length from `form`, and a stale value can make the server truncate or
// reject the body. Drop this header.
'Content-Length': '58',
'Cache-Control': 'max-age=0',
'Accept': '*/*',
'Origin': 'https://csgosell.com',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Referer': 'https://sample.site/',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4',
'Cookie': 'my-cookies from browser'
},
form: {
stage:'bot',
// BUG: 76561198284997423 exceeds Number.MAX_SAFE_INTEGER (2^53 - 1), so this
// numeric literal cannot be represented exactly — pass the SteamID as a string.
steamId:76561198284997423,
hasBonus:false,
coins:0
}
};
request.post(options,
function(error, response, body){
// BUG: `response` is undefined when `error` is set, yet statusCode is read
// before the error check — move this log inside the success branch.
console.log(response.statusCode);
if (!error) {
fs.writeFileSync('site.html', body);
}
else{
console.log(error);
}
}
);
Chrome request: https://i.stack.imgur.com/zKQo5.png
Nodejs request:https://i.stack.imgur.com/yH9U3.png
the difference is in headers:
:authority:csgosell.com
:method:POST :path:/phpLoaders/getInventory/getInventory.php :scheme:https
after some googling, I understood that it is HTTP/2, and tried to put it into another agent's options, but nothing changed.
// Attempt to speak HTTP/2 (SPDY) to the origin by giving `request` a spdy agent.
var spdy = require('spdy');
var agent = spdy.createAgent({
host: 'sample.site',
port: 443,
spdy: {
ssl: true,
}
}).once('error', function (err) {
// BUG(review): EventEmitter#emit takes the event NAME first — this emits an
// event keyed by the Error object instead of re-emitting 'error'. It should
// be this.emit('error', err).
this.emit(err);
});
options.agent = agent;
To answer your question i will copy/paste a part of my code that enable you to receive a post request from your frontend application(angularJS) to your backend application (NodeJS), and another function that enable you to do the inverse send a post request from nodeJS to another application (that might consume it):
1) receive a request send from angularJS or whatever inside your nodeJS app
//Import the necessary libraries/declare the necessary objects
var express = require("express");
var myParser = require("body-parser");
var app = express();

// we will need the following imports for the inverse operation
var https = require('https')
var querystring = require('querystring')

// we need these variables for the post request:
var Vorname;
var Name;
var e_mail;
var Strasse;

// parse application/x-www-form-urlencoded bodies into request.body
app.use(myParser.urlencoded({extended : true}));

// the post request is sent to http://localhost:8080/yourpath
app.post("/yourpath", function(request, response) {
    // reject requests without a parsed body
    // FIX: the original wrote `res.sendStatus(400)`, but the parameter is
    // named `response`, so a missing body crashed with a ReferenceError.
    if (!request.body) return response.sendStatus(400);

    // fill the variables with the user data
    Vorname = request.body.Vorname;
    Name = request.body.Name;
    e_mail = request.body.e_mail;
    Strasse = request.body.Strasse;

    response.status(200).send(request.body.title);
});
2) Do the inverse send a POST request from a nodeJS application to another application
/**
 * Send a urlencoded POST request to the remote API over HTTPS and log the
 * complete response body (or any error).
 *
 * Fixes over the original:
 *  - Content-Length used the string length; multi-byte (non-ASCII) characters
 *    make that smaller than the actual byte count — use Buffer.byteLength.
 *  - The full partial body was logged on every 'data' chunk, duplicating
 *    output; it is now logged once on 'end'.
 *  - The payload variable was misleadingly called jsonData although it is
 *    urlencoded form data (matching the Content-Type header).
 */
function sendPostRequest()
{
  // prepare the urlencoded payload we are going to send
  var formData = querystring.stringify({
    "Land": "Land",
    "Vorname": "Vorname",
    "Name": "Name",
    "Strasse": Strasse,
  });

  var post_options = {
    host: 'achref.gassoumi.de',
    port: '443',
    method: 'POST',
    path: '/api/mAPI',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      // FIX: byte length, not character count
      'Content-Length': Buffer.byteLength(formData)
    }
  };

  // request object
  var post_req = https.request(post_options, function(res) {
    var result = '';
    res.on('data', function (chunk) {
      result += chunk;
    });
    res.on('end', function () {
      // show the complete response body of our post request
      console.log(result);
    });
    res.on('error', function (err) {
      // show possible error while receiving the result of our post request
      console.log(err);
    })
  });

  post_req.on('error', function (err) {
    // show error if the post request did not succeed
    console.log(err);
  });

  // post the data
  post_req.write(formData);
  post_req.end();
  // ps : I used an https post request; you could use http if you want, but you
  // have to change the imported library and some parts of the code
}
So finally , I hope this answer will helps anyone who is looking on how to get a post request in node JS and how to send a Post request from nodeJS application.
For further details about how to receive a post request please read the npm documentation for body-parser library : npm official website documentation
Okay so i have the following route for when a file is being uploaded:
// Upload route: strips spaces from the uploaded file's name, ensures the
// target directory exists, moves the file there, and returns a JWT that
// encodes the media descriptor.
router.route('/moduleUpload')
.post(function (request, response) {
// remove spaces from the original file name (/ +?/g matches each space)
request.files.file.originalname = request.files.file.originalname.replace(/ +?/g, '');
var media = new Media(request.files.file, './user_resources/module/' + request.body.module_id + '/');
if (!fs.existsSync(media.targetDir)) {
// BUG(review): fs.mkdirSync is synchronous and takes (path, mode) only —
// this callback is never invoked; a failure throws instead.
fs.mkdirSync(media.targetDir, 0777, function (err) {
if (err) {
console.log(err);
response.send("ERROR! Can't make the directory! \n"); // echo the result back
}
});
}
// BUG(review): moveFile streams asynchronously, but the token and the 200
// response below are sent immediately — a client can request the resource
// before the copy has finished (race condition).
moveFile(media);
var token = jwt.encode({
mediaObject: media
}, require('../secret')());
response.status(200).json(token);
});
As you can see this uses the support of a function called moveFile(media);
So here is that function:
/**
 * Copy an uploaded file from its temp path to media.targetDir.
 *
 * @param {object} media - { file: { path, originalname }, targetDir }
 * @param {function} [done] - optional (new, backward-compatible) callback
 *   invoked with (err) once the copy has fully completed or failed, so
 *   callers can wait before responding.
 *
 * FIX: completion is now signalled on the destination's 'finish' event; the
 * original listened to the source's 'end', which fires before the destination
 * file has been flushed to disk. Errors on the destination stream are now
 * handled too.
 */
function moveFile(media, done) {
  var source = fs.createReadStream(media.file.path);
  var dest = fs.createWriteStream(media.targetDir + media.file.originalname);
  var settled = false;

  function settle(err) {
    if (settled) return; // guard against double notification
    settled = true;
    if (err) {
      console.log('error while moving file');
    } else {
      console.log('file has been moved!');
    }
    if (done) done(err || null);
  }

  source.pipe(dest);
  dest.on('finish', function () { settle(null); });
  source.on('error', settle);
  dest.on('error', settle);
}
This works and the console prints out:
file has been moved!
Now after this i attempt to download / stream the video file i use the following route:
// Resource route: decodes the JWT back into the media descriptor and streams
// the stored file to the client.
router.route('/resource/:encodedString')
.all(function (req, res) {
var decoded = jwt.decode(req.params.encodedString, require('../secret')());
var mediaObject = decoded.mediaObject;
// BUG(review): Express res.header(field, value) takes only two arguments —
// the "Content-Length: ..." and "mimeType:..." strings below are silently
// discarded, so the response goes out without a Content-Type or length.
// Browsers likely need a proper Content-Type (video/mp4) and HTTP Range
// support to play <video>, which would explain why only mp4 files fail.
res.header('content-disposition', 'filename=' + mediaObject.file.originalname, "Content-Length: " + mediaObject.file.size, "mimeType:" + mediaObject.file.mimetype);
var stream = fs.createReadStream(mediaObject.targetDir + mediaObject.file.originalname);
stream.pipe(res);
stream.on('end', function () {
console.log("Reading operation completed.");
// NOTE(review): pipe() already ends the response; this res.end() is redundant
res.end();
});
});
Using the encoded string i first decode it to find the media object and then stream it back.
This works WITH EVERY file except mp4 / video files.
Here is the complete console print out of when i upload a video:
listening on *:8105
file has been moved!
Reading operation completed.
And here are my browser request headers:
Remote Address:***
Request ****
Request Method:GET
Status Code:200 OK
Response Headers
view source
Access-Control-Allow-Credentials:true
Access-Control-Allow-Origin:http://localhost
Connection:keep-alive
Content-Length:5
Content-Type:application/octet-stream
Date:Sun, 21 Jun 2015 17:13:24 GMT
Set-Cookie:io=1T7R1vmJ3xV-k0ApAABH
Request Headers
view source
Accept:*/*
Accept-Encoding:gzip, deflate, sdch
Accept-Language:en-US,en;q=0.8
Connection:keep-alive
Cookie:io=1T7R1vmJ3xV-k0ApAABH; lb_login_id=117
Host:angular.learningbankapp.com:8105
Origin:http://localhost
Referer:http://localhost/learningbankapp/
User-Agent:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36
So can someone tell me what im doing wrong?