I am sending a pdf file from express server like this -
app.get("/pdf", (req, res) => {
var file = fs.createReadStream('./public/file.pdf');
res.contentType("application/pdf");
res.send(file);
})
I want to display it on react side (Note- display pdf , not download)
I tried converting it into blob but it shows blank pdf
I am calling below function on click in react end
viewHandler = async () => {
axios.get('http://localhost:4000/pdf')
.then(response => {
console.log(response.data);
//Create a Blob from the PDF Stream
const file = new Blob(
[response.data],
{ type: 'application/pdf' });
//Build a URL from the file
const fileURL = URL.createObjectURL(file);
//Open the URL on new Window
console.log("Asdasd");
window.open(fileURL);
})
.catch(error => {
console.log(error);
});
};
I want to display pdf without using static url to server pdf file
like -
<object data='http://localhost:4000/file.pdf'
type='application/pdf'
width='100%'
height='700px' />
Related
I am trying to pass an image uploaded from a react app through express to a managed s3 bucket. The platform/host I am using creates and manages the s3 bucket and generates upload and access urls. This all works fine (I have tested a generated upload url in postman with an image in a binary body and it worked perfectly).
My problem is passing the image through express. I am using multer to get the image from the form but I am assuming multer is turning that image into some kind of file object and s3 is expecting some sort of blob or stream.
In following code, the image in req.file exists, I get a 200 response from s3 with no errors and when I visit the asset url the url works, but the image itself is missing.
const router = Router();
const upload = multer()
router.post('/', upload.single('file'), async (req, res) => {
console.log(req.file)
const asset = req.file
const assetPath = req.headers['asset-path']
let s3URLs = await getPresignedURLS(assetPath)
const sendAsset = await fetch(
s3URLs.urls[0].upload_url, // the s3 upload url
{
method: 'PUT',
headers: {
"Content-Type": asset.mimetype
},
body: asset,
redirect: 'follow'
}
)
console.log("s3 response", sendAsset)
res.status(200).json({"url": s3URLs.urls[0].access_url });
});
export default router;
I am not sure what to do to convert what multer gives me to something that aws s3 will accept. I am also open to getting rid of multer if there is an easier way to upload binary files to express.
Instead of multer, you can use multiparty to get file data from request object. And to upload file to s3 bucket you can use aws-sdk.
const AWS = require("aws-sdk");
const multiparty = require("multiparty");
/**
* Helper method which takes the request object and returns a promise with a data.
*/
const getDataFromRequest = (req) =>
new Promise(async(resolve, reject) => {
const form = new multiparty.Form();
await form.parse(req, (err, fields, files) => {
if (err) reject(err);
const bucketname = fields.bucketname[0];
const subfoldername = fields.subfoldername[0];
const file = files["file"][0]; // get the file from the returned files object
if (!file) reject("File was not found in form data.");
else resolve({
file,
bucketname,
subfoldername
});
});
});
/**
* Helper method which takes the request object and returns a promise with the AWS S3 object details.
*/
const uploadFileToS3Bucket = (
file,
bucketname,
subfoldername,
options = {}
) => {
const s3 = new AWS.S3();
// turn the file into a buffer for uploading
const buffer = readFileSync(file.path);
var originalname = file.originalFilename;
var attach_split = originalname.split(".");
var name = attach_split[0];
// generate a new random file name
const fileName = name;
// the extension of your file
const extension = extname(file.path);
console.log(`${fileName}${extension}`);
const params = {
Bucket: bucketname, //Bucketname
ACL: "private", //Permission
Key: join(`${subfoldername}/`, `${fileName}${extension}`), // File name you want to save as in S3
Body: buffer, // Content of file
};
// return a promise
return new Promise((resolve, reject) => {
return s3.upload(params, (err, result) => {
if (err) reject(err);
else resolve(result); // return the values of the successful AWS S3 request
});
});
};
router.post('/', upload.single('file'), async(req, res) => {
try {
// extract the file from the request object
const {
file,
bucketname,
subfoldername
} = await getDataFromRequest(req);
// Upload File to specified bucket
const {
Location,
ETag,
Bucket,
Key
} = await uploadFileToS3Bucket(
file,
bucketname,
subfoldername
);
let response = {};
res["Location"] = Location;
response["ETag"] = ETag;
response["Bucket"] = Bucket;
response["Key"] = Key;
res.status(200).json(response);
} catch (error) {
throw error;
}
});
Request body will be form data with following fields
bucketname:
subfoldername:
file: FileData
For anyone that ever stumbles across this question the solution was to create an custom multer storage engine. Inside the engine you get access to the file with a stream property that s3 accepted (with the correct headers).
I am generating a pdf with html2pdf, and I have managed to generate the pdf, but now I need to send this pdf to my server in node or save it directly in a folder on my server, now the pdf is downloaded in the path indicated by the client, but I I need to have a copy on my server, I have tried with the output parameter but I have not achieved anything, this is my current code:
document.addEventListener("DOMContentLoaded", () => {
// Escuchamos el click del botón
const $boton = document.querySelector("#btnCrearPdf");
$boton.addEventListener("click", () => {
const $elementoParaConvertir = document.body; // <-- Aquí puedes elegir cualquier elemento del DOM
html2pdf()
.set({
margin: 1,
filename: 'documento.pdf',
image: {
type: 'jpeg',
quality: 0.98
},
html2canvas: {
scale: 3, // A mayor escala, mejores gráficos, pero más peso
letterRendering: true,
},
jsPDF: {
unit: "in",
format: "a3",
orientation: 'portrait' // landscape o portrait
}
})
.from($elementoParaConvertir)
.save()
.output('./123123123.pdf', 'f')
.then(pdfResult => {
console.log(pdfResult);
})
.catch(err => console.log(err));
});
});
But I can't figure out how to send the pdf to the server or save it directly from the frontend, does anyone know how I can save the pdf that is generated on my server? Thanks a lot.
You need to create e.g. a PUT endpoint on you backend-server and send the generated file from the client to the server.
The data could be send using something like this:
const filename = 'documento.pdf';
html2pdf()
.set({
filename,
// other options...
})
.from($elementoParaConvertir)
.toPdf()
.output('datauristring')
.then(function(pdfBase64) {
const file = new File(
[pdfBase64],
filename,
{type: 'application/pdf'}
);
const formData = new FormData();
formData.append("file", file);
fetch('/upload', {
method: 'PUT',
body: formData,
})
.then(response => response.json())
.then(result => {
console.log('Success:', result);
})
.catch(error => {
console.error('Error:', error);
});
});
Helpful posts:
Sending html2pdf generated pdf back to the server
html2pdf#181 - send generated pdf as an email attachment
html2pdf#271 - Save PDF into local server
After setup file sending given by #mojoaxel.Firstly you have to setup document storing operation.I am using multer to storing pdf you can use other libraries.
See below code for configuring document save functionality.
var multer = require("multer");
var fs = require('fs');
var Storage = multer.diskStorage({
destination: function (req, file, cb) {
let dir = 'document/' + 'Home'; // Your directory
if (!fs.existsSync(dir)) { // check whether directory exists or not if not then create new directory
fs.mkdirSync(dir);
}
cb(null, dir);
},
filename: function (req, file, cb) {
let inputData = Common.isEmpty(req.body.data) === false ? JSON.parse(req.body.data) : req.body; // check if you send formData or not if yes then parsing data else send as it is
cb(null, file.originalname);
}
});
var upload = multer({
storage: Storage
});
router.post("/callurl", upload.array('file') ,function(req, res){
// your code here
})
I need to axios to get a pdf buffer from nodejs and more some data info.
If I set the axios post with responseType: "arraybuffer", and I make node send only the pdf buffer it is working.
But I need to return some more data info from node and if I get rid off the "responseType:arrayBuffer", in order to receive a json, I am unable to convert pdfBuffer to pdf with new Blob function.
I get BLOB invalid.
What am I doing wrong?
This is not working:
//front end:
const resp=await axios.post("atestadosClientes/generatePDF", data);
const file = new Blob([response.data.pdfBuffer], {
type: "application/pdf",
});
//Build a URL from the file
const fileURL = URL.createObjectURL(file);
//Node response:generate pdf buffer
return res.send({
message: "Success",
id: resp[0].insertId,
pdfBuffer: pdfSignedBuffer, //pdf buffer
});
This is working:
//front end:
const resp=await axios.post("atestadosClientes/generatePDF", data, {
responseType: "arraybuffer",
});
},
const file = new Blob([response.data], {
type: "application/pdf",
});
//Build a URL from the file
const fileURL = URL.createObjectURL(file);
//Node response:generate pdf buffer
return res.send(pdfBuffer)
I am having json data, i want to export that as data.xlsx. For example
I have table there is a export button, if user click that export button, this ng-click function will work.
controllers.js:
$scope.exportDataXlsx = function () {
var json = $scope.contacts;
console.log(json);
$http.post('/api/exportcontactsxlsx', json).then(function (response) {
$state.reload();
toastr.success("exported successfully!");
});
};
My api code:
exports.exportContactsXlsx = function (req, res) {
var data = req.body;
var xls = json2xls(data);
fs.writeFileSync('data.xlsx', xls, 'binary');
}
I am using npm package called jsonexport.
If i click export, the file will downloaded to my project.
But i need output, when user click export button The 'data.xlsx' file should be downloaded in chrome left corner, and in user default download directory.
after saving your file in your server. what you need is to send the name of your file so you can access it from the browser :
$scope.exportDataXlsx = function () {
var json = $scope.contacts;
console.log(json);
$http.post('/api/exportcontactsxlsx', json).then(function (response) {
downloadFile(response.fileName) // make sure response.fileName has the file name
$state.reload();
toastr.success("exported successfully!");
});
};
function downloadFile(name) {
var link = document.createElement('a');
link.download = name;
link.href = '/files/' + name;
link.click();
}
server :
// in app.js, to serve your files as static files
app.use("/files", express.static(path.join(__dirname, 'files')));
// send the the name of your file to the client
exports.exportContactsXlsx = function (req, res) {
var data = req.body;
var xls = json2xls(data);
fs.writeFileSync(path.join(__dirname,'../files/data.xlsx'), xls, 'binary');
//changed the directory
res.json({ fileName: 'data.xlsx' });
}
You have to set parametrs in the res headers.
You can try for the following:
var fields = ['firstName', 'email'];
var csv = json2csv({ data: resp, fields: fields });
res.set('Cache-Control', 'max-age=0, no-cache, must-revalidate, proxy-revalidate');
res.set('Content-Type','application/force-download');
res.set('Content-Type','application/octet-stream');
res.set('Content-Type','application/download');
res.set('Content-Disposition','attachment;filename=userList.csv');
res.set('Content-Transfer-Encoding','binary');
res.send(csv);
SO when you hit the API in browser, it will ask for SaveFile option and if user clicks OK it will be downloaded to default Download directory of chrome.
So I have a react native application that's kind of like slack and I'm trying to do image uploads to s3.
I went with getSignedUrl route.
So the client pics a photo,
fetches a signed url to the bucket
then changes the url on the server for that user
then a put request to the signed url that was fetched.
It mostly works the files get in the right bucket and they are photos. but
A) the link makes me download the file instead of displaying it in browser.
B) the file isn't an image...its an xml file and can only be opened in photoshop
I've tried changing the type in the data.append type,
Adding header to the signed request
Adding x-amz- headers to the signed request
hard coding the file type in server
converting image to base64 string with a native module but It still is coming up wrong.
Client Side calls to server
uploadToServer() {
// alert('coming soon!');
//Go back to profile page
this.props.navigation.goBack();
//grab user from navigator params
let user = this.props.navigation.state.params.user
let pic = this.state.selected;
// turn uri into base64
NativeModules.ReadImageData.readImage(pic.uri, (image) => {
console.log(image);
var data = new FormData();
data.append('picture', {
uri: image,
name: pic.filename,
type: 'image/jpeg'
});
//get the signed Url for uploading
axios.post(api.getPhotoUrl, {fileName: `${pic.filename}`}).then((res) => {
console.log("get Photo URL response", res);
//update the user with the new url
axios.patch(api.fetchUserByID(user.id), {profileUrl: res.data.url}).then((resp) => {
console.log("Update User response", resp.data);
}).catch(err => errorHandler(err));
//upload the photo using the signed request url given to me.
//DO I NEED TO TURN DATA INTO A BLOB?
fetch(res.data.signedRequest, {
method: 'PUT',
body: data
}).then((response) => {
console.log("UPLOAD PHOTO RESPONSE: ", response);
}).catch(err => errorHandler(err))
}).catch((err) => errorHandler(err))
})
}
GET SIGNED URL logic from on out
router.post('/users/sign-s3', (req, res) => {
const s3 = new aws.S3({signatureVersion: 'v4', region: 'us-east-2'});
const fileName = `${req.user.id}-${req.body.fileName}`;
const fileType = req.body.fileType;
const s3Params = {
Bucket: AWS_S3_BUCKET,
Key: `images/${fileName}`,
Expires: 60,
ContentType: 'image/jpeg',
ACL: 'public-read'
};
s3.getSignedUrl('putObject', s3Params, (err, data) => {
if (err) {
console.log(err);
return res.end();
}
const returnData = {
signedRequest: data,
url: `https://${AWS_S3_BUCKET}.s3.amazonaws.com/${s3Params.Key}`
};
res.write(JSON.stringify(returnData));
res.end();
return null;
});
});
You need to change content type from image to supported xml format if you want it to be displayed in browser.
Refer this and set content type accordingly.