I'm trying to create a Node server that generates a PDF on-the-fly using PDFKit. The PDF is generated based on parameters from a POST request (via Express). One of the parameters specifies an image URL, which the server downloads and injects into the PDF.
Right now, I have the following structure:
// Get dependencies
var express = require('express'),
    http = require('http'),
    fs = require('fs'),
    pdfDocument = require('pdfkit');

// Get express started.
var app = express();

// Use JSON in POST body
app.use(express.json());

// Setup POST response
app.post('/post_pdf', function(req, res) {
    // Get the PDF initialized
    var doc = new pdfDocument();

    // Set some headers
    res.statusCode = 200;
    res.setHeader('Content-type', 'application/pdf');
    res.setHeader('Access-Control-Allow-Origin', '*');

    // Header to force download
    res.setHeader('Content-disposition', 'attachment; filename=Untitled.pdf');

    // Pipe generated PDF into response
    doc.pipe(res);

    /**
     * Generate PDF contents
     */
    // Prepare write stream for image
    var image = fs.createWriteStream('image.jpeg');

    // Download image
    http.get("http://dummyimage.com/640.jpeg", function(response) {
        // Pipe response into image write stream
        // (because PDFKit needs to read from a saved file)
        response.pipe(image).on('close', function() {
            // Read data back, make sure there are no errors
            fs.readFile('image.jpeg', function(err, data) {
                if (err) throw err;

                /**
                 * Use `data` to get image info (width, height, etc.)
                 * ------------------
                 * Inject image
                 */

                // Close document and response
                doc.end();
                res.end();
                return;
            });
        });
    });
});
I have two questions:
Is there a less messy way to do this, perhaps with fewer nested callbacks? I'm totally open to adding another dependency to make life easier.
Right now, the code above does not work. It returns a PDF, but the PDF is corrupted (according to Preview). Any tips as to why this could be occurring are very welcome.
In debugging this issue, I discovered several things:
PDFKit does not need to read image data from a file; it will also accept a Buffer:
doc.image(myBuffer); // You don't have to use a path string
When piping the document directly into the response, a manual call to res.end() will cause problems if the document has already been closed:
doc.pipe(res); // Pipe document directly into the response
doc.end(); // When called, this ends the file and the response
// res.end(); <-- DON'T call res.end()
// The response was already closed by doc.end()
return;
Request is a super-useful NodeJS library that can flatten the callback tree
Updated code:
var express = require('express'),
    request = require('request'),
    pdfDocument = require('pdfkit');

// Start Express
var app = express();

// Use JSON in POST body
app.use(express.json());

// Setup POST response
app.post('/post_pdf', function(req, res) {
    // Create PDF
    var doc = new pdfDocument();

    // Write headers
    res.writeHead(200, {
        'Content-Type': 'application/pdf',
        'Access-Control-Allow-Origin': '*',
        'Content-Disposition': 'attachment; filename=Untitled.pdf'
    });

    // Pipe generated PDF into response
    doc.pipe(res);

    // Process image
    request({
        url: 'http://dummyimage.com/640.jpeg',
        encoding: null // Prevents Request from converting the response to a string
    }, function(err, response, body) {
        if (err) throw err;

        // Inject image; `body` is a Buffer because we told Request
        // not to convert it to a string
        doc.image(body);

        doc.end(); // Close document and, by extension, response
        return;
    });
});
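For completeness, here is a minimal sketch of calling this endpoint from another Node process and saving the PDF to disk. The port, the imageUrl body field, and the output filename are assumptions on my part (the server snippets above never call app.listen or read req.body):

var http = require('http');
var fs = require('fs');

var body = JSON.stringify({ imageUrl: 'http://dummyimage.com/640.jpeg' });

var req = http.request({
    hostname: 'localhost',
    port: 3000, // assumed port
    path: '/post_pdf',
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(body)
    }
}, function(res) {
    // Stream the generated PDF straight to a file
    res.pipe(fs.createWriteStream('Untitled.pdf'));
});

req.write(body);
req.end();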
An alternative without Request, using node-fetch (node-fetch v2, run inside an async function):
const fetch = require('node-fetch');
const PDFDocument = require('pdfkit');

const doc = new PDFDocument({});

// Fetch the image and read it as a Buffer
const url = AnyImageUrl;
const res = await fetch(url);
const imageBuffer = await res.buffer();

// PDFKit accepts the Buffer directly; no base64 round-trip is needed
doc.image(imageBuffer, (doc.page.width - 525) / 2, doc.y, { align: 'center', width: 125 });
Related
I'm trying to create a small service that converts images from one type to another.
I'll get to the conversion part later; right now I can't even send the image to the Node server properly.
I have a simple script that's supposed to use the filesystem to read an image and then POST it to a Node server endpoint.
I couldn't find a way to do this anywhere online.
I've tried all kinds of different ways and formats, but on the endpoint req.body is always an empty object, or I just get an error.
const fse = require('fs-extra');
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;
var xhr = new XMLHttpRequest();

fse.readFile('testimg.png', function (err, data) {
    if (err) {
        throw err;
    }
    console.log(data);

    xhr.open("POST", 'http://localhost:3000/convert', true); // true for asynchronous
    xhr.setRequestHeader('Content-Type', 'application/upload');
    xhr.send(data);
});
and this is the server endpoint:
var express = require('express');
var router = express.Router();

router.get('/', (req, res) => {
    res.send("hello");
});

router.post('/', async (req, res) => {
    console.log(req.body);
    res.send("Hello i work");
});

module.exports = router;
What I want is to get the data on the server endpoint and be able to process and convert it: for example, upload a JPG and send it back converted to PNG, or the other way around.
Help is highly appreciated.
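No answer was included for this one, so here is a hedged sketch of one way to receive those bytes: since the client sends the raw image with Content-Type: application/upload, Express's built-in raw body parser (Express 4.17+) can expose them as a Buffer on req.body. The size limit and the conversion step are assumptions.

var express = require('express');
var router = express.Router();

// Turn 'application/upload' request bodies into a Buffer on req.body
router.use(express.raw({ type: 'application/upload', limit: '10mb' }));

router.post('/', function(req, res) {
    // req.body is a Buffer with the uploaded image bytes
    console.log('received', req.body.length, 'bytes');
    // ...convert the image here with a library of your choice...
    res.send('Hello i work');
});

module.exports = router;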
I have a REST client in Node, and I'm trying to upload a PDF file to another REST web server, which provides the ability to parse my PDF and extract some data. Basically it is a service. The npm package that I use is: https://www.npmjs.com/package/node-rest-client. If there are other REST clients, I can use those as well. The REST API I need to use is described below:
POST /      ; Uploads a new PDF document via a form
POST /file  ; Uploads a new PDF document via bytestream
The question is how to upload the file. Also, I would like to see how to store the file at the other end.
You can use the npm module request to upload the file.
Here is a working example:
var request = require('request');
var fs = require('fs');

request({
    method: 'POST',
    preambleCRLF: true,
    postambleCRLF: true,
    uri: 'http://yourdomain/file',
    multipart: [
        {
            'content-type': 'application/pdf',
            body: fs.createReadStream('file.pdf')
        }
    ]
},
function (error, response, body) {
    if (error) {
        return console.error('upload failed:', error);
    }
    console.log('Upload successful! Server responded with:', body);
});
For receiving on the server side with Node, you can use a module like busboy. Here is a demo:
var busboy = require('connect-busboy');

app.use(busboy());

app.use(function(req, res) {
    if (req.busboy) {
        req.busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
            // move your file etc
        });
        req.pipe(req.busboy);
    }
});
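As a hedged follow-up to the "move your file" comment above: the file argument is a readable stream, so it can be written straight to disk. The uploads directory is an assumption.

var fs = require('fs');
var path = require('path');

req.busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    // Stream the incoming part to disk; basename() guards against path traversal
    var dest = path.join(__dirname, 'uploads', path.basename(filename));
    file.pipe(fs.createWriteStream(dest));
});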
You can use request.
Here is an example:
fs.createReadStream('file.pdf').pipe(request.post('http://example.com/file'))
I'm trying to get a response back from the remote site (URL) in the following Node.js code.
Case 1: Using basic HTTP module + request module
var http = require('http');
var fs = require('fs');
var request = require('request');

var server = http.createServer(function(req, res) {
    // Can't get back any response from the remote URL
    // Can't use 'res' here, and nothing ends up in res
    request('http://remote.foo.com/app/lists/dosomething.json',
        function(err, response, body) {
            console.log(body);
        }
    );

    var host = 'http://remote.foo.com';
    var app = '/app/lists/dosomething.json?';
    var url = 'param1=a&param2=b';

    // This way I can get something printed back
    request.get(host + app + url).pipe(res);
});
Case 2: Using Express + request module
var express = require('express')
    , http = require('http')
    , path = require('path')
    , net = require('net')
    , url = require('url')
    , qs = require('qs');
var request = require('request');

//.....

app.get('/test/foo.json', function(req, res) {
    var url_parts = url.parse(req.url, true);
    var query = qs.parse(url_parts.query);

    var host = 'http://remote.foo.com';
    var app = '/path/dosomething.json?';
    var url = 'param1=a&param2=b';

    // The following line is the same code as in Case 1 above,
    // but it can't get any result, just a blank response.
    request.get(host + app + url).pipe(res);
    res.end();
});

//....
//....
Can anyone explain it?
Thanks
Expanding on my comment a little bit:
When you pipe() a readable stream into a writable stream, by default writable.end() is called when the readable stream emits the end event (see documentation). This means that when you pipe streams, you don't have to explicitly close the destination stream by yourself: it is automatically closed when the source closes. This should make sense: when the source stream ends, the destination stream doesn't have anything to do, and should indeed close.
In your second example, you first pipe a readable stream (request) into a writable stream (res), but the very next instruction closes the res stream, without even reading anything, since the first read() call is executed on the next tick (see source). The solution is then to remove res.end(), since it will be handled automatically.
As a side note: if you need to keep the destination stream open for some reason, you can pass {end: false} to pipe.
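Putting that together, a minimal sketch of the corrected Case 2 handler looks like this (the inner variables are renamed slightly so they no longer shadow the app and url identifiers from the outer scope):

app.get('/test/foo.json', function(req, res) {
    var host = 'http://remote.foo.com';
    var remotePath = '/path/dosomething.json?';
    var query = 'param1=a&param2=b';

    // pipe() will call res.end() for us when the remote response ends
    request.get(host + remotePath + query).pipe(res);
});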
I understand how to load a remote file with Node.js + request, and I can then read it and return the PNG binary blob. Is there an elegant way to do it with one request (or even a one-liner)?
something like:
http.createServer(function(req, res) {
    res.writeHead(200, {
        'Content-Type': 'image/png'
    });

    var picWrite = fs.createWriteStream(local);
    var picFetch = fs.createReadStream(local);

    picStream.on('close', function() {
        console.log("file loaded");
    });

    request(remote).pipe(picWrite).pipe(picFetch).pipe(res);
})
To be clear: my aim is to load a remote file from a CDN, cache it locally to the server and then return the file in the original request. In future requests I use fs.exists() to check it exists first.
This is my best effort so far:
http.createServer(function(req, res) {
    var file = fs.createWriteStream(local);
    request.get(remote).pipe(file).on('close', function() {
        res.end(fs.readFileSync(local), 'binary');
    });
})
Since the request will return a readable stream, we can listen on its data and end events to write to both the HTTP response and a writable stream.
var http = require('http');
var fs = require('fs');
var request = require('request');

http.createServer(function(req, res) {
    res.writeHead(200, { 'Content-Type': 'image/png' });

    // `local` is the cache path and `remote` the CDN URL, as in the question
    var file = fs.createWriteStream(local);

    // request the file from a remote server
    var rem = request(remote);

    rem.on('data', function(chunk) {
        // instead of loading the file into memory
        // after the download, we can just pipe
        // the data as it's being downloaded
        file.write(chunk);
        res.write(chunk);
    });

    rem.on('end', function() {
        file.end(); // finish writing the local copy
        res.end();
    });
});
The method that you showed first writes the data to disk, then reads it into memory again. This is rather pointless, since the data is already accessible when it's being written to disk.
If you use an event handler, you can write to both the HTTP response and the file stream without pointlessly loading the file into memory again. This also sidesteps the problem with the chained pipe() in your sketch: pipe() returns the destination stream, so request(remote).pipe(picWrite).pipe(picFetch) ends up piping the write stream onward rather than sending the same data to both destinations.
This also avoids running out of memory: with the readFileSync approach, downloading a large file would load it into memory in its entirety. With streams, only chunks of the file are in memory at any one time, so you don't have that problem.
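As an aside, and not something the answer above depends on: a readable stream can be piped to more than one destination as long as the pipes are attached separately rather than chained, so the same fan-out can also be written with pipe():

var rem = request(remote);

// Each destination gets its own copy of the chunks as they arrive
rem.pipe(fs.createWriteStream(local)); // cache the file to disk
rem.pipe(res);                         // and stream it to the client at the same time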
I am trying to learn the ins and outs of node. I know you can serve a file with a framework but I am trying to do it manually. I have a jpeg file in './public/logo.jpg'. When I send the request through localhost:8080 I don't get the image, just a blank screen with a generic image placeholder. What am I doing wrong? Thanks!
var http = require('http');
var url = require('url');
var fs = require('fs');

// creates a new httpServer instance
http.createServer(function (req, res) {
    // this is the callback, or request handler, for the httpServer
    log('in server callback');

    res.ins = res.write;

    var parse = url.parse(req.url, true);
    var path0 = parse.pathname;
    console.log(path0);

    // respond to the browser, write some headers so the
    // browser knows what type of content we are sending
    var serveFile = function() {
        var path = './public' + path0;
        fs.exists(path, function(e) {
            if (e) {
                log('serving file');
                log(path);
                fs.readFile(path, 'binary', function(err, data) {
                    if (data) {
                        res.writeHead(200, {'Content-Type': 'image/jpeg'});
                        res.ins(data);
                        res.end();
                    }
                });
            } else {
                log('no file to serve');
                log(path);
                servePage();
            }
        });
    };

    serveFile();
}).listen(8080); // the server will listen on port 8080
Simply change the following two lines in your readFile callback:
res.writeHead(200, {'Content-Type': 'image/jpeg'});
res.write(data, 'binary');
Use response.write to send the data to the client, and set the encoding to 'binary' (the default is utf-8).
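For what it's worth, and purely as a suggestion beyond the answer above: the encoding juggling can be avoided entirely by streaming the file, which also keeps only small chunks in memory at a time:

var http = require('http');
var fs = require('fs');

http.createServer(function(req, res) {
    res.writeHead(200, { 'Content-Type': 'image/jpeg' });
    // Stream the raw bytes; no 'binary' encoding is needed
    fs.createReadStream('./public/logo.jpg').pipe(res);
}).listen(8080);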