I have a REST client on Node, and I'm trying to upload a PDF file to another REST web server that parses the PDF and extracts some data; basically it is a service. The npm package I use is: https://www.npmjs.com/package/node-rest-client. If there are other REST clients, I can use those as well. The REST API I need to use is described below:
POST / ; Uploads a new PDF document via a form
POST /file ; Uploads a new PDF document via bytestream
The question is how to upload the file. Also, I would like to see how to store the file at the other end.
You can use the npm module request to upload the file.
Here is a working example:
var request = require('request');
var fs = require('fs');

request({
  method: 'POST', // the API described above uses POST, not PUT
  preambleCRLF: true,
  postambleCRLF: true,
  uri: 'http://yourdomain/file',
  multipart: [
    {
      'content-type': 'application/pdf',
      body: fs.createReadStream('file.pdf')
    }
  ]
},
function (error, response, body) {
  if (error) {
    return console.error('upload failed:', error);
  }
  console.log('Upload successful! Server responded with:', body);
});
For receiving on the server side with Node you can use a module like busboy. Here is a demo:
var busboy = require('connect-busboy');

app.use(busboy());
app.use(function(req, res) {
  if (req.busboy) {
    req.busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
      // move your file etc
    });
    req.pipe(req.busboy);
  }
});
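To actually store the upload, here is a minimal sketch of the "move your file" step; the uploads/ directory and the flat filename handling are assumptions for illustration, not part of the original answer:

var fs = require('fs');
var path = require('path');

req.busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
  // Stream the incoming file straight to disk under uploads/
  var saveTo = path.join('uploads', path.basename(filename));
  file.pipe(fs.createWriteStream(saveTo));
});
req.busboy.on('finish', function() {
  res.end('Upload complete');
});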
You can use request. Here is an example that pipes the file to the server as a raw bytestream:
fs.createReadStream('file.pdf').pipe(request.post('http://example.com/file'))
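And for storing the bytestream at the other end, a minimal sketch with the plain http module; the port, route, and output filename are assumptions:

var http = require('http');
var fs = require('fs');

http.createServer(function (req, res) {
  if (req.method === 'POST' && req.url === '/file') {
    // Stream the raw request body straight into a file on disk
    req.pipe(fs.createWriteStream('upload.pdf'))
      .on('finish', function () {
        res.end('stored');
      });
  } else {
    res.statusCode = 404;
    res.end();
  }
}).listen(3000);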
Related
I have the following JavaScript that runs in Node.js. It's a script to upload data via a Node.js API. I am not very familiar with Node.js, so I need help refactoring this code so that it:
reads and uploads only the files with a .json extension in the directory
handles file names that follow a set naming convention, with a number at the end of each file
iterates through all of the .json files in the same directory and loads them to the API - it could be as many as 2,000 files at once (less than 2MB per file)
waits something like 30 seconds between loads (a sketch follows the code below)
I also need to be able to specify the directory to read the .json files from. Please let me know if I can clarify anything.
const fs = require('fs');
const http = require('http');

// function to post a transaction
function postData(xactData) {
  var options = {
    hostname: 'localhost',
    port: 8080,
    path: '/abc/def/ghi/jkl',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(xactData), // byte length, not character count
      'Request-Timeout': "600000"
    }
  };
  // setting up the request
  var post_req = http.request(options, res => {
    console.log(`statusCode: ${res.statusCode}`); // backticks, not quotes, for a template literal
    res.on('data', d => {
      process.stdout.write(d);
    });
  });
  post_req.on('error', error => {
    console.error(error);
  });
  // post data
  post_req.write(xactData);
  post_req.end();
}

// read in the file
try {
  const data = fs.readFileSync('./file/xyz.json', 'utf8');
  if (data) {
    postData(data);
  }
} catch (err) {
  console.error(err);
}
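A sketch of one way to do the refactor, reusing the postData() helper above: it reads every .json file from a configurable directory and waits 30 seconds between uploads. The ./file directory is an assumption; point dir wherever your files live.

const path = require('path');

const dir = './file'; // directory containing the .json files (assumption)
const files = fs.readdirSync(dir).filter(name => path.extname(name) === '.json');

let index = 0;
function uploadNext() {
  if (index >= files.length) return;
  const data = fs.readFileSync(path.join(dir, files[index]), 'utf8');
  postData(data);
  index++;
  setTimeout(uploadNext, 30000); // wait 30 seconds between loads
}
uploadNext();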
Here are the steps my application is doing:
User uploads an image (.PNG) file in the browser.
Browser sends the raw image data to my server.
The server then saves the file in my file system.
The problem is that my file system complains that the file is not a PNG file when I try to open it.
I'm trying to pinpoint where the problem occurs, without success. When I look at the file data in Vim, both files look the same to me, with the same number of lines, and both start with:
<89>PNG^M
^Z
^#^#^#^MIHDR^#^#^BD^#^#^#Î^H^B^#^#^#P6<8a><9a>^#^#^#^CsBIT^H^H^HÛáOà^ ^#^#_zTXtRaw profile type APP1^#^#^H<99>ãJOÍK-ÊLV((ÊOËÌIåR^#^Cc^S. ^SK^SK£D^C^C^C^K^C^H04006^D<92>F#
...
However, the file sizes are different, with the file I'm writing from my server being larger. So obviously they are different in some way.
I tried running diff file1 file2 on the command line and it just reports "Binary files differ" without showing me the difference, which is strange.
So I'm currently confused about how the files differ, and where in my code this difference gets introduced.
I feel I'm missing some crucial knowledge about how some of these things work under the hood, and I would very much appreciate it if someone smarter than me could help me out here.
Code
Client:
<input id="icon-button-file" type="file" onChange={handleChange} />
<label htmlFor="icon-button-file">
  <Button/>
</label>

function handleChange(event: any) {
  if (!event.target.files[0]) return
  const file = event.target.files[0]
  const reader = new FileReader()
  reader.onload = function (e) {
    const instance = axios.create({
      baseURL: 'http://localhost:3000/',
      timeout: 5000,
      headers: { 'Content-Type': 'application/json' }
    })
    instance.post(
      'image',
      JSON.stringify({
        imageData: e.target.result.toString()
      })
    ).then(result => {
      console.log(result)
    })
  }
  reader.readAsBinaryString(file)
}
Server:
app.post('/image', (req, res) => { // Express routes take a path, not a full URL
  fs.writeFile('img.png', req.body.imageData, (err) => {
    console.log(err)
    res.end() // respond so the client's promise resolves
  })
})
EDIT:
I made it work by sending the image content to the server as a data URL instead, using reader.readAsDataURL(). This encodes the image data as a Base64 string, which seems like a common practice. However, I'm still curious why sending the raw data doesn't work.
You can use Formidable to handle files in Node.js easily. Good to hear that you got it running already; I hope this helps someone else. This covers some basics.
And yes, another approach is to encode the file to Base64 and decode it back to a file on the server side. Cheers.
var express = require('express');
var router = express.Router();
var fs = require('fs');
var formidable = require('formidable');

/* GET home page. */
router.get('/', function(req, res, next) {
  res.writeHead(200, {'Content-Type': 'text/html'});
  res.write('<form action="fileupload" method="post" enctype="multipart/form-data">');
  res.write('<input type="file" name="filetoupload"><br>');
  res.write('<input type="submit">');
  res.write('</form>');
  res.end();
});

router.post('/fileupload', (req, res) => {
  var form = new formidable.IncomingForm();
  form.parse(req, function (err, fields, files) {
    var oldpath = files.filetoupload.path;
    var newpath = './public/' + files.filetoupload.name;
    // move the uploaded file out of the temp directory
    fs.rename(oldpath, newpath, function (err) {
      if (err) throw err;
      res.write('File uploaded and moved!');
      res.end();
    });
  });
});

module.exports = router;
Answering my own question.
The reason for the problem was that I was sending the image's raw binary data as text over HTTP, which can corrupt it.
See: https://stackoverflow.com/a/201510/6017605
Since Base64 encodes the binary data as text, it can safely be transmitted.
This also helped me understand the problem: https://www.jscape.com/blog/ftp-binary-and-ascii-transfer-types-and-the-case-of-corrupt-files
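For completeness, a minimal sketch of what the server side of the data-URL approach can look like; the route and imageData field mirror the code above, and the prefix stripping is the key step:

app.post('/image', (req, res) => {
  // A data URL looks like "data:image/png;base64,iVBORw0..." -
  // strip the prefix, then decode the Base64 payload back to binary
  const base64Data = req.body.imageData.split(',')[1];
  fs.writeFile('img.png', Buffer.from(base64Data, 'base64'), (err) => {
    if (err) return res.status(500).end();
    res.end('saved');
  });
});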
I created a web service with Node.js (Express) that takes a JSON file, reads it, and stores its content as a document in MongoDB.
The POST route works fine. The code looks like this:
var multipart = require('connect-multiparty');
var fs = require('fs');

router.post('/', multipartMiddleware, function(req, res) {
  fs.readFile(req.files.swagger.path, 'utf8', function(err, data) {
    req.body.swagger = JSON.parse(data);
    .
    .
  });
});
Basically, it's picking up the data from the JSON file in req.files and putting it inside the req.body object.
I need the same thing done in a Jasmine spec file. I'm able to test the web service manually using Postman, but I need to test it through a spec file using jasmine-node. For now, I'd like to check that the web service returns a 200 OK status, but it's returning a 500.
So far, my jasmine-node test suite looks like this:
describe("POST /", function() {
it("returns status code 200", function(done) {
var req = request.post(base_url, function(error, response, body) {
expect(response.statusCode).toBe(200);
});
var form = req.form();
form.append('file','<FILE_DATA>', {
filename : "sample.json",
contentType : "text/plain"
});
console.log("Form", form);
});
});
How do I upload a file to the Node.js server programmatically, so that I can pick it up from req.files in my spec?
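A sketch of one way to do this with the same request form helper, streaming a real file instead of the placeholder string. It assumes a sample.json sits next to the spec; the field name swagger matches what the route reads from req.files.swagger:

var fs = require('fs');

describe("POST /", function() {
  it("returns status code 200", function(done) {
    var req = request.post(base_url, function(error, response, body) {
      expect(response.statusCode).toBe(200);
      done(); // tell Jasmine the async request has finished
    });
    var form = req.form();
    form.append('swagger', fs.createReadStream('./sample.json'), {
      filename: 'sample.json',
      contentType: 'application/json'
    });
  });
});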
I'm trying to create a Node server that generates a PDF on the fly using PDFKit. The PDF is generated based on parameters from a POST request (via Express). One of the parameters specifies an image URL, which the server downloads and injects into the PDF.
Right now, I have the following structure:
// Get dependencies
var express = require('express'),
    http = require('http'),
    fs = require('fs'),
    pdfDocument = require('pdfkit');

// Get express started.
var app = express();

// Use JSON in POST body
app.use(express.json());

// Setup POST response
app.post('/post_pdf', function(req, res) {
  // Get the PDF initialized
  var doc = new pdfDocument();

  // Set some headers
  res.statusCode = 200;
  res.setHeader('Content-type', 'application/pdf');
  res.setHeader('Access-Control-Allow-Origin', '*');

  // Header to force download
  res.setHeader('Content-disposition', 'attachment; filename=Untitled.pdf');

  // Pipe generated PDF into response
  doc.pipe(res);

  /**
   * Generate PDF contents
   */

  // Prepare write stream for image
  var image = fs.createWriteStream('image.jpeg');

  // Download image
  http.get("http://dummyimage.com/640.jpeg", function(response) {
    // Pipe response into image write stream
    // (because PDFKit needs to read from a saved file)
    response.pipe(image).on('close', function() {
      // Read data back, make sure there are no errors
      fs.readFile('image.jpeg', function(err, data) {
        if (err) throw err;
        /**
         * Use `data` to get image info (width, height, etc.)
         * ------------------
         * Inject image
         */
        // Close document and response
        doc.end();
        res.end();
        return;
      });
    });
  });
});
I have two questions:
Is there a less messy way to do this, perhaps with fewer nested callbacks? I'm totally open to adding another dependency to make life easier.
Right now, the code above does not work. It returns a PDF, but the PDF is corrupted (according to Preview). Any tips as to why this could be occurring are very welcome.
In debugging this issue, I discovered several things:
PDFKit does not need to read image data from a file; it will also accept a Buffer:
doc.image(myBuffer); // You don't have to use a path string
When piping a file directly into the response, a manual call to response.end() will cause problems if the file has already been closed:
doc.pipe(res); // Pipe document directly into the response
doc.end(); // When called, this ends the file and the response
// res.end(); <-- DON'T call res.end()
// The response was already closed by doc.end()
return;
Request is a super-useful Node.js library that can flatten the callback tree.
Updated code:
var express = require('express'),
    request = require('request'),
    pdfDocument = require('pdfkit');

// Start Express
var app = express();

// Use JSON in POST body
app.use(express.json());

// Setup POST response
app.post('/post_pdf', function(req, res) {
  // Create PDF
  var doc = new pdfDocument();

  // Write headers
  res.writeHead(200, {
    'Content-Type': 'application/pdf',
    'Access-Control-Allow-Origin': '*',
    'Content-Disposition': 'attachment; filename=Untitled.pdf'
  });

  // Pipe generated PDF into response
  doc.pipe(res);

  // Process image
  request({
    url: 'http://dummyimage.com/640.jpeg',
    encoding: null // Prevents Request from converting the response to a string
  }, function(err, response, body) {
    if (err) throw err;

    // Inject image: `body` is a Buffer because we told Request
    // not to convert the response to a string
    doc.image(body);
    doc.end(); // Close document and, by extension, response
    return;
  });
});
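Note that both snippets leave out starting the server. To actually run them you would add a listen call, e.g. (the port is an assumption):

app.listen(8080);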
An alternative with node-fetch; note that res.buffer() already returns a Buffer, so no Base64 re-decoding is needed, and await must run inside an async function:

const fetch = require('node-fetch');
const PDFDocument = require('pdfkit');

const doc = new PDFDocument({});

async function addImage(url) {
  const res = await fetch(url);
  const imageBuffer = await res.buffer(); // node-fetch hands back a Buffer directly
  doc.image(imageBuffer, (doc.page.width - 525) / 2, doc.y, { align: 'center', width: 125 });
}
I am taking in a PNG file from AFNetworking, saving it to GridFS, and then I would like to be able to retrieve it at some point. Out of curiosity I logged the image before it entered GridFS and it looks like:
<89504e47 0d0a1a0a 0000000d 49484452 00000074 0000008c 08020000 0022391a ...>
I save this in a buffer and then store it into GridFS.
When I am retrieving it via a GET request I log it again before sending it out and it appears to be in the same format.
Then I attempt to do this
res.writeHead(200, {'Content-Type': 'image/png' });
gs.createReadStream(image).pipe(res); // using GridFS, this is the syntax to read
When viewing this in a browser it just appears as an empty or broken image link. If I inspect the page source it appears to be just ...
If I never set the headers it just appears as hundreds of lines of
<89504e47 0d0a1a0a 0000000d 49484452 00000074 0000008c 08020000 0022391a ...>
I feel like I am not converting a buffer right or something.
var http = require('http'),
    MongoDB = require("mongodb"),
    MongoClient = require("mongodb").MongoClient,
    GridStore = require("mongodb").GridStore;

http.createServer(function (req, res) {
  console.log("Serving request for file: " + req.url);
  MongoClient.connect("mongodb://localhost:27017/test", {}, function(err, db) {
    var gridStore = new GridStore(db, req.url, "r"); // var added to avoid an implicit global
    gridStore.open(function(err, gs) {
      if (err) {
        console.log("error in open: " + err);
        return;
      }
      res.writeHead(200, {'Content-Type': 'image/png'});
      var s = gs.stream(true);
      s.pipe(res);
    });
  });
}).listen(8124, "127.0.0.1");

console.log('Server running at http://127.0.0.1:8124/');
I can successfully run this server and serve the PNG files to the browser using the code pasted above. However, since you say the raw contents of the file appear on the client side when you view the source, I would suggest trying the code I wrote and seeing whether it has the same issue.