busboy "file" to gm to ftp upload - javascript

I want to resize an uploaded image in Node.js and send it on via FTP.
Using Node.js, busboy, GraphicsMagick, and jsftp.
var uploadFile = function(dir, req, cb) {
    req.busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
        var imageMagick = gm.subClass({
            imageMagick: true
        });
        console.log('Resizing file...');
        console.log(filename);
        imageMagick(file)
            .resize(150, 150)
            .stream(function(err, stdout, stderr) {
                if (err)
                    console.log(err);
                var i = [];
                stdout.on('data', function(data) {
                    console.log('data');
                    i.push(data);
                });
                stdout.on('close', function() {
                    console.log('close');
                    var image = Buffer.concat(i);
                    console.log(image.length);
                    console.log(image);
                    ftp.put(image, filepath, function(hadError) {
                        if (!hadError) {
                            filename = config.one.filepath + dir + "/" + filename;
                            cb(null, filename);
                        } else {
                            console.log(hadError);
                            cb(hadError, 'Error');
                        }
                    });
                });
            });
    });
    req.pipe(req.busboy);
};
The output is now:
Resizing file...
100-0001_IMG.JPG
close
0
<Buffer >
On the FTP server side I get a 0-byte file, and the callback is never called.
I found these two questions but couldn't make them work for me:
Question 1
Question 2
I guess there must be something wrong with the file I give to gm, because "data" is never written to the console.
The file itself is fine, since I managed to upload an unresized file to the FTP server.
I appreciate any help!
Thanks

Firstly you can check your stream for errors:
stdout.on('error', function(err) {
    console.log(err);
});
If there is an error related to ImageMagick, it may be a mismatch between the ImageMagick binary and the node-imagemagick library.
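If everything stays silent, it also helps to log what gm itself reports on stderr. A minimal sketch of the question's stream callback with both handlers attached (illustrative, not the original poster's exact code):
imageMagick(file)
    .resize(150, 150)
    .stream(function(err, stdout, stderr) {
        if (err) return console.log(err);
        stdout.on('error', function(err) {
            // surfaces read-side stream failures
            console.log('stdout error:', err);
        });
        stderr.on('data', function(data) {
            // gm/ImageMagick writes conversion errors here
            console.log('gm stderr:', data.toString());
        });
    });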

Related

Adding local file to zip

I'm trying to add a local file to the zip so that when the user downloads and unzips it, they'll get a folder with a .dll and a config.json file:
var zip = new JSZip();
options.forEach(option => {
    zip.folder("REST." + option + ".Connector")
        .file("config.json", "//config for " + option)
        // I want this file to be from a local directory within my project
        // eg. {dir}\custom_rest_connector_repository\src\dlls\Connectors.RestConnector.dll
        .file('../dlls/Connectors.RestConnector.dll', null);
});
zip.generateAsync({type: "blob"}).then(function (blob) {
    FileSaver.saveAs(blob, "REST_Connectors_"
        + dateStr
        + ".zip");
});
I read through the JSZip documentation but couldn't find an example or any information on whether this can actually be done.
If it can't, is there any other more robust library that does support this operation?
I found the answer to my own question using jszip-utils:
JSZipUtils.getBinaryContent("../dlls/Connectors.RestConnector.dll", function (err, data) {
    if (err) {
        throw err; // or handle the error
    }
    zip.file("../dlls/Connectors.RestConnector.dll", data, {binary: true});
});
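Since getBinaryContent is asynchronous, the zip has to be generated inside its callback. A sketch of how it can be combined with the folder loop from the question (options and dateStr are assumed to be defined as in the question):
JSZipUtils.getBinaryContent("../dlls/Connectors.RestConnector.dll", function (err, data) {
    if (err) throw err;
    options.forEach(option => {
        zip.folder("REST." + option + ".Connector")
            .file("config.json", "//config for " + option)
            // store the fetched binary under a plain name inside the folder
            .file("Connectors.RestConnector.dll", data, {binary: true});
    });
    zip.generateAsync({type: "blob"}).then(function (blob) {
        FileSaver.saveAs(blob, "REST_Connectors_" + dateStr + ".zip");
    });
});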

Writing an image to file, received over an HTTP request in Node

I'm certain I'm missing something obvious, but the gist of the problem is that I'm receiving a PNG from a Mapbox call with the intent of writing it to the file system and serving it to the client. I've successfully relayed the call, received a response of raw data, and written a file. The problem is that my file ends up truncated no matter what path I take, and I've exhausted the answers I've found skirting the subject. I've dumped the raw response to the log and it's robust, but any file I make tends to be about a chunk's worth of unreadable data.
Here's the code I've got at present for the file writing. I tried this buffer move as a last-ditch effort after several failed and comparably fruitless iterations. Any help would be greatly appreciated.
module.exports = function(req, res, cb) {
    var cartography = function() {
        return https.get({
            hostname: 'api.mapbox.com',
            path: '/v4/mapbox.wheatpaste/' + req.body[0] + ',' + req.body[1] + ',6/750x350.png?access_token=' + process.env.MAPBOX_API
        }, function(res) {
            var body = '';
            res.on('data', function(chunk) {
                body += chunk;
            });
            res.on('end', function() {
                var mapPath = 'map' + req.body[0] + req.body[1] + '.png';
                var map = new Buffer(body, 'base64');
                fs.writeFile(__dirname + '/client/images/maps/' + mapPath, map, 'base64', function(err) {
                    if (err) throw err;
                    cb(mapPath);
                });
            });
        });
    };
    cartography();
};
It is possible to rewrite your code as a more compact subroutine:
const fs = require('fs');
const https = require('https');
https.get(url, (response) => { // the request itself
    if (response) {
        let imageName = 'image.png'; // for this purpose I usually use crypto
        response.pipe( // pipe the response to a write stream (file)
            fs.createWriteStream( // create the write stream
                './public/' + imageName // create a file named image.png
            )
        );
        return imageName; // if the public folder is set as default in app.js
    } else {
        return false;
    }
});
You could get the original name and extension from the URL, but it is safer to generate a new name with crypto, and to get the file extension either from the URL, as I said, or with the read-chunk and file-type modules.
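A minimal sketch of that naming approach, using only Node's built-in crypto and path modules (url is assumed to hold the download link):
const crypto = require('crypto');
const path = require('path');
// strip any query string before taking the extension from the URL
const ext = path.extname(url.split('?')[0]);
// random hex name, practically collision-free
const imageName = crypto.randomBytes(16).toString('hex') + ext;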

Reading file to disk error, "name and value are required for setHeader()"

Trying to allow users to upload image files to the Node.js server in a MEAN stack application. I am using ng-file-upload for the client-side Angular directive. That seems to be working well enough. I run into an error when I pass the image to the server.
I use an API route to handle the work on the server side. The server is responsible for saving the file to disk with the node-multiparty module. It seems to hit the route, but when it tries to emit a close event I get the error: throw new Error('"name" and "value" are required for setHeader().')
The file I want is in my temp folder, but it doesn't get saved to the target directory on my server, plus I get the header error after the file should have been saved. So I need to stop the error and save the file with fs.rename() to the target image directory.
Here is the code that is breaking.
File: api.js
// router to save images
router.route('/img/upload')
    .post(function (req, res) {
        console.log("image upload hits the router");
        var options = {};
        var count = 0;
        var form = new multiparty.Form(options);
        // save file to disk
        form.on('file', function (name, file) {
            var uploadDirectory = 'img/user/profile/';
            var oldPath = file.path;
            var newPath = uploadDirectory + file.originalFilename;
            fs.rename(oldPath, newPath, function (err) {
                if (err) throw err;
                console.log('renamed complete');
            });
        });
        // Close emitted after form parsed
        form.on('close', function () {
            console.log('Upload completed!');
            res.setHeader('text/plain'); // Here is the line that gives an error.
            res.end('Received ' + count + ' files');
        });
        // Parse req
        form.parse(req);
    });
This is what I got to work for me.
The line that gave me the error was the setHeader call: it needs both the name and the value, passed as two strings separated by a comma. This works perfectly for me now. I hope it saves everyone time coding.
// post
.post(function (req, res) {
    var options = {};
    var count = 0;
    var form = new multiparty.Form(options);
    form.on('error', function (err) {
        console.log('Error parsing form: ' + err.stack);
    });
    // save file to disk
    form.on('file', function (name, file) {
        var uploadDirectory = '/img/user/profile/';
        var oldPath = file.path;
        var newPath = uploadDirectory + file.originalFilename;
        fs.rename(oldPath, newPath, function (err) {
            if (err) throw err;
            console.log('renamed complete');
        });
    });
    // Close emitted after form parsed
    form.on('close', function () {
        console.log('Upload completed!');
        res.setHeader('Content-Type', 'text/plain');
        res.end('Received ' + count + ' files');
    });
    // Parse req
    form.parse(req);
});
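One small thing to watch: count is declared but never incremented in the snippet above, so the response always reports 0 files. If the count matters, bump it in the 'file' handler:
form.on('file', function (name, file) {
    count++; // one more file received
    // ...rename logic as above...
});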

Express GraphicsMagick

I'm building a MEAN app, and the section I'm currently working on involves image uploads. I'm trying to use GraphicsMagick for Node, but I'm not having any success. Below is my POST request for image uploads (as is):
app.post('/api/users/upload_image', function (req, res) {
    var fstream;
    req.pipe(req.busboy);
    req.busboy.on('file', function (fieldname, file, filename) {
        console.log('\n\nUploading file: '.underline.bold + filename.underline.bold);
        // var readStream = fs.createReadStream(filename);
        // gm(readStream, readStream.path)
        //     .resize('200', '200')
        //     .stream(function (err, stdout, stderr) {
        //         var writeStream = fs.createWriteStream('www/uploads/' + readStream.path);
        //         stdout.pipe(writeStream);
        //     });
        fstream = fs.createWriteStream('www/uploads/' + filename);
        file.pipe(fstream);
    });
    req.busboy.on('finish', function () {
        res.writeHead(303, { Connection: 'close', Location: '/' });
        res.end();
    });
});
The commented-out section is my attempt at using GM, but that throws back the error: Error: ENOENT, open '[filename].jpg'
Where am I going wrong? This is my first try at using GM, so I'm a newb to this library!
var readStream = fs.createReadStream(filename);
At this line, the file named filename is not actually on disk yet; you only write that file out below the commented lines. What you do have is the read stream named file that you get from busboy, so get rid of this line and pass file to gm directly:
gm(file, ...
The code below does exactly what you want; note the parameters of gm's write function.
app.post('/api/users/upload_image', function (req, res) {
    req.pipe(req.busboy);
    req.busboy.on('file', function (fieldname, file, filename) {
        console.log('\n\nUploading file: '.underline.bold + filename.underline.bold);
        console.log('Resizing file...');
        gm(file, 'www/uploads/' + filename)
            .resize(200, 200)
            .write('www/uploads/' + filename, function (err) {
                console.log("finished");
            });
    });
    req.busboy.on('finish', function () {
        res.writeHead(303, { Connection: 'close', Location: '/' });
        res.end();
    });
});
Note that gm's resize, used this way, simply fits the image inside a 200x200 box; it does not rescale/stretch it to exactly 200x200.
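If you do want an exact 200x200 result instead of a best fit, gm accepts ImageMagick's geometry flags as a third argument to resize; the '!' flag forces the exact size at the cost of the aspect ratio. For example:
gm(file, 'www/uploads/' + filename)
    .resize(200, 200, '!') // '!' ignores the aspect ratio
    .write('www/uploads/' + filename, function (err) {
        if (err) console.log(err);
    });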
If this still gives you an error like EPIPE, your node process might not have sufficient permissions to write that file.
The problem is that you're trying to read the file before it's written. If you don't need to save the original image, you could just stream the file directly to gm instead:
req.busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    console.log('\n\nUploading file: '.underline.bold + filename.underline.bold);
    console.log('Resizing file...');
    gm(file, filename)
        .resize('200', '200')
        .write('www/uploads/' + filename, function (err) {
            if (err) throw err; // handle better
            console.log('Success!'.bold);
        });
});
You should probably also check that the given mime type of the file matches what you expect (e.g. image/jpeg, image/png, etc.). You could also take that a step further by using something like mmmagic to check the actual file type. However, with mmmagic you would have to forgo streaming directly to gm and save the file to disk first.
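A rough sketch of that heavier check with mmmagic, assuming the upload has already been written to some uploadPath on disk; it inspects the file's magic bytes instead of trusting the client-supplied mime type:
var mmm = require('mmmagic');
var magic = new mmm.Magic(mmm.MAGIC_MIME_TYPE);
magic.detectFile(uploadPath, function (err, result) {
    if (err) throw err;
    // result is e.g. 'image/jpeg', detected from the file contents
    if (result !== 'image/jpeg' && result !== 'image/png') {
        console.log('Rejected upload, detected type:', result);
    }
});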

How to download and unzip a zip file in memory in NodeJs?

I want to download a zip file from the internet and unzip it in memory without saving to a temporary file. How can I do this?
Here is what I tried:
var url = 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip';
var request = require('request'), fs = require('fs'), zlib = require('zlib');
request.get(url, function(err, res, file) {
    if (err) throw err;
    zlib.unzip(file, function(err, txt) {
        if (err) throw err;
        console.log(txt.toString()); // outputs nothing
    });
});
[EDIT]
As suggested, I tried using the adm-zip library, and I still cannot make this work:
var ZipEntry = require('adm-zip/zipEntry');
request.get(url, function(err, res, zipFile) {
    if (err) throw err;
    var zip = new ZipEntry();
    zip.setCompressedData(new Buffer(zipFile.toString('utf-8')));
    var text = zip.getData();
    console.log(text.toString()); // fails
});
You need a library that can handle buffers. The latest version of adm-zip will do:
npm install adm-zip
My solution uses the http.get method, since it returns Buffer chunks.
Code:
var file_url = 'http://notepad-plus-plus.org/repository/7.x/7.6/npp.7.6.bin.x64.zip';
var AdmZip = require('adm-zip');
var http = require('http');

http.get(file_url, function(res) {
    var data = [], dataLen = 0;
    res.on('data', function(chunk) {
        data.push(chunk);
        dataLen += chunk.length;
    }).on('end', function() {
        var buf = Buffer.alloc(dataLen);
        for (var i = 0, len = data.length, pos = 0; i < len; i++) {
            data[i].copy(buf, pos);
            pos += data[i].length;
        }
        var zip = new AdmZip(buf);
        var zipEntries = zip.getEntries();
        console.log(zipEntries.length);
        for (var i = 0; i < zipEntries.length; i++) {
            if (zipEntries[i].entryName.match(/readme/))
                console.log(zip.readAsText(zipEntries[i]));
        }
    });
});
The idea is to create an array of buffers and concatenate them into a new one at the end. This is due to the fact that buffers cannot be resized.
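For what it's worth, Node can do that concatenation in one call; the manual copy loop above is equivalent to:
var buf = Buffer.concat(data, dataLen); // joins the chunk array into one Buffer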
Update
This is a simpler solution that uses the request module to obtain the response in a buffer, by setting encoding: null in the options. It also follows redirects and resolves http/https automatically.
var file_url = 'https://github.com/mihaifm/linq/releases/download/3.1.1/linq.js-3.1.1.zip';
var AdmZip = require('adm-zip');
var request = require('request');

request.get({url: file_url, encoding: null}, (err, res, body) => {
    var zip = new AdmZip(body);
    var zipEntries = zip.getEntries();
    console.log(zipEntries.length);
    zipEntries.forEach((entry) => {
        if (entry.entryName.match(/readme/i))
            console.log(zip.readAsText(entry));
    });
});
The body of the response is a buffer that can be passed directly to AdmZip, simplifying the whole process.
Sadly you can't pipe the response stream straight into the unzip job the way the node zlib library allows; you have to cache the data and wait for the end of the response. I suggest piping the response to an fs stream in the case of big files, otherwise you will fill up your memory in a blink!
I don't completely understand what you are trying to do, but IMHO this is the best approach. You should keep your data in memory only for the time you really need it, and then stream it to the csv parser.
If you want to keep all of your data in memory, you can replace the csv parser method fromPath with from, which takes a buffer instead, and have getData return the unzipped content directly (see the sketch after the code below).
You can use AdmZip (as @mihai said) instead of node-zip; just pay attention, because AdmZip is not yet published on npm, so you need:
$ npm install git://github.com/cthackers/adm-zip.git
N.B. Assumption: the zip file contains only one file
var request = require('request'),
    fs = require('fs'),
    csv = require('csv'),
    NodeZip = require('node-zip');

function getData(tmpFolder, url, callback) {
    var tempZipFilePath = tmpFolder + new Date().getTime() + Math.random();
    var tempZipFileStream = fs.createWriteStream(tempZipFilePath);
    request.get({
        url: url,
        encoding: null
    }).on('end', function() {
        fs.readFile(tempZipFilePath, 'base64', function (err, zipContent) {
            var zip = new NodeZip(zipContent, { base64: true });
            Object.keys(zip.files).forEach(function (filename) {
                var tempFilePath = tmpFolder + new Date().getTime() + Math.random();
                var unzipped = zip.files[filename].data;
                fs.writeFile(tempFilePath, unzipped, function (err) {
                    callback(err, tempFilePath);
                });
            });
        });
    }).pipe(tempZipFileStream);
}
getData('/tmp/', 'http://bdn-ak.bloomberg.com/precanned/Comdty_Calendar_Spread_Option_20120428.txt.zip', function (err, path) {
    if (err) {
        return console.error('error: %s', err.message);
    }
    var metadata = [];
    csv().fromPath(path, {
        delimiter: '|',
        columns: true
    }).transform(function (data) {
        // do things with your data
        if (data.NAME[0] === '#') {
            metadata.push(data.NAME);
        } else {
            return data;
        }
    }).on('data', function (data, index) {
        console.log('#%d %s', index, JSON.stringify(data, null, ' '));
    }).on('end', function (count) {
        console.log('Metadata: %s', JSON.stringify(metadata, null, ' '));
        console.log('Number of lines: %d', count);
    }).on('error', function (error) {
        console.error('csv parsing error: %s', error.message);
    });
});
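For the fully in-memory variant mentioned above, here is a sketch of how getData could skip the temp files entirely, under the assumption that the legacy csv module's from() accepts in-memory data the way fromPath() accepts a path (keeping the single-file assumption):
function getData(url, callback) {
    request.get({ url: url, encoding: null }, function (err, res, body) {
        if (err) return callback(err);
        var zip = new NodeZip(body.toString('base64'), { base64: true });
        var filename = Object.keys(zip.files)[0]; // single file in the zip
        callback(null, zip.files[filename].data); // unzipped content, never written to disk
    });
}
// then feed the result to the parser: csv().from(unzipped, { delimiter: '|', columns: true })...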
If you're on macOS or Linux, you can use the unzip command to unzip from stdin.
In this example I'm reading the zip file from the filesystem into a Buffer object, but it works with a downloaded file as well:
// Get a Buffer with the zip content
var fs = require("fs"),
    zip = fs.readFileSync(__dirname + "/test.zip");

// Now the actual unzipping:
var spawn = require('child_process').spawn,
    fileToExtract = "test.js",
    // -p tells unzip to extract to stdout
    unzip = spawn("unzip", ["-p", "/dev/stdin", fileToExtract]);

// Write the Buffer to stdin
unzip.stdin.write(zip);

// Handle errors
unzip.stderr.on('data', function (data) {
    console.log("There has been an error: ", data.toString("utf-8"));
});

// Handle the unzipped stdout
unzip.stdout.on('data', function (data) {
    console.log("Unzipped file: ", data.toString("utf-8"));
});

unzip.stdin.end();
Which is actually just the node version of:
cat test.zip | unzip -p /dev/stdin test.js
EDIT: It's worth noting that this will not work if the input zip is too big to be read in one chunk from stdin. If you need to read bigger files, and your zip file contains only one file, you can use funzip instead of unzip:
var unzip = spawn("funzip");
If your zip file contains multiple files (and the file you want isn't the first one), I'm afraid to say you're out of luck. unzip needs to seek in the .zip file, since zip files are just a container, and unzip may just unzip the last file in it. In that case you have to save the file temporarily (node-temp comes in handy).
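A sketch of that temporary-file fallback, using the built-in os and path modules instead of node-temp (zip and fileToExtract as defined above):
var os = require('os');
var path = require('path');
// write the buffer out so unzip can seek inside a real file
var tmpZip = path.join(os.tmpdir(), 'in-' + Date.now() + '.zip');
fs.writeFile(tmpZip, zip, function (err) {
    if (err) throw err;
    var proc = spawn('unzip', ['-p', tmpZip, fileToExtract]);
    proc.stdout.on('data', function (data) {
        console.log(data.toString('utf-8'));
    });
});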
Two days ago the module node-zip was released; it is a wrapper for the JavaScript-only version of Zip: JSZip.
var NodeZip = require('node-zip'),
    zip = new NodeZip(zipBuffer.toString("base64"), { base64: true }),
    unzipped = zip.files["your-text-file.txt"].data;
