How to resize an image before upload - JavaScript

I want to resize my images before I upload them to Amazon S3.
I need 3 different sizes: original image, thumbnail, and web size.
How can I do this?
How do I get the path of the image that was posted with the POST method?
This is my code (to upload an image to Amazon S3 with Node.js):
app.post('/upload', function(request, response) {
  var ext
    , hash
    , form = new formidable.IncomingForm()
    , files = []
    , fields = [];

  form.keepExtensions = true;
  form.uploadDir = 'tmp';

  form.on('fileBegin', function(name, file) {
    ext = file.path.split('.')[1];
    hash = hasher();
    file.path = form.uploadDir + '/' + hash;
  });

  form.on('field', function(field, value) {
    fields.push([field, value]);
  }).on('file', function(field, file) {
    files.push([field, file]);
  }).on('end', function() {
    fs.readFile(__dirname + '/../tmp/' + hash, function(error, buf) {
      var req = client.put('/images/' + hash + '.png', {
        'x-amz-acl': 'private',
        'Content-Length': buf.length,
        'Content-Type': 'image/png'
      });
      req.on('response', function(res) {
        var image = new S3({
          hash: hash,
          url: req.url
        });
        image.save(function(error, result) {
          if (error) {
            console.error(error);
          } else {
            response.redirect('http://' + request.headers.host + '/' + hash);
          }
        });
      });
      req.end(buf);
    });
  });

  form.parse(request);
});

Use GraphicsMagick or ImageMagick for that, via the gm module.
Example:
gm(buf)
  .resize(100, 100)
  .toBuffer('PNG', function (err, buffer) {
    if (err) return handle(err);
    console.log('done!');
    // use the resized buffer for the upload
    upload(buffer, function (err) {
      if (!err) console.log('uploaded');
    });
  });

function upload(buffer, callback) {
  var s3bucket = new AWS.S3();
  var params = {
    Bucket: bucketName,
    Key: folder + '/' + fileName,
    Body: buffer,
    ACL: 'public-read'
  };
  s3bucket.putObject(params, function (err) {
    callback(err);
  });
}
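
The question asks for three sizes (original, thumbnail, web). Here is a minimal sketch of that, building on the code above — bucketName, folder, baseName and the input buffer buf are placeholders I'm assuming, and the exact sizes are illustrative:

var s3bucket = new AWS.S3();
var sizes = [
  { name: 'original', width: null }, // keep the image as-is
  { name: 'web',      width: 800  }, // illustrative web size
  { name: 'thumb',    width: 100  }  // illustrative thumbnail size
];

sizes.forEach(function (size) {
  var done = function (err, buffer) {
    if (err) return console.error(err);
    s3bucket.putObject({
      Bucket: bucketName,
      Key: folder + '/' + baseName + '-' + size.name + '.png',
      Body: buffer,
      ACL: 'public-read'
    }, function (err) {
      if (err) console.error(err);
    });
  };
  if (size.width === null) {
    done(null, buf); // original: upload the untouched buffer
  } else {
    // width-only resize preserves the aspect ratio
    gm(buf).resize(size.width).toBuffer('PNG', done);
  }
});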

HTML component:
<input type="file" class="hidden" id="logoFile">
JavaScript:
file = document.getElementById('logoFile').files[0];
image = new Parse.File("image.jpg", file); // I am using Parse here; other Node.js modules work too.
You can set whatever size you want. Try this:
image.resize(200, 200, function(err, image) {
  // encode resized image to JPEG and get a Buffer object
  image.toBuffer('jpg', function(err, buffer) {
    // save buffer to disk / send over network / etc.
  });
});
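
If the resize should happen in the browser before the upload even starts, a canvas-based approach is another option. This is a sketch of my own, not part of the original answer; logoFile is the input element from the HTML above:

// Draw the selected file onto a canvas at the target size, then read it
// back as a Blob that can be uploaded instead of the original file.
function resizeInBrowser(file, width, height, callback) {
  var img = new Image();
  img.onload = function () {
    var canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    canvas.getContext('2d').drawImage(img, 0, 0, width, height);
    URL.revokeObjectURL(img.src);
    canvas.toBlob(callback, 'image/jpeg', 0.9);
  };
  img.src = URL.createObjectURL(file);
}

// Usage: shrink the chosen logo to 200x200 before uploading it.
var file = document.getElementById('logoFile').files[0];
resizeInBrowser(file, 200, 200, function (blob) {
  // append blob to a FormData and POST it, e.g. with fetch()
});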

Related

How to save a PDF file to S3 and locally as well in Node.js

I am stuck on something I am unable to solve. I am uploading a file and receiving it through req.files, and I have added middleware which uploads the file to S3. I have tried everything. The upload to S3 works fine, but it isn't saving the file to a local path as well. When I try fs.writeFile, the file it saves is corrupted.
Can someone help me?
Middleware function:
uploadToS3bucket: (path) => {
  return multer({
    storage: multerS3({
      s3: s3,
      acl: 'public-read',
      bucket: AWS_BUCKET_NAME,
      contentType: multerS3.AUTO_CONTENT_TYPE,
      metadata: function (req, file, cb) {
        cb(null, { fieldName: file.fieldname });
      },
      key: function (req, file, cb) {
        const params = Object.keys(req.params).map(key => {
          return req.params[key];
        }).join('/');
        const key = path + '/' + params + (params.length > 0 ? '/' : '') + shortUUID.generate() + '-' + file.originalname;
        cb(null, key);
      }
    })
  });
}
Route:
router.post('/resume', uploadToS3bucket('temp').fields([
{name: 'resume', maxCount: 1}
]), resumeCtrl.getUserResume);
I tried:
// in the metadata function
fs.writeFile('python/resume_parser/data/input/resume/' + file.originalname, file, (err) => {
  if (err) throw err;
});
and
request({ uri: 'http://localhost:3000/v1/user/resume', headers: { 'Content-Type': 'application/pdf', 'encoding': 'binary' } }, function (error, response, body) {
  if (!error) {
    fs.writeFile('python/resume_parser/data/input/resume/' + file.originalname, body, function (err) {
    });
  }
});
It saves the PDF, but the PDF is corrupted.
Can someone help me?
Thanks
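
No answer is recorded here, but one possible approach (a sketch under my own assumptions, not a verified fix) is to let multer-s3 finish the upload, then fetch the object back with s3.getObject and write the returned Body buffer to disk. multer-s3 streams the file straight to S3 and never keeps a local copy, which is why req.files has no buffer to write:

// Route: after uploadToS3bucket('temp') has streamed the file to S3,
// req.files.resume[0].key holds the generated S3 key.
const fs = require('fs');

router.post('/resume', uploadToS3bucket('temp').fields([
  { name: 'resume', maxCount: 1 }
]), (req, res, next) => {
  const uploaded = req.files.resume[0];
  s3.getObject({ Bucket: AWS_BUCKET_NAME, Key: uploaded.key }, (err, data) => {
    if (err) return next(err);
    // data.Body is a Buffer, so the local copy stays byte-for-byte intact
    fs.writeFile('python/resume_parser/data/input/resume/' + uploaded.originalname, data.Body, (err) => {
      if (err) return next(err);
      resumeCtrl.getUserResume(req, res, next);
    });
  });
});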

Getting the message "file not supported" after downloading an image file in Node.js. I'm uploading the file from Angular 7 using FormData

I'm sending the image using FormData from Angular to my Node.js API, and in Node.js I'm storing that file in myfolder. But when I open the stored image file, it shows:
"Abc.jpeg It appears that we don't support this file format"
From Node.js I used multiparty, then formidable, but I get the same error with both.
I compared the size of the file before uploading (the original file): it was 78 KB, but after uploading the file size became 111 KB.
Node.js code:
var form = new formidable.IncomingForm();
form.parse(req, function (err, fields, files) {
  console.log(files.fileDetails.path);
  var oldpath = files.fileDetails.path;
  var newpath = 'C:/storage/myfolder/' + files.fileDetails.name;
  fs.rename(oldpath, newpath, function (err) {
    if (err) throw err;
    res.write('File uploaded and moved!');
    res.end();
  });
});
Angular code:
public OnSubmit(formValue: any) {
  let main_form: FormData = new FormData();
  for (let j = 0; j < this.totalfiles.length; j++) {
    console.log("the values is ", <File>this.totalfiles[j]);
    console.log("the name is ", this.totalFileName[j]);
    main_form.append('fileDetails', this.totalfiles[j]);
  }
  console.log(formValue.items);
  this._SocietyService.postFiles(main_form).subscribe(data => {
    console.log("result is ", data);
  });
}
var path = require('path');
var multer = require('multer');

var storage = multer.diskStorage({
  destination: 'C:/storage/myfolder/',
  filename: function (req, file, cb) {
    // keep the original extension so the saved file opens correctly
    cb(null, file.fieldname + '-' + Date.now() + path.extname(file.originalname));
  }
});

var upload = multer({ storage: storage });
You can use multer; it's a very handy middleware for handling multipart/form-data.
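
To wire this into a route — a sketch; the field name fileDetails comes from the Angular code above, while the /upload path is an assumption:

// multer parses the multipart body and writes the files to disk itself,
// so there is no manual stream handling that could corrupt the bytes.
app.post('/upload', upload.array('fileDetails'), function (req, res) {
  req.files.forEach(function (f) {
    console.log('stored at', f.path); // where multer saved each image
  });
  res.send('Files uploaded!');
});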

Node request module with fs.createWriteStream() creates an empty file

I'm trying to upload an external URL to my server. Here's what I've got so far:
var fs = require('fs');
var request = require('request');
var path = require('path');

const imagesFolder = 'downloadedAssets/imgs';

function download(url, dest, filename, cb) {
  var file = fs.createWriteStream(dest + "/" + filename + path.extname(url));
  request({ url: url }, function (err, response) {
    if (err) {
      console.log(err.message);
      return;
    }
    response.pipe(file);
    file.on('error', function (err) {
      console.log(err.message);
      file.end();
    });
    file.on('finish', function () {
      file.close(cb);
    });
  });
}
and then executing the function...
var url = 'http://pngimg.com/uploads/spongebob/spongebob_PNG44.png';
download(url, imagesFolder, 'sponge', function onComplete(err) {
  if (err) {
    console.log(err.message);
  } else {
    console.log('image uploaded to server');
  }
});
This doesn't throw any errors, and it creates a file named sponge.png, but the file is empty. Any idea why?
You might have mixed up the examples from the official documentation. When you pass a callback, request buffers the whole response body itself, so by the time you pipe the response object inside the callback, its data has already been consumed. Try using pipe() directly on the request, like below:
function download(url, dest, filename, cb) {
  var file = fs.createWriteStream(dest + "/" + filename + path.extname(url));
  request({ url: url }).pipe(file);
}
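
Note that this version drops the cb parameter. A sketch along the same lines that keeps the original completion and error handling (my addition, not part of the answer):

function download(url, dest, filename, cb) {
  var file = fs.createWriteStream(dest + "/" + filename + path.extname(url));
  request({ url: url })
    .on('error', function (err) {   // network-level errors
      console.log(err.message);
    })
    .pipe(file);
  file.on('error', function (err) { // filesystem errors
    console.log(err.message);
  });
  file.on('finish', function () {
    file.close(cb);                 // signal completion to the caller
  });
}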

Copying files from one folder to another folder in the same S3 bucket - Node.js

I am trying to copy a file from one folder to another folder in the same bucket, but I am getting an Access Denied error. If I try it across two different buckets, it works fine.
Please find what I have tried so far below:
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'xxx',
  secretAccessKey: 'xxx'
});

var s3 = new AWS.S3();
var params = {
  Bucket: 'bucketname',                     /* another bucket works fine */
  CopySource: 'bucketname/externall/1.txt', /* required */
  Key: '1.txt',                             /* required */
  ACL: 'public-read'
};

s3.copyObject(params, function (err, data) {
  if (err) {
    console.log(err, err.stack); // an error occurred
  } else {
    console.log(data);           // successful response
  }
});
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'ACCESS_KEY',
  secretAccessKey: 'SECRET_KEY',
  region: 'REGION'
});

var s3 = new AWS.S3();
var bktName = 'BUCKET_NAME';

var options = {
  Bucket: bktName,
  Prefix: 'SOURCE_FOLDER/'
};

s3.listObjectsV2(options, function (err, data) {
  if (err) {
    console.log(err);
  } else {
    data['Contents'].forEach(function (obj) {
      // Example: to move from /test/123/ to /test/234/ (or 123/ to 234/),
      // then SOURCE = 123 and TARGET = 234
      var lockey = obj.Key.replace(/SOURCE/g, 'TARGET');
      var params = {
        Bucket: bktName,
        CopySource: '/' + bktName + '/' + obj.Key,
        Key: lockey
      };
      s3.copyObject(params, function (err, data) {
        if (err) {
          console.log(err);
        } else {
          console.log('Inserted', lockey);
        }
      });
    });
  }
});
I used the same copyObject method with the same bucket name in the source and destination paths, and it worked.
Below is my code sample:
{
  Bucket: bucketName,
  CopySource: '/' + bucketName + '/local/Country.png',
  Key: 'local/copy-Country.png'
}
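
Plugged into a full call — a sketch where only the params object above comes from the answer, and bucketName is a placeholder:

var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var bucketName = 'BUCKET_NAME'; // assumed placeholder

s3.copyObject({
  Bucket: bucketName,
  CopySource: '/' + bucketName + '/local/Country.png',
  Key: 'local/copy-Country.png'
}, function (err, data) {
  if (err) {
    console.log(err, err.stack);
  } else {
    console.log('Copied to local/copy-Country.png');
    // S3 has no real "move": to move instead of copy, follow up with
    // s3.deleteObject({ Bucket: bucketName, Key: 'local/Country.png' }, callback)
  }
});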

Node.js File Upload (Express 4, MongoDB, GridFS, GridFS-Stream)

I am trying to set up a file API in my Node.js application. My goal is to be able to write the file stream directly to GridFS, without needing to store the file on disk first. My create code seems to be working; I am able to save an uploaded file to GridFS. The problem is reading the file. When I try to download a saved file via a web browser window, I see that the file contents are wrapped with something like the following:
------WebKitFormBoundarye38W9pfG1wiA100l
Content-Disposition: form-data; name="file"; filename="myfile.txt"
Content-Type: text/javascript
***File contents here***
------WebKitFormBoundarye38W9pfG1wiA100l--
So my question is: what do I need to do to strip the boundary information from the file stream before saving it to GridFS? Here's the code I'm working with:
'use strict';

var mongoose = require('mongoose');
var _ = require('lodash');
var Grid = require('gridfs-stream');

Grid.mongo = mongoose.mongo;
var gfs = new Grid(mongoose.connection.db);

// I think this works. I see the file record in fs.files
exports.create = function(req, res) {
  var fileId = new mongoose.Types.ObjectId();
  var writeStream = gfs.createWriteStream({
    _id: fileId,
    filename: req.query.name,
    mode: 'w',
    content_type: req.query.type,
    metadata: {
      uploadedBy: req.user._id,
    }
  });
  writeStream.on('finish', function() {
    return res.status(200).send({
      message: fileId.toString()
    });
  });
  req.pipe(writeStream);
};

// File data is returned, but it's wrapped with
// WebKitFormBoundary and has headers.
exports.read = function(req, res) {
  gfs.findOne({ _id: req.params.id }, function (err, file) {
    if (err) return res.status(400).send(err);
    // With this commented out, my browser will prompt
    // me to download the raw file where I can see the
    // webkit boundary and request headers
    //res.writeHead(200, { 'Content-Type': file.contentType });
    var readstream = gfs.createReadStream({
      _id: req.params.id
      // I also tried this way:
      //_id: file._id
    });
    readstream.pipe(res);
  });
};
By the way, I'm not currently using any middleware for these routes, but I am open to doing so. I just didn't want the file to hit the disk prior to being sent to GridFS.
Edit:
Per @fardjad, I added the node-multiparty module for multipart/form-data parsing, and it kind of worked. But when I download an uploaded file and compare it with the original (as text), there are lots of differences in the encoding, and the downloaded file won't open. Here's my latest attempt:
'use strict';

var mongoose = require('mongoose');
var _ = require('lodash');
var multiparty = require('multiparty');
var Grid = require('gridfs-stream');

Grid.mongo = mongoose.mongo;
var gfs = new Grid(mongoose.connection.db);

exports.create = function(req, res) {
  var form = new multiparty.Form();
  var fileId = new mongoose.Types.ObjectId();
  form.on('error', function(err) {
    console.log('Error parsing form: ' + err.stack);
  });
  form.on('part', function(part) {
    if (part.filename) {
      var writeStream = gfs.createWriteStream({
        _id: fileId,
        filename: part.filename,
        mode: 'w',
        content_type: part.headers['content-type'],
        metadata: {
          uploadedBy: req.user._id,
        }
      });
      part.pipe(writeStream);
    }
  });
  // Close emitted after form parsed
  form.on('close', function() {
    return res.status(200).send({
      message: fileId.toString()
    });
  });
  // Parse req
  form.parse(req);
};

exports.read = function(req, res) {
  gfs.findOne({ _id: req.params.id }, function (err, file) {
    if (err) return res.status(400).send(err);
    res.writeHead(200, { 'Content-Type': file.contentType });
    var readstream = gfs.createReadStream({
      _id: req.params.id
    });
    readstream.pipe(res);
  });
};
Final edit:
Here's a simple implementation that I copied from another developer and modified. It's working for me. (I'm still trying to figure out why it won't work in my original Express app; something seems to be interfering.)
https://gist.github.com/pos1tron/094ac862c9d116096572
var Busboy = require('busboy');      // 0.2.9
var express = require('express');    // 4.12.3
var mongo = require('mongodb');      // 2.0.31
var Grid = require('gridfs-stream'); // 1.1.1

var app = express();
var server = app.listen(9002);

var db = new mongo.Db('test', new mongo.Server('127.0.0.1', 27017));
var gfs;
db.open(function(err, db) {
  if (err) throw err;
  gfs = Grid(db, mongo);
});

app.post('/file', function(req, res) {
  var busboy = new Busboy({ headers: req.headers });
  var fileId = new mongo.ObjectId();
  busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
    console.log('got file', filename, mimetype, encoding);
    var writeStream = gfs.createWriteStream({
      _id: fileId,
      filename: filename,
      mode: 'w',
      content_type: mimetype,
    });
    file.pipe(writeStream);
  }).on('finish', function() {
    // show a link to the uploaded file
    res.writeHead(200, { 'content-type': 'text/html' });
    res.end('<a href="/file/' + fileId.toString() + '">download file</a>');
  });
  req.pipe(busboy);
});
app.get('/', function(req, res) {
  // show a file upload form
  res.writeHead(200, { 'content-type': 'text/html' });
  res.end(
    '<form action="/file" enctype="multipart/form-data" method="post">' +
    '<input type="file" name="file"><br>' +
    '<input type="submit" value="Upload">' +
    '</form>'
  );
});

app.get('/file/:id', function(req, res) {
  gfs.findOne({ _id: req.params.id }, function (err, file) {
    if (err) return res.status(400).send(err);
    if (!file) return res.status(404).send('');
    res.set('Content-Type', file.contentType);
    res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
    var readstream = gfs.createReadStream({
      _id: file._id
    });
    readstream.on('error', function(err) {
      console.log('Got error while processing stream ' + err.message);
      res.end();
    });
    readstream.pipe(res);
  });
});
See my comment on the issue you created on GitHub. I had the same problem, but I managed to debug it. I narrowed it down until I was confident the problem was a piece of Express middleware modifying the request. I disabled my middleware one by one until I found the unlikely culprit: connect-livereload.
I commented out app.use(require('connect-livereload')()); and the problem went away.
I believe it was injecting the livereload script into the response (a binary image file).
It looks like the file was uploaded through an HTML form. In that case you need to decode the multipart/form-data encoded data, re-assemble the parts if needed, and save the file to GridFS. For parsing, you can use something like node-multiparty.
