Upload image to S3 bucket in Node.js - JavaScript

I am having some problems uploading my image from a form to my S3 bucket. At present only part of the image gets uploaded, for example 19 KB instead of the full 272 KB, and if I try to open the image from within my bucket it's broken.
app.post('/admin/addClub', (req, res) => {
  if (!req.user) {
    res.redirect('/admin');
    return;
  }
  // Upload image to S3
  var s3Bucket = new AWS.S3({ params: { Bucket: process.env.AWS_BUCKET, Key: process.env.AWS_ACCESS_KEY_ID } });
  var data = {
    Key: req.body.imageBanner, // file from form
    Body: req.body.imageBanner, // Not sure here
    ACL: "public-read",
    ContentType: helper.getContentTypeByFile(req.body.imageBanner)
  };
  s3Bucket.putObject(data, function(err, data) {
    if (err) {
      console.log('Error uploading data: ', data);
      res.redirect('/admin/main');
    } else {
      console.log('Successfully uploaded the image!');
      res.redirect('/admin/main');
    }
  });
});
Can anybody advise what I need to pass through for the Body key? I think this must be my issue.
Thanks

You need to integrate the express-fileupload package, which lets you receive file uploads in Express.
To install, run: npm install --save express-fileupload
Then you'll need to pass req.files.imageBanner.data as the Body parameter (supposing your file upload field looks like <input name="imageBanner" type="file" /> and the form is submitted as multipart/form-data, otherwise req.files will not be populated). Here's how it should look:
var fileUpload = require('express-fileupload');
app.use(fileUpload());

app.post('/admin/addClub', (req, res) => {
  if (!req.user) {
    res.redirect('/admin');
    return;
  }
  // Upload image to S3
  var s3Bucket = new AWS.S3({ params: { Bucket: process.env.AWS_BUCKET, Key: process.env.AWS_ACCESS_KEY_ID } });
  var data = {
    Key: req.body.imageBanner, // file name from the form (express-fileupload also exposes req.files.imageBanner.name)
    Body: req.files.imageBanner.data, // the uploaded file's bytes
    ACL: "public-read",
    ContentType: helper.getContentTypeByFile(req.body.imageBanner)
  };
  s3Bucket.putObject(data, function(err, data) {
    if (err) {
      console.log('Error uploading data: ', err);
      res.redirect('/admin/main');
    } else {
      console.log('Successfully uploaded the image!');
      res.redirect('/admin/main');
    }
  });
});
You can find the documentation for the express-fileupload package here:
https://www.npmjs.com/package/express-fileupload

Related

How to delete a folder or file through S3 requests in a DigitalOcean Space

I am trying to work with a great service called Spaces on DigitalOcean. Here is a pretty nice explanation about uploading files, but it only covers GET requests and nothing about DELETE. Maybe someone has experience with the S3 API together with DigitalOcean Spaces on Node.js?
Using a localhost server, I found in the Spaces settings that under "Advanced CORS Options" you can provide an "Origin", and the only "Allowed Method" is GET; PUT, DELETE, POST, and HEAD are disabled.
Here is what I am trying:
export default {
  upload: (req, res) => {
    const storage = multerS3({
      s3,
      bucket,
      contentType: multerS3.AUTO_CONTENT_TYPE,
      acl: 'public-read',
      key: function(req, file, callback) {
        const { email } = req.user;
        callback(null, email + '/' + file.originalname); // multer exposes the original file name as "originalname"
      },
    });
    const upload = multer({ storage }).array('upload', 3);
    upload(req, res, err => {
      if (err) {
        return res.status(422).send({
          errors: [{ title: 'Image Upload Error', detail: err.message }],
        });
      } else {
        console.log('Success upload file');
      }
      res.end();
    });
  },
  delete: (req, res) => {
    const params = { Bucket: bucket, Key: 'some-folder-name-here' };
    s3.deleteObjects(params, function(err, data) {
      if (err) {
        return res.send({ error: err });
      }
      res.send({ data });
    });
  }
}
Actually, it would be great if it were possible to delete and upload files from my localhost using the plain S3 API.
Thank you.
UPDATE:
The mistake was here: s3.deleteObjects should be s3.deleteObject, and yes, the Key has to be the whole object path without the bucket name.
Perfectly explained here. Thanks everyone.
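For reference, a minimal sketch of the corrected delete handler, reusing the s3 client and bucket variable from the code above; the key shape (email + '/' + file name) and the req.params.fileName route parameter are assumptions made only to mirror how the upload handler names its objects:
delete: (req, res) => {
  // Key is the full object path without the bucket name, e.g. "user@example.com/photo.jpg".
  // req.params.fileName is a hypothetical route parameter.
  const params = { Bucket: bucket, Key: req.user.email + '/' + req.params.fileName };
  s3.deleteObject(params, function(err, data) {
    if (err) {
      return res.send({ error: err });
    }
    res.send({ data });
  });
}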

How to write a program to download a bunch of images from URLs and upload them to AWS S3

Can someone please help me write a program that downloads image files from a URL, uploads the same files to AWS S3, and then deletes the downloaded images?
var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

download('https://www.google.com/images/srpr/logo3w.png', 'google.png', function() {
  console.log('done');
  fs.readFile('google.png', function (err, data) {
    if (err) {
      console.log("Read file failed: " + err);
    }
    let params = {
      Bucket: waftBucket,
      Key: 'google.png',
      Body: data,
      ContentType: 'image/png',
      ACL: 'public-read'
    };
    let s3 = new AWS.S3();
    s3.putObject(params, function(err, data) {
      if (err) {
        return console.log('There was an error uploading image: ' + err.message);
      }
      console.log('Successfully Uploaded.');
      fs.unlink('google.png');
    });
  });
});
The above code works for one file, but it does not work inside a loop:
var download = function(uri, filename, callback) {
  request.head(uri, function(err, res, body) {
    console.log('content-type:', res.headers['content-type']);
    console.log('content-length:', res.headers['content-length']);
    request(uri).pipe(fs.createWriteStream(filename)).on('close', callback);
  });
};

var getFileName = function(url) {
  return url.split("/")[5];
}

var data = {
  frags: [
    {url: "https://fimgs.net/images/perfume/375x500.39678.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.39678.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.4506.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.4506.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.29601.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.29601.jpg")},
    {url: "https://fimgs.net/images/perfume/375x500.32597.jpg", fileName: getFileName("https://fimgs.net/images/perfume/375x500.32597.jpg")}
  ]};

for (var i = 0; i < data.frags.length; i++) {
  download(data.frags[i].url, data.frags[i].fileName, function() {
    fs.readFile(data.frags[i].fileName, function (err, data) {
      if (err) {
        console.log("Read file failed: " + err);
      }
      let params = {
        Bucket: waftBucket,
        Key: 'images/' + data.frags[i].fileName,
        Body: data,
        ContentType: 'image/jpeg',
        ACL: 'public-read'
      };
      let s3 = new AWS.S3();
      s3.putObject(params, function(err, data) {
        if (err) {
          return console.log('There was an error uploading image: ' + err.message);
        }
        console.log('Successfully Uploaded.');
        fs.unlink(data.frags[i].fileName);
      });
    });
  });
}
Is there any other way to do this? Is it possible to read the contents of the URL and save it directly to the AWS S3 bucket, without downloading the file to a temp folder and deleting it afterwards?
Thanks in advance.
Without knowing much about the exact use case, I would recommend checking out the AWS CLI to do this. It provides a sync option, which is described as follows:
Syncs directories and S3 prefixes. Recursively copies new and updated
files from the source directory to the destination. Only creates
folders in the destination if they contain one or more files.
So, you need to download your files to one temp folder, sync it with S3 using the CLI and then delete the folder (using rimraf or fs).
s3.putObject only supports a single object upload per HTTP request.
Since all of these functions are asynchronous, using a plain for loop creates a lot of mess: by the time the callbacks run, the loop variable i has already advanced past the end of the array, and the data parameter of fs.readFile shadows the outer data object.
So you can either use promises to make the requests one after another, or use a better solution as pointed out by #kmukkamala.
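As an illustration of the promise-based approach, a minimal sketch only, reusing the question's request module, data object, and waftBucket bucket name, and streaming each image straight from its URL into S3 so that no temp file (and no fs.unlink) is needed:
// Sketch: stream each download directly into S3, one after another.
// Assumes the same `request`, `AWS`, `data` and `waftBucket` as in the question.
var s3 = new AWS.S3();

function uploadFromUrl(url, fileName) {
  return s3.upload({
    Bucket: waftBucket,
    Key: 'images/' + fileName,
    Body: request(url), // request() returns a readable stream; s3.upload accepts streams
    ContentType: 'image/jpeg',
    ACL: 'public-read'
  }).promise();
}

// Chain the uploads so each frag is captured correctly and requests run sequentially.
data.frags.reduce(function(chain, frag) {
  return chain.then(function() {
    return uploadFromUrl(frag.url, frag.fileName);
  });
}, Promise.resolve())
  .then(function() { console.log('All images uploaded.'); })
  .catch(function(err) { console.log('Upload failed: ' + err.message); });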
Install the S3 tools on the server and run the following command to sync the entire directory:
s3cmd sync myserverDir s3://bucket/
You can schedule a crontab to run the script every 10-20 minutes and then delete the folder or files.

AWS S3 ZIP file upload is corrupting the archived files

I am using the code snippet below to upload images through a file upload control, zip them using the JSZip API, and then store the archive in AWS S3. The file upload succeeds and I can see demo.zip in the AWS console. However, the files inside demo.zip are getting corrupted and I am unable to unzip the archive.
I tried my level best to debug, but I did not get any clue about the reason for it.
Any help in correcting my code would be really appreciated.
var multer = require('multer');
var uploadservice = multer({ storage: multer.memoryStorage(), limits: { fileSize: 1000 * 1000 * 12 } }).array("files", 5);

app.post('endpointurl', function (req, res, next) {
  uploadservice(req, res, function (err) {
    if (err) {
      console.log("error - " + err);
      res.status(200).end("File upload is failure");
    } else {
      var files = req.files;
      var JSZip = require("jszip");
      var zip = new JSZip();
      for (i = 0; i < files.length; i++) {
        zip.file('file' + i, files[i].buffer, { binary: true });
      }
      zip.generateAsync({
        type: "binarystring",
        compression: "DEFLATE",
        mimeType: "application/zip"
      }).then(function (content) {
        var s3 = new aws.S3();
        var S3_BUCKET = 'mybucket';
        s3.putObject({
          ACL: 'private',
          Bucket: S3_BUCKET,
          Key: "demo.zip",
          // serverSideEncryption: 'AES256',
          Body: content,
          ContentType: "application/zip"
        }, function (error, response) {
          if (error) {
            console.log("error - " + error);
            res.status(200).end("File upload failed");
          } else {
            console.log("success");
            res.status(200).end("File is uploaded successfully");
          }
        });
      });
    }
  });
});
Sorry, but I can't comment, so:
What happens if you create the zip file and then open it before moving it to S3? Perhaps it's the use of zip.generateAsync({ type: "binarystring", ... }) that is causing the corruption.
Well, in my case it gets corrupted after copying into S3.
I've mounted the S3 bucket on an EC2 instance and I'm copying the zip into that particular path; everything that is copied into that directory gets uploaded to S3.
When I try to unzip it after copying, it throws "Bad Zip, incorrect headers" and I can't unzip it. The same zip works exactly as expected before uploading.
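Following up on the binarystring suggestion: a common cause of this kind of corruption is sending the archive as a JavaScript string, which can be re-encoded as UTF-8 on its way to S3. A minimal sketch of the alternative, reusing the question's zip, aws, and S3_BUCKET names and assuming nothing else changes, is to have JSZip produce a Node Buffer instead:
// Sketch only: "nodebuffer" makes JSZip return a Buffer, which S3 stores byte-for-byte,
// whereas a binary string may be re-encoded and corrupt the archive.
zip.generateAsync({
  type: "nodebuffer",
  compression: "DEFLATE"
}).then(function (content) {
  var s3 = new aws.S3();
  s3.putObject({
    ACL: 'private',
    Bucket: S3_BUCKET,
    Key: "demo.zip",
    Body: content, // Buffer, not a string
    ContentType: "application/zip"
  }, function (error, response) {
    if (error) {
      console.log("error - " + error);
    } else {
      console.log("success");
    }
  });
});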

Error uploading data: The request signature we calculated does not match the signature you provided. Check your key and signing method

I am trying to upload an image to an AWS S3 bucket from the React Native platform, but I am getting: Error uploading data: Error: The request signature we calculated does not match the signature you provided. Check your key and signing method.
Has anyone tried to upload images this way?
JavaScript code to upload images to AWS S3:
var uniqueFileName = image.fileName;
console.log("File Name", uniqueFileName);
var bodyData = image.data;
console.log("File Json", bodyData);
var filetype = image.type;
console.log("File Type", filetype);

var AWS3 = require('aws-sdk/dist/aws-sdk-react-native');
AWS3.config.update({
  "accessKeyId": AWS.accessKeyId,
  "secretAccessKey": AWS.secretAccessKey,
  "region": "us-east-1"
});

var s3 = new AWS3.S3();
var params = {
  Bucket: AWS.bucketName,
  Key: uniqueFileName,
  ContentType: filetype,
  Body: bodyData,
  ContentEncoding: 'base64'
};

s3.upload(params, function (err, res) {
  if (err) {
    console.log("Error uploading data: ", err);
  } else {
    console.log("Successfully uploaded data");
  }
});
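This signature error is most often a credentials problem (a wrong or mismatched access key and secret, or a Key containing characters that need encoding), but the request body is also worth checking. As a sketch only, assuming image.data really is a base64 string as React Native image pickers typically return, you can decode it to a Buffer (via the buffer npm package, since React Native has no global Buffer) and drop ContentEncoding:
// Sketch only: decode the base64 payload instead of uploading the raw string.
var Buffer = require('buffer').Buffer; // "buffer" npm package, assumed to be installed

var params = {
  Bucket: AWS.bucketName,
  Key: uniqueFileName,
  ContentType: filetype,
  Body: Buffer.from(bodyData, 'base64') // decoded bytes
};

s3.upload(params, function (err, res) {
  if (err) {
    console.log("Error uploading data: ", err);
  } else {
    console.log("Successfully uploaded data");
  }
});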

Node.js upload to Amazon S3 works but the file is corrupt

I am submitting a form via my CMS which contains a file picker for an image and some text. The code runs and an object is created in my S3 account with the correct name, but it is corrupt. For example, I am uploading JPG images, but when I view them in the S3 dashboard I just see a black screen.
Any help is greatly appreciated.
My HTML form:
<form enctype="multipart/form-data" action="updateSchedule" method="POST">
  <input type="file" name="schedulepicture" id="schedulepicture">
  <textarea rows="4" cols="50" id="ScheduleText" name="ScheduleText" maxlength="2000"></textarea>
  <button type="submit" id="updateschedulebutton">Update</button>
</form>
My Node.js script:
router.post('/updateschedule', isLoggedIn, upload.single('schedulepicture'), function(req, res) {
  var scheduleImageToUpload;
  // Check if image was uploaded with the form & process it
  if (typeof req.file !== "undefined") {
    // Create Amazon S3 specific object
    var s3 = new aws.S3();
    // This uploads the file but the file cannot be viewed.
    var params = {
      Bucket: S3_BUCKET,
      Key: req.file.originalname, // This is what S3 will use to store the data uploaded.
      Body: req.file.path, // the actual *file* being uploaded
      ContentType: req.file.mimetype, // type of file being uploaded
      ACL: 'public-read', // Set permissions so everyone can see the image
      processData: false,
      accessKeyId: S3_accessKeyId,
      secretAccessKey: S3_secretAccessKey
    }
    s3.upload(params, function(err, data) {
      if (err) {
        console.log("err is " + err);
      }
      res.redirect('../adminschedule');
    });
  }
});
I believe you need to pass a stream instead of the file path; you can use fs.createReadStream like this:
var fs = require('fs'); // needed for fs.createReadStream

router.post('/updateschedule', isLoggedIn, upload.single('schedulepicture'), function(req, res) {
  var scheduleImageToUpload;
  // Check if image was uploaded with the form & process it
  if (typeof req.file !== "undefined") {
    // Create Amazon S3 specific object
    var s3 = new aws.S3();
    var stream = fs.createReadStream(req.file.path);
    var params = {
      Bucket: S3_BUCKET,
      Key: req.file.originalname, // This is what S3 will use to store the data uploaded.
      Body: stream, // the actual *file* being uploaded
      ContentType: req.file.mimetype, // type of file being uploaded
      ACL: 'public-read', // Set permissions so everyone can see the image
      // Note: processData, accessKeyId and secretAccessKey are not valid upload parameters;
      // credentials belong in the AWS config or the S3 constructor instead.
      processData: false,
      accessKeyId: S3_accessKeyId,
      secretAccessKey: S3_secretAccessKey
    }
    s3.upload(params, function(err, data) {
      if (err) {
        console.log("err is " + err);
      }
      res.redirect('../adminschedule');
    });
  }
});
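One side note, as a sketch only under the question's own variable names: accessKeyId and secretAccessKey would normally be supplied when constructing the client (or via AWS.config or environment variables), not inside the upload params, and processData is not an S3 option at all.
// Sketch only, reusing the question's S3_accessKeyId / S3_secretAccessKey values.
var aws = require('aws-sdk');
var s3 = new aws.S3({
  accessKeyId: S3_accessKeyId,
  secretAccessKey: S3_secretAccessKey
});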
