Is it possible to allow downloads from Amazon S3 only for users who have a token and a link to a file in my bucket?
This token could, for example, be generated by my upload backend server and associated with the uploaded file.
What possibilities do I have here?
Edit
When I said download from S3, I mean directly from S3, not through my server; only the upload goes through my server.
You can generate presigned URLs using minio-py like this:
from minio import Minio

# Client pointed at AWS S3; the same call works against any S3-compatible endpoint
client = Minio('s3.amazonaws.com',
               access_key='YOUR-ACCESSKEYID',
               secret_key='YOUR-SECRETACCESSKEY')

# Presigned GET URL, valid for the default expiry of 7 days
downloadURL = client.presigned_get_object('mybucket', 'myobject')
You can open downloadURL in a browser to download the file directly from S3.
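If your backend is Node rather than Python, the AWS SDK for JavaScript (v2) can generate an equivalent presigned URL; a minimal sketch, with the bucket and object names as placeholders:

var AWS = require('aws-sdk');

var s3 = new AWS.S3();

// Anyone holding this URL can GET the object until it expires
var downloadURL = s3.getSignedUrl('getObject', {
    Bucket: 'mybucket',
    Key: 'myobject',
    Expires: 900 // seconds
});

Handing out such URLs only after your own token check gives you exactly the "download directly from S3, upload through the server" split described in the edit.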
Of course it is possible; just wrap the access with some authentication on your end:
var AWS = require('aws-sdk');
var backendTokenAuthenticator = require('./auth');

app.get('/download/:file', function (req, res) {
    if (req.query.token && req.params.file) {
        backendTokenAuthenticator(req.query.token, req.params.file, function (err, isAuth) {
            if (err) {
                res.status(500).end();
            } else if (!isAuth) {
                res.status(403).end('permission denied');
            } else {
                // Token checked out: fetch the object from S3 and relay it
                var s3 = new AWS.S3();
                s3.getObject({
                    Bucket: "myBucket",
                    Key: req.params.file
                }, function (error, data) {
                    if (error) {
                        console.log("Failed to retrieve an object: " + error);
                        res.status(404).end('File not found');
                    } else {
                        res.setHeader('Content-Length', data.ContentLength);
                        res.setHeader('Content-Type', 'application/octet-stream');
                        res.end(data.Body);
                    }
                });
            }
        });
    } else {
        res.status(403).end('token must be supplied');
    }
});
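Note that this proxies the file bytes through your server. If, per the edit above, the download should happen directly from S3, the same token check can instead redirect to a short-lived presigned URL; a minimal sketch reusing the placeholders from the code above:

app.get('/download/:file', function (req, res) {
    backendTokenAuthenticator(req.query.token, req.params.file, function (err, isAuth) {
        if (err || !isAuth) {
            res.status(403).end('permission denied');
        } else {
            var s3 = new AWS.S3();
            // Short-lived URL; the browser then fetches straight from S3
            var url = s3.getSignedUrl('getObject', {
                Bucket: "myBucket",
                Key: req.params.file,
                Expires: 60 // seconds
            });
            res.redirect(url);
        }
    });
});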
Related
I'm trying to upload images from the browser to Amazon S3. The code below sends some sort of blob to S3 just fine, but I can't read the resulting file in a browser; it doesn't seem to know it's an image file.
I send it to NodeJS from the browser:
let myReader = new FileReader();
// result is a data URL string, e.g. "data:image/png;base64,...."
myReader.onloadend = (e) => { app.ws.send(myReader.result); };
myReader.readAsDataURL(e.target.files[0]);
In NodeJS I send it to S3:
const s3 = new AWS.S3();
const params = {
    Bucket: <bucketName>,
    Key: fileName,
    Body: imgData,
    ACL: "public-read",
    ContentEncoding: 'base64'
};
s3.putObject(params, (err, data) => {
    if (err) throw err;
});
Check the AWS S3 guide; this doc contains the logic needed to upload an image from the browser to an S3 bucket:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/s3-example-photo-album.html
Turns out you need to modify the base64 image data coming in and explicitly set the ContentType:
const s3 = new AWS.S3();

// e.g. "data:image/png;base64,...." -> "png"
const type = imgData.split(';')[0].split('/')[1];

// Strip the data URL prefix and decode the base64 payload into a Buffer
imgData = Buffer.from(imgData.replace(/^data:image\/\w+;base64,/, ""), 'base64');

let params = {
    Bucket: <bucketName>,
    Key: fileName,
    Body: imgData,
    ACL: "public-read",
    ContentType: "image/" + type,
    ContentEncoding: 'base64'
};
s3.upload(params, (err, data) => {
    if (err) throw err;
    // ... Do something ...
});
I am using express-fileupload to upload the images. The images are saved in my local directory. I want to insert the name of the file into MongoDB if possible. Finally, I want the image to be displayed on my frontend.
function insertRecord(req,res){
if(req.files){
const file=req.files.filename
filename=file.name
file.mv("./upload"+filename,function(err){
if(err)
console.log(err)
})
}
const user=new User()
user.name=req.body.name
user.address=req.body.address
user.email=req.body.email
user.mobile=req.body.mobile
user.filename=req.body.filename
user.save((err,docs)=>{
if(!err){
res.redirect('/user/list')
}
else {
if (err.name == 'ValidationError') {
handleValidationError(err, req.body);
res.render("./users/addOrEdit", {
viewTitle: "Insert User",
user: req.body
});
}
else
console.log('Error during record insertion : ' + err);
}
});
}
I am not sure whether the way I insert the name of the file into MongoDB is correct. Anyway, that part is optional, but I don't understand how I can display the uploaded images that are present in the local directory.
I tried to save the image as base64, but now the record is not saved to the database at all.
var storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, 'uploads/')
},
filename: function (req, file, cb) {
cb(null, file.fieldname + '-' + Date.now())
}
})
var upload = multer({ storage: storage })
router.post('/',upload.single('myImage'),function(req,res){
if (req.body._id == '')
insertRecord(req, res);
else
updateRecord(req, res);
})
function insertRecord(req,res){
var img = fs.readFileSync(req.file.path);
var encode_image = img.toString('base64');
var finalImg = {
contentType: req.file.mimetype,
image: new Buffer(encode_image, 'base64')
};
const user=new User()
user.name=req.body.name
user.address=req.body.address
user.email=req.body.email
user.mobile=req.body.mobile
user.save(finalImg,(err,docs)=>{
if(!err){
res.redirect('/user/list')
}
else {
if (err.name == 'ValidationError') {
handleValidationError(err, req.body);
res.render("./users/addOrEdit", {
viewTitle: "Insert User",
user: req.body
});
}
else
console.log('Error during record insertion : ' + err);
}
});
}
Edit: I think there is a problem in the code: it should be `'./upload/' + filename`, not without the second slash.
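As for the base64 attempt: Mongoose's save takes options and a callback, not extra data to persist, so passing finalImg to user.save(...) never attaches the image to the document. A minimal sketch of the likely intent, assuming the User schema has an img field with data and contentType subfields (those names are hypothetical):

const fs = require('fs');

function insertRecord(req, res) {
    const user = new User();
    user.name = req.body.name;
    user.address = req.body.address;
    user.email = req.body.email;
    user.mobile = req.body.mobile;
    // Attach the image to the document itself instead of passing it to save()
    user.img = {
        data: fs.readFileSync(req.file.path), // Buffer; stored as binary
        contentType: req.file.mimetype
    };
    user.save((err, docs) => {
        if (!err) res.redirect('/user/list');
        else console.log('Error during record insertion : ' + err);
    });
}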
In order to show the images, you have to open a static route in Express, for example app.use('/images', express.static(PATH)). Then, on the frontend, you can reference them as <img src="URL/images/FILENAME" />.
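A minimal, self-contained sketch of that idea, assuming the uploaded files land in an uploads/ directory next to the server script (the directory name is an assumption):

const path = require('path');
const express = require('express');
const app = express();

// Everything saved under ./uploads becomes reachable at /images/<filename>
app.use('/images', express.static(path.join(__dirname, 'uploads')));

app.listen(3000);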
From your code it is not possible to tell what kind of data you are sending to the server. As far as I understand, you're trying to `mv` the string `filename`. In order to transfer files (such as images), you should send form-data instead of JSON data, or you should encode the image file into Base64 so it can be transferred as text (not the filename, the whole file).
Check out Multer for this kind of job; it is described well in its README.md. Apart from that, until you submit the form, the image won't be available on the front-end. If you want to preview the image before uploading, that is a separate process, which you can learn more about here.
I am attempting to use Node to upload a file to an S3 server, which I can do successfully. However, the reason for using S3 is that I need to upload a file for a user's profile picture. How would I get the URL of the uploaded file in my code and store it in the DB, so it is viewable on the frontend? I also need to append some kind of unique string to the file name so it doesn't get overwritten. How would I go about doing this?
Thank you.
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');

function uploadFileToS3Bucket(filePath) {
    AWS.config.update({
        accessKeyId: 'AWS ACCESS KEY',
        secretAccessKey: 'AWS SECRET KEY'
    });
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'AWS_BUCKET_NAME',
        Body: fs.createReadStream(filePath),
        // Date.now() makes the key unique so existing files are not overwritten
        Key: "BB_Teams_PDF_" + Date.now() + "_" + path.basename(filePath)
    };
    s3.upload(params, function (err, data) {
        if (err) {
            console.log("Error", err);
        }
        // success: data.Location holds the URL of the uploaded object
        if (data) {
            console.log("Uploaded in:", data.Location);
        }
    });
}
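The callback already receives the URL you're after as data.Location, so one way to hand it back to the caller (and from there into the DB) is to wrap the upload in a Promise; a minimal sketch, where saveProfilePictureUrl and userId are hypothetical stand-ins for your own DB layer:

function uploadAndGetUrl(filePath) {
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'AWS_BUCKET_NAME',
        Body: fs.createReadStream(filePath),
        Key: "BB_Teams_PDF_" + Date.now() + "_" + path.basename(filePath)
    };
    // upload() returns a ManagedUpload; .promise() resolves with the result
    return s3.upload(params).promise().then(function (data) {
        return data.Location; // the object's URL on S3
    });
}

// Usage: persist the returned URL on the user's record
uploadAndGetUrl('./avatar.png').then(function (url) {
    return saveProfilePictureUrl(userId, url); // hypothetical DB helper
});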
I am using the code snippet below to upload an image through a file upload control, zip it using the JSZip API, and then store it in AWS S3. The file upload succeeds and I can see demo.zip in the AWS console. However, the files in demo.zip are getting corrupted, and hence I am unable to unzip it.
I tried my level best to debug, but I did not get any clue about the reason for it.
Any help in correcting my code would be really appreciated.
var aws = require('aws-sdk');
var multer = require('multer');
var uploadservice = multer({
    storage: multer.memoryStorage(),
    limits: { fileSize: 1000 * 1000 * 12 }
}).array("files", 5);

app.post('endpointurl', function (req, res, next) {
    uploadservice(req, res, function (err) {
        if (err) {
            console.log("error - " + err);
            res.status(200).end("File upload failed");
        } else {
            var files = req.files;
            var JSZip = require("jszip");
            var zip = new JSZip();
            // Add each uploaded file's in-memory buffer to the archive
            for (var i = 0; i < files.length; i++) {
                zip.file('file' + i, files[i].buffer, { binary: true });
            }
            zip.generateAsync({
                type: "binarystring",
                compression: "DEFLATE",
                mimeType: "application/zip"
            }).then(function (content) {
                var s3 = new aws.S3();
                var S3_BUCKET = 'mybucket';
                s3.putObject({
                    ACL: 'private',
                    Bucket: S3_BUCKET,
                    Key: "demo.zip",
                    // serverSideEncryption: 'AES256',
                    Body: content,
                    ContentType: "application/zip"
                }, function (error, response) {
                    if (error) {
                        console.log("error - " + error);
                        res.status(200).end("File upload failed");
                    } else {
                        console.log("success");
                        res.status(200).end("File is uploaded successfully");
                    }
                });
            });
        }
    });
});
Sorry, but I can't comment, so: what happens if you create the zip file and then open it before moving it to S3? Perhaps it's the use of zip.generateAsync({ type: "binarystring", ... }) that is causing the corruption.
Well, in my case it gets corrupted after copying into S3.
I've mounted the S3 bucket in an EC2 instance and I am copying the zip into that particular path; everything copied into that directory gets uploaded to S3.
When I try to unzip it after copying, it throws "Bad Zip, incorrect headers" and I can't extract it. The same zip works exactly as expected before uploading.
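For what it's worth, if the binary-string round trip is indeed the culprit, JSZip can emit a Node Buffer directly, and putObject accepts a Buffer byte-for-byte with no re-encoding; a minimal sketch of just the changed portion of the code above:

zip.generateAsync({
    type: "nodebuffer", // a Buffer avoids lossy string conversions
    compression: "DEFLATE"
}).then(function (content) {
    s3.putObject({
        ACL: 'private',
        Bucket: S3_BUCKET,
        Key: "demo.zip",
        Body: content, // uploaded unmodified
        ContentType: "application/zip"
    }, callback); // callback as in the original handler
});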
Update: For future reference, Amazon have now updated the documentation from what was there at the time of asking. As per @Loren Segal's comment below:
We've corrected the docs in the latest preview release to document this parameter properly. Sorry about the mixup!
I'm trying out the developer preview of the AWS SDK for Node.js and want to upload a zipped tarball to S3 using putObject.
According to the documentation, the Body parameter should be...
Body - (Base64 Encoded Data)
...therefore, I'm trying out the following code...
var AWS = require('aws-sdk'),
fs = require('fs');
// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });
// Read in the file, convert it to base64, store to S3
fs.readFile('myarchive.tgz', function (err, data) {
if (err) { throw err; }
var base64data = new Buffer(data, 'binary').toString('base64');
var s3 = new AWS.S3();
s3.client.putObject({
Bucket: 'mybucketname',
Key: 'myarchive.tgz',
Body: base64data
}).done(function (resp) {
console.log('Successfully uploaded package.');
});
});
Whilst I can then see the file in S3, if I download it and attempt to decompress it, I get an error that the file is corrupted. Therefore it seems that my method for 'base64 encoded data' is off.
Can someone please help me upload a binary file using putObject?
You don't need to convert the buffer to a base64 string. Just set Body to data and it will work.
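In other words, a minimal sketch of the same upload without the base64 step (using the stable callback API rather than the preview's .done()):

var AWS = require('aws-sdk'),
    fs = require('fs');

fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: data // raw Buffer, uploaded as-is
    }, function (err) {
        if (err) { throw err; }
        console.log('Successfully uploaded package.');
    });
});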
Here is a way to send a file using streams, which might be necessary for large files and will generally reduce memory overhead:
var AWS = require('aws-sdk'),
fs = require('fs');
// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });
// Stream the file straight to S3 (no base64 conversion needed)
var fileStream = fs.createReadStream('myarchive.tgz');
fileStream.on('error', function (err) {
if (err) { throw err; }
});
fileStream.on('open', function () {
var s3 = new AWS.S3();
s3.putObject({
Bucket: 'mybucketname',
Key: 'myarchive.tgz',
Body: fileStream
}, function (err) {
if (err) { throw err; }
});
});
I was able to upload my binary file this way.
var fileStream = fs.createReadStream("F:/directory/fileName.ext");
var putParams = {
Bucket: s3bucket,
Key: s3key,
Body: fileStream
};
s3.putObject(putParams, function(putErr, putData){
if(putErr){
console.error(putErr);
} else {
console.log(putData);
}
});