I'm developing a JavaScript client to upload files directly to Amazon S3.
<input type="file" id="file-chooser" />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
var bucket = new AWS.S3({params: {Bucket: 'myBucket'}});
var fileChooser = document.getElementById('file-chooser');
var button = document.getElementById('upload-button');
var results = document.getElementById('results');
button.addEventListener('click', function() {
var file = fileChooser.files[0];
if (file) {
results.innerHTML = '';
var params = {Key: file.name, ContentType: file.type, Body: file};
bucket.putObject(params, function (err, data) {
results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
});
} else {
results.innerHTML = 'Nothing to upload.';
}
}, false);
</script>
This example from the Amazon documentation works fine, but it doesn't provide any feedback on the upload progress.
Any ideas?
Thanks
Rather than using s3.putObject, why not use the ManagedUpload class instead?
It was developed specifically to let you hook into the httpUploadProgress event, which should make updating your progress bar fairly straightforward.
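For example, here is a minimal sketch based on your snippet (untested; it reuses the bucket, fileChooser and results variables from your code and assumes SDK v2):

var file = fileChooser.files[0];
var params = {Key: file.name, ContentType: file.type, Body: file};

// upload() returns a ManagedUpload, so you can subscribe to progress events before sending the request
var upload = bucket.upload(params);

upload.on('httpUploadProgress', function (progress) {
  // progress.loaded and progress.total are byte counts
  var percent = Math.round((progress.loaded / progress.total) * 100);
  results.innerHTML = 'Uploading: ' + percent + '%';
});

upload.send(function (err, data) {
  results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
});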
I have done some customisation for file upload progress. You can use the same logic in Node, Angular, and plain JavaScript.
Here is the repository link:
https://github.com/aviboy2006/aws-s3-file-upload-progress
Use this fiddle to test: https://jsfiddle.net/sga3o1h5/
Note: update the access key, bucket name and secret key.
var bucket = new AWS.S3({
accessKeyId: "",
secretAccessKey: "",
region: 'us-east-1'
});
uploadfile = function(fileName, file, folderName) {
const params = {
Bucket: "fileuploadprocess",
Key: folderName + fileName,
Body: file,
ContentType: file.type
};
return bucket.upload(params, function(err, data) {
if (err) {
console.log('There was an error uploading your file: ', err);
return false;
}
console.log('Successfully uploaded file.', data);
return true;
});
}
uploadSampleFile = function() {
var progressDiv = document.getElementById("myProgress");
progressDiv.style.display="block";
var progressBar = document.getElementById("myBar");
var file = document.getElementById("myFile").files[0];
var folderName = "Document/";
var uniqueFileName = 'SampleFile';
let fileUpload = {
id: "",
name: file.name,
nameUpload: uniqueFileName,
size: file.size,
type: "",
timeReference: 'Unknown',
progressStatus: 0,
displayName: file.name,
status: 'Uploading..',
}
uploadfile(uniqueFileName, file, folderName)
.on('httpUploadProgress', function(progress) {
let progressPercentage = Math.round(progress.loaded / progress.total * 100);
console.log(progressPercentage);
progressBar.style.width = progressPercentage + "%";
if (progressPercentage < 100) {
fileUpload.progressStatus = progressPercentage;
} else if (progressPercentage == 100) {
fileUpload.progressStatus = progressPercentage;
fileUpload.status = "Uploaded";
}
})
}
I bumped into this post, and then I found this AWS npm package, which does exactly what you are asking for:
@aws-sdk/lib-storage
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client, S3 } from "@aws-sdk/client-s3";
const target = { Bucket, Key, Body };
try {
const parallelUploads3 = new Upload({
client: new S3({}) || new S3Client({}),
tags: [...], // optional tags
queueSize: 4, // optional concurrency configuration
partSize: 1024 * 1024 * 5, // optional size of each part, in bytes; minimum 5 MB
leavePartsOnError: false, // optional manually handle dropped parts
params: target,
});
parallelUploads3.on("httpUploadProgress", (progress) => {
console.log(progress);
});
await parallelUploads3.done();
} catch (e) {
console.log(e);
}
My code is written below. It generates my signed URL perfectly fine in development, and the files that I want to get and upload work locally.
const S3 = require("aws-sdk/clients/s3");
const fs = require("fs");
const s3 = new S3({
region: process.env.AWS_BUCKET_REGION,
accessKeyId: process.env.AWS_ACCESS_KEY,
secretAccessKey: process.env.AWS_SECRET_KEY,
signatureVersion: "v2",
});
const uploadFile = (file, id, directory) => {
const fileStream = fs.createReadStream(file.path);
const uploadParams = {
Bucket: process.env.AWS_BUCKET_NAME,
Body: fileStream,
Key: directory + id,
ContentType: file.mimetype, // note: s3.upload expects ContentType; MimeType is not a valid parameter
};
return s3.upload(uploadParams).promise();
};
exports.uploadFile = uploadFile;
const deleteFile = (id, directory) => {
const uploadParams = {
Bucket: process.env.AWS_BUCKET_NAME,
Key: directory + id,
};
return s3.deleteObject(uploadParams).promise();
};
exports.deleteFile = deleteFile;
const getFileStream = ({ key }) => {
if (key) {
const downloadParams = {
Key: key,
Bucket: process.env.AWS_BUCKET_NAME,
};
return s3.getObject(downloadParams).createReadStream();
}
};
exports.getFileStream = getFileStream;
function generatePreSignedPutUrl({ key, operation }) {
const params = { Bucket: process.env.AWS_BUCKET_NAME, Key: key, Expires: 60 };
return s3.getSignedUrl(operation, params);
}
exports.generatePreSignedPutUrl = generatePreSignedPutUrl;
These are the requests I make from the client
const getSignedURL = async ({ key, operation }) =>
client.post(`${endpoint}/get-signed-url`, { key, operation });
const result = await getSignedURL({
key: directory + "/" + key,
operation: "getObject",
});
let url = result.data.data.url;
console.log({ url });
and this is the route on my server.
router.post("/get-signed-url", requireKey, async (req, res) => {
const { key, operation } = req.body;
try {
console.log({ b: req.body });
let url = generatePreSignedPutUrl({ key, operation });
console.log({ url });
res.json({ success: true, data: { url } });
} catch (e) {
console.log({ e });
res.status(400).json({ error: "Internal Server Error" });
}
});
I followed all of S3's documentation to get this set up, and I am trying to make sure all files can be downloaded and uploaded securely from my application. Does anyone know how I can get this working in production, where the server is hosted on Heroku and the client is a Next.js site?
On moving to the next step in the form I run some checks. One is to stop photos over 10MB and to prevent .heic files from being uploaded. 90% of the time it works, but now and again files are let through.
Any help with a better-written solution, or a reason why this may fail and let large files or .heic files through, would be appreciated.
var upload_one = document.getElementById("image_one");
if(upload_one.files.length > 0) {
if (upload_one.files.item(0).size >= 10485760) { // 10MB in bytes
upload_one.className += " invalid";
valid = false;
alert("Photo is too large. Photos need to be under 10mb")
}
fileName = document.querySelector('#image_one').value;
extension = fileName.split('.').pop();
if (extension == 'heic') {
upload_one.className += " invalid";
valid = false;
alert("Files can only be .png, .jpg or .jpeg")
}
}
You should have a look at presigned URLs with an S3 bucket on AWS.
Basically, you generate an upload URL to which you can upload big files directly to S3.
Personally, I use a Lambda to generate this presigned URL and return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");
const getUrl = async (params) => {
return await new Promise((resolve, reject) => {
S3.getSignedUrl("putObject", params, (err, url) => {
if (err) {
reject(err);
} else {
resolve({
statusCode: 200,
url,
});
}
});
});
};
exports.handler = async (event, context) => {
const id = uuidv4();
const { userId } = event?.queryStringParameters;
const params = {
Bucket: process.env.INVOICE_BUCKET,
Key: `${userId}/${id}.csv`,
ContentType: `text/csv`,
ACL: "public-read",
};
try {
const { url } = await getUrl(params);
return handleRes({ message: `Successfully generated url`, url, key: `${id}.csv`, publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv` }, 200);
} catch (e) {
console.error(e);
return handleRes({ message: "failed" }, 400);
}
};
Front end
$(function () {
$("#theForm").on("submit", sendFile);
});
function sendFile(e) {
e.preventDefault();
var urlPresigned;
var publicUrl;
var key;
$.ajax({
type: "GET",
url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
success: function (resp) {
urlPresigned = resp.url;
publicUrl = resp.publicUrl;
key = resp.key;
var theFormFile = $("#theFile").get()[0].files[0];
$.ajax({
type: "PUT",
url: urlPresigned,
contentType: "text/csv", // put the MIME type of the file here
processData: false,
// the actual file is sent raw
data: theFormFile,
success: function () {
// File uploaded
},
error: function (err) {
console.log(err);
},
});
},
});
}
exports.add = async (req, res) => {
const { body } = req;
var storageRef = fb.firebaseAdmin
.storage()
.bucket(
"gs://test-ptoject-2147f.appspot.com); var filename='computer.jpg'; var path='./computer.jpg'"
);
try {
if (filename == undefined) {
return res.status(400).send({ message: 'Please upload a file!' });
}
const storage = await storageRef.upload(path, {
public: true,
destination: `/uploads/${filename}`,
metadata: {
firebaseStorageDownloadTokens: uuidv4(),
},
});
res.status(200).send({ message: 'File uploaded successfully.!' });
storageRef.getDownloadURL().then(function (url) {
const image = doc('computer');
image.src = url;
console.log('your url is:', url);
});
} catch (err) {
console.log(err);
}
};
With the Admin SDK, using the following code
var storageRef = fb.firebaseAdmin
.storage()
.bucket(...);
you actually define a Bucket, and there is no getDownloadURL() method on a Bucket.
You should call the getSignedUrl() method on a File instead; getDownloadURL() only exists in the client-side JavaScript SDK.
The following should do the trick (untested):
const storage = await storageRef.upload(path, {
public: true,
destination: `/uploads/${filename}`,
metadata: {
firebaseStorageDownloadTokens: uuidv4(),
},
});
// upload() resolves with the File it created; getSignedUrl() is a method on that File (not on the Bucket) and needs an action and an expiry date
const [uploadedFile] = storage;
const [url] = await uploadedFile.getSignedUrl({ action: 'read', expires: '03-09-2491' });
Note that it seems there is a typo/problem in this part of your code, with the value you pass to the bucket() method:
var storageRef = fb.firebaseAdmin
.storage()
.bucket(
"gs://test-ptoject-2147f.appspot.com); var filename='computer.jpg'; var path='./computer.jpg'"
);
I am new to Node.js and am trying to send a file received through multer to Google Drive. However, I am setting
var media = {
mimeType: req.file.mimetype,
body:req.file
};
which gives me an error.
const index = require('../index.js');
const { google } = require('googleapis');
const multer = require('multer');
let drive = google.drive('v3');
const receive = multer();
router.post("/insertNewProduct", auth, receive.single('productUrl'), async (req, res) => {
try {
const data = req.body;
var folderId = '1v3tdYH_GLtCOOxhQ8HXDdRAb-2Jh455_';
let fs = require('fs')
var fileMetadata = {
'name': new Date().toISOString()+req.file.filename,
parents: [folderId]
};
var media = {
mimeType: req.file.mimetype,
body:req.file
};
drive.files.create({
auth: index.jwtClient,
resource: fileMetadata,
publishAuto: true,
media: media,
fields: 'id'
},
async function (err, file) {
if (err) {
// Handle error
console.error(err);
}
else {
console.log('********File create success. File Id: ', file.data.id);
res.status(201).send({
message: "Record Create Successfully",
data: newRows
});
}
}
);
} catch (e) {
console.log("Inside catch: " + e);
res.status(404).send({
message: e,
data: {}
});
}
});
As I understand the error, the way I set body: req.file is wrong.
Here's what req.file looks like.
How can I pass req.file to
var media = {
mimeType: req.file.mimetype,
body:req.file
};
I tried body:req.file.buffer but it didn't work.
I am trying to upload images from my browser directly to Amazon S3, using AngularJS.
Below is my code.
function _upload($files) {
$scope.file = $files[0];
$scope.creds = {
access_key: '***',
secret_key: '***',
bucket: 'sabari-test'
};
var bucket = new AWS.S3({
params: {
Bucket: $scope.creds.bucket
}
});
AWS.config.accessKeyId = $scope.creds.access_key;
AWS.config.secretAccessKey = $scope.creds.secret_key;
AWS.config.region = 'us-west-2';
// AWS.
if ($scope.file) {
// Perform File Size Check First
var fileSize = Math.round(parseInt($scope.file.size));
if (fileSize > $scope.sizeLimit) {
console.log('Sorry, your attachment is too big.');
return false;
}
// Prepend Unique String To Prevent Overwrites
var uniqueFileName = 'hai' + '-' + $scope.file.name;
var params = {
Key: uniqueFileName,
ContentType: $scope.file.type,
Body: $scope.file,
ServerSideEncryption: 'AES256'
};
bucket.putObject(params, function(err, data) {
if (err) {
console.log(err.message);
return false;
} else {
// Upload Successfully Finished
console.log('File Uploaded Successfully');
}
})
} else {
// No File Selected
console.log('Please select a file to upload');
}
}
I get the below error:
"Missing credentials in config"
Please let me know what the missing credential is.
Thanks.
You need to replace these lines (the S3 client copies its credentials when it is constructed, so setting them on AWS.config after new AWS.S3(...) has already run does not help):
var bucket = new AWS.S3({
params: {
Bucket: $scope.creds.bucket
}
});
AWS.config.accessKeyId = $scope.creds.access_key;
AWS.config.secretAccessKey = $scope.creds.secret_key;
AWS.config.region = 'us-west-2';
With this:
var bucket = new AWS.S3({
region: 'us-west-2',
credentials: new AWS.Credentials($scope.creds.access_key, $scope.creds.secret_key)
});
And then move the Bucket into your params object:
var params = {
Bucket: $scope.creds.bucket,
Key: uniqueFileName,
ContentType: $scope.file.type,
Body: $scope.file,
ServerSideEncryption: 'AES256'
};