Amazon S3 upload image - using angular js directly from browser - javascript

I am trying to upload images directly from my browser to Amazon S3 using AngularJS.
Below is my code:
function _upload($files) {
    $scope.file = $files[0];
    $scope.creds = {
        access_key: '***',
        secret_key: '***',
        bucket: 'sabari-test'
    };
    var bucket = new AWS.S3({
        params: {
            Bucket: $scope.creds.bucket
        }
    });
    AWS.config.accessKeyId = $scope.creds.access_key;
    AWS.config.secretAccessKey = $scope.creds.secret_key;
    AWS.config.region = 'us-west-2';
    // AWS.
    if ($scope.file) {
        // Perform File Size Check First
        var fileSize = Math.round(parseInt($scope.file.size));
        if (fileSize > $scope.sizeLimit) {
            console.log('Sorry, your attachment is too big.');
            return false;
        }
        // Prepend Unique String To Prevent Overwrites
        var uniqueFileName = 'hai' + '-' + $scope.file.name;
        var params = {
            Key: uniqueFileName,
            ContentType: $scope.file.type,
            Body: $scope.file,
            ServerSideEncryption: 'AES256'
        };
        bucket.putObject(params, function(err, data) {
            if (err) {
                console.log(err.message);
                return false;
            } else {
                // Upload Successfully Finished
                console.log('File Uploaded Successfully');
            }
        });
    } else {
        // No File Selected
        console.log('Please select a file to upload');
    }
}
I get the below error:
"Missing credentials in config"
Please let me know which credential is missing.
Thanks.

You need to replace these lines:
var bucket = new AWS.S3({
    params: {
        Bucket: $scope.creds.bucket
    }
});
AWS.config.accessKeyId = $scope.creds.access_key;
AWS.config.secretAccessKey = $scope.creds.secret_key;
AWS.config.region = 'us-west-2';
With this:
var bucket = new AWS.S3({
    region: 'us-west-2',
    credentials: new AWS.Credentials($scope.creds.access_key, $scope.creds.secret_key)
});
Then move the Bucket into your params object:
var params = {
    Bucket: $scope.creds.bucket,
    Key: uniqueFileName,
    ContentType: $scope.file.type,
    Body: $scope.file,
    ServerSideEncryption: 'AES256'
};
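The reason this fixes the error: the S3 client copies its configuration when it is constructed, so assigning AWS.config.accessKeyId and AWS.config.secretAccessKey after new AWS.S3(...) never reaches the already-created client, and the request goes out with no credentials. If you prefer the global-config style, populating AWS.config before constructing the client should also work; a minimal sketch, reusing $scope.creds from the question:

// Alternative sketch: fill in the global config *before* creating the client
AWS.config.update({
    region: 'us-west-2',
    accessKeyId: $scope.creds.access_key,
    secretAccessKey: $scope.creds.secret_key
});
var bucket = new AWS.S3(); // this client now picks up the credentials from AWS.config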

Related

aws-sdk Signed URL Failing on Heroku during production but works in development

My code is written below; it generates my signed URLs perfectly fine in development, and the files I want to get and upload work locally.
const S3 = require("aws-sdk/clients/s3");
const fs = require("fs");

const s3 = new S3({
    region: process.env.AWS_BUCKET_REGION,
    accessKeyId: process.env.AWS_ACCESS_KEY,
    secretAccessKey: process.env.AWS_SECRET_KEY,
    signatureVersion: "v2",
});

const uploadFile = (file, id, directory) => {
    const fileStream = fs.createReadStream(file.path);
    const uploadParams = {
        Bucket: process.env.AWS_BUCKET_NAME,
        Body: fileStream,
        Key: directory + id,
        MimeType: file.mimetype,
    };
    return s3.upload(uploadParams).promise();
};
exports.uploadFile = uploadFile;

const deleteFile = (id, directory) => {
    const uploadParams = {
        Bucket: process.env.AWS_BUCKET_NAME,
        Key: directory + id,
    };
    return s3.deleteObject(uploadParams).promise();
};
exports.deleteFile = deleteFile;

const getFileStream = ({ key }) => {
    if (key) {
        const downloadParams = {
            Key: key,
            Bucket: process.env.AWS_BUCKET_NAME,
        };
        return s3.getObject(downloadParams).createReadStream();
    }
};
exports.getFileStream = getFileStream;

function generatePreSignedPutUrl({ key, operation }) {
    var params = { Bucket: process.env.AWS_BUCKET_NAME, Key: key, Expires: 60 };
    let x = s3.getSignedUrl(operation, params);
    return x;
}
exports.generatePreSignedPutUrl = generatePreSignedPutUrl;
These are the requests I make from the client
const getSignedURL = async ({ key, operation }) =>
    client.post(`${endpoint}/get-signed-url`, { key, operation });

const result = await getSignedURL({
    key: directory + "/" + key,
    operation: "getObject",
});
let url = result.data.data.url;
console.log({ url });
and this is the route on my server.
router.post("/get-signed-url", requireKey, async (req, res) => {
    const { key, operation } = req.body;
    try {
        console.log({ b: req.body });
        let url = generatePreSignedPutUrl({ key, operation });
        console.log({ url });
        res.json({ success: true, data: { url } });
    } catch (e) {
        console.log({ e });
        res.status(400).json({ error: "Internal Server Error" });
    }
});
I followed all of S3's documentation to set this up, and I am trying to make sure all files can be downloaded and uploaded securely from my application. Does anyone know how I can get this to work in production when the server is hosted on Heroku and the client is a Next.js site?

Occasionally uploading larger files

When moving to the next step in the form I run some checks. One is to stop photos over 10MB and prevent .heic files from being uploaded. 90% of the time it works, but now and again files are let through.
Any help with a better-written solution, or a reason why this may fail and let large or .heic files through, would be appreciated.
var upload_one = document.getElementById("image_one");
if (upload_one.files.length > 0) {
    if (upload_one.files.item(0).size >= '10485760') {
        upload_one.className += " invalid";
        valid = false;
        alert("Photo is too large. Photos need to be under 10mb")
    }
    fileName = document.querySelector('#image_one').value;
    extension = fileName.split('.').pop();
    if (extension == 'heic') {
        upload_one.className += " invalid";
        valid = false;
        alert("Files can only be .png, .jpg or .jpeg")
    }
}
You should have a look at presigned URLs with your S3 bucket on AWS.
Basically, you generate an upload URL to which you can upload big files directly to S3.
Personally, I use a Lambda to generate this presigned URL and return it to the front end.
Backend
const AWS = require("aws-sdk");
const S3 = new AWS.S3();
const { v4: uuidv4 } = require("uuid");

const getUrl = async (params) => {
    return await new Promise((resolve, reject) => {
        S3.getSignedUrl("putObject", params, (err, url) => {
            if (err) {
                reject(err);
            } else {
                resolve({
                    statusCode: 200,
                    url,
                });
            }
        });
    });
};

exports.handler = async (event, context) => {
    const id = uuidv4();
    const { userId } = event?.queryStringParameters;
    const params = {
        Bucket: process.env.INVOICE_BUCKET,
        Key: `${userId}/${id}.csv`,
        ContentType: `text/csv`,
        ACL: "public-read",
    };
    try {
        const { url } = await getUrl(params);
        // handleRes is the answer author's own response helper
        return handleRes({ message: `Successfully generated url`, url, key: `${id}.csv`, publicUrl: `https://yourBucket.s3.eu-west-1.amazonaws.com/${userId}/${id}.csv` }, 200);
    } catch (e) {
        console.error(e);
        return handleRes({ message: "failed" }, 400);
    }
};
Front end
$(function () {
    $("#theForm").on("submit", sendFile);
});

function sendFile(e) {
    e.preventDefault();
    var urlPresigned;
    var publicUrl;
    var key;
    $.ajax({
        type: "GET",
        url: `https://yourId.execute-api.eu-west-1.amazonaws.com/Prod/file-upload-to-bucket?userId=${userId}`,
        success: function (resp) {
            urlPresigned = resp.url;
            publicUrl = resp.publicUrl;
            key = resp.key;
            var theFormFile = $("#theFile").get()[0].files[0];
            $.ajax({
                type: "PUT",
                url: urlPresigned,
                contentType: "text/csv", // MIME type of the file
                processData: false,
                // the actual file is sent raw
                data: theFormFile,
                success: function () {
                    // File uploaded
                },
                error: function (err) {
                    console.log(err);
                },
            });
        },
    });
}
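One thing to keep in mind with this flow: the Content-Type sent in the PUT request (here "text/csv") has to match the ContentType that was used when the URL was signed, otherwise S3 rejects the upload with a signature mismatch. If you are not using jQuery, the same PUT can be done with fetch; a minimal sketch, assuming the presigned URL has already been retrieved into a presignedUrl variable:

// Sketch: raw PUT of the selected file to an already-retrieved presigned URL
var theFormFile = document.getElementById("theFile").files[0];
fetch(presignedUrl, {
    method: "PUT",
    headers: { "Content-Type": "text/csv" }, // must match the ContentType used when signing
    body: theFormFile
}).then(function (res) {
    if (!res.ok) {
        console.log("Upload failed with status " + res.status);
    }
});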

React Native: upload image to S3 bucket using aws-sdk

I am using aws-sdk to upload an image to an S3 bucket. Please look at my code below; I have already spent a day on it.
uploadImageOnS3 = () => {
    var S3 = require("aws-sdk/clients/s3");
    const BUCKET_NAME = "testtest";
    const IAM_USER_KEY = "XXXXXXXXXXXXX";
    const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
    const s3bucket = new S3({
        accessKeyId: IAM_USER_KEY,
        secretAccessKey: IAM_USER_SECRET,
        Bucket: BUCKET_NAME
    });
    let contentType = "image/jpeg";
    let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
    let file = {
        uri: this.state.fileObj.uri,
        type: this.state.fileObj.type,
        name: this.state.fileObj.fileName
    };
    s3bucket.createBucket(() => {
        const params = {
            Bucket: BUCKET_NAME,
            Key: this.state.s3BucketObj,
            Body: file,
            ContentDisposition: contentDeposition,
            ContentType: contentType
        };
        s3bucket.upload(params, (err, data) => {
            if (err) {
                console.log("error in callback");
                console.log(err);
            }
            // console.log('success');
            console.log(data);
        });
    });
};
Error:
Unsupported body payload object
Please help me sort this out. I am also using react-native-image-picker to pick the image.
You have to pass an ArrayBuffer as the Body to upload the data.
As per the AWS documentation, you can pass a stream, string, ArrayBuffer, or Blob as the Body parameter.
Please check the code below, which should resolve your issue:
import fs from "react-native-fs";
import { decode } from "base64-arraybuffer";

uploadImageOnS3 = async () => {
    var S3 = require("aws-sdk/clients/s3");
    const BUCKET_NAME = "testtest";
    const IAM_USER_KEY = "XXXXXXXXXXXXX";
    const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
    const s3bucket = new S3({
        accessKeyId: IAM_USER_KEY,
        secretAccessKey: IAM_USER_SECRET,
        Bucket: BUCKET_NAME,
        signatureVersion: "v4"
    });
    let contentType = "image/jpeg";
    let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
    const fPath = this.state.fileObj.uri;
    const base64 = await fs.readFile(fPath, "base64");
    //console.log(base64);
    const arrayBuffer = decode(base64);
    //console.log(arrayBuffer);
    s3bucket.createBucket(() => {
        const params = {
            Bucket: BUCKET_NAME,
            Key: this.state.s3BucketObj,
            Body: arrayBuffer,
            ContentDisposition: contentDeposition,
            ContentType: contentType
        };
        s3bucket.upload(params, (err, data) => {
            if (err) {
                console.log("error in callback");
                console.log(err);
            }
            // console.log('success');
            console.log(data);
        });
    });
};
You can also check out the React Native AWS Amplify documentation for the proper process. It mentions that you can pass a stream, string, ArrayBuffer, or Blob as the Body parameter.
import AWS from 'aws-sdk';
import fs from 'react-native-fs';
import {decode} from 'base64-arraybuffer';

export const uploadFileToS3 = async (file) => {
    const BUCKET_NAME = 'xxxxx';
    const IAM_USER_KEY = 'xxxxxxxxxxxxxxxxx';
    const IAM_USER_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx';
    const s3bucket = new AWS.S3({
        accessKeyId: IAM_USER_KEY,
        secretAccessKey: IAM_USER_SECRET,
        Bucket: BUCKET_NAME,
        signatureVersion: 'v4',
    });
    const contentType = file.type;
    const contentDeposition = `inline;filename="${file.name}"`;
    const fPath = file.uri;
    const base64 = await fs.readFile(fPath, 'base64');
    const arrayBuffer = decode(base64);
    return new Promise((resolve, reject) => {
        s3bucket.createBucket(() => {
            const params = {
                Bucket: BUCKET_NAME,
                Key: file.name,
                Body: arrayBuffer,
                ContentDisposition: contentDeposition,
                ContentType: contentType,
            };
            s3bucket.upload(params, (error, data) => {
                if (error) {
                    reject(getApiError(error)); // getApiError is the answer author's own error helper
                } else {
                    console.log(JSON.stringify(data));
                    resolve(data);
                }
            });
        });
    });
};
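A usage sketch for the helper above; the asset values are hypothetical placeholders, but the shape matches what react-native-image-picker provides (it needs uri, type, and name), and data.Location in the result is the URL of the uploaded object:

// Hypothetical usage; the asset values are placeholders
const asset = {
    uri: 'file:///path/to/photo.jpg',
    type: 'image/jpeg',
    name: 'photo.jpg'
};

uploadFileToS3(asset)
    .then((data) => console.log('Uploaded to', data.Location)) // data.Location is the object URL
    .catch((error) => console.log('Upload failed', error));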

Amazon S3: getSignedUrl: "Missing required key 'Bucket' in params"

Struggled here for two days. I am kind of new to JavaScript and AWS, so any hint will be appreciated.
I have 11 buckets. The others work fine; only this one does not.
When I pass in another bucket name and key value, it works, but when I pass in the one I need, I get the error: "Missing required key 'Bucket' in params".
For example:
If I pass in bucket: 'businesspicture', it successfully loads the picture I need.
$scope.$watch("userInfo.picture", function (imageValue) {
    var defaultIcon = '/images/defaultuser.jpg';
    if (imageValue !== defaultIcon && !imageValue.startsWith("https://")) {
        pictureServices.picture.getPictureFromS3({
            fileName: imageValue,
            bucket: "userpicture"
        }, {}, function (pic) {
            $scope.iconPreviewImage = pic.url;
            $scope.userInfo.picture = imageValue;
        }, function (error) {
            dialogService.showNgResourceError(error);
        });
    }
});
pictureService.js.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('../s3_config.json');
var photoBuckets = new AWS.S3();

exports.getPictureFromS3 = function(fileName, bucketName) {
    return new Promise(function(resolve, reject) {
        var params = {
            Bucket: bucketName,
            Key: fileName
        };
        photoBuckets.getSignedUrl('getObject', params, function(err, url) {
            if (err) {
                reject(err);
            } else {
                awsurl = { url: url };
                resolve(awsurl);
            }
        });
    });
};

AWS SDK JavaScript: how to display upload progress of AWS.S3.putObject?

I'm developing a JavaScript client to upload files directly to Amazon S3.
<input type="file" id="file-chooser" />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    var bucket = new AWS.S3({params: {Bucket: 'myBucket'}});
    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');
    button.addEventListener('click', function() {
        var file = fileChooser.files[0];
        if (file) {
            results.innerHTML = '';
            var params = {Key: file.name, ContentType: file.type, Body: file};
            bucket.putObject(params, function (err, data) {
                results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
            });
        } else {
            results.innerHTML = 'Nothing to upload.';
        }
    }, false);
</script>
The example from the Amazon documentation works fine, but it doesn't provide any feedback on upload progress.
Any ideas?
Thanks
Rather than using the s3.putObject function, why not use the ManagedUpload function instead?
It has been specifically developed to let you hook into a httpUploadProgress event, which should make updating your progress bar fairly easy to implement.
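A minimal sketch of that approach, reusing the bucket, file and results variables from the question (calling s3.upload() without a callback returns a ManagedUpload, so you can subscribe to httpUploadProgress before sending):

// Sketch: upload with progress feedback instead of putObject
var upload = bucket.upload({ Key: file.name, ContentType: file.type, Body: file });

upload.on('httpUploadProgress', function (progress) {
    // progress.loaded and progress.total are byte counts
    var percent = Math.round((progress.loaded / progress.total) * 100);
    results.innerHTML = 'Uploading: ' + percent + '%';
});

upload.send(function (err, data) {
    results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
});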
I have done some customisation for file upload progress. The same logic can be used in Node, Angular, and plain JavaScript.
Here is the repository link: https://github.com/aviboy2006/aws-s3-file-upload-progress
Use this fiddle to test: https://jsfiddle.net/sga3o1h5/
Note: update the access key, secret key, and bucket name.
var bucket = new AWS.S3({
    accessKeyId: "",
    secretAccessKey: "",
    region: 'us-east-1'
});

uploadfile = function(fileName, file, folderName) {
    const params = {
        Bucket: "fileuploadprocess",
        Key: folderName + fileName,
        Body: file,
        ContentType: file.type
    };
    return this.bucket.upload(params, function(err, data) {
        if (err) {
            console.log('There was an error uploading your file: ', err);
            return false;
        }
        console.log('Successfully uploaded file.', data);
        return true;
    });
}

uploadSampleFile = function() {
    var progressDiv = document.getElementById("myProgress");
    progressDiv.style.display = "block";
    var progressBar = document.getElementById("myBar");
    file = document.getElementById("myFile").files[0];
    folderName = "Document/";
    uniqueFileName = 'SampleFile';
    let fileUpload = {
        id: "",
        name: file.name,
        nameUpload: uniqueFileName,
        size: file.size,
        type: "",
        timeReference: 'Unknown',
        progressStatus: 0,
        displayName: file.name,
        status: 'Uploading..',
    }
    uploadfile(uniqueFileName, file, folderName)
        .on('httpUploadProgress', function(progress) {
            let progressPercentage = Math.round(progress.loaded / progress.total * 100);
            console.log(progressPercentage);
            progressBar.style.width = progressPercentage + "%";
            if (progressPercentage < 100) {
                fileUpload.progressStatus = progressPercentage;
            } else if (progressPercentage == 100) {
                fileUpload.progressStatus = progressPercentage;
                fileUpload.status = "Uploaded";
            }
        })
}
I bumped into this post, then I found this AWS npm package, which does exactly what you are asking for:
@aws-sdk/lib-storage
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client, S3 } from "@aws-sdk/client-s3";

const target = { Bucket, Key, Body };
try {
    const parallelUploads3 = new Upload({
        client: new S3({}) || new S3Client({}),
        // tags: [...], // optional tags
        queueSize: 4, // optional concurrency configuration
        partSize: 1024 * 1024 * 5, // optional size of each part, in bytes (minimum 5 MB)
        leavePartsOnError: false, // optional manually handle dropped parts
        params: target,
    });

    parallelUploads3.on("httpUploadProgress", (progress) => {
        console.log(progress);
    });

    await parallelUploads3.done();
} catch (e) {
    console.log(e);
}
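Note that this snippet targets the v3 SDK (the @aws-sdk/* packages); with the v2 aws-sdk package the equivalent is s3.upload() with its httpUploadProgress event, as shown in the answers above.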
