Amazon S3: getSignedUrl: "Missing required key 'Bucket' in params"

Struggled here for two days. I'm fairly new to JavaScript and AWS, so any hint will be appreciated.
I have 11 buckets. All of them work fine except this one.
When I pass in another bucket name and key value, it works, but when I pass in the one I need I get the error: "Missing required key 'Bucket' in params".
For example:
If I pass in bucket: 'businesspicture', it successfully loads the picture I need.
$scope.$watch("userInfo.picture", function (imageValue) {
    var defaultIcon = '/images/defaultuser.jpg';
    if (imageValue !== defaultIcon && !imageValue.startsWith("https://")) {
        pictureServices.picture.getPictureFromS3({
            fileName: imageValue,
            bucket: "userpicture"
        }, {}, function (pic) {
            $scope.iconPreviewImage = pic.url;
            $scope.userInfo.picture = imageValue;
        }, function (error) {
            dialogService.showNgResourceError(error);
        });
    }
});
pictureService.js:
var AWS = require('aws-sdk');
AWS.config.loadFromPath('../s3_config.json');
var photoBuckets = new AWS.S3();

exports.getPictureFromS3 = function(fileName, bucketName) {
    return new Promise(function(resolve, reject) {
        var params = {
            Bucket: bucketName,
            Key: fileName
        };
        photoBuckets.getSignedUrl('getObject', params, function(err, url) {
            if (err) {
                reject(err);
            } else {
                var awsurl = { url: url };
                resolve(awsurl);
            }
        });
    });
};

Related

Can't Access Data in S3 with Lambda

I have been using JavaScript for a few months and my code runs well locally, but I always have the same problem in a Lambda function:
I can't access any data with s3.getObject.
This is a simple example of code that doesn't run in Lambda:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.myHandler = function(event, context, callback) {
    // Retrieve the object
    s3.getObject({
        Bucket: 'XXXXXX',
        Key: 'YYYYY'
    }, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            console.log("data");
        }
    });
};
This is because your function is being terminated before your callback is executed, since your s3.getObject() call is asynchronous under the hood.
In order to get data from AWS Lambda, you'll have to return your value like this:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.myHandler = function(event, context, callback) {
    // Retrieve the object
    s3.getObject({
        Bucket: 'XXXXXX',
        Key: 'YYYYY'
    }, function(err, data) {
        if (err) {
            console.log(err);
            callback(err);
        } else {
            callback(null, { statusCode: 200, body: JSON.stringify(data) });
        }
    });
};
I suggest you use Node 8 though, so you can easily use async/await.
Your code would then look like this:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.myHandler = async (event) => {
    const data = await s3.getObject({
        Bucket: 'XXXXXX',
        Key: 'YYYYY'
    }).promise();
    return {
        statusCode: 200,
        body: JSON.stringify(data)
    };
};
Another problem in your code is that you always print the string literal "data" instead of the variable data, so the word "data" is printed instead of the data itself.
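In other words, the log line in the else branch should be:

// Logs the actual response object, not the string "data"
console.log(data);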

Wait for promise in javascript & graphql/node

I'm building a GraphQL server, and one of the resolvers should return a URL fetched from the AWS API. I have tried for hours with promises and async/await, but nothing has worked yet.
What happens in the code is the following:
1) I make a call to the AWS API and get a signed URL in the callback.
2) I want to return that URL from the GraphQL resolver function - getSignedURL.
My question is: how can I make a resolver function return a result that I've got in another function's callback?
I will appreciate any help!
IN CLASS S3Store
var s3 = new aws.S3();

newSignedUrl(callback) {
    var params = {
        Bucket: 'testBucket28993746',
        Key: uuid.v4(),
        Expires: 100,
        ContentType: 'image/jpg'
    };
    s3.getSignedUrl('putObject', params, (err, signedURL) => {
        callback(err, signedURL);
    });
}
Graphql resolver
getSignedURL(){//TODO: more secure, require auth first
let newURL = null;
s3store = new S3Store();
s3store.newSignedUrl((err,result)=>{
if(err){
console.log(err)
newURL = {}
} else{
console.log(result);
newURL = result;
}
});
return newURL;
},
When I make a call to the GraphQL endpoint, I get the following:
{
    "data": {
        "getSignedURL": null
    }
}
This worked for me:
IN CLASS S3Store
getSignedUrl() {
    var params = {
        Bucket: 'testBucket28993746',
        Key: uuid.v4(),
        Expires: 100,
        ContentType: 'image/jpg'
    };
    return new Promise((resolve, reject) => {
        s3.getSignedUrl('putObject', params, (err, signedURL) => {
            if (err) {
                reject(err);
            } else {
                resolve(signedURL);
            }
        });
    });
}
Graphql resolver
getSignedURL() {
    return new S3Store().getSignedUrl().then((url) => { return url; });
}
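Since getSignedUrl() already returns a promise, the trailing .then is redundant; the resolver can simply return the promise itself, for example:

getSignedURL() {
    // GraphQL resolves the field once this promise settles
    return new S3Store().getSignedUrl();
}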

S3 Iterate through Bucket/Folders/Files

I am iterating through an S3 bucket using s3.listObjects, but I am getting this error: { [UnexpectedParameter: Unexpected key 'Key' found in params]
Below is the code I am using:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    var bucket = event.Records[0].s3.bucket.name;
    var key = event.Records[0].s3.object.key;
    var params = {
        Bucket: bucket,
        Key: key
    };
    console.log('bucket name ', bucket);
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            context.succeed(data.ContentType);
        }
    });
    s3.listObjects(params, function(err, data) {
        if (err) return console.log(err);
        params = { Bucket: 'bucketName' };
    });
};
Can anyone please suggest what I am doing wrong here?
Thanks
listObjects doesn't take a key as a parameter, since it wants to list all the keys in the bucket for you. Really, it's just looking for you to tell it which bucket to list the contents of. Additionally, it does take some other parameters to help filter results and cap the number of objects to return.
AWS.S3.listObjects Docs
// Acceptable parameters as taken from the AWS.S3.listObjects docs
var params = {
    Bucket: 'STRING_VALUE', /* required */
    Delimiter: 'STRING_VALUE',
    EncodingType: 'url',
    Marker: 'STRING_VALUE',
    MaxKeys: 0,
    Prefix: 'STRING_VALUE'
};
Essentially, the API is communicating to you that you're passing in an unnecessary parameter.
var params = { Bucket: bucket };
s3.listObjects(params, function(err, data) {
    if (err) return console.error(err);
    // data.Contents is the array of objects within the bucket
    console.log(data.Contents);
    return;
});
If a is your bucket and your images are under the a/b/c folder, then just use "a" as the bucket name and put the folder path in the image key.
Otherwise, just go to your AWS console and find your image key.
In my case the images are under ctr/images/ServiceImage.
var params = {
    Bucket: "ctr", // bucket name
    Delete: {
        Objects: [
            {
                Key: "images/ServiceImage/01c99e0c-f21e-4860-bf01-e7c79274b0ae.jpg"
            },
            {
                Key: "imgs/ServiceImage/0390cdf2-1989-43cd-8c93-77510dcd597e.jpg"
            }
        ],
        Quiet: false
    }
};
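Since "folders" in S3 are only key prefixes, listing the contents of one folder is just a filtered listObjects call. A minimal sketch using the Prefix parameter from the docs above (the bucket name and prefix here are illustrative):

var listParams = {
    Bucket: 'ctr', // illustrative bucket name
    Prefix: 'images/ServiceImage/' // only keys under this "folder"
};
s3.listObjects(listParams, function(err, data) {
    if (err) return console.error(err);
    data.Contents.forEach(function(obj) {
        console.log(obj.Key);
    });
});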

Amazon S3 upload image - using angular js directly from browser

I am trying to upload images from my browser directly to Amazon S3, using AngularJS.
Below is my code.
function _upload($files) {
    $scope.file = $files[0];
    $scope.creds = {
        access_key: '***',
        secret_key: '***',
        bucket: 'sabari-test'
    };
    var bucket = new AWS.S3({
        params: {
            Bucket: $scope.creds.bucket
        }
    });
    AWS.config.accessKeyId = $scope.creds.access_key;
    AWS.config.secretAccessKey = $scope.creds.secret_key;
    AWS.config.region = 'us-west-2';

    if ($scope.file) {
        // Perform file size check first
        var fileSize = Math.round(parseInt($scope.file.size));
        if (fileSize > $scope.sizeLimit) {
            console.log('Sorry, your attachment is too big.');
            return false;
        }
        // Prepend unique string to prevent overwrites
        var uniqueFileName = 'hai' + '-' + $scope.file.name;
        var params = {
            Key: uniqueFileName,
            ContentType: $scope.file.type,
            Body: $scope.file,
            ServerSideEncryption: 'AES256'
        };
        bucket.putObject(params, function(err, data) {
            if (err) {
                console.log(err.message);
                return false;
            } else {
                // Upload successfully finished
                console.log('File Uploaded Successfully');
            }
        });
    } else {
        // No file selected
        console.log('Please select a file to upload');
    }
}
I get the below error:
"Missing credentials in config"
Please let me know what the missing credential is.
Thanks.
You need to replace these lines (the S3 service object captures its configuration when it is constructed, so setting AWS.config afterwards comes too late):
var bucket = new AWS.S3({
    params: {
        Bucket: $scope.creds.bucket
    }
});
AWS.config.accessKeyId = $scope.creds.access_key;
AWS.config.secretAccessKey = $scope.creds.secret_key;
AWS.config.region = 'us-west-2';
With this:
var bucket = new AWS.S3({
    region: 'us-west-2',
    credentials: new AWS.Credentials($scope.creds.access_key, $scope.creds.secret_key)
});
Then move the Bucket into your params object:
var params = {
    Bucket: $scope.creds.bucket,
    Key: uniqueFileName,
    ContentType: $scope.file.type,
    Body: $scope.file,
    ServerSideEncryption: 'AES256'
};

AWS SDK JavaScript: how to display upload progress of AWS.S3.putObject?

I'm developing a JavaScript client to upload files directly to Amazon S3.
<input type="file" id="file-chooser" />
<button id="upload-button">Upload to S3</button>
<div id="results"></div>
<script type="text/javascript">
    var bucket = new AWS.S3({ params: { Bucket: 'myBucket' } });
    var fileChooser = document.getElementById('file-chooser');
    var button = document.getElementById('upload-button');
    var results = document.getElementById('results');
    button.addEventListener('click', function() {
        var file = fileChooser.files[0];
        if (file) {
            results.innerHTML = '';
            var params = { Key: file.name, ContentType: file.type, Body: file };
            bucket.putObject(params, function (err, data) {
                results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
            });
        } else {
            results.innerHTML = 'Nothing to upload.';
        }
    }, false);
</script>
The example from the Amazon documentation works fine, but it doesn't provide any feedback on the upload progress.
Any ideas?
Thanks
Rather than using the s3.putObject function, why not use the ManagedUpload function instead?
It has been specifically developed to let you hook into an httpUploadProgress event, which should make updating your progress bar fairly easy to implement.
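For reference, here is a minimal sketch of that approach, assuming the v2 aws-sdk's AWS.S3.ManagedUpload class and reusing the file and results variables from the question; the bucket name is illustrative:

var upload = new AWS.S3.ManagedUpload({
    params: {
        Bucket: 'myBucket', // illustrative bucket name
        Key: file.name,
        Body: file
    }
});

// httpUploadProgress fires with the bytes loaded so far and the total
upload.on('httpUploadProgress', function(progress) {
    var percent = Math.round(progress.loaded / progress.total * 100);
    results.innerHTML = 'Uploading: ' + percent + '%';
});

upload.send(function(err, data) {
    results.innerHTML = err ? 'ERROR!' : 'UPLOADED.';
});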
I have done some customisation for file upload progress; the same logic can be used in Node, Angular, and plain JavaScript.
Here is the repository link: https://github.com/aviboy2006/aws-s3-file-upload-progress
Use this fiddle to test: https://jsfiddle.net/sga3o1h5/
Note: update the access key, bucket name and secret key.
var bucket = new AWS.S3({
    accessKeyId: "",
    secretAccessKey: "",
    region: 'us-east-1'
});

uploadfile = function(fileName, file, folderName) {
    const params = {
        Bucket: "fileuploadprocess",
        Key: folderName + fileName,
        Body: file,
        ContentType: file.type
    };
    return bucket.upload(params, function(err, data) {
        if (err) {
            console.log('There was an error uploading your file: ', err);
            return false;
        }
        console.log('Successfully uploaded file.', data);
        return true;
    });
};
uploadSampleFile = function() {
    var progressDiv = document.getElementById("myProgress");
    progressDiv.style.display = "block";
    var progressBar = document.getElementById("myBar");
    file = document.getElementById("myFile").files[0];
    folderName = "Document/";
    uniqueFileName = 'SampleFile';
    let fileUpload = {
        id: "",
        name: file.name,
        nameUpload: uniqueFileName,
        size: file.size,
        type: "",
        timeReference: 'Unknown',
        progressStatus: 0,
        displayName: file.name,
        status: 'Uploading..'
    };
    uploadfile(uniqueFileName, file, folderName)
        .on('httpUploadProgress', function(progress) {
            let progressPercentage = Math.round(progress.loaded / progress.total * 100);
            console.log(progressPercentage);
            progressBar.style.width = progressPercentage + "%";
            if (progressPercentage < 100) {
                fileUpload.progressStatus = progressPercentage;
            } else if (progressPercentage == 100) {
                fileUpload.progressStatus = progressPercentage;
                fileUpload.status = "Uploaded";
            }
        });
};
I bumped into this post, and then I found this AWS npm package, which does exactly what you are asking for:
@aws-sdk/lib-storage
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client, S3 } from "@aws-sdk/client-s3";

const target = { Bucket, Key, Body };
try {
    const parallelUploads3 = new Upload({
        client: new S3({}) || new S3Client({}),
        tags: [...], // optional tags
        queueSize: 4, // optional concurrency configuration
        partSize: 1024 * 1024 * 5, // optional size of each part, in bytes (minimum 5 MB)
        leavePartsOnError: false, // optional: manually handle dropped parts
        params: target,
    });

    parallelUploads3.on("httpUploadProgress", (progress) => {
        console.log(progress);
    });

    await parallelUploads3.done();
} catch (e) {
    console.log(e);
}
