I am iterating through an S3 bucket using s3.listObjects, but I am getting this error: { [UnexpectedParameter: Unexpected key 'Key' found in params]
Below is the code I am using:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    var bucket = event.Records[0].s3.bucket.name;
    var key = event.Records[0].s3.object.key;
    var params = {
        Bucket: bucket,
        Key: key
    };
    console.log('bucket name ', bucket);
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log(err);
        } else {
            context.succeed(data.ContentType);
        }
    });
    s3.listObjects(params, function(err, data) {
        if (err) return console.log(err);
        params = {Bucket: 'bucketName'};
    });
};
Can anyone please suggest what I am doing wrong here?
Thanks
listObjects doesn't take a Key as a parameter, since its job is to list all the keys in the bucket for you. Really, it's just looking for you to tell it which bucket to list the contents of. It does also take some other parameters to help filter the results and limit the maximum number of objects returned.
AWS.S3.listObjects Docs
// Acceptable parameters, as taken from the AWS.S3.listObjects docs
var params = {
    Bucket: 'STRING_VALUE', /* required */
    Delimiter: 'STRING_VALUE',
    EncodingType: 'url',
    Marker: 'STRING_VALUE',
    MaxKeys: 0,
    Prefix: 'STRING_VALUE'
};
Essentially, the API is telling you that you're passing in an unexpected parameter.
var params = { Bucket: bucket };
s3.listObjects(params, function(err, data) {
    if (err) return console.error(err);
    // data.Contents is the array of objects within the bucket
    console.log(data.Contents);
    return;
});
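If you only need a subset of the bucket, you can also combine some of those filtering parameters; a minimal sketch (the prefix and limit values here are just placeholders):

var params = {
    Bucket: bucket,
    Prefix: 'uploads/',  // hypothetical key prefix to filter on
    MaxKeys: 100         // cap the number of keys returned
};
s3.listObjects(params, function(err, data) {
    if (err) return console.error(err);
    data.Contents.forEach(function(obj) {
        console.log(obj.Key, obj.Size);
    });
});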
If a is your bucket and your images are under the a/b/c folder, then just use "a" as the bucket name and put the folder path in front of the image key.
Otherwise, just go to the AWS console and look up your image key.
In my case the images are under ctr/images/serviceImage.
var params = {
    Bucket: "ctr", // bucket name
    Delete: {
        Objects: [
            {
                Key: "images/ServiceImage/01c99e0c-f21e-4860-bf01-e7c79274b0ae.jpg"
            },
            {
                Key: "imgs/ServiceImage/0390cdf2-1989-43cd-8c93-77510dcd597e.jpg"
            }
        ],
        Quiet: false
    }
};
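Those params are in the shape expected by s3.deleteObjects; a minimal sketch of passing them to the call (using the params object above):

s3.deleteObjects(params, function(err, data) {
    if (err) return console.error(err);
    // data.Deleted lists the keys that were removed,
    // data.Errors lists any keys that could not be deleted
    console.log(data.Deleted, data.Errors);
});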
I am using multer, Google Cloud Storage, and Mongo within Express to first upload two files to Google Cloud Storage, then store their URLs on a Mongo schema.
app.post('/api/songs', upload.fields([{ name: 'audio' }, { name: 'imgSrc' }]), async (req, res) => {
const files = req.files;
const newSong = new Song(req.body);
for (let key in files) {
const file = files[key][0];
const blob = bucket.file(file.originalname);
const blobStream = blob.createWriteStream({
metadata: {
contentType: file.mimetype,
},
resumable: false
});
blobStream.on('error', err => {
next(err);
console.log(err);
return
});
blobStream.on('finish', () => {
const url = `https://storage.googleapis.com/${bucket.name}/${blob.name}`;
console.log("Google Cloud URL: " + url)
if (file.fieldname === 'imgSrc') {
newSong.image = { url: url, filename: file.originalname }
}
if (file.fieldname === 'audio') {
newSong.audio = { url: url, filename: file.originalname }
}
});
blobStream.end(file.buffer);
}
newSong.save((err, userInput) => {
if (err) return res.status(400).send(err.name);
res.status(200).json(userInput)
});
});
I am able to console.log each individual URL, but I cannot add them to the newSong model before it saves after the for loop. I'm not sure what I am doing wrong. I have also tried instantiating an empty array before the for loop and adding values to it during the loop, but even after the loop the array came back empty.
I noticed that the newSong.save() method seems to run BEFORE the files are uploaded to Google Cloud Storage. Any help is appreciated.
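One common way out (a sketch of the general pattern, not a tested fix for this exact setup; the uploadOne helper is hypothetical) is to wrap each write stream in a Promise so the save only runs after every 'finish' event has fired:

app.post('/api/songs', upload.fields([{ name: 'audio' }, { name: 'imgSrc' }]), async (req, res) => {
    const newSong = new Song(req.body);

    // Hypothetical helper: resolves with the public URL once one file has finished uploading
    const uploadOne = (file) => new Promise((resolve, reject) => {
        const blob = bucket.file(file.originalname);
        const blobStream = blob.createWriteStream({
            metadata: { contentType: file.mimetype },
            resumable: false
        });
        blobStream.on('error', reject);
        blobStream.on('finish', () => {
            resolve(`https://storage.googleapis.com/${bucket.name}/${blob.name}`);
        });
        blobStream.end(file.buffer);
    });

    try {
        for (const key in req.files) {
            const file = req.files[key][0];
            const url = await uploadOne(file); // wait for this upload before moving on
            if (file.fieldname === 'imgSrc') newSong.image = { url: url, filename: file.originalname };
            if (file.fieldname === 'audio') newSong.audio = { url: url, filename: file.originalname };
        }
        const userInput = await newSong.save(); // only runs after all uploads have finished
        res.status(200).json(userInput);
    } catch (err) {
        res.status(400).send(err.name || err.message);
    }
});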
I have been using JavaScript for a few months and my code runs well locally, but I keep hitting the same problem in a Lambda function.
I can't access any data with s3.getObject.
This is a simple example that doesn't work in Lambda:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.myHandler = function(event, context, callback) {
// Retrieve the object
s3.getObject({
Bucket: 'XXXXXX',
Key: 'YYYYY'
}, function(err, data) {
if (err) {
console.log(err);
} else {
console.log("data");
}
});
};
This is because your function is being terminated before your callback is executed, since your s3.getObject() call is asynchronous under the hood.
In order to get data from AWS Lambda, you'll have to return your value like this:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.myHandler = function(event, context, callback) {
// Retrieve the object
s3.getObject({
Bucket: 'XXXXXX',
Key: 'YYYYY'
}, function(err, data) {
if (err) {
console.log(err);
callback(err)
} else {
callback(null, {statusCode: 200, body: JSON.stringify(data) })
}
});
};
I suggest you use Node 8 though, so you can easily use async/await.
Your code would then look like this:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
exports.myHandler = async (event) => {
const data = await s3.getObject({
Bucket: 'XXXXXX',
Key: 'YYYYY'
}).promise();
return {
statusCode: 200,
body: JSON.stringify(data)
}
};
Another problem in your code is that you always print the string literal "data" instead of the variable data, so the word "data" is printed instead of the data itself.
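If you do switch to async/await, note that errors from the awaited call will throw rather than arrive as a callback argument, so you may want a try/catch around it; a minimal sketch based on the handler above:

exports.myHandler = async (event) => {
    try {
        const data = await s3.getObject({
            Bucket: 'XXXXXX',
            Key: 'YYYYY'
        }).promise();
        // data.Body is a Buffer; convert it if you expect text
        console.log(data.Body.toString('utf-8'));
        return { statusCode: 200, body: JSON.stringify(data) };
    } catch (err) {
        console.log(err);
        return { statusCode: 500, body: JSON.stringify({ message: err.message }) };
    }
};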
I've struggled with this for two days. I am fairly new to JavaScript and AWS, so any hint will be appreciated.
I have 11 buckets. The others work fine except this one.
When I pass in another bucket name and key value it works, but when I pass in the one I need I get the error: "Missing required key 'Bucket' in params".
For example:
If I pass in bucket: 'businesspicture', it successfully loads the picture I need.
$scope.$watch("userInfo.picture", function (imageValue) {
var defaultIcon = '/images/defaultuser.jpg';
if (imageValue !== defaultIcon && !imageValue.startsWith("https://")) {
pictureServices.picture.getPictureFromS3({
fileName: imageValue,
bucket: "userpicture"
}, {}, function (pic) {
$scope.iconPreviewImage = pic.url;
$scope.userInfo.picture = imageValue;
}, function (error) {
dialogService.showNgResourceError(error);
});
}
});
pictureService.js.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('../s3_config.json');
var photoBuckets = new AWS.S3();
exports.getPictureFromS3 = function(fileName, bucketName) {
return new Promise(function(resolve, reject){
var params = {
Bucket: bucketName,
Key: fileName
};
photoBuckets.getSignedUrl('getObject', params, function(err, url){
if(err){
reject(err);
} else{
awsurl = {url:url};
resolve(awsurl);
}
});
});
};
Working my way through tutorials for AWS... So I've created an S3 bucket which, when a file is dropped into it, calls my lambda 'testHelloWorld', which sends an email... this all works fine (see below).
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
exports.handler = function(event, context) {
console.log("Incoming: ", event);
// var output = querystring.parse(event);
var eParams = {
Destination: {
ToAddresses: ["johnb#hotmail.com"]
},
Message: {
Body: {
Text: {
Data: "Hey! What is up?"
}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "johnb#hotmail.com"
};
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
but I want to extend the email to include data about the file that was uploaded to the bucket. I have found "How to trigger my Lambda Function once the file is uploaded to s3 bucket", which gives a Node.js code snippet that should capture the data. I have tried to import this into my existing lambda:
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function(event, context, exit){
console.log("Incoming: ", event);
// var output = querystring.parse(event);
// Get the object from the event and show its content type
// const bucket = event.Records[0].s3.bucket.name;
// const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: 'bucketName',
Key: 'keyName',
Source : 'SourceName',
Destination : 'DestinationName',
Message : 'MessageName'
};
s3.getObject(function(err, data){
if (err) {
console.log('ERROR ' + err);
// exit(err);
} else {
// the data has the content of the uploaded file
var eParams = {
Destination: {
ToAddresses: ["johnboy#hotmail.com"]
},
Message: {
Body: {
Text: {
Data: data
}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "johnboy#hotmail.com"
};
}
});
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
but this is failing on the params
message: 'There were 3 validation errors:
* MissingRequiredParameter: Missing required key \'Source\' in params
* MissingRequiredParameter: Missing required key \'Destination\' in params
* MissingRequiredParameter: Missing required key \'Message\' in params',
code: 'MultipleValidationErrors',
errors:
These Source, Destination and Message keys are listed in the params; are they not correctly formatted, so it isn't picking them up?
I can't find much online... any help appreciated.
UPDATE
OK, I've got it working without failing... if I use the test function in the lambda with the following code...
'use strict';
console.log('Loading function');
var aws = require('aws-sdk');
var ses = new aws.SES({
region: 'us-west-2'
});
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
exports.handler = function(event, context) {
console.log("Incoming: ", event);
// var output = querystring.parse(event);
var testData = null;
// Get the object from the event and show its content type
// const bucket = event.Records[0].s3.bucket.name;
// const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: 'bucket',
Key: 'key',
};
s3.getObject(params, function(err, data){
if (err) {
console.log('ERROR ' + err);
exit(err);
} else {
testData = data;
}
});
var eParams = {
Destination: {
ToAddresses: ["jim#him.com"]
},
Message: {
Body: {
Text: { Data: 'testData2' + testData}
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "jim#him.com"
};
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, data){
if(err) console.log(err);
else {
console.log("===EMAIL SENT===");
console.log(data);
console.log("EMAIL CODE END");
console.log('EMAIL: ', email);
context.succeed(event);
}
});
};
I get the email with the body: testData2null
So I tried uploading an image through the S3 bucket and I still get the email with the body testData2null.
Is there any way to debug this further, or does anyone know why it is saying null? I never actually tested the code from the other post which passes the data over to the email, I just assumed it would work. Does anyone else know how to obtain the data from the upload, please? Thanks.
You are declaring the var eParams within the callback of s3.getObject, but then you run ses.sendEmail outside of the callback. I think that's why!
You also need to move the ses.sendEmail to inside the callback of s3.getObject if you want to send the data from your object inside the email.
Try this:
s3.getObject(params, function(err, objectData) {
if (err) {
console.log('Could not fetch object data: ', err);
} else {
console.log('Data was successfully fetched from object');
var eParams = {
Destination: {
ToAddresses: ["johnboy#hotmail.com"]
},
Message: {
Body: {
Text: {
    // SES expects a string here; objectData.Body is a Buffer, so convert it
    Data: objectData.Body.toString()
},
},
Subject: {
Data: "Email Subject!!!"
}
},
Source: "johnboy#hotmail.com"
};
console.log('===SENDING EMAIL===');
var email = ses.sendEmail(eParams, function(err, emailResult) {
if (err) console.log('Error while sending email', err);
else {
console.log("===EMAIL SENT===");
console.log(objectData);
console.log("EMAIL CODE END");
console.log('EMAIL: ', emailResult);
context.succeed(event);
}
});
}
});
You need to read up on how Node.js works. It is event-based and depends on callbacks and promises. You should do:
s3.getObject(params, function(err, data){
//This is your callback for s3 API call. DO stuff here
if (err) {
console.log('ERROR ' + err);
exit(err);
} else {
testData = data;
// Got your data. Send the mail here
}
});
I have added my comments in the code above. Since Node.js is single-threaded, it fires off the S3 API call and moves straight on. At the point where the mail is sent, the S3 call has not completed yet, so data is still null. It is better to use promises here.
Anyway, read up on callbacks and promises in Node.js and how they work. But I hope this explains the logical error.
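For illustration, a promise-based sketch of the same flow (assuming the aws-sdk v2 .promise() helper, a Node 8+ runtime, and the same params and email shapes used above):

exports.handler = async (event) => {
    // Wait for the object before building the email
    const data = await s3.getObject(params).promise();

    const eParams = {
        Destination: { ToAddresses: ["jim#him.com"] },
        Message: {
            Body: { Text: { Data: 'testData2 ' + data.Body.toString() } },
            Subject: { Data: "Email Subject!!!" }
        },
        Source: "jim#him.com"
    };

    // sendEmail also supports .promise(), so it can be awaited as well
    const result = await ses.sendEmail(eParams).promise();
    console.log('===EMAIL SENT===', result);
    return event;
};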
I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID and save it in the public/files/ folder (this will change once I've got this all working, but it's good for testing right now).
I save the original name, and the uploaded name plus path, into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My Controller looks like this
var uuid = require('node-uuid'),
path = require('path'),
blobAdapter = require('skipper-disk');
module.exports = {
upload: function(req, res) {
var receiver = blobAdapter().receive({
dirname: sails.config.appPath + "/public/files/",
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
}
}),
results = [];
req.file('docs').upload(receiver, function (err, files) {
if (err) return res.serverError(err);
async.forEach(files, function(file, next) {
Document.create({
name: file.filename,
size: file.size,
localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
path: // *** and likewise how do i get the path ******
}).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}, function(err){
if (err) {
sails.log.error('caught error', err);
return res.serverError({error: err});
} else {
return res.json({ files: results });
}
});
});
},
_config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So in file api/services/Uploader.js
// Uploader utilities and helper methods
// designed to be relatively generic.
var fs = require('fs'),
Writable = require('stream').Writable;
exports.documentReceiverStream = function(options) {
var defaults = {
dirname: '/dev/null',
saveAs: function(file){
return file.filename;
},
completed: function(file, done){
done();
}
};
// I don't have access to jQuery here so this is the simplest way I
// could think of to merge the options.
var opts = defaults;
if (options.dirname) opts.dirname = options.dirname;
if (options.saveAs) opts.saveAs = options.saveAs;
if (options.completed) opts.completed = options.completed;
var documentReceiver = Writable({objectMode: true});
// This `_write` method is invoked each time a new file is received
// from the Readable stream (Upstream) which is pumping filestreams
// into this receiver. (filename === `file.filename`).
documentReceiver._write = function onFile(file, encoding, done) {
var newFilename = opts.saveAs(file),
fileSavePath = opts.dirname + newFilename,
outputs = fs.createWriteStream(fileSavePath, encoding);
file.pipe(outputs);
// Garbage-collect the bytes that were already written for this file.
// (called when a read or write error occurs)
function gc(err) {
sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
fs.unlink(fileSavePath, function (gcErr) {
if (gcErr) {
return done([err].concat([gcErr]));
} else {
return done(err);
}
});
};
file.on('error', function (err) {
sails.log.error('READ error on file ' + file.filename, '::', err);
});
outputs.on('error', function failedToWriteFile (err) {
sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
gc(err);
});
outputs.on('finish', function successfullyWroteFile () {
sails.log.debug("file uploaded")
opts.completed({
name: file.filename,
size: file.size,
localName: newFilename,
path: fileSavePath
}, done);
});
};
return documentReceiver;
}
and then my controller just became (in api/controllers/DocumentController.js)
var uuid = require('node-uuid'),
path = require('path');
module.exports = {
upload: function(req, res) {
var results = [],
streamOptions = {
dirname: sails.config.appPath + "/public/files/",
saveAs: function(file) {
var filename = file.filename,
newName = uuid.v4() + path.extname(filename);
return newName;
},
completed: function(fileData, next) {
Document.create(fileData).exec(function(err, savedFile){
if (err) {
next(err);
} else {
results.push({
id: savedFile.id,
url: '/files/' + savedFile.localName
});
next();
}
});
}
};
req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
function (err, files) {
if (err) return res.serverError(err);
res.json({
message: files.length + ' file(s) uploaded successfully!',
files: results
});
}
);
},
_config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all data you need:
req.file('fileTest').upload({
// You can apply a file upload limit (in bytes)
maxBytes: maxUpload,
adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
if (err) {
var error = { "status": 500, "error" : err };
res.status(500);
return res.json(error);
} else {
for (var u in uploadedFiles) {
//"fd" contains the actual file path (and name) of your file on disk
var fileOnDisk = uploadedFiles[u].fd;
// I suggest you stringify the object to see what it contains and might be useful to you
console.log(JSON.stringify(uploadedFiles[u]));
}
}
});
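If all you need is the generated file name rather than the full path, a minimal sketch (fd is the on-disk path reported by skipper-disk for each uploaded file):

var path = require('path');

for (var u in uploadedFiles) {
    var fileOnDisk = uploadedFiles[u].fd;      // full path of the stored file
    var savedName = path.basename(fileOnDisk); // just the generated file name
    console.log(uploadedFiles[u].filename, '->', savedName);
}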