Using AWS JS SDK in Parse Cloud Code 'AWS is undefined' - javascript

I am trying to get the AWS JS SDK to work in Parse's Cloud Code. Below is a simple S3 bucket upload function that works in the browser but not in Cloud Code. I have the JS SDK file uploaded in the cloud folder. What am I doing wrong?
Parse.Cloud.define("publish", function(request, response) {
    require("cloud/aws-sdk");
    AWS.config.update({accessKeyId: 'MYKEY', secretAccessKey: 'MYSECRETKEY'});
    AWS.config.region = 'us-west-2';
    var s3bucket = new AWS.S3({params: {Bucket: 'maggiesotterocms'}});
    var params = {Key: 'data.json', Body: 'Hello world!', ContentLanguage: 'JSON'};
    s3bucket.upload(params, function(err, data) {
        if (err) {
            console.log("Error uploading data: ", err);
        } else {
            console.log("Success");
        }
    });
});
I get this error in Parse's error logs when trying to run it:
Ran cloud function publish for user USERID with: Input: {"error":{},"success":{}} Result: ReferenceError: AWS is not defined at main.js:4:2
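For reference, require() only returns the module's exports; it does not create a global AWS object, which is what the ReferenceError points at. A minimal sketch of that change, assuming the bundled file at cloud/aws-sdk.js actually exports the AWS object and runs in Parse's sandbox (ContentType is substituted here for the original ContentLanguage, and response.success/error are used so the function reports its result; both are adjustments, not from the original post):
// Capture what require() returns; nothing is added to the global scope automatically.
var AWS = require("cloud/aws-sdk");

Parse.Cloud.define("publish", function(request, response) {
    AWS.config.update({accessKeyId: 'MYKEY', secretAccessKey: 'MYSECRETKEY'});
    AWS.config.region = 'us-west-2';
    var s3bucket = new AWS.S3({params: {Bucket: 'maggiesotterocms'}});
    var params = {Key: 'data.json', Body: 'Hello world!', ContentType: 'application/json'};
    s3bucket.upload(params, function(err, data) {
        if (err) {
            response.error("Error uploading data: " + JSON.stringify(err));
        } else {
            response.success("Success");
        }
    });
});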

Related

AWS SDK: s3.upload is not a function

I am trying to upload files to my S3 bucket from my Node.js app, so I am following some very simple tutorials like this one.
The code is pretty straightforward:
const AWS = require("aws-sdk"); // fresh install, version: ^2.697.0
AWS.config.update({ // Credentials are OK
    accessKeyId: process.env.s3_accessKeyId,
    secretAccessKey: process.env.s3_secretAccessKey,
    region: 'eu-central-1'
});
const s3 = new AWS.S3();
let params = {
    // (some upload params, file name, bucket name etc)
};
s3.upload(params); // <-- crash with error: "s3.upload is not a function"
I had a look at the official AWS documentation and s3.upload() seems to be a thing. I have no idea why I get an error.
If I console.log(s3.upload) I get undefined.
Node.js v13.11.0.
EDIT
I ended up using s3.putObject() which does pretty much the same thing as s3.upload(), and works, while the latter is still inexplicably undefined...
console.log(`typeof s3.upload = `);
console.log(typeof s3.upload); // undefined?? WHY
console.log(`typeof s3.putObject = `);
console.log(typeof s3.putObject); // function, and works
Use putObject, example:
s3.putObject({
    Bucket: bucketName,
    Key: 'folder/file.txt',
    Body: data,
    ACL: 'public-read'
}, function (err, result) {
    if (err) {
        console.log('Error uploading file: ', err);
    } else {
        console.log('Successfully uploaded file.');
    }
});
Documentation: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
You can also try reinstalling the aws-sdk package.
Refer: https://github.com/aws/aws-sdk-js/issues/916#issuecomment-191012462
You can try this:
s3 = new AWS.S3({apiVersion: '2006-03-01'});
s3.upload(params, function(err, data) {
    console.log(err, data);
});

How to get an upload's URL from the Amazon S3 service with Node (Auto signed)

I am attempting to use Node to upload a file to an S3 server. I can do this successfully. However, the reason for using S3 is that I need to upload a file for a user's profile picture. How would I get the URL of the uploaded file in S3 in my code and store it in the DB, so it is viewable on the frontend? I also need to append some kind of unique string to the file name so it doesn't get overwritten. How would I go about doing this?
Thank you.
function uploadFileToS3Bucket(filePath) {
    AWS.config.update({
        accessKeyId: 'AWS ACCESS KEY',
        secretAccessKey: 'AWS SECRET KEY'
    });
    var s3 = new AWS.S3();
    var params = {
        Bucket: 'AWS_BUCKET_NAME',
        Body: fs.createReadStream(filePath),
        Key: "BB_Teams_PDF_" + Date.now() + "_" + path.basename(filePath)
    };
    s3.upload(params, function (err, data) {
        if (err) {
            console.log("Error", err);
        }
        // success
        if (data) {
            console.log("Uploaded in:", data.Location);
        }
    });
}
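For what it's worth, a minimal sketch of both pieces (the callback parameter, bucket name, and key prefix below are illustrative, not from the original post): s3.upload reports the object's URL in data.Location, which can then be stored in the DB, and a timestamp plus a random suffix on the key prevents overwrites. For a private bucket, s3.getSignedUrl('getObject', ...) can produce a time-limited URL instead.
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');

var s3 = new AWS.S3();

function uploadFileToS3Bucket(filePath, callback) {
    // Timestamp plus a short random suffix keeps keys unique so files are not overwritten.
    var uniqueKey = Date.now() + "_" + Math.random().toString(36).slice(2) + "_" + path.basename(filePath);
    var params = {
        Bucket: 'AWS_BUCKET_NAME',
        Body: fs.createReadStream(filePath),
        Key: uniqueKey
    };
    s3.upload(params, function (err, data) {
        if (err) {
            return callback(err);
        }
        // data.Location is the URL of the uploaded object; save it in the DB for the frontend.
        // For a private bucket, hand back a time-limited link instead:
        // var signedUrl = s3.getSignedUrl('getObject', { Bucket: params.Bucket, Key: uniqueKey, Expires: 3600 });
        callback(null, data.Location);
    });
}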

Access denied in Lambda when trying to access a file uploaded to an S3 bucket

Following on from the great help I received on my original post,
Uploading a file to an S3 bucket, triggering a Lambda, which sends an email containing info on the file uploaded to the S3 bucket,
I have previously tested sending the email, so I know that works, but when I try to include the data of the upload it throws an error:
Could not fetch object data: { AccessDenied: Access Denied
at Request.extractError (/var/runtime/node_modules/aws-sdk/lib/services/s3.js:577:35)
at Request.callListeners (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:105:20)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/sequential_executor.js:77:10)
at Request.emit (/var/runtime/node_modules/aws-sdk/lib/request.js:683:14)
I have found many questions related to this online regarding policies, roles, etc., so I have added the Lambda to the S3 event and added S3 permission to the role, e.g.
https://stackoverflow.com/questions/35589641/aws-lambda-function-getting-access-denied-when-getobject-from-s3
Unfortunately none of these have helped. However, I noticed a comment:
Then the best solution is to allow S3FullAccess, see if it works. If it does, then remove one set of access at a time from the policy and find the least privileges required for your Lambda to work. If it does not work even after giving S3FullAccess, then the problem is elsewhere
So how would I go about finding where the problem is?
Thank you.
'use strict';
console.log('Loading function');

var aws = require('aws-sdk');
var ses = new aws.SES({
    region: 'us-west-2'
});
//var fileType = require('file-type');
console.log('Loading function2');
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
console.log('Loading function3');
//var textt = "";

exports.handler = function(event, context) {
    console.log("Incoming: ", event);
    // textt = event.Records[0].s3.object.key;
    // var output = querystring.parse(event);
    //var testData = null;

    // Get the object from the event and show its content type
    // const bucket = event.Records[0].s3.bucket.name;
    // const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    const params = {
        Bucket: 'bucket',
        Key: 'key',
    };

    s3.getObject(params, function(err, objectData) {
        if (err) {
            console.log('Could not fetch object data: ', err);
        } else {
            console.log('Data was successfully fetched from object');
            var eParams = {
                Destination: {
                    ToAddresses: ["fake@fake.com"]
                },
                Message: {
                    Body: {
                        Text: {
                            Data: objectData
                            // Data: textt
                        }
                    },
                    Subject: {
                        Data: "Email Subject!!!"
                    }
                },
                Source: "fake@fake.com"
            };

            console.log('===SENDING EMAIL===');
            var email = ses.sendEmail(eParams, function(err, emailResult) {
                if (err) console.log('Error while sending email', err);
                else {
                    console.log("===EMAIL SENT===");
                    //console.log(objectData);
                    console.log("EMAIL CODE END");
                    console.log('EMAIL: ', emailResult);
                    context.succeed(event);
                }
            });
        }
    });
};
UPDATE
I have added comments to the code and checked the logs... it doesn't get past this line:
var s3 = new aws.S3({ apiVersion: '2006-03-01', accessKeyId: process.env.ACCESS_KEY, secretAccessKey: process.env.SECRET_KEY, region: process.env.LAMBDA_REGION });
Is this in any way related to the access denied error?
NOTE: ALL I WANT IS THE FILENAME OF THE UPLOADED FILE
UPDATE 2
I've replaced the line causing the issue with:
var s3 = new aws.S3().getObject({ Bucket: this.awsBucketName, Key: 'keyName' }, function(err, data) {
    if (!err) {
        console.log(data.Body.toString());
    }
});
but this throws TypeError: s3.getObject is not a function.
I also tried var s3 = new aws.S3();
and this goes back to the original error of Could not fetch object data: { AccessDenied: Access Denied.
First of all, the region should be the S3 bucket's region, not the Lambda region. Next, you need to verify your credentials and whether they have access to the S3 bucket you have defined. As you stated in one of the comments, try attaching the Amazon-managed S3 full access policy to the IAM user associated with the credentials you are using in Lambda. The next step would be to use the AWS CLI to see if you can access this bucket, with something like
aws s3 ls
Having said that, you should not use credentials at all. Since Lambda and S3 are both Amazon services, you should use roles. Just give the Lambda function a role that grants it access to S3 and do not use AWS IAM credentials for this. Then
var s3 = new AWS.S3();
is sufficient.
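To make the role-based approach concrete, here is a rough sketch (an assumption-laden example: it presumes the function is triggered by the S3 upload event and that its execution role already allows s3:GetObject on the bucket). The bucket and key come from the event record, as in the commented-out lines of the original handler, and the client is built without any credentials so it picks up the role automatically.
'use strict';
var aws = require('aws-sdk');
// No accessKeyId/secretAccessKey: the Lambda execution role supplies the credentials.
var s3 = new aws.S3({ apiVersion: '2006-03-01' });

exports.handler = function(event, context) {
    // The S3 trigger puts the bucket name and object key in the event record.
    var bucket = event.Records[0].s3.bucket.name;
    var key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    console.log('Uploaded file name: ', key);

    s3.getObject({ Bucket: bucket, Key: key }, function(err, objectData) {
        if (err) {
            console.log('Could not fetch object data: ', err);
            context.fail(err);
        } else {
            console.log('Fetched object, content type: ', objectData.ContentType);
            context.succeed(event);
        }
    });
};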

Error uploading data: The request signature we calculated does not match the signature you provided. Check your key and signing method

I am trying to upload an image to an AWS S3 bucket from React Native, but I am getting Error uploading data: Error: The request signature we calculated does not match the signature you provided. Check your key and signing method.
Has anyone tried to upload images this way?
JavaScript code to upload images to AWS S3:
var uniqueFileName = image.fileName;
console.log("File Name", uniqueFileName);
var bodyData = image.data;
console.log("File Json", bodyData);
var filetype = image.type;
console.log("File Type", filetype);

var AWS3 = require('aws-sdk/dist/aws-sdk-react-native');
AWS3.config.update({
    "accessKeyId": AWS.accessKeyId,
    "secretAccessKey": AWS.secretAccessKey,
    "region": "us-east-1"
});
var s3 = new AWS3.S3();
var params = {
    Bucket: AWS.bucketName,
    Key: uniqueFileName,
    ContentType: filetype,
    Body: bodyData,
    ContentEncoding: 'base64'
};
s3.upload(params, function (err, res) {
    if (err) {
        console.log("Error uploading data: ", err);
    } else {
        console.log("Successfully uploaded data");
    }
});
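There is no accepted answer in this thread, so the following is only an assumption, not a confirmed fix: a wrong or mistyped secret access key is the most common cause of this signature error, so that is worth checking first. Separately, a frequently suggested cleanup is to decode the base64 payload into a Buffer (via the 'buffer' npm package, since React Native has no global Buffer) and drop ContentEncoding, as sketched here:
var Buffer = require('buffer').Buffer; // React Native needs the 'buffer' package for this

var fileBuffer = Buffer.from(image.data, 'base64'); // raw bytes instead of a base64 string
var params = {
    Bucket: AWS.bucketName,
    Key: uniqueFileName,
    ContentType: filetype,
    Body: fileBuffer
    // ContentEncoding is no longer needed once the body is binary
};
s3.upload(params, function (err, res) {
    if (err) {
        console.log("Error uploading data: ", err);
    } else {
        console.log("Successfully uploaded data");
    }
});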

Upload a binary file to S3 using AWS SDK for Node.js

Update: For future reference, Amazon has now updated the documentation from what was there at the time of asking. As per @Loren Segal's comment below:
We've corrected the docs in the latest preview release to document this parameter properly. Sorry about the mixup!
I'm trying out the developer preview of the AWS SDK for Node.js and want to upload a zipped tarball to S3 using putObject.
According to the documentation, the Body parameter should be...
Body - (Base64 Encoded Data)
...therefore, I'm trying out the following code...
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Read in the file, convert it to base64, store to S3
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var base64data = new Buffer(data, 'binary').toString('base64');
    var s3 = new AWS.S3();
    s3.client.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: base64data
    }).done(function (resp) {
        console.log('Successfully uploaded package.');
    });
});
Whilst I can then see the file in S3, if I download it and attempt to decompress it I get an error that the file is corrupted. Therefore it seems that my method for 'base64 encoded data' is off.
Can someone please help me to upload a binary file using putObject?
You don't need to convert the buffer to a base64 string. Just set Body to data and it will work.
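In other words, a small sketch based on the original snippet (same bucket and key, but using the plain callback form of putObject rather than the old preview .done() style):
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: data // pass the raw Buffer straight through; no base64 step
    }, function (err) {
        if (err) { throw err; }
        console.log('Successfully uploaded package.');
    });
});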
Here is a way to send a file using streams, which might be necessary for large files and will generally reduce memory overhead:
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Stream the file straight to S3 (no base64 step)
var fileStream = fs.createReadStream('myarchive.tgz');
fileStream.on('error', function (err) {
    if (err) { throw err; }
});
fileStream.on('open', function () {
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: fileStream
    }, function (err) {
        if (err) { throw err; }
    });
});
I was able to upload my binary file this way.
var fileStream = fs.createReadStream("F:/directory/fileName.ext");
var putParams = {
    Bucket: s3bucket,
    Key: s3key,
    Body: fileStream
};
s3.putObject(putParams, function(putErr, putData) {
    if (putErr) {
        console.error(putErr);
    } else {
        console.log(putData);
    }
});
