I want to upload a file from my frontend to my Amazon S3 (AWS) bucket.
I'm using Dropzone, so I convert my file and send it to my backend.
In my backend, the file looks like this:
{ fieldname: 'file',
  originalname: 'test.torrent',
  encoding: '7bit',
  mimetype: 'application/octet-stream',
  buffer: { type: 'Buffer', data: [Array] },
  size: 7449 }
and when I try to upload my file with my function:
var file = data.patientfile.file.buffer;
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: file };
s3.upload(params, function (err, data) {
    if (err) {
        console.log("******************", err);
    } else {
        console.log("Successfully uploaded data to myBucket/myKey");
    }
});
I get as error:
Unsupported body payload object
Do you know how I can send my file?
I have tried to send it with putObject and got a similar error.
I think you might need to convert the file content (which in this case is probably data.patientfile.file.buffer) to binary first:
var base64data = Buffer.from(data, 'binary'); // Buffer.from replaces the deprecated new Buffer(...)
so the params would look like:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: base64data };
Or, if I'm mistaken and the buffer is already binary, then you can try:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: data.patientfile.file.buffer};
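If it still fails with "Unsupported body payload object", it may be because the buffer was JSON-serialized on the way to the backend; the { type: 'Buffer', data: [Array] } shape in the question is exactly what a Buffer looks like after JSON.stringify. A minimal sketch of rebuilding a real Buffer from that shape (the Buffer.isBuffer check and the use of originalname as the key are my additions, not from the question):
// Rebuild a real Buffer if the upload path serialized it to plain JSON
var raw = data.patientfile.file.buffer;
var body = Buffer.isBuffer(raw) ? raw : Buffer.from(raw.data); // raw.data is the plain byte array
var params = {
    Bucket: myBucket,
    Key: data.patientfile.file.originalname, // keeps the original file name (the question used fieldname)
    Body: body
};
s3.upload(params, function (err, res) {
    if (err) {
        console.log("******************", err);
    } else {
        console.log("Successfully uploaded data to myBucket/myKey");
    }
});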
This is my production code, which is working.
Note that the issue usually happens around data1111.
To give the full picture, all the key parts of the working code are included below.
client:
// html
<input
    type="file"
    onChange={this.onFileChange}
    multiple
/>

// javascript
onFileChange = event => {
    const files = event.target.files;
    var file = files[0];
    var reader = new FileReader();
    reader.onloadend = function (e) {
        // save this data1111 and send to server
        let data1111 = e.target.result; // reader.result // ----------------- data1111
    };
    reader.readAsBinaryString(file);
};
server:
// node.js / javascript
const response = await s3
    .upload({
        Bucket: s3Bucket, // bucket
        Key: s3Path, // folder/file
        // receiving at the server - data1111 - via request body (or other)
        Body: Buffer.from(req.body.data1111, "binary") // ----------------- data1111
    })
    .promise();
return response;
Getting the above code to work took a full two days.
Hope this helps someone in the future.
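For completeness, here is one way the data1111 string could travel from the browser to that upload call: a minimal sketch assuming an Express server with a raised JSON body limit (the /upload route, the 25mb limit, and the uploadToS3 wrapper are illustrative names, not from the original answer):
// client: POST the binary string produced by FileReader
fetch("/upload", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ data1111: data1111, fileName: file.name })
});

// server (Express): parse the JSON body, then run the s3.upload call shown above
const express = require("express");
const app = express();
app.use(express.json({ limit: "25mb" })); // binary-as-string payloads can be large

app.post("/upload", async (req, res) => {
    const response = await uploadToS3(req.body); // hypothetical wrapper around the s3.upload(...).promise() above
    res.json(response);
});
Sending the file as base64 (FileReader.readAsDataURL) and decoding with Buffer.from(str, 'base64') on the server is a common alternative that survives JSON transport just as well.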
I implemented Glen k's answer with Node.js and it worked for me:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
    accessKeyId: process.env.AWSAccessKeyID,
    secretAccessKey: process.env.AWSSecretAccessKey,
});

let base64data = Buffer.from(file.productImg.data, 'binary');

const params = {
    Bucket: BUCKET_NAME,
    Key: KEY,
    Body: base64data
};

s3.upload(params, function (err, data) {
    if (err) {
        console.log(err);
        throw err;
    }
    console.log(data);
    console.log(`File uploaded successfully. ${data.Location}`);
});
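For context, the file.productImg.data shape above matches what express-fileupload puts on req.files, so the snippet might sit inside a route like this. This is only a sketch under that assumption (the /product-image path is illustrative):
const express = require('express');
const fileUpload = require('express-fileupload');

const app = express();
app.use(fileUpload()); // exposes uploaded files on req.files

app.post('/product-image', (req, res) => {
    const file = req.files; // req.files.productImg.data is a Buffer
    let base64data = Buffer.from(file.productImg.data, 'binary');
    s3.upload({ Bucket: BUCKET_NAME, Key: KEY, Body: base64data }, (err, data) => {
        if (err) return res.status(500).json(err);
        res.json({ location: data.Location });
    });
});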
Related
I'm trying to upload my HTML result file to AWS S3 after my Protractor test suite execution is complete. I use JavaScript for my automation. Please help me resolve the error here:
static uploadtoS3() {
    const AWS = require('aws-sdk');
    var FILE_NAME_LOCAL;
    var crypt = require("crypto");
    fs.readdirSync("./reports/html/").forEach(file => {
        if (file.startsWith("execution_report")) {
            FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
        }
    });
    console.log("File name: " + FILE_NAME_LOCAL);
    // Get file stream
    const fileStream = fs.createReadStream(FILE_NAME_LOCAL);
    var hash = crypt.createHash("md5")
        .update(new Buffer.from(FILE_NAME_LOCAL, 'binary'))
        .digest("base64");
    console.log("Hash: " + hash);
    // Call S3 to retrieve upload file to specified bucket
    const uploadParams = {
        Bucket: 'my.bucket',
        Key: 'automation_report.html',
        Body: fileStream,
        ContentType: "text/html",
        ContentMD5: hash,
        // CacheControl: "max-age=0,no-cache,no-store,must-revalidate",
        ACL: 'public-read',
    };
    const s3 = new AWS.S3({
endpoint: "https://3site-abc-wip1.nam.nsroot.net",
accessKeyId: <access_key_id>,
secretAccessKey: <secret_access_key>,
signatureVersion: 'v4',
ca: fs.readFileSync('C:\\Users\\AB11111\\InternalCAChain_PROD.pem'),
sslEnabled: true
});
// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
console.log("Inside upload..");
if (err) {
throw err;
} if (data) {
console.log('Upload Success. File location:' + data.Location);
}
});
}
Error: unable to get local issuer certificate
    at TLSSocket.onConnectSecure (_tls_wrap.js:1049:34)
    at TLSSocket.emit (events.js:182:13)
    at TLSSocket.EventEmitter.emit (domain.js:442:20)
    at TLSSocket._finishInit (_tls_wrap.js:631:8)
I got it working. I needed to add the certificate in AWS.config. The full working code is below; this might help someone. Note: the credentials and URLs below are for representation purposes only and aren't real:
const AWS = require('aws-sdk');
const https = require('https');
var FILE_NAME_LOCAL;

AWS.config.update({
    httpOptions: {
        agent: new https.Agent({
            // rejectUnauthorized: false, // Don't use this - this is insecure, just like --no-verify-ssl in AWS cli
            ca: fs.readFileSync('./support/InternalCAChain_PROD.pem')
        })
    }
});

const s3 = new AWS.S3({
    s3BucketEndpoint: true,
    endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
    accessKeyId: "abABcdCD",
    secretAccessKey: "kjlJLlklkLlUYt",
});

// Get file stream
fs.readdirSync("./reports/html/").forEach(file => {
    if (file.startsWith("execution_report")) {
        FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
    }
});
const fileStream = fs.readFileSync(FILE_NAME_LOCAL);

// Call S3 to retrieve upload file to specified bucket
const uploadParams = {
    Bucket: 'my.bucket',
    Key: path.basename(FILE_NAME_LOCAL),
    Body: fileStream,
    ContentType: "text/html",
    ContentEncoding: 'UTF-8',
    ACL: 'public-read',
};

// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
    console.log("Inside upload..");
    if (err) {
        throw err;
    }
    if (data) {
        s3FileLocation = data.Location;
        console.log('Upload Success. File location:' + data.Location);
    }
});
I'm trying to add images to my S3 bucket in AWS, but it doesn't seem to work. I get a SignatureDoesNotMatch error.
Here's how I'm uploading the file/image:
FrontEnd
const file = e.target.files[0];
const fileParts = file.name.split('.');
const fileName = fileParts[0];
const fileType = fileParts[1];
const response = axios.post('api/aws/sign_s3', { fileName, fileType });
Backend
router.post('/sign_s3', async (req, res) => {
    aws.config.update({
        accessKeyId: config.aws.accessKey,
        secretAccessKey: config.aws.secretKey,
        region: 'us-west-1'
    });
    const s3 = new aws.S3(); // Create a new instance of S3
    const fileName = req.body.fileName;
    const fileType = req.body.fileType;
    const s3Params = {
        Bucket: config.aws.bucketName,
        Key: fileName,
        Expires: 500,
        ContentType: fileType,
        ACL: 'public-read'
    };
    s3.getSignedUrl('putObject', s3Params, (err, data) => {
        if (err) return res.send(err);
        const returnData = {
            signedRequest: data,
            url: `https://sim-to-do.s3.amazonaws.com/${fileName}`
        };
        res.json({ success: true, responseData: returnData });
    });
});
I get two URLs. When I go to the first one, I get the following error code:
SignatureDoesNotMatch
Error message:
The request signature we calculated does not match the signature you provided. Check your key and signing method.
What am I doing wrong? What's the correct way of uploading a file to AWS S3?
I was able to fix this issue after removing the Content-Type from the headers.
If you get "Signature does not match", it's highly likely you used a wrong secret access key. Can you double-check access key and secret access key to make sure they're correct?
Following awendt's answer:
The base64 string is what I'm getting in the controller. I'm trying to save it as a PNG file or upload it directly to the S3 server, but the uploaded file is blank on AWS, while the same encoded string displays the image perfectly in an online base64 image viewer. Here is the code.
imageCropped(image: string) {
    this.croppedImage = image;
    let base64Image = this.croppedImage.split(';base64,').pop();
    var the_blob = new Blob([window.atob(base64Image)], { type: 'image/jpg' });
    var reader = new FileReader();
    var url = URL.createObjectURL(the_blob);
    // AWS CONFIGURATIONS
    s3.upload({ Key: 'upload_2.png', Bucket: bucketName, Body: the_blob, ContentEncoding: 'base64', ACL: 'public-read' }, function (err, data) {
        if (err) {
            console.log(err, 'there was an error uploading your file');
        } else if (data) {
            console.log(data, 'File has been uploaded successfully!');
        }
    });
}
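One likely reason the object ends up blank or corrupted: passing the window.atob output (a raw binary string) straight into a Blob makes the browser re-encode it as UTF-8, which mangles the bytes. Here is a sketch of the same upload with the base64 payload decoded into a Uint8Array first (names are reused from the snippet above; the 'image/png' type and the dropped ContentEncoding flag are my assumptions):
imageCropped(image: string) {
    this.croppedImage = image;
    const base64Image = this.croppedImage.split(';base64,').pop();

    // decode base64 into real bytes instead of a binary string
    const byteString = window.atob(base64Image);
    const bytes = new Uint8Array(byteString.length);
    for (let i = 0; i < byteString.length; i++) {
        bytes[i] = byteString.charCodeAt(i);
    }
    const the_blob = new Blob([bytes], { type: 'image/png' });

    s3.upload({ Key: 'upload_2.png', Bucket: bucketName, Body: the_blob, ContentType: 'image/png', ACL: 'public-read' },
        function (err, data) {
            if (err) {
                console.log(err, 'there was an error uploading your file');
            } else if (data) {
                console.log(data, 'File has been uploaded successfully!');
            }
        });
}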
I am new to Angular 2 and Node.js; so far I have only needed to send JSON objects to the backend with POST requests, but now I need to send an image file and I am running into problems. I want a user to select an image file, which is then sent to the backend as the body of a POST request; in the backend the image is uploaded to S3. On the frontend I have the following HTML:
<div>
    <input type="file" (change)="testOnChange($event)" multiple/>
</div>
which triggers the following code in the associated component
testOnChange(event) {
    var files = event.srcElement.files;
    this.jobService.s3Upload(files[0]).subscribe(data => {
        console.log(data);
    });
}
which uses the s3Upload function in the jobService, which is:
s3Upload(file) {
    const body = file;
    const headers = new Headers({ 'Content-Type': 'image/jpg' });
    return this.http.post('http://localhost:3000/job/s3upload', body, { headers: headers })
        .map((response: Response) => {
            console.log(response);
            return response;
        })
        .catch((error: Response) => {
            this.errorService.handleError(error.json());
            return Observable.throw(error.json());
        });
}
Then on the backend
router.post('/s3upload', function (req, res) {
    console.log(req.body);
    const file = req.body;
    aws.config.update({
        "accessKeyId": process.env.AWS_ACCESS_KEY_ID,
        "secretAccessKey": process.env.AWS_SECRET_ACCESS_KEY,
        "region": "us-west-2"
    });
    const s3 = new aws.S3();
    var params = {
        Bucket: 'labeller-images',
        Key: file.name,
        ContentType: file.type,
        Body: file,
        ACL: 'public-read'
    };
    console.log(params);
    s3.putObject(params, function (err, data) {
        if (err) {
            console.log("upload error");
            return res.status(500).json({
                title: 'An error occurred',
                error: err
            });
        } else {
            console.log("Successfully uploaded data");
            res.status(200).json({
                message: 'Success',
                obj: `https://labeller-images.s3.amazonaws.com/${file.name}`
            });
        }
    });
});
The specific error that arises is:
There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'Key' in params
* InvalidParameterType: Expected params.Body to be a string, Buffer, Stream, Blob, or typed array object
But the logging statements reveal that req.body is just {}, so params is:
{ Bucket: 'labeller-images',
  Key: undefined,
  ContentType: undefined,
  Body: {},
  ACL: 'public-read' }
So it looks like the req.body is empty and the image file is never actually getting to the backend. Can anyone see what I'm doing wrong here? Is it some kind of file serialization issue?
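Yes, it is essentially a serialization issue: the browser can't JSON-serialize a File object, and body-parser style middleware only fills req.body for JSON or urlencoded payloads, so the route sees {}. The usual fix is to send multipart/form-data and read it with something like multer; the following is only a sketch under that assumption (FormData on the client, multer's memoryStorage on the server, and the field name 'file' is illustrative):
// client: send the file as multipart/form-data (let the browser set the Content-Type)
s3Upload(file) {
    const body = new FormData();
    body.append('file', file);
    return this.http.post('http://localhost:3000/job/s3upload', body);
}

// server: multer parses the multipart body and exposes the bytes as a Buffer
const multer = require('multer');
const upload = multer({ storage: multer.memoryStorage() });

router.post('/s3upload', upload.single('file'), function (req, res) {
    const params = {
        Bucket: 'labeller-images',
        Key: req.file.originalname,
        ContentType: req.file.mimetype,
        Body: req.file.buffer,   // a real Buffer, which putObject accepts
        ACL: 'public-read'
    };
    // ... s3.putObject(params, ...) as in the original route
});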
Update: For future reference, Amazon has now updated the documentation from what was there at the time of asking. As per Loren Segal's comment below:
We've corrected the docs in the latest preview release to document this parameter properly. Sorry about the mixup!
I'm trying out the developer preview of the AWS SDK for Node.js and want to upload a zipped tarball to S3 using putObject.
According to the documentation, the Body parameter should be...
Body - (Base64 Encoded Data)
...therefore, I'm trying out the following code...
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Read in the file, convert it to base64, store to S3
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var base64data = new Buffer(data, 'binary').toString('base64');
    var s3 = new AWS.S3();
    s3.client.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: base64data
    }).done(function (resp) {
        console.log('Successfully uploaded package.');
    });
});
Whilst I can then see the file in S3, if I download it and attempt to decompress it I get an error that the file is corrupted. Therefore it seems that my method for 'base64 encoded data' is off.
Can someone please help me to upload a binary file using putObject?
You don't need to convert the buffer to a base64 string. Just set Body to data and it will work.
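In other words, with the snippet from the question only the Body line changes. A minimal sketch (written against s3.putObject; the preview SDK in the question exposed the same call as s3.client.putObject):
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: data // pass the raw Buffer; no base64 step
    }, function (err) {
        if (err) { throw err; }
        console.log('Successfully uploaded package.');
    });
});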
Here is a way to send a file using streams, which might be necessary for large files and will generally reduce memory overhead:
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Stream the file straight to S3 (no base64 step needed)
var fileStream = fs.createReadStream('myarchive.tgz');
fileStream.on('error', function (err) {
    if (err) { throw err; }
});
fileStream.on('open', function () {
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: fileStream
    }, function (err) {
        if (err) { throw err; }
    });
});
I was able to upload my binary file this way.
var fileStream = fs.createReadStream("F:/directory/fileName.ext");
var putParams = {
    Bucket: s3bucket,
    Key: s3key,
    Body: fileStream
};
s3.putObject(putParams, function (putErr, putData) {
    if (putErr) {
        console.error(putErr);
    } else {
        console.log(putData);
    }
});
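If the files get large, the managed s3.upload call can be swapped in for putObject: it splits the stream into multipart parts and can report progress. A short sketch using the same stream (the part size and progress logging are just illustrative):
var fileStream = fs.createReadStream("F:/directory/fileName.ext");
var managed = s3.upload(
    { Bucket: s3bucket, Key: s3key, Body: fileStream },
    { partSize: 10 * 1024 * 1024, queueSize: 4 } // optional multipart tuning
);
managed.on('httpUploadProgress', function (progress) {
    console.log("uploaded", progress.loaded, "of", progress.total || "?", "bytes");
});
managed.send(function (err, data) {
    if (err) {
        console.error(err);
    } else {
        console.log("Upload finished:", data.Location);
    }
});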