AWS S3 - Error: unable to get local issuer certificate - javascript

I'm trying to upload my HTML result file to AWS S3 after my Protractor test suite execution is complete. I use JavaScript in my automation. Please help me resolve the error here:
static uploadtoS3() {
  const AWS = require('aws-sdk');
  const fs = require('fs');
  const crypt = require('crypto');
  var FILE_NAME_LOCAL;
  fs.readdirSync("./reports/html/").forEach(file => {
    if (file.startsWith("execution_report")) {
      FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
    }
  });
  console.log("File name: " + FILE_NAME_LOCAL);
  // Get file stream
  const fileStream = fs.createReadStream(FILE_NAME_LOCAL);
  var hash = crypt.createHash("md5")
    .update(Buffer.from(FILE_NAME_LOCAL, 'binary'))
    .digest("base64");
  console.log("Hash: " + hash);
  // Call S3 to upload the file to the specified bucket
  const uploadParams = {
    Bucket: 'my.bucket',
    Key: 'automation_report.html',
    Body: fileStream,
    ContentType: "text/html",
    ContentMD5: hash,
    // CacheControl: "max-age=0,no-cache,no-store,must-revalidate",
    ACL: 'public-read',
  };
  const s3 = new AWS.S3({
    endpoint: "https://3site-abc-wip1.nam.nsroot.net",
    accessKeyId: <access_key_id>,
    secretAccessKey: <secret_access_key>,
    signatureVersion: 'v4',
    ca: fs.readFileSync('C:\\Users\\AB11111\\InternalCAChain_PROD.pem'),
    sslEnabled: true
  });
  // Create S3 service object and upload
  s3.upload(uploadParams, function (err, data) {
    console.log("Inside upload..");
    if (err) {
      throw err;
    }
    if (data) {
      console.log('Upload Success. File location: ' + data.Location);
    }
  });
}
Error: unable to get local issuer certificate
    at TLSSocket.onConnectSecure (_tls_wrap.js:1049:34)
    at TLSSocket.emit (events.js:182:13)
    at TLSSocket.EventEmitter.emit (domain.js:442:20)
    at TLSSocket._finishInit (_tls_wrap.js:631:8)

I got it working. I needed to add the certificate in AWS.config. The full working code is below; it might help someone. Note: the credentials and URLs below are for representation purposes only and are not real:
const AWS = require('aws-sdk');
const https = require('https');
const fs = require('fs');
const path = require('path');
var FILE_NAME_LOCAL;
var s3FileLocation;
AWS.config.update({
  httpOptions: {
    agent: new https.Agent({
      // rejectUnauthorized: false, // Don't use this - it is insecure, just like --no-verify-ssl in the AWS CLI
      ca: fs.readFileSync('./support/InternalCAChain_PROD.pem')
    })
  }
});
const s3 = new AWS.S3({
  s3BucketEndpoint: true,
  endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
  accessKeyId: "abABcdCD",
  secretAccessKey: "kjlJLlklkLlUYt",
});
// Find the report file
fs.readdirSync("./reports/html/").forEach(file => {
  if (file.startsWith("execution_report")) {
    FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
  }
});
const fileStream = fs.readFileSync(FILE_NAME_LOCAL);
// Call S3 to upload the file to the specified bucket
const uploadParams = {
  Bucket: 'my.bucket',
  Key: path.basename(FILE_NAME_LOCAL),
  Body: fileStream,
  ContentType: "text/html",
  ContentEncoding: 'UTF-8',
  ACL: 'public-read',
};
// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
  console.log("Inside upload..");
  if (err) {
    throw err;
  }
  if (data) {
    s3FileLocation = data.Location;
    console.log('Upload Success. File location: ' + data.Location);
  }
});
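If the custom CA should apply only to this one client rather than to every AWS SDK call in the process, the same https.Agent can be passed through the S3 constructor's httpOptions instead of AWS.config.update. A minimal sketch under that assumption, reusing the certificate path and endpoint from the answer above:
const AWS = require('aws-sdk');
const https = require('https');
const fs = require('fs');
// Reuse whatever CA bundle your environment provides (path taken from the answer above).
const caBundle = fs.readFileSync('./support/InternalCAChain_PROD.pem');
const s3 = new AWS.S3({
  s3BucketEndpoint: true,
  endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
  // accessKeyId / secretAccessKey can be passed here as in the answer above, or resolved from the environment.
  // Scope the trusted CA chain to this client only; the global AWS.config stays untouched.
  httpOptions: {
    agent: new https.Agent({ ca: caBundle })
  }
});
Node.js also honours the NODE_EXTRA_CA_CERTS environment variable, which can be a simpler way to trust an internal CA chain for the whole process without touching the code.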

Related

How to read and upload image to AWS S3 with Nodejs

I have put together an upload.js script that reads a JPG image file from a local drive and uploads it to an AWS S3 bucket.
var fs = require('fs');
var AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-1' });
const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath, { encoding: 'base64' });
let params = {
  params: {
    Bucket: BUCKET_NAME,
    Key: 'test.jpeg',
    Body: content
  }
};
var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
  function (data) {
    console.log("Successfully uploaded photo.");
  },
  function (err) {
    console.error("There was an error uploading: ", err.message);
  }
);
When I run it with node upload.js, the image is uploaded. But when I download it back, the downloaded image is corrupted and cannot be opened with an image viewer. What am I doing wrong?
Add ContentType: image/jpeg to your params object and lose the base64 encoding; this configuration will work for you as well.
var fs = require('fs');
const AWS = require('aws-sdk');
// const s3 = new AWS.S3();
AWS.config.update({ region: 'us-east-1' });
const mime = require('mime');
const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath);
console.log(mime.getType(filepath));
let params = {
  params: {
    Bucket: BUCKET_NAME,
    Key: 'cancel.jpeg',
    Body: content,
    ContentType: mime.getType(filepath),
  },
};
var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
  function (data) {
    console.log('Successfully uploaded photo.');
  },
  function (err) {
    console.error('There was an error uploading: ', err.message);
  }
);
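If the content genuinely has to travel as a base64 string (for example through an API that only carries text), decode it back to a Buffer before handing it to S3 instead of uploading the string itself. A minimal sketch under that assumption, reusing the bucket and file names from the question:
const fs = require('fs');
const AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-1' });
const base64Content = fs.readFileSync('/home/user/test-image.jpeg', { encoding: 'base64' });
const upload = new AWS.S3.ManagedUpload({
  params: {
    Bucket: 'my-bucket-name',
    Key: 'test.jpeg',
    Body: Buffer.from(base64Content, 'base64'), // decode back to raw bytes
    ContentType: 'image/jpeg'
  }
});
upload.promise()
  .then(() => console.log('Successfully uploaded photo.'))
  .catch(err => console.error('There was an error uploading: ', err.message));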

Downloading images from S3 with AWS-SDK Nodejs downloads a corrupt image

I'm trying to download images from AWS S3 using the AWS SDK for Node.js.
The file does get downloaded and the size is also correct. However, the file is corrupted and shows "Decompression error in IDAT".
async download(accessKeyId, secretAccessKey, region, bucketName, baseImage) {
  console.log("Entered download");
  const s3 = new AWS.S3({ region: region });
  const params = {
    Bucket: bucketName,
    Key: `base/${baseImage}`
  };
  const outStream = fs.createWriteStream(this.config.baseFolder + baseImage);
  const awsStream = s3.getObject(params, (uerr, data) => {
    if (uerr) throw uerr;
    console.log(`Base file downloaded successfully!`)
  }).createReadStream().pipe(outStream);
  awsStream.on('end', function () {
    console.log("successfully Downloaded");
  }).on('error', function () {
    console.log("Some error occurred while downloading");
  });
}
Here's the link I followed - https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/requests-using-stream-objects.html
The file should get downloaded without any error. I tried searching on Stack Overflow and there are some similar questions; however, they use Node.js to deliver the output to the frontend, and those solutions aren't working for me.
It wasn't necessary to make a mess of streams and do all this. It can be achieved directly:
async download(accessKeyId, secretAccessKey, region, bucketName, baseImage) {
  console.log("Starting Download... ")
  const s3 = new AWS.S3({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey,
    region: region
  });
  const params = {
    Bucket: bucketName,
    Key: `base/${baseImage}`
  };
  s3.getObject(params, (err, data) => {
    if (err) {
      console.error(err);
      return; // don't try to write an undefined body
    }
    console.log(this.config.baseFolder + baseImage);
    fs.writeFileSync(this.config.baseFolder + baseImage, data.Body);
    console.log("Image Downloaded.");
  });
}
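If the objects are large and buffering the whole Body in memory is a concern, the stream-based route also works, as long as the callback and createReadStream() are not mixed on the same request. A minimal sketch under that assumption, with hypothetical bucket, key, and destination names:
const fs = require('fs');
const AWS = require('aws-sdk');

function downloadToFile(s3, params, destPath) {
  // Pipe the object straight to disk and wait for the write stream to finish.
  const outStream = fs.createWriteStream(destPath);
  s3.getObject(params)
    .createReadStream()
    .on('error', err => console.error('Download failed:', err))
    .pipe(outStream)
    .on('finish', () => console.log('Successfully downloaded', destPath))
    .on('error', err => console.error('Write failed:', err));
}

// Example usage (assumed values):
// downloadToFile(new AWS.S3({ region: 'us-east-1' }), { Bucket: 'my-bucket', Key: 'base/image.png' }, './image.png');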

"Unsupported body payload object" when trying to upload to Amazon S3

I want to upload a file from my frontend to my Amazon S3 (AWS).
I'm using dropzone so I convert my file and send it to my backend.
In my backend, the file looks like this:
{ fieldname: 'file',
  originalname: 'test.torrent',
  encoding: '7bit',
  mimetype: 'application/octet-stream',
  buffer: { type: 'Buffer', data: [Array] },
  size: 7449 }
and when I try to upload my file with my function:
var file = data.patientfile.file.buffer;
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: file };
s3.upload(params, function (err, data) {
  if (err) {
    console.log("******************", err)
  } else {
    console.log("Successfully uploaded data to myBucket/myKey");
  }
});
I get as error:
Unsupported body payload object
Do you know how can I send my file?
I have tried to send it with putObject and get a similar error.
I think you might need to convert the file content (which in this case is probably data.patientfile.file.buffer) to binary:
var base64data = Buffer.from(data, 'binary');
so the params would be like:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: base64data };
Or, if I'm mistaken and the buffer is already binary, then you can try:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: data.patientfile.file.buffer};
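The shape shown in the question ({ type: 'Buffer', data: [Array] }) is what a Node.js Buffer looks like after it has been JSON-serialized, which is common when the file is forwarded between services as JSON. In that case the raw bytes can be rebuilt before the upload. A minimal sketch under that assumption; the key and content-type choices here are for illustration only:
// 'serialized' is assumed to be the JSON form of a Buffer: { type: 'Buffer', data: [ ...bytes ] }
function toBuffer(serialized) {
  return Buffer.from(serialized.data); // rebuild the raw bytes from the byte array
}

var params = {
  Bucket: myBucket,
  Key: data.patientfile.file.originalname,        // illustrative key choice
  Body: toBuffer(data.patientfile.file.buffer),
  ContentType: data.patientfile.file.mimetype
};
s3.upload(params, function (err, result) {
  if (err) return console.error(err);
  console.log("Uploaded to", result.Location);
});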
This is my production code, which is working.
Please note the issue can happen at data1111.
But, to give the full picture, all the key parts of the working code are included below.
client:
// html
<input
  type="file"
  onChange={this.onFileChange}
  multiple
/>

// javascript
onFileChange = event => {
  const files = event.target.files;
  var file = files[0];
  var reader = new FileReader();
  reader.onloadend = function (e) {
    // save this data1111 and send to server
    let data1111 = e.target.result // reader.result // ----------------- data1111
  };
  reader.readAsBinaryString(file);
}
server:
// node.js / javascript
const response = await s3
  .upload({
    Bucket: s3Bucket, // bucket
    Key: s3Path, // folder/file
    // received at the server as data1111 - via the request body (or other means)
    Body: Buffer.from(req.body.data1111, "binary") // ----------------- data1111
  })
  .promise();
return response;
Getting the above code working took a full two days.
Hope this helps someone in the future.
I implemented Glen k's answer with Node.js and it worked for me:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
  accessKeyId: process.env.AWSAccessKeyID,
  secretAccessKey: process.env.AWSSecretAccessKey,
});
let base64data = Buffer.from(file.productImg.data, 'binary')
const params = {
  Bucket: BUCKET_NAME,
  Key: KEY,
  Body: base64data
}
s3.upload(params, function (err, data) {
  if (err) {
    console.log(err)
    throw err;
  }
  console.log(data)
  console.log(`File uploaded successfully. ${data.Location}`);
})

s3 isn't uploading file and getting error of SignatureDoesNotMatch

I'm trying to add images to my S3 bucket in AWS, but it doesn't seem to work. I get a SignatureDoesNotMatch error.
Here's how I'm uploading the file/image:
FrontEnd
const file = e.target.files[0];
const fileParts = file.name.split('.');
const fileName = fileParts[0];
const fileType = fileParts[1];
const response = axios.post('api/aws/sign_s3', { fileName, fileType });
Backend
router.post('/sign_s3', async (req, res) => {
  aws.config.update({
    accessKeyId: config.aws.accessKey,
    secretAccessKey: config.aws.secretKey,
    region: 'us-west-1'
  });
  const s3 = new aws.S3(); // Create a new instance of S3
  const fileName = req.body.fileName;
  const fileType = req.body.fileType;
  const s3Params = {
    Bucket: config.aws.bucketName,
    Key: fileName,
    Expires: 500,
    ContentType: fileType,
    ACL: 'public-read'
  };
  s3.getSignedUrl('putObject', s3Params, (err, data) => {
    if (err) return res.send(err);
    const returnData = {
      signedRequest: data,
      url: `https://sim-to-do.s3.amazonaws.com/${fileName}`
    };
    res.json({ success: true, responseData: returnData });
  });
});
I get two URLs. When I go to the first one, I get the following error:
SignatureDoesNotMatch
The request signature we calculated does not match the signature you provided. Check your key and signing method.
What am I doing wrong? What's the correct way of uploading a file to aws s3?
I was able to fix this issue after removing the Content-Type from the headers.
If you get "Signature does not match", it's highly likely you used a wrong secret access key. Can you double-check access key and secret access key to make sure they're correct?
from awendt answer
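With pre-signed putObject URLs, the usual cause is that the signed parameters and the actual PUT request disagree: the signing code above includes ContentType, so the browser's PUT must send exactly the same Content-Type header (or ContentType must be left out of the signing params). A minimal client-side sketch under that assumption; note that fileType should be a real MIME type such as image/png, the same value sent for signing:
async function uploadWithSignedUrl(file, fileName, fileType) {
  const { data } = await axios.post('api/aws/sign_s3', { fileName, fileType });
  const { signedRequest, url } = data.responseData;
  // The Content-Type header must match the ContentType used in getSignedUrl,
  // otherwise S3 responds with SignatureDoesNotMatch.
  await axios.put(signedRequest, file, { headers: { 'Content-Type': fileType } });
  return url; // public URL of the uploaded object
}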

Copying files from one folder to another folder in s3 of same bucket NODE JS

I am trying to copy a file from one folder to another folder in the same bucket, but I am getting an Access Denied error. If I try it across two different buckets, it works fine.
Please find below what I have tried so far:
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'xxx',
  secretAccessKey: 'xxx'
});
var s3 = new AWS.S3();
var params = {
  Bucket: 'bucketname', /* Another bucket working fine */
  CopySource: 'bucketname/externall/1.txt', /* required */
  Key: "1.txt", /* required */
  ACL: 'public-read',
};
s3.copyObject(params, function (err, data) {
  if (err)
    console.log(err, err); // an error occurred
  else {
    console.log(data); // successful response
  }
});
// List every object under SOURCE_FOLDER/ and copy it to the TARGET prefix within the same bucket.
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'ACCESS_KEY',
  secretAccessKey: 'SECRET_KEY',
  region: 'REGION'
});
var s3 = new AWS.S3();
var bktName = 'BUCKET_NAME';
var options = {
  Bucket: bktName,
  Prefix: 'SOURCE_FOLDER/'
};
s3.listObjectsV2(options, function (err, data) {
  if (err) {
    console.log(err);
  } else {
    data['Contents'].forEach(function (obj) {
      var lockey = obj.Key.replace(/SOURCE/g, 'TARGET');
      // Example: to move from /test/123/ to /test/234/ (or 123/ to 234/), SOURCE = 123 and TARGET = 234
      var params = {
        Bucket: bktName,
        CopySource: '/' + bktName + '/' + obj.Key,
        Key: lockey
      };
      s3.copyObject(params, function (err, data) {
        if (err) {
          console.log(err);
        } else {
          console.log('Inserted', lockey);
        }
      });
    });
  }
});
I used the same copyObject method with the same bucket name in the source and destination paths, and it worked.
Below is my code sample:
{
  Bucket: bucketName,
  CopySource: '/' + bucketName + '/local/Country.png',
  Key: 'local/copy-Country.png'
}
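Two things worth checking if the same-bucket copy still returns Access Denied: the ACL: 'public-read' parameter generally requires the s3:PutObjectAcl permission in addition to s3:PutObject, and the IAM policy must allow s3:GetObject on the source key as well as s3:PutObject on the destination key. If the intent is actually to move the file between folders, the copy can be followed by a delete. A minimal sketch with assumed bucket and key names:
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
// Assumed names for illustration only.
var bucket = 'bucketname';
var sourceKey = 'externall/1.txt';
var targetKey = 'internal/1.txt';
s3.copyObject({
  Bucket: bucket,
  CopySource: '/' + bucket + '/' + sourceKey, // CopySource must include the bucket name
  Key: targetKey
}, function (err) {
  if (err) return console.log(err);
  // Remove the original so the net effect is a "move" between folders.
  s3.deleteObject({ Bucket: bucket, Key: sourceKey }, function (delErr) {
    if (delErr) return console.log(delErr);
    console.log('Moved', sourceKey, 'to', targetKey);
  });
});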
