Downloading a file from S3 into a Zapier Task - javascript

I would like to download a file from S3 in Zapier and use it as a file ("exists but not shown") in the rest of a Zap. Strangely, the S3 integration doesn't offer this (although you can do it with Box, Drive, etc.).
I guess we will need to use Code by Zapier, and I have the basic JS below, but I think I need to specify the region, and I'm not sure how to link the output into the Zap.
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: ".. your key ..",
  secretAccessKey: ".. your secret key ..",
});
var s3 = new AWS.S3();
s3.getObject(
  { Bucket: "my-bucket", Key: "my-picture.jpg" },
  function (error, data) {
    if (error != null) {
      alert("Failed to retrieve an object: " + error);
    } else {
      alert("Loaded " + data.ContentLength + " bytes");
      // do something with data.Body
    }
  }
);
output = need to put my file here!!
I'd appreciate any pointers. I am surprised this has not been asked before; I hope it doesn't mean I have missed something very obvious!

Here is an example of getting a JSON document from S3:
var AWS = require('aws-sdk');
const s3 = new AWS.S3({
  accessKeyId: ".. your key ..",
  secretAccessKey: ".. your secret key ..",
  region: "region",
});
var s3params = {
  Bucket: "bucket",
  Key: "key",
};
s3.getObject(s3params, function (err, data) {
  if (err) {
    console.error(err.message);
  } else {
    let jsonString = data.Body.toString();
    let result = JSON.parse(jsonString);
    return result;
  }
});
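To actually link the downloaded file into the rest of the Zap, the Code step has to hand its result back through its output. Below is a minimal sketch, assuming the aws-sdk module can be required inside Code by Zapier and using its async callback to return the result; the region, bucket, and key are placeholders:

const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  accessKeyId: '.. your key ..',
  secretAccessKey: '.. your secret key ..',
  region: 'eu-west-1', // placeholder: set this to the bucket's region
});

s3.getObject({ Bucket: 'my-bucket', Key: 'my-picture.jpg' }, function (err, data) {
  if (err) {
    callback(err);
  } else {
    // Zapier step output must be JSON-serializable, so pass the file
    // bytes along as a base64 string for later steps to use.
    callback(null, {
      filename: 'my-picture.jpg',
      contentBase64: data.Body.toString('base64'),
    });
  }
});

Later steps can then map the filename and contentBase64 fields from this Code step.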

Related

Download File from AWS S3 Bucket to Local using s3.getObject

I am trying to write a function that retrieves a file from an S3 bucket and downloads it to my local machine. How can I achieve that in angular.js?
const AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: ".. your key ..",
  secretAccessKey: ".. your secret key ..",
});
const s3 = new AWS.S3();
s3.getObject(
  { Bucket: "my-bucket", Key: "my-picture.jpg" },
  function (error, data) {
    if (error != null) {
      alert("can't retrieve object: " + error);
    } else {
      alert("downloaded " + data.ContentLength + " bytes");
      // download retrieved file to host machine
    }
  }
);
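Since the snippet above is Node-style (it uses require), one way to finish the job is to write data.Body to disk once it arrives. A minimal sketch, assuming a Node environment and a placeholder local path:

const fs = require('fs');
const AWS = require('aws-sdk');

AWS.config.update({
  accessKeyId: '.. your key ..',
  secretAccessKey: '.. your secret key ..',
});

const s3 = new AWS.S3();
s3.getObject({ Bucket: 'my-bucket', Key: 'my-picture.jpg' }, function (error, data) {
  if (error) {
    console.error("Can't retrieve object: " + error);
  } else {
    // data.Body is a Buffer in Node, so it can be written straight to a file.
    fs.writeFileSync('./my-picture.jpg', data.Body);
    console.log('Downloaded ' + data.ContentLength + ' bytes');
  }
});

In a browser/angular.js context there is no local filesystem to write to; there the usual approach is to turn data.Body into a Blob and trigger a download link instead.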

AWS S3 - Error: unable to get local issuer certificate

I'm trying to upload my HTML result file to AWS S3 after my Protractor test suite execution is complete. I use JavaScript in my automation. Please help me resolve the error here:
static uploadtoS3() {
  const AWS = require('aws-sdk');
  const fs = require('fs');
  var FILE_NAME_LOCAL;
  var crypt = require("crypto");
  fs.readdirSync("./reports/html/").forEach(file => {
    if (file.startsWith("execution_report")) {
      FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
    }
  });
  console.log("File name: " + FILE_NAME_LOCAL);
  // Get file stream
  const fileStream = fs.createReadStream(FILE_NAME_LOCAL);
  // Note: this hashes the file *name*, not the file contents
  var hash = crypt.createHash("md5")
    .update(Buffer.from(FILE_NAME_LOCAL, 'binary'))
    .digest("base64");
  console.log("Hash: " + hash);
  // Call S3 to upload the file to the specified bucket
  const uploadParams = {
    Bucket: 'my.bucket',
    Key: 'automation_report.html',
    Body: fileStream,
    ContentType: "text/html",
    ContentMD5: hash,
    // CacheControl: "max-age=0,no-cache,no-store,must-revalidate",
    ACL: 'public-read',
  };
  const s3 = new AWS.S3({
    endpoint: "https://3site-abc-wip1.nam.nsroot.net",
    accessKeyId: <access_key_id>,         // placeholder, not a real key
    secretAccessKey: <secret_access_key>, // placeholder, not a real key
    signatureVersion: 'v4',
    ca: fs.readFileSync('C:\\Users\\AB11111\\InternalCAChain_PROD.pem'),
    sslEnabled: true
  });
  // Create S3 service object and upload
  s3.upload(uploadParams, function (err, data) {
    console.log("Inside upload..");
    if (err) {
      throw err;
    }
    if (data) {
      console.log('Upload Success. File location:' + data.Location);
    }
  });
}
Error: unable to get local issuer certificate
    at TLSSocket.onConnectSecure (_tls_wrap.js:1049:34)
    at TLSSocket.emit (events.js:182:13)
    at TLSSocket.EventEmitter.emit (domain.js:442:20)
    at TLSSocket._finishInit (_tls_wrap.js:631:8)
I made it work. I needed to add the certificate in AWS.config. The full working code is below; this might help someone. Note: the credentials and URLs below are for representation purposes only and are not real:
const AWS = require('aws-sdk');
const https = require('https');
const fs = require('fs');
const path = require('path');
var FILE_NAME_LOCAL;
var s3FileLocation;
AWS.config.update({
  httpOptions: {
    agent: new https.Agent({
      // rejectUnauthorized: false, // Don't use this - it is insecure, just like --no-verify-ssl in the AWS CLI
      ca: fs.readFileSync('./support/InternalCAChain_PROD.pem')
    })
  }
});
const s3 = new AWS.S3({
  s3BucketEndpoint: true,
  endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
  accessKeyId: "abABcdCD",
  secretAccessKey: "kjlJLlklkLlUYt",
});
// Get file stream
fs.readdirSync("./reports/html/").forEach(file => {
  if (file.startsWith("execution_report")) {
    FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
  }
});
const fileStream = fs.readFileSync(FILE_NAME_LOCAL);
// Call S3 to upload the file to the specified bucket
const uploadParams = {
  Bucket: 'my.bucket',
  Key: path.basename(FILE_NAME_LOCAL),
  Body: fileStream,
  ContentType: "text/html",
  ContentEncoding: 'UTF-8',
  ACL: 'public-read',
};
// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
  console.log("Inside upload..");
  if (err) {
    throw err;
  }
  if (data) {
    s3FileLocation = data.Location;
    console.log('Upload Success. File location:' + data.Location);
  }
});

Downloading images from S3 with the AWS SDK for Node.js downloads a corrupt image

I'm trying to download images from AWS S3 using the AWS SDK for Node.js.
The file does get downloaded, and the size is also correct. However, the file is corrupted and shows "Decompression error in IDAT".
async download(accessKeyId, secretAccessKey, region, bucketName, baseImage) {
  console.log("Entered download");
  const s3 = new AWS.S3({ region: region });
  const params = {
    Bucket: bucketName,
    Key: `base/${baseImage}`
  };
  const outStream = fs.createWriteStream(this.config.baseFolder + baseImage);
  const awsStream = s3.getObject(params, (uerr, data) => {
    if (uerr) throw uerr;
    console.log(`Base file downloaded successfully!`)
  }).createReadStream().pipe(outStream);
  awsStream.on('end', function () {
    console.log("successfully Downloaded");
  }).on('error', function () {
    console.log("Some error occurred while downloading");
  });
}
Here's the link I followed - https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/requests-using-stream-objects.html
The file should get downloaded without any error. I tried searching on Stack Overflow and there are some similar questions; however, they use Node.js to deliver the output to the frontend, and those solutions aren't working for me.
It wasn't necessary to make a mess of things and do all this.
It can be achieved directly:
async download(accessKeyId, secretAccessKey, region, bucketName, baseImage) {
  console.log("Starting Download... ")
  const s3 = new AWS.S3({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey,
    region: region
  });
  const params = {
    Bucket: bucketName,
    Key: `base/${baseImage}`
  };
  s3.getObject(params, (err, data) => {
    if (err) {
      console.error(err);
      return;
    }
    console.log(this.config.baseFolder + baseImage);
    // data.Body is a Buffer, so writing it directly keeps the image intact.
    fs.writeFileSync(this.config.baseFolder + baseImage, data.Body);
    console.log("Image Downloaded.");
  });
}
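For larger images, the stream-only form from the linked AWS guide also works and avoids buffering the whole object in memory. A minimal sketch, assuming aws-sdk v2 and placeholder bucket, key, and destination path:

const fs = require('fs');
const AWS = require('aws-sdk');

const s3 = new AWS.S3({ region: 'us-east-1' }); // placeholder region
const params = { Bucket: 'my-bucket', Key: 'base/my-image.png' }; // placeholder names

s3.getObject(params)              // no callback here; the stream alone consumes the response
  .createReadStream()
  .on('error', err => console.error('Download failed:', err))
  .pipe(fs.createWriteStream('./my-image.png'))
  .on('finish', () => console.log('Download complete'));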

How to get an upload's URL from the Amazon S3 service with Node (auto-signed)

I am attempting to use Node to upload a file to an S3 server, which I can do successfully. However, the reason for using S3 is that I need to upload a file for a user's profile picture. How would I get the URL (of the uploaded file in S3) in my code and store it in the DB, so it is viewable on the frontend? I also need to append some kind of unique string to the file name so it doesn't get overwritten. How would I go about doing this?
Thank you.
const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');

function uploadFileToS3Bucket(filePath) {
  AWS.config.update({
    accessKeyId: 'AWS ACCESS KEY',
    secretAccessKey: 'AWS SECRET KEY'
  });
  var s3 = new AWS.S3();
  var params = {
    Bucket: 'AWS_BUCKET_NAME',
    Body: fs.createReadStream(filePath),
    Key: "BB_Teams_PDF_" + Date.now() + "_" + path.basename(filePath)
  };
  s3.upload(params, function (err, data) {
    if (err) {
      console.log("Error", err);
    }
    // success
    if (data) {
      console.log("Uploaded in:", data.Location);
    }
  });
}
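To get the uploaded file's URL back out of the function so it can be stored in the database, the promise form of upload is convenient. A minimal sketch, assuming aws-sdk v2 and that the Date.now() suffix is unique enough for this use case; the function name, credentials, and bucket name are placeholders:

const AWS = require('aws-sdk');
const fs = require('fs');
const path = require('path');

async function uploadProfilePicture(filePath) {
  const s3 = new AWS.S3({
    accessKeyId: 'AWS ACCESS KEY',     // placeholder credentials
    secretAccessKey: 'AWS SECRET KEY',
  });
  const params = {
    Bucket: 'AWS_BUCKET_NAME',
    Body: fs.createReadStream(filePath),
    // Date.now() keeps keys unique so earlier uploads are not overwritten.
    Key: 'BB_Teams_PDF_' + Date.now() + '_' + path.basename(filePath),
  };
  const data = await s3.upload(params).promise();
  return data.Location; // the uploaded object's URL, ready to save in the DB
}

The caller can then await uploadProfilePicture(filePath) and persist the returned URL on the user's profile record.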

Copying files from one folder to another in the same S3 bucket (Node.js)

I am trying to copy a file from one folder to another folder in the same bucket, but I am getting an Access Denied error. If I try to do it across two different buckets, it works fine.
Please find what I have tried so far below:
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'xxx',
  secretAccessKey: 'xxx'
});
var s3 = new AWS.S3();
var params = {
  Bucket: 'bucketname', /* Another bucket working fine */
  CopySource: 'bucketname/externall/1.txt', /* required */
  Key: "1.txt", /* required */
  ACL: 'public-read',
};
s3.copyObject(params, function (err, data) {
  if (err)
    console.log(err, err); // an error occurred
  else {
    console.log(data); // successful response
  }
});
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: 'ACCESS_KEY',
  secretAccessKey: 'SECRET_KEY',
  region: 'REGION'
});
var s3 = new AWS.S3();
var bktName = 'BUCKET_NAME';
var options = {
  Bucket: bktName,
  Prefix: 'SOURCE_FOLDER/'
};
s3.listObjectsV2(options, function (err, data) {
  if (err) {
    console.log(err);
  } else {
    data['Contents'].forEach(function (obj) {
      var lockey = obj.Key.replace(/SOURCE/g, 'TARGET');
      // Example: if you want to move from /test/123/ to /test/234/ (or 123/ to 234/), then SOURCE = 123 and TARGET = 234
      var params = {
        Bucket: bktName,
        CopySource: '/' + bktName + '/' + obj.Key,
        Key: lockey
      };
      s3.copyObject(params, function (err, data) {
        if (err) {
          console.log(err);
        } else {
          console.log('Inserted', lockey);
        }
      });
    });
  }
});
I used the same copyObject method with the same bucket name in the source and destination paths, and it worked.
Below is my code sample:
{
  Bucket: bucketName,
  CopySource: '/' + bucketName + '/local/Country.png',
  Key: 'local/copy-Country.png'
}
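If the intent is to move the file rather than keep both copies, deleting the source after a successful copy completes the operation. A minimal sketch, assuming aws-sdk v2 and the same bucket and key names as the sample above:

const AWS = require('aws-sdk');

const s3 = new AWS.S3({ region: 'REGION' }); // placeholder region

async function moveObject(bucketName, sourceKey, targetKey) {
  // Copy first, then remove the original only if the copy succeeded.
  await s3.copyObject({
    Bucket: bucketName,
    CopySource: '/' + bucketName + '/' + sourceKey,
    Key: targetKey,
  }).promise();
  await s3.deleteObject({ Bucket: bucketName, Key: sourceKey }).promise();
}

moveObject('BUCKET_NAME', 'local/Country.png', 'local/copy-Country.png')
  .then(() => console.log('Moved'))
  .catch(err => console.error(err));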
