I have put together an upload.js script that reads a JPEG image file from a local drive and uploads it to an AWS S3 bucket.
var fs = require('fs');
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath, {encoding: 'base64'});
let params = {
params: {
Bucket: BUCKET_NAME,
Key: 'test.jpeg',
Body: content
}
};
var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
function(data) {
console.log("Successfully uploaded photo.");
},
function(err) {
console.error("There was an error uploading: ", err.message);
}
);
When I run it with node upload.js, the image is uploaded. But when I download it back, the downloaded image is corrupted and cannot be opened with an image viewer. What am I doing wrong?
Add ContentType: image/jpeg to your params object and lose the base64 encoding; this configuration will work for you as well.
var fs = require('fs');
const AWS = require('aws-sdk');
// const s3 = new AWS.S3();
AWS.config.update({ region: 'us-east-1' });
const mime = require('mime');
const BUCKET_NAME = 'my-bucket-name';
let filepath = '/home/user/test-image.jpeg';
const content = fs.readFileSync(filepath);
console.log(mime.getType(filepath));
let params = {
params: {
Bucket: BUCKET_NAME,
Key: 'cancel.jpeg',
Body: content,
ContentType: mime.getType(filepath),
},
};
var upload = new AWS.S3.ManagedUpload(params);
var promise = upload.promise();
promise.then(
function (data) {
console.log('Successfully uploaded photo.');
},
function (err) {
console.error('There was an error uploading: ', err.message);
}
);
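For completeness: with the base64 version in the question, S3 stored the base64 text itself, which is why the downloaded file would not open. If the data is only available as a base64 string (for example, handed to you by another API), here is a minimal sketch of decoding it back to raw bytes before uploading, keeping the same ManagedUpload shape as above:
// Assumption: base64Content is a base64-encoded JPEG string,
// e.g. produced by fs.readFileSync(filepath, { encoding: 'base64' }).
const base64Content = fs.readFileSync(filepath, { encoding: 'base64' });
const binaryBody = Buffer.from(base64Content, 'base64'); // decode back to raw bytes

const decodedParams = {
  params: {
    Bucket: BUCKET_NAME,
    Key: 'test.jpeg',
    Body: binaryBody, // the raw bytes, not the base64 text
    ContentType: 'image/jpeg',
  },
};

new AWS.S3.ManagedUpload(decodedParams)
  .promise()
  .then(() => console.log('Successfully uploaded photo.'))
  .catch((err) => console.error('There was an error uploading: ', err.message));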
Consider the following S3 upload code:
const Category = require('../models/category');
const Link = require('../models/link');
const slugify = require('slugify');
const uuidv4 = require('uuid/v4');
const AWS = require('aws-sdk');
// s3
const s3 = new AWS.S3({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: process.env.AWS_REGION,
});
// Using BASE 64
exports.create = (req, res) => {
const {image, name, content} = req.body;
const base64Data = Buffer.from(
image.replace(/^data:image\/\w+;base64,/, ''),
'base64'
);
const type = image.split(';')[0].split('/')[1]; // get the png from "data:image/png;base64,"
// upload image to s3 (The params will be passed to the refactored code)
const params = {
Bucket: 'categories-react',
Key: `category/${uuidv4()}.${type}`,
Body: base64Data,
ACL: 'public-read',
ContentEncoding: 'base64',
ContentType: `image/${type}`,
};
s3.upload(params, (err, data) => {
if (err) {
console.log(err);
res.status(400).json({error: 'Upload to s3 failed'});
}
// ... more logic
return res.json({ success: '....' });
});
};
I want to export the AWS upload code to a different file and pass it the params without repeating the same code. Refactored code:
/**
* Upload image to S3
*/
const AWS = require('aws-sdk');
const uploadImageToS3 = (params) => {
const s3 = new AWS.S3({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: process.env.AWS_REGION,
});
// ... handle the upload
s3.upload(params, (err, data) => {
if (err) {
console.log(err);
res.status(400).json({error: 'Upload to s3 failed'});
}
// else handle the data ...
});
};
export {uploadImageToS3 as default};
But how can I use the callback function from S3 in any file that will use the refactored S3 code?
You must accept the callback as an argument to this function.
As a suggestion, try using the promise API instead of callbacks:
s3.upload(params).promise().then(data => {}).catch(err => {})
You will be able to return a promise from your function and tack on then/catch later.
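For example, here is a minimal sketch of the refactored helper returning that promise (names follow the question's code; it is exported with module.exports here, and the controller usage in the trailing comment is hypothetical):
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  region: process.env.AWS_REGION,
});

// Return the promise so each caller can attach its own then/catch (or await it).
const uploadImageToS3 = (params) => s3.upload(params).promise();

module.exports = uploadImageToS3;

// Hypothetical usage in the controller:
// uploadImageToS3(params)
//   .then((data) => res.json({ location: data.Location }))
//   .catch((err) => res.status(400).json({ error: 'Upload to s3 failed' }));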
I'm trying to upload my HTML result file to AWS S3 after my Protractor test suite execution is complete. I use JavaScript in my automation. Please help me resolve the error below:
static uploadtoS3() {
const AWS = require('aws-sdk');
var FILE_NAME_LOCAL;
var crypt = require("crypto");
fs.readdirSync("./reports/html/").forEach(file => {
if (file.startsWith("execution_report")) {
FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
}
});
console.log("File name: " + FILE_NAME_LOCAL);
// Get file stream
const fileStream = fs.createReadStream(FILE_NAME_LOCAL);
var hash = crypt.createHash("md5")
.update(Buffer.from(FILE_NAME_LOCAL, 'binary'))
.digest("base64");
console.log("Hash: "+hash);
// Call S3 to retrieve upload file to specified bucket
const uploadParams = {
Bucket: 'my.bucket',
Key: 'automation_report.html',
Body: fileStream,
ContentType: "text/html",
ContentMD5: hash,
// CacheControl: "max-age=0,no-cache,no-store,must-revalidate",
ACL: 'public-read',
};
const s3 = new AWS.S3({
endpoint: "https://3site-abc-wip1.nam.nsroot.net",
accessKeyId: <access_key_id>,
secretAccessKey: <secret_access_key>,
signatureVersion: 'v4',
ca: fs.readFileSync('C:\\Users\\AB11111\\InternalCAChain_PROD.pem'),
sslEnabled: true
});
// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
console.log("Inside upload..");
if (err) {
throw err;
} if (data) {
console.log('Upload Success. File location:' + data.Location);
}
});
}
Error: unable to get local issuer certificate at
TLSSocket.onConnectSecure (_tls_wrap.js:1049:34) at TLSSocket.emit
(events.js:182:13) at TLSSocket.EventEmitter.emit (domain.js:442:20)
at TLSSocket._finishInit (_tls_wrap.js:631:8)
I got it working. I needed to add the certificate in AWS.config. The full working code is below; it might help someone. Note: the credentials and URLs below are for representation purposes only and are not real:
const AWS = require('aws-sdk');
const https = require('https');
const fs = require('fs');
const path = require('path');
var FILE_NAME_LOCAL;
AWS.config.update({
httpOptions: {
agent: new https.Agent({
// rejectUnauthorized: false, // Don't use this - this is insecure, just like --no-verify-ssl in AWS cli
ca: fs.readFileSync('./support/InternalCAChain_PROD.pem')
})
}
});
const s3 = new AWS.S3({
s3BucketEndpoint: true,
endpoint: "https://my.bucket.3site-abc.nam.nsroot.net/",
accessKeyId: "abABcdCD",
secretAccessKey: "kjlJLlklkLlUYt",
});
// Get file stream
fs.readdirSync("./reports/html/").forEach(file => {
if (file.startsWith("execution_report")) {
FILE_NAME_LOCAL = process.cwd() + "\\reports\\html\\" + file;
}
});
const fileStream = fs.readFileSync(FILE_NAME_LOCAL);
// Call S3 to retrieve upload file to specified bucket
const uploadParams = {
Bucket: 'my.bucket',
Key: path.basename(FILE_NAME_LOCAL),
Body: fileStream,
ContentType: "text/html",
ContentEncoding: 'UTF-8',
ACL: 'public-read',
};
// Create S3 service object and upload
s3.upload(uploadParams, function (err, data) {
console.log("Inside upload..");
if (err) {
throw err;
} if (data) {
s3FileLocation = data.Location;
console.log('Upload Success. File location:' + data.Location);
}
});
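A side note on the snippet in the question: the MD5 hash there is computed over the file name rather than the file contents, so even with the TLS issue fixed, S3 would reject the upload with a digest error if that ContentMD5 were kept. If you do want the integrity check, here is a minimal sketch of hashing the body itself (reusing the bucket, key, and s3 client from the working code above; crypto is Node's built-in module):
const crypto = require('crypto');

const fileBuffer = fs.readFileSync(FILE_NAME_LOCAL);
// Base64-encoded MD5 of the actual bytes being uploaded, which is what S3 expects in ContentMD5.
const contentMd5 = crypto.createHash('md5').update(fileBuffer).digest('base64');

const uploadParamsWithMd5 = {
  Bucket: 'my.bucket',
  Key: path.basename(FILE_NAME_LOCAL),
  Body: fileBuffer,
  ContentType: 'text/html',
  ContentMD5: contentMd5,
  ACL: 'public-read',
};

s3.upload(uploadParamsWithMd5, function (err, data) {
  if (err) throw err;
  console.log('Upload Success. File location: ' + data.Location);
});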
I have the following AWS settings for uploading files. The file uploads just fine, but no matter what I do I cannot view the file through the AWS object URL. It gives a blank page.
I have tried changing the ACL to 'public-read', providing the file 'Content-Type' and ContentDisposition: 'inline', but none of these seem to solve my problem.
//config file
const AWS = require('aws-sdk');
const env = require('./s3.env.js');
const s3Client = new AWS.S3({
accessKeyId: env.AWS_ACCESS_KEY,
secretAccessKey: env.AWS_SECRET_ACCESS_KEY,
region : env.REGION
});
const uploadParams = {
Bucket: env.Bucket,
ACL: 'public-read',
ContentDisposition: 'inline'
};
const s3 = {};
s3.s3Client = s3Client;
s3.uploadParams = uploadParams;
module.exports = s3;
//upload controller
const s3 = require('../../s3.config.js');
const s3Client = s3.s3Client;
module.exports = {
uploadStuff(req, res) {
console.log(req.file)
const params = s3.uploadParams;
params.Key = req.file.filename;
params.Body = req.file.filename;
params.ContentType = req.file.mimetype;
s3Client.upload(params, (err, data) => {});
}
}
I expect that after the file is successfully uploaded, I should be able to preview it with the Object URL provided.
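One thing worth checking in the controller above: Body is set to req.file.filename, which is only the file's name as a string, so the object stored in S3 contains that text rather than the image, and the object URL renders a blank page. Here is a minimal sketch of sending the actual file contents instead (assuming multer with disk storage, so req.file.path points at the uploaded temporary file):
const fs = require('fs');
const s3 = require('../../s3.config.js');

function uploadStuff(req, res) {
  const params = Object.assign({}, s3.uploadParams); // copy so the shared config object is not mutated
  params.Key = req.file.filename;
  params.Body = fs.createReadStream(req.file.path); // the file bytes, not the file name
  params.ContentType = req.file.mimetype;
  s3.s3Client.upload(params, (err, data) => {
    if (err) return res.status(500).json({ error: 'Upload to s3 failed' });
    return res.json({ url: data.Location });
  });
}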
I want to upload a file from my frontend to my Amazon S3 (AWS) bucket.
I'm using Dropzone, so I convert my file and send it to my backend.
In my backend the file looks like this:
{ fieldname: 'file',
originalname: 'test.torrent',
encoding: '7bit',
mimetype: 'application/octet-stream',
buffer: { type: 'Buffer', data: [Array] },
size: 7449 },
and when I try to upload my file with my function:
var file = data.patientfile.file.buffer;
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: file };
s3.upload(params, function (err, data) {
if (err) {
console.log("******************",err)
} else {
console.log("Successfully uploaded data to myBucket/myKey");
}
});
I get this error:
Unsupported body payload object
Do you know how I can send my file?
I have tried to send it with putObject and get a similar error.
I think you might need to convert the file content (which in this case is probably data.patientfile.file.buffer) to binary:
var base64data = Buffer.from(data, 'binary');
so the params would be like:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: base64data };
Or if I'm mistaken and the buffer is already in binary, then you can try:
var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: data.patientfile.file.buffer};
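Another possibility, given that the question shows buffer: { type: 'Buffer', data: [Array] }: if the buffer was serialized to JSON on its way to the backend, it arrives as a plain object rather than a Buffer, and the SDK raises "Unsupported body payload object". A minimal sketch of rebuilding a real Buffer first (keeping the same Bucket and Key as the question):
const incoming = data.patientfile.file.buffer;
// Rebuild a real Buffer whether we received an actual Buffer or its JSON-serialized form.
const body = Buffer.isBuffer(incoming) ? incoming : Buffer.from(incoming.data);

var params = { Bucket: myBucket, Key: data.patientfile.file.fieldname, Body: body };
s3.upload(params, function (err, uploaded) {
  if (err) {
    console.log("******************", err);
  } else {
    console.log("Successfully uploaded data to myBucket/myKey");
  }
});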
This is my production code, and it is working.
Please note the issue can happen at data1111.
But to give the full picture, I have added all the key parts of the working code below.
client:
// html
<input
type="file"
onChange={this.onFileChange}
multiple
/>
// javascript
onFileChange = event => {
const files = event.target.files;
var file = files[0];
var reader = new FileReader();
reader.onloadend = function(e) {
// save this data1111 and send to server
let data1111 = e.target.result // reader.result // ----------------- data1111
};
reader.readAsBinaryString(file);
}
server:
// node.js/ javascript
const response = await s3
.upload({
Bucket: s3Bucket, // bucket
Key: s3Path, // folder/file
// receiving at the server - data1111 - via request body (or other)
Body: Buffer.from(req.body.data1111, "binary") // ----------------- data1111
})
.promise();
return response;
It took a full two days to get the above code working.
Hope this helps someone in the future.
I implemented Glen k's answer with Node.js and it worked for me:
const AWS = require('aws-sdk');
const s3 = new AWS.S3({
accessKeyId: process.env.AWSAccessKeyID,
secretAccessKey: process.env.AWSSecretAccessKey,
});
let base64data = Buffer.from(file.productImg.data, 'binary')
const params = {
Bucket: BUCKET_NAME,
Key: KEY,
Body: base64data
}
s3.upload(params, function(err, data) {
if (err) {
console.log(err)
throw err;
}
console.log(data)
console.log(`File uploaded successfully. ${data.Location}`);
})
I'm attempting to handle file uploads using a Google Cloud Function. This function uses Busboy to parse the multipart form data and then upload to Google Cloud Storage.
I keep receiving an ERROR: { Error: ENOENT: no such file or directory, open '/tmp/xxx.png' error when triggering the function.
The error seems to occur within the finish callback function when storage.bucket.upload(file) attempts to open the file path /tmp/xxx.png.
Example code
const path = require('path');
const os = require('os');
const fs = require('fs');
const Busboy = require('busboy');
const Storage = require('@google-cloud/storage');
const moment = require('moment');
const _ = require('lodash');
const projectId = 'xxx';
const bucketName = 'xxx';
const storage = new Storage({
projectId: projectId,
});
exports.uploadFile = (req, res) => {
if (req.method === 'POST') {
const busboy = new Busboy({
headers: req.headers
});
const uploads = []
const tmpdir = os.tmpdir();
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
const filepath = path.join(tmpdir, filename)
var obj = {
path: filepath,
name: filename
}
uploads.push(obj);
var writeStream = fs.createWriteStream(obj.path);
file.pipe(writeStream);
});
busboy.on('finish', () => {
_.forEach(uploads, function (file) {
storage
.bucket(bucketName)
.upload(file.path, {
name: moment().format('/YYYY/MM/DD/x') + '-' + file.name
})
.then(() => {
console.log(`${file.name} uploaded to ${bucketName}.`);
})
.catch(err => {
console.error('ERROR:', err);
});
fs.unlinkSync(file.path);
})
res.end()
});
busboy.end(req.rawBody);
} else {
res.status(405).end();
}
}
Solved this with a stream instead of a temporary file. Only handles a single file at the moment though.
https://gist.github.com/PatrickHeneise/8f2c72c16c4e68e829e58ade64aba553#file-gcp-function-storage-file-stream-js
const Busboy = require('busboy')
const { Storage } = require('@google-cloud/storage')

function asyncBusboy(req, res) {
return new Promise((resolve, reject) => {
const storage = new Storage()
const bucket = storage.bucket(process.env.BUCKET)
const fields = []
const busboy = Busboy({
headers: req.headers,
limits: {
fileSize: 10 * 1024 * 1024
}
})
busboy.on('field', (key, value) => {
fields[key] = value
})
busboy.on('file', (name, file, fileInfo) => {
const { mimeType } = fileInfo
const destFile = bucket.file(fileInfo.filename)
const writeStream = destFile.createWriteStream({
metadata: {
contentType: fileInfo.mimeType,
metadata: {
originalFileName: fileInfo.filename
}
}
})
file.pipe(writeStream)
})
busboy.on('close', function () {
return resolve({ fields })
})
if (req.rawBody) {
busboy.end(req.rawBody)
} else {
req.pipe(busboy)
}
})
}
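For reference, a minimal sketch of calling this helper from a Cloud Function handler (the uploadFile name is just an example; the files themselves are streamed to the bucket inside asyncBusboy):
exports.uploadFile = async (req, res) => {
  if (req.method !== 'POST') return res.status(405).end()
  try {
    const { fields } = await asyncBusboy(req, res)
    res.status(200).json({ fields }) // the uploaded files have already been written to the bucket
  } catch (err) {
    console.error('ERROR:', err)
    res.status(500).end()
  }
}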