I am using JavaScript, Node.js, and the AWS SDK. There are many examples of uploading existing files to S3 directly with a signed URL, but now I am trying to upload strings and create a file in S3 without any locally saved files. Any suggestions?
Please follow the example here:
http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
Convert your string to a Buffer and pass it as the Body. It should work.
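For example, a minimal sketch assuming the v2 aws-sdk and placeholder bucket/key names:
// Minimal sketch: upload a plain string as an S3 object, no local file involved.
// 'my-bucket' and 'greeting.txt' are placeholders.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

s3.putObject({
    Bucket: 'my-bucket',
    Key: 'greeting.txt',
    Body: Buffer.from('hello from a string'),
    ContentType: 'text/plain'
}, (err, data) => {
    if (err) console.error(err);
    else console.log('Uploaded, ETag:', data.ETag);
});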
I have not tried amazon-web-services, amazon-s3, or aws-sdk, but if you are able to upload File or FormData objects, you can create either (or both) in JavaScript and upload the object.
// create a `File` object
const file = new File(["abc"], "file.txt", {type:"text/plain"});
// create a `Blob` object
// will be converted to a `File` object when passed to `FormData`
const blob = new Blob(["abc"], {type:"text/plain"});
const fd = new FormData();
fd.append("file", blob, "file.txt");
You could try something like this:
var AWS = require('aws-sdk');
var fs = require('fs');

exports.upload = function (req, res) {
    var file = req.files.file;
    fs.readFile(file.path, function (err, data) {
        if (err) throw err; // Something went wrong!
        var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});
        s3bucket.createBucket(function () {
            var params = {
                Key: file.originalFilename, // file.name doesn't exist as a property
                Body: data
            };
            s3bucket.upload(params, function (err, data) {
                // Whether there is an error or not, delete the temp file
                fs.unlink(file.path, function (err) {
                    if (err) {
                        console.error(err);
                    }
                    console.log('Temp file deleted');
                });
                console.log("PRINT FILE:", file);
                if (err) {
                    console.log('ERROR MSG: ', err);
                    res.status(500).send(err);
                } else {
                    console.log('Successfully uploaded data');
                    res.status(200).end();
                }
            });
        });
    });
};
I am trying to pass an image uploaded from a React app through Express to a managed S3 bucket. The platform/host I am using creates and manages the S3 bucket and generates upload and access URLs. This all works fine (I have tested a generated upload URL in Postman with an image in a binary body and it worked perfectly).
My problem is passing the image through Express. I am using multer to get the image from the form, but I am assuming multer is turning that image into some kind of file object while S3 is expecting some sort of blob or stream.
In the following code, the image in req.file exists, I get a 200 response from S3 with no errors, and when I visit the asset URL the URL works, but the image itself is missing.
const router = Router();
const upload = multer()

router.post('/', upload.single('file'), async (req, res) => {
    console.log(req.file)
    const asset = req.file
    const assetPath = req.headers['asset-path']
    let s3URLs = await getPresignedURLS(assetPath)

    const sendAsset = await fetch(
        s3URLs.urls[0].upload_url, // the s3 upload url
        {
            method: 'PUT',
            headers: {
                "Content-Type": asset.mimetype
            },
            body: asset,
            redirect: 'follow'
        }
    )
    console.log("s3 response", sendAsset)

    res.status(200).json({"url": s3URLs.urls[0].access_url });
});

export default router;
I am not sure what to do to convert what multer gives me into something that AWS S3 will accept. I am also open to getting rid of multer if there is an easier way to upload binary files through Express.
Instead of multer, you can use multiparty to get the file data from the request object, and to upload the file to an S3 bucket you can use aws-sdk.
const AWS = require("aws-sdk");
const multiparty = require("multiparty");
/**
* Helper method which takes the request object and returns a promise with a data.
*/
const getDataFromRequest = (req) =>
new Promise(async(resolve, reject) => {
const form = new multiparty.Form();
await form.parse(req, (err, fields, files) => {
if (err) reject(err);
const bucketname = fields.bucketname[0];
const subfoldername = fields.subfoldername[0];
const file = files["file"][0]; // get the file from the returned files object
if (!file) reject("File was not found in form data.");
else resolve({
file,
bucketname,
subfoldername
});
});
});
/**
* Helper method which takes the request object and returns a promise with the AWS S3 object details.
*/
const uploadFileToS3Bucket = (
file,
bucketname,
subfoldername,
options = {}
) => {
const s3 = new AWS.S3();
// turn the file into a buffer for uploading
const buffer = readFileSync(file.path);
var originalname = file.originalFilename;
var attach_split = originalname.split(".");
var name = attach_split[0];
// generate a new random file name
const fileName = name;
// the extension of your file
const extension = extname(file.path);
console.log(`${fileName}${extension}`);
const params = {
Bucket: bucketname, //Bucketname
ACL: "private", //Permission
Key: join(`${subfoldername}/`, `${fileName}${extension}`), // File name you want to save as in S3
Body: buffer, // Content of file
};
// return a promise
return new Promise((resolve, reject) => {
return s3.upload(params, (err, result) => {
if (err) reject(err);
else resolve(result); // return the values of the successful AWS S3 request
});
});
};
router.post('/', upload.single('file'), async(req, res) => {
try {
// extract the file from the request object
const {
file,
bucketname,
subfoldername
} = await getDataFromRequest(req);
// Upload File to specified bucket
const {
Location,
ETag,
Bucket,
Key
} = await uploadFileToS3Bucket(
file,
bucketname,
subfoldername
);
let response = {};
res["Location"] = Location;
response["ETag"] = ETag;
response["Bucket"] = Bucket;
response["Key"] = Key;
res.status(200).json(response);
} catch (error) {
throw error;
}
});
The request body should be form data with the following fields:
bucketname:
subfoldername:
file: FileData
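For reference, a rough sketch of how a browser client could build that form data (the endpoint path is a placeholder):
// Rough sketch of the matching client-side request; '/upload' is a placeholder path.
const fd = new FormData();
fd.append('bucketname', 'my-bucket');
fd.append('subfoldername', 'uploads');
fd.append('file', fileInput.files[0]); // fileInput is an <input type="file"> element

fetch('/upload', { method: 'POST', body: fd })
    .then(res => res.json())
    .then(json => console.log(json))
    .catch(err => console.error(err));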
For anyone who ever stumbles across this question: the solution was to create a custom multer storage engine. Inside the engine you get access to the file as a stream property that S3 accepted (with the correct headers).
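A rough sketch of what such a storage engine can look like (not the poster's exact code; it reuses the getPresignedURLS helper from the question, and the node-fetch usage is an assumption):
// Rough sketch: a custom multer storage engine that streams the incoming file
// to a presigned S3 URL. Depending on the endpoint, a Content-Length header may
// also be required ("the correct headers" mentioned above).
const fetch = require('node-fetch');

class S3PresignedStorage {
    _handleFile(req, file, cb) {
        getPresignedURLS(req.headers['asset-path'])
            .then(s3URLs =>
                fetch(s3URLs.urls[0].upload_url, {
                    method: 'PUT',
                    headers: { 'Content-Type': file.mimetype },
                    body: file.stream // multer hands the incoming file to the engine as a stream
                })
            )
            .then(s3res => cb(null, { s3Status: s3res.status }))
            .catch(cb);
    }

    _removeFile(req, file, cb) {
        cb(null); // nothing is stored locally, so there is nothing to clean up
    }
}

const upload = multer({ storage: new S3PresignedStorage() });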
I'm trying to upload images from the browser to Amazon S3. The code below sends some sort of blob to Amazon S3 just fine, but I can't read the resulting file in a browser; it doesn't seem to know it's an image file.
I send it to NodeJS from the browser:
let myReader = new FileReader();
myReader.onloadend = (e) => { app.ws.send(myReader.result); }
myReader.readAsDataURL(e.target.files[0]);
In NodeJS I send it to S3:
const s3 = new AWS.S3();
const params = { Bucket: <bucketName>, Key: fileName, Body: imgData, ACL: "public-read", ContentEncoding: 'base64' };
s3.putObject(params, (err, data) => {
    if (err) throw err;
});
Check the AWS S3 guide.
This doc contains the logic needed to upload an image from the browser to an S3 bucket:
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/s3-example-photo-album.html
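The gist of that example, as a rough sketch (region, identity pool ID, and bucket name are placeholders, and file is assumed to be a File object from an <input type="file">):
// Rough sketch of the photo-album approach: browser upload with the v2 SDK
// and Cognito credentials. All identifiers below are placeholders.
AWS.config.update({
    region: 'us-east-1',
    credentials: new AWS.CognitoIdentityCredentials({
        IdentityPoolId: 'us-east-1:EXAMPLE-IDENTITY-POOL-ID'
    })
});

const upload = new AWS.S3.ManagedUpload({
    params: { Bucket: 'my-photo-bucket', Key: file.name, Body: file }
});

upload.promise()
    .then(data => console.log('Uploaded to', data.Location))
    .catch(err => console.error(err));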
It turns out you need to modify the base64 image data coming in and explicitly set the ContentType:
const s3 = new AWS.S3();
const type = imgData.split(';')[0].split('/')[1];
imgData = Buffer.from(imgData.replace(/^data:image\/\w+;base64,/, ""), 'base64');

let params = { Bucket: <bucketName>, Key: fileName, Body: imgData,
    ACL: "public-read", ContentType: "image/" + type, ContentEncoding: 'base64' };

s3.upload(params, (err, data) => {
    if (err) throw err;
    // ... Do something ...
});
I'm creating a temporary JSON file in my NodeJS backend which holds the information the user has filled in on a form. At the end of the form, when the user clicks the download button, I run a Python script in NodeJS to validate the data, then create a temporary file of this JSON data and return it to the user as an HTTP GET response.
Right now I'm using a timer to delete this temporary file after 10 seconds, which is bad. I want to know how to detect when the user has fully downloaded the file to their local disk from the browser, so I can delete this temporary file in the backend.
The client Angular code:
$scope.downloadForm = function() {
    var data = formDataFactory.getDataForSubmission();
    var url = '/FormSubmission/DownloadData';

    // Below POST call will invoke NodeJS to write the temporary file
    $http.post(url, data)
        .success(function(data, status) {
            $scope.downloadPath = data.filePath;
            $scope.downloadFile = data.fileName;
            url = '/tmp/forms/' + $scope.downloadFile;

            // If the temporary file writing is successful, then I get it with a GET method
            $http.get(url)
                .success(function(data, status) {
                    $log.debug("Successfully got download data");
                    $window.location = $scope.downloadPath;
                })
                .error(function(data, status) {
                    $log.error("The get data FAILED");
                });
        })
        .error(function(data, status) {
            $log.error("The post data FAILED");
        });
}

$scope.download = function() {
    $scope.downloadForm();
    setTimeout(function() { // BAD idea
        $scope.deleteForm($scope.downloadPath);
    }, 10000);
}
The server NodeJS code:
// POST method for creating the temporary JSON file
router.post('/FormSubmission/DownloadData', function(req, res) {
    if (!req.body) return res.sendStatus(400); // Failed to get data, return error

    var templateString = formTmpPath + 'form-XXXXXX.json';
    var tmpName = tmp.tmpNameSync({template: templateString});

    fs.writeFile(tmpName, JSON.stringify(req.body, null, 4), function(err) {
        if (err) {
            res.sendStatus(400);
        } else {
            res.json({ fileName: path.basename(tmpName), filePath: tmpName, out: '' });
        }
    });
});

// GET method for downloading the temporary form JSON file
router.get('/tmp/forms/:file', function(req, res) {
    var file = req.params.file;
    file = formTmpPath + file;
    res.download(file, downloadFileName, function(err) {
        if (err) debug("Failed to download file");
    });
});
Update:
I'm trying to use a stream now to send the data back, but for some reason this GET method is called twice! I can't understand why!
// GET method for downloading the temporary form JSON file
router.get('/tmp/forms/:file', function(req, res) {
    var filename = "ipMetaData.json";
    var file = req.params.file;
    file = formTmpPath + file;

    var mimetype = mime.lookup(file);
    const stats = fs.statSync(file);

    res.setHeader('Content-disposition', 'attachment; filename=' + filename);
    res.setHeader('Content-type', mimetype);
    res.setHeader('Content-Length', stats.size);

    console.log("Will send the download response for file: ", file);
    //var path = __dirname + "\\..\\tmp\\forms\\form-auSD9X.json";
    console.log("Creating read stream for path: " + file);
    var stream = fs.createReadStream(file);

    // This will wait until we know the readable stream is actually valid before piping
    stream.on('open', function () {
        // This just pipes the read stream to the response object (which goes to the client)
        stream.pipe(res);
    });

    // This catches any errors that happen while creating the readable stream (usually invalid names)
    stream.on('error', function(err) {
        console.log("Caught an error in stream");
        console.log(err);
        res.end(err);
    });

    stream.on('end', () => {
        console.log("Finished streaming");
        res.end();
        //fs.unlink(file);
    });
});
If I understand your problem correctly, you can do this in different ways, but the easiest is to remove the timer and instead delete the file on the backend after the download completes, as follows:
router.get('/tmp/forms/:file', function(req, res) {
    var file = req.params.file;
    file = formTmpPath + file;
    res.download(file, downloadFileName, function(err) {
        if (err) debug("Failed to download file");
        else {
            // delete the file
            fs.unlink(file, function(err) {
                if (err) debug(err);
            });
        }
    });
});
The problem was doing a GET call and then changing the location to a file path that was the same as the GET URL. I changed my API path and used the stream's .on('end', callback) to remove the file.
// If the temporary file writing is successful, then I get it with a GET method
$http.get(url) // --> this URL should be different from $window.location
    .success(function(data, status) {
        $log.debug("Successfully got download data");
        $window.location = $scope.downloadPath;
    })
    .err
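With the GET path separated from $window.location, the stream's 'end' handler in the route above can then take care of deleting the temp file, something like:
// Sketch: delete the temp file once it has been fully sent.
stream.on('end', () => {
    console.log("Finished streaming");
    res.end();
    fs.unlink(file, (err) => {
        if (err) console.error("Failed to delete temp file", err);
    });
});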
Save image in a folder using NodeJS and AngularJS
Hi, I want to save images in a folder. I have my back end in NodeJS and my front end in AngularJS using ng-flow; my Angular service code is the following:
addImages: function (file) {
    var uploadUrl = localPath + "addImages";
    var fd = new FormData();
    fd.append('file', file);
    return $http.post(uploadUrl, fd, {
        transformRequest: angular.identity,
        headers: {
            'Content-Type': undefined,
            'file-name': undefined,
            'file-location': undefined,
            'user-id': undefined
        }
    });
}
and I have the following POST service in Node:
router.post('/addImages', function (req, res) {
    console.log(req);
    var tempPath = req.files.file.path;
    console.log(tempPath);
    var targetPath = path.resolve('/../static/image/image.png');
    console.log(targetPath);
    console.log(req.files.file.name);
    if (path.extname(req.files.file.name).toLowerCase() === '.png') {
        fs.rename(tempPath, targetPath, function(err) {
            if (err) throw err;
            console.log("Upload completed!");
        });
    } else {
        fs.unlink(tempPath, function () {
            if (err) throw err;
            console.error("Only .png files are allowed!");
        });
    }
    // ...
});
The error that I get is POST /assetImage/addImages 500.
Does anyone know an easier way to get an image in Angular and save it in a folder using Node?
Edit
When I print file using console.log I get a File object like File { name: "howls-moving-castle-40994-1920x1200…", lastModified: 1431122408000, lastModifiedDate: Date 2015-05-08T22:00:08.000Z, size: 910465, type: "image/jpeg" }, but when I print fd I get an empty object, FormData { }. I also tried passing file without using FormData, but in Node I still get an empty object.
If you are using Express.js, body-parser does not handle multipart bodies, due to their complex and typically large nature. For multipart bodies, you may be interested in the following modules:
busboy
multiparty
formidable
multer
Any of the modules listed above can parse the multipart body and give you access to the uploaded file.
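For example, here is a minimal sketch using multer to save the uploaded image into a folder (the route path mirrors the question, but the destination directory is an assumption):
// Minimal sketch: parse the multipart body with multer and save the image to disk.
// The 'file' field name matches what the Angular service appends to FormData.
var express = require('express');
var multer = require('multer');
var path = require('path');

var router = express.Router();
var upload = multer({ dest: path.join(__dirname, 'static/image') });

router.post('/addImages', upload.single('file'), function (req, res) {
    if (!req.file) return res.status(400).send('No file received');
    console.log('Saved', req.file.originalname, 'as', req.file.path);
    res.sendStatus(200);
});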
Update: For future reference, Amazon have now updated the documentation from what was there at the time of asking. As per @Loren Segal's comment below:
We've corrected the docs in the latest preview release to document this parameter properly. Sorry about the mixup!
I'm trying out the developer preview of the AWS SDK for Node.js and want to upload a zipped tarball to S3 using putObject.
According to the documentation, the Body parameter should be...
Body - (Base64 Encoded Data)
...therefore, I'm trying out the following code...
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Read in the file, convert it to base64, store to S3
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var base64data = new Buffer(data, 'binary').toString('base64');
    var s3 = new AWS.S3();
    s3.client.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: base64data
    }).done(function (resp) {
        console.log('Successfully uploaded package.');
    });
});
Whilst I can then see the file in S3, if I download it and attempt to decompress it I get an error that the file is corrupted. Therefore it seems that my method for 'base64 encoded data' is off.
Can someone please help me to upload a binary file using putObject?
You don't need to convert the buffer to a base64 string. Just set Body to data and it will work.
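In other words, something along these lines (a sketch using the current s3.putObject call rather than the preview's s3.client form):
// Sketch: pass the Buffer from fs.readFile straight through as the Body.
fs.readFile('myarchive.tgz', function (err, data) {
    if (err) { throw err; }
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: data // no base64 conversion needed
    }, function (err) {
        if (err) { throw err; }
        console.log('Successfully uploaded package.');
    });
});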
Here is a way to send a file using streams, which might be necessary for large files and will generally reduce memory overhead:
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: 'key', secretAccessKey: 'secret' });

// Stream the file straight to S3 instead of reading it into memory
var fileStream = fs.createReadStream('myarchive.tgz');
fileStream.on('error', function (err) {
    if (err) { throw err; }
});
fileStream.on('open', function () {
    var s3 = new AWS.S3();
    s3.putObject({
        Bucket: 'mybucketname',
        Key: 'myarchive.tgz',
        Body: fileStream
    }, function (err) {
        if (err) { throw err; }
    });
});
I was able to upload my binary file this way.
var AWS = require('aws-sdk');
var fs = require('fs');
var s3 = new AWS.S3();

var fileStream = fs.createReadStream("F:/directory/fileName.ext");
var putParams = {
    Bucket: s3bucket, // your bucket name
    Key: s3key,       // the object key to save as
    Body: fileStream
};
s3.putObject(putParams, function(putErr, putData) {
    if (putErr) {
        console.error(putErr);
    } else {
        console.log(putData);
    }
});