I am using the JavaScript version of the AWS SDK to upload a file to an Amazon S3 bucket.
Code:
AWS.config.update({
    accessKeyId: 'access-key',
    secretAccessKey: 'secret-key'
});
AWS.config.region = 'region';
var bucket = new AWS.S3({params: {Bucket: 'bucket-name'}});
//var fileChooser = document.getElementById('file');
var files = event.target.files;
$.each(files, function(i, file) {
    if (file) {
        var params = {Key: file.name, ContentType: file.type, Body: file};
        bucket.upload(params).on('httpUploadProgress', function(evt) {
            var percent = parseInt((evt.loaded * 100) / evt.total);
            console.log("Uploaded :: " + percent + '%');
            if (percent === 20) {
                console.log("abort upload");
                bucket.abort.bind(bucket); // has no effect (see answers below)
            }
        }).send(function(err, data) {
            if (!err) {
                console.log(data);
                //alert("Upload Success \nETag:"+ data.ETag + "\nLocation:"+ data.Location);
                var filename = data.Location.substr(data.Location.lastIndexOf("/") + 1);
                console.log(filename);
                fileData = filename;
                filename = filename.replace("%20", " ");
                $('.aws-file-content').append('<i id="delete-aws-file' + i + '" class="delete-aws-file icon-remove-sign" data-filename=' + fileData + '></i><a href="' + data.Location + '" target=_blank >' + filename + '</a><br>');
            } else {
                console.log(err);
            }
        });
    }
});
While the upload is still in progress (some parts of the file have already been uploaded successfully), I want to abort/stop it.
I tried:
bucket.abort(); // not working
bucket.abort.bind(bucket); // not working
Thanks for any help.
Found the solution:
// replaced bucket.upload() with bucket.putObject()
var params = {Key: file.name, ContentType: file.type, Body: file};
var request = bucket.putObject(params);
Then, to abort the request:
request.abort();
You cannot call abort on the bucket (your S3 object); it must be called on the upload request itself.
Change it to something like this:
var upload = bucket.upload(params);
upload.send(....);
so you can bind abort on the upload:
upload.abort.bind(upload);
You can call it from a timeout, as shown in the example:
// abort request in 1 second
setTimeout(upload.abort.bind(upload), 1000);
Calling abort() in the browser environment will not abort any requests that are already in flight. If a multipart upload was created, any parts not yet uploaded will not be sent, and the multipart upload will be cleaned up.
The default part size is 5 MB (5 * 1024 * 1024 bytes).
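Putting the two notes above together, here is a minimal sketch (assuming file is a File object from an input, and the same bucket setup as in the question) that keeps a handle on the managed upload so it can be aborted later:
var bucket = new AWS.S3({params: {Bucket: 'bucket-name'}});

// partSize defaults to 5 MB; queueSize controls how many parts upload concurrently
var upload = bucket.upload(
    {Key: file.name, ContentType: file.type, Body: file},
    {partSize: 5 * 1024 * 1024, queueSize: 1}
);

upload.send(function(err, data) {
    if (err) {
        // an aborted upload lands here with err.code === 'RequestAbortedError'
        console.log(err.code, err.message);
    } else {
        console.log(data.Location);
    }
});

// later, e.g. from a cancel button's click handler:
// upload.abort();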
Through dumb luck I've stumbled upon a way to do this for multipart uploads.
The accepted answer forces you to use the putObject method, which neither chunks uploads nor sends them using the multipart upload API.
The following solution uses the s3.upload method of the AWS SDK for JavaScript in the Browser, and it seems to work just fine, even though the example from the official documentation doesn't.
var bucket = new AWS.S3({params: {Bucket: 'bucket-name'}});
var params = {Key: file.name, ContentType: file.type, Body: file};
bucket.upload(params).send();
setTimeout(function() { bucket.abort(); }, 1000);
That's it. I just tried calling bucket.abort() and it just worked. Not sure why AWS hasn't documented this.
I am attempting to upload a photo to my Firebase app's storage from my Apache Cordova app. I currently get the photo's URI with the following code:
function getPhotoFromAlbum() {
    navigator.camera.getPicture(onPhotoURISuccess, onFail, {
        quality: 50,
        sourceType: navigator.camera.PictureSourceType.SAVEDPHOTOALBUM,
        destinationType: navigator.camera.DestinationType.FILE_URI
    });
}

function onPhotoURISuccess(imageURI) {
    var image = document.getElementById('image');
    image.style.display = 'block';
    image.src = imageURI;
    getFileEntry(imageURI);
}
And then I am attempting to convert the image into a file and push it to my Firebase storage with the following function:
function getFileEntry(imgUri) {
    window.resolveLocalFileSystemURL(imgUri, function success(fileEntry) {
        console.log("got file: " + fileEntry.fullPath);
        var filename = "test.jpg";
        var storageRef = firebase.storage().ref('/images/' + filename);
        var uploadTask = storageRef.put(fileEntry);
    }, function () {
        // If we don't get the FileEntry (which may happen when testing
        // on some emulators), copy to a new FileEntry.
        createNewFileEntry(imgUri);
    });
}
I have both the File and the Camera Cordova plugins installed. The only error I get when I attempt this is
Error in Success callbackId: File1733312835 : [object Object]
which is just an error message from cordova.js.
I also know I have my Firebase storage set up correctly, because I have tested it in an emulator by adding a file input and successfully uploading whatever file the user selected to the Firebase storage.
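For reference, that working file-input path looks something like this (a sketch; the input id is made up):
document.getElementById('file-input').addEventListener('change', function (e) {
    var file = e.target.files[0];
    var storageRef = firebase.storage().ref('/images/' + file.name);
    // put() accepts File/Blob objects directly
    storageRef.put(file).then(function (snapshot) {
        console.log('Uploaded ' + snapshot.totalBytes + ' bytes');
    });
});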
Is it possible to upload a file to Firebase storage by converting an image to a file through its URI and then uploading it? If so, what is the correct way to do so, and what is wrong with the way I'm doing it?
I was able to accomplish uploading an image by using a data url. Below is my code:
var filename = "test.jpg";
var storageRef = firebase.storage().ref('/images/' + filename);
var message = 'data:image/jpg;base64,' + imageUri;
storageRef.putString(message, 'data_url').then(function (snapshot) {
console.log('Uploaded a data_url string!');
});
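Note that this assumes imageUri contains base64 image data, which is what getPicture returns when the picture is requested with destinationType DATA_URL rather than FILE_URI (a sketch of the corresponding options; callback names are placeholders):
navigator.camera.getPicture(onPhotoDataSuccess, onFail, {
    quality: 50,
    sourceType: navigator.camera.PictureSourceType.SAVEDPHOTOALBUM,
    // DATA_URL returns base64-encoded image data instead of a file URI
    destinationType: navigator.camera.DestinationType.DATA_URL
});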
Yes, it is possible to upload a file to Firebase storage through its URI. However, you have to do it the correct way:
1. Store the data in Firebase only after the file read operation has completed; you can use FileReader.onloadend for this.
2. Upload the result to Firebase as a data_url string.
Here is the snippet for more clarity:
function getFileEntry(imgUri) {
    window.resolveLocalFileSystemURL(imgUri, function onSuccess(fileEntry) {
        fileEntry.file(function (file) {
            var reader = new FileReader();
            reader.onloadend = function () {
                var filename = "test.jpg";
                var storageRef = firebase.storage().ref('/images/' + filename);
                // reader.result is already a full data URL
                // (e.g. "data:image/jpeg;base64,...")
                storageRef.putString(reader.result, 'data_url').then(function (snapshot) {
                    console.log('Image was uploaded in base64 format.');
                });
            };
            // read the file contents as a data URL instead of an ArrayBuffer,
            // since putString(..., 'data_url') expects a data URL string
            reader.readAsDataURL(file);
        });
    }, function onError(err) {
        console.log(err);
        createNewFileEntry(imgUri);
    });
}
I am successfully uploading an image to my Amazon S3 bucket, but when I go to the URL that is generated, my image is instantly downloaded instead of being displayed in the browser. My code is below:
var fs = require('fs');
var AWS = require('aws-sdk');

AWS.config = new AWS.Config();
AWS.config.accessKeyId = "MY_ACCESS_KEY";
AWS.config.secretAccessKey = "MY_SECRET_KEY";
AWS.config.region = "us-west-2";
AWS.config.apiVersions = {
    "s3": "2006-03-01"
};

var s3 = new AWS.S3();
var bodystream = fs.createReadStream('./meme1.png');

var params = {
    'Bucket': 'MY_BUCKET_NAME',
    'Key': 'uploads/images/' + 'Img4.png',
    'Body': bodystream,
    'ContentEncoding': 'base64',
    'ContentType': 'image/png',
    'ACL': 'public-read-write'
};

s3.upload(params, function(err, data) {
    console.log('RESPONSE: ', err, data);
});
Some other posts say to make sure the ContentType property of the params object is correct but I am pretty sure that mine is correct. How do I fix this and make the image display in the browser instead of downloading?
You are missing the ContentDisposition parameter; by default the file is treated as an "attachment", which means it is downloaded instead of displayed. Set it to inline:
ContentDisposition: 'inline'
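In the params object from the question, that would look like this (a sketch reusing the question's bucket and key):
var params = {
    'Bucket': 'MY_BUCKET_NAME',
    'Key': 'uploads/images/' + 'Img4.png',
    'Body': bodystream,
    'ContentType': 'image/png',
    // render in the browser instead of prompting a download
    'ContentDisposition': 'inline',
    'ACL': 'public-read-write'
};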
(Answering this Question for Googlers)
I am having trouble uploading a file with Node.js and Angular.
I found solutions, but only ones using Ajax, which I don't know. Is it possible to do this without it?
With the following code I get this error:
POST http://localhost:2000/database/sounds 413 (Payload Too Large)
Code:
HTML:
<div class="form-group">
<label for="upload-input">This needs to be a .WAV file</label>
<form enctype="multipart/form-data" action="/database/sounds" method="post">
<input type="file" class="form-control" name="uploads[]" id="upload-input" multiple="multiple">
</form>
<button class="btn-primary" ng-click="uploadSound()">UPLOAD</button>
</div>
JavaScript:
$scope.uploadSound = function() {
    var x = document.getElementById("upload-input");
    if ('files' in x) {
        if (x.files.length == 0) {
            console.log("Select one or more files.");
        } else {
            var formData = new FormData();
            for (var i = 0; i < x.files.length; i++) {
                var file = x.files[i];
                if (file.type === "audio/wav") {
                    console.log("Importing:");
                    if ('name' in file) {
                        console.log("- name: " + file.name);
                    }
                    if ('size' in file) {
                        console.log("- size: " + file.size + " bytes");
                    }
                    formData.append('uploads[]', file, file.name);
                } else {
                    console.log("Error with '" + file.name + "': the type '" + file.type + "' is not supported.");
                }
            }
            $http.post('/database/sounds', formData).then(function(response) {
                console.log("Upload:");
                console.log(response.data);
            });
        }
    }
};
Node.js:
// Upload a sound
app.post('/database/sounds', function(req, res) {
    var form = new formidable.IncomingForm();

    // allow the user to upload multiple files in a single request
    form.multiples = true;

    // store all uploads in the /database/sounds directory
    form.uploadDir = path.join(__dirname, '/database/sounds');

    // every time a file has been uploaded successfully,
    // rename it to its original name
    form.on('file', function(field, file) {
        fs.rename(file.path, path.join(form.uploadDir, file.name));
    });

    // log any errors that occur
    form.on('error', function(err) {
        console.log('An error has occurred: \n' + err);
    });

    // once all the files have been uploaded, send a response to the client
    form.on('end', function() {
        res.end('success');
    });

    // parse the incoming request containing the form data
    form.parse(req);
});
EDIT:
The error became:
POST http://localhost:2000/database/sounds 400 (Bad Request)
If you are using bodyParser:
app.use(bodyParser.urlencoded({limit: '100mb', extended: true}));
app.use(bodyParser.json({limit: '100mb'}));
This will allow you to upload files up to 100 MB.
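As a sketch of where those lines go (assuming a standard Express setup), the limits have to be registered before the upload route:
var express = require('express');
var bodyParser = require('body-parser');
var app = express();

// raise the body size limits before any routes are registered
app.use(bodyParser.urlencoded({limit: '100mb', extended: true}));
app.use(bodyParser.json({limit: '100mb'}));

app.post('/database/sounds', function(req, res) {
    // ... formidable handling as in the question
});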
For the json/urlencoded limits, it's recommended to configure them in server/config.json as follows:
{
    "remoting": {
        "json": {"limit": "50mb"},
        "urlencoded": {"limit": "50mb", "extended": true}
    }
}
Please note that the LoopBack REST API has its own Express router with bodyParser.json/urlencoded middleware. When you add a global middleware, it has to come before the boot() call:
var loopback = require('loopback');
var boot = require('loopback-boot');
var app = module.exports = loopback();

// request limit 500 MB (524288000 bytes)
app.use(loopback.bodyParser.json({limit: 524288000}));
app.use(loopback.bodyParser.urlencoded({limit: 524288000, extended: true}));

// boot() wires up the REST router, so it must come after the parsers above
boot(app, __dirname);
With regards to checking that the data is actually a WAV file, your best bet is to look at the contents of the file and determine if it looks like a WAV file or not.
The WAVE PCM soundfile format article goes into the details of the format.
To be absolutely sure that this is a proper WAV file and it's not broken in some way, you need to check that all of the fields defined there make sense. But a quick solution might be to just check that the first four bytes of the content are the letters 'RIFF'. It won't guard against corrupted files or malicious content, but it's a good place to start, I think.
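As a minimal sketch of that check (assuming Node.js and that the upload has already been written to disk; looksLikeWav is a made-up helper name):
var fs = require('fs');

// A WAV file starts with 'RIFF' at byte 0 and 'WAVE' at byte 8
// (see the WAVE PCM soundfile format article).
function looksLikeWav(filePath) {
    var header = Buffer.alloc(12);
    var fd = fs.openSync(filePath, 'r');
    fs.readSync(fd, header, 0, 12, 0);
    fs.closeSync(fd);
    return header.slice(0, 4).toString('ascii') === 'RIFF' &&
           header.slice(8, 12).toString('ascii') === 'WAVE';
}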
I tried changing the object sent to URL params, as suggested in Very Simple AngularJS $http POST Results in '400 (Bad Request)' and 'Invalid HTTP status code 400':
$http.post({
    method: 'POST',
    url: '/upload',
    data: formData,
    headers: {'Content-Type': 'application/x-www-form-urlencoded'},
    transformRequest: function(obj) {
        var str = [];
        for (var p in obj)
            str.push(encodeURIComponent(p) + "=" + encodeURIComponent(obj[p]));
        return str.join("&");
    }
}).success(function(response) {
    console.log("Uploaded:");
    console.log(response.data);
});
But I get a bad request error.
Why is no data received? The console.log just before the post shows I have one file in my formData.
Error: $http:badreq
Bad Request Configuration
Http request configuration url must be a string or a $sce trusted object. Received: {"method":"POST","url":"/upload","data":{},"headers":{"Content-Type":"application/x-www-form-urlencoded"}}
I have a dropzone.js instance that uploads files directly to an S3 bucket using CORS and then passes me the file information inside of JavaScript to use. This is the tutorial I followed for it...
The file upload itself seems to work fine and the files show up in the S3 bucket at the correct file path, however every file has something like this wrapped around its content:
------WebKitFormBoundaryMH4lrj8VmFKgt1Ar
Content-Disposition: form-data; name="files[0]"; filename="image-name.png"
Content-Type: image/png
IMAGE CONTENT HERE
------WebKitFormBoundaryMH4lrj8VmFKgt1Ar--
I cannot for the life of me figure out why this is happening. It doesn't matter what type/mime of file I upload, everything includes it.
Any help would be greatly appreciated!
Inside your init: function() { ... }, add the following:
self.on("sending", function(file, xhr, formData) {
var _send = xhr.send;
xhr.send = function() {
_send.call(xhr, file);
}
});
@TadasTamosauskas is correct that catching the 'sending' event to patch xhr will not work for chunked uploads.
Below is another method that patches xhr with a params function passed in as an option to Dropzone. The chunked execution path also adds the headers required for a resumable file upload using the OneDrive API as documented here: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_createuploadsession?view=odsp-graph-online
const CHUNK_SIZE = 10485760; // 10 MiB

Dropzone.options.dropzone = {
    method: "put",
    headers: {
        'Cache-Control': null,
        'X-Requested-With': null
    },
    filesizeBase: 1024,
    maxFilesize: 102400, // 100G in MB, max OneDrive filesize
    chunking: true,
    chunkSize: CHUNK_SIZE,
    params: function(files, xhr, chunk) {
        if (chunk) {
            // resumable upload: tell OneDrive which byte range this chunk covers
            const chunk_start = chunk.index * CHUNK_SIZE;
            xhr.setRequestHeader('Content-Range',
                'bytes ' + chunk_start
                + '-' + (chunk_start + chunk.dataBlock.data.size - 1)
                + '/' + files[0].size);
            var _send = xhr.send;
            xhr.send = function() {
                _send.call(xhr, chunk.dataBlock.data);
            };
        } else {
            var _send = xhr.send;
            xhr.send = function() {
                _send.call(xhr, files[0]);
            };
        }
    }
};
I use Amazon Web Services S3 to upload and store my files. I generate a pre-signed URL server-side with the AWS SDK for Node.js, then upload files directly from the browser using this pre-signed URL.
How it works
Server-side, I have a method which returns the pre-signed URL:
AWS.config.loadFromPath(__dirname + '/../properties/aws-config.json');
AWS.config.region = 'eu-west-1';
// Credentials are loaded
var s3 = new AWS.S3();
var docId = req.query.doc;
var params = {
    Bucket: res.locals.user.bucketId,
    Key: docId + "." + req.query.fileExtension,
    ACL: "bucket-owner-read",
    ContentType: req.query.fileType
};
s3.getSignedUrl('putObject', params, function (err, url) {
    if (url) {
        res.writeHead(200);
        var result = {
            AWSUrl: url // pre-signed URL including the signature params
        };
        res.end(JSON.stringify(result));
    }
});
Client-side, I upload my file directly to S3:
var loadToAWSS3 = function(url, file, inputFileId) {
    var data = new FormData();
    data.append('file', file);
    $.ajax({
        url: url, // URL obtained from the server-side method
        type: 'PUT',
        data: data,
        headers: {
            'Content-Type': file.type
        },
        processData: false,
        xhr: function() {
            var myXhr = $.ajaxSettings.xhr();
            if (myXhr.upload) {
                myXhr.upload.addEventListener('progress', function(e) {
                    if (e.lengthComputable) {
                        var max = e.total;
                        var current = e.loaded;
                        var percentage = (current * 100) / max;
                        // stuff to handle progress...
                    }
                }, false);
            }
            return myXhr;
        },
        statusCode: {
            200: function () {
                // some stuff
            }
        }
    });
};
Chrome & Firefox behavior
Works as expected: the pre-signed URL is fetched, then the file is uploaded, and I can see it in my AWS S3 console.
Lovely IE 11
SignatureDoesNotMatch error! IE adds some extra content to the Content-Type request header that AWS does not expect, which makes the signature comparison fail. Server-side, the SDK generates the signature based on:
ContentType: req.query.fileType // (something like application/pdf)
whereas when I inspect the request with the IE debugger, I see
Content-Type application/pdf, multipart/form-data; boundary=---------------------------7df2f3283091c
In Chrome, my request header is fine:
Content-Type: application/pdf
What can I do to remove this extra Content-Type added by IE? If that's not possible, how can I generate this extra content before sending the request, so that the pre-signed URL is signed with it included?
OK, I finally figured it out.
Using FormData() means you are sending files as if through a form submission. That's why IE always adds
multipart/form-data; boundary=---------------------------7**********
To get around the problem I use raw JavaScript with XMLHttpRequest, thanks to this answer:
var xmlHttpRequest = new XMLHttpRequest();
xmlHttpRequest.open('PUT', url, true);
// only the bare Content-Type is set; no multipart boundary gets appended
xmlHttpRequest.setRequestHeader('Content-Type', file.type);
xmlHttpRequest.send(file);
And it works with Chrome, Firefox, and IE 11 (I have not tested with IE < 11, but according to W3Schools it works for IE7+). No more extra content type with IE.
Hope this helps.
If you have access to the xhr object you can do:
var _send = xhr.send;
xhr.send = function() {
    _send.call(xhr, file);
};