SignatureDoesNotMatch browser based upload to s3 - javascript

I know many asked this question and I looked into their post but I still never got the correct answer so here goes.
I'm trying to upload an image to s3 using the Browser Based uploading technique introduce by Amazon dev. Right now I'm able to calculate both policy & signature on my end. But when I tried to upload an image I always get a "Signature not match" (>.<). One main problem I'm having is that the credentials I have are just temporary: AWS Security Token Service, this consist of an accessKEy, secretKey and security token. I'll post my code so anyone please
Here's my policy_json conversion
function setValues(accesskey, secretkey, token) {
var folder = 'avatars/email#domain.com/',
acl = 'public-read',
bucket = 'my-bucket';
var POLICY_JSON = { "expiration": "2013-12-03T12:29:27.000Z",
"conditions": [
{"bucket": bucket},
["starts-with", "$key", folder],
{"acl": acl},
{"success_action_redirect": "201"},
["starts-with", "$Content-Type", "image/png"]
]
};
var policy = Base64.encode(JSON.stringify(POLICY_JSON));
var signature = b64_hmac_sha1(secretkey, policy);
return {
"policy":policy,
"signature":signature,
"folder" : folder,
"acl" : acl,
"bucket" : bucket
}
}
My upload
function upload(acl, accesskey, policy, signature, token) {
$(':button').click(function()
{
var file = document.getElementById('file').files[0];
// var key = "events/" + (new Date).getTime() + '-' + file.name;
var xdate = '20131016T105000Z';
// var formData = new FormData($('form')[0]); //$('form')[0]
var formData = new FormData(); //$('form')[0]
formData.append("key", "avatars/email#domain.com/${filename}");
formData.append("acl", acl);
formData.append("success_action_redirect", "201");
formData.append("Content-Type", file.type);
formData.append("AWSAccessKeyId", accesskey);
formData.append("policy", policy);
formData.append("signature", signature);
// formData.append("x-amz-security-token", token);
formData.append("file", file);
$.ajax({
url: 'https://mybucket.s3.amazonaws.com', //Server script to process data
type: 'POST',
xhr: function() { // Custom XMLHttpRequest
var myXhr = $.ajaxSettings.xhr();
if(myXhr.upload){ // Check if upload property exists
myXhr.upload.addEventListener('progress',progressHandlingFunction, false); // For handling the progress of the upload
}
return myXhr;
},
//Ajax events
beforeSend: function(e) {
e.setRequestHeader("Authorization", "AWS "+accesskey+":"+signature);
e.setRequestHeader("x-amz-date", xdate);
e.setRequestHeader("x-amz-acl", acl);
e.setRequestHeader("x-amz-security-token", token);
// alert('Are you sure you want to upload document.');
},
success: function(e) { alert('Upload completed'); } ,
error: function(jqXHR, textStatus, errorThrown) {
console.log(textStatus);
console.log(errorThrown);
} ,
// Form data
data: formData,
//Options to tell jQuery not to process data or worry about content-type.
cache: false,
contentType: false,
processData: false
});
});
}
My only html
<form enctype="multipart/form-data">
<input id="file" name="file" type="file" />
<input type="button" value="Upload" />
</form>
This is what confused me, at first inside the mybucket it has a folder named avatars now when my colleague uploaded an image (say image1.jpg) using his email_address (say other_email_Address#domain.com) and the uploading is successful when we look at the bucket it created a folder with the name of his email_address and inside it the image. Why is that?
mybucket/
- avatars/
- my_email_address.#domain.com
- image1
- other_email_address#domain.com
- image1
so on ...
the tools i used are: webtoolkit.base64.js, sha1.js, base64-binary.js.

Related

Get .wav blob from url

I'm trying to get a blob from an URL (in my case leads to a .wav file).
The URL is located at my own website, so I only do requests to the same site.
The purpose:
A user has uploaded some .wav files to my website to one schema
The user wants to copy one or more .wav files from one to another schema
The user selects the audiofiles to copy without uploading the files again.
The user interface looks like this:
The problem:
Each audiofile is located in its own directory.
https://mywebsite.nl/media/audiofiles/schemaGUID/recording.wav
So when copying to another schema, the file or files needs to get re-uploaded to the directory of the other schema.
The code im using to upload the selected audiofiles is this:
newwavfiles.forEach(function (item) {
var name = item["name"];
var filename = item["filename"];
var fileblob = fetch('http://mywebsite.nl/media/audiofiles/12345677/recording.wav').then(res => res.blob());
var formData = new FormData();
formData.append('file', fileblob, filename);
formData.append('guid', guid);
// upload file to server
$.ajax({
url: 'index.php?action=uploadAudiofile',
type: 'post',
data: formData,
enctype: 'multipart/form-data',
contentType: false,
processData: false,
success: function (response) {
},
error: function (xhr, status, error) {
console.log(xhr);
var errorMessage = xhr.status + ': ' + xhr.statusText
}
});
});
To get the blob of the file I tried this:
var fileblob = fetch('http://mywebsite.nl/media/audiofiles/12345677/recording.wav').then(res => res.blob());
But then I get this error:
Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'.
Does anyone have a solution to get the .wav file blob from an URL?

how to upload image from local disk to the web using imgur api?

I want to building a file uploader app that would allow users to upload images from the local disk to the web , I search on web and find this link to use imgur api this is url
but it did not work , I put the code in jsfiddle.net but also not work this is the link of jsfiddle
this is the code :
<form id="imgur">
<input type="file" class="imgur" accept="image/*" data-max-size="5000"/>
</form>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
<script>
$("document").ready(function() {
$('input[type=file]').on("change", function() {
var $files = $(this).get(0).files;
if ($files.length) {
// Reject big files
if ($files[0].size > $(this).data("max-size") * 1024) {
console.log("Please select a smaller file");
return false;
}
// Begin file upload
console.log("Uploading file to Imgur..");
// Replace ctrlq with your own API key
var apiUrl = 'https://api.imgur.com/3/image';
var apiKey = 'ctrlq';
var settings = {
async: false,
crossDomain: true,
processData: false,
contentType: false,
type: 'POST',
url: apiUrl,
headers: {
Authorization: 'Client-ID ' + apiKey,
Accept: 'application/json'
},
mimeType: 'multipart/form-data'
};
var formData = new FormData();
formData.append("image", $files[0]);
settings.data = formData;
// Response contains stringified JSON
// Image URL available at response.data.link
$.ajax(settings).done(function(response) {
console.log(response);
});
}
});
});
<script>
so what wrong can any body help ?
EDIT :
I register and get the key client_id:5b9144f6bcc473e and I put the apiKey=5b9144f6bcc473e but still did not work any help

Allow tus-ruby-server to save file records to database

As of now, I have a file upload API using Shrine to handle file attachments and I have added tus-ruby-server to the mix to add the ability to resume uploads.
I have followed the steps here to make it so that when a file is uploaded from tus-ruby-server's endpoint, a database record is also created using Shrine's file handling.
When I POST a file directly to my API's uploads endpoint, it sends the JSON data like so:
{"video"=>#<ActionDispatch::Http::UploadedFile:0x007f811d1d5920 #tempfile=#<Tempfile:/tmp/RackMultipart20170517-19364-6pfr0g.webm>, #original_filename="video.webm", #content_type="video/webm", #headers="Content-Disposition: form-data; name=\"video\"; filename=\"video.webm\"\r\nContent-Type: video/webm\r\n">, "id"=>"35"}
When I upload a file using tus and then POST that generated file data to my API, it sends the following JSON (which fails):
{"video"=>"{\"id\":\"http://localhost:3000/files/8794bf67f32d01cabb9416ca9febf3c3\",\"storage\":\"cache\",\"metadata\":{\"filename\":\"video.webm\",\"size\":142288,\"mime_type\":\"\"}}", "id"=>"35"}
Is there any indication of what is going wrong here?
Here's the JavaScript that handles the uploading (using tus-js-client):
var upload = new tus.Upload(file, {
chunkSize: 1024 * 1024,
endpoint: 'http://localhost:3000/files',
metadata: {filename: file.name, content_type: file.type},
onError: function(error) {
alert(error);
},
onProgress: function(bytesSent, bytesTotal) {
var progress = parseInt(bytesSent / bytesTotal * 100, 10);
var percentage = progress.toString() + '%';
console.log(percentage + ' finished');
},
onSuccess: function(result) {
var fileData = {
id: upload.url,
storage: 'cache',
metadata: {
filename: upload.file.name.match(/[^\/\\]+$/)[0],
size: upload.file.size,
mime_type: upload.file.type
}
};
var fileDataJSON = JSON.stringify(fileData);
var formData = new FormData();
formData.append('video', fileDataJSON);
makeXMLHttpRequest('http://localhost:3000/api/uploads/' + gon.currentId, 'PUT', formData);
}
});
console.log('Uploading video recording to server...');
upload.start();
The JavaScript I used before to handle uploading without tus:
var formData = new FormData();
formData.append('video', blob, 'video.webm');
makeXMLHttpRequest('http://localhost:3000/api/uploads/' + gon.currentId, 'PUT', formData);

How do you upload a single image with Dropzone.js?

Dropzone.js seems to be uploading images as multi-part form data. How do I get it to upload images in the same way an image upload would work with cURL or a binary image upload with Postman?
I'm getting a pre-signed URL for S3 from a server. The pre-singed URL allows an image upload, but not form fields:
var myDropzone = new Dropzone("#photo-dropzone");
myDropzone.options.autoProcessQueue = false;
myDropzone.options.autoDiscover = false;
myDropzone.options.method = "PUT";
myDropzone.on("addedfile", function ( file) {
console.log("Photo dropped: " + file.name );
console.log("Do presign URL: " + doPresignUrl);
$.post( doPresignUrl, { photoName: file.name, description: "Image of something" })
.done(function( data ) {
myDropzone.options.url = data.url
console.log(data.url);
myDropzone.processQueue();
});
});
If I use the returned URL with Postman and set the body to binary and attach the image, then the upload works fine. However, if the Dropzone library uses the same URL to upload the image to S3 then I get a 403 because S3 does not expect form fields.
Update:
An Ajax alternative works as below with a S3 signed url, but Dropzone.js does not seem willing to put the raw image data in the PUT message body.
$.ajax( {
url: data.url,
type: 'PUT',
data: file,
processData: false,
contentType: false,
headers: {'Content-Type': 'multipart/form-data'},
success: function(){
console.log( "File was uploaded" );
}
});
Set maxFiles to 1.
Dropzone.autoDiscover = false;
dzAllocationFiles = new Dropzone("div#file-container", {
url: "api.php?do=uploadFiles"
, autoDiscover: false
, maxFiles: 1
, autoQueue: true
, addRemoveLinks: true
, acceptedFiles: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
});
dzAllocationFiles.on("success", function (file, response) {
// Success Operations
});
dzAllocationFiles.on("maxfilesexceeded", function (file, response) {
allocationFileNames = [];
this.removeAllFiles();
this.addFile(file);
});
Add below options, then working.
myDropzone.options.sending = function(file, xhr) {
var _send = xhr.send;
xhr.send = function() {
_send.call(xhr, file);
}
}

File upload to the asp.net web service not working

I'm trying to upload file to the web service using JavaScript and JQuery. My current implementation is working correctly in the local environment.
But after deploying my solution to the server and try to access from outside it's not working and saying server side error occurred. Other thing I found is other web methods of the same web service are working correctly and just this file upload method is not working.
Here is my asp.net web service file upload method.
[WebMethod]
[ScriptMethod(ResponseFormat = ResponseFormat.Json)]
public object SaveFile()
{
var bugID = int.Parse(HttpContext.Current.Request.Form["bugID"]);
var fileName = HttpContext.Current.Request.Form["name"];
var fileType = HttpContext.Current.Request.Form["type"];
var file = HttpContext.Current.Request.Files[0];
// file saving logic
// .....
return new JavaScriptSerializer().Serialize(new
{
Message = 1,
Name = fileName,
DocumentID = attach.DocumentID // Saved file ID
});
}
Here is the javascript method I used to upload the file to the web service
function uploadFile() {
var data = new FormData(),
file = $("#fileToUpload")[0].files[0];
data.append("name", file.name);
data.append("size", file.size);
data.append("type", file.type);
data.append("file", file);
data.append("bugID", bugID);
$.ajax(
{
url: baseURL + "Webservice/BugsWebService.asmx/SaveFile",
dataType: "xml",
type: "POST",
data: data,
cache: false,
contentType: false,
processData: false,
success: function (res) {
console.log(res);
},
error: function (jqXHR, textStatus, errorThrown) {
console.log(jqXHR);
console.log(textStatus);
console.log(errorThrown);
}
});
}
In development environment web service url is
http://localhost:55225/Bugs/View.aspx
When host the site url is
http://[ip-address]/Internal/Bugs/View.aspx
This is the error I'm getting when try to upload the file
http://[ip-address]/Internal/Webservice/BugsWebService.asmx/SaveFile
500 (Internal Server Error)
What is the issue with my implementation?

Categories

Resources