How to upload Image Buffer data in AWS S3? - javascript

I am trying to upload buffer data from an image into S3. It gets uploaded fine. But when I try to download/view the image in S3, it throws an error. I have tried the following -
The image is available to me in Buffer format (JSON); I cannot change this.
let image = { "type": "Buffer", "data": [45, 45, 45....]
let buf = new Buffer(image )
let params = {
Bucket: "bucketName",
Key: "TestImage123haha.PNG",
Body: buf ,
ACL: 'public-read',
ContentType: 'image/jpeg'
};
s3.upload(params, function(err, data) {
    if (err) {
        console.log('ERROR MSG: ', err);
    } else {
        console.log('Successfully uploaded data' + data.Location);
    }
})
After uploading the image, if I visit its URL in the bucket and try to view it, this is what I get: [error screenshot]
When I print the buffer data buf to the console, this is what I get: [console output]

This problem took me many days, but I was able to solve it:
In the API Gateway of your Lambda function, go to Configuration > Binary Media Types and add multipart/form-data.
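As a side note, the snippet above hands the whole JSON wrapper to the Buffer constructor, and uploads a .PNG key with a jpeg ContentType. Node's documented way to revive a JSON-serialized Buffer is Buffer.from(image.data). A minimal sketch of the corrected upload, assuming the aws-sdk v2 client and the same hypothetical bucket name:

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// `image` is the JSON form of a Buffer: { "type": "Buffer", "data": [ ...bytes ] }
let image = { "type": "Buffer", "data": [45, 45, 45, /* ... */] };
let buf = Buffer.from(image.data); // rebuild the original bytes (new Buffer() is deprecated)

s3.upload({
    Bucket: "bucketName",          // hypothetical bucket name
    Key: "TestImage123haha.png",
    Body: buf,
    ACL: 'public-read',
    ContentType: 'image/png'       // keep this consistent with the key's extension
}, function(err, data) {
    if (err) return console.log('ERROR MSG: ', err);
    console.log('Successfully uploaded data: ' + data.Location);
});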

Related

Uploading Image to AWS presigned post URL using axios

I am trying to upload an image to an S3 bucket using a presigned URL generated with boto3 in Python. I have been using the example Python code provided in the documentation and was successful (the image got uploaded with the correct Content-Type). However, when trying to do the same in JavaScript for our frontend application, I am really struggling to get it to work.
Here's the example dictionary returned by the backend:
{
    "fields": {
        "AWSAccessKeyId": "AKIAYS3VM3EBIFL7FKE5",
        "key": "posts/623255a762fd9bdfbd13f91a",
        "policy": "<very long string>",
        "signature": "Qvc/sGBHk0uzirzIfR1YmE2kFlo="
    },
    "url": "https://hotspot-storage.s3.amazonaws.com/"
}
Here is the functioning Python code:
response = <json response object>
object_name = 'playground/example_profile_group.png'
response['fields']['Content-Type'] = "image/png"

# Demonstrate how another Python program can use the presigned URL to upload a file
with open(object_name, 'rb') as f:
    files = {'file': (object_name, f)}
    http_response = requests.post(response['url'], data=response['fields'], files=files)

# If successful, returns HTTP status code 204
print(http_response)
print(http_response.text)
Here is the non-functioning Javascript code:
const data = response.data;
let payload = data.fields;
payload['Content-Type'] = 'image/jpeg';
const file = {
    uri: previewPath,
    name: previewPath,
    type: 'image/jpeg',
};
payload.file = file;
const url = data.url;
console.log(payload, "MY PAYLOAD")

axios({
    method: 'post',
    headers: {'Content-Type': 'multipart/form-data'},
    url: url,
    data: payload,
})
.then(function (response) {
    console.log(response.data, 'uploaded');
    const data = response.data;
})
.catch(function (error) {
    console.log(
        'error uploading image',
        error.response.data,
    );
});
// (the trailing handlers below close the outer request that fetched the presigned fields)
})
.catch(function (error) {
    console.log(
        'error getting media link',
        error.response.data,
    );
});
This is the error that keeps getting returned:
error uploading image <?xml version="1.0" encoding="UTF-8"?>
<Error><Code>MalformedPOSTRequest</Code><Message>The body of your POST request is not well-formed multipart/form-data.</Message><RequestId>Q0ES6P4QP75YVVED</RequestId><HostId>eowLxSJQD1xP1EfHPnzGSJzXVGpPjurIMhkdwAD22JMvi9zRoFGg6Bq+mnUt/Lu7DNPY80iBDMc=</HostId></Error>
I have been stuck on this for an absurd amount of time, and cannot tell what I am doing wrong. Any help would be very much appreciated.
In order to send a multipart/form-data request body, you'll need to use a FormData instance instead of a JavaScript object.
For example
const { url, fields } = response.data;
const payload = new FormData();
payload.append("Content-Type", "image/jpeg");

// add all the other policy fields first
Object.entries(fields).forEach(([key, val]) => {
    payload.append(key, val);
});

// S3 ignores any form field that comes after the file, so append the file last
payload.append("file", file); // this is the file blob, eg from <input type="file">

// No need to manually set content-type header, your browser knows what to do
const { data: result } = await axios.post(url, payload);
console.log("uploaded", result);

how to embed an image in a JSON response

I'm using Jimp to read in a JSON string that looks like this (abbreviated):
{
    "correlation": "...",
    "imageName": "...",
    "image": "<base64-encoded JPEG>"
}
As you can see, the image node is a base64-encoded JPEG.
I'm able to successfully convert it to a TIFF and save it:
Jimp.read(Buffer.from(inputImage, "base64"), function(err, image) {
    image.getBuffer(Jimp.MIME_TIFF, function(error, tiff) {
        context.bindings.outputBlob = tiff
        ...}
However, when I attempt to embed the TIFF inside a JSON object, it gets all garbled up:
const response = {
    image: tiff.toString('base64'),
    correlation: correlation
};
context.bindings.outputBlob = response;
Here's the full code:
const Jimp = require("jimp");
module.exports = function(context, myBlob) {
const correlation = context.bindings.inputBlob.correlation;
const inputImage = context.bindings.inputBlob.image;
const imageName = context.bindings.inputBlob.imageName;
context.log(
correlation + "Attempting to convert this image to a tiff: " + imageName
);
Jimp.read(Buffer.from(inputImage, "base64"), function(err, image) {
image.getBuffer(Jimp.MIME_TIFF, function(error, tiff) {
const response = {
image: tiff.toString('base64'),
correlation: correlation
};
context.bindings.outputBlob = response;
context.log(
correlation + "Succesfully converted " + imageName + " to tiff."
);
context.done();
});
});
};
How do we embed the tiff inside of a JSON payload?
If this output is non-negotiable, how would I render the tiff from the saved payload?
Since you confirmed you are looking for output via context.res, here is my working sample. Note that there is a maximum response size, so you can't return every image/file the way I am returning the image here.
const Jimp = require('jimp')

module.exports = async function (context, req)
{
    let response = {}
    try
    {
        let url = 'https://noahwriting.com/wp-content/uploads/2018/06/APPLE-300x286.jpg'
        //call function to download and resize image
        response = await resizeImage(url)
    }
    catch (err)
    {
        response.type = 'application/json'
        if (err.response == undefined)
        {
            context.log(err)
            response.status = 500
            response.data = err
        }
        else
        {
            response.data = err.response.data
            response.status = err.response.status
            context.log(response)
        }
    }

    //response
    context.res =
    {
        headers: { 'Content-Type': `${response.type}` },
        body: response.buf
    }
}

async function resizeImage(url)
{
    //read image to buffer
    let image = await Jimp.read(url)
    //resize image
    image.resize(300, Jimp.AUTO)
    //save to buffer
    let image_buf = await image.getBufferAsync(image.getMIME())
    //image.getMIME() returns something like `image/jpeg`, which is a valid Content-Type for responses.
    return { 'buf': image_buf, 'type': image.getMIME() }
}
(Off-topic, but I saw that you are using Blob Storage, so:) if you plan on storing photos/files/anything in Azure Blob Storage and want to retrieve them in some systematic way, you will find out very fast that you can't query the storage and have to deal with ugly XML. My workaround was to create a function that stores photos/files in Blob Storage but then saves the URL path to the file, along with the file name and any other attributes, to a Mongo store. Then I can make super fast queries to retrieve an array of links, which point to the respective files.
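For illustration, a minimal sketch of that metadata pattern, assuming the official mongodb driver and a hypothetical media.files collection (the blob itself has already been uploaded and blobUrl points at it):

const { MongoClient } = require('mongodb');

async function saveFileRecord(fileName, blobUrl) {
    const client = await MongoClient.connect('mongodb://localhost:27017');
    try {
        // store the blob's URL plus whatever attributes you want to query on later
        await client.db('media').collection('files').insertOne({
            name: fileName,
            url: blobUrl,
            uploadedAt: new Date()
        });
    } finally {
        await client.close();
    }
}

// later, retrieving links becomes a plain Mongo query instead of a Blob Storage listing:
// const links = await db.collection('files').find({ name: /APPLE/ })
//     .map(doc => doc.url).toArray();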

S3 uploads corrupt file when using stream

I've built a chat server using Rails, socket.io, and Node that allows users to send images to each other, which I'm trying to get working with Amazon S3. Here are the relevant code snippets from the client and server:
Client:
var img_file = this.files[0]; //retrieved from file input

file_reader.onload = function(e) {
    //console.log(img_file);
    var extension = img_file.name.split('.');
    extension = extension[extension.length - 1];
    get_user_id(username, function(to_id) {
        socket.emit('message', {
            from: "<%= @user.displayname %>",
            fromid: <%= @user.id %>,
            to: username,
            toid: parseInt(to_id),
            content: e.target.result,
            type: 'image',
            extension: extension,
            size: img_file.size
        });
    })
};

file_reader.readAsBinaryString(img_file);
Server:
socket.on('message', function(message) {
    console.log(message.type + " message received, sending to appropriate person: " + message.to);
    if (message.type == 'text') {
        //send message like normal
        sendMessage(message);
    }
    else {
        //it's an image, it needs additional processing
        var blank_file = uuid.v4() + '-' + new Date().getTime() + '.' + message.extension;
        var contents = message.content;
        s3.putObject({
            Bucket: 'bucket-name', //not the real bucket name
            Key: blank_file,
            ContentType: 'image/' + message.extension,
            //ContentLength: message.size,
            Body: contents
        },
        function(err, data) {
            if (err) {
                console.log(err)
            }
            else {
                console.log('file uploaded to s3 successfully');
                message.content = getS3Url(blank_file);
                sendMessage(message);
            }
        })
    }
})
Whenever I try to upload an image using this code, it appears to work, but when I check my s3 bucket and open up the image, my operating system says that the file is unreadable or corrupt. I've tried using the ContentLength parameter but when I use that I get the error "BadDigest: The Content-MD5 you specified did not match what we received."
Can anyone help me out? I got this code to work properly without s3, but for some reason I keep getting corrupt or unrecognizable files since I started using it.
I figured out what was wrong. You need to use a binary buffer to pass the data to s3 instead of just passing the string as is, so basically use these for the s3 parameters:
s3.putObject({
    Bucket: 'bucket-name',
    Key: file,
    Body: Buffer.from(contents, 'binary') //wrap the binary string in a buffer (new Buffer() is deprecated)
}, function() { });
By using the buffer, it should upload the files properly.
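As an aside, FileReader.readAsBinaryString is deprecated these days; readAsArrayBuffer avoids the string round-trip entirely. A sketch of the client side under that assumption, reusing the question's variables (socket.io has supported binary payloads since 1.0, so the Node server receives content as a Buffer it can hand to S3 directly as Body):

file_reader.onload = function(e) {
    get_user_id(username, function(to_id) {
        socket.emit('message', {
            to: username,
            toid: parseInt(to_id),
            content: e.target.result, // an ArrayBuffer; arrives server-side as a Buffer
            type: 'image',
            extension: extension,
            size: img_file.size
        });
    });
};
file_reader.readAsArrayBuffer(img_file);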

Send a file from mobile to Node js server

I'm building an application with react-native. I'm trying to send an image from the mobile app to the server (Node.js), using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty: it doesn't contain the photo. I think the problem is that the server can't access the path of the image because it is on a different device, but I don't know how to solve it.
React-Native:
openImagePicker(){
    const options = {
        title: 'Select Avatar',
        storageOptions: {
            skipBackup: true,
            path: 'images'
        }
    }
    ImagePicker.showImagePicker(options, (imagen) => {
        if (imagen.didCancel) {
            console.log('User cancelled image picker');
        }
        else if (imagen.error) {
            console.log('ImagePicker Error: ', imagen.error);
        }
        else if (imagen.customButton) {
            console.log('User tapped custom button: ', imagen.customButton);
        }
        else {
            let formdata = new FormData();
            formdata.append("file[name]", imagen.fileName);
            formdata.append("file[path]", imagen.path);
            formdata.append("file[type]", imagen.type);
            fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
                method: 'PUT',
                headers: {
                    'Content-Type': 'multipart/form-data'
                },
                body: formdata
            })
            .then(response => {
                console.log("ok");
            })
            .catch(function(err) {
                console.log(err);
            })
        }
    })
}
Node Js:
addPhotoUser = function (req, res) {
    User.findById(req.params.id, function(err, user) {
        fs.readFile(req.body.file.path, function (err, data) {
            var pwd = 'home/ubuntu/.../';
            var newPath = pwd + req.body.file.name;
            fs.writeFile(newPath, data, function (err) {
                imageUrl: URL + req.body.file.name;
                user.save(function(err) {
                    if (!err) {
                        console.log('Updated');
                    } else {
                        console.log('ERROR: ' + err);
                    }
                    res.send(user);
                });
            });
        });
    });
};
Yes, the problem is that the file path is on the local device, not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. That library encodes the image data with base64, so send that string to your server; the URI returned by the library won't be accessible on a remote server.
What this means is that you won't be reading any files on your server; instead you'll just decode a base64 string from the request body and write that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);

fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
    method: 'PUT',
    headers: {
        'Content-Type': 'multipart/form-data'
    },
    body: formdata
})
On the server side, decode the base64 string before writing it to the filesystem. In Node, Buffer.from does the decoding directly (atob would only give you a binary string that still needs wrapping):
let decoded = Buffer.from(req.body.file.data, 'base64')
// now this is binary and can be written to the filesystem
From there:
fs.writeFile(newPath, decoded, function (err) {
    imageUrl: newPath;
    user.save(function(err) {
        if (!err) {
            console.log('Updated');
        } else {
            console.log('ERROR: ' + err);
        }
        res.send(user);
    });
});
Note, you don't need the fs.readFile call that's in your code, because the image arrives as a base64 string in the request rather than as a path on disk.
There also seem to be some oddities with how you're using that user object. You seem to be passing only a function that handles errors, not any actual data. I don't know which ORM you're using, so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
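For completeness, a minimal sketch of just the decode-and-write step on the Node side, assuming some middleware (multer, formidable, etc.) has already parsed the multipart form into req.body.file, and a hypothetical uploads directory:

const fs = require('fs');
const path = require('path');

function savePhoto(req, res) {
    const { name, data } = req.body.file;       // `data` is the base64 string from the client
    const bytes = Buffer.from(data, 'base64');  // decode to real binary
    const newPath = path.join('/home/ubuntu/uploads', path.basename(name)); // hypothetical directory
    fs.writeFile(newPath, bytes, function(err) {
        if (err) return res.status(500).send(err.message);
        res.send({ imageUrl: newPath });
    });
}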
Make an object and send it to the server. The object will consist of name, path and type, like this:
var imageData = { name: 'image1', path: uri, type: 'image/jpeg' }
That is one way to send the image data. Another way is to convert it into a BLOB so the server-side programmer doesn't have to do that work on their end; you can make a BLOB with react-native-fetch-blob.
One more way is to upload the images directly to Amazon S3 and send the link to the backend.
Function that returns base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function(uri_attachment, mimetype_attachment) {
    return new Promise((RESOLVE, REJECT) => {
        // Fetch attachment
        RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
            .then((response) => {
                let base64Str = response.data;
                var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;
                // Return base64 image
                RESOLVE(imageBase64);
            })
            .catch((error) => {
                // error handling: reject so callers can catch the failure
                console.log("Error: ", error);
                REJECT(error);
            });
    });
},
Cheers :)

Issues with streams and S3 upload

I am having issues with a zero-byte stream. I am resizing an image and uploading it as a stream to S3. If I pipe the output to the response instead, it displays correctly.
// Fetch remote file
var request = http.get('mybigfile.jpg', function(response) {
    // Setup IM convert
    var convert = spawn('convert', ['-', '-strip', '-thumbnail', 600, '-']);

    // Pipe file stream to it
    response.pipe(convert.stdin);

    // Pipe result to browser - works fine
    //convert.stdout.pipe(res);

    // S3 requires headers
    var headers = {
        'content-type': response.headers['content-type'],
        'x-amz-acl': 'public-read'
    };

    // Upload to S3
    var aws = aws2js.load('s3', aws.key, aws.secret);
    aws.setBucket(aws.bucket);
    aws.putStream('thumb.jpg', convert.stdout, false, headers, function(err) {
        if (err) {
            return console.error('Error storing:', err.toString());
        } else {
            // No errors - this shows - but file is 0kb
            console.log(path + ' uploaded to S3');
        }
    });
});
I see notes about streams not working with S3 due to content length. I am trying buffers but no success with that so far.
Well, no go on streams. I guess I could use pause-stream or multipart to technically achieve this, but otherwise I don't think it's possible. I ended up using a buffer.
...
// Pipe file stream to it
response.pipe(convert.stdin);

// Save to buffer
var bufs = [];
convert.stdout.on('data', function(chunk) {
    bufs.push(chunk);
});
convert.stdout.on('end', function() {
    var buffer = Buffer.concat(bufs);
    // S3 requires headers
    ...
    aws.putBuffer(path, buffer, false, headers, function(err) {
        ...
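For reference, a self-contained sketch of the same buffer-then-upload pattern, using the aws-sdk v2 client instead of aws2js, with a hypothetical source URL and bucket name:

const http = require('http');
const { spawn } = require('child_process');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

http.get('http://example.com/mybigfile.jpg', function(response) {
    const convert = spawn('convert', ['-', '-strip', '-thumbnail', '600', '-']);
    response.pipe(convert.stdin);

    const bufs = [];
    convert.stdout.on('data', (chunk) => bufs.push(chunk));
    convert.stdout.on('end', () => {
        // Buffer.concat yields a single Buffer, so S3 knows the content length up front
        s3.putObject({
            Bucket: 'my-bucket',                            // hypothetical bucket
            Key: 'thumb.jpg',
            Body: Buffer.concat(bufs),
            ContentType: response.headers['content-type'],
            ACL: 'public-read'
        }, (err) => {
            if (err) return console.error('Error storing:', err);
            console.log('thumb.jpg uploaded to S3');
        });
    });
});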
