Upload an image to an S3 bucket from React Native using aws-sdk - javascript

I am using aws-sdk to upload an image to an S3 bucket. Please look at my code below; I have already spent a day on it.
uploadImageOnS3 = () => {
  var S3 = require("aws-sdk/clients/s3");
  const BUCKET_NAME = "testtest";
  const IAM_USER_KEY = "XXXXXXXXXXXXX";
  const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
  const s3bucket = new S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME
  });
  let contentType = "image/jpeg";
  let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
  let file = {
    uri: this.state.fileObj.uri,
    type: this.state.fileObj.type,
    name: this.state.fileObj.fileName
  };
  s3bucket.createBucket(() => {
    const params = {
      Bucket: BUCKET_NAME,
      Key: this.state.s3BucketObj,
      Body: file,
      ContentDisposition: contentDeposition,
      ContentType: contentType
    };
    s3bucket.upload(params, (err, data) => {
      if (err) {
        console.log("error in callback");
        console.log(err);
      }
      // console.log('success');
      console.log(data);
    });
  });
};
Error:
Unsupported body payload object
Please help me sort this out. I am also using react-native-image-picker to pick the image.

You have to pass an array buffer as the Body instead of the file object. As per the AWS documentation, the Body parameter accepts a data stream, string, array buffer, or blob. The code below should resolve your issue:
import fs from "react-native-fs";
import { decode } from "base64-arraybuffer";

uploadImageOnS3 = async () => {
  var S3 = require("aws-sdk/clients/s3");
  const BUCKET_NAME = "testtest";
  const IAM_USER_KEY = "XXXXXXXXXXXXX";
  const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
  const s3bucket = new S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME,
    signatureVersion: "v4"
  });
  let contentType = "image/jpeg";
  let contentDisposition = 'inline;filename="' + this.state.s3BucketObj + '"';
  // Read the file from disk and decode it into an ArrayBuffer,
  // which is one of the Body types the SDK accepts
  const fPath = this.state.fileObj.uri;
  const base64 = await fs.readFile(fPath, "base64");
  const arrayBuffer = decode(base64);
  s3bucket.createBucket(() => {
    const params = {
      Bucket: BUCKET_NAME,
      Key: this.state.s3BucketObj,
      Body: arrayBuffer,
      ContentDisposition: contentDisposition,
      ContentType: contentType
    };
    s3bucket.upload(params, (err, data) => {
      if (err) {
        console.log("error in callback");
        console.log(err);
        return;
      }
      console.log(data);
    });
  });
};
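For context, here is a sketch of how a picker result might feed this method, assuming the older react-native-image-picker showImagePicker API whose response carries uri / type / fileName (matching the state fields used above); the option values and the "uploads/" key prefix are illustrative assumptions, not part of the original answer:
import ImagePicker from "react-native-image-picker";

// Sketch: pick an image, stash the result in state, then upload.
// The response fields (uri, type, fileName) match this.state.fileObj above;
// the object key prefix "uploads/" is an arbitrary example.
pickAndUpload = () => {
  ImagePicker.showImagePicker({ mediaType: "photo" }, (response) => {
    if (response.didCancel || response.error) return;
    this.setState(
      {
        fileObj: response,
        s3BucketObj: "uploads/" + response.fileName
      },
      () => this.uploadImageOnS3()
    );
  });
};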

You can check out the React Native AWS Amplify documentation for the proper process. The documentation mentions that the Body parameter accepts a data stream, string, array buffer, or blob.

import AWS from 'aws-sdk';
import fs from 'react-native-fs';
import { decode } from 'base64-arraybuffer';

export const uploadFileToS3 = async (file) => {
  const BUCKET_NAME = 'xxxxx';
  const IAM_USER_KEY = 'xxxxxxxxxxxxxxxxx';
  const IAM_USER_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx';
  const s3bucket = new AWS.S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME,
    signatureVersion: 'v4',
  });
  const contentType = file.type;
  const contentDisposition = `inline;filename="${file.name}"`;
  const fPath = file.uri;
  const base64 = await fs.readFile(fPath, 'base64');
  const arrayBuffer = decode(base64);
  return new Promise((resolve, reject) => {
    s3bucket.createBucket(() => {
      const params = {
        Bucket: BUCKET_NAME,
        Key: file.name,
        Body: arrayBuffer,
        ContentDisposition: contentDisposition,
        ContentType: contentType,
      };
      s3bucket.upload(params, (error, data) => {
        if (error) {
          reject(getApiError(error)); // getApiError is the poster's own error helper
        } else {
          console.log(JSON.stringify(data));
          resolve(data);
        }
      });
    });
  });
};
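A possible call site, assuming `file` comes from an image-picker result with uri, name, and type fields (the values below are placeholders, not from the original answer):
// Hypothetical usage; the path and metadata are placeholders
uploadFileToS3({
  uri: 'file:///path/to/photo.jpg',
  name: 'photo.jpg',
  type: 'image/jpeg',
})
  .then((data) => console.log('Uploaded to', data.Location))
  .catch((error) => console.log(error));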

Related

aws-sdk signed URL failing on Heroku in production but working in development

I have my code written below; it generates my signed URL perfectly fine in development, and getting and uploading the files I want works locally.
const S3 = require("aws-sdk/clients/s3");
const fs = require("fs");

const s3 = new S3({
  region: process.env.AWS_BUCKET_REGION,
  accessKeyId: process.env.AWS_ACCESS_KEY,
  secretAccessKey: process.env.AWS_SECRET_KEY,
  signatureVersion: "v2",
});

const uploadFile = (file, id, directory) => {
  const fileStream = fs.createReadStream(file.path);
  const uploadParams = {
    Bucket: process.env.AWS_BUCKET_NAME,
    Body: fileStream,
    Key: directory + id,
    MimeType: file.mimetype,
  };
  return s3.upload(uploadParams).promise();
};
exports.uploadFile = uploadFile;

const deleteFile = (id, directory) => {
  const uploadParams = {
    Bucket: process.env.AWS_BUCKET_NAME,
    Key: directory + id,
  };
  return s3.deleteObject(uploadParams).promise();
};
exports.deleteFile = deleteFile;

const getFileStream = ({ key }) => {
  if (key) {
    const downloadParams = {
      Key: key,
      Bucket: process.env.AWS_BUCKET_NAME,
    };
    return s3.getObject(downloadParams).createReadStream();
  }
};
exports.getFileStream = getFileStream;

function generatePreSignedPutUrl({ key, operation }) {
  var params = { Bucket: process.env.AWS_BUCKET_NAME, Key: key, Expires: 60 };
  let x = s3.getSignedUrl(operation, params);
  return x;
}
exports.generatePreSignedPutUrl = generatePreSignedPutUrl;
These are the requests I make from the client:
const getSignedURL = async ({ key, operation }) =>
  client.post(`${endpoint}/get-signed-url`, { key, operation });

const result = await getSignedURL({
  key: directory + "/" + key,
  operation: "getObject",
});
let url = result.data.data.url;
console.log({ url });
and this is the route on my server:
router.post("/get-signed-url", requireKey, async (req, res) => {
  const { key, operation } = req.body;
  try {
    console.log({ b: req.body });
    let url = generatePreSignedPutUrl({ key, operation });
    console.log({ url });
    res.json({ success: true, data: { url } });
  } catch (e) {
    console.log({ e });
    res.status(400).json({ error: "Internal Server Error" });
  }
});
I followed S3's documentation to get this set up, and I am trying to make sure all files can be downloaded and uploaded securely from my application. Does anyone know how I can get this to work in production, where the server is hosted on Heroku and the client is a Next.js site?
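Editorial aside rather than a confirmed diagnosis: the client above is pinned to signatureVersion "v2", and SigV2 is deprecated and rejected by S3 regions launched after January 2014, which can make signed URLs that work against one bucket fail against another. A SigV4 config sketch, reusing the same environment variables:
// Assumption: switching the client to Signature Version 4 resolves the region mismatch
const s3 = new S3({
  region: process.env.AWS_BUCKET_REGION,
  accessKeyId: process.env.AWS_ACCESS_KEY,
  secretAccessKey: process.env.AWS_SECRET_KEY,
  signatureVersion: "v4",
});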

Audio Files Stored in S3 are not Playable

I can successfully upload audio files to AWS S3; however, the files do not play, whether downloaded or played in the browser. The files do have a file size.
I'm uploading an audioBlob object to AWS S3:
const [audioBlob, setAudioBlob] = useState(null)

const submitVoiceMemo = async () => {
  try {
    await fetch('/api/createVoiceMemo', {
      method: 'PUT',
      body: audioBlob
    })
  } catch (error) {
    console.error(error)
  }
}
This is the API Route:
module.exports = requireAuth(async (req, res) => {
  try {
    AWS.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new AWS.S3();
    const uuid = randomUUID()
    const s3Params = {
      Bucket: 'waveforms',
      Key: `voicememo/${uuid}.mp3`,
      Body: req.body,
      ContentType: 'audio/mpeg',
      ACL: 'public-read'
    }
    await s3.upload(s3Params).promise()
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
})
As per this question, I have confirmed that req.body is a string.
The file received by S3 has a file size and the correct ContentType when checked in the S3 console. It just isn't playable when downloaded or played directly in the browser.
Below is how audioBlob and audioFile (an MP3) are generated (basically, the user records a voice memo, and when they click stopRecording, the raw audio is stored in state):
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)
const stopRecording = () => {
  recorder.current
    .stop()
    .getMp3()
    .then(([buffer, blob]) => {
      const file = new File(buffer, 'audio.mp3', {
        type: blob.type,
        lastModified: Date.now()
      })
      setAudioBlob(blob)
      const newBlobUrl = URL.createObjectURL(blob)
      setBlobUrl(newBlobUrl)
      setIsRecording(false)
      setAudioFile(file)
    })
    .catch(e => console.log(e))
}
How can I correctly store audio generated on the front-end, in S3?
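Editorial note, an assumption rather than an answer from the thread: since req.body arrives as a string, Next.js's default body parser has likely already decoded the binary upload. A sketch that disables the parser and buffers the raw bytes before handing them to s3.upload:
// Next.js API route: turn off the default body parser so binary data survives
export const config = { api: { bodyParser: false } };

// Collect the raw request bytes into a Buffer
const readRawBody = (req) =>
  new Promise((resolve, reject) => {
    const chunks = [];
    req.on('data', (chunk) => chunks.push(chunk));
    req.on('end', () => resolve(Buffer.concat(chunks)));
    req.on('error', reject);
  });

// Inside the handler: const body = await readRawBody(req);
// then use `body` (a Buffer) as the S3 upload Body instead of req.body.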

Uploading a Blob held in State to AWS S3

I have implemented a mic-recorder-to-mp3 component in a React/Next.js app which records a voice memo in the browser and saves both the resulting blob and a resulting MP3 file to React state.
I am struggling to upload either the blob or the MP3 file to AWS S3 - the problem is evident in that I cannot parse the req.body string which is received by the API.
Here is some code! This is the function that stores the raw audio as state:
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)
const stopRecording = () => {
  recorder.current
    .stop()
    .getMp3()
    .then(([buffer, blob]) => {
      const file = new File(buffer, 'audio.mp3', {
        type: blob.type,
        lastModified: Date.now()
      })
      setAudioBlob(blob)
      const newBlobUrl = URL.createObjectURL(blob)
      setBlobUrl(newBlobUrl)
      setIsRecording(false)
      setAudioFile(file)
    })
    .catch(e => console.log(e))
}
And this is the function that sends the payload to the API:
const submitVoiceMemo = async () => {
  const filename = encodeURIComponent(audioFile)
  const res = await fetch(`/api/upload-url?file=${filename}`)
  const { url, fields } = await res.json()
  const formData = new FormData()
  Object.entries({ ...fields, audioFile }).forEach(([key, value]) => {
    formData.append(key, value)
  })
  const upload = await fetch(url, {
    method: 'PUT',
    body: formData
  })
  if (upload.ok) {
    console.log('Uploaded successfully!')
  } else {
    console.error('Upload failed.')
  }
}
This is the upload-url API Route:
module.exports = async (req, res) => {
  try {
    aws.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new aws.S3()
    const post = await s3.createPresignedPost({
      Bucket: 'waveforms',
      Key: `voicememo/${req.query.file}`,
      ContentType: 'audio/mpeg',
      ACL: 'public-read',
      Expires: 60
    })
    res.status(200).json(post)
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
}
It currently returns a 400 Bad Request error.
This is an alternative solution, which does upload an MP3 successfully to S3; however, the file does not play when accessed via the S3 console, although it does have a file size.
module.exports = requireAuth(async (req, res) => {
  try {
    AWS.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new AWS.S3();
    const uuid = randomUUID()
    const s3Params = {
      Bucket: 'waveforms',
      Key: `voicememo/${uuid}.mp3`,
      Body: req.body,
      ContentType: 'audio/mp3',
      ACL: 'public-read'
    }
    await s3.upload(s3Params).promise()
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
})
And this is the accompanying fetch request.
const submitVoiceMemo = async () => {
  try {
    await fetch('/api/createVoiceMemo', {
      method: 'PUT',
      body: audioBlob
    })
  } catch (error) {
    console.error(error)
  }
}
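Editorial aside on the presigned-POST branch (an assumption based on how S3 POST policies work, not a confirmed fix from the thread): the browser request must use POST, and the binary payload has to be appended as a field literally named "file", after the signed fields:
// Sketch: submitting a presigned POST correctly
const formData = new FormData();
Object.entries(fields).forEach(([key, value]) => {
  formData.append(key, value); // signed policy fields first
});
formData.append('file', audioFile); // the file itself, named "file", appended last

const upload = await fetch(url, {
  method: 'POST', // presigned POST uploads are POST, not PUT
  body: formData,
});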

Why do I get a blank PDF when uploading to S3?

I upload a PDF blob object to S3:
const params = {
  Bucket: "poster-print-bucket",
  Key: Date.now().toString() + ".pdf",
  Body: blob,
  contentType: "application/pdf",
};
const uploaded = await S3.upload(params).promise();
When I open the URL, e.g. https://poster-print-bucket.s3.ca-central-1.amazonaws.com/1633526785678.pdf, it downloads a blank PDF.
I thought maybe my blob was corrupted or something, but I managed to upload the same blob to Firebase Storage just fine.
By the way, I'm using a Next.js api/upload-poster route.
What's happening?
I spent more time fixing this issue than I would like to admit.
Here is the solution:
Frontend (converting blob to base64 before sending to backend):
function toBase64(blob) {
  const reader = new FileReader();
  return new Promise((res, rej) => {
    reader.readAsDataURL(blob);
    reader.onload = function () {
      res(reader.result);
    };
  });
}

toBase64(currentBlob)
  .then((blob) => {
    return axios
      .post("/api/upload-poster", blob, {
        headers: {
          "Content-Type": "application/pdf",
        },
      })
      .then(({ data }) => data.uploaded.Location);
  })
Backend:
const base64 = req.body;
const base64Data = Buffer.from(base64.replace(/^data:application\/\w+;base64,/, ""), "base64");
const params = {
  Bucket: "poster-print-bucket",
  Key: nanoid() + ".pdf",
  Body: base64Data,
  ContentEncoding: "base64",
  ContentType: "application/pdf", // note: must be capitalized; a lowercase "contentType" key is silently ignored by the SDK
};
const uploaded = await S3.upload(params).promise();
Why is all this song and dance required? Can it be done more simply?
Using the AWS SDK v3 (current at the time of this post), you could use PutObjectCommand, which accepts a Uint8Array as the Body param (docs).
Convert your Blob instance to an ArrayBuffer (docs), and the ArrayBuffer to a Uint8Array.
The code would look like:
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');

const client = new S3Client(/* config */);
const arrayBuffer = await blob.arrayBuffer();
const typedArray = new Uint8Array(arrayBuffer);
await client.send(new PutObjectCommand({
  Bucket: /* ... */,
  Key: /* ... */,
  Body: typedArray,
}));

Amazon S3 Remote File Upload with Axios

I am trying to write a function that would:
Take a remote URL as a parameter,
Get the file using axios,
Upload the stream to Amazon S3,
And finally, return the uploaded URL.
I found help here on Stack Overflow. So far, I have this:
/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  s3.upload(params, function(err, data) {
    if (!err) {
      return data.Location;
    } else {
      console.log(err, data);
    }
  });
  return pass;
}
/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  }).then((response) => {
    if (response.status === 200) {
      const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
      const content_type = response.headers['content-type'];
      response.data.pipe(uploadFromStream(file_name, content_type));
    }
  });
}
But uploadRemoteFileToS3 does not return anything (because it's an asynchronous function). How can I get the uploaded URL?
UPDATE
I have further improved upon the code and written a class. Here is what I have now:
const config = require('../config.json');
const stream = require('stream');
const axios = require('axios');
const AWS = require('aws-sdk');

class S3RemoteUploader {
  constructor(remoteAddr) {
    this.remoteAddr = remoteAddr;
    this.stream = stream;
    this.axios = axios;
    this.config = config;
    this.AWS = AWS;
    this.AWS.config.update({
      accessKeyId: this.config.api_key,
      secretAccessKey: this.config.api_secret
    });
    this.spacesEndpoint = new this.AWS.Endpoint(this.config.endpoint);
    this.s3 = new this.AWS.S3({ endpoint: this.spacesEndpoint });
    this.file_name = this.remoteAddr.substring(this.remoteAddr.lastIndexOf('/') + 1);
    this.obj_key = this.config.subfolder + '/' + this.file_name;
    this.content_type = 'application/octet-stream';
    this.uploadStream();
  }
  uploadStream() {
    const pass = new this.stream.PassThrough();
    this.promise = this.s3.upload({
      Bucket: this.config.bucket,
      Key: this.obj_key,
      ACL: this.config.acl,
      Body: pass,
      ContentType: this.content_type
    }).promise();
    return pass;
  }
  initiateAxiosCall() {
    axios({
      method: 'get',
      url: this.remoteAddr,
      responseType: 'stream'
    }).then((response) => {
      if (response.status === 200) {
        this.content_type = response.headers['content-type'];
        response.data.pipe(this.uploadStream());
      }
    });
  }
  dispatch() {
    this.initiateAxiosCall();
  }
  async finish() {
    //console.log(this.promise); /* returns Promise { Pending } */
    return this.promise.then((r) => {
      console.log(r.Location);
      return r.Location;
    }).catch((e) => {
      console.log(e);
    });
  }
  run() {
    this.dispatch();
    this.finish();
  }
}
But I still have no clue how to catch the result when the promise is resolved. So far, I have tried these:
testUpload = new S3RemoteUploader('https://avatars2.githubusercontent.com/u/41177');
testUpload.run();
//console.log(testUpload.promise); /* Returns Promise { Pending } */
testUpload.promise.then(r => console.log); // does nothing
But none of the above works. I have a feeling I am missing something very subtle. Any clue, anyone?
After the upload you can call the getSignedUrl function in the S3 SDK to get the URL; you can also specify the expiry of the URL there. You need to pass the key to that function.
To generate a simple pre-signed URL that allows any user to view the contents of a private object in a bucket you own, you can use the following call to getSignedUrl():
var s3 = new AWS.S3();
var params = { Bucket: 'myBucket', Key: 'myKey' };
s3.getSignedUrl('getObject', params, function (err, url) {
  console.log("The URL is", url);
});
Official documentation link
http://docs.amazonaws.cn/en_us/AWSJavaScriptSDK/guide/node-examples.html
The code should look something like this:
function uploadFileToS3AndGenerateUrl(cb) {
  // I have generated the stream from a file; using this since it is what you used. It must be a valid stream.
  const pass = new stream.PassThrough();
  var params = {
    Bucket: "your-bucket", // required
    Key: key, // required
    Body: pass,
    ContentType: 'your content type',
  };
  s3.upload(params, function(s3Err, data) {
    if (s3Err) {
      return cb(s3Err);
    }
    console.log(`File uploaded successfully at ${data.Location}`);
    const params = {
      Bucket: 'your-bucket',
      Key: data.key,
      Expires: 180
    };
    s3.getSignedUrl('getObject', params, (urlErr, urlData) => {
      if (urlErr) {
        console.log('There was an error getting your files: ' + urlErr);
        cb(urlErr);
      } else {
        console.log(`url: ${urlData}`);
        cb(null, urlData);
      }
    });
  });
}
Please check below; I have updated your code, it might help you.
/*
 * Method to upload remote file to s3
 */
const uploadRemoteFileToS3 = async (remoteAddr) => {
  const response = await axios({
    method: 'get',
    url: remoteAddr,
    responseType: 'stream'
  });
  if (response.status !== 200) {
    throw new Error('Unexpected response status: ' + response.status);
  }
  const file_name = remoteAddr.substring(remoteAddr.lastIndexOf('/') + 1);
  const content_type = response.headers['content-type'];
  // Pipe the download stream into the upload stream, then wait for S3 to finish
  const { pass, promise } = uploadFromStream(file_name, content_type);
  response.data.pipe(pass);
  return promise; // resolves with the uploaded object's URL
};

/*
 * Method to pipe the stream
 */
const uploadFromStream = (file_name, content_type) => {
  const pass = new stream.PassThrough();
  const obj_key = generateObjKey(file_name);
  const params = { Bucket: config.bucket, ACL: config.acl, Key: obj_key, ContentType: content_type, Body: pass };
  // Return both the writable stream and a promise that resolves when the upload completes
  const promise = s3.upload(params).promise().then((data) => data.Location);
  return { pass, promise };
};

// call uploadRemoteFileToS3
uploadRemoteFileToS3(remoteAddr)
  .then((finalResponse) => {
    console.log(finalResponse);
  })
  .catch((err) => {
    console.log(err);
  });
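If moving off aws-sdk v2 is an option, the v3 @aws-sdk/lib-storage Upload helper accepts a stream Body directly; a minimal sketch under that assumption (the function name is hypothetical, and the bucket reuses the config above):
// Sketch: the same streaming upload with AWS SDK v3
const { S3Client } = require('@aws-sdk/client-s3');
const { Upload } = require('@aws-sdk/lib-storage');

const uploadStreamV3 = async (body, key, contentType) => {
  const upload = new Upload({
    client: new S3Client({}),
    params: { Bucket: config.bucket, Key: key, Body: body, ContentType: contentType },
  });
  const result = await upload.done(); // resolves when the upload completes
  return result.Location;
};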
