Audio Files Stored in S3 are not Playable - javascript

I can successfully upload audiofiles to AWS S3, however the files do not play whether downloaded or played in the browser. The files do have a filesize.
I'm uploading an audioBlob object to AWS S3:
const [audioBlob, setAudioBlob] = useState(null)
const submitVoiceMemo = async () => {
try {
await fetch('/api/createVoiceMemo', {
method: 'PUT',
body: audioBlob
})
} catch (error) {
console.error(error)
}
}
This is the API Route:
module.exports = requireAuth(async (req, res) => {
try {
AWS.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_1,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
region: 'us-east-2',
signatureVersion: 'v4'
})
const s3 = new AWS.S3();
const uuid = randomUUID()
const s3Params = {
Bucket: 'waveforms',
Key: `voicememo/${uuid}.mp3`,
Body: req.body,
ContentType: 'audio/mpeg',
ACL: 'public-read'
}
await s3.upload(s3Params).promise()
} catch (e) {
res.status(500).json({ error: e.message })
}
})
As per this question, I have confirmed that req.body is a string.
The file being received by S3 has a filesize and the correct contentType when checked in the S3 console. It just isn't playable when downloaded or played directly in the browser.
Below is how audioBlob and also audioFile (an MP3) are generated (Basically the user records a voice memo and when they click stopRecording, raw audio is stored as state):
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)
const stopRecording = () => {
recorder.current
.stop()
.getMp3()
.then(([buffer, blob]) => {
const file = new File(buffer, 'audio.mp3', {
type: blob.type,
lastModified: Date.now()
})
setAudioBlob(blob)
const newBlobUrl = URL.createObjectURL(blob)
setBlobUrl(newBlobUrl)
setIsRecording(false)
setAudioFile(file)
})
.catch(e => console.log(e))
}
How can I correctly store audio generated on the front-end, in S3?

Related

Uploading audio file buffer fails when uploading to aws s3

Here is my frontend code:
let audioFile = require("assets/hello.wav");
let blob = new Blob([audioFile], { type: "audio/wav" });
try {
await customFetch(`${API_URL}/new-audio-message`, {
method: "POST",
body: JSON.stringify({
audio: blob,
cloneId: cloneId,
}),
});
} catch (error) {
console.log(error);
}
Here is is how I upload the file to s3:
const { audio } = JSON.parse(event.body);
const fileKey = `${sub}/${cloneId}/audio/${uuidv4()}.wav`;
const buffer = Buffer.from(JSON.stringify(audio));
try {
await s3
.putObject({
Bucket: PUBLIC_BUCKET,
Key: fileKey,
Body: buffer,
})
.promise();
} catch (err) {
console.error(err);
}
The file uploads to s3 but the file size for every audio file is 155 B irrespective of the length of the audio file.
The issue seems to be that the audio file is not being properly converted to a buffer before being sent to S3. The line const buffer = Buffer.from(JSON.stringify(audio)) is attempting to convert the audio object to a string and then create a buffer from that string. However, this is not the correct way to convert a Blob object to a buffer.
Updated frontend code
let audioFile = require("assets/hello.wav");
let blob = new Blob([audioFile], { type: "audio/wav" });
const reader = new FileReader();
reader.readAsArrayBuffer(blob);
reader.onloadend = async () => {
const buffer = Buffer.from(reader.result);
try {
await customFetch(`${API_URL}/new-audio-message`, {
method: "POST",
body: JSON.stringify({
audio: buffer,
cloneId: cloneId,
}),
});
} catch (error) {
console.log(error);
}
};
Updated backend code
const { audio } = JSON.parse(event.body);
const fileKey = `${sub}/${cloneId}/audio/${uuidv4()}.wav`;
try {
await s3
.putObject({
Bucket: PUBLIC_BUCKET,
Key: fileKey,
Body: audio,
})
.promise();
} catch (err) {
console.error(err);
}

aws-sdk Signed URL Failing on Heroku during production but works in development

I have my code written below, and all of this generates my signed URL perfectly fine when on development and the files that I want to get and upload locally work.
const S3 = require("aws-sdk/clients/s3");
const fs = require("fs");
const s3 = new S3({
region: process.env.AWS_BUCKET_REGION,
accessKeyId: process.env.AWS_ACCESS_KEY,
secretAccessKey: process.env.AWS_SECRET_KEY,
signatureVersion: "v2",
});
const uploadFile = (file, id, directory) => {
const fileStream = fs.createReadStream(file.path);
const uploadParams = {
Bucket: process.env.AWS_BUCKET_NAME,
Body: fileStream,
Key: directory + id,
MimeType: file.mimetype,
};
return s3.upload(uploadParams).promise();
};
exports.uploadFile = uploadFile;
const deleteFile = (id, directory) => {
const uploadParams = {
Bucket: process.env.AWS_BUCKET_NAME,
Key: directory + id,
};
return s3.deleteObject(uploadParams).promise();
};
exports.deleteFile = deleteFile;
const getFileStream = ({ key }) => {
if (key) {
const downloadParams = {
Key: key,
Bucket: process.env.AWS_BUCKET_NAME,
};
return s3.getObject(downloadParams).createReadStream();
}
};
exports.getFileStream = getFileStream;
function generatePreSignedPutUrl({ key, operation }) {
var params = { Bucket: process.env.AWS_BUCKET_NAME, Key: key, Expires: 60 };
let x = s3.getSignedUrl(operation, params);
return x;
}
exports.generatePreSignedPutUrl = generatePreSignedPutUrl;
These are the requests I make from the client
const getSignedURL = async ({ key, operation }) =>
client.post(`${endpoint}/get-signed-url`, { key, operation });
const result = await getSignedURL({
key: directory + "/" + key,
operation: "getObject",
});
let url = result.data.data.url;
console.log({ url });
and this is the route on my server.
router.post("/get-signed-url", requireKey, async (req, res) => {
const { key, operation } = req.body;
try {
console.log({ b: req.body });
let url = generatePreSignedPutUrl({ key, operation });
console.log({ url });
res.json({ success: true, data: { url } });
} catch (e) {
console.log({ e });
res.status(400).json({ error: "Internal Server Error" });
}
});
I followed all of S3's documentation to get this set up and I am trying to make sure all files can be downloaded and uploaded securely from my application. Does anyone know how I can get this to work in production when the server is hosted in Heroku and the client is a nextjs site?

Uploading a Blob held in State to AWS S3

I have implemented a mic-recorder-to-mp3 component in a ReactJS NextJS app which stores a voice-memo recorded in the browser by the user and saves the resulting blob to React state, as well as a resulting MP3 to state as well.
I am struggling to upload either the blob or the MP3 file to AWS S3 - the problem is evident in that I cannot parse the req.body string which is received by the API.
Here is some code! This is the function that stores the raw audio as state:
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)
const stopRecording = () => {
recorder.current
.stop()
.getMp3()
.then(([buffer, blob]) => {
const file = new File(buffer, 'audio.mp3', {
type: blob.type,
lastModified: Date.now()
})
setAudioBlob(blob)
const newBlobUrl = URL.createObjectURL(blob)
setBlobUrl(newBlobUrl)
setIsRecording(false)
setAudioFile(file)
})
.catch(e => console.log(e))
}
And this is the function that sends the payload to the API:
const submitVoiceMemo = async () => {
const filename = encodeURIComponent(audioFile)
const res = await fetch(`/api/upload-url?file=${filename}`)
const { url, fields } = await res.json()
const formData = new FormData()
Object.entries({ ...fields, audioFile }).forEach(([key, value]) => {
formData.append(key, value)
})
const upload = await fetch(url, {
method: 'PUT',
body: formData
})
if (upload.ok) {
console.log('Uploaded successfully!')
} else {
console.error('Upload failed.')
console.error()
}
}
This is the upload-url API Route:
module.exports = async (req, res) => {
try {
aws.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_1,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
region: 'us-east-2',
signatureVersion: 'v4'
})
const s3 = new aws.S3()
const post = await s3.createPresignedPost({
Bucket: 'waveforms',
Key: `voicememo/${req.query.file}`,
ContentType: 'audio/mpeg',
ACL: 'public-read',
Expires: 60
})
res.status(200).json(post
} catch (e) {
res.status(500).json({ error: e.message })
}
}
It currently returns a 400 Bad Request error.
This is an alternative solution, which does upload an MP3 successfully to S3, however the file is not playing when accessed via the S3 console, although it does have a filesize.
module.exports = requireAuth(async (req, res) => {
try {
AWS.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_1,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
region: 'us-east-2',
signatureVersion: 'v4'
})
const s3 = new AWS.S3();
const uuid = randomUUID()
const s3Params = {
Bucket: 'waveforms',
Key: `voicememo/${uuid}.mp3`,
Body: req.body,
ContentType: 'audio/mp3',
ACL: 'public-read'
}
await s3.upload(s3Params).promise()
} catch (e) {
res.status(500).json({ error: e.message })
}
})
And this is the accompanying fetch request.
const submitVoiceMemo = async () => {
try {
await fetch('/api/createVoiceMemo', {
method: 'PUT',
body: audioBlob
})
} catch (error) {
console.error(error)
}
}

Uploading image to Firebase using React, Axios & Busboy

Having a hell of a time with this one.
I am building a full stack app very similar to the social-ape project on the YouTube channel Classsed. I have a React component grabbing a file from an input, using axios to post it to my server.
React submit handler:
handleSubmit = (event) => {
const image = event.target.files[0];
const formData = new FormData();
formData.append("image", image, image.name);
axios
.post(
`http://localhost:5000/album/${this.state.id}/editAlbumCover`,
formData
)
.then((res) => console.log(res.data))
.catch((err) => console.log(err));
};
Express Route
exports.editAlbumCover = (req, res) => {
const BusBoy = require("busboy");
const path = require("path");
const os = require("os");
const fs = require("fs");
const busboy = new BusBoy({ headers: req.headers });
console.log("Busboy.init");
let imageFileName;
let imageToBeUploaded = {};
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
console.log("Busboy.on");
if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
return res
.status(400)
.json({ error: "Wrong file type. Please upload a JPEG or PNG file." });
}
const imageExtension = filename.split(".")[filename.split(".").length - 1];
imageFileName = `${Math.round(
Math.random() * 1000000000
)}.${imageExtension}`;
const filepath = path.join(os.tmpdir(), imageFileName);
imageToBeUploaded = { filepath, mimetype };
file.pipe(fs.createWriteStream(filepath));
});
busboy.on("finish", () => {
console.log("Busboy.finish");
admin
.storage()
.bucket(config.storageBucket)
.upload(imageToBeUploaded.filepath, {
resumable: false,
metadata: {
metadata: {
contentType: imageToBeUploaded.mimetype,
},
},
})
.then(() => {
const albumCover = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageFileName}?alt=media`;
return db.doc(`/albums/${req.params.id}`).update({ albumCover });
})
.then(() => {
return res.json({ message: "Image uploaded successfully" });
})
.catch((err) => {
console.log(err);
return res.status(500).json({ error: err.code });
});
});
The FormData.append in the submit handler works because I can use .forEach to log the single entry in there before posting with Axios.
The express route is not triggering the busboy.on("file") callback, but everything else it triggered. Upon submission I get this error in my server console:
TypeError [ERR_INVALID_ARG_TYPE]: The "path" argument must be of type string. Received type undefined
Console logging ImageToBeUploaded shows that nothing ever gets added to that object. If I use axios to post some random string to the same route I can log it out from the req. The code is almost identical to the project I coded along to on YouTube so I'm really not sure why busboy.on isn't being triggered.

react-native through upload image on s3 Bucket using aws-sdk

I am using aws-sdk for upload image on the s3 bucket. Please look at my code below I already spend one day in it.
uploadImageOnS3 = () => {
var S3 = require("aws-sdk/clients/s3");
const BUCKET_NAME = "testtest";
const IAM_USER_KEY = "XXXXXXXXXXXXX";
const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
const s3bucket = new S3({
accessKeyId: IAM_USER_KEY,
secretAccessKey: IAM_USER_SECRET,
Bucket: BUCKET_NAME
});
let contentType = "image/jpeg";
let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
let file= {
uri: this.state.fileObj.uri,
type: this.state.fileObj.type,
name: this.state.fileObj.fileName
};
s3bucket.createBucket(() => {
const params = {
Bucket: BUCKET_NAME,
Key: this.state.s3BucketObj,
Body: file,
ContentDisposition: contentDeposition,
ContentType: contentType
};
s3bucket.upload(params, (err, data) => {
if (err) {
console.log("error in callback");
console.log(err);
}
// console.log('success');
console.log(data);
});
});
};
Error:
Unsupported body payload object
Please help me to short out I am also using react-native-image-picker for image upload.
You have to use array buffer in body stream to pass data object.
As per the aws documentation you can pass data stream, string, array buffer or blob data type in body parameter.
Please check below code, which will resolve your issue,
import fs from "react-native-fs";
import { decode } from "base64-arraybuffer";
uploadImageOnS3 = async() => {
var S3 = require("aws-sdk/clients/s3");
const BUCKET_NAME = "testtest";
const IAM_USER_KEY = "XXXXXXXXXXXXX";
const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";
const s3bucket = new S3({
accessKeyId: IAM_USER_KEY,
secretAccessKey: IAM_USER_SECRET,
Bucket: BUCKET_NAME,
signatureVersion: "v4"
});
let contentType = "image/jpeg";
let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
const fPath = this.state.fileObj.uri;
const base64 = await fs.readFile(fPath, "base64");
//console.log(base64);
const arrayBuffer = decode(base64);
//console.log(arrayBuffer);
s3bucket.createBucket(() => {
const params = {
Bucket: BUCKET_NAME,
Key: this.state.s3BucketObj,
Body: arrayBuffer,
ContentDisposition: contentDeposition,
ContentType: contentType
};
s3bucket.upload(params, (err, data) => {
if (err) {
console.log("error in callback");
console.log(err);
}
// console.log('success');
console.log(data);
});
});
};
You can check out the React Native AWS amplify documentation for the proper process. In the documentation, it is mentioned that you can pass data stream, string, array buffer, or blob data type in body parameter.
import AWS from 'aws-sdk';
import fs from 'react-native-fs';
import {decode} from 'base64-arraybuffer';
export const uploadFileToS3 = async (file) => {
const BUCKET_NAME = 'xxxxx';
const IAM_USER_KEY = 'xxxxxxxxxxxxxxxxx';
const IAM_USER_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx';
const s3bucket = new AWS.S3({
accessKeyId: IAM_USER_KEY,
secretAccessKey: IAM_USER_SECRET,
Bucket: BUCKET_NAME,
signatureVersion: 'v4',
});
const contentType = file.type;
const contentDeposition = `inline;filename="${file.name}"`;
const fPath = file.uri;
const base64 = await fs.readFile(fPath, 'base64');
const arrayBuffer = decode(base64);
return new Promise((resolve, reject) => {
s3bucket.createBucket(() => {
const params = {
Bucket: BUCKET_NAME,
Key: file.name,
Body: arrayBuffer,
ContentDisposition: contentDeposition,
ContentType: contentType,
};
s3bucket.upload(params, (error, data) => {
if (error) {
reject(getApiError(error));
} else {
console.log(JSON.stringify(data));
resolve(data);
}
});
});
});
}

Categories

Resources