Uploading an audio file buffer to AWS S3 fails - JavaScript

Here is my frontend code:
let audioFile = require("assets/hello.wav");
let blob = new Blob([audioFile], { type: "audio/wav" });

try {
  await customFetch(`${API_URL}/new-audio-message`, {
    method: "POST",
    body: JSON.stringify({
      audio: blob,
      cloneId: cloneId,
    }),
  });
} catch (error) {
  console.log(error);
}
Here is how I upload the file to S3:
const { audio } = JSON.parse(event.body);
const fileKey = `${sub}/${cloneId}/audio/${uuidv4()}.wav`;
const buffer = Buffer.from(JSON.stringify(audio));

try {
  await s3
    .putObject({
      Bucket: PUBLIC_BUCKET,
      Key: fileKey,
      Body: buffer,
    })
    .promise();
} catch (err) {
  console.error(err);
}
The file uploads to S3, but the file size of every uploaded audio file is 155 B, irrespective of the length of the audio.

The issue is that the audio data is never properly converted to a buffer before it reaches S3. On the frontend, JSON.stringify cannot serialize a Blob (it serializes to {}), so the request body never contains the audio bytes. On the backend, const buffer = Buffer.from(JSON.stringify(audio)) then just builds a buffer from that tiny JSON string rather than from the audio data, which is why every uploaded object ends up the same small size.
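A quick way to see the frontend half of the problem (a minimal check to run in the browser console; the Blob contents are only illustrative):

const blob = new Blob(["some audio bytes"], { type: "audio/wav" });
console.log(JSON.stringify(blob));            // "{}" - a Blob has no enumerable properties
console.log(JSON.stringify({ audio: blob })); // '{"audio":{}}' - same tiny payload regardless of Blob size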
Updated frontend code
let audioFile = require("assets/hello.wav");
let blob = new Blob([audioFile], { type: "audio/wav" });

// Read the Blob's bytes as an ArrayBuffer, then wrap them in a Buffer
// (this assumes a Buffer polyfill is available in the browser bundle).
const reader = new FileReader();
reader.readAsArrayBuffer(blob);
reader.onloadend = async () => {
  const buffer = Buffer.from(reader.result);
  try {
    await customFetch(`${API_URL}/new-audio-message`, {
      method: "POST",
      body: JSON.stringify({
        audio: buffer,
        cloneId: cloneId,
      }),
    });
  } catch (error) {
    console.log(error);
  }
};
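Note that JSON.stringify does not send a Buffer as raw bytes either; both Node's Buffer and the browser polyfill serialize themselves through toJSON into an object wrapper, which the backend then has to unwrap:

const buffer = Buffer.from([1, 2, 3]);
console.log(JSON.stringify({ audio: buffer }));
// {"audio":{"type":"Buffer","data":[1,2,3]}}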
Updated backend code
const { audio } = JSON.parse(event.body);
const fileKey = `${sub}/${cloneId}/audio/${uuidv4()}.wav`;

// After the JSON round trip, audio is { type: "Buffer", data: [...] },
// so rebuild a real Buffer from the byte array before handing it to S3.
const buffer = Buffer.from(audio.data);

try {
  await s3
    .putObject({
      Bucket: PUBLIC_BUCKET,
      Key: fileKey,
      Body: buffer,
    })
    .promise();
} catch (err) {
  console.error(err);
}
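A common alternative, sketched below under the same handler shape and with the field names reused from the code above (everything else is an assumption), is to send the audio as a base64 string; the JSON payload stays plain text and the backend decodes it in one step:

// Frontend: read the Blob as a data URL and strip the "data:audio/wav;base64," prefix
const reader = new FileReader();
reader.readAsDataURL(blob);
reader.onloadend = async () => {
  const base64Audio = reader.result.split(",")[1];
  await customFetch(`${API_URL}/new-audio-message`, {
    method: "POST",
    body: JSON.stringify({ audio: base64Audio, cloneId }),
  });
};

// Backend: decode the base64 string straight into a Buffer
const { audio } = JSON.parse(event.body);
const body = Buffer.from(audio, "base64");
await s3.putObject({ Bucket: PUBLIC_BUCKET, Key: fileKey, Body: body }).promise();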

Related

Audio Files Stored in S3 are not Playable

I can successfully upload audio files to AWS S3; however, the files do not play, whether downloaded or played in the browser. The files do have a file size.
I'm uploading an audioBlob object to AWS S3:
const [audioBlob, setAudioBlob] = useState(null)

const submitVoiceMemo = async () => {
  try {
    await fetch('/api/createVoiceMemo', {
      method: 'PUT',
      body: audioBlob
    })
  } catch (error) {
    console.error(error)
  }
}
This is the API Route:
module.exports = requireAuth(async (req, res) => {
  try {
    AWS.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new AWS.S3();
    const uuid = randomUUID()
    const s3Params = {
      Bucket: 'waveforms',
      Key: `voicememo/${uuid}.mp3`,
      Body: req.body,
      ContentType: 'audio/mpeg',
      ACL: 'public-read'
    }
    await s3.upload(s3Params).promise()
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
})
As per this question, I have confirmed that req.body is a string.
The file received by S3 has a file size and the correct ContentType when checked in the S3 console. It just isn't playable when downloaded or played directly in the browser.
Below is how audioBlob and audioFile (an MP3) are generated (basically, the user records a voice memo, and when they click stopRecording, the raw audio is stored as state):
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)

const stopRecording = () => {
  recorder.current
    .stop()
    .getMp3()
    .then(([buffer, blob]) => {
      const file = new File(buffer, 'audio.mp3', {
        type: blob.type,
        lastModified: Date.now()
      })
      setAudioBlob(blob)
      const newBlobUrl = URL.createObjectURL(blob)
      setBlobUrl(newBlobUrl)
      setIsRecording(false)
      setAudioFile(file)
    })
    .catch(e => console.log(e))
}
How can I correctly store audio generated on the front-end, in S3?
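One likely culprit, assuming this is a Next.js API route: the built-in body parser turns the binary request body into a string, which corrupts the audio bytes before they reach S3. Below is a minimal sketch of reading the raw body instead; requireAuth, the AWS configuration, and randomUUID are reused from the question's code, and the rest is an assumption rather than a confirmed fix:

// pages/api/createVoiceMemo.js (hypothetical file name, ES module style)
// Disable Next.js's built-in body parsing so the raw bytes are preserved
export const config = { api: { bodyParser: false } }

export default requireAuth(async (req, res) => {
  // Collect the raw request stream into a Buffer instead of using req.body
  const chunks = []
  for await (const chunk of req) {
    chunks.push(chunk)
  }
  const rawBody = Buffer.concat(chunks)

  const s3 = new AWS.S3()
  await s3.upload({
    Bucket: 'waveforms',
    Key: `voicememo/${randomUUID()}.mp3`,
    Body: rawBody,
    ContentType: 'audio/mpeg'
  }).promise()

  res.status(200).json({ ok: true })
})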

Uploading a Blob held in State to AWS S3

I have implemented a mic-recorder-to-mp3 component in a React/Next.js app that records a voice memo in the browser and saves both the resulting blob and the resulting MP3 File to React state.
I am struggling to upload either the blob or the MP3 file to AWS S3; the problem is that I cannot parse the req.body string received by the API.
Here is some code! This is the function that stores the raw audio as state:
const [audioBlob, setAudioBlob] = useState(null)
const [blobURL, setBlobUrl] = useState(null)
const [audioFile, setAudioFile] = useState(null)

const stopRecording = () => {
  recorder.current
    .stop()
    .getMp3()
    .then(([buffer, blob]) => {
      const file = new File(buffer, 'audio.mp3', {
        type: blob.type,
        lastModified: Date.now()
      })
      setAudioBlob(blob)
      const newBlobUrl = URL.createObjectURL(blob)
      setBlobUrl(newBlobUrl)
      setIsRecording(false)
      setAudioFile(file)
    })
    .catch(e => console.log(e))
}
And this is the function that sends the payload to the API:
const submitVoiceMemo = async () => {
  const filename = encodeURIComponent(audioFile)
  const res = await fetch(`/api/upload-url?file=${filename}`)
  const { url, fields } = await res.json()

  const formData = new FormData()
  Object.entries({ ...fields, audioFile }).forEach(([key, value]) => {
    formData.append(key, value)
  })

  const upload = await fetch(url, {
    method: 'PUT',
    body: formData
  })

  if (upload.ok) {
    console.log('Uploaded successfully!')
  } else {
    console.error('Upload failed.')
    console.error()
  }
}
This is the upload-url API Route:
module.exports = async (req, res) => {
  try {
    aws.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new aws.S3()
    const post = await s3.createPresignedPost({
      Bucket: 'waveforms',
      Key: `voicememo/${req.query.file}`,
      ContentType: 'audio/mpeg',
      ACL: 'public-read',
      Expires: 60
    })
    res.status(200).json(post)
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
}
It currently returns a 400 Bad Request error.
This is an alternative solution, which does upload an MP3 to S3 successfully; however, the file does not play when accessed via the S3 console, although it does have a file size.
module.exports = requireAuth(async (req, res) => {
  try {
    AWS.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_1,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY_ID,
      region: 'us-east-2',
      signatureVersion: 'v4'
    })
    const s3 = new AWS.S3();
    const uuid = randomUUID()
    const s3Params = {
      Bucket: 'waveforms',
      Key: `voicememo/${uuid}.mp3`,
      Body: req.body,
      ContentType: 'audio/mp3',
      ACL: 'public-read'
    }
    await s3.upload(s3Params).promise()
  } catch (e) {
    res.status(500).json({ error: e.message })
  }
})
And this is the accompanying fetch request.
const submitVoiceMemo = async () => {
  try {
    await fetch('/api/createVoiceMemo', {
      method: 'PUT',
      body: audioBlob
    })
  } catch (error) {
    console.error(error)
  }
}
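One hedged observation on the 400 error, offered as a sketch rather than a confirmed fix: an S3 presigned POST expects an HTTP POST (not PUT), the file itself must be appended to the FormData under the key "file" and after the signed fields, and with createPresignedPost the object key and content type are usually declared through Fields. A minimal sketch reusing the names from the question:

// Client side: POST the signed fields first, then the file under the "file" key
const submitVoiceMemoPresigned = async () => {
  const res = await fetch(`/api/upload-url?file=${encodeURIComponent(audioFile.name)}`)
  const { url, fields } = await res.json()

  const formData = new FormData()
  Object.entries(fields).forEach(([key, value]) => formData.append(key, value))
  formData.append('file', audioFile) // the file field must come last

  const upload = await fetch(url, { method: 'POST', body: formData })
  if (!upload.ok) {
    console.error('Upload failed.')
  }
}

// Server side: declare the object key and content type as form fields
const post = s3.createPresignedPost({
  Bucket: 'waveforms',
  Expires: 60,
  Fields: {
    key: `voicememo/${req.query.file}`,
    'Content-Type': 'audio/mpeg'
  }
})
res.status(200).json(post)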

Why do I get a blank PDF when uploading to S3?

I upload a PDF blob object to S3:
const params = {
  Bucket: "poster-print-bucket",
  Key: Date.now().toString() + ".pdf",
  Body: blob,
  contentType: "application/pdf",
};

const uploaded = await S3.upload(params).promise();
When I open the URL, e.g. https://poster-print-bucket.s3.ca-central-1.amazonaws.com/1633526785678.pdf, it downloads a blank PDF.
I thought maybe my blob was corrupted or something, but I managed to upload the same blob to Firebase Storage just fine.
By the way, I'm using a Next.js api/upload-poster route.
What's happening?
I spent more time fixing this issue than I would like to admit.
Here is the solution:
Frontend (converting blob to base64 before sending to backend):
function toBase64(blob) {
  const reader = new FileReader();
  return new Promise((res, rej) => {
    reader.readAsDataURL(blob);
    reader.onload = function () {
      res(reader.result);
    };
    reader.onerror = rej; // surface read errors instead of leaving the promise hanging
  });
}

toBase64(currentBlob)
  .then((base64) => {
    // base64 is a data-URL string ("data:application/pdf;base64,...")
    return axios
      .post("/api/upload-poster", base64, {
        headers: {
          "Content-Type": "application/pdf",
        },
      })
      .then(({ data }) => data.uploaded.Location);
  })
Backend:
const base64 = req.body;
const base64Data = Buffer.from(base64.replace(/^data:application\/\w+;base64,/, ""), "base64");
const params = {
Bucket: "poster-print-bucket",
Key: nanoid() + ".pdf",
Body: base64Data,
ContentEncoding: "base64",
contentType: "application/pdf",
};
const uploaded = await S3.upload(params).promise();
Why is all this song and dance required? Can it be something easier?
Using the AWS SDK v3 (up to date at the time of this post), you could use PutObjectCommand, which accepts a Uint8Array as the Body param (docs).
Convert your Blob instance to an ArrayBuffer (docs), and the ArrayBuffer to a Uint8Array.
The code would look like:
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');

const client = new S3Client(/* config */);

const arrayBuffer = await blob.arrayBuffer();
const typedArray = new Uint8Array(arrayBuffer);

await client.send(new PutObjectCommand({
  Bucket: /* ... */,
  Key: /* ... */,
  Body: typedArray,
}));
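As for whether it can be easier: a common pattern, offered here as a suggestion rather than part of the original answer, is to skip the base64 round trip entirely by having the API hand back a presigned PUT URL and letting the browser upload the Blob directly. With SDK v3 this uses @aws-sdk/s3-request-presigner; the bucket and key below are placeholders.

// Server: generate a short-lived presigned PUT URL
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');

const client = new S3Client(/* config */);
const command = new PutObjectCommand({
  Bucket: 'poster-print-bucket', // placeholder
  Key: `${Date.now()}.pdf`,      // placeholder
  ContentType: 'application/pdf',
});
const signedUrl = await getSignedUrl(client, command, { expiresIn: 60 });

// Client: PUT the Blob straight to S3, no base64 and no body-parser limits
await fetch(signedUrl, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/pdf' },
  body: blob,
});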

Azure Function won't create the file on remote

I want to download the file locally, then create a stream from it and send it to an API.
On localhost the file gets created via blobClient.downloadToFile(defaultFile);
but when I deploy the function, it cannot find the file to stream, so I think the download does not happen, or happens in the wrong location.
I get this error:
[Error: ENOENT: no such file or directory, open 'D:\home\site\wwwroot\importPbix\exampleName.pbix'
Here's my code:
const blobServiceClient = BlobServiceClient.fromConnectionString(
  process.env.CONNEXION_STRING
);
const containerClient = blobServiceClient.getContainerClient(
  params.containerName
);
const blobClient = containerClient.getBlobClient(process.env.FILE_LOCATION); // get file from storage
let blobData;
var defaultFile = path.join(params.baseDir, `${params.reportName}.pbix`); // use path module
let stream;

try {
  blobData = await blobClient.downloadToFile(defaultFile);
  console.log(blobData);
  stream = fs.createReadStream(defaultFile);
} catch (error) {
  params.context.log(error);
  console.log(error);
}
var options = {
  method: "POST",
  url: `https://api.powerbi.com/v1.0/myorg/groups/${params.groupId}/imports?datasetDisplayName=${params.reportName}`,
  headers: {
    "Content-Type": "multipart/form-data",
    Authorization: `Bearer ${params.accessToken} `,
  },
  formData: {
    "": {
      value: stream,
      options: {
        filename: `${params.reportName}.pbix`,
        contentType: null,
      },
    },
  },
};

// check whether the file is kept in memory
return new Promise(function (resolve, reject) {
  request(options, function (error, response) {
    if (error) {
      params.context.log(error);
      reject(error);
    } else {
      params.context.log(response);
      resolve(response.body);
    }
    fs.unlinkSync(defaultFile);
  });
});
I found this post with the same issue; that's why I used the path module and passed __dirname to the function as params.baseDir.
If you want to download a file from Azure Blob Storage and read it as a stream, just try the code below. In this demo, I download a .txt file to a temp folder (you should create it first on the Azure Function host) and print its content from the stream for a quick test:
module.exports = async function (context, req) {
  const { BlockBlobClient } = require("@azure/storage-blob")
  const fs = require('fs')

  const connStr = '<connection string>'
  const container = 'files'
  const blobName = 'test.txt'
  const tempPath = 'd:/home/temp/'
  const tempFilePath = tempPath + blobName

  const blobClient = new BlockBlobClient(connStr, container, blobName);

  await blobClient.downloadToFile(tempFilePath).then(async function () {
    context.log("download successful")
    let stream = fs.createReadStream(tempFilePath)
    // Print the text content, just to check that the stream has been read successfully
    context.log("text file content:")
    context.log(await streamToString(stream))
    // You can call your API here...
  })

  function streamToString(stream) {
    const chunks = [];
    return new Promise((resolve, reject) => {
      stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
      stream.on('error', (err) => reject(err));
      stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
    })
  }

  context.res = {
    body: 'done'
  }
}
Result: the file is downloaded and its content is read from the stream successfully.
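A small portability note (my suggestion, not part of the original answer): instead of hard-coding d:/home/temp/, Node's built-in os and path modules give you a writable temp directory that exists on any Functions host, so the folder does not need to be created by hand:

const os = require('os')
const path = require('path')

// os.tmpdir() points at a writable scratch directory on the host
const tempFilePath = path.join(os.tmpdir(), blobName)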

React Native: upload an image to an S3 bucket using aws-sdk

I am using aws-sdk to upload an image to an S3 bucket. Please look at my code below; I have already spent a day on it.
uploadImageOnS3 = () => {
  var S3 = require("aws-sdk/clients/s3");
  const BUCKET_NAME = "testtest";
  const IAM_USER_KEY = "XXXXXXXXXXXXX";
  const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";

  const s3bucket = new S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME
  });

  let contentType = "image/jpeg";
  let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';
  let file = {
    uri: this.state.fileObj.uri,
    type: this.state.fileObj.type,
    name: this.state.fileObj.fileName
  };

  s3bucket.createBucket(() => {
    const params = {
      Bucket: BUCKET_NAME,
      Key: this.state.s3BucketObj,
      Body: file,
      ContentDisposition: contentDeposition,
      ContentType: contentType
    };
    s3bucket.upload(params, (err, data) => {
      if (err) {
        console.log("error in callback");
        console.log(err);
      }
      // console.log('success');
      console.log(data);
    });
  });
};
Error:
Unsupported body payload object
Please help me sort this out. I am also using react-native-image-picker to pick the image.
You have to pass an ArrayBuffer (or another supported type) as the Body; a plain JavaScript object describing the file is not supported.
As per the AWS documentation, you can pass a stream, string, ArrayBuffer, or Blob as the Body parameter.
The code below should resolve your issue:
import fs from "react-native-fs";
import { decode } from "base64-arraybuffer";

uploadImageOnS3 = async () => {
  var S3 = require("aws-sdk/clients/s3");
  const BUCKET_NAME = "testtest";
  const IAM_USER_KEY = "XXXXXXXXXXXXX";
  const IAM_USER_SECRET = "XXXXX/XXXXXXXXXXXXXXXXXXXXXX";

  const s3bucket = new S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME,
    signatureVersion: "v4"
  });

  let contentType = "image/jpeg";
  let contentDeposition = 'inline;filename="' + this.state.s3BucketObj + '"';

  // Read the file from disk as base64 and decode it into an ArrayBuffer
  const fPath = this.state.fileObj.uri;
  const base64 = await fs.readFile(fPath, "base64");
  //console.log(base64);
  const arrayBuffer = decode(base64);
  //console.log(arrayBuffer);

  s3bucket.createBucket(() => {
    const params = {
      Bucket: BUCKET_NAME,
      Key: this.state.s3BucketObj,
      Body: arrayBuffer,
      ContentDisposition: contentDeposition,
      ContentType: contentType
    };
    s3bucket.upload(params, (err, data) => {
      if (err) {
        console.log("error in callback");
        console.log(err);
      }
      // console.log('success');
      console.log(data);
    });
  });
};
You can check out the React Native AWS Amplify documentation for the proper process. It mentions that you can pass a stream, string, ArrayBuffer, or Blob as the Body parameter.
import AWS from 'aws-sdk';
import fs from 'react-native-fs';
import { decode } from 'base64-arraybuffer';

export const uploadFileToS3 = async (file) => {
  const BUCKET_NAME = 'xxxxx';
  const IAM_USER_KEY = 'xxxxxxxxxxxxxxxxx';
  const IAM_USER_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx';

  const s3bucket = new AWS.S3({
    accessKeyId: IAM_USER_KEY,
    secretAccessKey: IAM_USER_SECRET,
    Bucket: BUCKET_NAME,
    signatureVersion: 'v4',
  });

  const contentType = file.type;
  const contentDeposition = `inline;filename="${file.name}"`;
  const fPath = file.uri;
  const base64 = await fs.readFile(fPath, 'base64');
  const arrayBuffer = decode(base64);

  return new Promise((resolve, reject) => {
    s3bucket.createBucket(() => {
      const params = {
        Bucket: BUCKET_NAME,
        Key: file.name,
        Body: arrayBuffer,
        ContentDisposition: contentDeposition,
        ContentType: contentType,
      };
      s3bucket.upload(params, (error, data) => {
        if (error) {
          reject(getApiError(error));
        } else {
          console.log(JSON.stringify(data));
          resolve(data);
        }
      });
    });
  });
}
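As a side note (my observation, not part of the original answers): the manual Promise wrapper is not strictly needed, because upload() returns a ManagedUpload that already exposes .promise(), and the createBucket call can be dropped when the bucket already exists:

// Equivalent, shorter form using the SDK's built-in promise support
const data = await s3bucket.upload(params).promise();
console.log(data.Location); // URL of the uploaded object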
