Use incomplete browser fetch request in video/audio element - javascript

I have some code like this:
fetch(audioSRC).then(response => {
  return new Response(new ReadableStream({
    start(controller) {
      const reader = response.body.getReader();
      read();
      function read() {
        reader.read().then(({ done, value }) => {
          if (done) {
            controller.close();
            return;
          }
          controller.enqueue(value); // pass each downloaded chunk along
          read();
        });
      }
    }
  }));
}).then(response => response.blob()).then(blob => {
  let vid = URL.createObjectURL(blob);
  player.src = vid;
});
I was wondering whether it is possible to take this stream while it is still incomplete and play what IS downloaded in the video/audio element before the download finishes. If there is a way to do it smoothly, it would be good, so that the user doesn't have to wait until the file is fully downloaded.

It works with the MediaSource API: https://developer.mozilla.org/en-US/docs/Web/API/MediaSource
After hours of experimenting, I found a half-working solution: https://stackoverflow.com/a/68778572/11979842
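For illustration, a minimal sketch of the MediaSource approach (assumptions: audioSRC and player as in the question, and a MIME/codec string that actually matches the file and is supported by the browser):

// Sketch: append fetched chunks to a SourceBuffer so playback can start
// before the download finishes. The 'audio/mpeg' type is an assumption
// and must match the real file.
const mediaSource = new MediaSource();
player.src = URL.createObjectURL(mediaSource);

mediaSource.addEventListener('sourceopen', async () => {
  const sourceBuffer = mediaSource.addSourceBuffer('audio/mpeg');
  const response = await fetch(audioSRC);
  const reader = response.body.getReader();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Wait until the previous append has been processed before appending again.
    await new Promise(resolve =>
      sourceBuffer.updating
        ? sourceBuffer.addEventListener('updateend', resolve, { once: true })
        : resolve()
    );
    sourceBuffer.appendBuffer(value);
  }
  if (!sourceBuffer.updating) mediaSource.endOfStream();
  else sourceBuffer.addEventListener('updateend', () => mediaSource.endOfStream(), { once: true });
});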

Related

Downloading an MP3 file from S3 and manipulating it results in a bad file

I wrote a script that downloads an MP3 file from my S3 bucket and then manipulates it before download (adding ID3 tags).
It's working and the tags are injected properly, but the file seems to get corrupted and is unplayable.
I can still see my tags through MP3tag, so the file has data in it, but no audio plays from it.
Here's my code.
Trying to figure out what went wrong:
const downloadFileWithID3 = async (filename, downloadName, injectedEmail) => {
  try {
    const data = await s3Client.send(
      new GetObjectCommand({
        Bucket: "BUCKETNAME",
        Key: filename,
      })
    );
    const fileStream = streamSaver.createWriteStream(downloadName);
    const writer = fileStream.getWriter();
    const reader = data.Body.getReader();
    const pump = () =>
      reader.read().then(({ value, done }) => {
        if (done) writer.close();
        else {
          const arrayBuffer = value;
          const writerID3 = new browserId3Writer(arrayBuffer);
          const titleAndArtist = downloadName.split("-");
          const [artist, title] = titleAndArtist;
          writerID3.setFrame("TIT2", title.slice(0, -4));
          writerID3.setFrame("TPE1", [artist]);
          writerID3.setFrame("TCOM", [injectedEmail]);
          writerID3.addTag();
          let taggedFile = new Uint8Array(writerID3.arrayBuffer);
          writer.write(taggedFile).then(pump);
        }
      });
    await pump()
      .then(() => console.log("Closed the stream, done writing"))
      .catch((err) => console.log(err));
  } catch (err) {
    console.log(err);
  }
};
Hope you can help me solve this weird bug.
Thanks in advance!
OK, so I've figured it out: instead of working with the chunks of the stream itself, I used getSignedUrl from the S3 bucket and it works.
Thanks everyone for trying to help out!
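For reference, a minimal sketch of that getSignedUrl approach (assuming the AWS SDK v3 packages and the same s3Client as above; the download-link helper is an assumption, not code from the original post):

// Sketch: generate a pre-signed URL for the object and let the browser
// download it directly, instead of re-writing the stream chunk by chunk.
import { GetObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";

const downloadViaSignedUrl = async (filename, downloadName) => {
  const command = new GetObjectCommand({
    Bucket: "BUCKETNAME", // placeholder, as in the question
    Key: filename,
  });
  // URL is valid for one hour (3600 seconds).
  const url = await getSignedUrl(s3Client, command, { expiresIn: 3600 });

  // Trigger the download in the browser.
  const a = document.createElement("a");
  a.href = url;
  a.download = downloadName;
  a.click();
};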

Having issues working with socket.io and the MediaStream API to send real-time, live audio chat between clients in a room

I am having an issue that I can't seem to figure out. I am creating an application in which clients in a room will be able to talk to each other via their microphones. More specifically, I need to stream audio from one client's microphone and send the data (while it is being recorded) to the server and then to a different client, so that they can hear the other person's voice in real time. I have a method of doing this, but it is inefficient, choppy, and blatantly bad...
My method as of now:
This snippet is on the client side of the person recording audio
setInterval(() => {
  navigator.mediaDevices.getUserMedia({ audio: true })
    .then(stream => {
      const mediaRecorder = new MediaRecorder(stream);
      mediaRecorder.start();
      const audioChunks = [];
      mediaRecorder.addEventListener("dataavailable", (event) => {
        audioChunks.push(event.data);
      });
      mediaRecorder.addEventListener("stop", () => {
        socket.emit('liveAudioToServer', audioChunks);
      });
      setTimeout(() => {
        mediaRecorder.stop();
      }, 2000);
    });
}, 2000);
This snippet is the server side:
socket.on('liveAudioToServer', (data) => {
  socket.broadcast.to(room).emit('liveAudioToClient', data);
});
And this snippet is the client side on the receiving end of the audio:
socket.on('liveAudioToClient', (data) => {
  const audioBlob = new Blob(data);
  const audioUrl = URL.createObjectURL(audioBlob);
  const audio = new Audio(audioUrl);
  audio.play();
});
Basically, what this code does is send an audio buffer to the server every two seconds, and the server then broadcasts the buffer to the other clients in the room. Once the other clients receive the buffer, they compile it into audio that is played. The main issue with this is that you can clearly tell when the audio ends and begins, and it is not smooth in the slightest. However, I tried a different solution that I feel could work with some tweaking, but it doesn't work as of now.
Other Method:
if (navigator.getUserMedia) {
  navigator.getUserMedia(
    { audio: true },
    function(stream) {
      const audioContext3 = new AudioContext();
      const audioSource3 = audioContext3.createMediaStreamSource(stream);
      const analyser3 = audioContext3.createAnalyser();
      audioSource3.connect(analyser3);
      analyser3.fftSize = 256;
      const bufferLength = analyser3.frequencyBinCount;
      const dataArray = new Uint8Array(bufferLength);
      function sendAudioChunks() {
        analyser3.getByteFrequencyData(dataArray);
        requestAnimationFrame(sendAudioChunks);
        socket.emit('liveAudioToServer', dataArray);
      }
      sendAudioChunks();
    },
    function() { console.log("Error 003."); }
  );
}
server side:
socket.on('liveAudioToServer', (data) => {
  socket.broadcast.to(getRoom(socket.id)).emit('liveAudioToClient', data);
});
Other clients' side (receiving audio):
socket.on('liveAudioToClient', (data) => {
  const audioBlob = new Blob([new Uint8Array(data)]);
  const audioUrl = URL.createObjectURL(audioBlob);
  const audioo = new Audio(audioUrl);
  audioo.play();
});
I've really looked everywhere for the solution to this and I have had no luck so if anyone could help me that would be greatly appreciated!!!
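As an aside, here is a hedged sketch (not from the original question) of a variant of the first method that keeps a single MediaRecorder running and emits chunks on a timeslice instead of being restarted every two seconds; socket is the same socket.io connection used above:

// Sketch: one long-lived MediaRecorder that fires "dataavailable"
// roughly every 250 ms, so there is no gap between recordings.
navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
  const mediaRecorder = new MediaRecorder(stream);
  mediaRecorder.addEventListener('dataavailable', (event) => {
    if (event.data.size > 0) {
      socket.emit('liveAudioToServer', event.data); // forward each chunk as it is produced
    }
  });
  mediaRecorder.start(250); // timeslice in milliseconds
});

Note that chunks produced this way are only decodable in sequence (the first chunk carries the container headers), so the receiving side would need to append them to something like a MediaSource SourceBuffer rather than wrapping each one in its own Audio element.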

How to convert a ulaw file to wav format in ReactJS

I am trying to play a .ulaw file in ReactJS. I didn't find any straightforward way to play the ulaw file, so I tried to convert it from ulaw to wav using the wavefile package. But after conversion it plays only noise, and I am not able to identify where the actual error is.
ulaw file details:
Sample rate: 8000 Hz
Bit rate: 64 kbps
Channel: mono
import voice from '../src/app/components/voice.ulaw'
const WaveFile = require('wavefile').WaveFile;
let wav = new WaveFile();

const playUlawFile = () => {
  fetch(voice)
    .then(r => r.text())
    .then(text => Buffer.from(text, "utf-8").toString("base64"))
    .then(result => {
      wav.bitDepth = '128000';
      wav.fromScratch(1, 8000, '64', result);
      wav.fromMuLaw();
      wav.toSampleRate(64000);
      return wav;
    }).then(wav => {
      audio = new Audio(wav.toDataURI());
      return audio;
    }).then(audio => {
      audio.play();
    });
};
The changes below worked for me; there is still some noise, but it produces roughly 80% matching sound.
const readFile = () => {
  fetch(voice)
    .then(r => r.text())
    .then(text => btoa(unescape(encodeURIComponent(text))))
    .then(result => {
      wav.fromScratch(1, 18000, '8m', Buffer.from(result, "base64"));
      wav.fromMuLaw(32);
      wav.toSampleRate(64000, { method: "sinc" });
      return wav;
    }).then(wav => {
      audio = new Audio(wav.toDataURI());
      return audio;
    }).then(audio => {
      audio.play();
    });
};
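The remaining noise is likely related to reading the binary file as text and re-encoding it. As a hedged sketch (assuming the file really is 8 kHz, mono, 8-bit mu-law, and using the same wavefile API as above), fetching the raw bytes avoids any text round-trip:

// Sketch: read the mu-law bytes directly instead of round-tripping
// them through text/base64, then let wavefile decode them.
const playUlawFromBytes = () => {
  fetch(voice)
    .then(r => r.arrayBuffer())                // raw bytes, no text decoding
    .then(buffer => {
      const samples = new Uint8Array(buffer);  // one byte per mu-law sample
      const wav = new WaveFile();
      wav.fromScratch(1, 8000, '8m', samples); // 1 channel, 8000 Hz, 8-bit mu-law
      wav.fromMuLaw(16);                       // decode to 16-bit linear PCM
      return new Audio(wav.toDataURI());
    })
    .then(audio => audio.play());
};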

How to execute a function at the same time for multiple people?

I have an application hosted on an IBM server that is used by multiple people at once. We have a feature where a particular piece of code needs to execute at exactly the same time for all of them. How can I achieve this? The feature is that one person initiates a recording, which opens the microphone of every participant. This is achieved by maintaining a variable whose value I set and unset, but when the recording is ended, the code executes immediately for the person who ended it, while for the other people it takes a few hundred milliseconds or even seconds to execute the same piece of code. Any help on how I can tackle this issue?
render() {
  this.props.meetingData.isMeetingDiscussionStarted ? this.recordFullAudio() : null;
  !this.props.meetingData.isMeetingDiscussionStarted ? this.stopRecordingAudio() : null;
}

recordFullAudio = () => {
  const constraints = {
    audio: {
      channels: 1
    },
    video: false
  };
  getUserMedia(constraints)
    .then(stream => {
      this.stream = stream;
      const mimeType = 'audio/mpeg';
      const mime = ['audio/wav', 'audio/mpeg', 'audio/webm', 'audio/ogg']
        .filter(MediaRecorder.isTypeSupported)[0];
      const options = {
        mimeType: 'audio/webm',
      };
      this.mediaRecorder = new MediaRecorder(stream);
      this.mediaRecorder.addEventListener('dataavailable', event => {
        audioChunks.push(event.data);
      });
      this.mediaRecorder.addEventListener('stop', () => { });
    }).catch(error => { });
}

stopRecordingAudio = () => {
  if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
    this.mediaRecorder.stop();
    this.stream.getTracks().forEach((track) => {
      track.stop();
    });
  }
  // Cleanup
  this.mediaRecorder = null;
  this.stream = null;
}
isMeetingDiscussionStarted is the variable maintained in a Node.js ShareDB doc that is used to start and end the recording.
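One common direction, shown as a hedged sketch below (not from the original question; it assumes the clients' clocks are roughly in sync, and publishSharedValue stands in for however the shared ShareDB value is already written and observed), is to broadcast an absolute timestamp instead of a boolean and have every client schedule the action locally against it:

// Sketch: the initiator publishes a future timestamp; every client
// (including the initiator) schedules the same action for that moment.
const SCHEDULE_DELAY_MS = 500; // buffer so the update reaches everyone before it fires

// Initiator: announce when the recording should stop (hypothetical helper).
function requestStopRecording(publishSharedValue) {
  publishSharedValue('stopRecordingAt', Date.now() + SCHEDULE_DELAY_MS);
}

// Every client: schedule against the shared timestamp,
// not against the moment the update happens to arrive.
function onStopRecordingAt(stopAt, stopRecordingAudio) {
  const delay = Math.max(0, stopAt - Date.now());
  setTimeout(stopRecordingAudio, delay);
}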

Expand my image upload from 1 to 5 photos; map/foreach?

I am creating an app in which you can upload a photo, with some other data, to Firebase. The uploading part worked perfectly with one picture. However, I have now added a multiple-image picker (select 1 to 5 pictures) and would like my image upload function to upload the 5 pictures instead of the 1.
The image upload works with 1 image provided, so how can I rearrange my code to upload the x amount of photos in the array?
The pictures are added to the photos array with the following data (the output shown below is a console.log of the fetched images):
Array [
Object {
"exists": true,
"file": "ph://8905951D-1D94-483A-8864-BBFDC4FAD202/L0/001",
"isDirectory": false,
"md5": "f9ebcab5aa0706847235887c1a7e4740",
"modificationTime": 1574493667.505371,
"size": 104533,
"uri": "ph://8905951D-1D94-483A-8864-BBFDC4FAD202/L0/001",
},
With this didFocus listener I check whether the fetchedImages param is set and set the photos array to the fetched images (so, all the data shown above):
const didFocusSubscription = props.navigation.addListener(
  'didFocus', () => {
    let fetchedImages = props.navigation.getParam('fetchedImages');
    console.log(fetchedImages);
    setPhotos(fetchedImages);
    setImageValid(true);
    calculateImageDimensions();
  }
);
When I save the page and start dispatching the data, I run the following command; the uploadImage function runs and returns an upload URL, which is then saved in the dispatch function to the Firebase database to be fetched later:
uploadurl = await uploadImageAsync(photos)
So uploadImageAsync starts with the photos array forwarded to it. How can I make sure the function below is started for every photo.uri in the array? Can I use .map or forEach for this, and in what context should I be using it?
Also, I am not quite sure how I can send back an array of URLs to be saved together with the rest of the information.
async function uploadImageAsync(photos) {
  console.log('uploadImage has started');
  // Why are we using XMLHttpRequest? See:
  // https://github.com/expo/expo/issues/2402#issuecomment-443726662
  const blob = await new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onload = function () {
      resolve(xhr.response);
    };
    xhr.onerror = function (e) {
      console.log(e);
      reject(new TypeError('Network request failed'));
    };
    xhr.responseType = 'blob';
    xhr.open('GET', photos, true);
    xhr.send(null);
  });
  const ref = firebase
    .storage()
    .ref()
    .child(uuid.v4());
  const snapshot = await ref.put(blob);
  // We're done with the blob, close and release it
  blob.close();
  return await snapshot.ref.getDownloadURL();
}
==============edited because of progress with uploading====================
Once again I have gotten a little bit further. The image upload function is now running, but because there are multiple images, I would like to await the response for all of them before continuing.
try {
  uploadurl = await uploadImageAsync();
  address = await getAddress(selectedLocation);
  console.log(uploadurl);
  if (!uploadurl.length) {
    Alert.alert('Upload error', 'Something went wrong uploading the photo, please try again', [
      { text: 'Okay' }
    ]);
    return;
  }
  dispatch(
At this moment, when I start the uploadImageAsync function, I can see with the help of console.log that it is uploading the images; they also show up online. But while the pictures are still uploading, uploadurl already comes back empty, which shows the Alert and stops the function.
uploadImageAsync = async () => {
  const provider = firebase.database().ref(`providers/${uid}`);
  let imagesArray = [];
  try {
    await photos.map(img => {
      let file = img.data;
      const path = "Img_" + uuid.v4();
      const ref = firebase
        .storage()
        .ref(`/${uid}/${path}`);
      ref.putString(file).then(() => {
        ref
          .getDownloadURL()
          .then(images => {
            imagesArray.push({
              uri: images
            });
            console.log("Out-imgArray", imagesArray);
          });
      });
    });
    return imagesArray; // <== this return imagesArray fires too early and starts the rest of my upload function
  } catch (e) {
    console.error(e);
  }
};
A Discord chat pointed me toward Promise.all to make this work. I tried that, and opened another Stack Overflow topic to get it working:
await response of image upload before continue function
The solution for my image upload function came from that topic:
uploadImages = () => {
  const provider = firebase.database().ref(`providers/${uid}`);
  // CHANGED: removed 'let imagesArray = [];', no longer needed
  return Promise.all(photos) // CHANGED: return the promise chain
    .then(photoarray => {
      console.log('all responses are resolved successfully');
      // take each photo, upload it and then return its download URL
      return Promise.all(photoarray.map((photo) => { // CHANGED: used Promise.all(someArray.map(...)) idiom
        let file = photo.data;
        const path = "Img_" + uuid.v4();
        const storageRef = firebase // CHANGED: renamed 'ref' to 'storageRef'
          .storage()
          .ref(`/${uid}/${path}`);
        let metadata = {
          contentType: 'image/jpeg',
        };
        // upload current photo and get its download URL
        return storageRef.putString(file, 'base64', metadata) // CHANGED: return the promise chain
          .then(() => {
            console.log(`${path} was uploaded successfully.`);
            return storageRef.getDownloadURL() // CHANGED: return the promise chain
              .then(fileUrl => ({ uri: fileUrl }));
          });
      }));
    })
    .then((imagesArray) => {                     // These lines can
      console.log("Out-imgArray: ", imagesArray) // safely be removed.
      return imagesArray;                        // They are just
    })                                           // for logging.
    .catch((err) => {
      console.error(err);
    });
};
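For completeness, a small usage sketch tying this back to the earlier save flow (the surrounding saveForm handler name is hypothetical; getAddress, selectedLocation, Alert, and dispatch are as used in the question):

// Sketch: wait for every upload before dispatching.
const saveForm = async () => {
  const uploadurl = await uploadImages(); // resolves to an array of { uri } objects
  const address = await getAddress(selectedLocation);

  if (!uploadurl || !uploadurl.length) {
    Alert.alert('Upload error', 'Something went wrong uploading the photos, please try again', [
      { text: 'Okay' }
    ]);
    return;
  }

  dispatch(/* ...save uploadurl and address together with the rest of the form data... */);
};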
