How can I know the audio/video duration before uploading? - javascript

I need to upload a file (audio/video) using the default input type='file', and then I should pass the duration of the video in the API request. How can I do this?
// Grab the picked file and send it to the API (duration handling is added
// in the answers below).
const uploadFile = async (event) => {
  // FileList access is synchronous — the original `await` here did nothing.
  const file = event.target.files[0];
  //here api POST request where i should pass duration
}; // was `}:` — a syntax error

You can get the audio duration with HTMLMediaElement.duration:
/**
 * Resolve the duration (in seconds) of an audio/video file before upload.
 * @param {File|Blob} file - file picked from an <input type="file">.
 * @returns {Promise<number>} duration in seconds.
 */
function getDuration(file) {
  const url = URL.createObjectURL(file);
  return new Promise((resolve, reject) => {
    const audio = document.createElement("audio");
    audio.muted = true;
    const source = document.createElement("source");
    source.src = url; //--> blob URL
    audio.preload = "metadata";
    audio.appendChild(source);
    audio.onloadedmetadata = function () {
      URL.revokeObjectURL(url); // release the blob URL — avoids a memory leak
      resolve(audio.duration);
    };
    // Without this the promise would hang forever on undecodable files.
    audio.onerror = function () {
      URL.revokeObjectURL(url);
      reject(new Error("Could not load media metadata"));
    };
  });
}
Then in your function:
// Resolve the media duration first so it can be included in the POST body.
const uploadFile = async (event) => {
  const file = event.target.files[0];
  const duration = await getDuration(file);
  //here api POST request where i should pass duration
}; // was `}:` — a syntax error

You just need to create an element based on user input(video/audio) and get the duration property -
const VIDEO = "video";
const AUDIO = "audio";

// Placeholder for the real upload request; for the demo it just reports the
// detected duration and media type on the page.
const uploadApiCall = (file, data = {}) => {
  // ----- YOUR API CALL CODE HERE -----
  // textContent instead of innerHTML: these are plain-text values, and this
  // avoids interpreting them as markup.
  document.querySelector("#duration").textContent = `${data.duration}s`;
  document.querySelector("#type").textContent = data.type;
};
let inputEl = document.querySelector("#fileinput");
// On file selection: detect audio vs video, probe the metadata via a
// temporary media element, then hand file + duration to the API call.
inputEl.addEventListener("change", (e) => {
  const file = inputEl.files[0];
  let fileType = "";
  if (file.type.startsWith("audio/")) {
    fileType = AUDIO;
  } else if (file.type.startsWith("video/")) {
    fileType = VIDEO;
  } else {
    alert("Unsupported file");
    return;
  }
  const dataURL = URL.createObjectURL(file);
  const el = document.createElement(fileType);
  el.preload = "metadata"; // only metadata is needed, not the whole file
  el.src = dataURL;
  el.onloadedmetadata = () => {
    URL.revokeObjectURL(dataURL); // metadata read — free the blob URL
    uploadApiCall(file, {
      duration: el.duration,
      type: fileType
    });
  };
  // The original silently did nothing when the media couldn't be decoded.
  el.onerror = () => {
    URL.revokeObjectURL(dataURL);
    alert("Could not read media metadata");
  };
});
<!-- Demo UI: pick an audio/video file; #type and #duration are filled in by uploadApiCall -->
<form>
<input type="file" accept="video/*,audio/*" id="fileinput" />
<hr />
Type:<span id="type"></span>
<br />
Duration:<span id="duration"></span>
</form>

In Vue 3 JS, I had to create a function first:
/**
 * Resolve the duration (in seconds) of a media file chosen by the user.
 * @param {File|Blob} file
 * @returns {Promise<number>} duration in seconds.
 */
const getDuration = (file) => {
  const url = URL.createObjectURL(file);
  return new Promise((resolve, reject) => {
    const audio = document.createElement("audio");
    audio.muted = true;
    const source = document.createElement("source");
    source.src = url; //--> blob URL
    audio.preload = "metadata";
    audio.appendChild(source);
    audio.onloadedmetadata = function () {
      URL.revokeObjectURL(url); // avoid leaking the blob URL
      resolve(audio.duration);
    };
    // Reject instead of hanging forever on undecodable files.
    audio.onerror = function () {
      URL.revokeObjectURL(url);
      reject(new Error("Could not load audio metadata"));
    };
  });
}
The user would select an MP3 file. Then when it was submitted I could call that function in the Submit function:
// Submit handler: measures the audio duration, uploads the file to Firebase
// Storage, then writes the download URL + duration to Firestore and routes on.
const handleAudioSubmit = async () => {
  console.log('Your Epsiode Audio is being stored... please stand by!')
  if (!file.value) {
    file.value = null
    fileError.value = 'Please select an audio file (MP3)'
    return
  }
  // returns a number that represents audio seconds
  duration.value = await getDuration(file.value)
  // remove the decimals by rounding
  duration.value = Math.round(duration.value)
  console.log("duration: ", duration.value)
  // load the audio file to Firebase Storage using a composable function;
  // it resolves with the Firebase Storage location URL
  epAudioUrl.value = await uploadAudio(file.value)
  console.log("uploadAudio function finished")
  // Set the Album Fields based on the album id to Firestore DB.
  // BUG FIX: the original had an unterminated string ("albums, albumID.value)
  // and stored audioUrl.value — which was never assigned — instead of
  // epAudioUrl.value.
  const updateAudio = doc(db, "artist", artistId.value, "albums", albumID.value)
  await updateDoc(updateAudio, {
    audioUrl: epAudioUrl.value,
    audioDuration: duration.value
  })
  console.log("Audio URL and Duration added to Firestore!")
  console.log('Episode Audio has been added!')
  router.push({ name: 'Next' })
}
This takes some time to run and needs refactoring, but works provided you allow the async functions the time to finish. Hope that helps!

Related

Can I upload a recorded video file using MediaRecorder to AWS without a POST request from the frontend?

I made a frontend that records video using MediaRecorder.
In the handleUpload function, when I click the upload button, I'd like this videoFile (the recorded video's URL) to be uploaded to AWS.
But when I searched multer way, it usually is used when I click submit the form which requests POST. And multer is used as middleware.
In my case, I just click button.
Then How can I upload videoFile to Aws?
// DOM handles for the recording UI.
const VideoBtn = document.querySelector("#VideoBtn");
const recordVideo = document.querySelector("#recordVideo");
const VideoList = document.querySelector("#List");
const RecordForm = document.querySelector("#RecordForm");
// Shared recording state: the camera stream, the active MediaRecorder, and
// the blob URL of the last recording.
let stream;
let recorder;
let videoFile;
// Reset the button to "start" mode, push the recording to S3, then submit
// the surrounding form through a temporary submit input.
const handleUpload = async () => {
  VideoBtn.removeEventListener("click", handleUpload);
  VideoBtn.addEventListener("click", handleStart);
  VideoBtn.innerText = "Start Video Recording";
  recordVideo.style.visibility = "hidden";
  await uploadToS3(videoFile, "/upload");
  // date and title to upload
  const submitBtn = document.createElement("input");
  submitBtn.type = "submit";
  RecordForm.appendChild(submitBtn);
  submitBtn.click();
  RecordForm.removeChild(submitBtn);
};
// Stop recording and switch the button over to "upload" mode.
const handleStop = () => {
  recorder.stop();
  VideoBtn.removeEventListener("click", handleStop);
  VideoBtn.addEventListener("click", handleUpload);
  VideoBtn.innerText = "Upload Video Recording";
};
// Ask for the camera, show the live preview, and start recording. When the
// recorder emits its data (after stop) the preview is swapped to playback
// of the recording.
const handleStart = async () => {
  VideoBtn.removeEventListener("click", handleStart);
  VideoBtn.addEventListener("click", handleStop);
  VideoBtn.innerText = "Stop Video Recording";
  const constraints = { audio: false, video: true };
  stream = await navigator.mediaDevices.getUserMedia(constraints);
  recordVideo.srcObject = stream;
  recordVideo.play();
  recorder = new MediaRecorder(stream);
  recorder.ondataavailable = (event) => {
    videoFile = URL.createObjectURL(event.data);
    recordVideo.srcObject = null;
    recordVideo.src = videoFile;
    recordVideo.play();
  };
  recorder.start();
};
// Initial wiring: the button starts in "start recording" mode.
VideoBtn.addEventListener("click", handleStart);

How can I get video bitrate with javascript

I want to get the bitrate of the uploaded video, because the backend needs it.
// There is no `video.bitrate` property in the HTML media API, and the
// original revoked the blob URL before the metadata could load (which aborts
// loading). Wait for loadedmetadata, revoke then, and approximate the
// bitrate as file size / duration.
var video = document.createElement('video');
video.preload = 'metadata';
var mediaFile = document.getElementById('fileUp').files[0];
var mediaURL = URL.createObjectURL(mediaFile);
video.src = mediaURL;
video.onloadedmetadata = function () {
  window.URL.revokeObjectURL(mediaURL);
  // bytes per second; includes audio/subtitle/container overhead
  console.log(mediaFile.size / video.duration);
};
You can get the video duration then simply divide the file size by it to get an approximation (subtitles, audio and metadata would also be included in this value), as far as i know there is no standard api for getting the bitrate directly.
Example (credits https://stackoverflow.com/a/67899188/6072029 ) :
<div>
<script>
// Read the file as a data URL and probe it with an audio element to get its
// duration; the bitrate is approximated as size / duration (this includes
// audio, subtitles and container overhead).
const getVideoInfos = (file) =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = (error) => reject(error);
    reader.onload = () => {
      const media = new Audio(reader.result);
      // Reject (instead of hanging forever) when the media can't be decoded.
      media.onerror = () => reject(new Error("Could not load media metadata"));
      media.onloadedmetadata = () => resolve({
        duration: media.duration,
        file_size: file.size,
        bitrate: file.size / media.duration,
      });
    };
    reader.readAsDataURL(file);
  });
// Compute and display the infos for the file the user just picked.
const handleChange = async (e) => {
  const [picked] = e.target.files;
  const infos = await getVideoInfos(picked);
  const label = `Infos : ${JSON.stringify(infos, null, 4)}`;
  document.querySelector("#infos").innerText = label;
};
</script>
<input type="file" onchange="handleChange(event)" />
<p id="infos">infos: </p>
</div>

How to upload video recorded with MediaRecorder API using PHP?

I am working on a video recording task using MediaRecorder API. As from frontend I can start the webcam, record video, play the recorded video and download the video.
But when I try to upload the video to the PHP server, it's not working at all. I don't really understand why this is happening; I also tried many methods, but none of them worked. Please check the code attached below.
JS:-
// Recorder state shared by the handlers below.
let mediaRecorder
let recordedBlobs
// Cached DOM references for the recording UI.
const errorMsgElement = document.querySelector('span#errorMsg');
const recordedVideo = document.querySelector('video#recorded');
const recordButton = document.querySelector('button#record');
const playButton = document.querySelector('button#play');
const downloadButton = document.querySelector('button#download');
// "Start" button: build getUserMedia constraints (echo cancellation taken
// from a checkbox, video fixed at 1280x720) and open the camera.
document.querySelector("button#start").addEventListener("click", async function() {
const hasEchoCancellation = document.querySelector("#echoCancellation").checked
const constraints = {
audio: {
echoCancellation:{
exact: hasEchoCancellation
}
},
video: {
width: 1280,
height: 720
}
}
await init(constraints)
})
// Open the camera/mic with the given constraints and wire up the preview.
// Failures (e.g. permission denied) are logged and the UI is left as-is.
async function init(constraints) {
  try {
    const mediaStream = await navigator.mediaDevices.getUserMedia(constraints)
    handleSuccess(mediaStream)
  } catch (e) {
    console.log(e)
  }
}
// Stream acquired: enable recording, stash the stream globally, and show
// the live camera preview.
function handleSuccess(stream) {
  window.stream = stream
  recordButton.disabled = false
  document.querySelector("video#gum").srcObject = stream
}
// The record button toggles between "Record" and "Stop Recording" states.
recordButton.addEventListener("click", () => {
  const idle = recordButton.textContent === "Record"
  if (!idle) {
    stopRecording()
    recordButton.textContent = 'Record'
    playButton.disabled = false
    downloadButton.disabled = false
    return
  }
  startRecording()
})
// Begin a fresh recording into recordedBlobs.
// BUG FIX: the original logged a MediaRecorder construction failure and then
// fell through to call start() on an undefined/stale recorder — now it bails
// out early instead.
function startRecording() {
  recordedBlobs = []
  const options = {
    mimeType: "video/webm;codecs=vp9,opus"
  }
  try {
    mediaRecorder = new MediaRecorder(window.stream, options)
  } catch(e) {
    console.log(e)
    return
  }
  recordButton.textContent = "Stop Recording"
  playButton.disabled = true
  downloadButton.disabled = true
  mediaRecorder.onstop = (event) => {
    console.log('Recording Stopped')
  }
  mediaRecorder.ondataavailable = handleDataAvailable
  mediaRecorder.start()
}
// Collect non-empty chunks emitted by the recorder.
function handleDataAvailable(event) {
  const chunk = event.data
  if (chunk && chunk.size > 0) {
    recordedBlobs.push(chunk)
  }
}
// Stop the active recorder; its remaining data is delivered to the
// ondataavailable handler set in startRecording, then onstop fires.
function stopRecording() {
mediaRecorder.stop()
}
// Play back the recording and send it to the server.
// FIX: `recordedVideo.src = null` actually assigns the string "null";
// clear the attribute instead. Dead commented-out code removed.
playButton.addEventListener('click', function() {
  const superBuffer = new Blob(recordedBlobs, {
    type: 'video/webm'
  })
  var file = new File([superBuffer], 'test.webm')
  var url = window.URL.createObjectURL(superBuffer)
  sendToServer(file)
  recordedVideo.removeAttribute('src')
  recordedVideo.srcObject = null
  recordedVideo.src = url
  recordedVideo.controls = true
  recordedVideo.play()
})
// Download the recording via a temporary anchor element.
// FIX: the chunks are WebM (see the mimeType in startRecording); labelling
// the blob 'video/mp4' with a .mp4 extension produced a mislabelled file.
downloadButton.addEventListener('click', () => {
  const blob = new Blob(recordedBlobs, {type: 'video/webm'});
  const url = window.URL.createObjectURL(blob);
  const a = document.createElement('a');
  a.style.display = 'none';
  a.href = url;
  a.download = 'test.webm';
  document.body.appendChild(a);
  a.click();
  // Delay cleanup so the browser has started the download before the
  // element and blob URL are released.
  setTimeout(() => {
    document.body.removeChild(a);
    window.URL.revokeObjectURL(url);
  }, 100);
});
// POST the recorded file to send.php as multipart form data.
// FIX: the original set 'Content-Type: multipart/form-data' by hand (and
// passed the headers object where the axios *config* goes). A hand-written
// multipart header is missing the boundary parameter, so PHP cannot parse
// the body and $_FILES stays empty — which is why the upload "fails".
// Let the browser/axios derive the header from the FormData instead.
function sendToServer(file) {
  const url = 'send.php'
  const formData = new FormData()
  formData.append("file", file)
  axios.post(url, formData)
    .then((response) => {
      console.log(response.data)
    })
    .catch((error) => {
      console.log(error.response)
    })
}
// A Blob() is almost a File() — tack on the two File-specific properties
// (name and lastModifiedDate) and hand the same object back.
function blobToFile(theBlob, fileName){
  theBlob.name = fileName
  theBlob.lastModifiedDate = new Date()
  return theBlob
}
PHP:-
// Save the uploaded recording into uploads/.
// FIX: check that the upload actually succeeded before moving it, and only
// print the PHP upload error code on failure (the original printed it
// unconditionally, even after echoing success).
$target_dir = "uploads/";
$target_file = $target_dir . 'test.webm';
if (isset($_FILES["file"])
    && $_FILES["file"]["error"] === UPLOAD_ERR_OK
    && move_uploaded_file($_FILES["file"]["tmp_name"], $target_file)) {
    echo "File uploaded successfully";
} else {
    echo "File not uploaded";
    if (isset($_FILES["file"])) {
        // See the UPLOAD_ERR_* constants for the meaning of this code.
        print_r($_FILES["file"]["error"]);
    }
}
No matter how much I tried, I can't figure out why it is not working. It is showing that "File not uploaded" like it can't read the file from tmp_name. Please help me fix the problem.
Any help on this problem will be really appreciated.
Thank you.

Using MP4box.js and onSegment callback is not called no websocket

So I'm trying to use MP4Box.js. On its readme page it states: it has a demo: "A player that performs on-the-fly fragmentation".
However the onSegment callbacks which should feed MSE are not called at all:
// Fetch a byte range of an MP4, let MP4Box.js fragment it on the fly, and
// (intend to) feed the produced segments into an MSE SourceBuffer.
const getVideo = async () => {
const data = await fetch(url, { headers: { range: 'bytes=0-567139' } });
let buff = await data.arrayBuffer();
console.log(buff)
mp4box = MP4Box.createFile();
mp4box.onError = function(e) {
console.log("mp4box failed to parse data.");
};
mp4box.onMoovStart = function () {
console.log("Starting to receive File Information");
};
mp4box.onReady = function(info) {
console.log(info.mime);
// NOTE(review): `setBuffer` is declared inside `sourceopen` below, so it is
// NOT in scope here — if onSegment ever fired this callback would throw a
// ReferenceError. Hoist setBuffer (or queue segments) so this can reach
// the SourceBuffer.
mp4box.onSegment = (id, user, buffer ) => {
setBuffer(buffer)
}
mp4box.setSegmentOptions(info.tracks[0].id, null, { nbSamples: 1000 });
var initSegs = mp4box.initializeSegmentation();
console.log(initSegs)
mp4box.start();
};
var nextBufferStart = 0;
buff.fileStart = nextBufferStart;
// NOTE(review): the buffer is appended (and segmentation kicked off) before
// the MediaSource below is created/open, so any segments produced at this
// point have nowhere to go — likely why onSegment appears to never fire.
nextBufferStart = mp4box.appendBuffer(buff);
mp4box.flush();
const mediaSource = new MediaSource();
const mimeCodec = 'video/mp4; codecs="avc1.640029,mp4a.40.2"';
video.src = URL.createObjectURL(mediaSource);
function sourceopen() {
console.log('open')
const source = mediaSource.addSourceBuffer(mimeCodec);
// Appends one fragmented-MP4 segment to the SourceBuffer.
const setBuffer = (buff) => {
source.appendBuffer(buff)
};
}
mediaSource.addEventListener('sourceopen', sourceopen, { once: true });
}
Now putting this into an HTML file would result this in the JavaScript console:
Starting to receive File Information
video/mp4; codecs="avc1.4d4028"; profiles="isom,iso2,avc1,iso6,mp41"
But nothing happens afterwards. Why is the onSegment not called here?

How to play audio from a url in chunks (streaming) with Javascript

I have a URL that fetches a music file from an AWS S3 bucket.
it takes about 10 - 15 seconds for the song to start playing, because it fetches the song first before playing.
I want to be able to fetch the song in chunks and play them in the html5 audio player.
So that once the user plays the song, it starts streaming while the other song is being fetched.
import React, { useEffect } from 'react';
import Waveform from './Waveform';
// Concatenate two AudioBuffers channel-by-channel into a new buffer
// allocated from the given context (sample rate taken from buffer1; channel
// count is the minimum of the two inputs).
const appendBuffer = (buffer1, buffer2, context) => {
  const channels = Math.min(
    buffer1.numberOfChannels,
    buffer2.numberOfChannels
  );
  const totalLength = buffer1.length + buffer2.length;
  const joined = context.createBuffer(channels, totalLength, buffer1.sampleRate);
  for (let ch = 0; ch < channels; ch += 1) {
    const target = joined.getChannelData(ch);
    target.set(buffer1.getChannelData(ch), 0);
    target.set(buffer2.getChannelData(ch), buffer1.length);
  }
  return joined;
};
// Streams an audio URL chunk-by-chunk and tries to decode and play each
// chunk with the Web Audio API as it arrives.
const Player = ({ selectedTrack }) => {
// const [blob, setBlob] = useState();
const getData = async (selectedTrack) => {
const fetchedData = await fetch(selectedTrack);
const reader = await fetchedData.body.getReader();
const context = new AudioContext();
reader.read().then(async function processAudio({ done, value }) {
if (done) {
console.log('Stream finished. Content received:');
return;
}
try {
console.log('processAudio -> value', value.buffer);
// NOTE(review): decoding an arbitrary network chunk only works for
// formats whose frames decode standalone (e.g. MP3); other containers
// will throw here.
const buffer = await context.decodeAudioData(value.buffer);
const source = context.createBufferSource();
// NOTE(review): `source` was just created, so source.buffer is always
// null and appendBuffer() is never reached — every chunk plays as an
// independent source, which is why playback overlaps ("plays so fast").
// Chunks need to be scheduled at a cumulative start offset instead.
const newaudioBuffer =
source && source.buffer
? appendBuffer(source.buffer, buffer, context)
: buffer;
source.buffer = newaudioBuffer;
source.connect(context.destination);
// NOTE(review): start(t) takes an absolute time on the context clock;
// passing the chunk's own duration does not queue chunks back-to-back.
source.start(source.buffer.duration);
console.log(
'processAudio -> source.buffer.duration',
source.buffer.duration
);
} catch (error) {
console.log('processAudio -> error', error);
}
return reader.read().then(processAudio);
});
};
// Restart streaming whenever the selected track changes.
useEffect(() => {
getData(selectedTrack);
}, [selectedTrack]);
return (
<div className="player">
<Waveform url={selectedTrack} />
<br />
</div>
);
};
export default Player;
What I have now fetches the audio in chunks, but it plays too fast, and there is no way to make the chunks play one after another.

Categories

Resources