Can't get length of m4a audio file on webpage - JavaScript

I am trying to make a Node.js website that will return the length in seconds of any audio file a user chooses. So far I have it working with mp3, wav, and flac files, but it doesn't work for all .m4a or .aif files.
The code for my HTML page with JavaScript is below:
choose audio file to get length:
<input style="cursor: pointer;" type="file" id="file" multiple="multiple" />
<script>
  //when files are selected:
  $("#file").change(async function (e) {
    console.log('file(s) selected')
    //get files
    var files = e.currentTarget.files;
    //get number of files
    var numberOfFiles = files.length;
    //for each file
    for (let i = 0; i < numberOfFiles; i++) {
      console.log(`songs[${i}].type=`, files[i].type)
      //get file length
      let songLength = await getSongLength(files[i]);
      console.log('songLength=', songLength)
    }
  });
  //receive audio file, return length
  function getSongLength(song) {
    return new Promise(function (resolve, reject) {
      console.log('getSongLength() begin setup')
      //create objectURL and audio object for song
      var objectURL = URL.createObjectURL(song);
      var mySound = new Audio(objectURL);
      console.log('getSongLength() end setup')
      //when song metadata is loaded:
      mySound.addEventListener("canplaythrough", function (e) {
        console.log('getSongLength() canplaythrough')
        var seconds = e.currentTarget.duration;
        resolve(seconds)
      });
    });
  }
</script>
I gathered 6 different files for testing, and after running them through my code above I found the following results:
aif: not working
flac: working
m4a_file: not working
m4a_file_from_comments_below: not working
mp3: working
wav: working
my test files for download: https://easyupload.io/m/la9xro
It seems that when I input my m4a file sample_M4A_file, it hangs inside the getSongLength() function and never reaches the "canplaythrough" listener. Is there any alternative I can use to consistently get the duration in seconds for every audio file?

It's because your browser doesn't support Apple Lossless codecs. If you try in Safari, it will work.
If you attach an error event listener to your media element, you'll see that it fires.
There is no real universal solution, apart from using heavy machinery like mediainfo.js (2.4MB+), which should support most formats.
const input = document.querySelector( "input" );
input.onchange = async (evt) => {
  const mediainfo = await new Promise( (res) => MediaInfo(null, res) );
  const file = input.files[ 0 ];
  const getSize = () => file.size;
  const readChunk = async (chunkSize, offset) =>
    new Uint8Array( await file.slice(offset, offset + chunkSize).arrayBuffer() );
  const info = await mediainfo.analyzeData(getSize, readChunk);
  // assumes we are only interested in audio duration
  const audio_track = info.media.track.find( (track) => track[ "@type" ] === "Audio" );
  console.log( audio_track.Duration );
};
<script src="https://unpkg.com/mediainfo.js@0.1.4/dist/mediainfo.min.js"></script>
<input type="file">
PS: when using the media element solution, there is no need to wait for canplaythrough; just wait for loadedmetadata, and if you get Infinity, try this hack.
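For reference, here is a minimal sketch of that lighter media-element path (my own illustration, not the asker's original code): a drop-in variant of getSongLength that resolves on loadedmetadata and rejects on error, so files the browser cannot decode fail fast instead of hanging forever.
//minimal sketch, assuming the browser can decode the file at all
function getSongLength(song) {
  return new Promise(function (resolve, reject) {
    var objectURL = URL.createObjectURL(song);
    var mySound = new Audio(objectURL);
    //duration is available as soon as the metadata has been parsed
    mySound.addEventListener("loadedmetadata", function () {
      URL.revokeObjectURL(objectURL);
      resolve(mySound.duration);
    });
    //fires when the codec/container is unsupported, so the promise never hangs
    mySound.addEventListener("error", function () {
      URL.revokeObjectURL(objectURL);
      reject(new Error("cannot decode " + song.name));
    });
  });
}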

Related

How to get video's metadata (length) before uploading?

First things first: I am trying to extract the video duration from the file and then display it without having to actually upload the file.
When a user selects a video, its information is displayed below: file name, file size, and file type. However, I cannot get the duration to display. I tried some code snippets I found on other sites as well as here, but none of them seem to work. I'm just trying to find simple code that does the task.
I tried onloadedmetadata but I don't think that would even work.
Please note : I'm still learning javascript.
I also tried some sites' tutorials and some code snippets I found via Stack Overflow.
function uploadFunction(){
  //Variables
  var cVideo = document.getElementById("fileUp");
  var txtInfo = "";
  //function
  if ('files' in cVideo){
    if(cVideo.files.length == 0){
      txtInfo = "Please select a video";
    } else {
      for (var v = 0; v < cVideo.files.length; v++){
        txtInfo += "<br><strong>#" + (v+1) + " File Information:</strong> <br>";
        var infoFile = cVideo.files[v];
        if('name' in infoFile){
          txtInfo += "File name: " +infoFile.name +"<br>";
        }
        if('size' in infoFile){
          txtInfo += "File size: " +infoFile.size +" Bytes <br>";
        }
        if('type' in infoFile){
          txtInfo += "File Type: "+infoFile.type +" <br>";
        }
        if('duration' in infoFile){
          txtInfo += "Duration : "+infoFile.duration +"<br>";
        }
      }
    }
  }
  document.getElementById("information").innerHTML = txtInfo;
}
HTML
<input type="file" id="fileUp" name="fileUpload" multiple size="50" onchange="uploadFunction()">
<p id="information"></p>
Can't get the duration to appear at all.
It's actually reasonably simple, but since this requires turning the file into a blob and then checking its duration with a video element, uploading a video longer than a couple of minutes will take a lot of processing and slow down your page immensely. I have added a file size restriction (which is a reasonable start in my opinion). Here's a snippet (which will not work on Stack Overflow due to sandbox restrictions, but I did test it on a local server). I obviously also don't check MIME types or whether it's a video at all, although loadedmetadata will not trigger if you upload anything that is not a video.
const fileInput = document.querySelector( 'input[type=file]' );
const fileDuration = document.getElementById( 'file-duration' )
// Listen for any change to the value of the file input
fileInput.addEventListener( 'change', event => {
  fileDuration.textContent = 'Fetching video duration...';
  // When the file selected by the user changes:
  // - create a fresh <video> element that has not yet fired any events
  // - create a file reader to safely retrieve and manipulate the file
  const file = event.target.files[0];
  const video = document.createElement( 'video' );
  const reader = new FileReader();
  // Cancel if the initial filesize just seems too large.
  // Here anything over 10 MB is refused, which is plenty for a 30-second clip.
  if( file.size > 10000000 ){
    fileDuration.textContent = `File is too large (${file.size}). Refused to read duration.`;
    return;
  }
  // Before setting any source on the <video> element,
  // add a listener for `loadedmetadata` - the event that fires
  // when metadata such as duration is available -
  // as well as an error event in case something goes wrong.
  video.addEventListener( 'loadedmetadata', event => {
    // When the metadata is loaded, duration can be read.
    fileDuration.textContent = `Video is ${video.duration} seconds long.`;
  });
  video.addEventListener( 'error', event => {
    // If the file isn't a video or something went wrong, print out an error message.
    fileDuration.textContent = `Could not get duration of video, an error has occurred.`;
  });
  // Before reading any file, attach an event listener for
  // when the file is fully read by the reader.
  // After that we can use this read result as a source for the <video>
  reader.addEventListener( 'loadend', function(){
    // reader.result now contains a `data:` URL. This looks like a
    // nonsensical collection of characters, but the <video> element
    // should be able to play it.
    video.src = reader.result;
    // After we assign it to the `src` of the <video>, it will
    // act like any other video source, and will trigger the related
    // events such as `loadedmetadata`, `canplay`, etc...
  });
  // Instruct the reader to read the file as a data URL. When it's done
  // it will trigger the `loadend` event handler above.
  reader.readAsDataURL( file );
});
<input type="file" />
<p id="file-duration">No video file</p>
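As a lighter variation on the snippet above (a sketch of mine, assuming the same fileInput and fileDuration elements), you can skip the FileReader entirely and hand the <video> an object URL, which avoids reading the whole file into memory:
// hedged sketch: object URL instead of FileReader/data URL
fileInput.addEventListener('change', event => {
  const file = event.target.files[0];
  const video = document.createElement('video');
  video.preload = 'metadata';
  video.addEventListener('loadedmetadata', () => {
    fileDuration.textContent = `Video is ${video.duration} seconds long.`;
    URL.revokeObjectURL(video.src); // release the reference once we have the duration
  });
  video.src = URL.createObjectURL(file);
});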

JSZip temporary file location

I have an Electron application using JSZip to create a zip file that the user is then able to save. Everything works fine, but my problem is that my application uses the user's Downloads folder. I'm guessing it's making a temporary file. I've submitted my application to the Mac App Store and they want me to use another location instead of the user's Downloads folder for the temporary file. Is there any way I can specify the temporary location, or maybe something other than JSZip that will do this?
Here is the code I use
savePNGButton.addEventListener('click', function(e) {
  var zip = new JSZip();
  if (WatermarkText == ""){
    var img = zip.folder("images");
  } else {
    var img = zip.folder(WatermarkText);
  }
  $(".WatermarkPhoto").each(function(index) {
    imgsrc = this.src;
    var DataURL = imgsrc.replace('data:image/png;base64,','');
    img.file(WatermarkText+index+".png", DataURL, {base64: true});
  });
  zip.generateAsync({type:"blob"})
    .then(function(content) {
      saveAs(content, WatermarkText+".zip");
    });
});
[edit]
Looking more into this, it looks like my problem is not with JSZip but with Chrome or FileSaver.js using the Downloads folder as a temp folder for the file before the user chooses where to place it. Is there any way I can change the temp location for my Electron app?
If anyone comes across this: I never figured out a way around the HTML5 filesystem approach of moving the temp file before the user selects the download location. Instead I am using the Node.js file system with Electron's showSaveDialog. I also had to change JSZip to use .generateNodeStream instead of .generateAsync. Below is the function that I got working for me.
//assumes fs and Electron's dialog are available in this process, e.g.:
//  const fs = require('fs');
//  const { dialog } = require('electron').remote; // or require('electron') in the main process
savePNGButton.addEventListener('click', function(e) {
  var zip = new JSZip();
  if (WatermarkText == ""){
    var img = zip.folder("images");
  } else {
    var img = zip.folder(WatermarkText);
  }
  $(".WatermarkPhoto").each(function(index) {
    imgsrc = this.src;
    var DataURL = imgsrc.replace('data:image/png;base64,','');
    img.file(WatermarkText+index+".png", DataURL, {base64: true});
  });
  // zip.file("file", content);
  // ... and other manipulations
  dialog.showSaveDialog({title: 'Test', defaultPath: '~/'+WatermarkText+'.zip', extensions: ['zip']}, (fileName) => {
    if (fileName === undefined){
      console.log("You didn't save the file");
      return;
    }
    zip
      .generateNodeStream({type:'nodebuffer', streamFiles:true})
      .pipe(fs.createWriteStream(fileName))
      .on('finish', function () {
        // JSZip generates a readable stream with a "end" event,
        // but is piped here in a writable stream which emits a "finish" event.
        console.log("zip written.");
      });
  });
});
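Note that in newer Electron versions (6 and later, if I recall correctly) dialog.showSaveDialog returns a Promise instead of taking a callback. A hedged sketch of the same flow in that style, reusing zip and WatermarkText from the snippet above, might look like:
// hedged sketch for newer Electron, where showSaveDialog resolves to { canceled, filePath }
const fs = require('fs');
const { dialog } = require('electron'); // main process; use a remote module from a renderer

dialog.showSaveDialog({ title: 'Test', defaultPath: '~/' + WatermarkText + '.zip' })
  .then(({ canceled, filePath }) => {
    if (canceled || !filePath) {
      console.log("You didn't save the file");
      return;
    }
    zip.generateNodeStream({ type: 'nodebuffer', streamFiles: true })
      .pipe(fs.createWriteStream(filePath))
      .on('finish', () => console.log('zip written.'));
  });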

Synchronize video and audio (preferably without JavaScript)

If I have HTML5 video and audio elements, is there a clean way to keep them in sync? They should act like a video file that contains an audio track, so advancing one track manually should bring the other one along with it. Let's say the two tracks have the same duration.
I'd like a solution that works across browsers, but I don't particularly care if it works on older browsers. It would also be nice to avoid the use of JavaScript if possible. Otherwise, a dead-simple JavaScript library would be best -- something that only asks for which tracks to synchronize and takes care of the rest.
I've looked into mediagroup... but it looks like it only works in Safari. I've looked into audioTracks... but the user has to enable the feature in Firefox.
I've looked into Popcorn.js, which is a JavaScript framework that seems designed for this task... but it looks like there hasn't been any activity in over a year. Besides that, the only demonstrations I can find are of synchronizing things like text or slides to video, not audio to video.
You can use Promise.all() and fetch() to retrieve each media resource as a Blob, URL.createObjectURL() to create a Blob URL for the resource, the canplaythrough event and Array.prototype.every() to check whether each resource can play, and then call .play() on each resource once both can play.
I didn't make it clear in the question. I meant that the two tracks
should stay in sync as though playing a regular video file with audio.
One approach could be to create a timestamp variable and use the seeked event to update the .currentTime of the other element only when the time elapsed since that variable was set exceeds a minimum delay, to prevent .currentTime from being set recursively.
<!DOCTYPE html>
<html>
<head>
</head>
<body>
  <button>load media</button>
  <br>
  <div id="loading" style="display:none;">loading media...</div>
  <script>
    var mediaBlobs = [];
    var mediaTypes = [];
    var mediaLoaded = [];
    var button = document.querySelector("button");
    var loading = document.getElementById("loading");
    var curr = void 0;
    var loadMedia = () => {
      loading.style.display = "block";
      button.setAttribute("disabled", "disabled");
      return Promise.all([
        // `RETURN` by smiling cynic are licensed under a Creative Commons Attribution 3.0 Unported License
        fetch("https://ia600305.us.archive.org/30/items/return_201605/return.mp3")
        , fetch("http://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4")
      ])
      .then(responses => responses.map(media => ({
        [media.headers.get("Content-Type").split("/")[0]]: media.blob()
      })))
      .then(data => {
        for (var currentMedia = 0; currentMedia < data.length; currentMedia++) {
          (function(i) {
            var type = Object.keys(data[i])[0];
            mediaTypes.push(type);
            console.log(data[i]);
            var mediaElement = document.createElement(type);
            mediaElement.id = type;
            var label = document.createElement("label");
            mediaElement.setAttribute("controls", "controls");
            mediaElement.oncanplaythrough = () => {
              mediaLoaded.push(true);
              if (mediaLoaded.length === data.length
                  && mediaLoaded.every(Boolean)) {
                loading.style.display = "none";
                for (var track = 0; track < mediaTypes.length; track++) {
                  document.getElementById(mediaTypes[track]).play();
                  console.log(document.getElementById(mediaTypes[track]));
                }
              }
            }
            var seek = (e) => {
              if (!curr || new Date().getTime() - curr > 20) {
                document.getElementById(
                  mediaTypes.filter(id => id !== e.target.id)[0]
                ).currentTime = e.target.currentTime;
                curr = new Date().getTime();
              }
            }
            mediaElement.onseeked = seek;
            mediaElement.onended = () => {
              for (var track = 0; track < mediaTypes.length; track++) {
                document.getElementById(mediaTypes[track]).pause()
              }
            }
            mediaElement.ontimeupdate = (e) => {
              e.target.previousElementSibling
                .innerHTML = `${mediaTypes[i]} currentTime: ${Math.round(e.target.currentTime)}<br>`;
            }
            data[i][type].then(blob => {
              mediaBlobs.push(URL.createObjectURL(blob));
              mediaElement.src = mediaBlobs[mediaBlobs.length - 1];
              document.body.appendChild(mediaElement);
              document.body.insertBefore(label, mediaElement);
            })
          }(currentMedia));
        }
      })
    };
    button.addEventListener("click", loadMedia);
  </script>
</body>
</html>
plnkr http://plnkr.co/edit/r51oh7KsZv8DEYhpRJlL?p=preview
To my knowledge, this is impossible with pure HTML.
You can use the currentTime property of both <video> and <audio> elements to synchronize the time.
var video = document.getElementById("your-video");
var audio = document.getElementById("your-audio");
audio.currentTime = video.currentTime;
If necessary, you could also use the timeupdate event to continuously re-sync the video and audio.
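A minimal sketch of that continuous re-sync idea (the 0.3-second drift threshold is an arbitrary choice of mine, not part of the answer above):
// hedged sketch: keep the audio close to the video clock while playing
var video = document.getElementById("your-video");
var audio = document.getElementById("your-audio");
video.addEventListener("timeupdate", function () {
  // only correct drift above a small threshold to avoid constant seeking/stutter
  if (Math.abs(audio.currentTime - video.currentTime) > 0.3) {
    audio.currentTime = video.currentTime;
  }
});
video.addEventListener("play", function () { audio.play(); });
video.addEventListener("pause", function () { audio.pause(); });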

slice large file into chunks and upload using ajax and html5 FileReader

What I want to implement is:
In the front end, I use the HTML5 File API to read the file and then upload the file's content to the PHP backend using AJAX, and it's OK if the file size is small. However, if the file is big enough, it causes Chrome to crash. So I split the large file into chunks using file.slice; when all chunks have been uploaded to PHP, I merge the chunks into a single complete file.
the code is as follows:
the front end:
<style>
  #container {
    min-width:300px;
    min-height:200px;
    border:3px dashed #000;
  }
</style>
<div id='container'>
</div>
<script>
  function addDNDListener(obj){
    obj.addEventListener('dragover',function(e){
      e.preventDefault();
      e.stopPropagation();
    },false);
    obj.addEventListener('dragenter',function(e){
      e.preventDefault();
      e.stopPropagation();
    },false);
    obj.addEventListener('drop',function(e){
      e.preventDefault();
      e.stopPropagation();
      var ul = document.createElement("ul");
      var filelist = e.dataTransfer.files;
      for(var i=0;i<filelist.length;i++){
        var file = filelist[i];
        var li = document.createElement('li');
        li.innerHTML = '<label id="'+file.name+'">'+file.name+':</label> <progress value="0" max="100"></progress>';
        ul.appendChild(li);
      }
      document.getElementById('container').appendChild(ul);
      for(var i=0;i<filelist.length;i++){
        var file = filelist[i];
        uploadFile(file);
      }
    },false);
  }
  function uploadFile(file){
    var loaded = 0;
    var step = 1024*1024;
    var total = file.size;
    var start = 0;
    var progress = document.getElementById(file.name).nextSibling;
    var reader = new FileReader();
    reader.onprogress = function(e){
      loaded += e.loaded;
      progress.value = (loaded/total) * 100;
    };
    reader.onload = function(e){
      var xhr = new XMLHttpRequest();
      var upload = xhr.upload;
      upload.addEventListener('load',function(){
        if(loaded <= total){
          blob = file.slice(loaded,loaded+step+1);
          reader.readAsBinaryString(blob);
        }else{
          loaded = total;
        }
      },false);
      xhr.open("POST", "upload.php?fileName="+file.name+"&nocache="+new Date().getTime());
      xhr.overrideMimeType("application/octet-stream");
      xhr.sendAsBinary(e.target.result);
    };
    var blob = file.slice(start,start+step+1);
    reader.readAsBinaryString(blob);
  }
  window.onload = function(){
    addDNDListener(document.getElementById('container'));
    if(!XMLHttpRequest.prototype.sendAsBinary){
      XMLHttpRequest.prototype.sendAsBinary = function(datastr) {
        function byteValue(x) {
          return x.charCodeAt(0) & 0xff;
        }
        var ords = Array.prototype.map.call(datastr, byteValue);
        var ui8a = new Uint8Array(ords);
        try{
          this.send(ui8a);
        }catch(e){
          this.send(ui8a.buffer);
        }
      };
    }
  };
</script>
the php code:
<?php
$filename = "upload/".$_GET['fileName'];
//$filename = "upload/".$_GET['fileName']."_".$_GET['nocache'];
//HTTP_RAW_POST_DATA is deprecated (removed in PHP 7); php://input below is the fallback
$xmlstr = $GLOBALS['HTTP_RAW_POST_DATA'];
if(empty($xmlstr)){
  $xmlstr = file_get_contents('php://input');
}
$is_ok = false;
while(!$is_ok){
  $file = fopen($filename,"ab");
  if(flock($file,LOCK_EX)){
    fwrite($file,$xmlstr);
    flock($file,LOCK_UN);
    fclose($file);
    $is_ok = true;
  }else{
    fclose($file);
    sleep(3);
  }
}
The problem is that after all the chunks of the file have been uploaded to the server and merged into a new one, the total file size is smaller than the original, and the merged file is broken. Where is the problem and how do I fix it?
Using the readAsBinaryString fn is just bad practice.
sendAsBinary is also deprecated.
Reading the chunks' content is simply unnecessary. Slicing them is enough: xhr.send(blob.slice(0,10))
Slicing is also unnecessary unless the server doesn't accept such large files (such as Dropbox's limited REST API).
So if anyone is trying to be clever by using worker threads, base64, or FileReader for uploading large files: don't do that, it's all unnecessary.
The only time it's okay to read/slice the file is if you have decided to encrypt/decrypt/zip the files before sending them to the server.
But only for a limited time, until all browsers start supporting streams.
Then you should take a look at fetch and ReadableStream:
fetch(url, {method: 'post', body: new ReadableStream({...})})
If you just need to forward the blob to the server, just do:
xhr.send(blob_or_file) and the browser will take care of reading it (correctly) without consuming any extra memory, no matter how large the file/blob is.
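If you do still need chunks (for example to stay under a server-side request size limit), a hedged sketch of slicing and sending raw Blob pieces without any FileReader might look like this; the upload.php endpoint and its offset parameter are assumptions for illustration, not the original protocol:
// hedged sketch: send raw Blob slices directly, no FileReader involved
async function uploadInChunks(file, chunkSize = 1024 * 1024) {
  for (let offset = 0; offset < file.size; offset += chunkSize) {
    const chunk = file.slice(offset, offset + chunkSize);
    // the endpoint and query parameters here are hypothetical
    await fetch(`upload.php?fileName=${encodeURIComponent(file.name)}&offset=${offset}`, {
      method: "POST",
      body: chunk // the browser reads and streams the Blob slice for us
    });
  }
}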
There is a small error in your js script
I noticed that the reader.onprogress event is triggered more times than the xhr load event. In this case some chunks are skipped. Try to increment the loaded variable inside the load function.
function uploadFile(file){
  var loaded = 0;
  var step = 1024*1024;
  var total = file.size;
  var start = 0;
  var progress = document.getElementById(file.name).nextSibling.nextSibling;
  var reader = new FileReader();
  reader.onload = function(e){
    var xhr = new XMLHttpRequest();
    var upload = xhr.upload;
    upload.addEventListener('load',function(){
      loaded += step;
      progress.value = (loaded/total) * 100;
      if(loaded <= total){
        blob = file.slice(loaded,loaded+step);
        reader.readAsBinaryString(blob);
      }else{
        loaded = total;
      }
    },false);
    xhr.open("POST", "upload.php?fileName="+file.name+"&nocache="+new Date().getTime());
    xhr.overrideMimeType("application/octet-stream");
    xhr.sendAsBinary(e.target.result);
  };
  var blob = file.slice(start,step);
  reader.readAsBinaryString(blob);
}
This problem is mostly caused by global restrictions of shared hosts. They often control the amount of data and drop the connection if the limit is exceeded.
I tried this several times and in several ways, always getting stuck at the same point: the target file was smaller and corrupted. Even when taking a smaller file for upload, so that the merged data fit within the limit, the result was OK.
You have only one option: get the maximum size for files to be opened increased.
Every time you open the target file and write the chunk content into it, and the size exceeds the limit, the hoster will break the connection. Example hoster: Strato.
Here the limit is globally set to 16MB. I get a maximum merge-result size of double that. No chance to override it.
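For what it's worth, on hosts where you are allowed to change PHP settings yourself, the limits involved are usually the standard php.ini directives sketched below; shared hosts like the one described may simply not let you raise them, and the exact values here are placeholders, not recommendations.
; php.ini (or a per-directory override, where the host permits it)
post_max_size = 64M        ; maximum size of raw POST bodies such as the uploaded chunks
upload_max_filesize = 64M  ; maximum size of files uploaded via multipart forms
memory_limit = 128M        ; generally kept larger than post_max_size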

Reading mp3 file duration in Chrome with JavaScript/jQuery

I have a 'browser application' where users should be able to drag and drop local mp3 files into Chrome and see some mp3 info (without uploading files to a server)...
So far, I can successfully read and display mp3 tags (with the JavaScript-ID3-Reader library), but I'm struggling a bit with displaying the mp3's duration. Is it possible to get that info with JS/jQuery only? If not, what would you recommend for solving this simple problem?
The code I'm using to handle dropping files into the browser looks like this:
function initialize() {
  var target = document;
  if (target === null) {
    return false;
  }
  target.addEventListener('drop', function(event) {
    event.preventDefault();
    var files = event.dataTransfer.files;
    console.log(files);
    for (var i = 0; i < files.length; i++) {
      processFile(files[i]);
      var objectURL = window.URL.createObjectURL(files[i]);
      console.log(objectURL);
    }
  }, true);
}
Thanks!
Try this
var audio = new Audio()
audio.addEventListener("timeupdate", function() {
  console.log(audio.currentTime)
})
audio.addEventListener("canplaythrough", function() {
  //duration is reliable once the file can play through
  console.log(audio.duration)
})
target.on("change", function(e) {
  var file = e.currentTarget.files[0]
  var objectUrl = URL.createObjectURL(file)
  audio.src = objectUrl
  //note: the metadata has not loaded yet at this point,
  //so audio.duration may still be NaN here
  $("#duration").html(audio.duration)
})
If you are using a recent browser that supports the audio tag, you can load the mp3 in an <audio> element and access the duration property via the DOM.
To load an audio file into the browser, see window.URL.createObjectURL.
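A minimal sketch of that idea wired into the drag-and-drop flow from the question (my own illustration; the loadedmetadata event is enough, and no jQuery is required):
//minimal sketch: read duration from dropped mp3 files without uploading them
document.addEventListener('dragover', function (e) { e.preventDefault(); });
document.addEventListener('drop', function (event) {
  event.preventDefault();
  var files = event.dataTransfer.files;
  for (var i = 0; i < files.length; i++) {
    var audio = new Audio(window.URL.createObjectURL(files[i]));
    audio.addEventListener('loadedmetadata', function (e) {
      //duration in seconds, available once the metadata is parsed
      console.log(e.target.duration);
    });
  }
});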
