I have a webpage where we generate PDFs based on the user's selection of on-page items. This causes a postback (it's an ASP.NET WebForms page) which creates the PDFs server-side. An <a class="documentDownload"> tag is then added to the page for each item.
When the page reloads in the browser, the following jQuery script is executed to automatically download the files (if the user had chosen an auto-download option):
var divHost = document.createElement("div");
divHost.id = "elmntDnldLinks";
divHost.style.display = "none";
document.body.appendChild(divHost);

setTimeout(function () {
    $(".documentDownload").each(function (idx, val) {
        var lnkDownload = $(val),
            save = document.createElement("a");
        save.href = lnkDownload.attr("href");
        save.download = lnkDownload.attr("download");
        save.target = "_blank";
        divHost.appendChild(save);
        save.click();
    });
}, 1000);
This script waits 1 second, then for each .documentDownload element it creates a new <a> element with the same href attribute as the original element, appends it to the newly added hidden container, then programmatically clicks it.
[This strategy of creating new links and clicking those instead of clicking the original DOM elements gets around a browser security measure.]
This works perfectly well in Firefox but Chrome never downloads more than 10 files. Why? I can see, for example, 15 links on the page and in the hidden element, but only 10 files are downloaded.
If you pause for a second after every 10 downloads, all of them will work in Chrome.
I used an async timeout function for this workaround:
function pause(msec) {
    return new Promise((resolve) => {
        setTimeout(resolve, msec || 1000);
    });
}

async function downloadAll(elements) {
    let count = 0;
    // use for...of (not for...in) so we iterate the elements themselves
    for (const element of elements) {
        download(element); // your custom download code here, click or whatever
        if (++count >= 10) {
            await pause(1000);
            count = 0;
        }
    }
}
Simple timeouts multiplied by an element counter would probably work too, but I did not test it.
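For reference, that staggered-timeout variant might look like this (an untested sketch; download stands in for your own per-element download code, as in the loop above):

function downloadAllStaggered(elements) {
    Array.from(elements).forEach(function (element, idx) {
        // each group of 10 elements is delayed by a further second
        setTimeout(function () {
            download(element); // your custom download code here
        }, Math.floor(idx / 10) * 1000);
    });
}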
You can pause between your downloads like this:
function sleep(milliseconds) {
    // busy-wait: blocks the main thread until the time has elapsed
    let timeStart = new Date().getTime();
    while (true) {
        let elapsedTime = new Date().getTime() - timeStart;
        if (elapsedTime > milliseconds) {
            break;
        }
    }
}

// inside your download loop, where i counts downloads since the last pause:
$("#aDescarga")[0].click();
$("#aDescarga").attr("href", "");
if (i > 9) {
    sleep(2000);
    i = 0;
} else {
    i = i + 1;
}
Setting a 200ms delay between each download solves the problem.
Tested on Google Chrome 100.0.4896.127 (Windows, macOS)
Demo in CodePen - 'Download multiple files' should be allowed.
const downloadFileWithAnchor = () => {
    const anchor = document.createElement("a");
    anchor.href = "data:text/plain;charset=utf-8,";
    anchor.download = 'blank.txt';
    document.body.appendChild(anchor);
    anchor.click();
    document.body.removeChild(anchor);
};

const repeatCount = 20;
for (let i = 0; i < repeatCount; i += 1) {
    setTimeout(
        () => {
            downloadFileWithAnchor();
        },
        i * 200 // delay each download by a further 200ms
    );
}
I am trying to make a Node.js website that will return the length in seconds of any audio file a user chooses. So far I have it working with mp3, wav, and flac files, but it doesn't work for all .m4a or .aif files.
The code for my HTML page with JavaScript is below:
choose audio file to get length:
<input style="cursor: pointer;" type="file" id="file" multiple="multiple" />
<script>
    // when files are selected:
    $("#file").change(async function (e) {
        console.log('file(s) selected')
        // get files
        var files = e.currentTarget.files;
        // get number of files
        var numberOfFiles = files.length;
        // for each file
        for (let i = 0; i < numberOfFiles; i++) {
            console.log(`songs[${i}].type=`, files[i].type)
            // get file length
            let songLength = await getSongLength(files[i]);
            console.log('songLength=', songLength)
        }
    });

    // receive audio file, return length
    function getSongLength(song) {
        return new Promise(function (resolve, reject) {
            console.log('getSongLength() begin setup')
            // create objectURL and audio object for song
            var objectURL = URL.createObjectURL(song);
            var mySound = new Audio(objectURL)
            console.log('getSongLength() end setup')
            // when the song has buffered enough to play through:
            mySound.addEventListener("canplaythrough", function (e) {
                console.log('getSongLength() canplaythrough')
                var seconds = e.currentTarget.duration;
                resolve(seconds)
            });
        });
    }
</script>
I gathered 6 different files for testing, and after running them through the code above I found the following results:
aif: not working
flac: working
m4a_file: not working
m4a_file_from_comments_below: not working
mp3: working
wav: working
my test files for download: https://easyupload.io/m/la9xro
It seems like when I input my m4a file sample_M4A_file, it hangs inside the getSongLength() function and never enters the "canplaythrough" listener. Is there any alternative I can use to consistently get the duration in seconds for every audio file?
It's because your browser doesn't support Apple Lossless codecs. If you try in Safari, it will work.
If you attach an error event listener to your media element, you'll see that it fires.
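For example, added inside the question's getSongLength(), where song, mySound, and reject are in scope (a minimal sketch):

mySound.addEventListener("error", function () {
    // mySound.error is a MediaError describing why decoding failed
    console.log("cannot decode", song.name, mySound.error);
    reject(mySound.error); // reject instead of hanging forever
});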
There is no real universal solution, apart from using heavy machinery like mediainfo.js (2.4 MB+), which should support most formats.
const input = document.querySelector( "input" );
input.onchange = async (evt) => {
    const mediainfo = await new Promise( (res) => MediaInfo(null, res) );
    const file = input.files[ 0 ];
    const getSize = () => file.size;
    const readChunk = async (chunkSize, offset) =>
        new Uint8Array( await file.slice(offset, offset + chunkSize).arrayBuffer() );
    const info = await mediainfo.analyzeData(getSize, readChunk);
    // assumes we are only interested in audio duration
    const audio_track = info.media.track.find( (track) => track[ "#type" ] === "Audio" );
    console.log( audio_track.Duration );
};
<script src="https://unpkg.com/mediainfo.js@0.1.4/dist/mediainfo.min.js"></script>
<input type="file">
P.S.: when using the media element solution, there is no need to wait for canplaythrough; just wait for loadedmetadata, and if you get Infinity, try this hack.
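For what it's worth, a minimal sketch of the question's getSongLength() rewritten to use loadedmetadata (the Infinity case is left to the linked hack):

function getSongLength(song) {
    return new Promise(function (resolve) {
        var audio = new Audio(URL.createObjectURL(song));
        // fires as soon as the duration is known, without buffering enough to play through
        audio.addEventListener("loadedmetadata", function () {
            // some files report Infinity here; see the hack linked above
            resolve(audio.duration);
        });
    });
}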
I would like to create a JavaScript snippet which I would paste into the Google Chrome console, and which should copy the innerHTML of an element to the clipboard.
This should be done every second, for a long period of time.
So far I have managed to write code which loops from 0 to 9, calling a function which copies the content to the clipboard, then sleeps for 1 second. It also writes the number of loops and the data itself to the console.
function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

function mycopy() {
    var copy = function (e) {
        e.preventDefault();
        var text = document.getElementsByClassName("md-tile")[0].innerHTML
        text = text.substring(0, 10)
        console.log(text);
        if (e.clipboardData) {
            e.clipboardData.setData('text/plain', text);
        } else if (window.clipboardData) {
            window.clipboardData.setData('Text', text);
        }
    };
    window.addEventListener('copy', copy);
    document.execCommand('copy');
    window.removeEventListener('copy', copy);
}

var text = "a"
for (let XYZ = 0; XYZ < 10; XYZ++) {
    console.log('copy' + XYZ + ' Sec');
    text = ""
    mycopy();
    await sleep(1000);
}
console.log('Done');
console.log('Done');
As far as I understand with my limited knowledge, the above script should
copy the inner HTML of the first element whose class is "md-tile".
However, it stops copying after exactly 5 seconds. (It is still 5 seconds even if we change the sleep time.)
This is the response in the console:
copy0 Sec
div class
copy1 Sec
div class
copy2 Sec
div class
copy3 Sec
div class
copy4 Sec
div class
copy5 Sec
copy6 Sec
copy7 Sec
copy8 Sec
copy9 Sec
Done
Does anyone have an idea how to get around this?
Is there a time limit of 5 seconds for reaching the clipboard?
Is it a Chrome limitation, or Windows (7)?
In the end I managed to solve my issue; however, I was not able to use the clipboard, as we lose access to it after 5 seconds.
My solution was to download the HTML part as a .txt file instead.
function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

function download(filename, text) {
    var element = document.createElement('a');
    // encode the text so the markup survives the data: URL intact
    element.setAttribute('href', 'data:text/plain;charset=utf-8,' + encodeURIComponent(text));
    element.setAttribute('download', filename);
    element.style.display = 'none';
    document.body.appendChild(element);
    element.click();
    document.body.removeChild(element);
}

for (let XYZ = 0; XYZ < 10; XYZ++) {
    var text0 = document.getElementById('lastUpdate').innerHTML
    download("DialogLiveLogTime.txt", text0);
    await sleep(2000);
    console.log('Round: ' + XYZ)
}
With the above code, Google Chrome will download the inner HTML as a .txt file. Originally this would pop up a Save As window, but you can turn that off in Chrome's settings, where you can also set the download destination.
In the meantime it can be wise to delete the downloaded logs at the same frequency, or after they are read, with a different script.
I've created a loader for my website (the whole front end is custom, so as of right now I can edit about 95% of everything I have except for the WooCommerce plugin).
It's a super simple one, following this logic: if the anchor is a # or the page itself, it won't do anything (which is what I wanted). But the WooCommerce plugin that generates my image gallery produces links that aren't the page itself or a #. This means I need to check the pathname of each link, and if it ends in jpg, png, or any image file extension, skip the animation and let the plugin run its course.
I've used Barba.js, Swup, and other animation libraries with WooCommerce, and this is the only one that doesn't interrupt WooCommerce or impose so many conditions on it.
function fadeInPage() {
    if (!window.AnimationEvent) { return; }
    var fader = document.getElementById('fader');
    fader.classList.add('fade-out');
}

document.addEventListener('DOMContentLoaded', function() {
    if (!window.AnimationEvent) { return }
    var anchors = document.getElementsByTagName('a');
    ******* for (var idx = 0; idx < anchors.length; idx += 1) {
        if (anchors[idx].hostname !== window.location.hostname || anchors[idx].pathname === window.location.pathname) *******
        {
            continue;
        }
        anchors[idx].addEventListener('click', function(event) {
            var fader = document.getElementById('fader'),
                anchor = event.currentTarget;
            var listener = function() {
                window.location = anchor.href;
                fader.removeEventListener('animationend', listener);
            };
            fader.addEventListener('animationend', listener);
            event.preventDefault();
            fader.classList.add('fade-in');
        });
    }
});

window.addEventListener('pageshow', function (event) {
    if (!event.persisted) {
        return;
    }
    var fader = document.getElementById('fader');
    fader.classList.remove('fade-in');
});
I starred what I need changed. The animation works, and the page transition works. I need the loop to recognize when an a tag's href ends in jpg or png, skip it, and treat the link as if the animation wasn't there.
I've never used WooCommerce so I don't totally understand the use case, but you can get the file extension of a link like so:
for (var idx = 0; idx < anchors.length; idx += 1) {
    let fileType = anchors[idx].href.split('.').pop();
    // do whatever
}
Or if you want to compare it to a preset list of extensions you can use a regex:
for (var idx = 0; idx < anchors.length; idx += 1) {
    if (anchors[idx].href.match(/\.(jpg|png)$/)) {
        // do whatever
    }
}
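Applied to the starred loop in the question, the check could be added as one more condition that skips binding the fade handler, so image links behave as if the animation wasn't there. A sketch (the extension list is an assumption, extend it as needed; matching on pathname rather than href avoids being fooled by query strings):

for (var idx = 0; idx < anchors.length; idx += 1) {
    if (anchors[idx].hostname !== window.location.hostname ||
        anchors[idx].pathname === window.location.pathname ||
        anchors[idx].pathname.match(/\.(jpg|jpeg|png|gif|webp)$/i)) {
        continue; // leave image links (and the existing cases) untouched
    }
    // ...attach the fade-out click listener as before...
}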
If I have HTML5 video and audio elements, is there a clean way to keep them in sync? They should act like a video file that contains an audio track, so advancing one track manually should bring the other one along with it. Let's say the two tracks have the same duration.
I'd like a solution that works across browsers, but I don't particularly care if it works on older browsers. It would also be nice to avoid the use of JavaScript if possible. Otherwise, a dead-simple JavaScript library would be best -- something that only asks for which tracks to synchronize and takes care of the rest.
I've looked into mediagroup... but it looks like it only works in Safari. I've looked into audioTracks... but the user has to enable the feature in Firefox.
I've looked into Popcorn.js, which is a JavaScript framework that seems designed for this task... but it looks like there hasn't been any activity in over a year. Besides that, the only demonstrations I can find are of synchronizing things like text or slides to video, not audio to video.
You can use Promise.all() and fetch() to retrieve each media resource as a Blob, URL.createObjectURL() to create a Blob URL for each resource, and the canplaythrough event with Array.prototype.every() to check that each resource can play; then call .play() on each element once both resources can play.
I didn't make it clear in the question. I meant that the two tracks
should stay in sync as though playing a regular video file with audio.
One approach could be to keep a timestamp variable and use the seeked event to update the .currentTime of the other element, but only when the time elapsed since the stored timestamp exceeds a minimum delay, to prevent the two seeked handlers updating .currentTime recursively.
<!DOCTYPE html>
<html>

<head>
</head>

<body>
    <button>load media</button>
    <br>
    <div id="loading" style="display:none;">loading media...</div>
    <script>
        var mediaBlobs = [];
        var mediaTypes = [];
        var mediaLoaded = [];
        var button = document.querySelector("button");
        var loading = document.getElementById("loading");
        var curr = void 0;
        var loadMedia = () => {
            loading.style.display = "block";
            button.setAttribute("disabled", "disabled");
            return Promise.all([
                // `RETURN` by smiling cynic is licensed under a Creative Commons Attribution 3.0 Unported License
                fetch("https://ia600305.us.archive.org/30/items/return_201605/return.mp3")
                , fetch("http://nickdesaulniers.github.io/netfix/demo/frag_bunny.mp4")
            ])
            .then(responses => responses.map(media => ({
                [media.headers.get("Content-Type").split("/")[0]]: media.blob()
            })))
            .then(data => {
                for (var currentMedia = 0; currentMedia < data.length; currentMedia++) {
                    (function(i) {
                        var type = Object.keys(data[i])[0];
                        mediaTypes.push(type);
                        console.log(data[i]);
                        var mediaElement = document.createElement(type);
                        mediaElement.id = type;
                        var label = document.createElement("label");
                        mediaElement.setAttribute("controls", "controls");
                        mediaElement.oncanplaythrough = () => {
                            mediaLoaded.push(true);
                            if (mediaLoaded.length === data.length
                                && mediaLoaded.every(Boolean)) {
                                loading.style.display = "none";
                                for (var track = 0; track < mediaTypes.length; track++) {
                                    document.getElementById(mediaTypes[track]).play();
                                    console.log(document.getElementById(mediaTypes[track]));
                                }
                            }
                        }
                        var seek = (e) => {
                            // ignore seeks within 20ms of the last one so the two
                            // elements do not update each other recursively
                            if (!curr || new Date().getTime() - curr > 20) {
                                document.getElementById(
                                    mediaTypes.filter(id => id !== e.target.id)[0]
                                ).currentTime = e.target.currentTime;
                                curr = new Date().getTime();
                            }
                        }
                        mediaElement.onseeked = seek;
                        mediaElement.onended = () => {
                            for (var track = 0; track < mediaTypes.length; track++) {
                                document.getElementById(mediaTypes[track]).pause()
                            }
                        }
                        mediaElement.ontimeupdate = (e) => {
                            e.target.previousElementSibling
                                .innerHTML = `${mediaTypes[i]} currentTime: ${Math.round(e.target.currentTime)}<br>`;
                        }
                        data[i][type].then(blob => {
                            mediaBlobs.push(URL.createObjectURL(blob));
                            mediaElement.src = mediaBlobs[mediaBlobs.length - 1];
                            document.body.appendChild(mediaElement);
                            document.body.insertBefore(label, mediaElement);
                        })
                    }(currentMedia));
                }
            })
        };
        button.addEventListener("click", loadMedia);
    </script>
</body>

</html>
plnkr http://plnkr.co/edit/r51oh7KsZv8DEYhpRJlL?p=preview
To my knowledge, this is impossible with pure HTML.
You can use the currentTime property of both <video> and <audio> elements to synchronize the time.
var video = document.getElementById("your-video");
var audio = document.getElementById("your-audio");
audio.currentTime = video.currentTime;
If necessary, you could also use the timeupdate event to continuously re-sync the video and audio.
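A minimal sketch of that continuous re-sync, reusing the elements above (the 0.3s threshold is an arbitrary choice to avoid constant seeking):

video.addEventListener("timeupdate", function () {
    // only correct drift above a small threshold; resetting currentTime
    // on every event would cause audible stuttering
    if (Math.abs(audio.currentTime - video.currentTime) > 0.3) {
        audio.currentTime = video.currentTime;
    }
});
// mirror play/pause so controlling the video drives the audio
video.addEventListener("play", function () { audio.play(); });
video.addEventListener("pause", function () { audio.pause(); });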
I'm trying to make a link tracking script. It should work like Google Analytics, only it should make posts to our own server. I'm using jQuery for this. The code I have written so far is the following:
jQuery(document).ready(function() {
    var opts;
    jQuery.fn.trackAllLinks = function(settings) {
        settings = jQuery.extend({}, jQuery.fn.trackAllLinks.defaults, settings);
        opts = settings;

        function track() {
            var href = jQuery(this).attr('href');
            var trackImage = new Image(1, 1);
            trackImage.src = opts.linkredirector + '?eurl=' + jQuery.URLEncode(href) + '&rnd=' + new Date().getTime() + '&title=trackerimage.gif';
            trackImage.onload = function() {
                trackImage.onload = null;
                doNothing();
            };
            delay(300);
            return true;
        }

        // busy-wait to give the tracking image request time to fire
        // before the browser navigates away
        function delay(mseconds) {
            var currentTime = new Date();
            var endTime = currentTime.getTime() + mseconds;
            while (currentTime.getTime() < endTime) {
                currentTime = new Date();
            }
        }

        function doNothing() {
        }

        if (jQuery(this).is("a")) {
            jQuery(this).click(track);
        }
        jQuery(this).find("a").click(track);
    };

    jQuery.fn.trackAllLinks.defaults = {
        linkredirector: '__url_to_post_on__'
    };
});
It works fine in all browsers except Safari. When I'm using a mailto link or an anchor it works, but when I'm linking to another page it doesn't. I have been testing a lot of different implementations and I can't get it to work. Do any of you have an idea what I'm missing? I have tried to understand how Google Analytics works, and as far as I can see it does the same thing. When I use Wireshark to monitor my network I see that the image from Google is being requested but that my image isn't.
greets,
Daan
This is random, but you might try adding a randomized parameter to the query string (both the name and the value), like:
Math.floor(Math.random() * 1000) + '=' + Math.floor(Math.random() * 1000)
I've had to do that in the past to get Safari to register a dynamically loaded resource.
(I see you have &rnd= already, but maybe try randomizing the name, too?)
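For instance, applied to the track() function from the question (a sketch; the 1000 range and the 'r' prefix on the parameter name are arbitrary choices):

var rndName = 'r' + Math.floor(Math.random() * 1000),
    rndValue = Math.floor(Math.random() * 1000);
trackImage.src = opts.linkredirector + '?eurl=' + jQuery.URLEncode(href) +
    '&' + rndName + '=' + rndValue + '&title=trackerimage.gif';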