I'm sure you know the case where Gmail shows the free storage in the "Lots of space" paragraph on the login page. There is a counter running on that page and I'm curious to know how it works. Can you give me some pointers, links, tutorials, reads, explanations?
If you look at the source for that page you will find that it is a simple JavaScript function that just updates the number once a second. There is no magic involved and it is not live data in any way, just an estimate.
This is the code in charge of it. It looks like it is based on a time computation being mapped to a number of bytes.
function updateQuota() {
    if (!quota_elem) {
        return;
    }
    var now = (new Date()).getTime();
    // CP is an array of [timestamp, size] checkpoints; find the first one in the future.
    var i;
    for (i = 0; i < CP.length; i++) {
        if (now < CP[i][0]) {
            break;
        }
    }
    if (i == 0) {
        // Before the first checkpoint: nothing to show yet, try again in a second.
        setTimeout(updateQuota, 1000);
    } else if (i == CP.length) {
        // Past the last checkpoint: show its final size.
        quota_elem.innerHTML = CP[i - 1][1];
    } else {
        // Between two checkpoints: linearly interpolate the size for the current time.
        var ts = CP[i - 1][0];
        var bs = CP[i - 1][1];
        quota_elem.innerHTML = format(((now - ts) / (CP[i][0] - ts) * (CP[i][1] - bs)) + bs);
        setTimeout(updateQuota, 1000);
    }
}
var PAD = '.000000';
You'll find a nice slideshow detailing how it works here:
http://www.slideshare.net/kuchmuch/gmails-quota-secrets
As mentioned, it is just a set of dates mapped to predefined sizes, which the ticker then counts towards.
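For illustration only, here is a minimal self-contained sketch of the same idea. The CP checkpoint data below is invented (Gmail generates its real data server side); each entry is a [timestamp, size] pair, and the ticker linearly interpolates between the two checkpoints that straddle the current time.

// Invented checkpoints: [timestamp in ms, quota in MB at that time].
var start = Date.now();
var CP = [
    [start,                7400.000000],
    [start + 24 * 3600000, 7400.350000],   // made-up growth of ~0.35 MB per day
    [start + 48 * 3600000, 7400.700000]
];

function currentQuota() {
    var now = Date.now();
    var i;
    for (i = 0; i < CP.length && now >= CP[i][0]; i++) {}
    if (i === 0) return CP[0][1];                      // before the first checkpoint
    if (i === CP.length) return CP[CP.length - 1][1];  // past the last checkpoint
    var ts = CP[i - 1][0], bs = CP[i - 1][1];
    // Linear interpolation between the two surrounding checkpoints
    return (now - ts) / (CP[i][0] - ts) * (CP[i][1] - bs) + bs;
}

setInterval(function () {
    console.log(currentQuota().toFixed(6) + ' MB');
}, 1000);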
Related
I wrote a little script for Google Sheets. Whenever I add something new in the table, the new values are added. If I do this quickly (enter a value and press Enter right away so that I move to the next line), the script doesn't keep up and leaves out values (lines). Has anyone ever had this problem and knows how to solve it?
I'm not concerned with how it actually works, just with how I can make these lines run faster in Google Sheets.
function addDateTimetoCell(e){
    var pDate = new Date();
    var pSheet = SpreadsheetApp.getActiveSheet();
    var pRange = e.range;
    switch(pRange.getColumn()){
        case 1: {
            if(pSheet.getRange(pRange.getRow(), 4).getValue().length <= 0){
                pSheet.getRange(pRange.getRow(), 4).setValue(pDate);
                if(pSheet.getRange(pRange.getRow(), 8).getValue() <= 0){
                    pSheet.getRange(pRange.getRow(), 8).setValue(pSheet.getRange(pRange.getRow(), pRange.getColumn()).getValue());
                }
            }
            break;
        }
        case 2: {
            if(pSheet.getRange(pRange.getRow(), 5).getValue().length <= 0){
                pSheet.getRange(pRange.getRow(), 5).setValue(pDate);
                if(pSheet.getRange(pRange.getRow(), 9).getValue() <= 0){
                    pSheet.getRange(pRange.getRow(), 9).setValue(pSheet.getRange(pRange.getRow(), pRange.getColumn()).getValue());
                }
            }
            break;
        }
        case 3: {
            if(pSheet.getRange(pRange.getRow(), 6).getValue().length <= 0){
                pSheet.getRange(pRange.getRow(), 6).setValue(pDate);
                if(pSheet.getRange(pRange.getRow(), 10).getValue() <= 0){
                    pSheet.getRange(pRange.getRow(), 10).setValue(pSheet.getRange(pRange.getRow(), pRange.getColumn()).getValue());
                }
            }
            break;
        }
    }
}
Google Apps Script itself is pretty fast, but calls to the SpreadsheetApp API are very slow. You should minimize the number of API calls by assigning their results to variables, especially when the same call is made more than once.
The code is also not well optimized: repeated calls and an unnecessary switch, since there is a simple pattern between each case and its target columns.
I assigned the duplicate calls to variables and replaced the switch with that pattern. Test whether the code still works as before, but faster.
Code:
function addDateTimetoCell(e){
    var pDate = new Date();
    var pSheet = SpreadsheetApp.getActiveSheet();
    var pRange = e.range;
    var row = pRange.getRow();
    var column = pRange.getColumn();
    if (column < 1 || column > 3) return;   // the original switch only handled columns 1-3
    // Column 1 -> date in column 4, snapshot in column 8; column 2 -> 5 and 9; column 3 -> 6 and 10.
    var rangeCol1 = pSheet.getRange(row, column + 3);
    if(rangeCol1.getValue().length <= 0){
        rangeCol1.setValue(pDate);
        var rangeCol2 = pSheet.getRange(row, column + 7);
        if(rangeCol2.getValue() <= 0){
            rangeCol2.setValue(pRange.getValue());
        }
    }
}
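If that still is not fast enough, the same "fewer API calls" idea can be pushed further by reading the whole row once with getValues(). This is just a sketch assuming the same checks as the original (date cell empty, snapshot cell empty or zero); the function name addDateTimetoCellBatched is mine.

function addDateTimetoCellBatched(e) {
    var pRange = e.range;
    var column = pRange.getColumn();
    if (column < 1 || column > 3) return;            // same columns the switch handled

    var sheet = pRange.getSheet();
    var row = pRange.getRow();

    // One read covering columns 1-10 of the edited row, instead of several getValue() calls.
    var values = sheet.getRange(row, 1, 1, 10).getValues()[0];

    if (values[column + 2] === '') {                  // date column 4, 5 or 6 still empty
        sheet.getRange(row, column + 3).setValue(new Date());
        if (values[column + 6] <= 0) {                // snapshot column 8, 9 or 10 still empty/zero
            sheet.getRange(row, column + 7).setValue(values[column - 1]);  // the edited cell's value
        }
    }
}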
The full code looks like this. Ideally we have 4 div boxes that need to be filled with random numbers (ansValue); one of them (rightAnsValue, in the box with rightAnsId) is already done and works fine, and I've managed to make it unique compared to the others (the code without the commented section). But I ran into a problem making the others unique: I keep getting some identical values in my boxes. The commented-out section is one way I tried to solve this, but I'm pretty sure there is a much simpler and smarter solution that actually works. I would appreciate your help in finding an understandable solution to this problem.
(P.S. I've seen similar questions, but they are either too difficult or done without JS.)
function createAnswers(){
    for(ansId = 1; ansId < 5; ansId++){
        if(ansId != rightAnsId){
            for(i = 1; i < 10; i++){
                digitArray[i - 1] = i;
            }
            genNewRandNum();
            // ansArray.length = 3;
            // ansArray.push(ansValue);
            // for(k = 0; k < 3; k++){
            //     if(ansArray[k] == ansArray[k + 1] || ansArray[k] == ansArray[k + 2]){
            //         genNewRandNum();
            //         ansArray[k] = ansValue;
            //     }else if(ansArray[k + 1] == ansArray[k + 2]){
            //         genNewRandNum();
            //         ansArray[k + 1] = ansValue;
            //     }else{
            //         break;
            //     }
            // }
            if(ansValue != rightAnsValue){
                document.getElementById("box" + ansId).innerHTML = ansValue;
            }else{
                genNewRandNum();
                document.getElementById("box" + ansId).innerHTML = ansValue;
            }
        }
    }
}
The way I generate new numbers:
function genNewRandNum(){
    rand1 = digitArray[Math.floor(Math.random() * digitArray.length)];
    rand2 = digitArray[Math.floor(Math.random() * digitArray.length)];
    ansValue = rand1 * rand2;
}
Replace your genNewRandNum() with the code below. I have used an IIFE to create a closure variable, alreadyGeneratedNumbers, that is available inside the returned function generateRandomNumber().
So every time genNewRandNum() is executed, it checks against alreadyGeneratedNumbers to make sure it always returns a unique number between 1 and 9.
var genNewRandNum = (function(){
    var alreadyGeneratedNumbers = {};
    return function generateRandomNumber() {
        var min = Math.ceil(1),
            max = Math.floor(9);
        var randomNumber = Math.floor(Math.random() * (max - min + 1)) + min;
        if(alreadyGeneratedNumbers[randomNumber]) {
            return generateRandomNumber();
        } else {
            alreadyGeneratedNumbers[randomNumber] = randomNumber;
            return randomNumber;
        }
    };
})();
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
console.log(genNewRandNum());
Note: if you call genNewRandNum() a 10th time there is no unused number left, so it will recurse until it throws a stack overflow error. If you have a use case where you need to keep drawing after all numbers from 1 to 9 have been returned, you need to add code to reset it.
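If you do need that, one possible sketch (not part of the answer above) is to track how many numbers have been handed out and clear the bookkeeping object once all nine are used:

var genNewRandNum = (function () {
    var alreadyGeneratedNumbers = {};
    var count = 0;
    return function generateRandomNumber() {
        if (count === 9) {                    // all of 1..9 used: start a fresh cycle
            alreadyGeneratedNumbers = {};
            count = 0;
        }
        var randomNumber = Math.floor(Math.random() * 9) + 1;
        if (alreadyGeneratedNumbers[randomNumber]) {
            return generateRandomNumber();    // already used in this cycle, try again
        }
        alreadyGeneratedNumbers[randomNumber] = true;
        count++;
        return randomNumber;
    };
})();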
The easiest way to brute-force this is to use accept/reject sampling. You can do something like so:
var uniqueRandomNumbers = function(n, nextRandom)
{
    var nums = {}; var m = 0;
    while(m < n)
    {
        var r = nextRandom();
        if(! nums.hasOwnProperty(r))
        {
            nums[r] = true; m++;
        }
    }
    return Object.keys(nums);
}
Here I'm using the fact that JS objects are implemented as hash maps to get a hash set. (This has the downside of converting the numbers to strings, but if you're not planning on immediately doing arithmetic with them this is not a problem.)
In order to get four unique integers between 0 and 9 you can then do something like:
uniqueRandomNumbers(4, function() { return Math.floor(Math.random() * 10); })
If you want something a little better than brute force (which probably isn't relevant to your use case, but could help someone googling this), one option is to go through each element and either take or leave it with an appropriate probability, as sketched below. This approach is outlined in the answers to this question.
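A sketch of that take-or-leave idea (selection sampling; this is my illustration, not the code from the linked question): walk the candidate range once and keep each element with probability needed/remaining.

// Pick k distinct integers from 0..n-1 in a single pass (selection sampling).
function sampleDistinct(k, n) {
    var result = [];
    for (var i = 0; i < n && result.length < k; i++) {
        var needed = k - result.length;
        var remaining = n - i;
        // Keep i with probability needed / remaining; this yields a uniform sample.
        if (Math.random() * remaining < needed) {
            result.push(i);
        }
    }
    return result;
}

console.log(sampleDistinct(4, 10)); // e.g. [1, 4, 7, 9]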
I'm new to JavaScript and I need to maintain a site.
The current functionality downloads several cards into a file, one card per tab. Since users can have plenty of cards, the treatment cannot always succeed (too many tabs), so I wanted to create a file every 20 tabs, for instance.
var printTerm = function(grid){
    var rows = grid.selected;
    if(rows == null) return;
    var ids = rows.map(function(val){ return grid.getDataByRow(val).num_terme; });
    var nbMax = 20;
    var nbFic;
    var idsPartiel;
    var posDebut;
    var posFin;
    var a;
    if(ids.length > nbMax)
    {
        idsPartiel = ids;
        if(ids.length % nbMax == 0) nbFic = ids.length / nbMax;
        else nbFic = ((ids.length - (ids.length % nbMax)) / nbMax) + 1;
        for (var i = 0; i < nbFic; i++)
        {
            posDebut = (nbMax * i);
            if(i == nbFic - 1) posFin = idsPartiel.length + 1;
            else posFin = posDebut + nbMax;
            ids = idsPartiel.slice(posDebut, posFin);
            a = new Element('a', {'id':'download', 'href':'php/utils/export2pdf.php?ids=' + ids.join(',')})
                .addEvent('click', function(){ location.href = this.href; }).inject(document.body);
            a.fireEvent('click');
            a.dispose();
        }
    }
    else
    {
        a = new Element('a', {'id':'download', 'href':'php/utils/export2pdf.php?ids=' + ids.join(',')})
            .addEvent('click', function(){ location.href = this.href; }).inject(document.body);
        a.fireEvent('click');
        a.dispose();
    }
};
When the number of cards is less than or equal to nbMax it works well, but when there must be several files it does not: only the last pass of the loop creates a file.
When I try to see what happens with Firebug, I see my lines of treatment, but only the last one is completed.
Thanks for helping me.
If I read your code correctly, you are trying to make the browser download a ton of files into separate tabs. Most browsers won't really appreciate you launching a loop of downloads that way, nor would a user really want that many "Save as" dialogs potentially popping up.
You would be far better off packaging them into a zip file server side and sending ONE file. It would be more efficient and much more user friendly.
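On the client that reduces to a single navigation; roughly something like this, assuming a hypothetical server script export2pdf_zip.php that accepts all the ids and streams back one zip (your PHP would have to implement that):

var printTerm = function(grid){
    var rows = grid.selected;
    if(rows == null) return;
    var ids = rows.map(function(val){ return grid.getDataByRow(val).num_terme; });
    // One request for everything; the (hypothetical) script zips the PDFs server side.
    location.href = 'php/utils/export2pdf_zip.php?ids=' + ids.join(',');
};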
I'm writing a library for WebWorkers, and I want to test the difference between running a script in the main page thread versus in one or more workers. The problem is that I can't think, off-hand, of a short function that will strain my browser enough for me to observe the difference.
A quick search didn't return much, but it might just be that I don't really know what to search for; usually I try to optimise my code, not make it slower...
I'm looking for algorithms or patterns that can be easily implemented in pure Javascript, that do not depend on the DOM or XHR, and which can have an argument passed to limit or specify how far the calculation goes (no infinite algorithms); 1s < avg time < 10s.
Extra points if it can be built without recursion and if it does not incur a significant memory hog while still being as processor intensive as possible.
Try using the obvious (and bad) recursive implementation for the Fibonacci sequence:
function fib(x) {
    if (x <= 0) return 0;
    if (x == 1) return 1;
    return fib(x - 1) + fib(x - 2);
}
Calling it with values of ~30 to ~35 (depending entirely on your system) should produce good "slow down" times in the range you seek. The call stack shouldn't get very deep and the algorithm is something like O(2^n).
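To actually compare the main thread against a worker with it, a minimal sketch (not part of the original answer) could build the worker from a Blob so no extra file is needed; it assumes the fib() above is already defined on the page.

// The worker gets its own copy of fib() plus a message handler, as source text.
var fibSrc =
    'function fib(x){ if (x <= 0) return 0; if (x == 1) return 1; return fib(x-1) + fib(x-2); }' +
    'onmessage = function (e) {' +
    '    var t0 = Date.now();' +
    '    postMessage({ n: e.data, result: fib(e.data), ms: Date.now() - t0 });' +
    '};';

var worker = new Worker(URL.createObjectURL(new Blob([fibSrc], { type: 'application/javascript' })));
worker.onmessage = function (e) {
    console.log('worker: fib(' + e.data.n + ') = ' + e.data.result + ' in ' + e.data.ms + ' ms');
};
worker.postMessage(32);

// Same computation on the main thread (this blocks the page while it runs).
var t0 = Date.now();
console.log('main: fib(32) = ' + fib(32) + ' in ' + (Date.now() - t0) + ' ms');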
/**
 * Block the CPU for the given amount of seconds
 * @param {Number} [seconds]
 */
function slowdown(seconds = 0.5) {
    const start = (new Date()).getTime()
    while ((new Date()).getTime() - start < seconds * 1000) {}
}
slowdown(2)
console.log('done')
Calling this method will slow code down for the given amount of seconds (with ~200ms precision).
Generate an array of numbers in reverse order and sort it.
var slowDown = function(n){
    var arr = [];
    for(var i = n; i >= 0; i--){
        arr.push(i);
    }
    arr.sort(function(a, b){
        return a - b;
    });
    return arr;
}
This can be called like so:
slowDown(100000);
Or whatever number you want to use.
Check out the benchmarking code referenced by the Google V8 Javascript Engine.
For some reason Bogosort comes to mind. Basically it's a sorting algorithm that consists of:
while not list.isInOrder():
    list.randomize()
It has an average complexity of O(n * n!) with little memory, so it should slow things down pretty well.
The main downside is that its running time can be anywhere from O(n) to O(∞) (though really, O(∞) is pretty unlikely).
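A direct JavaScript translation of that pseudocode (just a sketch; only run it on very small arrays) might look like this:

function isInOrder(list) {
    for (var i = 1; i < list.length; i++) {
        if (list[i - 1] > list[i]) return false;
    }
    return true;
}

function shuffle(list) {                 // Fisher-Yates shuffle, in place
    for (var i = list.length - 1; i > 0; i--) {
        var j = Math.floor(Math.random() * (i + 1));
        var tmp = list[i]; list[i] = list[j]; list[j] = tmp;
    }
    return list;
}

function bogosort(list) {
    while (!isInOrder(list)) {
        shuffle(list);
    }
    return list;
}

console.log(bogosort([5, 3, 9, 1, 7])); // keep the array tiny, e.g. length <= 8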
Everyone seems determined to be complicated. Why not this?
function waste_time(amount) {
    for(var i = 0; i < amount; i++);
}
If you're concerned the browser will optimize the loop out of existence entirely, you can make it marginally more complicated:
function waste_time(amount) {
    var tot = 0;
    for(var i = 0; i < amount; i++)
        tot += i;
    return tot;   // return the accumulator so the loop can't be eliminated entirely
}
Compute lots of square roots manually?
function sqrt(number, maxDecimal) {
    var cDecimal = -1;
    var cNumber = 0;
    var direction = -1;
    while(cNumber * cNumber !== number && cDecimal < maxDecimal) {
        direction = -direction;
        cDecimal++;
        while((cNumber * cNumber - number) / Math.abs(cNumber * cNumber - number) === direction) cNumber += direction * Math.pow(10, -cDecimal);
    }
    return Math.abs(cNumber);
}
function performTest() {
    for(var i = 0; i < 10000; i++) {
        sqrt(i, 3);
    }
}
Maybe this is what you are looking for:
var threadTest = function(durationMs, outputFkt, outputInterval) {
    // One var statement, so counter & co. don't become accidental globals.
    var startDateTime = (new Date()).getTime(),
        counter = 0,
        testDateTime = null,
        since = 0,
        lastSince = -1;
    do {
        testDateTime = (new Date()).getTime();
        counter++;
        since = testDateTime - startDateTime;
        if(typeof outputFkt != 'undefined' && lastSince != since && testDateTime % outputInterval == 0) {
            outputFkt(counter, since);
            lastSince = since;
        }
    } while(durationMs > since);
    if(typeof outputFkt != 'undefined') {
        outputFkt(counter, since);
    }
    return counter;
}
This method simply repeats a check in a loop.
durationMs - the duration it should run, in milliseconds
OPTIONAL:
outputFkt - a callback for logging purposes: function(currentCount, millisecondsSinceStart)
outputInterval - the interval at which the output function will be called
I figured that since you do not want to test a real function, and since even NP-hard problems have a ratio between input length and time, this could be an easy way. You can measure performance at any interval and of course receive the number of loops as a return value, so you can easily measure how much threads interfere with each other's performance, with the callback even on a per-cycle basis.
As an example, here is how I called it (jQuery and DOM usage are in here, but as you can see, optional):
$(document).ready(function() {
    var outputFkt = function(counter, since) {
        $('body').append('<p>' + counter + ', since ' + since + '</p>');
    };
    threadTest(1000, outputFkt, 20);
});
A last warning: of course this function cannot be more exact than JS itself. Since modern browsers can do much more than one cycle per millisecond, there will be a little tail that gets cut off.
Update
Thinking about it... actually using the outputFkt callback for more than just output could give great insight. You could pass a method that uses some shared properties, or you could use it to test heavy memory usage.
NOTE: Originally had this listed as a memory leak. After looking into this deeper, I discovered that it's not a memory issue. It's just a very slow script. Any suggestions to speed this up would be greatly appreciated.
ANOTHER NOTE: After looking into this even further, I see that Firefox does not support any type of CSS that formats text in overflow. There is a hack, and a workaround for that hack... but that will not be a suitable solution.
I have voted for and joined the e-mail list on this particular bug at Mozilla. It's almost six years old, so I've resolved that users will just have to deal with it for now. At least it's not a common scenario for our product.
Original post:
The script truncates the value of an element and appends '...' while its scrollWidth is greater than its offsetWidth. (e.g. a value of "LastName, VeryLongFirstName" will change to something like "LastName, Ver...", depending on the width of the column.)
var eTable = document.getElementById(this._eDiv.id + "_tbl");
//...lots of code here...
//function called that gets all cells in a table, loops through them and clips the text
addEventListenerEx(window, "load", function() {
    var aCells = eTable.getElementsByTagName("DIV");
    window.alert(aCells.length);
    //When aCells is length of 100, we're ok...but when it's big (like 3,000) I have problems
    for (var i = 0; i < aCells.length; i++){
        Grid.clipText(aCells[i]);
    }
}, false);
//...lots of code here...
//This is the function doing the actual clipping
Grid.clipText = function (oDiv) {
    //for tooltip
    var oCurDiv;
    var oTagA;
    var sToolTip;
    if (oDiv.firstChild) {
        if (oDiv.firstChild.firstChild){
            oCurDiv = oDiv.firstChild;
            while (oCurDiv) {
                if (is.ie) {
                    oTagA = oCurDiv;
                } else {
                    // there are some differences between IE & Firefox.
                    oTagA = oCurDiv.firstChild.parentNode;
                }
                if (oTagA.tagName == "A") {
                    sToolTip = oTagA.innerHTML;
                    if (sToolTip.indexOf('<b>') > 0) {
                        sToolTip = sToolTip.replace('<b>', "");
                        sToolTip = sToolTip.replace('</b>', "");
                    }
                    if (sToolTip.indexOf('<B>') > 0) {
                        sToolTip = sToolTip.replace('<B>', "");
                        sToolTip = sToolTip.replace('</B>', "");
                    }
                    oTagA.parentNode.title = convertHTMLToText(sToolTip);
                }
                oCurDiv = oCurDiv.nextSibling;
            }
        } else {
            oDiv.title = convertHTMLToText(oDiv.innerHTML);
        }
    }
    //NOTE: Additional steps to take for non-IE browsers
    if (!is.ie) {
        var oText = oDiv;
        while (oText.nodeType != 3) {
            oText = oText.firstChild;
        }
        var sDisplayText = oText.nodeValue;
        if (sDisplayText.length < 3) return;
        var lastThree;
        sDisplayText = sDisplayText.slice(0, parseInt(oDiv.offsetWidth / 5));
        oText.nodeValue = sDisplayText + "...";
        //NOTE: Bad things happen here because of this loop
        while (oDiv.scrollWidth > oDiv.offsetWidth && sDisplayText != "") {
            lastThree = sDisplayText.slice(-3);
            sDisplayText = sDisplayText.slice(0, sDisplayText.length - 3);
            oText.nodeValue = sDisplayText + "...";
        }
        oText.nodeValue = sDisplayText + lastThree.slice(0, 1) + "...";
        while (oDiv.scrollWidth > oDiv.offsetWidth && sDisplayText != "") {
            oText.nodeValue = sDisplayText + "...";
        }
    }
};
The code works. However, the problem is that it's called over and over again after a table is loaded on the page. When the table is huge (>1,500 cells), that's when the issue starts.
So, I'm really looking for a way to make this sample (particularly the WHILE loop) more efficient.
Nothing in that is going to leak by itself. You're probably leaking oText in the closure; can you show the surrounding code?
Btw, here is a vastly more efficient way of doing this:
http://jsfiddle.net/cwolves/hZqyj/
If you really want to keep doing it the way you are, you can estimate the cutoff point by taking the length of the string and multiplying it by the proportion of the width it needs to be...
e.g. if the string is 100 characters and it's 2x as long as it should be, cut it to 50 chars and re-check. Or you could implement a binary 'search' algorithm to get the correct length, as sketched below.
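A sketch of that binary-search idea (my illustration, not the jsfiddle code): instead of shaving a few characters per iteration, search for the longest prefix that fits, which needs only about log2(n) layout reads per cell.

// Clip the text node inside oDiv so it fits, appending "..." (hypothetical helper).
function clipToFit(oDiv) {
    var oText = oDiv;
    while (oText.nodeType !== 3) oText = oText.firstChild;
    var full = oText.nodeValue;

    if (oDiv.scrollWidth <= oDiv.offsetWidth) return;   // already fits, nothing to do

    var fits = function (len) {
        oText.nodeValue = full.slice(0, len) + '...';
        return oDiv.scrollWidth <= oDiv.offsetWidth;
    };

    // Binary search for the largest prefix length that still fits.
    var lo = 0, hi = full.length;
    while (lo < hi) {
        var mid = Math.ceil((lo + hi) / 2);
        if (fits(mid)) lo = mid; else hi = mid - 1;
    }
    oText.nodeValue = full.slice(0, lo) + '...';
}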
The workaround, and the best answer to my problem, came from basic arithmetic: cross multiplication.
I posted my answer in a more popular Stack Overflow thread discussing the topic in better detail.