I want to show a list with 1000 rows using JSON, which is supported by Struts2 via a plug-in. I use flexigrid (jQuery) to parse the 1000 rows for display. But it's so slow, and sometimes my browser crashes. (Firefox & IE).
So, what is the fastest Javascript framework to parse about 1000 rows?
What is the fastest JSON parser for JavaScript?
eval or when available, native JSON parser, at least in Chrome, Safari, Firefox 3.something, Opera 10.50, and even IE8 (only in IE8-mode)
Show the user what they want to see.
Show 50 rows, add a filter or a search.
If you really think that data should be reachable in a single page, maybe what you want is to fetch data while the user scrolls (and thus pick up smaller portions at a time).
I don't think you'll get acceptable performance from just about any grid component showing 1,000 at the same time, especially not on IE (even IE8). But most grids should be able to support having 1,000 in memory (well, depending on how big they are) and displaying a window into them (say, 20 rows, 40 rows, etc.) with paging and filtering options, without a significant performance problem. That would be a better user experience as well, I would think.
Edit
I got curious enough to check, and yeah, JSON parse time is not the problem; it'll be the rendering. Below is an example of very, very simple (not production) paging entirely client-side. On my netbook, IE7 parses the 1,000 rows of simple JSON objects in 36ms, so even complex objects shouldn't be an issue. That's using Prototype's evalJSON, which (even now) just defers to eval and puts the data in parentheses (they'll be changing that).
1000rows.html
<!DOCTYPE HTML>
<!-- Demo page: client-side paging over 1,000 JSON rows (logic in 1000rows.js). -->
<html>
<head>
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
<title>1,000 Row Test Page</title>
<style type='text/css'>
body {
font-family: sans-serif;
}
#log p {
margin: 0;
padding: 0;
}
</style>
<!-- Prototype library must load before the paging script that uses it. -->
<script type='text/javascript' src='http://ajax.googleapis.com/ajax/libs/prototype/1.6.1.0/prototype.js'></script>
<script type='text/javascript' src='1000rows.js'></script>
</head>
<body><div>
<!-- Controls: fetch the data, then page through it a window at a time. -->
<input type='button' id='btnLoadData' value='Load Data'>
<input type='button' id='btnNext' value='Next'>
<input type='button' id='btnPrevious' value='Previous'>
<table>
<thead>
<tr><th>Name</th><th>Description</th><th>Count</th></tr>
</thead>
<tfoot>
<!-- Status line, e.g. "Showing rows X through Y". -->
<tr><th colspan='3' id='theLabel'></th></tr>
</tfoot>
<!-- The visible window of rows is rendered into this tbody. -->
<tbody id='theData'>
<tr><td colspan='3'></td></tr>
</tbody>
</table>
<hr>
<!-- Timing/progress messages are appended here by the script. -->
<div id='log'></div>
</div></body>
</html>
1000rows.js (using Prototype; jQuery would be different but similar)
(function() {
  // "Constant" for the size of our window into the data.
  var WINDOW_SIZE = 20; // Rows

  // Row template, created once instead of on every repaint
  // (the original rebuilt it inside updateWindow each call).
  var ROW_TEMPLATE = new Template(
    "<tr><td>#{name}</td><td>#{description}</td><td>#{count}</td></tr>"
  );

  var data;      // Array of row objects parsed from data.txt
  var windowTop; // 0-based index of the first row currently shown

  // No data yet
  clearData();

  // Hook up our observers when we can.
  document.observe('dom:loaded', function() {
    $('btnLoadData').observe('click', loadData);
    $('btnNext').observe('click', function(event) {
      event.stop();
      updateWindow(WINDOW_SIZE);
    });
    $('btnPrevious').observe('click', function(event) {
      event.stop();
      updateWindow(-WINDOW_SIZE);
    });
  });

  // Reset our state to "no data loaded".
  function clearData() {
    data = [];
    windowTop = 0;
  }

  // Click handler for the load data button: fetch, time the parse, repaint.
  function loadData() {
    log("Loading data..");
    clearData();
    updateWindow();
    // Note: Using text/plain rather than application/json so
    // Prototype doesn't parse the data for me, so I can measure
    // how long it takes to do it.
    new Ajax.Request("data.txt", {
      onSuccess: function(response) {
        var start, duration;
        log("Got data, parsing");
        start = new Date().getTime();
        data = response.responseText.evalJSON();
        duration = new Date().getTime() - start;
        log("Data parsed in " + duration + "ms");
        updateWindow.defer();
      }
    });
  }

  // Repaint the visible window. `offset` (optional, in rows) shifts the
  // window forward/backward; out-of-range positions are clamped.
  function updateWindow(offset) {
    var dataElement, labelElement, markup, index;
    // Get the target elements.
    dataElement = $('theData');
    labelElement = $('theLabel');
    if (!dataElement || !labelElement) {
      return;
    }
    // If no data, simply say that.
    if (!data || data.length <= 0) {
      dataElement.update("");
      labelElement.update("No information");
      return;
    }
    // Ensure that windowTop is rational.
    if (WINDOW_SIZE > data.length) {
      windowTop = 0;
    }
    else {
      if (typeof offset == 'number') {
        windowTop += offset;
      }
      if (windowTop + WINDOW_SIZE > data.length) {
        windowTop = data.length - WINDOW_SIZE;
      }
      if (windowTop < 0) {
        windowTop = 0;
      }
    }
    markup = "";
    index = windowTop + WINDOW_SIZE - 1;
    if (index >= data.length) {
      index = data.length - 1;
    }
    // Fix: reuse the element already looked up above instead of
    // re-querying $('theLabel') a second time.
    labelElement.update('Showing rows ' + windowTop + ' through ' + index);
    // Build the markup bottom-up so rows come out in ascending order.
    while (index >= windowTop) {
      markup = ROW_TEMPLATE.evaluate(data[index]) + markup;
      --index;
    }
    dataElement.update(markup);
  }

  // Append a log message paragraph to the #log element.
  function log(msg) {
    $('log').appendChild(new Element('p').update(msg));
  }
})();
data.txt (quite boring, of course)
[
{"name": "Name #0001", "description": "Description #0001", "count": 1},
{"name": "Name #0002", "description": "Description #0002", "count": 2},
{"name": "Name #0003", "description": "Description #0003", "count": 3},
...
{"name": "Name #1000", "description": "Description #1000", "count": 1000}
]
...a full copy of data.txt can be found here.
1,000 rows of what? jQuery is actually pretty quick, especially since performance upgrades in version 1.4 (released just days ago). If you're experiencing problems showing 1,000 rows, I would first ask you why you're showing that many - no human ought to have to scroll that much. And second, is all of the information crucial, and are you only passing crucial information into the JSON value. And lastly, are you making your DOM unnecessarily-complicated with the way you're adding the data?
Again, if you're querying only what you need to show, and you're showing a reasonable amount of data (posting 1,000 rows on the screen isn't reasonable), jQuery will be more than sufficient for your needs.
If you really want speed, the javascript eval("..."); function is the fastest. Unfortunately it's not safe as it can execute malicious javascript.
There's also the javascript JSON Parser (found here) from JSON.org. They've written the javascript to parse JSON strings to create a JSON object (I've heard that debugging using Firebug, a Firefox add-ons, creates an array of JSON objects but I've never tried it).
Related
I'm trying to understand why it takes so long to rebuild a table using javascript on Firefox 43.0.2
A simplified version of my code is below. Both the "real" code and the simple version use "publishTable() to add rows to a table. PublishTable deletes a table body element if it exists, creates a new one, adds about 9000 rows to it, and attaches the completed table body to the table.
PublishTable runs on load, and when the user clicks a "go" button. Therefore, I expect performance to be similar on load and rebuild.
When the "real" page first loads, Firefox takes about 300ms to construct the table [according to performance.now()]. When the alert() announcing this result is closed, i can immediately scroll the page up and down.
But if i click the "go" button to rebuild the table, Firefox spins its wheels for tens of seconds (or more) after I close the alert() dialog. A "stop script?" dialog can appear more than once. The simple version behaves similarly.
So: Why is the performance so radically different between initial build, and rebuild? It seems clearly possible to build the table in 300ms! Is there anything I can do about it?
Some further observations:
IE's performance is much worse on initial load, and as bad on rebuild. Chrome's performance is pretty good: 2 seconds to build or rebuild. If I use innerHTML, rather than insertRow, appendChild, etc., results are similar.
If i remove the line attaching the table body to the table, the wheel-spinning symptom does not occur.
In the "waterfall" chart (in the Firefox performance tool), the the "DOM event" takes up much more time than the "event handler" (which I think covers the run-time of my code), and I don't know what that means. What is happening between the time the js stops running, and the DOM event ends, that doesn't fall in one of the other waterfall categories?
The DOM event is followed by a brief time to recalculate style, a time to paint, and then a sequence of many "cycle collection" periods, then "incremental gc", "cc graph reduction", "cycle collection", "graph reduction", and so on, for tens of seconds. In one case, the performance call-tree allocated 49 seconds to "Gecko" (which seems to be idle time) and another 25 seconds to "graphics" (and within that, a mere 1 second is allocated to publishTable()). Is there something here I can act on?
I'm out of reasonable ideas for further analysis, or how I might modify the js. I don't understand enough about the performance information to act on it. (And now, after timing with IE and Chrome, I'm not even sure to whom the question should be addressed.)
Is there a fundamental error in the code? A better table construction strategy? A way to use the performance tool to understand the problem? A bug in Firefox? (And now I'm going to do the thing on the server side. But I'm still curious about what's going on.)
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body>
<div id='a'>
<button type="button" disabled id="btnGo">Go</button><br />
<button type="button" id="btnQ">?</button><br />
<table id="tideTable" style="Width:40%;margin-left:auto;margin-right:auto;">
</table>
</div>
<div id="b">
hello
</div>
<script>
(function() {
  var columnLabels = ['one', 'two', 'three', "four", "five", "six", "seven"];

  // Tear down and rebuild the 8,500-row body of #tideTable, alerting the
  // elapsed build time. The user may run this several times, varying some
  // parameters each time.
  function publishTable() {
    var startedAt = performance.now();
    var table = document.getElementById('tideTable');
    // Alternative lookup kept for reference:
    // var body = table.getElementsByTagName('tbody')[0];
    var body = table.tBodies[0];
    if (body) {
      body.parentNode.removeChild(body);
    }
    showHeight();
    // Build the new body detached from the document, attach once at the end.
    body = document.createElement('tbody');
    var rowsLeft = 8500;
    while (rowsLeft-- > 0) {
      appendTableRow(body, columnLabels);
    }
    table.appendChild(body);
    document.getElementById("btnGo").disabled = false;
    alert("Time: " + (performance.now() - startedAt) + "ms");
    showHeight();
  }

  // Append one row whose cells contain the given column texts.
  function appendTableRow(body, columns) {
    var row = body.insertRow(-1);
    columns.forEach(function(text, idx) {
      var cell = row.insertCell(idx);
      cell.appendChild(document.createTextNode(text));
    });
  }

  // Alert the vertical offset of the #b marker element (forces layout).
  function showHeight() {
    alert("position: " + document.getElementById('b').offsetTop);
  }

  document.getElementById("btnGo").addEventListener("click", publishTable);
  document.getElementById("btnQ").addEventListener("click", showHeight);
  publishTable();
})();
</script>
</body>
</html>
I guess, it could be because of the removal of existing items, before inserting the new ones. You could try the following:
measure what happens to the performance if you just extend the table, without deletion
build the table before inserting it, e.g. make a variable tableContent, put the rows in it, and then add tableContent to the table; that should be faster, because otherwise your browser has to re-render the page on every insert
And I would advise you to consider using AngularJS if you plan to make the table dynamic
I tried swapping out the line:
var tableBody = table.getElementsByTagName('tbody')[0];
with the built-in getter:
var tableBody = table.tBodies[0];
and this seems to stabilize the build time. Still a bit slow, but near-consistent times reported for initial build and rebuilds.
This could be coincidental, but something you may want to mess around with.
Your JS is minified and in a CloudFront CDN.
The first demo is async and the second demo is defer
Async
https://jsfiddle.net/zer00ne/6m9f24j5/
Defer
https://jsfiddle.net/zer00ne/fcpy9z0c/
Results
Same times.
142ms on Firefox loading.
Avg of 230ms on each click event.
846ms on Chrome loading.
Avg of 930ms on each click event.
Put your <script> tags before the closing </body> tag
https://jsfiddle.net/zer00ne/y7mguyry/
(function() {
// Column texts reused for every generated row.
var mmm = ['one', 'two', 'three', "four", "five", "six", "seven"];
// Rebuild the 8,500-row tbody of #tideTable and alert the elapsed time.
function publishTable() {
// The user may run this several times, varying some parameters each time.
var tStart = performance.now();
var table = document.getElementById('tideTable');
// Remove the previous tbody (if any) before rebuilding.
var tableBody = table.getElementsByTagName('tbody')[0];
if (tableBody) {
tableBody.parentNode.removeChild(tableBody);
}
// Build the new body detached from the DOM; attach it once at the end.
tableBody = document.createElement('tbody');
for (var i = 0; i < 8500; i++) {
appendTableRow(tableBody, mmm);
}
table.appendChild(tableBody);
document.getElementById("btnGo").disabled = false;
alert("Time: " + (performance.now() - tStart) + "ms");
}
// Append one row to tableBody with one cell per entry in columns.
function appendTableRow(tableBody, columns) {
var cell;
var textNode;
var row = tableBody.insertRow(-1);
for (var i = 0; i < columns.length; i++) {
cell = row.insertCell(i);
textNode = document.createTextNode(columns[i]);
cell.appendChild(textNode);
}
}
// Wire up the rebuild button and run the initial build on load.
document.getElementById("btnGo").addEventListener("click", publishTable);
publishTable();
})();
<button type="button" disabled id="btnGo">
Go</button>
<br />
<table id="tideTable" style="Width:40%;margin-left:auto;margin-right:auto;">
</table>
I have to show a progressbar/status indicator using pure JavaScript, no jQuery please.
My code is:
<script type="text/javascript">
function processObjects()
{
var selectedRows = {}; // array of selected rows from table
var count = selectedRows.length; // count value exceeds 100
var myDiv = document.getElementById("myDiv");
for(var i=0; i < count; i++)
{
myDiv.innerHTML = (i+1)+"/"+count;
// Process each object from array
// no Ajax call
// takes almost 0.1 sec for each object <- this is not an issue
}
}
</script>
<div id="myDiv"></div>
<input type="button" onclick="processObjects()" value="Process Objects" />
<table>
<!-- Table with lots of rows with checkboxs -->
</table>
Problem:
When I run this script in any Browser, the page becomes unresponsive and does not update the status in using innerHTML as 1/100...2/100...3/100 as so on.
what could be the possible solution to stop browser from becoming unresponsive?
JS is single threaded and it has to take the full attention of the browser while being inside a function.
You need to call long processes through setTimeout() function if you need to give the browser a chance to breath while processing something long.
See how I do this in the following example:
// Show an incrementing counter in #myDiv without freezing the page.
// Each step re-schedules itself via setTimeout, which yields control back
// to the browser so it can repaint between steps.
function doProgress(count) {
    // Fix: use >= instead of == so a call that overshoots 100 can never
    // fall through the guard and reschedule forever.
    if (count >= 100)
        return;
    document.getElementById("myDiv").innerHTML = count;
    count++;
    setTimeout(doProgress, 0, count); //<- re-schedule with the new count; "0" is the delay in ms, "count" is passed as the argument
}
This only demonstrates the technique; there are a lot of best practices to follow once you master it.
Javascript locks the view while code is executing (unless you are using a canvas) so you must end the execution of your code before being able to see results in your DOM.
Even if this article is about angular, the intro explains quite well how javascript works and why it freezes a browser http://jimhoskins.com/2012/12/17/angularjs-and-apply.html
if you want to keep it simple you can do this:
<script type="text/javascript">
// Shared state for the chunked progress loop below.
var start=0;
// NOTE(review): {} is an object literal, so .length below is undefined --
// presumably a placeholder for a real array of rows; verify in the real code.
var selectedRows = {}; // array of selected rows from table
var count = selectedRows.length; // count value exceeds 100 value
var myDiv = document.getElementById("myDiv");
// Process one item, then re-schedule via setTimeout so the browser can
// repaint between items (keeps the page responsive).
function processObject(){
myDiv.innerHTML = (++start)+"/"+count;
// Process one object from array using "start" as index
if(start<count){
setTimeout(processObject, 100);
}
}
// Entry point: refresh the shared state, then start the chunked loop.
function processObjects(){
//eventually update values
selectedRows=[] //adds items to array
count = selectedRows.length;
myDiv = document.getElementById("myDiv");
processObject();
}
</script>
<div id="myDiv"></div>
<input type="button" onclick="processObjects()" value="Process Objects" />
<table>
<!-- Table with lots of rows with checkboxs -->
</table>
if you don't want to use global variables you can do this:
// Same chunked-progress technique as above, but the shared state lives as
// properties on the processObject function itself instead of globals.
function processObject(){
processObject.myDiv.innerHTML = (++processObject.start)+"/"+processObject.count;
// Process one object from array using "start" as index
if(processObject.start<processObject.count){
// Re-schedule so the browser can repaint between items.
setTimeout(processObject, 100);
}
}
// Entry point: initialize the state properties, then start the loop.
function processObjects(){
processObject.selectedRows=[]; //array with rows to process
processObject.count=processObject.selectedRows.length
processObject.start=0;
processObject.myDiv=document.getElementById("myDiv");
processObject();
}
I have this userscript (written with a great help from Stack Overflow) for the site metal-archives.com.
Its structure is like this:
function appendColumn(...) {
// code for appending column
// code for making the table sortable
}
waitForKeyElements ("table", appendColumn);
The script works okay except for a visual glitch/delay as you switch sub-tabs (tables).
When switching, the extra (6th) column is initially shown as expected. But then, the table is displayed momentarily in its original form, and then finally with the 6th column as it should be.
To see this, install the script, visit this typical target page, and then switch the sub-tabs (Between Complete Discography, Main, Lives, Demos, Misc, etc.).
It looks like this:
I've tried to make it so that the initial table does not appear by adding:
GM_addStyle(".display.discog {display: none;} ");
to the beginning of appendColumn() and:
GM_addStyle(".display.discog {display: inline !important;} ");
to the end of appendColumn().
But it didn't make any difference.
I used Firefox Network Monitor on that page and it seems that when you switch tabs:
The code immediately modifies the table (which is loaded from cache?? -- because there's no entry in Network Monitor).
Then the table (the relevant HTML file) is loaded from the server.
Then the table is modified one last time.
How can I change the code (while using waitForKeyElements) to prevent the key element from being displayed, and only display it after it's modified by my code?
Or how do I speed up the response?
Thank you.
I loaded your script, added timing lines, and tested it. The time elapsed from AJAX-complete until the table was fixed and finished was only 400 to 500 milliseconds! That's plenty fast for most people and situations.
But, for those times when you absolutely want to squeeze out the milliseconds, you can switch to MutationObservers. These are finicky, brittle, and less cross-browser portable, but they are fast.
In this case, MutationObservers took the AJAX-to-fixed-table time down to the 20 to 40 millisecond range.
I recommend using a library like Mutation Summary to take some of the pain out of the process.
To convert from a simple waitForKeyElements() implementation to Mutation Summary:
Add
#require https://raw.githubusercontent.com/rafaelw/mutation-summary/master/src/mutation-summary.js
to your metadata block.
Plug your waitForKeyElements callback and simple selector into this structure:
var muteObserver = new MutationSummary ( {
callback: handleDiscographyChanges,
rootNode: $( {ANY-JQUERY-SELECTOR} )[0],
queries: [ {element: {A-SIMPLE-SELECTOR}} ]
} );
function handleDiscographyChanges (muteSummaries) {
var mSummary = muteSummaries[0];
if (mSummary.added.length) {
{YOUR-CALLBACK} ( $(mSummary.added[0]) );
}
}
For example in this case, change:
waitForKeyElements (".display.discog", appendColumn);
To:
var muteObserver = new MutationSummary ( {
callback: handleDiscographyChanges,
rootNode: $("#band_disco")[0],
queries: [ {element: ".discog"} ]
} );
function handleDiscographyChanges (muteSummaries) {
var mSummary = muteSummaries[0];
if (mSummary.added.length) {
appendColumn ( $(mSummary.added[0]) );
}
}
Where the rootNode was determined by examining the page structure.
For reference, a complete script with 3 optional approaches and timing logging is below. It was only tested on Firefox, but should work with Tampermonkey too (maybe).
See the //OPTION n lines just above each line to optionally comment out.
// ==UserScript==
// #name Metal Archives (discography pages) - Reviews column split and sortable tables
// #include http://www.metal-archives.com/bands/*
// #include http://www.metal-archives.com/band/*
// #grant none
// #require http://code.jquery.com/ui/1.9.1/jquery-ui.min.js
// #require https://greasyfork.org/scripts/2199-waitforkeyelements/code/waitForKeyElements.js?version=6349
// #require https://greasyfork.org/scripts/5844-tablesorter/code/TableSorter.js?version=21758
// #require https://raw.githubusercontent.com/rafaelw/mutation-summary/master/src/mutation-summary.js
// ==/UserScript==
// Split the discography table's 'Reviews' column into separate 'Reviews'
// and 'Ratings' columns, then make the table sortable via tablesorter.
// jNode is a jQuery wrapper around the table element.
function appendColumn(jNode) {
    logTime ("Table fixed");
    // STEP 1+2: SPLIT THE 'REVIEWS' COLUMN INTO A 'REVIEWS' COLUMN AND A 'RATINGS' COLUMN
    var tbl = jNode[0]; // table reference
    // If the current sub-table has no data, then stop the execution of the function
    if (tbl.rows[1].cells[0].innerHTML == '<em>Nothing entered yet. Please add the releases, if applicable. </em>') {
        return;
    }
    // Fix: i, k, l, m, url, re0, re1 and re2 were implicit globals;
    // declare them locally so runs can't clobber each other's state.
    // (Unused `newText` local removed.)
    var newCell, i, k, l, m, url, re0, re1, re2;
    const cols = tbl.rows[0].cells.length - 1;
    var tr = tbl.tHead.children[0],
        th = document.createElement('th');
    th.innerHTML = "Ratings";
    th.className = "ratingsCol";
    tr.appendChild(th);
    for (i = 1; i < tbl.rows.length; i++) {
        k = tbl.rows[i].cells[cols].innerHTML; // content of the current 'Reviews' cell
        re1 = /<a [^>]*>[^(]*[(]([^)]+)/ ; // matches the 'Ratings' percentage (incl. the % symbol)
        l = re1.exec(k);
        newCell = tbl.rows[i].insertCell(-1); // new cell for the 'Ratings' column, per row
        if (re1.test(k) != 0){ // only when the RegEx matched, fill the new cells with...
            re0 = /(<a [^>]*>)[0-9]*[^(]/ ; // matches the reviews URL (opening <a> tag)
            url = re0.exec(k);
            newCell.innerHTML = url[1] + l[1] + '</url>'; // ...the Ratings percentage (also a link to the Reviews)...
            re2 = /<a [^>]*>([0-9]*)[^(]/ ; // matches the 'Reviews' number
            m = re2.exec(k);
            newCell = tbl.rows[i].cells[cols];
            newCell.innerHTML = url[1] + m[1] + '</url>'; // ...and the Reviews number (also a link to the Reviews)
        }
    }
    // STEP 3: MAKE THE DISCOGRAPHY TABLE SORTABLE (using the jQuery plugin "tablesorter")
    $(tbl).tablesorter ( {
        cssAsc: 'up',
        cssDesc: 'down',
        headers: {
            0: {sorter: false}   // keep the first column unsortable
        }
    } );
}
//OPTION 1
//waitForKeyElements (".display.discog", appendColumn);
// Log every AJAX completion; OPTION 2 (below the early return) would fix
// the table directly from this hook when a /tab/ URL finishes loading.
$(document).ajaxComplete (function (e, xhr, config){
logTime ("Ajax complete");
//OPTION 2
// NOTE: this early return deliberately disables the code below it,
// leaving only the timing log active for the comparison test.
return; //-- For compare test
if (config.url.indexOf ('/tab/') != -1){
$(".display.discog").each ( function () {
appendColumn ( $(this) );
} );
}
} );
// Log when a discography sub-tab is clicked, for timing comparisons.
$("#band_disco > ul > li").on ("click", "a.ui-tabs-anchor", function (zEvent) {
logTime (zEvent.target.textContent + " tab was clicked.");
} );
// Log a millisecond-resolution timestamp followed by the given label.
function logTime (lableTxt) {
    var tNow = new Date ();
    // Fix: Date.prototype.toLocaleFormat was non-standard (Firefox-only)
    // and has been removed; build the HH:MM:SS stamp with standard APIs.
    var hms = ('0' + tNow.getHours ()).slice (-2) + ':' +
              ('0' + tNow.getMinutes ()).slice (-2) + ':' +
              ('0' + tNow.getSeconds ()).slice (-2);
    console.log (hms + "." + tNow.getMilliseconds (), " <== " + lableTxt);
}
//OPTION 3
//*--- Remove leading slash, from this line, to comment out block, below.
// Watch #band_disco for newly added .discog tables; this fires much sooner
// than waitForKeyElements polling does.
var muteObserver = new MutationSummary ( {
callback: handleDiscographyChanges,
rootNode: $("#band_disco")[0],
queries: [ {element: ".discog"} ]
} );
//*/ -- Tail end of optional comment block
// MutationSummary callback: when a .discog table is added, fix it up.
function handleDiscographyChanges (muteSummaries) {
var mSummary = muteSummaries[0];
if (mSummary.added.length) {
appendColumn ( $(mSummary.added[0]) );
}
}
Note that styling code, and some original comments, were omitted from this example.
waitForKeyElements is a slow way to insert your content. This is one reason you're seeing the rendering behavior that you are seeing.
The reason the tab is rendered appropriately when you first switch is because it is hidden and correctly adjusted while hidden. When the tab is shown the content is updated to reflect the latest from the server, which has not been adjusted. waitForKeyElements then notices the change and fires again to correct.
Using ajaxComplete you can hide the panel when the content is first loaded.
Hiding the panel when data first returns:
$(document).ajaxComplete(function(e, xhr, config){
if(config.url.indexOf('/tab/') != -1){ // Because the tables URLs are like `www.metal-archives.com/band/discography/id/xxx/tab/lives`
$('.ui-tabs-panel').css('visibility', 'hidden');
}
});
Showing the panel in your appendColumn function:
function appendColumn(...) {
// code for appending column
// code for making the table sortable
$('.ui-tabs-panel').css('visibility', 'visible');
}
Happy coding!
I used PHP to create an HTML page, which compiles a list of data points and pushes them into an array, declares the array in the header, and also echo's a huge list of form input objects into the body.
The list I'm working with is just under 15,000 lines which are put into 1 array.
I more or less created a search box: when the input loses focus (blur()), a JavaScript function is supposed to search through the array, hide unmatched form options, and display the matches. This seems to work fine up to 5,000 items, but if I have it run through all 15,000 array items it hangs up and freezes.
I'm currently hosting it on a free site while I test... here is the link to the actual page TEST PAGE that hangs up
I'm including a snippet of the JS code with a truncated array so you don't have to scroll for thousands of lines.
<script type="text/javascript" >
var array_ICDDx = new Array('[ICD Code] Diagnosis','[001.0] Cholera due to vibrio cholerae','[001.1] Cholera due to vibrio cholerae el tor','[001.9] Cholera, unspecified','[002.0] Typhoid fever','[002.1] Paratyphoid fever A','[002.2] Paratyphoid fever B','[002.3] Paratyphoid fever C','[002.9] Paratyphoid fever, unspecified','[003.0] Salmonella gastroenteritis','[003.1] Salmonella septicemia','[003.20] Localized salmonella infection, unspecified','[003.21] Salmonella meningitis','[003.22] Salmonella pneumonia','[003.23] Salmonella arthritis','[003.24] Salmonella osteomyelitis',[...GOES ON FOREVER ~15000 ARRAY VALUES...]);
// Show/hide table rows R<line_start>..R<line_end-1> depending on whether
// the (lower-cased) term ICDDx occurs in the matching array_ICDDx entry.
// Works in 1000-row chunks; between chunks it yields to the browser so
// the page stays responsive.
function searchICDDx(ICDDx,line_start,line_end) {
    for (var a = line_start; a < line_end; a++) {
        var ICDDx_check = array_ICDDx[a].toLowerCase();
        var Row = "R" + a;
        if (ICDDx_check.search(ICDDx) >= 0) {
            document.getElementById(Row).style.display = "block";
        }
        else {
            document.getElementById(Row).style.display = "none";
        }
    }
    if (line_end < array_ICDDx.length) {
        line_end += 1000;
        if (line_end > array_ICDDx.length) { line_end = array_ICDDx.length; }
        // Fix: the original passed searchICDDx(...) -- i.e. CALLED it and
        // handed its return value to setTimeout -- so the whole search
        // still ran synchronously and froze the page. Schedule a function
        // that makes the recursive call instead.
        var next_start = a; // loop exited with a == previous line_end
        setTimeout(function () {
            searchICDDx(ICDDx, next_start, line_end);
        }, 100);
    }
    return;
}
// Blur handler for the ICD-code box: clears the Dx box and starts a
// chunked search with the lower-cased query.
// NOTE(review): `Coder` appears to be the form's name (accessed as a
// global) -- confirm against the page markup.
function searchICD() {
var find_ICD = Coder.elements['ICD'].value;
if (find_ICD != "") {
document.Coder.Dx.value = "";
find_ICD = find_ICD.toLowerCase();
searchICDDx(find_ICD,1,1000);
}
}
// Blur handler for the diagnosis-text box: clears the ICD box and starts
// a chunked search with the lower-cased query.
function searchDx() {
var find_Dx = Coder.elements['Dx'].value;
if (find_Dx != "") {
document.Coder.ICD.value = "";
find_Dx = find_Dx.toLowerCase();
searchICDDx(find_Dx,1,1000);
}
}
</script>
It doesn't appear to be an issue with the code not functioning. As I mentioned, if I limit the search to just 1000 array values it seems to work; it's the massive number of array values that is killing the page.
Any suggestions?
Thank you in advance!
With this many data points, you should probably do this on the server. However, you can try the following:
instead of using a for loop (which completely freezes the browser until it is done), use a setInterval that checks a new result every 5 ms or so. Periodically, check if all the results have been searched, and clear the interval if so. It will still take a bit to search, but won't freeze the browser.
search only until you have a set number of results (40 or so), and store the last index of the array that was searched. Wait to load more searches until the user scrolls down the page.
Also, you should probably implement an infinite scroll for displaying results. My browser froze and had to be restarted just opening the link you attached.
Update: if you don't want the items displayed until after you search, you should have no items on the page initially and add them when they match the search. This prevents the initial lag, prevents you from having to change the visibility of every element, and reduces the number of elements on the page (which causes issues).
Thank you for all your input and suggestions.
I went back and took out all of entries when listed in the form. Then I had JS create a list of checkbox inputs based on all the positive results and element.innerHTML the results. The array is still a huge list on client side through which the JS searches for matches. I updated the code in the link from my original post to show the faster and working result.
<script type="text/javascript" >
var array_ICDDx = new Array('[icd code] diagnosis','[001.0] cholera due to vibrio cholerae','[001.1] cholera due to vibrio cholerae el tor','[001.9] cholera, unspecified','[002.0] typhoid fever','[002.1] paratyphoid fever a',[...etc...]);
// Collect checkbox markup for every array_ICDDx entry containing ICDDx,
// recursing in 1000-row chunks; when the whole array has been scanned,
// inject all results into #Results_here in one innerHTML assignment.
// NOTE(review): entries are interpolated into single-quoted HTML attribute
// values -- an entry containing an apostrophe or markup would break the
// generated HTML; verify the data set is clean.
function searchICDDx(array_Results,ICDDx,line_start,line_end) {
for (var a = line_start; a < line_end; a++) {
if (array_ICDDx[a].indexOf(ICDDx) >= 0) {
array_Results.push("<span style='display:block' ><input type='checkbox' value='"+array_ICDDx[a]+"' >"+array_ICDDx[a]+"</span>");
}
}
// More rows to scan: advance the chunk window and recurse (synchronously).
if (line_end < array_ICDDx.length) {
line_end += 1000;
if (line_end > array_ICDDx.length) { line_end = array_ICDDx.length; }
searchICDDx(array_Results,ICDDx,a,line_end);
}
// Done: render all accumulated matches in a single DOM update.
else if (line_end >= array_ICDDx.length) {
var string_Results = array_Results.join("\n");
document.getElementById("Results_here").innerHTML = string_Results;
return;
}
}
// Start a search from the ICD-code box; clears the Dx box first.
function searchICD() {
    var results = [];
    var query = Coder.elements['ICD'].value;
    if (query === "") {
        return; // nothing typed, nothing to search
    }
    document.Coder.Dx.value = "";
    searchICDDx(results, query.toLowerCase(), 1, 1000);
}
// Start a search from the diagnosis-text box; clears the ICD box first.
function searchDx() {
    var results = [];
    var query = Coder.elements['Dx'].value;
    if (query === "") {
        return; // nothing typed, nothing to search
    }
    document.Coder.ICD.value = "";
    searchICDDx(results, query.toLowerCase(), 1, 1000);
}
</script>
In the past I've had poor results with forms and innerHTML added options, which I'll tackle another time when I try to move this code into the larger project.
Thank you again
I would like to ask you to find the point, why the site -I'm working on- is slow.
the conditions of the problem:
large row count (so I think maybe the problem is related to this.)
there is an ajax event (I have tried commenting it out and the problem disappeared)
using not Mozilla (this freeze effect appear in IE and Chrome)
description of the problem (see the image):
I change the value of input
after there is an ajax call (in order to calculate prize) and it takes in FF about 30 ms otherwise more than 1 s
there is a freeze until the ajax finished (but ajax is not set to async:false)
only after that can I change the next input
I have tried to reproduce the error, but I couldn't. So see the original site:
site: foto/fotokidolgozas/elohivas-beallitasok.php
Log in and pass: d838292#rtrtr.com
Update: It works now fine, the trick is the following:
I use hidden input fields, their values are json_encode-d strings. I can process them anytime with js.
Thank you for any help!
Code:
// Delegated change handler for every select/textarea/visible input inside
// #cikkek (item list) and #magic_bar: updates the row's preview locally,
// then posts the changed property to the server to recalculate prices.
// NOTE(review): now(), trace(), postHiba(), osszegzest_frissit() and the
// a_*/p_* timing accumulators are defined elsewhere in the page -- confirm.
$('#cikkek,#magic_bar').on("change","select,textarea,input[type!=hidden]",function(event_object){
// Ignore the row-selection checkbox unless it lives in #magic_bar.
if( $(this).attr('name') == "kijelolve" && !$(this).parents('#magic_bar').length)return true;
var cikk_id = $(this).parents('.cikk').attr('id');
var cikk_tipus = $("input[name=cikk_tipus]").val();
var tulajdonsag = $(this).attr('name');
var ertek = $(this).val();
// "-1" is the placeholder option; nothing to save.
if(ertek == "-1")return false;
// Checkboxes post '1'/'0' rather than their value attribute.
if($(this).is('[type=checkbox]'))ertek = $(this).prop("checked")?'1':'0';
// For photo-development items, a size/crop change updates the row's
// preview image, message text, DPI label and size-select color locally,
// reading the precomputed values from hidden inputs in the same row.
if(cikk_tipus=='fotokidolgozas' && (tulajdonsag=='meret'||tulajdonsag=='vagas'))
{
var sor = $(event_object.target).parents('.cikk');
var act_meret = sor.find('select[name=meret]').val();
var act_fill = sor.find('select[name=vagas]').val();
var act_zold_class = sor.find("input[name=zold_"+act_meret+"]").val()=="1" ?"zold":"feher" ;
var name = "src_"+act_meret+"_"+act_fill;
var name2 = "szoveges_uzenet_"+act_meret+"_"+act_fill;
sor.find(".img_cont").find("img").attr("src",sor.find("input[name="+name+"]").val());
sor.find(".szoveges_uzenet").text(sor.find("input[name="+name2+"]").val());
sor.find(".dpi_megfelel").text(sor.find("input[name=minoseg_"+act_meret+"]").val()+" ("+sor.find("input[name=dpi_"+act_meret+"]").val()+" dpi)");
sor.find("select[name=meret]").removeClass("feher zold").addClass(act_zold_class);
}
var before = now();
//this is the ajax part
if(ajax_modositaskor)
$.post('/_fn/cikk/mod.php',{
'cikk_tipus':cikk_tipus,
'cikk_id':cikk_id,
'tulajdonsag':tulajdonsag,
'ertek':ertek
},function(a){
// Response array a: [status, row price, total price, summary, php ms].
var elapsed = now() - before;
if(a[0]!="1")
{
//error
alert(a[0]);
return;
}
if(a[1]!="-1")
{
//there is new price
$(event_object.target).parents('.cikk').find('.ar').text(a[1]);
}
if(a[2]!="-1")$('#cikkek_ara').text(a[2]);
osszegzest_frissit(a[3]);
// Track min/max/average of ajax round-trip vs. server-side PHP time.
var php_time = Math.round(a[4])
a_min = Math.min(a_min,elapsed);
p_min = Math.min(p_min,parseFloat(php_time));
a_max = Math.max(a_max,elapsed);
p_max = Math.max(p_max,parseFloat(php_time));
if(!a_avg)a_avg = elapsed;else a_avg= Math.round((a_avg+elapsed)/2);
if(!p_avg)p_avg = php_time;else p_avg = Math.round((p_avg+php_time)/2);
trace("ajax="+elapsed+"\tphp="+php_time+"\tajax_min="+a_min+"\tphp_min="+p_min+"\tajax_max="+a_max+" \tphp_max="+p_max+"\tajax_avg="+a_avg+" \tphp_avg="+p_avg);
},"json").error(function() { postHiba() });
});
The problem was that the hidden data was too large (see my other question), which increased the processing time. (Firefox seems to be well coded, because this did not matter there.)
Now the problem is fixed.