Page rendering is slow in pdf-annotate.js

I am using pdf-annotate.js to display PDFs. It reads the document content very quickly, but rendering the pages is very slow. How can I fix this problem?
My render page code:
function render() {
  PDFJS.getDocument(RENDER_OPTIONS.documentId).then(function (pdf) {
    RENDER_OPTIONS.pdfDocument = pdf;
    var viewer = document.getElementById('viewer');

    // Restore the cached page markup if we have it; otherwise start empty.
    var cachedViewer = localStorage.getItem(RENDER_OPTIONS.documentId + '/VIEWER');
    if (cachedViewer) {
      viewer.innerHTML = cachedViewer;
      console.log('loaded from cache');
    } else {
      viewer.innerHTML = '';
    }

    NUM_PAGES = pdf.pdfInfo.numPages;
    for (var i = 0; i < NUM_PAGES; i++) {
      var page = UI.createPage(i + 1);
      viewer.appendChild(page);

      UI.renderPage(i + 1, RENDER_OPTIONS).then(function (_ref) {
        var _ref2 = _slicedToArray(_ref, 2),
            pdfPage = _ref2[0],
            annotations = _ref2[1];
        var viewport = pdfPage.getViewport(RENDER_OPTIONS.scale, RENDER_OPTIONS.rotate);
        PAGE_HEIGHT = viewport.height;
        localStorage.setItem(RENDER_OPTIONS.documentId + '/HEIGHT', pdfPage.pageInfo.view[3]);
      });
    }
    $("#pgloader").css("display", "none");

    // Cache the markup itself (storing the element object would serialize
    // to "[object HTMLDivElement]"). Note the pages render asynchronously,
    // so this may still run before they finish.
    localStorage.setItem(RENDER_OPTIONS.documentId + '/VIEWER', viewer.innerHTML);
    renderedPages.push(1);
  });
}
How do I increase the rendering speed in my application? Kindly help me with this issue.

Please consider this "answer" just an opinion piece and orientation rather than a definitive solution, as I only want to add my two cents here.
We have similar problems with pdf-annotate.js, and so far this performance issue is unresolved.
What we did was split larger, more complex PDF files up page by page. When the user wants to see a particular page, we show a spinner for as long as it takes to process that page, then store the processed page in the browser's storage.
Sadly, this method doesn't fix the real problem, so consider it a temporary duct-tape "fix".
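To illustrate the per-page idea with pdf-annotate.js's own API (the question's code already uses UI.createPage and UI.renderPage), here is a minimal sketch of creating all page containers up front, which is cheap, but calling UI.renderPage only for pages near the viewport. visiblePageNumbers() is a hypothetical helper standing in for whatever scroll/visibility logic your app uses:

var rendered = new Set();

function initPages() {
  var viewer = document.getElementById('viewer');
  for (var i = 0; i < NUM_PAGES; i++) {
    viewer.appendChild(UI.createPage(i + 1)); // empty container, not rendered yet
  }
}

function renderVisiblePages() {
  // visiblePageNumbers() is hypothetical: it should return the page
  // numbers currently scrolled into (or near) view, e.g. [3, 4].
  visiblePageNumbers().forEach(function (pageNumber) {
    if (rendered.has(pageNumber)) {
      return; // already rendered
    }
    rendered.add(pageNumber);
    UI.renderPage(pageNumber, RENDER_OPTIONS);
  });
}

// Re-check on scroll (throttling omitted for brevity); attach this to
// whichever element actually scrolls in your layout.
window.addEventListener('scroll', renderVisiblePages);
initPages();
renderVisiblePages();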
Please check out this issue on GitHub regarding performance and report your case there too.


JavaScript works when setTimeout() is used, but it isn't working when document.addEventListener('DOMContentLoaded', x) is used on a WordPress page. Why?

I have a few lines of JavaScript code that pick up heading texts from separate sections and place them into their respective input fields. They are also executed on single pages using wp_enqueue_script.
It works absolutely fine when setTimeout() is used:
function passengerElevator() {
  var getProductName = document.querySelectorAll('[data-id="6657316"]');
  getProductName.forEach(function(item) {
    var productName = item.querySelector('.lift');
    var inputArea = item.querySelector('input[name=product]');
    inputArea.value = productName.innerText;
  });
  getProductName = document.querySelectorAll('[data-id="e9c06d5"]');
  getProductName.forEach(function(item) {
    var productName = item.querySelector('.lift');
    var inputArea = item.querySelector('input[name=product]');
    inputArea.value = productName.innerText;
  });
}
setTimeout(function() { passengerElevator() }, 3000);
However, there is a problem with page size (some pages have more than 10 input fields) and I don't want to set an astronomically high delay for the script. So I decided to fire it on DOMContentLoaded:
document.addEventListener("DOMContentLoaded", passengerElevator);

function passengerElevator() {
  var getProductName = document.querySelectorAll('[data-id="6657316"]');
  getProductName.forEach(function(item) {
    var productName = item.querySelector('.lift'); // heading text (ex: Panoramic Lift)
    var inputArea = item.querySelector('input[name=product]');
    inputArea.value = productName.innerText; // output here
  });
  getProductName = document.querySelectorAll('[data-id="e9c06d5"]');
  getProductName.forEach(function(item) {
    var productName = item.querySelector('.lift'); // heading text (ex: Home Lift)
    var inputArea = item.querySelector('input[name=product]');
    inputArea.value = productName.innerText; // output here
  });
}
As you may have already guessed, it is not working. Is my code too messy to be executed faster, or is there another problem I am missing?
I know similar questions have been asked previously; however, no existing answer I found was able to help me.
It seems like you are trying to loop through elements that have not loaded yet. Perhaps they are appended to the page via Ajax, so DOMContentLoaded can't help there.
You can create your own check for those elements using setInterval, something like this:
let dataIdCheck = setInterval(() => {
  if (document.querySelectorAll('[data-id="6657316"]').length > 0 &&
      document.querySelectorAll('[data-id="e9c06d5"]').length > 0) {
    clearInterval(dataIdCheck);
    // your code here
  }
}, 500);
This code runs every 500 milliseconds and checks, using .length, whether those two elements exist. Once they do, we stop the interval and run the code.
I also suggest adding a console.log('in') inside the check to verify that the interval stops running once the elements are found.
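If polling feels wasteful, a MutationObserver is another option: it fires as soon as nodes are added, with no fixed interval. A minimal sketch, assuming the same two data-id selectors as above:

const observer = new MutationObserver(() => {
  if (document.querySelectorAll('[data-id="6657316"]').length > 0 &&
      document.querySelectorAll('[data-id="e9c06d5"]').length > 0) {
    observer.disconnect(); // stop watching once the elements exist
    passengerElevator();
  }
});

// Watch the whole document for nodes added anywhere in the tree.
observer.observe(document.documentElement, { childList: true, subtree: true });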

Calculating IndexedDB database size in a Cordova iOS app without crashing the app

I have a Cordova app for iOS in which I'm using IndexedDB to store significant amounts of data in separate stores in one database.
I want to inform the user of the amount of space the app is using, partly because the limit for IndexedDB seems to be unclear and differs between devices, and I'd like to see where usage stands at the point of failure. I'd also like to warn users that they need to manage the data they're storing offline before it becomes a problem (although I know I can capture failure in the transaction abort event; I just have no idea what the limit is!).
In development I've been using the function below in the browser (I have the browser platform added, just for development), which has worked well:
function showIndexedDbSize(db_name) {
  "use strict";
  var this_db;
  var storesizes = [];

  function openDatabase() {
    return new Promise(function(resolve, reject) {
      var request = window.indexedDB.open(db_name);
      request.onsuccess = function(event) {
        this_db = event.target.result;
        resolve(this_db.objectStoreNames);
      };
    });
  }

  function getObjectStoreData(storename) {
    return new Promise(function(resolve, reject) {
      // 'readonly' is the standard mode string; the older
      // IDBTransaction.READ_ONLY constant has been removed from browsers.
      var trans = this_db.transaction(storename, 'readonly');
      var store = trans.objectStore(storename);
      var items = [];
      trans.oncomplete = function(evt) {
        var szBytes = toSize(items);
        var szMBytes = (szBytes / 1024 / 1024).toFixed(2);
        storesizes.push({
          'Store Name': storename,
          'Items': items.length,
          'Size': szMBytes + 'MB (' + szBytes + ' bytes)'
        });
        resolve();
      };
      var cursorRequest = store.openCursor();
      cursorRequest.onerror = function(error) {
        reject(error);
      };
      cursorRequest.onsuccess = function(evt) {
        var cursor = evt.target.result;
        if (cursor) {
          items.push(cursor.value);
          cursor.continue();
        }
      };
    });
  }

  // Estimate byte size by serializing each record; JS strings are UTF-16,
  // hence the * 2.
  function toSize(items) {
    var size = 0;
    for (var i = 0; i < items.length; i++) {
      size += JSON.stringify(items[i]).length * 2;
    }
    return size;
  }

  openDatabase().then(function(stores) {
    var PromiseArray = [];
    for (var i = 0; i < stores.length; i++) {
      PromiseArray.push(getObjectStoreData(stores[i]));
    }
    Promise.all(PromiseArray).then(function() {
      this_db.close();
      console.table(storesizes);
    });
  });
}
It works well on the device too when the stores total less than about 150 MB (there isn't a clear threshold), but it uses JSON.stringify to serialize the objects in order to count the bytes, and as the database grows larger on the device that process forces the app to restart. I'm watching the memory usage in Xcode and it doesn't peak at all: it hovers between 25 and 30 MB whatever you do, not just during this, which seems fine to me. The CPU is also below 5%. The energy usage is high, but I'm not sure that would affect the app negatively beyond draining the battery faster (unless I've misunderstood something). So I'm not sure why it forces an ugly restart.
In my endless googling I've learnt that JSON.parse and JSON.stringify are very hungry processes, which is why I switched to IndexedDB in the first place: it allows the storage of objects, avoiding these processes entirely.
My questions are as follows:
1. Is there a way to amend the function to slow it down (it doesn't need to be fast, just reliable!) and prevent the restart?
2. Why would the app restart if there is no discernible pressure on the memory in Xcode? Or is this not a good way of detecting this sort of thing? Is there some hidden garbage-collection problem in the function? (I'm a noob when it comes to GC generally, but there don't seem to be any leaks in the app.)
3. Is there a better way to show the usage of the database that would avoid this problem? Everything I find relies on these JSON processes, and the navigator.storage Web API doesn't appear to be supported on the Cordova iOS platform (which is a real shame, as it works amazingly in the browser! Gah!)
Any suggestions/thoughts massively appreciated!
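For what it's worth, here is an untested sketch of one direction for question 1: tally the size record by record inside the cursor callback, instead of collecting every value into an items array that lives until the transaction completes, so only one record is held at a time. The function below reuses the shapes from the code above; whether it actually prevents the restart on a device is an open assumption:

function getObjectStoreSize(db, storename) {
  return new Promise(function(resolve, reject) {
    var trans = db.transaction(storename, 'readonly');
    var store = trans.objectStore(storename);
    var size = 0;
    var count = 0;
    store.openCursor().onsuccess = function(evt) {
      var cursor = evt.target.result;
      if (!cursor) {
        // Cursor exhausted: report the totals.
        resolve({ store: storename, items: count, bytes: size });
        return;
      }
      // Serialize one record at a time; strings are UTF-16, hence * 2.
      size += JSON.stringify(cursor.value).length * 2;
      count++;
      cursor.continue();
    };
    trans.onerror = function(evt) {
      reject(evt.target.error);
    };
  });
}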

Salesforce, Locker: Cannot "createObjectURL" using a unsecure [object File]

I'm having a problem with a Lightning component that was written by another developer who left the company. Users tell me the tool was working perfectly a month ago, but I have no idea what is happening now.
The error is:
This page has an error. You might just need to refresh it. Action failed: c:EMB_CCW_Panel$controller$onPickFile [Locker: Cannot "createObjectURL" using a unsecure [object File]!] Failing descriptor: {c:EMB_CCW_Panel$controller$onPickFile}
and the JavaScript method is this one:
onPickFile : function(component, event, helper) {
  var catalog = component.get("v.catalogWrapper");
  var brandsList = component.get("v.brandsList");
  console.log("onPickFile", catalog);
  var file = event.target.files[0];
  var fileURL = URL.createObjectURL(file);
  var req = new XMLHttpRequest();
  req.open('GET', fileURL);
  req.onload = function() {
    URL.revokeObjectURL(fileURL);
    component.set("v.catalogWrapper",
      helper.fillCatalogWithXMLData(catalog,
        helper.extractSlideNotesFromODTContentXML(this.responseXML), brandsList));
  };
  req.onerror = function() {
    URL.revokeObjectURL(fileURL);
    console.log('Error loading XML file.');
  };
  req.send();
},
and the helper methods:
extractSlideNotesFromODTContentXML : function(xmlDoc) {
  var output = [];
  var slides = xmlDoc.getElementsByTagName("draw:page");
  for (var s = 0; s < slides.length; s++) {
    var notes = slides[s].getElementsByTagName("presentation:notes")[0]
      .getElementsByTagName("draw:frame")[0]
      .getElementsByTagName("draw:text-box")[0]
      .getElementsByTagName("text:p");
    var slideNotesList = [];
    for (var i = 0; i < notes.length; i++) {
      slideNotesList.push(notes[i].textContent);
    }
    output.push(slideNotesList);
  }
  return output;
},
fillCatalogWithXMLData : function(catalog, slidesList, brandsList) {
  try {
    var referenceRegEX = /^(\d){9}/;
    for (var i = 0; i < slidesList.length; i++) {
      catalog.slides.splice(i, 0, this.generateSlideObject(i + 1));
      for (var j = 0; j < slidesList[i].length; j++) {
        var wholeLine = slidesList[i][j];
        var firstWord = wholeLine.split(" ")[0].toUpperCase();
        // Lines that begin with a 9-digit number are references (SAP id code);
        // consider the rest brand names:
        if (referenceRegEX.test(firstWord) && firstWord.length == 9) {
          catalog.slides[i].referencesText += wholeLine + "\n";
        } else {
          // Not a reference; check if the whole line is a brand
          // (removing leading and trailing spaces):
          if (brandsList.includes(wholeLine.trim())) {
            catalog.slides[i].brandsText += wholeLine + "\n";
          } else {
            catalog.slides[i].unrecognizedText += wholeLine + "\n";
          }
        }
      }
    }
    // Note: `component` is not defined in this helper's scope, so this call
    // throws, and the empty catch below silently swallows the error.
    component.set("v.catalogWrapper", catalog);
  } catch (err) {
  }
  return catalog;
}
Can anyone help me or tell me how I can fix it?
Thanks.
If it used to work a month ago, it's probably something Salesforce patched in the Summer release. No idea what (if anything) is insecure in your code, but it sounds like you're being hit by Lightning Locker Service. Do you get the same result in different browsers?
See if it works if you knock the component's API version back to 39. It's a hack, but it might be temporary relief while you figure out what to do.
This suggests File is supported all right: https://developer.salesforce.com/docs/component-library/tools/locker-service-viewer
Maybe you need to read the file's content a different way, or maybe you need to give up on parsing it with JavaScript and push it to server-side Apex; I don't know what your functionality is.
If you go to Setup -> Lightning Components -> Debug Mode and enable it for yourself, it might help a bit: you will see more human-friendly generated code in the browser's developer tools, and debugging might be simpler. The lesson learned would be to pay more attention to release preview windows (from around September 12th we can preview the Winter '21 release; SF should publish a blog post about it in 1-2 weeks).
This looks promising: https://salesforce.stackexchange.com/a/245232/799
Maybe your code needs proper Aura accessors: event.getSource().get("v.files")[0] instead of event.target.files[0]. You really would have to debug it and experiment in the browser's console to see what sticks.
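One possible direction (a sketch only, not a confirmed Locker-safe fix) is to skip createObjectURL and the XMLHttpRequest round-trip entirely: read the picked file with FileReader and parse the text with DOMParser, keeping the rest of the controller's flow intact. The event.getSource() fallback follows the Aura-accessor suggestion above:

onPickFile : function(component, event, helper) {
  var catalog = component.get("v.catalogWrapper");
  var brandsList = component.get("v.brandsList");
  // Aura-style accessor where available; falls back to the raw DOM event.
  var file = (event.getSource && event.getSource().get("v.files"))
    ? event.getSource().get("v.files")[0]
    : event.target.files[0];

  var reader = new FileReader();
  // $A.getCallback keeps the async callback inside the Aura lifecycle.
  reader.onload = $A.getCallback(function() {
    // Parse the XML text directly instead of fetching a blob URL.
    var xmlDoc = new DOMParser().parseFromString(reader.result, "application/xml");
    component.set("v.catalogWrapper",
      helper.fillCatalogWithXMLData(catalog,
        helper.extractSlideNotesFromODTContentXML(xmlDoc), brandsList));
  });
  reader.onerror = function() {
    console.log('Error reading XML file.');
  };
  reader.readAsText(file);
}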

Is there a way in javascript to get all iframes that don't violate same-origin-policy with getElementsByTagName?

I am writing a Chrome extension that needs to get an HTML5 video element on a page, but some video elements are in iframes. I extended my function to search through iframes, but I started getting errors because of the same-origin policy for iframes with different src domains.
In my function, is there an easy way to exclude iframes that violate the same-origin policy? Or better yet (even though it's likely not possible), is there a way to still search iframes whose src domains differ from the main page?
The function in question:
function getVideo() {
  var videos = document.getElementsByTagName("video");
  if (videos.length >= 1) {
    return videos[0];
  }
  var iframes = document.getElementsByTagName("iframe");
  for (const frame of iframes) {
    videos = frame.contentWindow.document.getElementsByTagName("video");
    if (videos.length >= 1) {
      return videos[0];
    }
  }
  return null; // if a video doesn't exist
}
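For completeness, the direct answer to the "exclude" part of the question is that cross-origin access throws, so it can be filtered with try/catch. A minimal sketch:

function getSameOriginVideos() {
  var found = [];
  for (const frame of document.getElementsByTagName("iframe")) {
    try {
      // Throws a SecurityError for cross-origin frames, so only
      // same-origin frames make it past this line.
      var doc = frame.contentWindow.document;
      found.push(...doc.getElementsByTagName("video"));
    } catch (e) {
      // Cross-origin frame: skip it.
    }
  }
  return found;
}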
Edit:
See bottom of answer for current solution.
Original Answer:
Just when you think you have searched long enough on here to warrant a question, you find something that is actually useful.
Based on the answer to this question, I updated my code to this:
function getVideo() {
  var videos = document.getElementsByTagName("video");
  if (videos.length >= 1) {
    return [videos[0], "main"];
  }
  // Code that will run only inside iframe
  if (parent === top) {
    videos = document.getElementsByTagName("video");
    if (videos.length >= 1) {
      return [videos[0], "iframe"];
    }
  }
  return [null, null]; // if a video doesn't exist
}
And included this in my manifest, so that the script would be injected into the iframes as well:
"content_scripts": [{
"all_frames": true,
...
...
}],
This works, but it does make my other functions pretty ugly. Example:
// if a video exists, set it to the speed and give a notification
function setSpeed(newSpeed) {
var returnVal = getVideo();
var video = returnVal[0], type = returnVal[1];
if (video != null && type === "main") {
video.playbackRate = newSpeed;
inWindowAlert("Speed: " + newSpeed, 1000);
setIcon(newSpeed);
}
if (parent === top) {
if (video != null && type === "iframe") {
video.playbackRate = newSpeed;
inWindowAlert("Speed: " + newSpeed, 1000);
setIcon(newSpeed);
}
}
}
I will try to come up with a cleaner solution, but for now it works. Hope this helps someone.
New Solution:
Just wanted to update this to say that I completely rewrote the extension's logic to handle this more easily.
The new solution lets each instance of the content script, running in its own frame, operate independently and simply collect all videos in its own context. I also collect them in a Set to avoid accidental duplication.
I still want to go through again to eliminate the usage of global variables, but the extension works well enough now, so it will likely remain as it is for a while.
New code:
function getVideos() {
  let new_videos = document.getElementsByTagName("video");
  if (new_videos.length >= 1) {
    // collect into a Set to avoid accidental duplication
    VIDEOS = new Set(new_videos);
    return VIDEOS;
  }
  return new Set();
}
and
// set video to the speed and give a notification; restricts available speeds
function setSpeed(newSpeed, video) {
  // BUG-FIX: playback rates between 0 and 0.07 were causing errors,
  // so anything below 0.07 is clamped to 0
  newSpeed = newSpeed > 16 ? 16 : newSpeed < 0.07 ? 0 : newSpeed;
  // limit decimal values to 2 digits; the + in front truncates 2.00 -> 2
  SPEED = +newSpeed.toFixed(2);
  video.playbackRate = SPEED;
  tempAlert("Speed: " + SPEED, 2000, video);
  setIcon(SPEED);
}
Where VIDEOS and SPEED are global variables.
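As a usage illustration (hypothetical glue code, not part of the extension as posted), each frame's copy of the content script can apply a speed to every video it found in its own context, so cross-origin frames are never touched directly:

// Hypothetical message handler: every frame's content script instance
// reacts independently to the same broadcast message.
chrome.runtime.onMessage.addListener(function (message) {
  if (message.type === "SET_SPEED") {
    getVideos().forEach(function (video) {
      setSpeed(message.speed, video);
    });
  }
});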

A good way to utilize javascript to download files in batches to prevent browser time out

So to start, I am using the fancybox library here.
http://fancyapps.com/fancybox/
My end goal is to create a photo album that downloads images stored on Amazon S3 via CloudFront. Right now it is set up so that there is a main page where each page has a single image; click that image and it opens an album.
The problem is that if my album has 75 items, or some similarly large number, the browser will time out and crash, or cause my computer to run out of memory (I am running this locally as I develop it).
Here is some sample code:
function photoDL() {
  var num = 0;
  var batch = 0;
  for (i = 1; i < 62; i++) {
    var myphoto = 'https://MYCODEFRONTSTUFF/' + i + '.jpg';
    var albumpic = new Array();
    albumpic.push(myphoto);
    for (batch = 1; batch < 5; batch++) {
      $.fancybox.open([
        {
          src: albumpic[num],
          opts: {
            caption: 'First caption'
          }
        }
      ], {
        loop: false,
        hash: "album3"
      });
      setTimeout(photoDL(), 10000);
      num += 1;
    }
  }
}
I know there is some stuff in there that doesn't look right. I thought I could use the setTimeout to buffer and give each image some time to download, but that didn't help: it still tries to download them all at once. I was playing with the idea of creating batches but hit a dead end. Any help is greatly appreciated.
Your code loops 61 times, and inside each of those iterations you have an inner loop that runs 4 times, so a single call opens a new fancyBox instance (over the existing one) 244 times. Worse, setTimeout(photoDL(), 10000) invokes photoDL immediately (you are passing its return value, not the function), so every one of those 244 inner iterations kicks off another complete run right away: 244 × 244 = 59,536 more opens, and so on recursively. Is it any wonder your browser crashes? :)
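The setTimeout detail is worth spelling out, since the parentheses change the meaning entirely:

// Invokes photoDL right now, then schedules its return value (undefined):
setTimeout(photoDL(), 10000);

// Passes the function itself, so it runs once after 10 seconds:
setTimeout(photoDL, 10000);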
Thanks for the insights; I did manage to figure this one out. Introducing the fixed code:
function photoDL(batch, url, ftype) {
  var fArr = [];
  var albumpic = new Array();
  for (i = 1; i < batch; i++) {
    var myphoto = url + i + ftype;
    albumpic[i] = myphoto;
    fArr[i] = {
      src: albumpic[i],
      opts: {
        caption: 'First caption'
      }
    };
  }
  fArr.shift(); // drop the empty slot at index 0 (the loop starts at 1)
  $.fancybox.open(fArr, {
    loop: false,
    hash: "album3"
  });
}
A little slow but it's definitely working!
