I am running Protractor on a slow machine and I need it to slow down each key press and each action. The action part is done, but how can I do the key press part?
I have a local solution, which is:
function delay(el, value, newDelay) {
    for (var i = 0; i < value.length; i++) {
        browser.sleep(newDelay || browser.params.delay);
        el.sendKeys(value[i]);
    }
}
In onPrepare I was able to slow down each action with:
var origFn = browser.driver.controlFlow().execute; // keep a reference to the original execute
browser.driver.controlFlow().execute = function () {
    var args = arguments;
    if (arguments[1] === "WebElement.sendKeys()")
        debugger;
    origFn.call(browser.driver.controlFlow(), function () {
        return protractor.promise.delayed(100);
    });
    return origFn.apply(browser.driver.controlFlow(), args);
};
but I don't know how to slow down the sendKeys; I believe I have to do something where I placed the debugger, but what?
Apparently, the only solution I found was to first try to send the entire string and, if that fails, send the keys one by one and check again, so my code is something like this:
el.getAttribute('value').then(function (insertedValue) {
    if (insertedValue !== value) {
        el.clear().then(function () {
            el.sendKeys(protractor.Key.END);
            for (var i = 0; i < value.length; i++) {
                browser.sleep(100);
                el.sendKeys(value[i]);
                el.sendKeys(protractor.Key.END);
            }
            if (tryNo < 1) {
                el.getAttribute('value').then(function (insertedValue) {
                    if (insertedValue !== value) {
                        .......................
                    }
                });
            }
        });
    }
});
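For what it's worth, the same idea can be wrapped into a reusable helper. This is only a sketch of the approach described above, not the original code: the name slowSendKeys, the maxRetries parameter, and the fixed 100 ms sleep are assumptions.
// Hypothetical helper (sketch): types value one character at a time with a pause,
// then re-reads the input and retries a limited number of times if the value did not stick.
function slowSendKeys(el, value, maxRetries) {
    return el.clear().then(function () {
        for (var i = 0; i < value.length; i++) {
            browser.sleep(100);
            el.sendKeys(value[i]);
            el.sendKeys(protractor.Key.END);
        }
        return el.getAttribute('value').then(function (insertedValue) {
            if (insertedValue !== value && maxRetries > 0) {
                return slowSendKeys(el, value, maxRetries - 1); // try again
            }
        });
    });
}
Usage would then be something along the lines of slowSendKeys(someElement, 'some text', 2), where someElement is whatever element finder you already use.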
I have these two functions, where "form" is the name of the Vue object:
form.sizeChartAsImage();
form.setSizeChart();
This is the code of said functions:
setSizeChart: function () {
    for (i = 0; i < this.columns.length; i++) {
        this.product.size_chart.push({
            position_x: 0,
            position_y: i,
            value: this.columns[i],
        })
        for (j = 0; j < this.data.length; j++) {
            for (var key in this.data[j]) {
                if (key === this.columns[i]) {
                    this.product.size_chart.push({
                        position_x: j + 1,
                        position_y: i,
                        value: this.data[j][key],
                    })
                }
            }
        }
    }
}
sizeChartAsImage: function() {
    html2canvas($("#size-chart").get(0)).then(canvas => {
        canvas.toBlob(blob => {
            var sizechartImg = document.createElement('img');
            url = URL.createObjectURL(blob);
            sizechartImg.src = url;
            this.product.size_chart_image = sizechartImg;
            debugger;
        }, 'image/png');
    })
}
Nevertheless, the second function gets executed first (the debugger enters first), then the rest of the code runs; the form is submitted and, lastly, sizeChartAsImage() gets executed, having no effect (since the form was submitted with "size_chart_image" as null).
This is what I'm trying to render, and it does generate the image.
<demo-grid
    :data="data"
    :columns="columns"
    id="size-chart">
</demo-grid>
Could it be because it's a Vue component? Performance issues? Or do I need to use a callback maybe?
It's because html2canvas($("#size-chart").get(0)) is returning a promise (maybe just a thenable), which is an async call.
So sizeChartAsImage will run, and html2canvas($("#size-chart").get(0)) will execute. While the script is waiting for that to resolve, it'll continue to the setSizeChart function and run it. Only then will it return to the code within the then(canvas => ...) callback.
You could either call setSizeChart at the end of the callback. Or, if you're using ES2017 or greater, you could re-write sizeChartAsImage to be async and await it.
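For illustration, a rough sketch of the async variant (assuming html2canvas resolves with the canvas, as described above; the Promise wrapper around toBlob and the exact method shape are mine, not the original code):
sizeChartAsImage: async function () {
    // Wait for html2canvas, then wrap the callback-based toBlob in a Promise so it can be awaited too.
    const canvas = await html2canvas($("#size-chart").get(0));
    const blob = await new Promise(resolve => canvas.toBlob(resolve, 'image/png'));
    const sizechartImg = document.createElement('img');
    sizechartImg.src = URL.createObjectURL(blob);
    this.product.size_chart_image = sizechartImg;
}
The caller would then also have to await form.sizeChartAsImage() before calling form.setSizeChart() and submitting, otherwise the ordering problem comes right back.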
I have a recursive Javascript function which gets the links from one Wikipedia page, follows them, and then gets all of those links (repeating a specified number of times).
It calls itself an unknown number of times to construct an object of a known depth. When it completes, I want to output the object. Currently the object immediately outputs, and is empty, meaning the function obviously isn't waiting for all the recursive calls to complete.
As you can see, I have attempted to use callbacks, but I assume incorrectly. What am I doing wrong, and how should I be doing it? I presume there are a few other things wrong that I haven't spotted too; I'm relatively new to JavaScript.
$(document).ready(function ()
{
    pageLinks[START_PAGE] = {};
    //Get initial pages
    links = getLinks(START_PAGE, 0, printLinks);
});
function printLinks()
{
    console.log(links);
}
function getLinks(currentPage, level, callback)
{
    visitedPages.push(currentPage)
    var pageLinks = {}
    var data = $.getJSON(URL_BEGIN + currentPage + URL_END, function(data)
    {
        var pages = data.query.pages;
        for(var page in pages)
        {
            pageContentObj = pages[page].revisions[0];
            for(var key in pageContentObj) if(pageContentObj[key].length > 100)
            {
                var pageContent = pageContentObj[key];
                //Get links
                hyperlinks = getFromBetween.get(pageContent,"[[","]]");
                for(var link in hyperlinks)
                {
                    link = hyperlinks[link].split("|")[0]; //Remove friendly name
                    link = link.replaceAll(" ", "%20");
                    //Add to pagelist object
                    prefix = link.split(":")[0];
                    if(prefix != "Category" && prefix != "File" && prefix != "wikipedia")
                        if(level < ITERATIONS && !visitedPages.includes(arguments, link))
                        {
                            console.log(level + ": " + link)
                            pageLinks[link] = getLinks(link, level+1, callback); //===Recursive call===
                        }
                }
            }
        }
    });
    if(level == 0 && callback) callback();
    return pageLinks;
}
Any help is appreciated, thanks in advance.
**EDIT:** Link: https://github.com/JakeStanger/Wikipedia-Mapper/blob/master/init.js#L53
The recursive call needs to be like this:
var counter = 0;
//the big for loop
counter++;
getLinks(link, level + 1, function(res) {
    for (var key in res) { //with an array it would be concat...
        pageLinks[key] = res[key];
    }
    counter--;
    if (counter == 0 && callback) callback(pageLinks); //callback if all callbacks called
});
Also remove this weird code:
if(level == 0 && callback) callback();
Now you can do:
getLinks(START_PAGE, 0, console.log);
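Putting it together, a rough sketch of how getLinks could look with the counter approach (extractLinks is a hypothetical stand-in for the [[...]] parsing in the question, and here each child result is nested under its link to build the nested object the question describes; the snippet above merges the keys flat instead):
function getLinks(currentPage, level, callback) {
    visitedPages.push(currentPage);
    var pageLinks = {};
    $.getJSON(URL_BEGIN + currentPage + URL_END, function (data) {
        var counter = 0;
        var links = extractLinks(data); // hypothetical helper: the parsing from the question
        for (var i = 0; i < links.length; i++) {
            (function (link) { // closure so each callback sees its own link
                if (level < ITERATIONS && !visitedPages.includes(link)) {
                    counter++;
                    getLinks(link, level + 1, function (res) {
                        pageLinks[link] = res;
                        counter--;
                        if (counter == 0 && callback) callback(pageLinks); // all children reported back
                    });
                }
            })(links[i]);
        }
        if (counter == 0 && callback) callback(pageLinks); // no recursive calls were started
    });
}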
I have a function examples(a,b,c,…). Each argument can throw an error. All of them are handled the same way. I guess examples([array]) would fall in the same category.
In my code I currently have something like this:
for (i = 0; i < len; i++) {
    try { this.example(arg[i]) }
    catch (e) { log(e) }
}
From a user perspective I'd like to be able to see the errors for all arguments at once, instead of fixing one and then discovering the next one, etc. But I end up catching the errors myself, which seems, to me, not desirable for a utility function.
Is there a way to rethrow all errors at once?
What are the best practices?
If there's a standard, why is it being recommended?
Well, you can throw pretty much anything, but rather than just a simple Array you may find you have more maneuverability using a custom error type:
function ErrorCollection(msg) {
    this.name = 'ErrorCollection';
    this.message = msg || '';
    this.errors = [];
}
ErrorCollection.prototype = Object.create(Error.prototype);
ErrorCollection.prototype.constructor = ErrorCollection;
ErrorCollection.prototype.push = function (e) {
    return this.errors.push(e);
}
// ...
var i, ec = new ErrorCollection();
for (i = 0; i < 10; ++i) {
    try {
        throw new Error('Error ' + i);
    } catch (e) {
        ec.push(e);
    }
}
// do something with ec, e.g.
if (ec.errors.length) {
    console.log(ec, ec.errors);
    throw ec;
}
An example of stopping on the first error, then validity-checking the remainder:
var i, a = [];
for (i = 0; i < arguments.length; ++i) {
    try { // assume everything will work
        this.example(arguments[i]);
    } catch (e) { // our assumption was wrong
        a.push({arg: i, val: arguments[i]});
        for (++i; i < arguments.length; ++i) { // loop from where we left off
            if (!this.valid_arg(arguments[i])) { // some costly test
                a.push({arg: i, val: arguments[i]});
            }
        }
        throw a; // throw the list of bad args up a level
    }
}
If the validity test will be fast/isn't costly then you may consider doing it in advance of your main loop instead of waiting for the first error, as you should then be able to avoid try..catch at this level entirely.
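A minimal sketch of that up-front variant, reusing the valid_arg test from the example above (the names are taken from that snippet, not from any real API):
// Validate everything first; only call example() once all arguments look sane.
var i, bad = [];
for (i = 0; i < arguments.length; ++i) {
    if (!this.valid_arg(arguments[i])) { // cheap validity check
        bad.push({arg: i, val: arguments[i]});
    }
}
if (bad.length) {
    throw bad; // report every bad argument at once
}
for (i = 0; i < arguments.length; ++i) {
    this.example(arguments[i]); // no try..catch needed here
}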
I have a web application where I load (via AJAX) a dictionary file (1 MB) into a JavaScript array. I found the reason why Mobile Safari crashes after 10 seconds. But now what I'm wondering is: how do I get around this issue?
In the link above the answer suggests using setInterval, but this would mean I would have to chunk the dictionary file into pieces and load them one by one. This surely could be done, but I would have to make a lot of chunks, taking into account the internet speed, and too many requests would take forever for the page to load (and if I make the chunks too big, some mobile users might not be able to download a chunk within the given 10-second period).
So, my question is: has anyone encountered this kind of problem and how did you go about it? A general push in the right direction is appreciated.
edit:
This is the js code which I use to load the dictionary:
var dict = new Trie();
$.ajax({
    url: 'data/dictionary_342k_uppercase.txt',
    async: true,
    success: function (data) {
        var words = data.split('\n');
        for (var i = words.length - 1; i >= 0; i--) {
            dict.insert(words[i]);
        }
    },
    error: function(){
        $('#loading-message').text("Problem s rječnikom");
    }
});
Trie.js:
function Trie () {
    var ALPHABET_SIZE = 30;
    var ASCII_OFFSET = 'A'.charCodeAt();
    this.children = null;
    this.isEndOfWord = false;
    this.contains = function (str) {
        var curNode = this;
        for (var i = 0; i < str.length; i++) {
            var idx = str.charCodeAt(i) - ASCII_OFFSET;
            if (curNode.children && curNode.children[idx]) {
                curNode = curNode.children[idx];
            } else {
                return false;
            }
        }
        return curNode.isEndOfWord;
    }
    this.has = function (ch) {
        if (this.children) {
            return this.children[ch.charCodeAt() - ASCII_OFFSET] != undefined;
        }
        return false;
    }
    this.next = function (ch) {
        if (this.children) {
            return this.children[ch.charCodeAt() - ASCII_OFFSET];
        }
        return undefined;
    }
    this.insert = function (str) {
        var curNode = this;
        for (var i = 0; i < str.length; i++) {
            var idx = str.charCodeAt(i) - ASCII_OFFSET;
            if (curNode.children == null) {
                curNode.children = new Array(ALPHABET_SIZE);
                curNode = curNode.children[idx] = new Trie();
            } else if (curNode.children[idx]) {
                curNode = curNode.children[idx];
            } else {
                curNode = curNode.children[idx] = new Trie();
            }
        }
        curNode.isEndOfWord = true;
        return curNode;
    }
}
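For context, a quick usage sketch of this trie (the sample words are made up; it only uses the methods defined above, which expect uppercase input given the 'A' offset):
var t = new Trie();
t.insert("CAR");
t.insert("CARD");
console.log(t.contains("CAR")); // true: "CAR" was inserted as a whole word
console.log(t.contains("CA"));  // false: only a prefix, not a stored word
console.log(t.has("C"));        // true: the root has a child node for 'C'
console.log(t.next("C") instanceof Trie); // true: next() returns the child node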
This is a very common issue once you start doing processing in JS. If the Mobile Safari issue is the cause then what you want to do is figure out where the CPU time is going here.
I'm assuming it's the dict.insert() loop and not the data.split() call (that would be a bit more difficult to manage).
The idea here is to split up the dict.insert() loop into functional blocks that can be called asynchronously in a sequenced loop (which is what the setupBuildActions function does). After the first block each subsequent block is called via setTimeout, which effectively resets the function-time counter in the JS runtime (which seems to be what's killing your process).
Using the Sequencer function means you also keep control of the order in which the functions are run (they always run in the sequence they are generated in here and no two or more functions are scheduled for execution at the same time). This is much more effective than firing off thousands of setTimeout calls without callbacks. Your code retains control over the order of execution (which also means you can make changes during execution) and the JS runtime isn't overloaded with scheduled execution requests.
You might also want to check the node project at https://github.com/michiel/sequencer-js for more sequencing examples and http://ejohn.org/blog/how-javascript-timers-work/ for an explanation on setTimeout on different platforms.
var dict = new Trie();
// These vars are accessible from all the other functions we're setting up and
// running here
var BLOCKSIZE = 500;
var words = [];
var buildActions = [];
function Sequencer(funcs) {
    (function() {
        if (funcs.length !== 0) {
            funcs.shift()(arguments.callee);
        }
    })();
}
// Build an Array with functions that can be called async (using setTimeout)
function setupBuildActions() {
    for (var offset=0; offset<words.length; offset+= BLOCKSIZE) {
        buildActions.push((function(offset) {
            return function(callback) {
                for (var i=offset; i < offset + BLOCKSIZE ; i++) {
                    if (words[i] !== null) { // ugly check for code brevity
                        dict.insert(words[i]);
                    }
                }
                // This releases control before running the next dict.insert loop
                setTimeout(callback, 0);
            };
        })(offset));
    }
}
$.ajax({
    url: 'data/dictionary_342k_uppercase.txt',
    async: true,
    success: function (data) {
        // You might want to split and setup these calls
        // in a setTimeout if the problem persists and you need to narrow it down
        words = data.split('\n');
        setupBuildActions();
        new Sequencer(buildActions);
    },
    error: function(){
        $('#loading-message').text("Problem s rječnikom");
    }
});
Here's an example using setTimeout to defer the actual insertion of words into your trie. It breaks up the original string into batches, and uses setTimeout to defer processing of inserting each batch of words. The batch size in my example is 5 words.
The actual batch insertion happens as subsequent event handlers in the browser.
It's possible that just breaking the words up into batches might take too long. If you hit this problem, remember you can chain setTimeout() calls, e.g. iterating for a while, then using setTimeout to schedule another event to iterate over some more, then setTimeout again, etc. (there is a sketch of this after the example below).
function addBatch(batch)
{
    console.log("Processing batch:");
    for (var i = 0; i < batch.length; i++)
        console.log(batch[i]);
    console.log("Return from processing batch");
}
var str = "alpha\nbravo\ncharlie\ndelta\necho\nfoxtrot\n" +
"golf\nhotel\nindia\njuliet\nkilo\nlima\n" +
"mike\nnovember\noscar\npapa\nquebec\n" +
"romeo\nsierra\ntango\nuniform\n" +
"victor\nwhiskey\nxray\nyankee\nzulu";
var batch = []
var wordend;
for (var wordstart = 0; wordstart < str.length; wordstart = wordend+1)
{
    wordend = str.indexOf("\n", wordstart);
    if (wordend < 0)
        wordend = str.length;
    var word = str.substring(wordstart, wordend);
    batch.push(word);
    if (batch.length > 5)
    {
        setTimeout(addBatch, 0, batch);
        batch = [ ];
    }
}
setTimeout(addBatch, 0, batch);
batch = [ ];
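As mentioned above, if even splitting the string into batches takes too long, the splitting loop itself can be chained through setTimeout. A rough sketch of that idea (splitBatches and the 50 ms time budget are illustrative, not part of the original answer):
// Process the string in slices, scheduling the next slice with setTimeout
// so no single run of the loop blocks the browser for too long.
function splitBatches(str, wordstart)
{
    var deadline = Date.now() + 50; // work for roughly 50 ms per slice
    var batch = [];
    while (wordstart < str.length && Date.now() < deadline)
    {
        var wordend = str.indexOf("\n", wordstart);
        if (wordend < 0)
            wordend = str.length;
        batch.push(str.substring(wordstart, wordend));
        wordstart = wordend + 1;
    }
    setTimeout(addBatch, 0, batch); // hand this slice's words to addBatch
    if (wordstart < str.length)
        setTimeout(splitBatches, 0, str, wordstart); // schedule the next slice
}
splitBatches(str, 0);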
I'm using Javascript to write an application that will be used with Phonegap to make an Android application. I'm using the Phonegap File API to read directories and files. The relevant code is shown below:
document.addEventListener("deviceready", onDeviceReady, false);
// PhoneGap is ready
//
function onDeviceReady() {
    window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, onFileSystemSuccess, fail);
}
function onFileSystemSuccess(fileSystem) {
    fileSystem.root.getDirectory("/sdcard", {create: false, exclusive: false}, getDirSuccess, fail);
}
function getDirSuccess(dirEntry) {
    // Get a directory reader
    var directoryReader = dirEntry.createReader();
    // Get a list of all the entries in the directory
    directoryReader.readEntries(readerSuccess,fail);
}
var numDirs = 0;
var numFiles = 0;
function readerSuccess(entries) {
    var i;
    for (i=0; i<entries.length; i++)
    {
        if(entries[i].isFile === true)
        {
            numFiles++;
            entries[i].file(fileSuccess,fail);
        }
        else if (entries[i].isDirectory === true)
        {
            numDirs++;
            getDirSuccess(entries[i]);
        }
    }
}
So as of now, the program works fine. The reader will read the contents of the /sdcard directory. If it encounters a file, it will call fileSuccess (which I've excluded from the code for brevity), and if it encounters another directory, it will call getDirSuccess again. My question is this: how can I know when the entire /sdcard directory has been read? I can't think of a good way of accomplishing this without going through the /sdcard directory more than once. Any ideas are appreciated, and thank you in advance!
+1 on a good question, since I have to do this myself anyway. I would use the old setTimeout trick: once the cancellation no longer occurs, you know you are done and can fire your event; just ensure it's only fired once.
Here's what I mean; I've named the variables long simply to be more readable (not my style)...
// create timeout var outside your "readerSuccess" function scope
var readerTimeout = null, millisecondsBetweenReadSuccess = 100;
function readerSuccess(entries) {
    var i = 0, len = entries.length;
    for (; i < len; i++) {
        if (entries[i].isFile) {
            numFiles++;
            entries[i].file(fileSuccess,fail);
        } else if (entries[i].isDirectory) {
            numDirs++;
            getDirSuccess(entries[i]);
        }
        if (readerTimeout) {
            window.clearTimeout(readerTimeout);
        }
    }
    if (readerTimeout) {
        window.clearTimeout(readerTimeout);
    }
    readerTimeout = window.setTimeout(weAreDone, millisecondsBetweenReadSuccess);
}
// additional event to call when totally done
function weAreDone() {
    // do something
}
So the logic in this is that you keep cancelling the "weAreDone" function from being called while you are still reading through entries. Not sure if this is the best way or the most efficient, but it would not result in more than one loop, given an appropriate "millisecondsBetweenReadSuccess".
Instead of using a setTimeout, which can fail if you have a very slow device, you can use a counter to see how many callbacks still need to be called. If the counter reaches zero, you're all done :)
This is the recursive code:
var fileSync = new function(){
    this.filesystem = null;
    this.getFileSystem = function(callback){
        var rfs = window.requestFileSystem || window.webkitRequestFileSystem;
        rfs(
            1 // '1' means PERSISTENT
            , 0 // '0' is about max. storage size: 0==we don't know yet
            , function(filesystem){
                fileSync.filesystem = filesystem;
                callback(filesystem);
            }
            , function(e){
                alert('An error occured while requesting the fileSystem:\n\n'+ e.message);
            }
        );
    }
    this.readFilesFromReader = function(reader, callback, recurse, recurseFinishedCallback, recurseCounter)
    {
        if (recurse && !recurseCounter)
            recurseCounter = [1];
        reader.readEntries(function(res){
            callback(res);
            if (recurse)
            {
                for (var i=0; i<res.length; i++) {
                    /* only handle directories */
                    if (res[i].isDirectory == true)
                    {
                        recurseCounter[0]++;
                        fileSync.readFilesFromReader(res[i].createReader(), callback, recurse, recurseFinishedCallback, recurseCounter);
                    }
                }
            }
            /* W3C specs say: Continue calling readEntries() until an empty array is returned.
             * You have to do this because the API might not return all entries in a single call.
             * But... Phonegap doesn't seem to accommodate this, and instead always returns the same dir-entries... OMG, an infinite loop is created :-/
             */
            //if (res.length)
            //    fileSync.readFilesFromReader(reader, callback, recurse, recurseFinishedCallback, recurseCounter);
            //else
            if (recurse && --recurseCounter[0] == 0)
            {
                recurseFinishedCallback();
            }
        }
        , function(e){
            fileSync.onError(e);
            if (recurse && --recurseCounter[0] == 0)
                recurseFinishedCallback();
        });
    };
    this.onError = function(e){
        utils.log('onError in fileSync: ' + JSON.stringify(e));
        if (utils.isDebugEnvironment())
            alert('onError in fileSync: '+JSON.stringify(e));
    }
}
var utils = new function(){
    this.log = function(){
        for (var i=0;i<arguments.length;i++)
            console.log(arguments[i]);
    }
    this.isDebugEnvironment = function(){ return true }// simplified
}
Example code to test this:
var myFiles = [];
var directoryCount = 0;
window.onerror = function(){ alert('window.onerror=\n\n' + Array.prototype.slice.call(arguments).join('\n')) }
var gotFilesCallback = function(entries)
{
    for (var i=0;i<entries.length;i++)
    {
        if (entries[i].isFile == true)
            myFiles.push(entries[i].fullPath)
        else
            ++directoryCount;
    }
}
var allDoneCallback = function(){
    alert('All files and directories were read.\nWe found '+myFiles.length+' files and '+directoryCount+' directories, shown on-screen now...');
    var div = document.createElement('div');
    div.innerHTML = '<div style="border: 1px solid red; position: absolute;top:10px;left:10%;width:80%; background: #eee;">'
        + '<b>Filesystem root:</b><i>' + fileSync.filesystem.root.fullPath + '</i><br><br>'
        + myFiles.join('<br>').split(fileSync.filesystem.root.fullPath).join('')
        + '</div>';
    document.body.appendChild(div);
}
/* on-device-ready / on-load, get the filesystem, and start reading files */
var docReadyEvent = window.cordova ? 'deviceready':'load';
document.addEventListener(docReadyEvent, function()
{
    fileSync.getFileSystem(function(filesystem){
        var rootDirReader = filesystem.root.createReader();
        fileSync.readFilesFromReader(rootDirReader, gotFilesCallback, true, allDoneCallback);
    })
}, false);