I am using WebDriverIO for the UI Automation and I am trying to automate for a login page.
When I click the Login button, *.com/user/login request will get trigger in the background. Here, I want to catch that *.com/user/login call response, because I have to get a token from the response to proceed to next test case.
Can anyone please help me?
Thanks in advance!
Capture network traffic using BrowserMob Proxy, a Selenium server and the code below. For more details, refer to this other answer of mine. Then parse stuff.har to get the desired result. The HAR file is in JSON format.
// Capture a HAR for the "webdriver.io" domain while doSeleniumStuff drives
// the browser through the BrowserMob proxy, then persist the capture to disk.
var Proxy = require('browsermob-proxy').Proxy;
var webdriverio = require("./node_modules/webdriverio/");
var fs = require('fs');
var proxy = new Proxy();

proxy.cbHAR('webdriver.io', doSeleniumStuff, function (err, harData) {
    if (err) {
        // Proxy or capture failure: report it and skip writing the file.
        console.error('ERR: ' + err);
        return;
    }
    // The HAR is JSON text; write it out for later parsing.
    fs.writeFileSync('stuff.har', harData, 'utf8');
});
/**
 * Drive a Selenium session routed through the given HTTP proxy (so its
 * traffic is captured), then invoke cb once the session has ended.
 * @param {string} proxy - host:port of the BrowserMob proxy to route through
 * @param {Function} cb - called after the browser session is closed
 */
function doSeleniumStuff(proxy, cb) {
    var capabilities = {
        browserName: 'firefox',
        seleniumProtocol: 'WebDriver',
        proxy: { httpProxy: proxy }
    };
    var client = webdriverio.remote({
        host: 'localhost',
        port: 4444,
        desiredCapabilities: capabilities
    });

    client
        .init()
        .url("http://webdriver.io")
        //.setValue("#yschsp", "javascript")
        //.submitForm("#sf")
        .end()
        .then(cb);
}
Related
I'm new to IBM cloud and I'm trying to build an application where I can write a text, press a button and that text is analysed by the service tone analyser returning a JSON so I can show it.
I have created an instance of said service and I have connected it to my application (a toolchain) using the 'connections' tab on the service.
I also have this code on the app.js file on my app:
// IBM Watson Tone Analyzer client setup (ibm-watson Node.js SDK).
const ToneAnalyzerV3 = require('ibm-watson/tone-analyzer/v3');
const { IamAuthenticator } = require('ibm-watson/auth');
// NOTE(review): the bracketed values below are placeholders, not valid
// JavaScript — they must be replaced with quoted string literals taken from
// the service's credentials page before this code can run.
const toneAnalyzer = new ToneAnalyzerV3({
version: '2019-10-10', // API version date; pins the response format
authenticator: new IamAuthenticator({
apikey: [API key found in service credentials],
}),
url: [API url found in service credentials],
});
// Serve the landing page.
app.get('/', (req, res) => {
  res.render('index');
});

// Analyse the posted payload with Watson Tone Analyzer and return the raw
// result as JSON; any SDK failure is delegated to the Express error handler.
app.post('/api/tone', async (req, res, next) => {
  try {
    const { result } = await toneAnalyzer.tone(req.body);
    res.json(result);
  } catch (error) {
    next(error);
  }
});
The problem is that when I make the following call on my javascript:
// POST the typed text to the tone endpoint and log the analysis result.
$.post("/api/tone", { text: textInput }, (data) => {
  console.log(data);
});
I get the error: 500 (Internal Server Error).
Does anybody know what I am doing wrong?
The issue is that you are sending req.body to be analysed for tone. If you take a look at the API Docs - https://cloud.ibm.com/apidocs/tone-analyzer?code=node#tone - you will see that you only need to send
// Shape expected by toneAnalyzer.tone(): only the text to analyse (wrapped
// in toneInput) plus how that input should be interpreted.
const toneParams = {
toneInput: { 'text': text }, // the raw text to analyse
contentType: 'application/json', // content type of toneInput
};
I doubt very much that req.body has a toneInput field, and if it does have contentType it may not be set to one of the allowable values.
I'm creating a script using Node.js to upload my APK to the Google Play Developer Console via the Publishing API; however, it fails. I don't think anything is wrong with the APK file itself, because it is a valid file — so does anyone have an idea how to solve this?
I also tried a multipart upload, but it returned an error (I will attach it here soon).
Below are the Error message :
Upload successful! Server responded with: {
"error": {
"errors": [
{
"domain": "androidpublisher",
"reason": "apkInvalidFile",
"message": "Invalid APK file."
}
],
"code": 403,
"message": "Invalid APK file."
}
}
My Source Code
// Publishes an APK via the Google Play Publishing API v2 in two steps:
// 1) create an "edit" (a staged change), 2) upload the APK into that edit.
var https = require('https');
var request = require('request');
https.post = require('https-post');
var fs = require('fs');
// OAuth2 access token passed on the command line.
var tokens = process.argv[2];
// NOTE(review): formData is never used anywhere below — dead code.
var formData = {
my_file: fs.createReadStream('example_file.apk')
}
if (tokens != '')
{
var url_create_listing = 'https://www.googleapis.com/androidpublisher/v2/applications/com.example.apps/edits?access_token='+tokens;
request.post( url_create_listing, function (error, response, body) {
var str_body = JSON.parse (body);
var listing_id = str_body.id;
// Upload endpoint scoped to the edit id returned above.
var url_upload = 'https://www.googleapis.com/upload/androidpublisher/v2/applications/com.example.apps/edits/'+listing_id+'/apks?uploadType=media&access_token='+tokens;
var options = {
url: url_upload,
headers: {
'Content-Type': 'application/vnd.android.package-archive'
}
};
// NOTE(review): the pipe direction here is reversed — this pipes the HTTP
// response INTO a read stream, so no APK bytes are ever sent with the POST.
// The file stream should be piped into request.post instead, which is why
// the API answers "Invalid APK file".
request.post( options, function optionalCallback(err, httpResponse, body)
{
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
}).pipe(fs.createReadStream('example_file.apk'));
});
}
I think you have a misunderstanding of how NodeJS streams works.
In the sample code you have shown above, you are trying to pipe the result of request.post into fs.createReadStream, which doesn't make sense — and that is the reason for the error, because you are not actually sending any APK file. Instead, it should be done like the following. (I am not sure how the androidpublisher API works; I assume your sample shows the correct usage.)
// Step 1: create an "edit" (a staged listing change) for the app; the id
// that comes back scopes the subsequent APK upload.
var url_create_listing = 'https://www.googleapis.com/androidpublisher/v2/applications/com.example.apps/edits?access_token='+tokens;
request.post( url_create_listing, function (error, response, body) {
var str_body = JSON.parse (body);
var listing_id = str_body.id;
// Step 2: stream the APK into the upload endpoint for that edit.
var url_upload = 'https://www.googleapis.com/upload/androidpublisher/v2/applications/com.example.apps/edits/'+listing_id+'/apks?uploadType=media&access_token='+tokens;
// request module is capable of auto detecting the Content-Type header
// based on the file extension.
var options = {
url: url_upload
};
// The readable file stream is the SOURCE and the outgoing POST is the
// DESTINATION — this is the correct pipe direction for an upload.
fs.createReadStream('example_file.apk')
.pipe(request.post(options, function optionalCallback(err, httpResponse, body){
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
}));
}); // closes the outer request.post callback (this line was missing)
According to https://developers.google.com/games/services/publishing/ :
The Google Play games services Publishing API allows you to automate
frequent tasks having to do (...)
Are you under the absolute constraint to use JS & node.js for this task ? If not, you may find useful this Google management-tools code example.
In this repository you can find tools to help you publish, manage, and
test your Play Games services apps
In my opinion, it is always better to install Python (and re-use official code) to do this.
(Google Devs apparently prefer python for this kind of task)
Have you looked at fastlane.tools? Full scripting of publishing, uploading, taking screenshots, slack notifications, and a lot more for both Android and iOS.
gun 0.8.8, Node.js-to-Node.js, Node.js-to-browser
I see the following error in browser console:
VM103:161 WebSocket connection to 'wss://127.0.0.1:8080/gun' failed: Error in connection establishment: net::ERR_INSECURE_RESPONSE
VM103:161 WebSocket connection to 'wss://10.42.0.56:8080/gun' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED
And there are no messages on Node.js side.
Source code of my server:
// HTTPS gun relay: generate a one-day self-signed certificate, stand up a
// Hapi server on port 8080 with it, and attach gun to every connection.
const Hapi = require('hapi');
const Gun = require('gun');
const pem = require('pem');

pem.createCertificate({ days: 1, selfSigned: true }, (err, keys) => {
  if (err) {
    throw err;
  }

  const server = new Hapi.Server();

  server.connection({
    port: 8080,
    tls: {
      key: keys.serviceKey,
      cert: keys.certificate
    }
  });

  // gun rides on the raw HTTP listener of each Hapi connection.
  server.connections.forEach((conn) => Gun({ web: conn.listener, file: 'data.json' }));

  server.route({
    method: 'GET',
    path: '/',
    handler(request, reply) {
      reply('Server works!');
    }
  });

  server.start();
});
In order to make gun work with a self-signed certificate you need two things:
Launch the browser ignoring certificate errors. For example, Chrome:
google-chrome --ignore-certificate-errors
Put the following process option in Node.js code
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
or add the environment variable
export NODE_TLS_REJECT_UNAUTHORIZED=0
I'm having a sporadic issue with my Node/loopback server. My setup is as follows:
EDIT: My node version is v4.2.4, I'm running Node on Windows 10 Professional.
Client side:
nw.js client that traverses the local file-system and computes the MD5 value of files.
Using a request.post, the client sends the hash of the file to the server in the format 'fileHash: d41d8cd98f00b204e9800998ecf8427e' (that's just an example hash, I know it's an empty file)
/**
 * POST a file's MD5 hash to the local loopback server's hashcheck endpoint,
 * logging either the transport error or the status code and response body.
 * @param {string} fileHash - hex MD5 digest of the file being checked
 */
function checkHash(fileHash) {
    var requestOptions = {
        url: 'http://localhost:3000/api/checkBoths/hashcheck',
        method: 'POST',
        headers: { 'content-type': 'application/x-www-form-urlencoded' },
        form: { fileHash: fileHash }
    };

    request.post(requestOptions, function (error, response, body) {
        if (error) {
            console.log(error);
            return;
        }
        console.log(response.statusCode, body);
    });
}
Server side:
Node/loopback server running at localhost:3000.
The hashCheck function is meant to read the data that's posted from the client and query a MySQL database which is for a match of the hash.
The response from the server will be in the format goodResult : true if the hash exists in the database or goodResult : false if not.
var request = require('request');
module.exports = function (CheckBoth) {
var goodResult;
CheckBoth.hashCheck = function (fileHash, cb) {
requestGood(fileHash);
function requestGood (fileHash) {
request.get('http://127.0.0.1:3000/api/Goodhashes/' + fileHash + '/exists', function (error, response, body) {
if (!error && response.statusCode == 200) {
goodResult = JSON.parse(body).exists;
}
if (error) {
console.error(error);
}
});
console.log(goodResult);
}
cb( goodResult);
};
CheckBoth.remoteMethod(
'hashCheck',
{
accepts: {arg: 'fileHash', type: 'string'},
returns: [{arg: 'goodResult', type: 'string'}]
}
);
};
Issue:
The server can respond with ~1000 queries before the following appears throughout the responses:
{ [Error: connect ECONNREFUSED 127.0.0.1:3000]
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 3000 }
I've tried putting in different callbacks within the server code but that has not made any difference. I think I should probably throttle the requests to the server but I don't know how to achieve this.
Any help would be greatly appreciated.
The ulimit default for most systems is 1024. See the manual for limits.conf (http://linux.die.net/man/5/limits.conf).
While #migg is correct about ensuring no memory leaks in your application and its process handling tuning the system for high loads is also normal procedure for quite a few applications.
Try this and see if it helps;
$ ulimit -n 65535
Edit: I have not tested this but here is a document from ibm regarding Windows; http://www-01.ibm.com/support/docview.wss?uid=swg21392080
I was asked to create a simple REST API with Node.js and write a script that creates 10000 elements in the database through API calls. I created the server with the Hapi framework. If I send a single or 100 'PUT' requests to the API it creates a new element without problem, but if I try to make 1000 requests or more it won't be able to create all of them, or anything at all. I would like to know what the problem may be — whether I'm not writing the script correctly or the server is the problem. So far I've received 2 errors:
{ [Error: connect ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'connect' }
and (libuv) kqueue(): Too many open files in system
The first one trying to call the api 1000 times and the second one trying with 10000 times
The code of the server is the following
// Hapi "songs" API server: registers the hapi-mongodb plugin, exposes a
// validated POST /songs endpoint, and inserts each song into MongoDB.
var Hapi = require('hapi');
var server = new Hapi.Server();
var joi = require("joi");
// Connection settings consumed by the hapi-mongodb plugin below.
var dbOpts = {
"url" : "mongodb://localhost:27017/songsDB",
"settings" : {
"db" : {
"native_parser" : false
}
}
};
// NOTE(review): plugin registration is asynchronous — the server can start
// accepting requests before this callback fires, so early POSTs may arrive
// before the MongoDB connection is ready.
server.register({
register: require('hapi-mongodb'),
options: dbOpts
},function (err) {
if (err) {
console.error(err);
throw err;
}
});
server.connection({
host: 'localhost',
port: 8080
});
server.route({
method: 'POST',
path: '/songs',
config: {
handler: function (request, reply) {
// Build the document from the already-validated payload.
var newSong = {
name: request.payload.name,
album: request.payload.album,
artist: request.payload.artist
};
var db = request.server.plugins['hapi-mongodb'].db;
// w:1 — wait for the write to be acknowledged before replying.
db.collection('songs').insert(newSong, {w:1}, function (err, doc){
if (err){
return reply(Hapi.error.internal('Internal MongoDB error', err));
}else{
reply(doc);
}
});
},
// Reject any payload missing one of the three required string fields.
validate:{
payload: {
name: joi.string().required(),
album: joi.string().required(),
artist: joi.string().required()
}
}
}
});
server.start(function () {
console.log('Server running at:', server.info.uri);
});
The code for the request is the following
var unirest = require('unirest');
// Fire 10000 POSTs in a tight loop. NOTE(review): all connections are opened
// at once, which can exhaust sockets/file descriptors (ECONNRESET and
// "too many open files"), and the loop may begin before the server's MongoDB
// plugin has finished registering — both symptoms discussed in the answers.
for(var i = 1; i<=10000; i++){
unirest.post('http://localhost:8080/songs')
.header('Accept', 'application/json')
.send({ "name": "song"+i, "artist": "artist"+i, "album":"album"+i})
.end(function (response) {
console.log(response.body);
});
}
If running under OSX, open terminal and then try using:
sudo launchctl limit maxfiles 1000000 1000000
and then try again.
For "too many open files in system", looks like you reach the limit of your system. If you are using Linux, you can do a ulimit -a to display all settings.
There is one which may limit your number of open files.
open files (-n) 1024
Assuming you are on a Mac or Linux, you need to increase the maximum number of open files allowed by the system.
If you insert this into the terminal it will show you what your settings are:
lsof | wc -l
You will see that your 'open files' setting is likely smaller than the number you are trying to work with.
To change this setting, use the following command:
ulimit -n #####
where ##### is some arbitrary number (but higher than what you have).
If you are on a Windows machine, the answer is slightly more complicated. It seems Windows has a per-process limit, which can be modified (though it doesn't sound easy). Look here for some more details:
Windows equivalent of ulimit -n
When I ran the code, the first 11 POSTs would throw errors. Apparently it is because the script begins sending them before the mongodb connection is active. All I did was added a brief timeout to the POSTing to give mongodb a chance to start breathing. When I did that it worked fine. 10000 records upon completion.
All I changed was this:
// Give the server's MongoDB plugin a five-second head start before the
// client begins posting, then create all 10000 songs.
setTimeout(function () {
    for (var index = 1; index <= 10000; index++) {
        unirest
            .post('http://localhost:8080/songs')
            .header('Accept', 'application/json')
            .send({ "name": "song" + index, "artist": "artist" + index, "album": "album" + index })
            .end(function (response) {
                // Intentionally quiet; logging 10000 bodies floods the console.
                //console.log(response.body);
            });
    }
}, 5000);