Show console messages on Android browser - javascript

I do web development with an Android (not rooted) phone and look for a method to show the browser (Chrome or Firefox) console messages.
Neither Android Chrome nor Firefox has a built-in web inspector/console, and I haven't found a working Firefox add-on.
Update: I can't connect my phone to a computer (ADB, Chrome remote tool... are unavailable).
Can anybody suggest a viable solution?

Try https://github.com/liriliri/eruda, very good approximation of Dev Tools.
All you need to do is add this snippet on top of the page:
<script src="//cdn.jsdelivr.net/npm/eruda"></script>
<script>eruda.init();</script>

Edit
A practical solution can be found at https://stackoverflow.com/a/60106504/1025638
Original (self) answer
I didn't find a solution that "just works" for my problem, so I made a short tool to send the logs and errors from the browser to a backend server.
It uses a Proxy around window.console object and implements the function window.onerror to post the messages to the server.
I structured the code to use it as an expressjs middleware for reusability.
It isn't perfect and it may not be compatible with all browsers, but it really helps when a browser has no dev tools.
Anyone can test it through repl.it.
// Use this module as middleware with expressjs compatible server:
//
// In the server:
// consoleWrapperMiddleware(basePath, app)
// basePath: URL path to send browser messages
// app: expressjs application reference
// return: nothing
//
// In the html page:
// <script src="basePath" />
// basePath: URL path to send browser messages
// Wraps the browser console and window.onerror so that every log call and
// uncaught error is also POSTed as JSON to `serverUrl`.
//
// NOTE: this function is serialized with Function.prototype.toString() and
// shipped to the browser (see consoleWrapperMiddleware), so it must stay
// fully self-contained — no references to outer-scope names.
//
// oldConsole: the original window.console object
// oldOnerror: the previously installed window.onerror handler (may be unset)
// serverUrl:  absolute URL that accepts POSTed JSON log entries
// return:     [wrappedConsole, wrappedOnerror]
function consoleWrapper(oldConsole, oldOnerror, serverUrl) {
  // Fire-and-forget POST of a single log entry to the backend.
  function _post(log) {
    const req = new XMLHttpRequest()
    req.open('POST', serverUrl, true)
    req.setRequestHeader('Content-Type', 'application/json')
    req.send(JSON.stringify(log))
  }
  const console = new Proxy(oldConsole, {
    get: (target, propKey, receiver) => {
      const origMethod = target[propKey]
      // Non-function properties (e.g. console.memory in Chrome) pass through
      // untouched; the original code wrapped every access in a function,
      // which broke plain property reads.
      if (origMethod !== undefined && typeof origMethod !== 'function') {
        return origMethod
      }
      return function (...args) {
        if (origMethod === undefined) {
          const message = 'unknown console method: ' + propKey
          _post({ level: 'wrap', message: [message] })
          return message
        }
        // Forward to the real console first, then mirror to the server.
        // Use propKey for the level: origMethod.name can differ for
        // aliased/bound methods.
        const result = origMethod.apply(this, args)
        _post({ level: propKey, message: args })
        return result
      }
    }
  })
  const onerror = function (msg, url, line, col) {
    // Chain to any pre-existing handler, forwarding the actual argument
    // list (the old code passed the whole `arguments` object as a single
    // first parameter, so the chained handler saw garbage).
    if (typeof oldOnerror === 'function')
      oldOnerror.apply(this, arguments)
    const content = [ msg, url + ':' + line + ':' + col ]
    _post({ level: 'js-err', message: content })
  }
  return [ console, onerror ]
}
// Use this module as middleware with an expressjs-compatible server:
//
// In the server:
//   consoleWrapperMiddleware(basePath, app)
//   basePath: URL path used both to serve the wrapper and receive messages
//   app:      expressjs application reference
//   return:   nothing
//
// In the html page:
//   <script src="basePath" />
function consoleWrapperMiddleware(basePath, app) {
  // GET: serve a script that installs the console/onerror wrapper in the page.
  // consoleWrapper is stringified so the browser gets its full source.
  app.get(basePath, (req, res) => {
    const target = 'location.protocol.concat("//").concat(location.host).concat("' + basePath + '")'
    res.status(200).send('[ window.console, window.onerror ] = ' + consoleWrapper.toString() + '(window.console, window.onerror, ' + target + ')')
    console.log('Console wrapper sent')
  })
  // POST: receive one JSON log entry and echo it on the server console.
  app.post(basePath, (req, res) => {
    let body = []
    req.on('data', (chunk) => {
      body.push(chunk)
    }).on('end', () => {
      body = Buffer.concat(body).toString()
      try {
        const logMsg = JSON.parse(body)
        console.log('[' + logMsg.level + ']', ...logMsg.message)
        res.writeHead(200)
      } catch (e) {
        // A malformed payload must not crash the log server (the original
        // parsed unguarded); answer 400 and keep serving.
        console.log('[wrap] invalid log payload:', body)
        res.writeHead(400)
      }
      res.end()
    })
  })
  console.log('Log server listening from', basePath)
}
// Guarded so the file can also be loaded as an ES module.
if (typeof module !== 'undefined') module.exports = consoleWrapperMiddleware

Related

NodeJS Requested data leaks across HTTP requests

I have a simple webserver created in Express.js. The server serves files that are dynamically created by processing data from a third-party API.
Here is my webserver code, it requests builder.js to build the file, which requests, receives, processes and returns data from a third-party API by awaiting the response of a promisified oauth request as needed. The builder will at least call the API two or more times to create a complete file ready for serving.
// --- Question code (as posted): Express server whose /file responses appear
// --- to leak across parallel requests.
const express = require('express');
const app = express();
const builder = require('./builder.js');
// NOTE(review): requestID is module-level shared mutable state.  Each handler
// increments it and keeps re-reading it after an `await`, so concurrent
// requests observe each other's values — this is the logging mixup described
// below; the accepted fix captures a local copy per request.
let requestID = 0;
app.get('/', function (req, res) {
requestID++;
console.log(`tab: ${requestID}<`);
res.set('Content-Type', 'text/plain; charset=UTF-8')
res.status(200).send('Hello World');
console.log(`tab: ${requestID}>`);
});
app.get('/file', async function (req, res) {
requestID++;
console.log(`tab: ${requestID}<`);
if (req.query["id"] != undefined) {
let URLparams = new URLSearchParams(req.query);
console.log(`tab: ${requestID}, requested id: ${URLparams.get("id")}`);
// NOTE(review): this await suspends the handler; by the time it resumes,
// requestID may have been bumped by another request.  It also sits OUTSIDE
// the try below, so a rejected buildFile() escapes as an unhandled rejection
// and no response is ever sent.
let output = await builder.buildFile(URLparams);
try {
// NOTE(review): "q" is presumably a typo for "id" (the key checked above),
// so this log likely prints `undefined` — verify against the real code.
console.log(`tab: ${requestID}, requested id: ${URLparams.get("q")}, served ${getIDfromOutput(output)}`);
res.set('Content-Type', 'application/rss+xml; charset=UTF-8')
res.status(200).send(output);
} catch(e) {
console.log(`tab: ${requestID}, ${e}`);
if (e instanceof String) { res.send(JSON.stringify(JSON.parse(e), null, 3)); }
else { res.send(JSON.stringify(e, null, 3)); }
};
} else {
res.set('Content-Type', 'text/plain; charset=UTF-8')
res.status(404)
.send("404: Page not found.");
}
console.log(`tab: ${requestID}>`);
});
app.listen(3000, "localhost");
The code works as intended when making requests to the /file one at a time.
//1 tab loaded
tab: 1<
tab: 1, requested: 1331444331778101248
tab: 1, requested: 1331444331778101248, loaded 1331444331778101248
tab: 1>
However, when the endpoint is requested for multiple unique requests at the same time (opening multiple tabs at the same time or running parallel wget commands), the server either responds correctly in some cases, but it mostly responds with the same file served previously.
// 5 unique tabs loaded at the same time: 1551641441679597569, 1448115610173558787, 1370689539505860613, 1328121208022446086, 1509637745140019212
tab: 1<
tab: 1, requested: 1551641441679597569
tab: 2<
tab: 2, requested: 1448115610173558787
tab: 2, requested: 1551641441679597569, loaded 1551641441679597569
tab: 2>
tab: 3<
tab: 3, requested: 1370689539505860613
tab: 3, requested: 1448115610173558787, loaded 1448115610173558787
tab: 3>
tab: 3, requested: 1370689539505860613, loaded 1370689539505860613
tab: 3>
The result of these simultaneous requests is that tabs 1-4 load fine, but tab 5 shows the output of tab 4. The console logging also doesn't clearly reveal the issue, but the behaviour is definitely different from a normal, one-off request.
I do not want this to happen as I fear that this may happen in production, and I do not want the outputs to be leaked across requests. However, I have no idea what is causing this or how to investigate to fix this issue. The code works fine when builder.buildFile() has to make one API call to the third-party, but I am always making 2 or more calls.
As requested, here is the stripped version of the buildFile function. When serving directly from the API through builder.getData(url), the response for each request is unique and responses do not cross over into other requests. It happens only when it goes through builder.buildFile(). I feel like the issue is with the way the promises are handled, but I am not sure.
// --- Question code (as posted): builder.js — fetches data from a third-party
// --- API via OAuth 1.0A and assembles an XML file from it.
const OAuth = require('oauth');
const { promisify } = require('util');
require('dotenv').config();
// Single OAuth client shared by every request.
let oauth = getOAuth();
module.exports = {
// Builds the complete XML file for the requested id (metadata fetch first,
// then the full-data fetch inside fileBuilder).
buildFile: async function (urlParam) { //URLSearchParams(req.query)
let id = urlParam.get("id");
try {
let metadata = await getData(`http://API/id=${id}?type=metadata`);
let XMLFile = await fileBuilder(metadata);
return XMLFile;
} catch (e) {
console.log("Err: ");
console.log(e);
return Promise.reject(e);
}
},
// Thin passthrough used when serving raw API data directly.
getData: async function (url) {
return await getData(url);
}
}
// NOTE(review): `id` is not defined in this scope (it only exists inside
// buildFile) and the template starts with a stray `$` — this snippet is a
// stripped-down paste, so the real code presumably differs here.
async function fileBuilder(metadata) {
let response = await getData(`$http://API/id=${id}?type=fulldata`);
// NOTE(review): if extendData() is async, this assignment stores a pending
// Promise — a missing `await` here could make later requests read
// stale/shared data.  TODO confirm against the real implementation.
response = extendData(response); //calls await getData() once more to fill in any gaps in the initial response
let xml = ``;
/* Build XMl file as a string, appending as the function processes the data*/
return xml;
}
// Builds the OAuth 1.0A client from environment-supplied credentials.
function getOAuth() {
return new OAuth.OAuth(
'https://API/request_token',
'https://API/access_token',
process.env.api_key,
process.env.api_secret,
'1.0A', null, 'HMAC-SHA1'
);
}
// Promisified OAuth GET; resolves with the parsed JSON response body.
async function getData(url) {
const get = promisify(oauth.get.bind(oauth));
let body;
try {
body = await get(
url,
process.env.access_key,
process.env.access_secret
);
} catch (e) {
console.log("getData failed: \n" + JSON.stringify(e));
return Promise.reject(e);
}
return JSON.parse(body);
}
You see the mixup in the console because you are using the shared requestId after it was changed. In order to avoid this you need to fix it at the beginning of the function.
The problem you have with the wrong file being served might come from the buildFile function since I can't locate it in this code fragment.
// Answer code: capture a request-local copy of the shared counter so logs
// stay coherent across concurrent requests.
app.get('/file', async (req, res) => {
  // The shared counter keeps changing while this handler is suspended at
  // `await`; localReqId is fixed for the lifetime of this request.
  const localReqId = requestID++;
  console.log(`tab: ${localReqId}<`);
  if (req.query.id !== undefined) {
    const URLparams = new URLSearchParams(req.query);
    console.log(`tab: ${localReqId}, requested id: ${URLparams.get('id')}`);
    try {
      // The await belongs INSIDE the try: previously a rejected buildFile()
      // escaped as an unhandled rejection and the client never got a reply.
      const output = await builder.buildFile(URLparams);
      // Log the 'id' key (the query key actually checked above; 'q' was a typo).
      console.log(`tab: ${localReqId}, requested id: ${URLparams.get('id')}, served ${getIDfromOutput(output)}`);
      res.set('Content-Type', 'application/rss+xml; charset=UTF-8');
      res.status(200).send(output);
    } catch (e) {
      console.log(`tab: ${localReqId}, ${e}`);
      if (e instanceof String) {
        res.send(JSON.stringify(JSON.parse(e), null, 3));
      } else {
        res.send(JSON.stringify(e, null, 3));
      }
    }
  } else {
    res.set('Content-Type', 'text/plain; charset=UTF-8');
    res.status(404)
      .send('404: Page not found.');
  }
  console.log(`tab: ${localReqId}>`);
});

ltijs: Deep Linking hanging when sending back the data to the LMS

I'm losing sleep trying to make ltijs work with Deep Linking.
I've built a simple client that registers a tool against an LMS platform. I'm able to setup the tool on the platform (Moodle and Canvas) and perform launches.
The issue I've is when trying to add "Deep Linking" to my tool. Following the directions in the ltjs docs (https://cvmcosta.me/ltijs/#/deeplinking) I've setup an "lti.onDeepLinking()" handler, that redirects to an html page where some content can be selected.
I then post this form to my own tool express server, where its data gets encoded and an auto-submit form is generated and sent back to the client (and then auto-submitted to the LMS).
This is where things "hang". I clearly see the POST request being sent over to the deep_link_redirect_url as requested by the original deep link request message, but the LMS then just "hangs" there until it eventually times out (I guess) and shows a generic error page...
I'm pretty sure I'm missing some vital piece of the puzzle here but I've no clue on what it could be..
// --- Question code (as posted): ltijs tool provider with a Deep Linking flow
// --- that hangs after the auto-submitting form is returned to the LMS.
const path = require("path");
const lti = require("ltijs").Provider;
const ltiKey = "myverylongandspecialltikeyfortesting";
// NOTE(review): implicit global — missing const/let.
toolUrl = "http://192.168.1.25:3010";
// setup provider
lti.setup(ltiKey, {
url: "mongodb://127.0.0.1:3001/lticlient",
}, {
// options
appRoute: "/lti/launch",
loginRoute: "/lti/login",
cookies: {
secure: false, // Set secure to true if the testing platform is in a different domain and https is being used
sameSite: "None" // set it to "None" if the testing platform is in a different domain and https is being used
},
devMode: true, // set it to false when in production and using https,
keysetRoute: "/lti/keys",
});
// set the lti launch callback
lti.onConnect((token, req, res) => {
console.log("IDTOKEN", token);
return res.send("LTI TOOL LAUNCHED!");
});
// Deep Linking request from the LMS: hand off to the selection page.
lti.onDeepLinking((token, req, res) => {
console.log("DEEP LINKING", token);
// Call redirect function to deep linking view
lti.redirect(res, '/deeplink')
})
// GET request to show the selection page
lti.app.get("/deeplink", async (req, res) => {
res.sendFile(path.join(__dirname, '/public/select.html'))
});
// POST submit from selection page with selected content item
// NOTE(review): this handler relies on res.locals.token being present, i.e.
// on the POST still carrying a valid ltik/session — verify the form posts it.
lti.app.post("/deeplink", async (req, res) => {
const resource = req.body
const items = [
{
type: 'ltiResourceLink',
title: resource.product,
url: `${toolUrl}/lti/launch`,
custom: {
product: resource.product
}
}
]
const form = await lti.DeepLinking.createDeepLinkingForm(res.locals.token, items, { message: 'Successfully registered resource!' })
console.log("RETURNING SELF-SUBMITTING FORM", form);
return res.send(form);
})
// Static platform configuration for the test Moodle instance.
const getPlatforms = () => {
return [
{
url: "http://192.168.1.239",
name: "MoodleClient1",
clientId: "client-id-provided-by-Moodle",
authenticationEndpoint: "http://192.168.1.239/mod/lti/auth.php",
accesstokenEndpoint: "http://192.168.1.239/mod/lti/token.php",
authConfig: { method: 'JWK_SET', key: "http://192.168.1.239/mod/lti/certs.php" }
}
];
}
// NOTE(review): forEach ignores the async callback's promise, so this
// function resolves before any platform is actually registered.  Use
// `for (const cfg of platforms) { await ... }` or Promise.all instead.
const registerPlatforms = async () => {
const platforms = getPlatforms();
platforms.forEach(async (cfg) => {
console.log(`Registering platform ${cfg.name}`);
await lti.deletePlatform(cfg.url, cfg.clientId);
await lti.registerPlatform(cfg);
const platform = await lti.getPlatform(cfg.url, cfg.clientId);
await platform.platformActive(true)
});
}
const setup = async () => {
await lti.deploy({ port: 3010 });
// NOTE(review): not awaited — combined with the forEach issue above, the
// log below prints before registration has actually completed.
registerPlatforms();
console.log("platforms registered and active");
}
setup();

How to resolve Ripple test server connection error?

I am persistently getting following error on connecting to test server using RippleAPI for Javascript:
[ConnectionError(Error: connect() timed out after 2000 ms. If your internet connection is working, the rippled server may be blocked or inaccessible.)]
However, if I try to get balance from curl, it works:
curl 'https://testnet.data.api.ripple.com/v2/accounts/rwAVpkGNU9Shn63EpFq7ju1tr89SsSBwHz/balances?currency=XRP'
Code snippet below:
'use strict';
// Question code: connect to the XRP testnet over WebSocket with ripple-lib,
// fetch one account's info, then disconnect.
const RippleAPI = require('ripple-lib').RippleAPI;
const api = new RippleAPI({
server: 'wss://s.altnet.rippletest.net:51233' // Public rippled server
});
// NOTE(review): the error in the question mentions a 2000 ms connect timeout;
// a slow WebSocket handshake will trip it even though plain HTTPS requests to
// the data API succeed — the curl check exercises a different endpoint.
api.connect().then(() => {
/* begin custom code ------------------------------------ */
const myAddress = 'rwAVpkGNU9Shn63EpFq7ju1tr89SsSBwHz';
console.log('getting account info for', myAddress);
return api.getAccountInfo(myAddress);
}).then(info => {
console.log(info);
console.log('getAccountInfo done');
/* end custom code -------------------------------------- */
}).then(() => {
// Disconnect regardless of the lookup result reaching here.
return api.disconnect();
}).then(() => {
console.log('done and disconnected.');
}).catch(console.error);
The timeout parameter doesn't seem to work even when set; instead, use:
api.connection._config.connectionTimeout = 3e4;

Electron interceptBufferProtocol proxying requests does not work

Using interceptBufferProtocol, I can successfully intercept the loadURL event to https://google.com mainWindow.loadURL("https://google.com/"); and replace it with my custom HTML code. The HTML code has an iframe which I am trying to proxy. This can usually be achieved by setting the electron browserWindow proxy but in my case, it fails to work. I set the proxy with the following code:
// Fragment (as posted): route this BrowserWindow session's traffic through an
// HTTP proxy.  NOTE(review): setProxy completes asynchronously — work that
// depends on the proxy being active should wait for this callback.
mainWindow.webContents.session.setProxy({
proxyRules: "http://" + proxy
}, () => {
console.log('Proxy: http://' + proxy)
})
Intercept url code:
// Intercept every https:// load in this session: serve a local HTML file for
// the app-shell URL, and manually re-issue all other requests with Electron's
// net module, answering with the buffered response body.
ses.protocol.interceptBufferProtocol('https', (req, callback) => {
// Log how Chromium would resolve the proxy for this URL (diagnostic only).
ses.resolveProxy(req.url, (x) => {
console.log(x)
})
if (req.url == "https://google.com/") {
// NOTE(review): `err` is ignored — if the read fails, `html` is undefined
// and Buffer.from() throws.  Handle the error explicitly.
fs.readFile(path.join(__dirname, "/../../path/stuff.html"), 'utf8', function(err, html) {
callback(Buffer.from(html, 'utf8'));
});
} else {
// NOTE(review): requests issued with net.request are not guaranteed to use
// the proxy configured via session.setProxy (net manages its own
// session/proxy selection) — a likely reason the traffic shows the local
// IP.  Pass the session/partition in the net.request options; TODO confirm
// against the Electron net documentation.
const request = net.request(req)
request.on('response', res => {
const chunks = []
res.on('data', chunk => {
chunks.push(Buffer.from(chunk))
})
res.on('end', async () => {
const file = Buffer.concat(chunks)
callback(file)
})
})
// NOTE(review): no 'error' handler on `request` — a failed request never
// invokes callback(), leaving the intercepted load hanging.
if (req.uploadData) {
// Replay any request body (raw bytes or file-backed parts).
req.uploadData.forEach(part => {
if (part.bytes) {
request.write(part.bytes)
} else if (part.file) {
request.write(fs.readFileSync(part.file))
}
})
}
request.end()
}
})
However, no matter what I do, it appears to use my local IP instead of a proxy. Do I have any options?
The code runs fine without a proxy. I'm trying to run it with one. The problem lies within the .interceptBufferProtocol() function. Any help would be appreciated!

CORS requests in electron js

I am developing a react application (chat app) using electron js (for desktops) I want to make Http requests to certain websites, to get URL metadata (opengraph, schema.org, twitterCard, etc). This cannot be done without disabling the webSecurity in electronJS.
a) is it a good idea to disable webSecurity in electron JS ? since users can send others pretty much anything ?
b) I have managed to achieve this using the electron net package. I used it in react (renderer process) and it works smoothly, with no need to disable webSecurity. However, when an invalid URL is provided it throws an exception in the main process (net::ERR_NAME_NOT_RESOLVED) which pops up an error dialog box. Is there a way to catch this exception in the renderer process?
below is how I used electron net package.
const {net} = window.require('electron').remote
// Fetches `url` via Electron's net module and resolves with the parsed page
// metadata (ParseMeta).  Rejects when the status is not 200 or when parsing
// fails.
function ScrapeMeta(url) {
  return new Promise((resolve, reject) => {
    const request = net.request({
      url: url,
      timeout: 2000
    });
    request.on('response', (response) => {
      let payload = '';
      response.on('data', (chunk) => {
        payload += chunk;
      });
      response.on('end', () => {
        // Anything other than 200 is treated as a failure.
        if (response.statusCode != 200) {
          reject("request failed with " + response.statusCode);
          return;
        }
        ParseMeta(payload).then(resolve, reject);
      });
    });
    request.end();
  });
}
What is the best way to achieve this. thanks.

Categories

Resources