Node.js: "process out of memory" - javascript

I have the following code which leads to the error: FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - process out of memory
It doesn't make any difference whether I set --max_old_space_size=4096 (or bigger numbers) or not. I have 16 GB RAM on my PC.
System: win10/node.js 6.9.2/mongodb 3.4
I have over 16 000 000 messages in the "chatModel". With a smaller number of messages the code works.
Do you have any suggestions on how to solve the problem or optimize the code?
function sortOutMessages() {
    var atSymbol = "#";
    var rgx = new RegExp("^\\" + atSymbol);
    chatModel.find({messageContent: rgx}, function (err, doc) {
        var docs = doc;
        var docsLength = docs.length;
        for (var i = 0; i < docsLength; i++) {
            var directedMessagesObj = new directedMessagesModel({
                timeStamp: docs[i].timeStamp,
                channelName: docs[i].channelName,
                userName: docs[i].userName,
                userID: docs[i].userID,
                messageContent: docs[i].messageContent,
                messageLength: docs[i].messageLength,
                subscriber: docs[i].subscriber,
                turbo: docs[i].turbo,
                moderator: docs[i].moderator
            });
            directedMessagesObj.save({upsert: true}, function (err) {
                var fs = require('fs');
                if (err) {
                    fs.appendFile("undefinedLog.txt", "error at " + new Date().toLocaleString() + " directedMessagesObj.save " + "\r\n");
                    loggerWinston.warn("error at " + new Date().toLocaleString() + " directedMessagesObj.save " + "\r\n");
                    return console.log(err);
                }
            });
        }
    });
}

You are creating docs.length pending save operations (promises) with this code. What you should do instead is limit how many of them can run at once.
Since what you are effectively trying to write is sequential code, I would advise calling the next save in the callback of the previous one.
If you are after a parallel system, I would create a simple semaphore system.

I advise something like this:
module.exports.asyncEach = function (iterableList, callback, done) {
    var i = -1,
        length = iterableList.length;

    function loop() {
        i++;
        if (i === length) {
            done();
            return;
        }
        callback(iterableList[i], loop);
    }
    loop();
};
Then you call asyncEach and do each save inside its callback, moving on only once the previous save has finished, as shown below.
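For example, a minimal sketch of wiring asyncEach into the save loop from the question (assuming the helper above is exported from a local ./async-helpers module; the field list is abbreviated):

var asyncEach = require('./async-helpers').asyncEach; // hypothetical local module containing the helper above

chatModel.find({messageContent: rgx}, function (err, docs) {
    if (err) return console.log(err);
    asyncEach(docs, function (doc, next) {
        var directedMessagesObj = new directedMessagesModel({
            timeStamp: doc.timeStamp,
            messageContent: doc.messageContent
            // ...remaining fields as in the question
        });
        directedMessagesObj.save(function (saveErr) {
            if (saveErr) loggerWinston.warn("directedMessagesObj.save failed: " + saveErr);
            next(); // start the next save only after this one has finished
        });
    }, function () {
        console.log('all messages processed');
    });
});

Note that find() still loads every matching document into memory at once, so this mainly limits concurrent saves; the cursor-based approach below addresses the loading side as well.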

Thank you all for your answers! I decided to go a different way and it works for now:
var cursor = chatModel.find({messageContent: rgx}).cursor();
cursor.on('data', function (doc) {
    var directedMessagesObj = new directedMessagesModel({
        timeStamp: doc.timeStamp,
        channelName: doc.channelName,
        userName: doc.userName,
        userID: doc.userID,
        messageContent: doc.messageContent,
        messageLength: doc.messageLength,
        subscriber: doc.subscriber,
        turbo: doc.turbo,
        moderator: doc.moderator
    });
    directedMessagesObj.save({upsert: true}, function (err) {
        var fs = require('fs');
        if (err) {
            fs.appendFile("undefinedLog.txt", "error at " + new Date().toLocaleString() + " directedMessagesObj.save " + "\r\n");
            loggerWinston.warn("error at " + new Date().toLocaleString() + " directedMessagesObj.save " + "\r\n");
            return console.log(err);
        }
    });
});
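One caveat with the 'data' handler above: saves are started as fast as the cursor emits documents, so with millions of matches the pending writes can still pile up. A minimal back-pressure sketch, assuming the Mongoose query cursor implements the standard readable-stream pause()/resume() interface (field list abbreviated):

var cursor = chatModel.find({messageContent: rgx}).cursor();

cursor.on('data', function (doc) {
    cursor.pause(); // stop emitting documents while this save is in flight
    var directedMessagesObj = new directedMessagesModel({
        timeStamp: doc.timeStamp,
        messageContent: doc.messageContent
        // ...remaining fields as above
    });
    directedMessagesObj.save(function (err) {
        if (err) loggerWinston.warn("directedMessagesObj.save failed: " + err);
        cursor.resume(); // ask for the next document only once the save has finished
    });
});

cursor.on('end', function () {
    console.log('all matching messages copied');
});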

Related

Promises problem: Trying to wrap MySQL queries to use them in Node.js / Express

My goal is to wrap MySQL queries, pass the parameters to a function and another function does the MySQL job, returning the results.
Here's my code so far:
//mysql lib
var mysql = require('mysql');

//database credentials
exports.pool = mysql.createPool({
    connectionLimit: 50,
    host: 'localhost',
    user: 'root',
    password: 'password',
    database: '_app',
    debug: false
});

//my wrapper =(
var returnResultset = exports.returnResultset = function (qry) {
    return new Promise(function (resolve, reject) {
        try {
            mysql_.pool.getConnection(function (err, connection) {
                if (err) {
                    console.log("Error on function returnResultset - MYSQL ERROR: " + err);
                    return reject(err);
                }
                connection.query(qry, [], function (error, results, fields) {
                    connection.release();
                    if (error) {
                        console.log("Error on function returnResultset - MYSQL ERROR: " + error);
                        return reject(error);
                    }
                    return resolve(results);
                });
            });
        }
        catch (e) {
            console.log('error:' + e);
        }
    });
};

//wrapper function for testing purposes
var selectOneField = exports.selectOneField = function (tbl, field, pk, pkval) {
    var qry_ = "SELECT " + field + " FROM " + tbl + " WHERE " + pk + " = '" + pkval + "'";
    returnResultset(qry_).then(function (results) {
        return results;
    }, function (error) {
        console.log("Error: " + error);
    });
};

//...and on another page I want to be able to receive the results from the function above:
var isExpired = exports.isExpired = function (cod) {
    var rtf = db_.selectOneField('view_expiredusers', 'cod', 'cod', cod);
    console.log(rtf);
    return rtf;
};
The code above returns undefined. I can't get this function to work properly.
I have tried console.log(results), and the query itself works like a charm. The only thing I can't get to work is catching the result from an external function.
Any thoughts? Thanks in advance!
You should return the promise and chain it inside the isExpired function.
//wrapper function for testing purposes
var selectOneField = exports.selectOneField = function (tbl, field, pk, pkval) {
    var qry_ = "SELECT " + field + " FROM " + tbl + " WHERE " + pk + " = '" + pkval + "'";
    return returnResultset(qry_);
};

//...and on another page I want to be able to receive the results from the function above:
var isExpired = exports.isExpired = function (cod) {
    return db_.selectOneField('view_expiredusers', 'cod', 'cod', cod);
};
When you call isExpired in other files, you should use the then method of the returned promise to get the results. Do it as follows:
var cod_customer = 1;
var isexpired;

isExpired(cod_customer).then(function (results) {
    isexpired = results;
    console.log(isexpired);
}, function (error) {
    console.log("Error: " + error);
});
You are not returning the promise in the selectOneField function; it must return the promise. Also, you can't simply do
rtf = db_.selectOneField('view_expiredusers', 'cod', 'cod', cod);
You will have to use async/await or then (an async/await sketch also follows after the example below).
It must be handled this way:
//wrapper function for testing purposes
var selectOneField = exports.selectOneField = function (tbl, field, pk, pkval) {
    var qry_ = "SELECT " + field + " FROM " + tbl + " WHERE " + pk + " = '" + pkval + "'";
    return returnResultset(qry_).then(function (results) {
        return results;
    }).catch(function (error) {
        console.log("Error: " + error);
    });
};

//...and on another page I want to be able to receive the results from the function above:
var isExpired = exports.isExpired = function (cod) {
    return db_.selectOneField('view_expiredusers', 'cod', 'cod', cod).then(rtf => {
        console.log(rtf);
        return rtf;
    });
};
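Since async/await was mentioned, roughly the same flow written that way might look like this (a sketch, assuming a Node version that supports async/await and that db_ is the module exporting selectOneField):

// isExpired awaits the promise returned by selectOneField
var isExpired = exports.isExpired = async function (cod) {
    try {
        var rtf = await db_.selectOneField('view_expiredusers', 'cod', 'cod', cod);
        console.log(rtf);
        return rtf;
    } catch (error) {
        console.log("Error: " + error);
    }
};

// the caller still has to await (or .then) the result
async function checkCustomer(cod) {
    var expired = await isExpired(cod);
    console.log(expired);
}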

NJS-024: memory allocation failed in OracleDB - Nodejs

I am trying to run a query using OracleDB with Nodejs to get the view populated in the UI but I get a NJS-024: memory allocation failed error. Can someone help me out? The view contains 120 columns in total and when I query the view in SQL Developer, it works just fine.
ConnectionPool.js:
var path = require('path');
var oracledb = require('oracledb');
var poolMap = {};
var logger = require(path.join(global.root + '/app/util/logger.js'))();

function createPool(poolName, config, callback) {
    oracledb.createPool(
        config,
        function (err, p) {
            if (err) {
                logger.error(err);
                return;
            }
            poolMap[poolName] = p;
            callback(poolMap[poolName]);
        }
    );
}

function getPool(poolName) {
    return poolMap[poolName];
}

module.exports = {
    createPool: createPool,
    getPool: getPool
};
This is my poolAttributes:
var pool;

oracledb.prefetchRows = 10000;
oracledb.maxRows = 400000;

var poolAttrs = {
    user: dbcfg.username,
    password: dbcfg.password,
    connectString: dbcfg.connectionString,
    connectionClass: 'Report API',
    poolMin: 3,
    poolMax: 10,
    poolIncrement: 2,
    poolTimeout: 600 //seconds
};

connectionPool.createPool("Reports", poolAttrs, function (connPool) {
    pool = connPool;
    logger.info("Pool created by reports.");
});
This is my code:
router.post('/report/', jsonParser, function (req, res) {
    var data = req.body,
        startRow = data.startRow,
        numRows = data.numRows,
        sortCol = data.sortCol,
        sortDir = data.sortDir;

    var countQuery = 'SELECT COUNT(*) ' +
                     'FROM this_view ';
    var query = 'SELECT * ' +
                'FROM this_view';

    var seg,
        orderBy,
        offset;

    orderBy = ' ORDER BY UPPER(' + sortCol + ') ' + sortDir;
    offset = ' OFFSET ' + startRow + ' ROWS FETCH NEXT ' + numRows + ' ROWS ONLY';
    query += orderBy;
    query += offset;

    logger.info("Begin: " + (new Date().toString()));

    async.parallel({
        rows: function (callback) {
            pool.getConnection(function (err, connection) {
                logger.info("Begin Connection: " + (new Date().toString()));
                if (err) {
                    logger.error(err.message);
                    return;
                }
                logger.info("Begin execute: " + (new Date().toString()));
                connection.execute(
                    query,
                    {},
                    {
                        resultSet: true,
                        prefetchRows: 1000
                    },
                    function (err, results) {
                        logger.info("End execute: " + (new Date().toString()));
                        var rowsProcessed = 0;
                        var startTime;
                        if (err) {
                            logger.error(err.message);
                            callback("Something broke in the first thing");
                            doRelease(connection);
                            return;
                        }
                        var procJson = [];

                        function fetchRowsFromRS(connection, resultSet, numRows) {
                            resultSet.getRows(
                                numRows, // get this many rows
                                function (err, rows) {
                                    if (err) {
                                        console.error(err);
                                        doClose(connection, resultSet); // always close the result set
                                    } else if (rows.length >= 0) {
                                        /**
                                         * For each row in the result, pushes a new object to the rows array
                                         * In each new object, the key is assigned and the result row value set
                                         */
                                        for (var i = 0; i < rows.length; i++) {
                                            procJson.push({});
                                            console.log(procJson);
                                            for (var j = 0; j < resultSet.metaData.length; j++) {
                                                procJson[i][resultSet.metaData[j].name.toLowerCase()] = rows[i][j];
                                            }
                                        }
                                        //TODO: Add null handling
                                        logger.info("Send JSON: " + (new Date().toString()));
                                        logger.info("JSON Sent: " + (new Date().toString()));
                                        if (rows.length === numRows) // might be more rows
                                            fetchRowsFromRS(connection, resultSet, numRows);
                                        else
                                            doClose(connection, resultSet); // always close the result set
                                    } else { // no rows
                                        doClose(connection, resultSet); // always close the result set
                                    }
                                });
                        }

                        fetchRowsFromRS(connection, results.resultSet, numRows);
                        callback(null, procJson);
                    });
            });
        },
        totalRows: function (callback) {
            pool.getConnection(function (err, connection) {
                logger.info("Begin Connection: " + (new Date().toString()));
                if (err) {
                    logger.error(err.message);
                    return;
                }
                logger.info("Begin execute: " + (new Date().toString()));
                connection.execute(
                    countQuery,
                    function (err, result) {
                        logger.info("End execute: " + (new Date().toString()));
                        if (err) {
                            logger.error(err.message);
                            callback("Something broke");
                            doRelease(connection);
                            return;
                        }
                        logger.info("Send JSON: " + (new Date().toString()));
                        console.log(result.rows);
                        callback(null, result.rows[0][0]);
                        logger.info("JSON Sent: " + (new Date().toString()));
                        doRelease(connection);
                    });
            });
        }
    }, function (err, result) {
        if (err) {
            logger.error(err);
        }
        res.send(result);
    });
});
If rows.length >=0 and if the query returns 0 results, I get this.
How much memory does your Node.js server have? You're setting maxRows very high and grabbing all the data in a single shot. This is likely causing you to run out of memory. Generally, the key is to balance round trips (which you want to reduce) with memory usage (which goes up as round trips are reduced).
You'll want to leverage the ResultSet API, which allows you to stream a read-consistent view of data in smaller chunks. Have a look at this for ideas: https://jsao.io/2015/07/an-overview-of-result-sets-in-the-nodejs-driver/
Rather than buffer the data in the Node.js server (which would lead to the same problem), you'll want to stream it down to the http request.
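A rough sketch of that idea, assuming node-oracledb's connection.queryStream() is available in the driver version in use (the JSON framing is illustrative and this is not a drop-in replacement for the async.parallel handler above):

pool.getConnection(function (err, connection) {
    if (err) {
        logger.error(err.message);
        return res.status(500).end();
    }

    // stream rows instead of buffering up to maxRows of them in procJson
    var stream = connection.queryStream(query);
    var first = true;

    res.setHeader('Content-Type', 'application/json');
    res.write('[');

    stream.on('data', function (row) {
        // each row is written to the response as soon as it is fetched
        res.write((first ? '' : ',') + JSON.stringify(row));
        first = false;
    });

    stream.on('error', function (streamErr) {
        logger.error(streamErr.message);
        res.end(']');
        doRelease(connection);
    });

    stream.on('end', function () {
        res.end(']');
        doRelease(connection);
    });
});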
Finally, but perhaps most importantly, note that your code is currently open to SQL injection. Values that come in from users via req.body cannot be trusted. They must either be bound in using bind variables OR sanitized using something like dbms_assert.
Only values (like numRows) can be bound in. Identifiers (like sortCol) have to be sanitized. You'll likely want to do the sanitization in Node.js, so here's a really basic check that should help.
You could create an "assert" module:
function simpleSqlName(name) {
if (name.length > 30) {
throw new Error('Not simple SQL');
}
// Fairly generic, but effective. Would need to be adjusted to accommodate quoted identifiers,
// schemas, etc.
if (!/^[a-zA-Z0-9#_$]+$/.test(name)) {
throw new Error('Not simple SQL');
}
return name;
}
module.exports.simpleSqlName = simpleSqlName;
function validSortOrder(order) {
if (order !== 'desc' && order !== 'asc') {
throw new Error('Not valid sort order');
}
return order;
}
module.exports.validSortOrder = validSortOrder;
Then your code would look more like this (notice I'm using both the assert module and bind variables):
let assert = require('./assert.js');

router.post('/report/', jsonParser, function (req, res) {
    var data = req.body,
        startRow = data.startRow,
        numRows = data.numRows,
        sortCol = assert.simpleSqlName(data.sortCol),
        sortDir = assert.validSortOrder(data.sortDir);

    var countQuery = 'SELECT COUNT(*) ' +
                     'FROM this_view ';
    var query = 'SELECT * ' +
                'FROM this_view';

    var seg,
        orderBy,
        offset;

    orderBy = ' ORDER BY UPPER(' + sortCol + ') ' + sortDir;
    offset = ' OFFSET :start_row ROWS FETCH NEXT :num_rows ROWS ONLY';
    query += orderBy;
    query += offset;

    logger.info("Begin: " + (new Date().toString()));

    async.parallel({
        rows: function (callback) {
            pool.getConnection(function (err, connection) {
                logger.info("Begin Connection: " + (new Date().toString()));
                if (err) {
                    logger.error(err.message);
                    return;
                }
                logger.info("Begin execute: " + (new Date().toString()));
                connection.execute(
                    query,
                    {
                        start_row: startRow,
                        num_rows: numRows
                    },
                    function (err, result) {
                        logger.info("End execute: " + (new Date().toString()));
                        if (err) {
                            logger.error(err.message);
                            callback("Something broke in the first thing");
                            doRelease(connection);
                            return;
                        }
                        console.log(result.rows);
                        var procJson = [];

                        /**
                         * For each row in the result, pushes a new object to the rows array
                         * In each new object, the key is assigned and the result row value set
                         */
                        for (var i = 0; i < result.rows.length; i++) {
                            procJson.push({});
                            for (var j = 0; j < result.metaData.length; j++) {
                                procJson[i][result.metaData[j].name.toLowerCase()] = result.rows[i][j];
                            }
                        }

                        logger.info("Send JSON: " + (new Date().toString()));
                        callback(null, procJson);
                        logger.info("JSON Sent: " + (new Date().toString()));
                        doRelease(connection);
                    });
            });
        },
        totalRows: function (callback) {
            pool.getConnection(function (err, connection) {
                logger.info("Begin Connection: " + (new Date().toString()));
                if (err) {
                    logger.error(err.message);
                    return;
                }
                logger.info("Begin execute: " + (new Date().toString()));
                connection.execute(
                    countQuery,
                    function (err, result) {
                        logger.info("End execute: " + (new Date().toString()));
                        if (err) {
                            logger.error(err.message);
                            callback("Something broke");
                            doRelease(connection);
                            return;
                        }
                        logger.info("Send JSON: " + (new Date().toString()));
                        console.log(result.rows);
                        callback(null, result.rows[0][0]);
                        logger.info("JSON Sent: " + (new Date().toString()));
                        doRelease(connection);
                    });
            });
        }
    }, function (err, result) {
        if (err) {
            logger.error(err);
        }
        res.send(result);
    });
});
Learn more about bind variables here: https://github.com/oracle/node-oracledb/blob/master/doc/api.md#bind
Also, check out the slides from a recent talk I gave. You may get something out of them... https://www.dropbox.com/s/2rhnu74z2y21gsy/Tips%20and%20Tricks%20for%20Getting%20Started%20with%20the%20Oracle%20Database%20Driver%20for%20Node.pdf?dl=0

MySQL, Node.js Sequential actions - How can I do that?

I have the following code:
function query1() {
    var defered = Q.defer();
    console.log("In query1");

    var connection = mysql.createConnection({
        host: '........',
        user: 'm...c....a.....i',
        password: '......Z....9...K',
        database: '.....ol'
    });

    connection.connect(function (err) {
        if (!err) {
            console.log("Database is connected ...");
        } else {
            console.log("Error connecting database ...");
        }
    });

    sql = '' +
        'select c.ID as CENA_ID, ' +
        ' c.I_KEY as CENA_NUMERO, ' +
        ' c.NM_CENA as CENA_NOME, ' +
        ' b.DS_MAC as MAC_BOX, ' +
        ' v.DS_CLIENTID as ALEXA_ID, ' +
        ' v.FK_ID_GRUPO as GRUPO_ID ' +
        ' from TB_DISPOSITIVOS_VOZ v ' +
        ' inner join TB_GRUPOS g ' +
        ' on g.ID = v.FK_ID_GRUPO ' +
        ' inner join TB_CENAS c ' +
        ' on g.ID = c.FK_ID_GRUPO ' +
        ' inner join TB_CENTRAIS b ' +
        ' on g.ID = b.FK_ID_GRUPO ' +
        'where v.DS_CLIENTID = "' + userId + '" ' +
        'and lower(c.NM_CENA) like "%' + sceneName.toLowerCase() + '%"';

    console.log("Created query");

    try {
        connection.query(sql, function (erro, rows, fields) {
            if (!erro) {
                console.log("Executed query verifying the userId");
                contador = 0;
                if (rows.length > 0) {
                    cena_id = rows[0].CENA_ID;
                    cena_numero = rows[0].CENA_NUMERO;
                    cena_nome = rows[0].CENA_NOME;
                    alexa_id = rows[0].ALEXA_ID;
                    grupo_id = rows[0].GRUPO_ID;
                    mac_box = rows[0].MAC_BOX;
                    contador = contador + 1;
                }
                console.log("contador: " + contador);
            } else {
                console.log("Error - getting the Alexa register in database" + erro);
                context.fail("Error - getting the Alexa register in database" + erro);
            }
        });
    } catch (ex) {
        console.log("exception: " + ex);
    }
}
And this code as well:
Q.all([query1()]).then(function (results) {
    console.log("Q.all log function");
    if (contador > 0) {
        console.log("contador > 0");

        var client = mqtt.connect('mqtt://.............com');
        console.log("connected to MQTT broker");

        var buffer = [26,
            0, 0, 0, 0, 555, 645, 0, 0, 0, 0, 0,
            0, 5555, 2, Math.floor((Math.random() * 200) + 1),
            0, 0, 0, 333, 13, 4, 0, 1, 0,
            cena_numero
        ];
        console.log("Created buffer");

        client.on('connect', function () {
            client.publish('n/c/' + mac_box + '/app', buffer);
            console.log("sent MQTT");
        });

        speechOutput = "Command " + sceneName + " executed successfully";
        repromptText = "";
        console.log("Process executed successfully");
    } else {
        console.log("contador <= 0");
        speechOutput = "This command was not found!";
        repromptText = "";
    }
}, function (reason) {
    console.log("reason: " + reason);
});
How can I make the second block execute only after query1() has completed successfully? Inside query1() I have a MySQL query, and I can only continue the process once the result of that query is available.
Can anyone help me?
Thanks a lot!
You're missing some key concepts regarding callbacks and asynchronous behavior in Node.js. You're using the "Q" library (btw I'd recommend trying bluebird instead) to handle promises, but your "query1" function does not return a promise. That's why query1 executes but your "Q.all log function" will execute before query1 is finished.
You can structure your code like this instead (I'll give an example with bluebird since I'm more familiar with it):
var Promise = require('bluebird');
var mysql = require('mysql');

var _connection;

function query1() {
    return new Promise(function (resolve, reject) {
        //open your connection (same credentials as in the question)
        var connection = mysql.createConnection({ /* credentials */ });
        connection.connect(function (err) {
            if (err) return reject(err);
            _connection = connection;
            //do your query
            _connection.query(sql, [params], function (err, data) {
                if (err) return reject(err);
                else resolve(data);
            });
        });
    });
}

function query2(data) {
    return new Promise(function (resolve, reject) {
        //do your query, using data passed in from query1
        _connection.query(sql, [params], function (err, data) {
            if (err) return reject(err);
            else resolve(data);
        });
    });
}

query1()
    .then(function (data) { return query2(data); })
    .catch(function (err) {
        console.log('error:', err);
    });
Also, just FYI, concatenating SQL strings like this is a no-no that will open you up to a SQL injection attack:
like "%' + sceneName.toLowerCase() + '%"
Instead, use a ? placeholder and pass the wildcard pattern as a bound value with connection.query(sql, [params], callback), as sketched below. Hope this helps.
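A minimal sketch of the parameterized version (assuming the mysql module's ? placeholder substitution; note that the wildcards go into the bound value, not into the SQL text):

var sql = 'select ... ' +          // same columns and joins as in the question
    'where v.DS_CLIENTID = ? ' +
    'and lower(c.NM_CENA) like ?';

connection.query(sql, [userId, '%' + sceneName.toLowerCase() + '%'], function (erro, rows) {
    if (erro) return console.log('query failed: ' + erro);
    // rows holds the same result as before, but userId and sceneName can no longer inject SQL
});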
I solved my problem with the async package, like this:
var async = require('async');

async.series([
    function (callback) {
        //action 1...
    },
    function (callback) {
        //action 2...
    }
], function (err) {
    if (err) {
        speechOutput = "Scene not found!";
        repromptText = "Please try again.";
    }
    console.log("Before speechOutput");
    callback(sessionAttributes,
        buildSpeechletResponse(cardTitle, speechOutput, repromptText, shouldEndSession));
});

Node.js: Downloading multiple files asynchronously

In trying to get the hang of Node.js' asynchronous coding style, I decided to write a program that reads a text file containing a bunch of URLs to download and then downloads each file. I started out writing a function to download just one file (which works fine), but I am having trouble extending the logic to download multiple files.
Here's the code:
var http = require("http"),
    fs = require("fs"),
    input = process.argv[2],
    folder = "C:/Users/Wiz/Downloads/",
    regex = /(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?/,
    urls = null,
    url = "",
    filename = "";

fs.readFile(input, "utf8", function (e, data) {
    console.log("Reading file: " + input);
    if (e) console.log("Got error:" + e.message);
    urls = data.split("\n");
    for (var i = urls.length; i--;) {
        url = urls[i];
        if (!url.match(regex)) continue;
        filename = folder + url.substring(url.lastIndexOf('/') + 1);
        downloadQueue.addItem(url, filename);
    }
});

var downloadQueue = {
    queue: [],

    addItem: function (p_sSrc, p_sDest) {
        this.queue.push({
            src: p_sSrc,
            dest: p_sDest
        });
        if (this.queue.length === 1) {
            this.getNext();
        }
    },

    getNext: function () {
        var l_oItem = this.queue[0];
        http.get(l_oItem.src, function (response) {
            console.log("Downloading: " + l_oItem.dest);
            var file = fs.createWriteStream(l_oItem.dest);
            response.on("end", function () {
                file.end();
                console.log("Download complete.");
                downloadQueue.removeItem();
            }).on("error", function (error) {
                console.log("Error: " + error.message);
                fs.unlink(l_oItem.dest);
            });
            response.pipe(file);
        });
    },

    removeItem: function () {
        this.queue.splice(0, 1);
        if (this.queue.length != 0) {
            this.getNext();
        } else {
            console.log("All items downloaded");
        }
    }
};
How do I structure the code so that the completion of the first download can signal the initiation of the next one? Please note that this exercise is just for learning purposes, to understand how asynchronous coding works. In practice, I'm sure there are much better tools out there to download multiple files.
Try something simple at first; it looks like you copy-pasted code without quite understanding what it does.
Start with a simple loop that gets a URL and prints something.
var http = require('http');

URL = require('url').parse('http://www.timeapi.org/utc/now?format=%25F%20%25T%20-%20%25N');
URL['headers'] = {'User-Agent': 'Hello World'};

// launch 20 queries asynchronously
for (var i = 0; i < 20; i++) {
    (function (i) {
        console.log('Query ' + i + ' started');
        var req = http.request(URL, function (res) {
            console.log('Query ' + i + ' status: ' + res.statusCode + ' - ' + res.statusMessage);
            res.on('data', function (content) {
                console.log('Query ' + i + ' ended - ' + content);
            });
        });
        req.on('error', function (err) {
            console.log('Query ' + i + ' return error: ' + err.message);
        });
        req.end();
    })(i);
}
All the URLs will be fetched asynchronously. You can observe that the responses do not arrive in order, but they are still processed correctly.
The difficulty with async is not doing things in parallel, because you just write a single task and execute it multiple times. It becomes complicated when you need, for instance, to wait for all tasks to finish before continuing. For that, have a look at promises; a sketch follows below.
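For the "wait for all tasks to finish" case, a minimal sketch with promises (assuming a Node version with a built-in Promise; downloadOne is a hypothetical wrapper around the same http.get/pipe logic used in the question):

function downloadOne(url, filename) {
    return new Promise(function (resolve, reject) {
        http.get(url, function (response) {
            var file = fs.createWriteStream(filename);
            response.pipe(file);
            file.on('finish', function () { resolve(filename); });
            response.on('error', reject);
        }).on('error', reject);
    });
}

Promise.all(urls.map(function (url) {
    return downloadOne(url, folder + url.substring(url.lastIndexOf('/') + 1));
})).then(function (files) {
    console.log('All ' + files.length + ' downloads finished');
}).catch(function (err) {
    console.log('At least one download failed: ' + err.message);
});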
Here is what I started out with. I figured that since each download was invoked asynchronously, they would all be independent of each other.
var http = require("http"),
    fs = require("fs"),
    input = process.argv[2],
    folder = "C:/Users/Wiz/Downloads/",
    regex = /(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?/,
    urls = null,
    url = "",
    filename = "";

fs.readFile(input, "utf8", function (e, data) {
    console.log("Reading file: " + input);
    if (e) console.log("Got error:" + e.message);
    urls = data.split("\n");
    for (var i = urls.length; i--;) {
        url = urls[i];
        if (!url.match(regex)) continue;
        filename = folder + url.substring(url.lastIndexOf('/') + 1);
        http.get(url, function (response) {
            var file = fs.createWriteStream(filename);
            response.on("end", function () {
                file.end();
            });
            response.pipe(file);
        });
    }
});
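One thing worth noting about this version: url and filename are single shared variables, so by the time each http.get callback runs they may all point at the last URL in the list. Wrapping each iteration in an immediately invoked function, as the answer above does, captures them per download. A minimal sketch of that change:

for (var i = urls.length; i--;) {
    (function (url) {
        if (!url.match(regex)) return;
        var filename = folder + url.substring(url.lastIndexOf('/') + 1);
        http.get(url, function (response) {
            var file = fs.createWriteStream(filename); // filename is now captured per download
            response.pipe(file);
        });
    })(urls[i]);
}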

ReferenceError: Can't find variable with SpookyJS

I am trying to call an external function in SpookyJS by doing the same thing as in the wiki: https://github.com/WaterfallEngineering/SpookyJS/wiki/Introduction
But when I try the following code, I have this error:
ReferenceError: Can't find variable: test
try {
    var Spooky = require('spooky');
} catch (e) {
    var Spooky = require('../lib/spooky');
}

var urls = ["http://www.google.fr",
    "http://www.yahoo.com"
];

exports.clicker = function (req, res) {
    console.log("FIRST: " + visitUrl + " \n\n\n END FIRST");

    var visitUrl = function (urlIndex, nbClicked) {
        console.log("HELLO");
    };

    var spooky = new Spooky({
        child: {
            // transport: 'http'
        },
        casper: {
            logLevel: 'debug',
            verbose: true
        }
    }, function (err) {
        if (err) {
            e = new Error('Failed to initialize SpookyJS');
            e.details = err;
            throw e;
        }

        spooky.start(urls[0]);
        console.log("SECOND: " + visitUrl + " \n\n\n END SECOND");
        spooky.then([{
            test: visitUrl
        }, function () {
            console.log("THIRD: " + test + " \n\n\n END THIRD");
        }]);
        spooky.run();
    });

    // Uncomment this block to see all of the things Casper has to say.
    // There are a lot.
    // He has opinions.
    spooky.on('console', function (line) {
        console.log(line);
    });

    spooky.on('hello', function (greeting) {
        console.log(greeting);
    });

    spooky.on('log', function (log) {
        if (log.space === 'remote') {
            console.log(log.message.replace(/ \- .*/, ''));
        }
    });
}
These two following logs work:
console.log("FIRST: " + visitUrl + " \n\n\n END FIRST");
console.log("SECOND: " + visitUrl + " \n\n\n END SECOND");
But the third one is responsible for the error message:
console.log("THIRD: " + test + " \n\n\n END THIRD");
Any suggestion?
I would like to comment on your post instead of going with a big answer but I do not have the reputation for it, meh.
You cannot pass functions in the hash. If you were to do
var x = 'HELLO';

spooky.then([{
    XinCasper: x
}, function () {
    //do something with XinCasper
}]);
That would work. If you want to pass an object or array, use JSON.stringify and rebuild in the casper scope.
If you want to access spooky functions from the casper scope, use event emitters instead, as follows (see mostly the last lines):
try {
    var Spooky = require('spooky');
} catch (e) {
    var Spooky = require('../lib/spooky');
}

var urls = ["http://www.google.fr",
    "http://www.yahoo.com"
];

exports.clicker = function (req, res) {
    console.log("FIRST: " + visitUrl + " \n\n\n END FIRST");

    var visitUrl = function (urlIndex, nbClicked) {
        console.log("HELLO");
    };

    var spooky = new Spooky({
        child: {
            // transport: 'http'
        },
        casper: {
            logLevel: 'debug',
            verbose: true
        }
    }, function (err) {
        if (err) {
            e = new Error('Failed to initialize SpookyJS');
            e.details = err;
            throw e;
        }

        spooky.start(urls[0]);
        console.log("SECOND: " + visitUrl + " \n\n\n END SECOND");
        spooky.then(function () {
            //casper scope
            var y = 'something';
            this.emit('third', y);
        });
        spooky.run();
    });

    spooky.on('third', function (y) {
        console.log('Hey, I can output ' + y + ' here.');
    });
};
Finally, I succeeded in doing what I needed by using PhantomJS-node instead of SpookyJS.
Clearly, you are using variable test
console.log("THIRD: " + test + " \n\n\n END THIRD");
without declaring it first.
var test;
Replace it with req maybe. :)
