I'm using Node.js and the xml2js module. I'm reading an XML file and trying to emit an event after the XML has been converted to a JSON object. My code looks like this:
var fs = require('fs'),
    util = require('util'),
    events = require('events'),
    xml2js = require('xml2js');

var CIRCUITMODELSFILENAME = "ControlCircuitModels.xml";
var CIRCUITPARTMODELSFILENAME = "ControlCircuitParts.xml";
var circuitModels, circuitPartModels;

function ModelController() {
    if (false === (this instanceof ModelController)) {
        return new ModelController();
    }
    events.EventEmitter.call(this);
};

util.inherits(ModelController, events.EventEmitter);

ModelController.prototype.load = function (baseDir) {
    var parser = new xml2js.Parser({
        normalize: true,
        trim: true,
        explicitArray: false
    });
    fs.readFile(baseDir + "/" + CIRCUITMODELSFILENAME, function (err, data) {
        parser.parseString(data, function (err, result) {
            circuitModels = result;
            console.log('circuit models loaded');
            parser.reset();
            fs.readFile(baseDir + "/" + CIRCUITPARTMODELSFILENAME, function (err, data) {
                parser.parseString(data, function (err, result) {
                    circuitPartModels = result;
                    console.log('circuit part models loaded');
                    moduleReady = true;
                    this.emit("modelsloaded", null);
                });
            });
        });
    });
};
// public interface
exports.ModelController = ModelController;
The problem is that the scope is lost when emitting the event:
this.emit("modelsloaded", null);
At that point, this is not the object that inherited emit from EventEmitter.
How can I pass the scope into the parser.parseString callback?
Thanks,
Chris
Not sure if this is the best solution, but this works (it doesn't look very straightforward):
fs.readFile(baseDir + "/" + CIRCUITMODELSFILENAME, function (err, data) {
    parser.parseString(data, function (err, result) {
        circuitModels = result;
        console.log('circuit models loaded');
        parser.reset();
        fs.readFile(baseDir + "/" + CIRCUITPARTMODELSFILENAME, function (err, data) {
            parser.parseString(data, function (err, result) {
                circuitPartModels = result;
                console.log('circuit part models loaded');
                moduleReady = true;
                this.emit("modelsloaded", null);
            }.bind(this));
        }.bind(this));
    }.bind(this));
}.bind(this));
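An alternative that avoids chaining .bind(this) on every callback (just a sketch along the same lines, not necessarily better) is to capture the emitter once at the top of load():

ModelController.prototype.load = function (baseDir) {
    var self = this; // keep a reference to the EventEmitter instance
    var parser = new xml2js.Parser({ normalize: true, trim: true, explicitArray: false });

    fs.readFile(baseDir + "/" + CIRCUITMODELSFILENAME, function (err, data) {
        parser.parseString(data, function (err, result) {
            circuitModels = result;
            parser.reset();
            fs.readFile(baseDir + "/" + CIRCUITPARTMODELSFILENAME, function (err, data) {
                parser.parseString(data, function (err, result) {
                    circuitPartModels = result;
                    self.emit("modelsloaded", null); // self still points to the controller
                });
            });
        });
    });
};

With ES2015 arrow functions you get the same effect automatically, since arrows don't rebind this.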
Related
I have a weird-looking object that I would like to turn into an object with multiple objects (by multiple objects I mean nested objects). The current object looks like this:
{
    'test.txt': "This is a test\r\n\r\nI hope it'll work",
    'testy.js': 'console.log("thonk");\r\n',
    'thonk\\i swear\\egg.txt': 'am going to be happy?',
    'thonk\\pls work.txt': 'dasdas'
}
And I want it to look like this:
{
    "test.txt": "This is a test\r\n\r\nI hope it'll work",
    "testy.js": "console.log('thonk');\r\n",
    "thonk": {
        "I swear": {
            "egg.txt": "am going to be happy?"
        },
        "pls work.txt": "dasdas"
    }
}
Edit:
Here's what my code is (if you need it):
var fs = require("fs");
var path = require("path");

var walk = function (dir, done) {
    var results = [];
    fs.readdir(dir, function (err, list) {
        if (err) return done(err);
        var i = 0;
        (function next() {
            var file = list[i++];
            if (!file) return done(null, results);
            file = path.resolve(dir, file);
            fs.stat(file, function (err, stat) {
                if (stat && stat.isDirectory()) {
                    walk(file, function (err, res) {
                        results = results.concat(res);
                        next();
                    });
                } else {
                    results.push(file);
                    next();
                }
            });
        })();
    });
};

var root = "test";
var data = {};

walk("test", function (err, results) {
    if (err) throw err;
    for (var i in results) {
        data[
            results[i].replace(__dirname + "\\" + root + "\\", "")
        ] = fs.readFileSync(results[i], "utf8");
    }
    console.log(data);
});
This can be done by combining Object.keys() and Array.reduce() as follows:
const source = {
    'test.txt': "This is a test\r\n\r\nI hope it'll work",
    'testy.js': 'console.log("thonk");\r\n',
    'thonk\\i swear\\egg.txt': 'am going to be happy?',
    'thonk\\pls work.txt': 'dasdas'
};

const result = Object.keys(source).reduce((target, k) => {
    const keys = k.split('\\');
    if (keys.length == 1) {
        target[k] = source[k];
    } else {
        const nestedObj = target[keys[0]] || {};
        keys.slice(1).reduce((o, nestedKey, i) => {
            const value = i < keys.length - 2 ? {} : source[k];
            o[nestedKey] = value;
            return value;
        }, nestedObj);
        target[keys[0]] = nestedObj;
    }
    return target;
}, {});

console.log(result);
So you create a new object, then go through each element, splitting its key and placing the value into the new object at the right nesting level.
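For comparison, here is a minimal alternative sketch (the nest helper and its names are mine, not from the question) that walks every path segment, so it also covers keys nested more than two levels deep:

const nest = (flat) => {
    const out = {};
    for (const key of Object.keys(flat)) {
        const parts = key.split('\\');
        let node = out;
        // create intermediate objects for every segment except the last
        for (const part of parts.slice(0, -1)) {
            node = node[part] = node[part] || {};
        }
        node[parts[parts.length - 1]] = flat[key];
    }
    return out;
};

console.log(nest(source)); // same shape as the desired output above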
I am trying to pass an object variable to a callback
var sql = require('mssql');
var asset_update = function (connection, addr) {
    this.connection = connection;
    this.addr = addr;
    this.addr_long = parseInt(addr, 16);
};

asset_update.prototype.getFromMac = function () {
    var ps = new sql.PreparedStatement(this.connection);
    ps.input('addr', sql.Binary);
    ps.prepare('SELECT asset_id FROM asset_addr WHERE addr = @addr', function (err) {
        ps.execute({ addr: this.addr_long }, function (err, recordset) {
            ps.unprepare();
            console.log(recordset.length);
            console.log(this.addr_long);
        })
    });
};
How can I pass this.addr_long to the ps.execute() callback?
You can use an arrow function to preserve this:

ps.prepare('SELECT asset_id FROM asset_addr WHERE addr = @addr', (err) => {
    ps.execute({ addr: this.addr_long }, (err, recordset) => { // this has been preserved
        ps.unprepare();
        console.log(recordset.length);
        console.log(this.addr_long);
    })
});
Check out this doc for the ES5 alternatives, which you shouldn't use anymore.
A popular one was to alias this as that or self, but this pattern is now obsolete. It would look like:
var self = this;
ps.prepare('SELECT asset_id FROM asset_addr WHERE addr = @addr', function (err) {
    ps.execute({ addr: self.addr_long }, function (err, recordset) {
        ps.unprepare();
        console.log(recordset.length);
        console.log(self.addr_long);
    })
});
As you're new to Node/JS, I suggest you read the following resources:
What does "this" mean?
Callbacks overview and tricks
asset_update.prototype.getFromMac = function () {
    var that = this;
    var ps = new sql.PreparedStatement(this.connection);
    ps.input('addr', sql.Binary);
    ps.prepare('SELECT asset_id FROM asset_addr WHERE addr = @addr', function (err) {
        ps.execute({ addr: that.addr_long }, function (err, recordset) {
            ps.unprepare();
            console.log(recordset.length);
            console.log(that.addr_long);
        })
    });
};
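If you'd rather not introduce an extra variable, Function.prototype.bind is another ES5-era option; a minimal sketch of the same method:

asset_update.prototype.getFromMac = function () {
    var ps = new sql.PreparedStatement(this.connection);
    ps.input('addr', sql.Binary);
    ps.prepare('SELECT asset_id FROM asset_addr WHERE addr = @addr', function (err) {
        ps.execute({ addr: this.addr_long }, function (err, recordset) {
            ps.unprepare();
            console.log(recordset.length);
            console.log(this.addr_long); // still the asset_update instance
        }.bind(this));
    }.bind(this)); // bind the outer callback so `this` survives into prepare()
};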
I'd like to mock the MongoDB dependency with proxyquire by doing this in my test:
var proxyquire = require('proxyquire');
var controller = path.resolve('.path/to/controller/file.js');
In the beforeEach statement:
mocked_mongoose = {
    isMocked: true,
    model: function (name, schema, collection, skipInit) {
        return {
            find: function (conditions, projection, options, callback) {
                console.log('callback find');
                return callback();
            },
            save: function (options, fn) {
                console.log('callback save');
                return fn();
            },
            findOne: function (conditions, projection, options, callback) {
                console.log('callback find one');
                var model = mongoose.model(name);
                var fakeModel = fakery.fake(model);
                return callback(null, fakeModel);
            }
        };
    }
};
proxyquire(controller, {
    'mongoose': mocked_mongoose
});
When I go to the controller and do console.log(mongoose.isMocked), I get undefined, and if I print mongoose.model.toString() it looks like the mongoose methods aren't overridden.
I followed this article and tried to implement the same logic, but I'm not getting the same results.
Any help will be appreciated,
thanks!
I ended up changing the way I was defining my mocked mongoose object, to match exactly the scenarios I wanted to test in my case.
First, model instantiation:

var Model = mongoose.model('SchemaDef');
var other = Model({
    _id: 'someId',
    name: 'Some other fields'
});

Model search:

Model.findOne(query, callback);

This is the version that works:
'use strict';
var factory = require('factory-girl');
var mongoose = require('mongoose');

function viewModel(name) {
    function constructor(obj) {
        if (obj) {
            obj.find = constructor.find;
            obj.save = constructor.save;
            obj.findOne = constructor.findOne;
        }
        return obj;
    };

    constructor.isMocked = true;

    constructor.find = function (query, callback) {
        factory.build(name, function (err, mockedModel) {
            return callback(null, [mockedModel]);
        });
    };

    constructor.save = function (callback) {
        factory.build(name, function (err, mockedModel) {
            return callback(null, [mockedModel]);
        });
    };

    constructor.findOne = function (query, callback) {
        factory.build(name, function (err, mockedModel) {
            return callback(null, mockedModel);
        });
    };

    return constructor;
};

module.exports = {
    model: function (name) {
        factory.define(name, mongoose.model(name));
        return viewModel(name);
    }
};
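For reference, this is roughly how the mock gets wired into a test with proxyquire (the paths and the schema name are placeholders, not from the original code):

var proxyquire = require('proxyquire');
var mockedMongoose = require('./mocks/mongoose-mock'); // the module above

// Note: proxyquire expects the same request string the controller itself uses,
// resolved relative to the test file.
var controller = proxyquire('../controllers/file', {
    mongoose: mockedMongoose
});

// Inside the controller, mongoose.model('SchemaDef') now returns the mocked
// constructor, so Model.findOne(query, callback) is answered by factory-girl
// instead of a real MongoDB connection.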
I have an Express route like this in a Node server (the file is required):
var redis = require('../modules/redis');
var cors = require('cors'); // required for cors() below

module.exports = function (app) {
    var redisClient = redis.init();
    app.post('/auth/ticket', cors(), function (req, res) {
        var hashes = ['hash1', 'hash2', 'hash3'];
        var candidates = []; // An array to collect valid hashes
        var key;
        // to check each hash against a Redis DB I use a for loop
        for (key in hashes) {
            var hash = hashes[key];
            console.log("Hash " + hash + " will be proofed now:");
            // now I try to collect the valid hashes in the candidates array
            if (redisClient.exists(hash) === 1) candidates.push(hash);
        }
        console.log(JSON.stringify(candidates));
    });
};
Now here is the code of my module, which is supposed to manage all the Redis requests:
exports.init = function () {
    Redis = exports.Redis = function () {
        var promiseFactory = require("q").Promise,
            redis = require('promise-redis')(promiseFactory);
        this.client = redis.createClient();
        this.client.on('error', function (err) {
            console.log('redis error – ' + client.host + ':' + client.port + ' – ' + err);
        });

        Redis.prototype.exists = function (key) {
            this.client.exists(key, function (err, data) {
                return data === 1 ? true : false;
            });
        };
    };

    return new Redis();
};
So what I experience is that the module is able to console.log the results properly: if a hash is valid it logs true, and otherwise false. This works as expected.
The problem is that the for loop continues execution without waiting for the results. I think this is caused by race conditions.
As you can see, I have started to work something out there using Q and promise-redis at the top of my code:
var promiseFactory = require("q").Promise,
    redis = require('promise-redis')(promiseFactory);
this.client = redis.createClient();
I'd like to know how to make my for loop (in the Express route) wait for the results of redisClient.exists(hash), or in other words, how to get all valid hashes into my candidates array.
Please help.
Like @brad said, you could use Q.all; it takes an array of promises as input and resolves with an array of results once all the promises are finished.
There is a mistake in your answer:
Redis.prototype.exists = function (key) {
    return this.client.exists(key)   // CHANGED, you still need to return a promise.
        .then(function (reply) {
            console.log("reply " + reply);
            return (reply);
        })
        .catch(console.log);
};
If I understand correctly, what you want is something like:
exports.init = function () {
    Redis = exports.Redis = function () {
        var Q = require("q"),
            promiseFactory = Q.Promise,
            redis = require('promise-redis')(promiseFactory);
        this.client = redis.createClient();
        this.client.on('error', function (err) {
            console.log('redis error – ' + client.host + ':' + client.port + ' – ' + err);
        });

        Redis.prototype.exists = function (key) {
            return this.client.exists(key).then(function (data) {
                return data === 1 ? true : false;
            });
        };

        Redis.prototype.getActive = function (arry) {
            var self = this;
            return Q.all(arry.map(self.exists.bind(self))).then(function (res) {
                return arry.filter(function (val, idx) { return res[idx]; });
            });
        };
    };

    return new Redis();
};
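With that in place, the route from the question could consume getActive() roughly like this (just a sketch reusing the names from the question):

app.post('/auth/ticket', cors(), function (req, res) {
    var hashes = ['hash1', 'hash2', 'hash3'];
    // getActive() resolves with only those hashes that exist in Redis
    redisClient.getActive(hashes)
        .then(function (candidates) {
            console.log(JSON.stringify(candidates));
            res.json(candidates);
        })
        .catch(function (err) {
            res.status(500).send(err.message);
        });
});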
@mido22: But did you also notice that I moved all the Redis functions out into the module file (first code block), which requires promise-redis and builds a promise factory for Q? I changed the code inside the module file to:
Redis.prototype.exists = function (key) {
    this.client.exists(key)
        .then(function (reply) {
            console.log("reply " + reply);
            return (reply);
        })
        .catch(console.log);
};
and this gives the correct results, as the console.log clearly shows.
Your change to the for loop works very well, but I don't think it fulfills my needs perfectly. If I could, I would like to have it completely moved out into the module file, so that I can use the prototyped method in similar cases from everywhere. Is that possible somehow?
I see that it would result in having two promise-enabled setups if I also created an instance of the Redis client with promise-redis and Q inside the auth/ticket route, too.
like this:
var Q = require('q'),
    promiseFactory = Q.Promise,
    redis = require("promise-redis")(promiseFactory),
    client;
and then the Express route (there are a lot more routes, each in its own file) like in your code.
Do you understand what I mean? Of course your solution will be fine for my needs, but a module that handles the whole job would be more elegant, if that is possible at all.
Using redis with bluebird and TypeScript:
import { RedisClient, createClient, ClientOpts } from "redis";
import { promisifyAll, PromisifyAllOptions } from "bluebird";

export module FMC_Redis {
    export class Redis {
        opt: ClientOpts;
        private rc: RedisClient;
        private rcPromise: any;
        private static _instance: Redis = null;

        public static current(_opt?: ClientOpts): Redis {
            if (!Redis._instance) {
                Redis._instance = new Redis(_opt);
                Redis._instance.redisConnect();
            }
            return Redis._instance;
        }

        public get client(): RedisClient {
            if (!this.rc.connected) throw new Error("There is no connection to Redis DB!");
            return this.rc;
        }

        /******* BLUEBIRD ********/
        public get clientAsync(): any {
            // promisifyAll functions of redisClient
            // creating new redis client object which contains xxxAsync(..) functions.
            return this.rcPromise = promisifyAll(this.client);
        }

        private constructor(_opt?: ClientOpts) {
            if (Redis._instance) return;
            this.opt = _opt
                ? _opt
                : {
                    host: "127.0.0.1",
                    port: 6379,
                    db: "0"
                };
        }

        public redisConnect(): void {
            this.rc = createClient(this.opt);
            this.rc
                .on("ready", this.onReady)
                .on("end", this.onEnd)
                .on("error", this.onError);
        }

        private onReady(): void { console.log("Redis connection was successfully established." + arguments); }
        private onEnd(): void { console.warn("Redis connection was closed."); }
        private onError(err: any): void { console.error("There is an error: " + err); }

        /****** PROMISE *********/
        // promise redis test
        public getRegularPromise() {
            let rc = this.client;
            return new Promise(function (res, rej) {
                console.warn("> getKeyPromise() ::");
                rc.get("cem", function (err, val) {
                    console.log("DB Response OK.");
                    // if DB generated error:
                    if (err) rej(err);
                    // DB generated result:
                    else res(val);
                });
            });
        }

        /******* ASYNC - AWAIT *******/
        // async - await test function
        public delay(ms) {
            return new Promise<string>((fnResolve, fnReject) => {
                // resolve after the timeout fires instead of calling fnResolve immediately
                setTimeout(() => fnResolve("> delay(" + ms + ") > successful result"), ms);
            });
        }

        public async delayTest() {
            console.log("\n****** delayTest ");
            let a = this.delay(500).then(a => console.log("\t" + a));
            let b = await this.delay(400);
            console.log("\tb::: " + b);
        }

        // async - await function
        public async getKey(key: string) {
            let reply = await this.clientAsync.getAsync(key);
            return reply.toString();
        }
    }
}
let a = FMC_Redis.Redis.current();
// setTimeout(function () {
// console.warn(a.client.set("cem", "naber"));
// console.warn(a.client.get("cem"));
// console.warn(a.client.keys("cem"));
// }, 1000);
/***** async await test client *****/
a.delayTest();
/** Standard Redis Client test client */
setTimeout(function () {
    a.client.get("cem", function (err, val) {
        console.log("\n****** Standard Redis Client");
        if (err) console.error("\tError: " + err);
        else console.log("\tValue ::" + val);
    });
}, 100);
/***** Using regular Promise with Redis Client > test client *****/
setTimeout(function () {
    a.getRegularPromise().then(function (v) {
        console.log("\n***** Regular Promise with Redis Client");
        console.log("\t> Then ::" + v);
    }).catch(function (e) {
        console.error("\t> Catch ::" + e);
    });
}, 100);

/***** Using bluebird promisify with Redis Client > test client *****/
setTimeout(function () {
    var header = "\n***** bluebird promisify with Redis Client";
    a.clientAsync.getAsync("cem").then(result => console.log(header + result)).catch(console.error);
}, 100);
I am playing with Node.js and, for that purpose, created an email extractor. Somehow, when I make multiple HTTP requests, the node.exe memory usage in the Windows task manager keeps increasing. I understand that Node needs more memory to process the requests, but what I noticed is that this memory usage does not come down even after all requests have been successfully processed.
When I start Node.js it consumes about 35,000K of memory, but after about 80-100 requests this goes up to 50,000K and stays there.
Here is my simple email extractor module:
var request = require('request'),
    cheerio = require('cheerio'),
    async = require('async'),
    urlHelper = require('url');

function Extractor(config) {
    this.baseUrl = config.url;
    this.parsedUrl = urlHelper.parse(config.url);
    this.urls = [];
    this.emails = [];
}

Extractor.prototype.getEmails = function getEmails(html) {
    var foundEmails = html.match(/([a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+)/gi) || [];
    if (foundEmails.length) this.emails = this.emails.concat(foundEmails);
};

Extractor.prototype.extract = function extract(html) {
    var $ = cheerio.load(html),
        that = this;
    if ($('body')) {
        this.getEmails($('body').html());
    }
    if (!this.emails.length) {
        $("a[href^='http://" + this.parsedUrl.host + "'], a[href^='https://" + this.parsedUrl.host + "'], a[href^='/'], a[href^='./'], a[href^='../']").each(function (k, v) {
            that.urls.push(urlHelper.resolve(that.baseUrl, $(v).attr('href')));
        });
    }
};
/**
 * Process the base URL
 */
Extractor.prototype.processBase = function processBase(next) {
    request(this.baseUrl, function (err, response, body) {
        return next(err, body);
    });
};

/**
 * Process the internal pages
 */
Extractor.prototype.processInternal = function processInternal(cb) {
    var that = this;
    async.whilst(
        // while this condition returns true
        function () { return that.emails.length === 0 && that.urls.length > 0; },
        // do this
        function (callback) {
            request(that.urls.shift(), function (err, response, body) {
                var $ = cheerio.load(body);
                if ($(body)) {
                    that.getEmails($('body').html());
                }
                callback(); // async internal, needs to be called after we are done with our thing
            });
        },
        // call this if any errors occur. An error also stops the series
        // this is also called on successful completion of the series
        function (err) {
            cb(that);
        }
    );
};

Extractor.prototype.process = function process(next) {
    var that = this;
    this.processBase(function (err, html) {
        if (err) {
            console.log(err);
        } else {
            that.extract(html);
            if (!that.emails.length) {
                that.processInternal(function (res) {
                    return next(null, that);
                });
            }
        }
    });
};

module.exports = Extractor;
and here is how I call it:
var express = require('express');
var router = express.Router();
var Extractor = require('../services/Extractor');

router.get('/', function (req, res) {
    res.json({ msg: 'okay' });
    var extractor = new Extractor({ url: 'http://lior-197784.use1-2.nitrousbox.com:4000/crawl' });
    extractor.process(function (err, res) {});
});

module.exports = router;
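To double-check what the process itself reports (rather than Task Manager), I can log process.memoryUsage() around a run; this is only a diagnostic sketch placed around the extractor call:

function logMemory(label) {
    var mem = process.memoryUsage();
    console.log(label,
        'rss:', Math.round(mem.rss / 1024) + 'K',
        'heapUsed:', Math.round(mem.heapUsed / 1024) + 'K');
}

logMemory('before');
extractor.process(function (err, res) {
    // heapUsed dropping back down while rss stays high usually means the memory
    // is reusable by V8 and has just not been returned to the OS, not a leak
    logMemory('after');
});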