Node.JS RemoteExec call not firing properly - javascript

Querying a database for a list of servers to perform a command on. The array is populated properly and echoes out as planned, but none of the connections occur. I tried both passing the array directly into rexec and looping through forEachAsync. Neither processes the server list properly. Am I referencing the array elements improperly?
Mind the syntax errors at the end; I was just trying to include both methods I tried.
#!
var mysql = require('mysql');
var resultset = require('node-array');
var rexec = require('remote-exec');
var fs = require('fs');
var _ = require('lodash');
//var streamBuffers = require('stream-buffers');
var moment = require('moment');
var util = require('util');

var now = moment().format('YYYYMMDD_HHmmss');
var logStdout = process.stdout;
var errStderr = process.stderr;

console.log = function () {
    logStdout.write(util.format.apply(null, arguments) + '\n');
};
console.error = function () {
    errStderr.write(util.format.apply(null, arguments) + '\n');
};

var connection = mysql.createConnection({
    host: 'abc',
    user: 'user',
    password: '******',
    database: 'db'
});

var ssh_options = {
    port: 22,
    username: 'e109gh',
    privateKey: fs.readFileSync('R:/nodeJS/sshkey.priv'),
    stdout: fs.createWriteStream('./out.txt'),
    stderr: fs.createWriteStream('./err.txt')
};

var my_conn_options = _.clone(ssh_options);
var cmds = ['hostname -i'];

connection.query('SELECT name FROM server', function(err, rows) {
    rows.forEachAsync(function(element, index, array) {
        console.log(element.name);
        rexec(element.name, cmds, my_conn_options, function(err) {
            if (err) {
                now = moment().format('YYYY-MM-DD HH:mm:ss');
                console.error(err);
            } else {
                console.log("it worked for " + element.name);
            }
        });
    });
});
// var buffer = new streamBuffers.WritableStreamBuffer();
connection.end(function(err) {});
// my_conn_options.stdout = buffer;
//
// rexec(rows,cmds,my_conn_options,function(err){
// if (err) {
// now = moment().format('YYYY-MM-DD HH:mm:ss');
// console.error(err);
// } else {
// console.log()
// }
// });
//
//});
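For what it's worth, here is a minimal sketch of the same flow without node-array: the rows returned by the mysql driver are a plain array, so a regular forEach works, and the connection is only ended once the query callback has run. It reuses the connection, cmds, my_conn_options and rexec variables from the script above, and is one thing to try rather than a confirmed fix.
connection.query('SELECT name FROM server', function(err, rows) {
    if (err) {
        console.error(err);
        return connection.end();
    }
    rows.forEach(function(row) {
        console.log(row.name);
        rexec(row.name, cmds, my_conn_options, function(rexecErr) {
            if (rexecErr) {
                console.error('failed for ' + row.name, rexecErr);
            } else {
                console.log('it worked for ' + row.name);
            }
        });
    });
    connection.end();
});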

Related

Node.JS - want to move output off console into log/err files

Can someone give me a hand with converting the following code from console output to file output? I'm struggling with logging and the asynchronous nature of Node. The script works great in a console, but I'd like to pipe the sorted output into individual server sections within a file with STDERR going to another file.
var rexec = require('remote-exec');
var fs = require('fs');
var lineReader = require('line-reader');
var streamBuffers = require('stream-buffers');
var _ = require('lodash');

var conn_options = {
    port: 22,
    username: '*****',
    privateKey: fs.readFileSync('R:/nodeJS/sshkey.priv')
};

// something that dumps out a bunch of data...
var cmds = ['df'];
var filename = 'servers.txt';

lineReader.eachLine(filename, function(line, last, cb) {
    var buffer = new streamBuffers.WritableStreamBuffer();
    var my_conn_options = _.clone(conn_options);

    rexec(line, cmds, my_conn_options, function(err) {
        if (err) {
            console.log(line, err);
        } else {
            console.log('>>>> Start: ' + line + '<<<<');
            console.log(buffer.getContentsAsString());
            console.log('>>>> End: ' + line + '<<<<');
        }
    });

    if (last) {
        cb(false); // stop reading
    } else {
        cb();
    }
});
Check this example; it should help:
var fs = require('fs');
var util = require('util');

var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;

console.log = function () {
    logFile.write(util.format.apply(null, arguments) + '\n');
    logStdout.write(util.format.apply(null, arguments) + '\n');
};
console.error = console.log;
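If STDERR should end up in a separate file rather than the same log, a small variation of the example above is to give console.error its own write stream. The file names here are placeholders, and you can keep the logStdout echo from the example above if you still want output in the console.
var fs = require('fs');
var util = require('util');

var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
var errFile = fs.createWriteStream('err.txt', { flags: 'a' });

console.log = function () {
    logFile.write(util.format.apply(null, arguments) + '\n');
};
console.error = function () {
    errFile.write(util.format.apply(null, arguments) + '\n');
};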

Querying embedded mongodb with node js

I am trying to fetch a value from my age collection and, if the condition if(age===5) holds, fetch data from the other collection (question) and send it to the browser. But it shows me this error:
Error While Saving the result TypeError: Cannot read property 'age_group_5' of undefined
Code:
1). Node js
var agegroupQuiz = require('../models/agegroupSchema.js');
var questionQuiz = require('../models/question.js');

exports.agegroupController = function(req, res, next) {
    try {
        //var age=5;
        var queryObj = {};
        var projection1 = '-_id, age_group.age_group_5.max_age';
        var projection2 = '-_id question';

        //agegroup Schema
        var a = agegroupQuiz.findOne(queryObj, projection1);
        console.log(a.age_group.age_group_5.max_age);
        var age = a.age_group.age_group_5.max_age;

        if (age === 5) {
            //question Schema
            questionQuiz.find(queryObj, projection2, function(err, data) {
                if (err) {
                    console.log('getQuestions : Error while getting Questions ' + err);
                    return next(err);
                }
                //console.log(question);
                res.send(data);
            });
        } else {
            console.log("error");
        }
    } catch (err) {
        console.log('Error While Saving the reuslt ' + err);
        return next(err);
    }
};
/* exports.agemethod = function(req, res, next){
}*/
2). Mongodb Schema
a)
var mongoose = require('mongoose');

module.exports = (function question() {
    var Schema = mongoose.Schema;
    var question = new Schema({
        question: {
            type: Array,
            _id: {type: Number},
            title: {type: String},
            options: {type: Array},
            result: {type: Array},
            feedback: {type: String}
        },
        metadata: {
            type: String,
            category: String,
            age_group: String,
            location: String
        }
    });
    var results = mongoose.model('userquestion', question);
    return results;
})();

b)
var mongoose = require('mongoose');

module.exports = (function agegroup() {
    var Schema = mongoose.Schema;
    var agegroup = new Schema({
        age_group: {
            age_group_5: {
                _id: {type: String},
                max_age: {type: Number}
            }
        }
    });
    var results = mongoose.model('age', agegroup);
    return results;
})();
I would recommend you try this code; hopefully it will work.
var agegroupQuiz = require('../models/agegroupSchema.js');
var questionQuiz = require('../models/question.js');

exports.agegroupController = function(req, res, next) {
    //var age=5;
    var queryObj1 = {};   // query for the age group document; add keys to match your schema
    var queryObj2 = {};   // query for the questions
    var projection2 = '-_id question';

    //agegroup Schema
    return agegroupQuiz.findOne(queryObj1)
        .then(function(doc) {
            // findOne resolves with the document itself, so read the nested field here;
            // adjust the path to match your schema
            var age = doc.age_group.age_group_5.max_age;
            if (age == 5) {
                questionQuiz.find(queryObj2, projection2, function(err, data) {
                    if (err) {
                        console.log('getQuestions : Error while getting Questions ' + err);
                        return next(err);
                    }
                    //console.log(question);
                    res.send(data);
                });
            } else {
                console.log("Error");
            }
        })
        .catch(function(error) {
            console.log('Error While Saving the result ' + error);
            return next(error);
        });
};
I may have missed some curly braces, so please verify that, and make sure you use the correct keys from your model.
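If you prefer to stay with callbacks instead of promises, here is a minimal sketch of the same fix under the schemas shown in the question: with the callback API, findOne only hands you the document inside its callback (or after exec()), which is why reading a.age_group directly off the query fails in the original code. The query objects and projections below are simplified placeholders.
var agegroupQuiz = require('../models/agegroupSchema.js');
var questionQuiz = require('../models/question.js');

exports.agegroupController = function(req, res, next) {
    agegroupQuiz.findOne({}, 'age_group.age_group_5.max_age', function(err, doc) {
        if (err || !doc) {
            return next(err || new Error('age group not found'));
        }
        var age = doc.age_group.age_group_5.max_age;
        if (age === 5) {
            questionQuiz.find({}, '-_id question', function(findErr, data) {
                if (findErr) {
                    return next(findErr);
                }
                res.send(data);
            });
        } else {
            res.status(404).send('no matching age group');
        }
    });
};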

How to pass parameter between two javascript files in express.js

I am using Express.js and I want to pass a parameter from one JavaScript file to another. How can I achieve this?
The two files are: 1. process.js
var WebPageTest = require('webpagetest');
var wpt = new WebPageTest('server address');
var data_url;

exports.process = function (req, res) {
    //Running the test
    wpt.runTest(script, {runs: 1}, function(err, data) {
        console.log("<----STARTING TEST---->");
        if (err) { console.log(err); }
        data_url = data.data.summaryCSV;
        console.log('-----------');
        console.log(data_url);
        console.log('-----------');
    });
    res.render('index.jade', {par: 'welcome to webpagetest performance, the tests are running in background.'});
};
And here is 2. storedata.js:
var request = require('request');
var CSV = require('csv-string');
var moment = require('moment');
var process = require('./process.js');

exports.storedata = function(req, res) {
    var URL;
    var loadTime;
    var TTFB;
    var TTFB1;
    var date;
    var date1;
    var date2;
    var db;

    console.log(process.process.data_url);

    request({uri: process.process.data_url, method: 'GET'}, function (error, response, body) {
        //console.log('----######----');
        //console.log(response.headers);
        console.log('----######----');
        //console.log(response);
        if (error) {
            console.log('got an error' + error);
        }
        //console.log(response);
        //console.log(body);
        var data = body;
        console.log('here is the body');
        console.log('----######----');
        console.log(body);

        CSV.forEach(data, ',', function (row, index) {
            if (index == 1 || index == 2) {
                URL = row[0];
                loadTime = row[1];
                TTFB = row[2];
                TTFB1 = parseInt(TTFB);
                date = new Date(row[59] * 1000);
                month = date.getUTCMonth() + 1;
                month = month.toString();
                var day = date.getUTCDate();
                day = day.toString();
                var year = date.getUTCFullYear();
                year = year.toString();
                date = year + "-" + month + "-" + day;
                date1 = new Date(date);
                date2 = moment(date1).format('YYYY-MM-DD');
                //console.log(loadTime);
                var app_re = new RegExp(/^https\:\/\/some-url/);
                var staging_re = new RegExp(/^https\:\/\/some-url2/);
                var webuinqa_re = new RegExp(/^https\:\/\/some-url3/);

                // Writing into the database for some-url
                if (app_re.test(URL)) {
                    var db = req.db;
                    var collection = db.get('app');
                    collection.insert({
                        "Date": date2,
                        "TTFB": TTFB1,
                        "loadTime": loadTime,
                        "Url": URL
                    }, function (err, doc) {
                        if (err) {
                            res.send("There was a problem adding the information to the database.");
                        }
                    });
                }

                // Writing into the database for some-url2
                if (staging_re.test(URL)) {
                    var db = req.db;
                    var collection = db.get('staging');
                    collection.insert({
                        "Date": date2,
                        "TTFB": TTFB1,
                        "loadTime": loadTime,
                        "Url": URL
                    }, function (err, doc) {
                        if (err) {
                            res.send("There was a problem adding the information to the database.");
                        }
                    });
                }

                // Writing into the database for some-url3
                if (webuinqa_re.test(URL)) {
                    var db = req.db;
                    var collection = db.get('webuinqa');
                    collection.insert({
                        "Date": date2,
                        "TTFB": TTFB1,
                        "loadTime": loadTime,
                        "Url": URL
                    }, function (err, doc) {
                        if (err) {
                            res.send("There was a problem adding the information to the database.");
                        }
                    });
                }

                res.render('index', {title: "All the test Results have been added to the databases, Go to localhost/getData to get the graph"});
                //res.redirect('/getData');
            }
        });
    });
};
I want to pass the parameter data_url from process.js to storedata.js, so that I can use the value of data_url in the request call in storedata.js.
You could try something like this:
In your storedata.js
module.exports = function(data_url) {
// ...
}
In process.js:
var request = require('request');
var CSV = require('csv-string');
var moment = require('moment');
// The path like this assumes storedata.js and process.js
// are in the same folder.
var storeData = require('./storedata');

exports.process = function (req, res) {
    var URL;
    var loadTime;
    var TTFB;
    var TTFB1;
    var date;
    var date1;
    var date2;
    var db;
    var data_url;

    // initiating the test
    var WebPageTest = require('webpagetest');
    var wpt = new WebPageTest('server-address');

    //Running the test
    wpt.runTest(script, function(err, data) {
        //console.log("hello -->",err || data);
        data_url = data.data.summaryCSV;
        console.log('-----------');
        console.log(data_url);
        console.log('-----------');
        // Once your data_url is ready
        storeData(data_url);
    });
};
UPDATE:
Based on your comments, here is a possible solution.
var WebPageTest = require('webpagetest');
var wpt = new WebPageTest('server address');
var data_url;

exports.process = function (req, res) {
    //Running the test
    wpt.runTest(script, {runs: 1}, function(err, data) {
        console.log("<----STARTING TEST---->");
        if (err) { console.log(err); }
        data_url = data.data.summaryCSV;
        console.log('-----------');
        console.log(data_url);
        console.log('-----------');
    });
    res.render('index.jade', {par: 'welcome to webpagetest performance, the tests are running in background.'});
};

// Create another method that is able to return the saved data.
exports.getSavedDataURL = function() { return data_url; };
Then in storedata.js
exports.storedata = function(req, res) {
    var URL;
    var loadTime;
    var TTFB;
    var TTFB1;
    var date;
    var date1;
    var date2;
    var db;

    var url = process.getSavedDataURL();
    request({uri: url, method: 'GET'}, function (error, response, body) {
        // ...the rest of your existing request handling goes here
    });
};
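To make the first suggestion concrete, here is a minimal sketch of the callback-style wiring between the two files. The WebPageTest pieces (wpt and script) are assumed to be set up as in the question, and storeData receives data_url as an argument instead of reaching back into process.js.
// storedata.js
var request = require('request');

module.exports = function storeData(data_url) {
    request({uri: data_url, method: 'GET'}, function (error, response, body) {
        if (error) {
            return console.log('got an error ' + error);
        }
        // parse the CSV body and write it to the database here, as in the original storedata.js
        console.log(body);
    });
};

// process.js
var storeData = require('./storedata');
// wpt and script are assumed to be set up as in the question
exports.process = function (req, res) {
    wpt.runTest(script, {runs: 1}, function(err, data) {
        if (err) { return console.log(err); }
        storeData(data.data.summaryCSV); // hand the value over once it exists
    });
    res.render('index.jade', {par: 'the tests are running in the background.'});
};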

Reading a file line by line, parse them and insert them in mongo in node js

I have a tab-separated file with thousands of rows. How can I use Node.js to read the file line by line, parse each line into an object, and insert it into a Mongo DB? I am just learning Node and Mongo and come from a different background, so how can this be done? In the end the Mongo DB has to be populated with the proper data. I searched the net but could not find a complete solution.
Thanks.
I had an issue with the answer by Juvenik. My problem was that the database would not be populated by the time readline had completed. The lines were being read synchronously, but the DB insertion was asynchronous.
Instead, I found a simpler solution with the line-reader package. It reads the lines and waits for a callback before continuing.
var MongoClient = require('mongodb').MongoClient
var dbName = 'yourDbName'
var url = 'mongodb://localhost:27017/' + dbName
var collectionName = 'yourCollectionName'
var filename = 'yourFileName.txt'
var printLine = 1000

MongoClient.connect(url, function(err, db) {
    if (err) {
        console.error('Problem connecting to database')
    } else {
        console.log('Connected correctly to server.')

        var lineReader = require('line-reader')
        var collection = db.collection(collectionName)
        var lineNum = -1
        var headers = []

        lineReader.eachLine(filename, function(line, last, cb) {
            lineNum++
            try {
                var split = line.split('\t')
                var object = {}

                if (lineNum > 0) {
                    for (var i = 0; i < split.length; i += 1) {
                        object[headers[i]] = split[i]
                    }
                    collection.insert(object, function (insertErr, insertObj) {
                        if (insertErr) console.error(insertErr)
                        if (lineNum % printLine === 0) console.log('Line ' + lineNum)
                        if (last) {
                            console.log('Done with ' + filename + ' (' + lineNum + ' records)')
                            process.exit(0)
                        } else {
                            cb()
                        }
                    })
                } else {
                    headers = line.split('\t')
                    cb()
                }
            } catch (lineError) {
                console.error(lineError)
            }
        })
    }
})
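One caveat if you are on a newer mongodb driver (3.x or later): the connect callback hands you a client rather than a db, so the example above needs a small adjustment, roughly like this (same variables as above):
MongoClient.connect(url, function(err, client) {
    if (err) throw err
    var db = client.db(dbName)                     // 3.x gives you a client, not a db
    var collection = db.collection(collectionName)
    // ...the same line-reader loop as above...
    // and call client.close() instead of process.exit(0) when done
})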
I came across a similar problem, and this approach worked for me. Have a look; it might be helpful.
var mongoDb = require('mongodb');
var mongoClient = mongoDb.MongoClient;
var dbname = 'YOUR_DB_NAME';
var collectionName = 'YOUR_COLLECTION_NAME';
var url = 'mongodb://localhost:27017/' + dbname;
var filename = 'FIle_Name.txt';

console.log('***************Process started');

mongoClient.connect(url, function(err, db) {
    if (err) {
        console.log('error on connection ' + err);
    } else {
        console.log('***************Successfully connected to mongodb');

        var collection = db.collection(collectionName);
        var fs = require('fs');
        var readline = require('readline');
        var stream = require('stream');
        var instream = fs.createReadStream(filename);
        var outstream = new stream;
        var rl = readline.createInterface(instream, outstream);

        console.log('***************Parsing, please wait ...');

        rl.on('line', function(line) {
            try {
                var arr = line.split('\t');
                var object = {};
                //Parse them here
                //Example
                object['name'] = arr[0]; //Just an example
                var res = collection.insert(object);
            } catch (err) {
                console.log(err);
            }
        });

        rl.on('close', function() {
            db.close();
            console.log('***************completed');
        });
    }
});
I am a learner too. If someone can improve this, that would be good.
Here is a more performant (inserting batches of objects) and updated version (using async and a recent mongo driver) of frank-0's answer:
const lineReader = require('line-reader');

async function readFileAndInsertInMongo(file, collection) {
    let total = 0;
    return new Promise((resolve, reject) => {
        let buffer = [];
        lineReader.eachLine(file, (line, last, cb) => {
            // prepare your object based on the line content
            let insertObject = {'some_content': 'some_value'};
            buffer.push(insertObject);
            total++;
            if (total % 10000 === 0 || last) {
                collection.insertMany(buffer, function(err, res) {
                    if (last) {
                        if (err) {
                            reject(err);
                        } else {
                            resolve(res);
                        }
                    } else {
                        buffer = [];
                        return cb();
                    }
                });
            } else {
                return cb();
            }
        });
    });
}
This really is the best solution I have found to parse huge files and insert them in the database without exploding Node's memory. Hope this can help ;)
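For context, here is a possible way to call it, assuming the collection parameter shown in the version above and a local MongoDB; the connection string and names are placeholders:
const { MongoClient } = require('mongodb');

async function run() {
    const client = await MongoClient.connect('mongodb://localhost:27017');
    const collection = client.db('yourDbName').collection('yourCollectionName');
    try {
        const result = await readFileAndInsertInMongo('yourFileName.txt', collection);
        console.log('last batch result:', result);
    } finally {
        await client.close();
    }
}

run().catch(console.error);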

Global Dictionary/Scope Issue

I've defined a global dictionary, and I want to add to it within a MySQL connection block. Problem is, once outside of that block, the dictionary appears empty. This feels like a basic scope problem, but it seems odd that things added to the dictionary won't stay put, no?
Code:
var express = require("express");
var http = require("http");
var mysql = require("mysql");

objects = {};

getBiz = function() {
    var connection = mysql.createConnection({
        host: "localhost",
        user: "APIUser",
        password: "password"
    });
    connection.query("USE biz");

    var bizQuery = "SELECT * FROM biz";
    var bizObjects = [];

    connection.query(bizQuery, function(err, bizRows) {
        if (err) {
            throw err;
        } else {
            for (bizRow in bizRows) {
                var bizObject = {};
                bizObject['id'] = bizRows[bizRow]['id'];
                bizObject['biz_name'] = bizRows[bizRow]['biz_name'];
                bizObjects.push(bizObject);
            }
        }
        objects['biz'] = bizObjects;
        console.log(objects); // prints the objects
    });
    console.log(objects); // prints {}
};

var app = express();

app.get("/", function(req, res) {
    res.send(getBiz());
});

var server = app.listen(8888, function() {
    console.log("Listening........");
});
Your code is synchronous in style when it should be asynchronous. Use callbacks:
getBiz = function(onGetObjects) {
    var connection = mysql.createConnection({
        host: "localhost",
        user: "APIUser",
        password: "password"
    });
    connection.query("USE biz");

    var bizQuery = "SELECT * FROM biz";
    var bizObjects = [];

    connection.query(bizQuery, function(err, bizRows) {
        if (err) {
            throw err;
        } else {
            for (bizRow in bizRows) {
                var bizObject = {};
                bizObject['id'] = bizRows[bizRow]['id'];
                bizObject['biz_name'] = bizRows[bizRow]['biz_name'];
                bizObjects.push(bizObject);
            }
        }
        objects['biz'] = bizObjects;
        onGetObjects(objects); // hand the populated objects to the callback
    });
    console.log(objects); // will definitely print {}
};

function onGetObjects(obj) {
    this.objects = obj;
    console.log(objects); // should give you the correct objects
}

var app = express();

// pass in the callback
app.get("/", function(req, res) {
    res.send(getBiz(onGetObjects));
});

var server = app.listen(8888, function() {
    console.log("Listening........");
});
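Note that res.send(getBiz(onGetObjects)) still responds with undefined, because getBiz itself returns nothing. A common variation on the callback approach above is to let the route hand in a callback that sends the response itself:
app.get("/", function(req, res) {
    getBiz(function(objects) {
        res.send(objects); // respond only after the query callback has populated objects
    });
});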
