I am using the ssh2 Node.js module to connect to a UNIX application and run a script, and that works.
Now I want to connect to multiple servers one by one, get the output, and store it.
When I use a for loop to pass the servers one by one from a JSON input to ssh2, the loop completes much faster than the block that is supposed to get the output from each server.
This also causes a handshake error.
Kindly help.
Here is the code:
inc_cron.schedule("*/20 * * * * *", function(id) {
});
//inc_cron.js
var cronFunction = function(inputStr) {
    if (appNames['applications'].length > 0) {
        for (i = 0; i < appNames["applications"].length; i++) {
            getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path, function(err, data) {
                if (err) {
                    logger.info("Error is in cronFunction = ", err);
                } else if (data) {
                    output_data += data;
                }
                ssh.close_second();
            });
        }
    }
}
var getDataFromServer = function(Name, hostname, username, password, log_path, cb) {
    ssh.close_second();
    ssh.connect_second({
        host: hostname,
        username: username,
        password: password
    }, function(err) {
        if (err) {
            logger.error('Err: ', err);
        } else {
            ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
                cb(err, data);
            });
        }
    });
}
//connect.js
SSHConnection.prototype.sftp = function(type, path, cb) {
var self = this;
var log_data = '';
self.connection2.exec(path +' ' + type, { pty: true }, function(err, stream) {
if (err) {
logger.log('SECOND :: exec error: ' + err);
}
stream.on('end', function() {
self.connection2.end(); // close parent (and this) connection
}).on('data', function(data) {
logger.info(data.toString());
});
});
};
Without looking at your code in depth: be sure to handle the async behaviour of ssh2 correctly... use a promise factory.
One way to do this is to use ES7 async/await. For this you have to rewrite your getDataFromServer function to return a promise:
var getDataFromServer = function(Name, hostname, username, password, log_path) {
    return new Promise(function(resolve, reject) {
        ssh.close_second();
        ssh.connect_second({
            host: hostname,
            username: username,
            password: password
        }, function(err) {
            if (err) {
                reject(err);
            } else {
                ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
                    if (err) {
                        reject(err);
                    } else {
                        resolve(data);
                    }
                });
            }
        });
    });
}
Now you can rewrite your cron function to be an async function:
var cronFunction = async function(inputStr) {
    if (appNames['applications'].length > 0) {
        for (let i = 0; i < appNames["applications"].length; i++) {
            try {
                output_data += await getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path);
            } catch (err) {
                logger.info("Error is in cronFunction = ", err);
            }
            ssh.close_second();
        }
    }
}
async/await enables you to write async code in a synchronous coding style.
However, async/await is currently (Node 7.x) hidden behind a flag (--harmony-async-await). This feature will be enabled by default in the upcoming Node release (8.0.0) in April 2017.
So to start your app you currently have to use:
node --harmony-async-await yourapp.js
P.S.: This code is currently untested and most probably contains bugs... but you get the idea.
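If the servers could be queried in parallel instead of strictly one by one, the same promise factory also works with Promise.all. This is only a sketch, and it assumes each call gets its own SSH connection; with the single shared ssh object from the question, the sequential await version above is the safer choice:

// Parallel sketch: only valid if getDataFromServer() opens its own connection per call
var cronFunctionParallel = function(inputStr) {
    var apps = appNames["applications"];
    return Promise.all(apps.map(function(app) {
        return getDataFromServer(app.Name, app.hostname, app.username, app.password, app.log_path)
            .catch(function(err) {
                logger.info("Error is in cronFunction = ", err);
                return ''; // swallow per-server errors so the other servers still complete
            });
    })).then(function(results) {
        output_data += results.join('');
    });
};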
I'm very new to node.js so I think I'm missing something obvious here.
I'm simply trying to get a list of SQS queues using aws-sdk and return them from a module to be accessible to other code. list_queues is the function in question.
The code below works to an extent: I see a "success" log and a log of a string array of all my queues. However, the function does not return that array to the caller, and I don't understand why.
const AWS = require('aws-sdk');
AWS.config.update({region: 'eu-west-1'});
var sqs;
var sts = new AWS.STS();
sts.assumeRole({
RoleArn: 'arn:aws:iam::xxxxx:role/UserRole',
RoleSessionName: 'NodeDeveloperRoleSession'
}, function(err, data) {
if (err) { // an error occurred
console.log('Cannot assume role :(');
console.log(err, err.stack);
} else { // successful response
console.log('Assumed role success :)');
AWS.config.update({
accessKeyId: data.Credentials.AccessKeyId,
secretAccessKey: data.Credentials.SecretAccessKey,
sessionToken: data.Credentials.SessionToken
});
sqs = new AWS.SQS({apiVersion: '2012-11-05'});
}
});
exports.list_queues = function() {
sqs.listQueues({}, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("success");
console.log(data.QueueUrls);
return data.QueueUrls;
}
});
}
Any help is appreciated
exports.list_queues = function() { // 2. but you actually want to return from this one
sqs.listQueues({}, function(err, data) { <-----------------
if (err) { |
console.log("Error", err); |
} else { |
console.log("success"); |
console.log(data.QueueUrls); |
return data.QueueUrls; // 1. you are returning from this one
}
});
}
There are two ways you can make it work.
Promise based
exports.list_queues = function() {
return sqs.listQueues({}).promise().then((data) => data.QueueUrls);
}
// and in another file you would:
const {list_queues} = require('./list_queues.js');
list_queues().then((queues) => console.log(queues));
Callback based
exports.list_queues = function(cb) { // notice I added callback here
sqs.listQueues({}, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("success");
console.log(data.QueueUrls);
cb(data.QueueUrls);
}
});
}
// and in another file you would:
const {list_queues} = require('./list_queues.js');
list_queues(function(queues) {
console.log(queues);
});
I strongly recommend the promise-based approach, since it's much more readable and you can make use of async/await on it, which is great.
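For completeness, here is how the promise-based export could be consumed with async/await; a minimal usage sketch based on the list_queues export above:

// usage sketch, assuming the promise-based exports.list_queues above
const { list_queues } = require('./list_queues.js');

async function main() {
    try {
        const queues = await list_queues();
        console.log(queues);
    } catch (err) {
        console.log("Error", err);
    }
}

main();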
I want to fetch icon PNGs from GridFS out of our MongoDB database with Mongoose. These icons should then be zipped and served at a specific route.
My current code is as follows:
var zip = require("node-native-zip");
async function getZipFile() {
//get the events out of the DB
db.Category.find({}).populate('icons.file').exec(async function (err, cats) {
if (err) {
//oh oh something went wrong, better pass the error along
return ({
"success": "false",
message: err
});
}
else {
//all good, build the message and return
try {
const result = await buildZip(cats);
return ({
"success": "true",
message: result
});
}
catch (err) {
console.log("ZIP Build Failed")
}
}
});
}
async function buildZip(cats) {
let archive = new zip();
for (let i = 0; i < cats.length; i++) {
cats[i].icons.forEach(function (icon) {
if (icon.size === "3x") {
db.Attachment.readById(icon.file._id, function (err, buffer) {
if (err)
return;
archive.add(cats[i]._id + ".png", buffer);
});
}
});
//return when everything is done
if (i === cats.length - 1) {
return archive.toBuffer();
}
}
}
module.exports =
{
run: getZipFile
};
I don't want to build the zip before runtime, as I want to rename the icons according to the category ID. I tried going for an async/await structure, but my callback is being returned before the building of the zip file has even started.
I'm calling the function with
case 'categoryZip':
categoryHelper.getZipFile.run().then((result) => {
callback(result);
});
break;
This should (as far as I understood it) fire the callback when the zipping is done, but I think I'm missing something essential here.
I wrapped both your callback methods into promises, and also awaited your double for-loop of callbacks in parallel using Promise.all() since they don't rely on each other and I assume they don't need to be in any particular order in the zip file:
async function getZipFile() {
//get the events out of the DB
return new Promise((resolve, reject) => {
db.Category.find({}).populate('icons.file').exec(async function(err, cats) {
if (err) {
//oh oh something went wrong, better pass the error along
reject({
success: false,
message: err
});
} else {
//all good, build the message and return
try {
const result = await buildZip(cats);
resolve({
success: true,
message: result
});
} catch (err) {
console.log("ZIP Build Failed")
reject({
success: false,
message: err
});
}
}
});
});
}
async function buildZip(cats) {
let archive = new zip();
await Promise.all(
cats.map(cat => Promise.all(cat.icons
.filter(icon => icon.size === '3x')
.map(icon => new Promise((resolve, reject) => {
db.Attachment.readById(icon.file._id, function(err, buffer) {
if (err) return reject(err);
archive.add(cat._id + ".png", buffer);
resolve();
});
}))
))
);
return archive.toBuffer()
}
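With that in place, the calling code from the question needs two small changes: await the promise (getZipFile now resolves only once the buffer is built) and call run() directly, since the module exports { run: getZipFile }. A usage sketch, with the require path as an assumption:

const categoryHelper = require('./categoryHelper'); // path is an assumption

// mirrors the switch from the question
function handle(type, callback) {
    switch (type) {
        case 'categoryZip':
            categoryHelper.run().then((result) => { // run(), not getZipFile.run()
                callback(result);
            });
            break;
    }
}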
Given the following Node/JavaScript code, for some reason the counter variables (failedCounter, successMatchedCounter, successUnmatchedCounter) are updated in an unexpected manner when running. I think it's a matter of scope and asynchrony, but I still can't spot the cause.
UPDATE: I think I should be notified when all calls to connection.query() have finished, and only then log the counters.
See the line of "//BUG: counted numbers are not logged as expected":
var MongoClient = require('mongodb').MongoClient;
var mysql = require('mysql2');
var fs = require('fs');
var dir = './logs';
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir);
}
var testMode = false;
var mongoUrl = 'mongodb://xxx:27017/yy';
var mySqlconnString = {
host: 'xxxxx',
user: 'xxxx',
password: 'xxxxx',
database: 'xxxxx'
};
var connection = mysql.createConnection(mySqlconnString);
connection.connect(function(err) {
if (err) {
console.log('Error connecting to MySql DB');
return;
}
console.log('Connection established to MySql DB');
MongoClient.connect(mongoUrl, function(err, db) {
if (err) {
console.log('Error connecting to MongoDB');
return;
}
console.log("Connection established to MongoDB");
markSitesAsDeleted(db, function() {
console.log('closing DBs connections..');
connection.end(function(err) {});
db.close();
});
});
});
var failedCounter = 0;
var successMatchedCounter = 0;
var successUnmatchedCounter = 0;
var totalCounter = 0;
var markSitesAsDeleted = function(db, closeConnectionsCallback) {
console.log(`\nMigration process is starting..`);
var cursor = db.collection('someCollection').find({
"isDeleted": true
});
console.log(`Migrating data..\r\n`);
cursor.each(function(err, siteDoc) {
if (siteDoc != null) {
var siteID = Math.trunc(siteDoc._id)
if (testMode === false) {
connection.query(`CALL MarkSiteAsDeleted_3(${siteID})`, function(error, rows) {
if (error) {
//TODO: Print error
failedCounter++;
fs.appendFileSync('logs/log.txt', `Error occured when calling MarkSiteAsDeleted_3 SP for SiteId=${siteID}. see error: ${JSON.stringify(error)}\n`);
} else {
if (rows.affectedRows === 1) { // Has match
successMatchedCounter++;
} else {
successUnmatchedCounter++;
}
}
});
}
totalCounter++;
} else {
//BUG: counted numbers are not logged as expected
fs.appendFileSync('logs/log.txt', `Total: ${totalCounter}, Success and Matched: ${successMatchedCounter}, Success but Unmatched: ${successUnmatchedCounter}, Failed: ${failedCounter}\r\n`);
closeConnectionsCallback();
}
});
};
It sure looks like an async problem. Inside your cursor.each you have async callbacks to handle connection.query, so as each() iterates it starts queries, but all of those queries run asynchronously.
Once each() has finished setting up all the async queries, it hits your else branch, which does a synchronous write. That is where the problem occurs: some of your async query handlers have not finished when that write happens.
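One way to address it, sketched under the assumption that the surrounding markSitesAsDeleted, connection and counter variables stay as in the question, is to collect one promise per query and only write the totals once all of them have finished:

// Sketch: collect one promise per query, log the totals only after every query callback has run.
var pendingQueries = [];

cursor.each(function(err, siteDoc) {
    if (siteDoc != null) {
        var siteID = Math.trunc(siteDoc._id);
        if (testMode === false) {
            pendingQueries.push(new Promise(function(resolve) {
                connection.query(`CALL MarkSiteAsDeleted_3(${siteID})`, function(error, rows) {
                    if (error) {
                        failedCounter++;
                        fs.appendFileSync('logs/log.txt', `Error occured when calling MarkSiteAsDeleted_3 SP for SiteId=${siteID}. see error: ${JSON.stringify(error)}\n`);
                    } else if (rows.affectedRows === 1) {
                        successMatchedCounter++;
                    } else {
                        successUnmatchedCounter++;
                    }
                    resolve(); // never reject, so Promise.all waits for every query
                });
            }));
        }
        totalCounter++;
    } else {
        // cursor exhausted: wait for every query to finish before logging and closing
        Promise.all(pendingQueries).then(function() {
            fs.appendFileSync('logs/log.txt', `Total: ${totalCounter}, Success and Matched: ${successMatchedCounter}, Success but Unmatched: ${successUnmatchedCounter}, Failed: ${failedCounter}\r\n`);
            closeConnectionsCallback();
        });
    }
});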
I am trying to create a REST service using Node.js and MySQL.
This is my code:
var sqlDb = require("mysql");
var settings = require("../settings");
exports.executeSql = function (sql, callback) {
var conn = sqlDb.createConnection(settings.dbConfig);
conn.connect()
.then(function () {
var req = new sqlDb.Request(conn);
req.query(sql)
.then(function (recordset) {
callback(recordset);
})
.catch(function (err) {
console.log(err);
callback(null, err);
});
})
.catch(function (err) {
console.log(err);
callback(null, err);
});
};
But I have an error
.then(function(){
^
TypeError: cannot read property 'then' of undefined
Can anybody help me to solve this problem?
The correct function is createConnection. Read the docs please:
https://www.npmjs.com/package/mysql#introduction
If you look at the documentation, the connect method does not return a promise; it works with a callback:
var connection = mysql.createConnection({
host : 'example.org',
user : 'bob',
password : 'secret'
});
//** Look here...
connection.connect(function(err) {
if (err) {
console.error('error connecting: ' + err.stack);
return;
}
console.log('connected as id ' + connection.threadId);
});
As a good alternative, you can use the package promise-mysql, which is based on bluebird (and which I strongly suggest using), or you can wrap the connection in a promise yourself, for example:
async function connect(connection) {
return new Promise((resolve, reject) => {
connection.connect((err) => {
return err ? reject(err) : resolve(connection);
})
});
}
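A usage sketch for that wrapper; sqlDb, settings and the executeSql(sql, callback) signature are taken from the question, and the query is promisified the same way connect() was:

const sqlDb = require("mysql");
const settings = require("../settings");

// promisify a single query, mirroring the connect() wrapper above
function query(connection, sql) {
    return new Promise((resolve, reject) => {
        connection.query(sql, (err, rows) => err ? reject(err) : resolve(rows));
    });
}

exports.executeSql = async function (sql, callback) {
    const conn = sqlDb.createConnection(settings.dbConfig);
    try {
        await connect(conn); // the wrapper defined above
        const recordset = await query(conn, sql);
        callback(recordset);
    } catch (err) {
        console.log(err);
        callback(null, err);
    } finally {
        conn.end();
    }
};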
Is it sensible to use Node.js to write a stand-alone app that will connect two REST APIs?
One end will be a POS (point of sale) system.
The other will be a hosted eCommerce platform.
There will be a minimal interface for configuration of the service, nothing more.
Yes, Node.js is perfectly suited to making calls to external APIs. Just like everything in Node, however, the functions for making these calls are based around events, which means doing things like buffering response data as opposed to receiving a single completed response.
For example:
// get walking directions from central park to the empire state building
var http = require("http");
url = "http://maps.googleapis.com/maps/api/directions/json?origin=Central Park&destination=Empire State Building&sensor=false&mode=walking";
// get is a simple wrapper for request()
// which sets the http method to GET
var request = http.get(url, function (response) {
// data is streamed in chunks from the server
// so we have to handle the "data" event
var buffer = "",
data,
route;
response.on("data", function (chunk) {
buffer += chunk;
});
response.on("end", function (err) {
// finished transferring data
// dump the raw data
console.log(buffer);
console.log("\n");
data = JSON.parse(buffer);
route = data.routes[0];
// extract the distance and time
console.log("Walking Distance: " + route.legs[0].distance.text);
console.log("Time: " + route.legs[0].duration.text);
});
});
It may make sense to find a simple wrapper library (or write your own) if you are going to be making a lot of these calls.
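Such a wrapper can be very small. A sketch of a promise-returning GET helper built around the same http.get call (it assumes the endpoint returns JSON, like the directions example above):

var http = require("http");

// minimal reusable GET-as-JSON helper (sketch only)
function getJSON(url) {
    return new Promise(function (resolve, reject) {
        http.get(url, function (response) {
            var buffer = "";
            response.on("data", function (chunk) {
                buffer += chunk;
            });
            response.on("end", function () {
                try {
                    resolve(JSON.parse(buffer));
                } catch (parseErr) {
                    reject(parseErr);
                }
            });
        }).on("error", reject);
    });
}

// usage
getJSON("http://maps.googleapis.com/maps/api/directions/json?origin=Central%20Park&destination=Empire%20State%20Building&sensor=false&mode=walking")
    .then(function (data) {
        console.log("Walking Distance: " + data.routes[0].legs[0].distance.text);
    })
    .catch(console.error);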
Sure. The node.js API contains methods to make HTTP requests:
http.request
http.get
I assume the app you're writing is a web app. You might want to use a framework like Express to remove some of the grunt work (see also this question on node.js web frameworks).
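If you do end up exposing that minimal configuration interface over HTTP, the Express skeleton stays very small. A sketch only; the route name, port and API URL below are placeholders, not part of the original question:

var express = require("express");
var http = require("http");

var app = express();

// placeholder route that pulls data from one REST API;
// pushing it on to the other API follows the same pattern
app.get("/sync", function (req, res) {
    http.get("http://pos.example.com/api/orders", function (apiResponse) {
        var buffer = "";
        apiResponse.on("data", function (chunk) { buffer += chunk; });
        apiResponse.on("end", function () {
            res.json(JSON.parse(buffer));
        });
    }).on("error", function (err) {
        res.status(502).send(err.message);
    });
});

app.listen(3000);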
/* The sample GET API below covers:
   - DB connection created in a class
   - a common function to execute the query
   - logging through the bunyan library */
const express = require('express');
const { APIResponse } = require('./../commonFun/utils');
const createlog = require('./../lib/createlog');
const { selectQueryData } = require('./../commonFun/queryTransaction'); // path assumed; file shown below
const DB = require('./../lib/db'); // path assumed; the DB class is shown below

const routes = express.Router();
const dbObj = new DB();

//Test API
routes.get('/testapi', (req, res) => {
    res.status(201).json({ message: 'API microservices test' });
});
routes.get('/getStore', (req, res) => {
try {
//create DB instance
const store_id = req.body.storeID;
const promiseReturnwithResult = selectQueryData('tablename', store_id, dbObj.conn); // pass the id from the request as the WHERE value ('tablename' stays a placeholder)
(promiseReturnwithResult).then((result) => {
APIResponse(200, 'Data fetched successfully', result).then((result) => {
res.send(result);
});
}).catch((err) => { console.log(err); throw err; })
} catch (err) {
console.log('Exception caught in getuser API', err);
const e = new Error();
if (err.errors && err.errors.length > 0) {
e.Error = 'Exception caught in getuser API';
e.message = err.errors[0].message;
e.code = 500;
res.status(404).send(APIResponse(e.code, e.message, e.Error));
createlog.writeErrorInLog(err);
}
}
});
//create connection
"use strict"
const mysql = require("mysql");
class DB {
constructor() {
this.conn = mysql.createConnection({
host: 'localhost',
user: 'root',
password: 'pass',
database: 'db_name'
});
}
connect() {
this.conn.connect(function (err) {
if (err) {
console.error("error connecting: " + err.stack);
return;
}
console.log("connected to DBB");
});
}
//End class
}
module.exports = DB
//queryTransaction.js File
const selectQueryData = (table, where, db_conn) => {
    return new Promise(function(resolve, reject) {
        try {
            // NOTE: interpolating values straight into SQL is open to injection;
            // with the mysql package, placeholders are safer, e.g.
            // db_conn.query('SELECT * FROM ?? WHERE id = ?', [table, where], ...)
            db_conn.query(`SELECT * FROM ${table} WHERE id = ${where}`, function(err, result) {
                if (err) {
                    reject(err);
                } else {
                    resolve(result);
                }
            });
        } catch (err) {
            console.log(err);
        }
    });
}
module.exports = { selectQueryData };
//utils.js file
const APIResponse = async (status, msg, data = '', error = null) => {
try {
if (status) {
return { statusCode: status, message: msg, PayLoad: data,error:error }
}
} catch (err) {
console.log('Exception caught in getuser API', err);
}
}
module.exports={
logsSetting: {
name: "USER-API",
streams: [
{
level: 'error',
path: '' // log ERROR and above to a file
}
],
},APIResponse
}
//createlogs.js File
var bunyan = require('bunyan');
const dateFormat = require('dateformat');
const {logsSetting} = require('./../commonFun/utils');
module.exports.writeErrorInLog = (customError) => {
let logConfig = {...logsSetting};
console.log('reached in writeErrorInLog',customError)
const currentDate = dateFormat(new Date(), 'yyyy-mm-dd');
const path = logConfig.streams[0].path = `${__dirname}/../log/${currentDate}error.log`;
const log = bunyan.createLogger(logConfig);
log.error(customError);
}
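For completeness, a wiring sketch showing how the pieces above could be mounted; the file names, the port and the assumption that the routes file ends with module.exports = routes are mine, not part of the original:

// app.js (sketch): mount the router that defines /testapi and /getStore
const express = require('express');
const routes = require('./routes'); // assumes the routes file above ends with: module.exports = routes

const app = express();
app.use(express.json()); // needed so req.body.storeID is populated in /getStore
app.use('/', routes);

app.listen(3000, () => console.log('API listening on port 3000'));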
An easier and more useful option is an API client like Unirest; it is a package on npm that is very easy to use, for example:
var unirest = require('unirest'); // assumes the unirest package is installed

app.get('/any-route', function(req, res){
    unirest.get("https://rest.url.to.consume/param1/paramN")
        .header("Any-Key", "XXXXXXXXXXXXXXXXXX")
        .header("Accept", "text/plain")
        .end(function (result) {
            res.render('name-of-the-page-according-to-your-engine', {
                layout: 'some-layout-if-you-want',
                markup: result.body.anyProperty // placeholder: pick whatever property you need from the response
            });
        });
});