Node.js REST service - JavaScript

I am trying to create a REST service using Node.js and MySQL.
This is my code:
var sqlDb = require("mysql");
var settings = require("../settings");

exports.executeSql = function (sql, callback) {
  var conn = sqlDb.createConnection(settings.dbConfig);
  conn.connect()
    .then(function () {
      var req = new sqlDb.Request(conn);
      req.query(sql)
        .then(function (recordset) {
          callback(recordset);
        })
        .catch(function (err) {
          console.log(err);
          callback(null, err);
        });
    })
    .catch(function (err) {
      console.log(err);
      callback(null, err);
    });
};
But I get this error:
.then(function(){
^
TypeError: cannot read property 'then' of undefined
Can anybody help me to solve this problem?

The correct function is createConnection. Read the docs please:
https://www.npmjs.com/package/mysql#introduction

If you look at the documentation, the connect method does not return a promise; it works with a callback:
var connection = mysql.createConnection({
  host     : 'example.org',
  user     : 'bob',
  password : 'secret'
});

//** Look here...
connection.connect(function(err) {
  if (err) {
    console.error('error connecting: ' + err.stack);
    return;
  }
  console.log('connected as id ' + connection.threadId);
});
As a good alternative, you can use the promise-mysql package, which is built on Bluebird (and which I strongly suggest using), or you can wrap the connection in a promise yourself, for example:
async function connect(connection) {
  return new Promise((resolve, reject) => {
    connection.connect((err) => {
      return err ? reject(err) : resolve(connection);
    })
  });
}
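Putting the two together, here is a rough, untested sketch of how the executeSql function from the question could look with the promise wrapper above. It assumes the same ../settings module from the question, and it uses mysql's own callback-style query() rather than the sqlDb.Request API (which belongs to the mssql package, not mysql):

var sqlDb = require("mysql");
var settings = require("../settings");

exports.executeSql = function (sql, callback) {
  var conn = sqlDb.createConnection(settings.dbConfig);
  // connect() wrapped in a promise, as defined above
  connect(conn)
    .then(function (connection) {
      // mysql's query() takes a callback instead of returning a promise
      connection.query(sql, function (err, recordset) {
        connection.end();
        if (err) {
          console.log(err);
          callback(null, err); // keeps the question's (result, error) callback convention
        } else {
          callback(recordset);
        }
      });
    })
    .catch(function (err) {
      console.log(err);
      callback(null, err);
    });
};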

Related

How to return a list of SQS queues in a module exports function?

I'm very new to node.js so I think I'm missing something obvious here.
I'm simply trying to get a list of SQS queues using aws-sdk and return them from a module to be accessible to other code. list_queues is the function in question.
The code below works to an extent: I see a "success" log and a log of a string array of all my queues. However, the function does not return that array to the caller, and I don't understand why.
const AWS = require('aws-sdk');

AWS.config.update({region: 'eu-west-1'});

var sqs;
var sts = new AWS.STS();

sts.assumeRole({
  RoleArn: 'arn:aws:iam::xxxxx:role/UserRole',
  RoleSessionName: 'NodeDeveloperRoleSession'
}, function(err, data) {
  if (err) { // an error occurred
    console.log('Cannot assume role :(');
    console.log(err, err.stack);
  } else { // successful response
    console.log('Assumed role success :)');
    AWS.config.update({
      accessKeyId: data.Credentials.AccessKeyId,
      secretAccessKey: data.Credentials.SecretAccessKey,
      sessionToken: data.Credentials.SessionToken
    });
    sqs = new AWS.SQS({apiVersion: '2012-11-05'});
  }
});

exports.list_queues = function() {
  sqs.listQueues({}, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      console.log("success");
      console.log(data.QueueUrls);
      return data.QueueUrls;
    }
  });
}
Any help is appreciated
exports.list_queues = function() { // 2. but you actually want to return from this one
sqs.listQueues({}, function(err, data) { <-----------------
if (err) { |
console.log("Error", err); |
} else { |
console.log("success"); |
console.log(data.QueueUrls); |
return data.QueueUrls; // 1. you are returning from this one
}
});
}
There are two ways you can make it work:
Promise based
exports.list_queues = function() {
  return sqs.listQueues({}).promise().then((data) => data.QueueUrls);
}
// and in another file you would:
const {list_queues} = require('./list_queues.js');
list_queues().then((queues) => console.log(queues));
Callback based
exports.list_queues = function(cb) { // notice I added callback here
  sqs.listQueues({}, function(err, data) {
    if (err) {
      console.log("Error", err);
    } else {
      console.log("success");
      console.log(data.QueueUrls);
      cb(data.QueueUrls);
    }
  });
}
// and in another file you would:
const {list_queues} = require('./list_queues.js');
list_queues(function(queues) {
  console.log(queues);
});
I strongly recommend the promise-based approach, since it's much more readable and you can make use of async/await on it, which is great.
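For completeness, here is a small untested sketch of a caller that uses async/await with the promise-based version (printQueues is just a hypothetical example function):

const {list_queues} = require('./list_queues.js');

async function printQueues() {
  try {
    const queues = await list_queues();
    console.log(queues);
  } catch (err) {
    console.log("Error", err);
  }
}

printQueues();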

NODE JS | MS SQL API facing issues when multiple connections hit the API

I just want to be as clear as I can. I have an MS SQL Node.js API through which I interact with my Android and desktop applications. Currently it's working fine, but it is not using a connection pool. I think that is why, when more people use my app, it just doesn't respond and gives an error more like:
Connection already exists close SQL.close() first
So I was planning on upgrading my API to use a connection pool, which means more people could connect to my API simultaneously. Right?
So here is my DB connection code; the connection config and the query handler look like this:
Connection config:
var dbConfig = {
  user: 'sa',
  password: "pmis13",
  server: '19',
  database: 'CUBES_HO',
};
Query handler:
function executeQuery(query) {
  return new Promise((resolve, reject) => {
    sql.connect(dbConfig, function (err) {
      if (err) {
        reject(err);
        sql.close();
      } else {
        // create Request object
        var request = new sql.Request();
        // query to the database and get the records
        request.query(query, function (err, data) {
          if (err) {
            reject(err);
            sql.close();
          } else {
            resolve(data);
            sql.close();
          }
        });
      }
    });
  });
}
And a route that uses it looks like this:
app.get("/dailysale/:date", function (req, res) {
var query = "SELECT SUM(bill_amt) AS totalSale FROM [CUBES_HO].[dbo].[vw_bill_summary] where inv_loc_key = 2 and bill_sale_date = '"+req.params.date+"'";
executeQuery(query)
.then((data) => {
res.status(200).send({ "msg": "Records fetched", "data": data.recordsets });
}).catch((err) => {
res.status(500).json({ "msg": err.message });
});});
I want to convert, or we could say upgrade, this API to use a connection pool, which sounds more reliable for multiple connections. Correct me if I am wrong.
I couldn't put this link in the comments section, so I'm posting it here. This answer on SO explains the difference between mysql.createConnection and mysql.createPool.
An example to help you create a connection pool:
const pool = new sql.ConnectionPool({
  user: '...',
  password: '...',
  server: 'localhost',
  database: '...'
})
Found it here.
I found a workaround by doing this:
var config = {
  user: 'sa',
  password: "pdt09",
  server: '3',
  database: 'CUBES',
  options: {encrypt: true}
};

async function executeQuery(sqlquery) {
  return new Promise(function(resolve, reject) {
    (async () => {
      const pool = new sql.ConnectionPool(config);
      pool.on('error', err => {
        // ... error handler
        console.log('sql errors', err);
      });
      try {
        await pool.connect();
        let data = await pool.request().query(sqlquery);
        resolve(data);
      } catch (err) {
        reject(err);
      } finally {
        pool.close(); //closing connection after request is finished.
      }
    })();
  }).catch(function(err) {
  });
}
and the route handlers remain the same.
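One design note on the workaround: it creates and closes a new pool for every query. A common alternative, sketched below (untested, assuming the same mssql package and config object as above), is to create a single pool once and reuse it for all requests:

const sql = require('mssql');

// create the pool once and reuse it for every query
const poolPromise = new sql.ConnectionPool(config)
  .connect()
  .then(pool => {
    console.log('Connected to the MSSQL pool');
    return pool;
  });

async function executeQuery(sqlquery) {
  const pool = await poolPromise; // same pool shared by all callers
  return pool.request().query(sqlquery);
}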

How can I have parameters and callback functions at the same time in the MongoDB update() function?

I want to have a customized callback function to check if the update() is successful and return a Q promise accordingly.
var myFunction = function (name, email) {
  var deferred = Q.defer();
  MongoClient.connect(mongodbUrl, function (err, db) {
    var collection = db.collection('myCollection');
    collection.update(
      {'name' : name},
      {$set: {'email' : email}},
      function(err, result) {
        if (err) {
          console.log(err);
          deferred.resolve(false);
        }
        else
          //console.log(result);
          deferred.resolve(true);
      }
    );
    db.close();
  });
  return deferred.promise;
};
Then the caller is:
app.post('/abc', function(req, res) {
  myFunction(name, email)
    .then(function () {
      req.session.success = "success";
      res.redirect('/');
    })
    .fail(function (err) {
      console.log(err.body);
      req.session.error = "error";
      res.redirect('/');
    });
});
First of all, the .fail() function in the caller is always executed, why?
Secondly, I also need {upsert: true}; how can I add it to the update() function while keeping the callback function?
I took a stab at this (and did some refactoring); here's the result (I hope I understood what you were trying to do):
var MongoClient = require('mongodb').MongoClient;
var express = require('express');
var bodyParser = require('body-parser');

var app = express();
app.use(bodyParser.json());

const updateFunc = (name, email) => {
  return MongoClient.connect('mongodb://mongo-database:27017/stackoverflow')
    .then((db) => {
      return db.collection('collection').updateOne({ 'name' : name }, { $set: { 'email' : email }})
        .then((result) => {
          if (!result.matchedCount)
            throw Error(`${name} does not exist in database`);
          else if (result.matchedCount && !result.modifiedCount)
            throw Error(`Email for ${name} is already ${email}`);
          else
            return `Email for ${name} has been updated to ${email}`;
        });
    });
};

app.post('/update_user', function(req, res) {
  updateFunc(req.body.name, req.body.email)
    .then((result) => {
      console.log(result);
      res.send('success');
    })
    .catch((error) => {
      console.log(error);
      res.send('error');
    });
});

app.listen(3000, () => {
  console.log('App listening on port 3000');
});
Some notes:
- update has been deprecated in the mongodb package; it is recommended that you use updateOne or updateMany
- the mongodb package ships with a promise library, so you don't need to use the Q package
- the promise doesn't error when there are zero matchedCount or zero modifiedCount, so we need to throw custom errors
- I took some liberties with the usage of express.js to make it easy for me to test...so the response is not what you had in your example
Hope that helps
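On the second part of the question (upsert): updateOne accepts an options object as its third argument, so a small sketch (untested, reusing the names from the examples above) would be:

// promise style, as in the refactored example
db.collection('collection').updateOne(
  { 'name': name },
  { $set: { 'email': email } },
  { upsert: true } // insert a new document when no match is found
);

// callback style, as in the original question: options go before the callback
collection.update(
  { 'name': name },
  { $set: { 'email': email } },
  { upsert: true },
  function (err, result) {
    if (err) {
      deferred.resolve(false);
    } else {
      deferred.resolve(true);
    }
  }
);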

ssh2 connect to multiple servers and get the output in Node.js

I am using ssh2 nodejs module to connect to a UNIX application and run a script and it is successful.
Now I want to connect to multiple servers one by one and get the output and store it.
When I try using a for loop to pass the servers one by one from a JSON object as input to ssh2, the for loop completes much faster than the block which is supposed to get the output from the server.
This is also causing a handshake error.
Kindly help.
Here is the code:
inc_cron.schedule("*/20 * * * * *", function(id) {
});

//inc_cron.js
var cronFunction = function(inputStr) {
  if (appNames['applications'].length > 0) {
    for (i = 0; i < appNames["applications"].length; i++) {
      getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path, function(err, data) {
        if (err) {
          logger.info("Error is in cronFunction = ", err);
        } else if (data) {
          output_data += data;
        }
        ssh.close_second();
      });
    }
  }
}

var getDataFromServer = function(Name, hostname, username, password, log_path, cb) {
  ssh.close_second();
  ssh.connect_second({
    host: hostname,
    username: username,
    password: password
  }, function(err) {
    if (err) {
      logger.error('Err: ', err);
    } else {
      ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
        cb(err, data);
      });
    }
  });
}

//connect.js
SSHConnection.prototype.sftp = function(type, path, cb) {
  var self = this;
  var log_data = '';
  self.connection2.exec(path + ' ' + type, { pty: true }, function(err, stream) {
    if (err) {
      logger.log('SECOND :: exec error: ' + err);
    }
    stream.on('end', function() {
      self.connection2.end(); // close parent (and this) connection
    }).on('data', function(data) {
      logger.info(data.toString());
    });
  });
};
Without looking at your code too closely: be sure to handle the async issue with ssh2 correctly... use a promise factory.
One way to do this is to use ES7 async/await. For this you have to rewrite your getDataFromServer function to return a promise:
var getDataFromServer = function(Name, hostname, username, password, log_path) {
  return new Promise(function(resolve, reject) {
    ssh.close_second();
    ssh.connect_second({
      host: hostname,
      username: username,
      password: password
    }, function(err) {
      if (err) {
        reject(err);
      } else {
        ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
          if (err) {
            reject(err);
          } else {
            resolve(data);
          }
        });
      }
    });
  });
}
Now you can rewrite your cron function to be an async function:
var cronFunction = async function(inputStr) {
  if (appNames['applications'].length > 0) {
    for (i = 0; i < appNames["applications"].length; i++) {
      try {
        output_data += await getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path);
      } catch (err) {
        logger.info("Error is in cronFunction = ", err);
      }
      ssh.close_second();
    }
  }
}
async/await enables you to write async code in a synchronous coding style.
However, async/await is currently (node 7.*) hidden behind a flag (--harmony-async-await). This feature will be enabled by default in the upcoming node release (8.0.0) in April 2017.
So to start your app you currently have to use:
node --harmony-async-await yourapp.js
P.S.: This code is currently untested and most probably contains bugs, but you get the idea.

Using Node.js to connect to a REST API

Is it sensible to use Node.js to write a standalone app that will connect two REST APIs?
One end will be a POS - Point of sale - system
The other will be a hosted eCommerce platform
There will be a minimal interface for configuration of the service, nothing more.
Yes, Node.js is perfectly suited to making calls to external APIs. Just like everything in Node, however, the functions for making these calls are based around events, which means doing things like buffering response data as opposed to receiving a single completed response.
For example:
// get walking directions from central park to the empire state building
var http = require("http");
var url = "http://maps.googleapis.com/maps/api/directions/json?origin=Central Park&destination=Empire State Building&sensor=false&mode=walking";

// get is a simple wrapper for request()
// which sets the http method to GET
var request = http.get(url, function (response) {
  // data is streamed in chunks from the server
  // so we have to handle the "data" event
  var buffer = "",
      data,
      route;

  response.on("data", function (chunk) {
    buffer += chunk;
  });

  response.on("end", function (err) {
    // finished transferring data
    // dump the raw data
    console.log(buffer);
    console.log("\n");

    data = JSON.parse(buffer);
    route = data.routes[0];

    // extract the distance and time
    console.log("Walking Distance: " + route.legs[0].distance.text);
    console.log("Time: " + route.legs[0].duration.text);
  });
});
It may make sense to find a simple wrapper library (or write your own) if you are going to be making a lot of these calls.
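For instance, a minimal promise-based wrapper around http.get could look like the untested sketch below (getJSON is just a hypothetical helper name):

var http = require("http");

// resolves with the parsed JSON body of a GET request
function getJSON(url) {
  return new Promise(function (resolve, reject) {
    http.get(url, function (response) {
      var buffer = "";
      response.on("data", function (chunk) {
        buffer += chunk;
      });
      response.on("end", function () {
        try {
          resolve(JSON.parse(buffer));
        } catch (err) {
          reject(err);
        }
      });
    }).on("error", reject);
  });
}

// usage with the directions URL from the example above
getJSON(url).then(function (data) {
  console.log("Walking Distance: " + data.routes[0].legs[0].distance.text);
});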
Sure. The node.js API contains methods to make HTTP requests:
http.request
http.get
I assume the app you're writing is a web app. You might want to use a framework like Express to remove some of the grunt work (see also this question on node.js web frameworks).
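As a rough illustration of http.request, here is an untested sketch of a POST call using only the core http module (the host, path, and payload below are made up):

var http = require("http");

var payload = JSON.stringify({ sku: "12345", quantity: 2 });

var options = {
  hostname: "example.com",
  path: "/api/orders",
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "Content-Length": Buffer.byteLength(payload)
  }
};

var req = http.request(options, function (response) {
  var buffer = "";
  response.on("data", function (chunk) {
    buffer += chunk;
  });
  response.on("end", function () {
    console.log("Status: " + response.statusCode);
    console.log(buffer);
  });
});

req.on("error", function (err) {
  console.error(err);
});

req.write(payload);
req.end();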
/* The sample GET API below covers the following:
   - DB connection created in a class
   - a common function to execute the query
   - logging through the bunyan library */
const { APIResponse } = require('./../commonFun/utils');
const createlog = require('./../lib/createlog');

//create DB instance (class defined below)
const dbObj = new DB();

//Test API
routes.get('/testapi', (req, res) => {
  res.status(201).json({ message: 'API microservices test' });
});

routes.get('/getStore', (req, res) => {
  try {
    const store_id = req.body.storeID;
    const promiseReturnwithResult = selectQueryData('tablename', store_id, dbObj.conn);
    promiseReturnwithResult.then((result) => {
      APIResponse(200, 'Data fetched successfully', result).then((result) => {
        res.send(result);
      });
    }).catch((err) => { console.log(err); throw err; });
  } catch (err) {
    console.log('Exception caught in getuser API', err);
    const e = new Error();
    if (err.errors && err.errors.length > 0) {
      e.Error = 'Exception caught in getuser API';
      e.message = err.errors[0].message;
      e.code = 500;
      res.status(404).send(APIResponse(e.code, e.message, e.Error));
      createlog.writeErrorInLog(err);
    }
  }
});
//create connection
"use strict"
const mysql = require("mysql");

class DB {
  constructor() {
    this.conn = mysql.createConnection({
      host: 'localhost',
      user: 'root',
      password: 'pass',
      database: 'db_name'
    });
  }

  connect() {
    this.conn.connect(function (err) {
      if (err) {
        console.error("error connecting: " + err.stack);
        return;
      }
      console.log("connected to DB");
    });
  }
} //End class

module.exports = DB
//queryTransaction.js File
selectQueryData = (table, where, db_conn) => {
  return new Promise(function(resolve, reject) {
    try {
      db_conn.query(`SELECT * FROM ${table} WHERE id = ${where}`, function(err, result) {
        if (err) {
          reject(err);
        } else {
          resolve(result);
        }
      });
    } catch (err) {
      console.log(err);
      reject(err); // otherwise the promise would never settle on a synchronous error
    }
  });
}
module.exports = {selectQueryData};
//utils.js file
APIResponse = async (status, msg, data = '', error = null) => {
  try {
    if (status) {
      return { statusCode: status, message: msg, PayLoad: data, error: error }
    }
  } catch (err) {
    console.log('Exception caught in getuser API', err);
  }
}

module.exports = {
  logsSetting: {
    name: "USER-API",
    streams: [
      {
        level: 'error',
        path: '' // log ERROR and above to a file
      }
    ],
  },
  APIResponse
}
//createlogs.js File
var bunyan = require('bunyan');
const dateFormat = require('dateformat');
const {logsSetting} = require('./../commonFun/utils');

module.exports.writeErrorInLog = (customError) => {
  let logConfig = {...logsSetting};
  console.log('reached in writeErrorInLog', customError);
  const currentDate = dateFormat(new Date(), 'yyyy-mm-dd');
  const path = logConfig.streams[0].path = `${__dirname}/../log/${currentDate}error.log`;
  const log = bunyan.createLogger(logConfig);
  log.error(customError);
}
An easier and more useful tool is a library like Unirest; Unirest is a package on NPM that is very easy to use, for example:
var unirest = require('unirest');

app.get('/any-route', function(req, res) {
  unirest.get("https://rest.url.to.consume/param1/paramN")
    .header("Any-Key", "XXXXXXXXXXXXXXXXXX")
    .header("Accept", "text/plain")
    .end(function (result) {
      res.render('name-of-the-page-according-to-your-engine', {
        layout: 'some-layout-if-you-want',
        markup: result.body['any-property'], // bracket notation, since a hyphenated key is not valid with dot access
      });
    });
});
