NodeJS IMAP polling server returns LOGIN failed - javascript

I'd like to check emails every T minutes, using the imap module for Node.js.
So I set a timer:
_pollingTimer = setInterval(receiveEmails, constants.POLLING_TIME * 60 * 1000); // POLLING_TIME minutes polling
where the receiveEmails function is the following:
function receiveEmails() {
    const imap = new Imap({
        user: user_name,
        password: user_pwd,
        host: host_name,
        port: host_port_imap,
        tls: true
    });

    imap.once('ready', function () {
        imap.openBox('INBOX', false, function (err, box) { // false means write enabled
            if (err) throw err;
            imap.search(['UNSEEN'], function (err, results) {
                if (err) throw err;
                else if (!results || !results.length) {
                    logger.debug(`No new messages on the INBOX`);
                }
                else {
                    var f = imap.fetch(results, { bodies: '' });
                    f.on('message', _processMessage);
                    f.once('error', function (err) { console.log('Fetch error: ' + err); });
                    f.once('end', function () {
                        logger.debug(`Done fetching all messages!`);
                        imap.end();
                    });
                }
            });
        });
    });

    imap.once('error', function (err) {
        logger.error(`${err}`);
    });

    imap.once('end', function () {
        logger.debug(`IMAP connection ended`);
    });

    imap.connect();
}
But calling this function multiple times returns a LOGIN failed error. It seems related to multiple login attempts.
I tried declaring the imap variable outside the function, but nothing changes.
So, how can I periodically check for new emails while avoiding reconnections?
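One way to avoid overlapping logins (a minimal sketch, not a verified fix: it assumes the LOGIN failures come from a new connection being opened while the previous poll is still logged in) is to guard the poll with a flag and to end the connection even when nothing was fetched, so each tick starts from a clean state. All names except the added polling flag and receiveEmailsGuarded come from the question:

let polling = false; // added guard flag, not part of the original code

function receiveEmailsGuarded() {
    if (polling) {
        logger.debug(`Previous poll still in progress, skipping this tick`);
        return;
    }
    polling = true;

    const imap = new Imap({
        user: user_name,
        password: user_pwd,
        host: host_name,
        port: host_port_imap,
        tls: true
    });

    // release the guard only once the connection is really gone
    imap.once('end', function () {
        polling = false;
        logger.debug(`IMAP connection ended`);
    });
    imap.once('error', function (err) {
        polling = false;
        logger.error(`${err}`);
    });

    imap.once('ready', function () {
        imap.openBox('INBOX', false, function (err, box) {
            if (err) { imap.end(); return; }
            imap.search(['UNSEEN'], function (err, results) {
                if (err || !results || !results.length) {
                    imap.end(); // end the session even when there is nothing to fetch
                    return;
                }
                const f = imap.fetch(results, { bodies: '' });
                f.on('message', _processMessage);
                f.once('end', function () { imap.end(); });
            });
        });
    });

    imap.connect();
}

_pollingTimer = setInterval(receiveEmailsGuarded, constants.POLLING_TIME * 60 * 1000);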

Related

Getting error while reading outlook mails in Node.js

Goal: I am trying to read Outlook mails with certain filters like 'from specified user', 'read', 'sent', etc. I used the "imap" module for parsing. I have to read the mail and download and store its attachments in a certain location (preferably local), but my code is failing to connect to the mail server.
Below is my code, which results in an 'Auth: timeout' error when I run it.
Please let me know what is wrong with my code. Thanks in advance!
var Imap = require('imap'),
    inspect = require('util').inspect;
var fs = require('fs'), fileStream;
var buffer = '';
var myMap;

var imap = new Imap({
    user: "put user id here",
    password: "put your password here",
    host: "outlook.office365.com", // this may differ if you are using some other mail service like Yahoo
    port: 993,
    tls: true,
    // connTimeout: 10000, // Default by node-imap
    // authTimeout: 5000, // Default by node-imap,
    debug: console.log, // Or your custom function with only one incoming argument. Default: null
    tlsOptions: true,
    mailbox: "INBOX", // mailbox to monitor
    searchFilter: ["UNSEEN", "FLAGGED"], // the search filter being used after an IDLE notification has been retrieved
    markSeen: true, // all fetched email will be marked as seen and not fetched next time
    fetchUnreadOnStart: true, // use it only if you want to get all unread email on lib start. Default is `false`,
    mailParserOptions: { streamAttachments: true }, // options to be passed to mailParser lib.
    attachments: true, // download attachments as they are encountered to the project directory
    attachmentOptions: { directory: "attachments/" } // specify a download directory for attachments
});

function openInbox(cb) {
    imap.openBox('INBOX', false, cb);
}

imap.once('ready', function () {
    openInbox(function (err, box) {
        if (err) throw err;
        imap.search(['UNSEEN', ['SUBJECT', 'Give Subject Here']], function (err, results) {
            if (err) throw err;
            var f = imap.fetch(results, { bodies: '1', markSeen: true });
            f.on('message', function (msg, seqno) {
                console.log('Message #%d', seqno);
                console.log('Message type ' + msg.text);
                var prefix = '(#' + seqno + ') ';
                msg.on('body', function (stream, info) {
                    stream.on('data', function (chunk) {
                        buffer += chunk.toString('utf8');
                        console.log("BUFFER " + buffer);
                    });
                    stream.once('end', function () {
                        if (info.which === '1') {
                            console.log("BUFFER " + buffer);
                        }
                    });
                    console.log(prefix + 'Body');
                    stream.pipe(fs.createWriteStream('msg-' + seqno + '-body.txt'));
                });
                msg.once('attributes', function (attrs) {
                    console.log(prefix + 'Attributes: %s', inspect(attrs, false, 8));
                });
                msg.once('end', function () {
                    console.log(prefix + 'Finished');
                });
            });
            f.once('error', function (err) {
                console.log('Fetch error: ' + err);
            });
            f.once('end', function () {
                console.log('Done fetching all messages!');
                imap.end();
            });
        });
    });
});

imap.once('error', function (err) {
    console.log(err);
});

imap.once('end', function () {
    console.log('Connection ended');
});

imap.connect();
It's not exactly what you asked for, but an alternative implementation that uses the ImapFlow module instead of node-imap, and that I just verified to work against Outlook, looks like the script below. If you still get timeouts etc., it is probably a firewall issue.
const { ImapFlow } = require("imapflow");
const fs = require("fs").promises;

const client = new ImapFlow({
    host: "outlook.office365.com",
    port: 993,
    secure: true,
    auth: {
        user: "example.user#hotmail.com",
        pass: "secretpass",
    },
    logger: false, // set to true if you want to see IMAP transaction logs
});

// can't run await in main scope, have to wrap it in an async function
async function main() {
    // establish the connection and log in
    await client.connect();

    // open the INBOX folder
    let mailbox = await client.mailboxOpen("INBOX");

    // list messages matching the provided criteria
    for await (let msg of client.fetch(
        {
            // search query to filter messages
            // https://imapflow.com/global.html#SearchObject
            seen: false,
            subject: "Give Subject Here",
        },
        {
            // attributes to request
            // https://imapflow.com/global.html#FetchQueryObject
            uid: true,
            flags: true,
            internalDate: true,
            bodyStructure: true,
            // include the full message body in the response as well
            source: true,
        }
    )) {
        // extract variables
        let { seq, uid, flags, bodyStructure, internalDate, source } = msg;
        console.log(`#${seq} Attributes:`, { seq, uid, flags, bodyStructure, internalDate });

        // store the message body as an eml file
        await fs.writeFile(`msg-${seq}.eml`, source);
    }

    // close the connection
    await client.logout();
}

main().catch(console.error);
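Since the original goal also included storing attachments locally, the loop above could be extended with ImapFlow's download() call. The helper below is only a sketch built on the bodyStructure already being fetched; the attachment detection and file naming are assumptions, not something verified against Outlook:

const { createWriteStream } = require("fs");
const { pipeline } = require("stream/promises");

// could be called from inside the for-await loop above:
//   await saveAttachments(client, uid, bodyStructure);
async function saveAttachments(client, uid, bodyStructure) {
    // walk the MIME tree and collect nodes marked as attachments (assumption about the message layout)
    const attachmentNodes = [];
    (function walk(node) {
        if (!node) return;
        if (node.disposition === "attachment") attachmentNodes.push(node);
        (node.childNodes || []).forEach(walk);
    })(bodyStructure);

    for (const node of attachmentNodes) {
        // download a single MIME part by its part id
        const { meta, content } = await client.download(uid, node.part, { uid: true });
        const filename = meta.filename || `attachment-${uid}-${node.part}`;
        await pipeline(content, createWriteStream(`attachments/${filename}`));
    }
}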

AWS Lambda Node.js mysql error "Cannot enqueue Query after invoking quit" on second call onwards

I am currently creating an AWS Lambda resolver that uses the Node.js mysql library. Right now, my Lambda can execute a query against the database during the first invocation. However, on subsequent invocations the error "Cannot enqueue Query after invoking quit" occurs. I am aware that it has something to do with the connection to the database, but if I am not wrong, 'connection.query' should make an implicit connection to the database. I am also calling 'connection.end' within the callback. Could someone lend me a helping hand on this one?
Here is the Lambda index.js code:
/* Amplify Params - DO NOT EDIT
    API_SIMPLETWITTERCLONE_GRAPHQLAPIENDPOINTOUTPUT
    API_SIMPLETWITTERCLONE_GRAPHQLAPIIDOUTPUT
    API_SIMPLETWITTERCLONE_GRAPHQLAPIKEYOUTPUT
    AUTH_SIMPLETWITTERCLONEB1022521_USERPOOLID
    ENV
    REGION
Amplify Params - DO NOT EDIT */
const mysql = require("mysql");
// const util = require("util");
const config = require("./config.json");

const connection = mysql.createConnection({
    host: config.dbhost,
    user: config.dbuser,
    password: config.dbpassword,
    database: config.dbname,
});

// resolvers
const resolvers = {
    Query: {
        getAllUser: (event) => {
            return getAllUser();
        },
    },
};

function executeQuery(sql, params) {
    return new Promise((resolve, reject) => {
        const queryCallback = (error, results) => {
            if (error) {
                console.log("Error occured during query: " + error.message);
                connection.destroy();
                reject(error);
            } else {
                console.log("Connected to database and executed query");
                console.log("Results:", results);
                connection.end((err) => {
                    if (err) {
                        console.log("there was an error closing database:" + err.message);
                    }
                    console.log("database closed");
                    resolve(results);
                });
            }
        };

        if (params) {
            connection.query(sql, [...params], (error, results) => {
                queryCallback(error, results);
            });
        } else {
            connection.query(sql, (error, results) => {
                queryCallback(error, results);
            });
        }
    });
}

async function getAllUser() {
    const sql = "SELECT * FROM User";
    try {
        const users = await executeQuery(sql);
        return users;
    } catch (error) {
        throw error;
    }
}

exports.handler = async (event) => {
    // TODO implement
    console.log("event", event);
    console.log('DB connection var', connection);
    const typeHandler = resolvers[event.typeName];
    if (typeHandler) {
        const resolver = typeHandler[event.fieldName];
        if (resolver) {
            try {
                return await resolver(event);
            } catch (error) {
                throw error;
            }
        }
    }
    throw new Error("Resolver not found");
};
You are ending your connection during the first invocation. As a result, the second invocation does not have that connection anymore.
If you create the connection object during module import, do not .end() it in your invocation.
If you wish to .end() the connection object during invocation, you have to create it during invocation as well.
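A minimal sketch of the second option, with the connection created and ended inside each query (config fields as in the question, stock mysql API calls only):

const mysql = require("mysql");
const config = require("./config.json");

function executeQuery(sql, params) {
    return new Promise((resolve, reject) => {
        // a fresh connection per call, so ending it cannot break the next invocation
        const connection = mysql.createConnection({
            host: config.dbhost,
            user: config.dbuser,
            password: config.dbpassword,
            database: config.dbname,
        });
        // connection.query() connects implicitly on first use
        connection.query(sql, params || [], (error, results) => {
            connection.end(); // close the socket once the query callback has fired
            if (error) {
                reject(error);
            } else {
                resolve(results);
            }
        });
    });
}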

How to keep object state in electron/node.js?

I'm trying to create a Database class using the Node.js mysql module.
The point is, I want to be able to use the class in different files, but the class itself should keep one persistent connection, so I somehow need to keep object state between files.
My code so far:
main.js
const database = require('./app/database.js');

//// somewhere later in BrowserWindow initialisation
database.init(config);
custom_file.js
const database = require('./app/database.js');

setInterval(
    () => {
        updateOrdersTable();
    },
    10000
);

function updateOrdersTable() {
    console.log('updateOrdersTable');
    database.getCurrentOrders((orders) => {
        orders.forEach((row) => {
            console.log(row);
        });
    });
}
app/database.js
const mysql = require('mysql');

class Database {
    config = {};
    connection = {};
    customerWindow = {};
    stockWindow = {};

    init(config) {
        this.config = config;
        process.stdout.write('Connecting to MySQL Server...');
        this.connect();
    }

    connect() {
        this.connection = mysql.createConnection({
            host: this.config.database.host,
            port: this.config.database.port,
            user: this.config.database.user,
            password: this.config.database.password,
            database: this.config.database.database
        });
        const that = this;
        this.connection.connect(function(err) {
            if (err) {
                console.log('Database error', err);
                setTimeout(that.connect, that.config.database.reconnect_interval * 1000);
                return;
            }
            console.log('Success!');
        });
        this.connection.on('error', function(err) {
            console.log('Database error', err);
            setTimeout(that.connect, that.config.database.reconnect_interval * 1000);
        });
    }

    getCurrentOrders(callback) {
        this.connection.query("SELECT * FROM current_orders", function (err, result, fields) {
            if (err) throw err;
            callback(result);
        });
    }
}

module.exports = new Database();
The error message I'm getting:
Uncaught TypeError: this.connection.query is not a function
at Database.getCurrentOrders (F:\my-app\app\database.js:91)
Line 91 is this:
this.connection.query("SELECT * FROM current_orders", function (err, result, fields) {
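A likely explanation (not confirmed in the thread): if main.js runs in Electron's main process and custom_file.js in a renderer, each process gets its own Database instance from require(), so init() called in one process never connects the other, and this.connection stays the default {} whose .query is undefined. A small defensive sketch, assuming everything is kept in a single process, that at least surfaces the missing init() clearly:

// app/database.js (sketch): guard the query so a missing init() fails with a clear message
getCurrentOrders(callback) {
    if (typeof this.connection.query !== 'function') {
        // init(config) has not been called in this process yet
        throw new Error('Database.init(config) must be called before querying');
    }
    this.connection.query("SELECT * FROM current_orders", function (err, result, fields) {
        if (err) throw err;
        callback(result);
    });
}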

Node.js MS SQL API facing issues while multiple connections are connected to the API

I just want to be as clear as I can: I have an MS SQL Node.js API through which I interact with my Android and desktop applications. Currently it's working fine, but it is not using a connection pool. I think that is why, when more people use my app, it just doesn't respond and gives an error more like
Connection already exists close SQL.close() first
So I was planning on upgrading my API to a connection pool, which means more people can connect to my API simultaneously. Right?
So I have this connection-to-the-DB code; the connection and the query handler look like this:
Connection:
var dbConfig = {
    user: 'sa',
    password: "pmis13",
    server: '19',
    database: 'CUBES_HO',
};
Query handler:
function executeQuery(query) {
    return new Promise((resolve, reject) => {
        sql.connect(dbConfig, function (err) {
            if (err) {
                reject(err);
                sql.close();
            } else {
                // create Request object
                var request = new sql.Request();
                // query the database and get the records
                request.query(query, function (err, data) {
                    if (err) {
                        reject(err);
                        sql.close();
                    } else {
                        resolve(data);
                        sql.close();
                    }
                });
            }
        });
    });
}
And the query looks like this:
app.get("/dailysale/:date", function (req, res) {
    var query = "SELECT SUM(bill_amt) AS totalSale FROM [CUBES_HO].[dbo].[vw_bill_summary] where inv_loc_key = 2 and bill_sale_date = '" + req.params.date + "'";
    executeQuery(query)
        .then((data) => {
            res.status(200).send({ "msg": "Records fetched", "data": data.recordsets });
        })
        .catch((err) => {
            res.status(500).json({ "msg": err.message });
        });
});
I want to convert, or we can say upgrade, this API to a connection pool, which sounds more reliable for multiple connections. Correct me if I am wrong.
I couldn't put this link in the comments section, so I am posting it here. This answer on SO explains the difference between mysql.createConnection and mysql.createPool.
An example to help you create a connection pool:
const pool = new sql.ConnectionPool({
    user: '...',
    password: '...',
    server: 'localhost',
    database: '...'
})
Found it here.
I found a workaround by doing this:
var config = {
    user: 'sa',
    password: "pdt09",
    server: '3',
    database: 'CUBES',
    options: { encrypt: true }
};

async function executeQuery(sqlquery) {
    return new Promise(function(resolve, reject) {
        (async () => {
            const pool = new sql.ConnectionPool(config);
            pool.on('error', err => {
                // ... error handler
                console.log('sql errors', err);
            });
            try {
                await pool.connect();
                let data = await pool.request().query(sqlquery);
                resolve(data);
            } catch (err) {
                reject(err);
            } finally {
                pool.close(); // closing connection after request is finished
            }
        })();
    }).catch(function(err) {
    });
}
and the worker will remain the same.
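For a pool that is actually shared between requests, a common pattern with the mssql package is to create one ConnectionPool at module load, connect it once, and reuse it for every request (a sketch; config is the object above, and the date is passed as a named parameter instead of being concatenated into the SQL string):

const sql = require('mssql');

// one pool for the whole process, connected once and reused by every request
const poolPromise = new sql.ConnectionPool(config)
    .connect()
    .then(pool => {
        console.log('Connected to MSSQL pool');
        return pool;
    });

app.get("/dailysale/:date", async function (req, res) {
    try {
        const pool = await poolPromise;
        const result = await pool.request()
            .input('date', sql.Date, req.params.date) // named parameter instead of string concatenation
            .query("SELECT SUM(bill_amt) AS totalSale FROM [CUBES_HO].[dbo].[vw_bill_summary] WHERE inv_loc_key = 2 AND bill_sale_date = @date");
        res.status(200).send({ "msg": "Records fetched", "data": result.recordsets });
    } catch (err) {
        res.status(500).json({ "msg": err.message });
    }
});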

ssh2: connect to multiple servers and get the output in Node.js

I am using the ssh2 Node.js module to connect to a UNIX application and run a script, and it is successful.
Now I want to connect to multiple servers one by one, get the output, and store it.
When I try using a for loop to pass the servers one by one from a JSON as input to ssh2, the for loop completes much faster than the block which is supposed to get the output from the server.
This is also causing a handshake error.
Kindly help.
Here is the code:
inc_cron.schedule("*/20 * * * * *", function(id) {
});

//inc_cron.js
var cronFunction = function(inputStr) {
    if (appNames['applications'].length > 0) {
        for (i = 0; i < appNames["applications"].length; i++) {
            getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path, function(err, data) {
                if (err) {
                    logger.info("Error is in cronFunction = ", err);
                } else if (data) {
                    output_data += data;
                }
                ssh.close_second();
            });
        }
    }
};

var getDataFromServer = function(Name, hostname, username, password, log_path, cb) {
    ssh.close_second();
    ssh.connect_second({
        host: hostname,
        username: username,
        password: password
    }, function(err) {
        if (err) {
            logger.error('Err: ', err);
        } else {
            ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
                cb(err, data);
            });
        }
    });
};

//connect.js
SSHConnection.prototype.sftp = function(type, path, cb) {
    var self = this;
    var log_data = '';
    self.connection2.exec(path + ' ' + type, { pty: true }, function(err, stream) {
        if (err) {
            logger.log('SECOND :: exec error: ' + err);
        }
        stream.on('end', function() {
            self.connection2.end(); // close parent (and this) connection
        }).on('data', function(data) {
            logger.info(data.toString());
        });
    });
};
Without looking at your code in depth: be sure to handle the async issue with ssh2 correctly... use a promise factory.
One way to do this is to use ES7 async/await. For this you have to rewrite your getDataFromServer function to return a promise:
var getDataFromServer = function(Name, hostname, username, password, log_path) {
    return new Promise(function(resolve, reject) {
        ssh.close_second();
        ssh.connect_second({
            host: hostname,
            username: username,
            password: password
        }, function(err) {
            if (err) {
                reject(err);
            } else {
                ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function(err, data) {
                    if (err) {
                        reject(err);
                    } else {
                        resolve(data);
                    }
                });
            }
        });
    });
};
Now you can rewrite your cron function to be an async function:
var cronFunction = async function(inputStr) {
    if (appNames['applications'].length > 0) {
        for (i = 0; i < appNames["applications"].length; i++) {
            try {
                output_data += await getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path);
            } catch (err) {
                logger.info("Error is in cronFunction = ", err);
            }
            ssh.close_second();
        }
    }
};
async/await enables you to write async code in a synchronous coding style.
However, async/await is currently (Node 7.*) hidden behind a flag (--harmony-async-await). This feature will be enabled by default in the upcoming Node release (8.0.0) in April 2017.
So to start your app you currently have to use
node --harmony-async-await yourapp.js
P.S.: This code is currently untested and most probably contains bugs, but you get the idea.
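One detail worth noting about the code in the question: the connect.js sftp() only logs the stream output and never invokes its cb, so even a correctly awaited getDataFromServer would resolve with no data. A minimal sketch of that wrapper accumulating the output and handing it back (same names as in the question; only the standard ssh2 exec/stream events are used):

SSHConnection.prototype.sftp = function(type, path, cb) {
    var self = this;
    var log_data = '';
    self.connection2.exec(path + ' ' + type, { pty: true }, function(err, stream) {
        if (err) {
            return cb(err);
        }
        stream.on('data', function(data) {
            log_data += data.toString(); // accumulate the command output
        }).on('end', function() {
            self.connection2.end();      // close this connection
            cb(null, log_data);          // hand the collected output back to the caller
        });
    });
};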
