ldapjs client.search results to be accessed outside of the function - javascript

I am using ldapjs library in nodejs. I want to access the results of client.search outside of the function.
Here is my code
items = [];
client.search('cn=users,dc=test,dc=com', opts, function (err, res) {
if (err)
console.log(err);
res.on('searchEntry', function (entry) {
items.push(entry.object);
});
res.on('error', function (err) {
console.error('error: ' + err.message);
});
res.on('end', function (result) {
console.log('status: ' + result.status);
console.log(items[0]); **//Prints results correctly**
});
});
console.log(items[0]); **//Prints []**
I tried return items inside search. Still doesn't print. Any suggestions?

I encountered the same problem. Since res.on('searchEntry', ...) registers an event-emitter listener, a simple solution that I use is to wrap the whole thing in a promise and return that.
// Wrap the ldapjs search in a Promise so the collected entries can be awaited
// by callers. Fixes several defects in the original snippet: a string literal
// in the parameter list (syntax error), an unconditional reject after the
// err check, `reject(error)` referencing an undefined variable, an implicit
// global `items`, and unbalanced closing braces.
let search = function (base, opts) {
  return new Promise((resolve, reject) => {
    // Keep the accumulator local to each call instead of an implicit global.
    const items = [];
    client.search(base, opts, function (err, res) {
      if (err) {
        console.log(err);
        return reject(err); // bail out; there is no `res` to listen on
      }
      res.on('searchEntry', function (entry) {
        items.push(entry.object);
      });
      res.on('error', function (err) {
        console.error('error: ' + err.message);
        reject(err); // was `reject(error)` — an undefined variable
      });
      res.on('end', function (result) {
        console.log('status: ' + result.status);
        resolve(items); // all entries collected
      });
    });
  });
};
Then you can do something like this:
let entries = await search( 'cn=users,dc=test,dc=com', opts );

You can use a wrapper function and a callback
// Wrapper that exposes the async search result through an error-first
// callback, matching the call site below (callback(err, result)).
// The original skeleton had mismatched braces and called callback(items)
// without the error slot.
function search(dn, options, callback) {
  // your code ...
  res.on('end', function (result) {
    // error-first convention: null error, then the collected entries
    callback(null, items);
  });
}
and to call it like this
search(dn, options, function (err, result) {
  //your result ...
  if (err) return console.error(err); // handle the error-first slot
  console.log(result);
}); // the original snippet was missing this closing `);`

Related

return result from memcached.get()?

I am using npm memcached package https://www.npmjs.com/package/memcached
It is possible to return data from memcached get method?
Right now the returned data is undefined.
memcached.set('foo', 'bar', 10, function (err) { });
let data1;
memcached.get('foo', function (err, data) {
console.log(data); // bar
data1 = data
});
console.log(data1); // undefined
let data2 = memcached.get('foo', function (err, data) {
console.log(data); // undefined
return data;
});
console.log(data2); // undefined
let data3 = async () => {
let result = await memcached.get('foo', function (err, data) {
console.log(data); // bar
});
console.log(result); // undefined
return result;
}
console.log(data); // {}
You can't return from get() as it's an asynchronous callback. What you can do is:
with callbacks:
memcached.get('foo', (err, data) => {
  // Guard clause: report the failure and stop.
  if (err) {
    console.log('error: ', err);
    return;
  }
  console.log('data: ', data);
});
with promises:
// NOTE(review): the memcached npm package's get() takes a callback and does
// not return a Promise — this chain assumes a promisified client (e.g. via
// util.promisify); verify before copying.
memcached.get('foo')
.then((data) => {
console.log('data: ', data);
})
.catch((err) => {
console.log('error: ', err);
});
and with async/await:
// Same contract as the arrow-function version: awaits the value and logs
// either the data or the error.
// NOTE(review): assumes a promisified memcached client — confirm.
async function getData() {
  try {
    const data = await memcached.get('foo');
    console.log('data: ', data);
  } catch (err) {
    console.log('error: ', err);
  }
}
hope this helps :)
Since memcached is async you can't really return from it, although I see the OP found a way via a promisify wrapper. What you can do is call another function from inside the callback.
Working with async socket communication I've had to split up a lot of my workflows into request and receives.
// Kick off the async get and hand the result to a named function instead of
// trying to return it.
// NOTE(review): err is ignored here — handle or log it in real code.
memcached.get('foo',(err, data) => {
doStuff(data);
});
// Called from inside the callback once the data is available.
function doStuff(data) {
//do some stuff here
}
Of course you could always do your work with the data inside the handler too. But in some cases that isn't the best approach.

NodeJS Async Database fetch server freezing

I have an application running on NodeJS(express + mongoose + jade).
I have a post-route /search (all routes are in a separate module) which should handle fetching data from mongo database and inserting it into jade template(in this case just printing th console):
// Question code: the server freezes because the response is never sent.
router.post('/search', function (req,res) {
var componentsArray = null;
// BUG: getArray declares no parameters, so the function passed at the call
// site below is silently ignored and never invoked.
function getArray(){
console.log('Initializing...');
// BUG: dataExchanger.search (defined below) kicks off an async find and has
// no return statement at its top level, so this assigns undefined.
componentsArray = dataExchanger.search(req.body.select, req.body.selectType, req.body.searchField);
}
// This anonymous function is never called, so res.render never runs and the
// HTTP request hangs — that is the observed "freeze".
getArray(function () {
console.log('Documents returned.');
console.log('Printing array...');
console.log('Array: ' + componentsArray);
console.log('Array type: ' + typeof (componentsArray));
console.log('Rendering page...');
res.render('search_results');
});
});
Searching and fetching function implemented in a different module dataExchanger:
// Question code: callers always receive undefined.
exports.search = function(select, type, data) {
console.log('Fetching documents...');
// BUG: `return docs` below returns from the find() callback, not from
// exports.search — the outer function always returns undefined.
componentsModel.find({name: data}, function (err, docs) {
if(!err) {
console.log('Returning documents...');
return docs;
} else {
console.log('Can\'t return documents!');
// NOTE(review): throwing inside an async callback cannot be caught by the
// caller of exports.search; it surfaces as an unhandled exception.
throw err;
}
});
};
The problem is that when I am using a callback function for getArray(), the server just freezes at the moment of returning docs and stops responding.
What am I doing wrong?
Try to use async/await
router.post('/search', async (req, res) => {
  // Pull the search parameters off the request body up front.
  const { select, selectType, searchField } = req.body;
  let componentsArray;
  try {
    componentsArray = await dataExchanger.search(select, selectType, searchField);
  } catch (err) {
    //If error in request and no data.
    console.error('Error', err.message);
    return res.render('error_message');
  }
  console.log('Documents returned.');
  console.log('Printing array...');
  console.log('Array: ' + componentsArray);
  console.log('Array type: ' + typeof (componentsArray));
  console.log('Rendering page...');
  res.render('search_results');
});
And here is your dataExchanger
exports.search = function(select, type, data) {
console.log('Fetching documents...');
return new Promise((resolve, reject) => {
componentsModel.find({name: data}, function (err, docs) {
if(err) return reject(err);
resolve(docs);
});
})
};
Further reading: promises, async/await
// Alternative answer: thread a callback through getArray.
router.post('/search', function (req,res) {
var componentsArray = null;
function getArray(cb){
console.log('Initializing...');
// NOTE(review): as the answer itself notes below, search() is async and (as
// written in the question) returns undefined, so componentsArray is still
// unset when cb() runs on the next line — the callback must be passed down
// into dataExchanger.search for this to actually work.
componentsArray = dataExchanger.search(req.body.select, req.body.selectType, req.body.searchField);
//Execute the callback
cb();
}
getArray(function () {
console.log('Documents returned.');
console.log('Printing array...');
console.log('Array: ' + componentsArray);
console.log('Array type: ' + typeof (componentsArray));
console.log('Rendering page...');
res.render('search_results');
});
});
Looks like your search method is async as well, so you will need to pass the callback down to that to get the desired result.

Q.fcall always goes into fail

var Q = require('q')
var fs = require('fs')
var deferred = Q.defer()
// Question code: always returns undefined.
function GetDTImage (PicName) {
// fs.readFile is asynchronous; this call returns to the caller immediately.
fs.readFile(process.cwd() + '\\' + PicName + '.jpg', function (error, text) {
if (error) {
console.log(error)
} else {
// BUG: this returns from the readFile callback only — the value is
// discarded and GetDTImage itself falls through, returning undefined.
return text.toString('base64')
}
})
}
Q.fcall(GetDTImage('Dogs'))
.then(
function (imgBase64Code) {
console.log(imgBase64Code)
}, function (err) {console.log(err)}
)
Hello everyone, here is a question bothering me for a time.
I am confused why the above code always performs the error message, Cannot read property 'apply' of undefined,
Firstly, Q.fcall expects a function as the first argument, and optional arguments for that function as subsequent arguments
so, you need to use Q.fcall like this
Q.fcall(GetDTImage, 'Dogs')
.then(
function (imgBase64Code) {
console.log(imgBase64Code)
}, function (err) {console.log(err)}
)
However, this would resolve to the value (or promise) returned by calling
GetDTImage('Dogs')
But your function GetDTImage doesn't return anything - the only return statement is inside the callback inside that function!
effectively your GetDTImage funciton is
// "Effectively your function is" — illustrative restatement of the question's
// original function. The original answer accidentally pasted `reject`/
// `resolve` calls here (undefined in this context and contradicting the point
// being made); restored the body actually being illustrated.
function GetDTImage (PicName) {
  // go and do this asynchronous thing
  fs.readFile(process.cwd() + '\\' + PicName + '.jpg', function (error, text) {
    if (error) {
      console.log(error)
    } else {
      // this return only exits the readFile callback; the value is discarded
      return text.toString('base64')
    }
  })
  // but return straight away regardless of that asynchronous thing
  return undefined
}
because fs.readFile is asynchronous, you need GetDTImage to return a promise for it to work
// Promise-returning version: resolves with the image contents base64-encoded,
// rejects if the file cannot be read.
function GetDTImage (PicName) {
  const imagePath = process.cwd() + '\\' + PicName + '.jpg'
  return new Promise(function (resolve, reject) {
    fs.readFile(imagePath, function (error, text) {
      if (error) {
        reject(error)
        return
      }
      resolve(text.toString('base64'))
    })
  })
}
Now you can either
Q.fcall(GetDTImage, 'Dogs')
.then(
function (imgBase64Code) {
console.log(imgBase64Code)
}, function (err) {console.log(err)}
)
or
Q.fcall(GetDTImage('Dogs'))
.then(
function (imgBase64Code) {
console.log(imgBase64Code)
}, function (err) {console.log(err)}
)
(I'm sure there's a difference between those two, but I'm not familiar enough with Q to accurately define that difference.)
However, since GetDTImage now returns a promise, you may as well do this:
GetDTImage('Dogs')
.then(
function (imgBase64Code) {
console.log(imgBase64Code)
}, function (err) {console.log(err)}
)
and forget Q.fcall altogether
var Q = require('q')
var fs = require('fs')
function GetDTImage (PicName) {
  // Create a fresh deferred per call: the original hoisted a single shared
  // deferred to module scope, so every call after the first returned the
  // already-settled promise from the first call.
  var deferred = Q.defer()
  fs.readFile(process.cwd() + '\\' + PicName + '.jpg', function (error, text) {
    if (error) {
      console.log(error)
      // The original never settled the promise on error, so the .then()
      // below would hang forever on a read failure.
      deferred.reject(error)
    } else {
      deferred.resolve(text.toString('base64'))
    }
  })
  return deferred.promise
}
GetDTImage('Dogs') // original passed the bare identifier Dogs (ReferenceError)
  .then(
    function (imgBase64Code) {
      console.log(imgBase64Code)
    }, function (err) {console.log(err)}
  )
Thanks Jaromanda's support.
I have solved it and here is my solution.

How to make sure call is asynchronous?

I have a program where the user first creates a file; once the file is created, I append data that is coming from the client continuously. The code below is working as expected. I am new to Node.js, so I just want an expert opinion: when multiple users are creating and recording files on their machines at the same time, will it work asynchronously, or do I need to make some changes to the code?
io.js
socket.on('createlogfile', function() {
logsRecording.userLogs(function(filename) {
socket.emit('filename', filename);
});
});
socket.on('startrecording', function(obj) {
logsRecording.recordLogs(obj);
});
server.js
// Question code (server.js).
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
// NOTE(review): fs.openSync is synchronous and takes (path, flags, mode) —
// it does not accept a callback, so this function argument is never invoked.
// The returned file descriptor is also discarded (leaked).
fs.openSync(file, 'a',function () {
console.log('file created');
});
console.log('userLogs');
callback(filename);
},
recordLogs: function (obj) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
fs.readdir(dir, function(err, items) {
items.forEach(function(file){
if(obj.file === file){
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
if (err) throw err;
});
console.log('filename in records',obj.file);
}
});
});
}
You are using fs.openSync, which is synchronous and as such can hang the event loop.
You should be using fs.open and callback inside it:
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
fs.open(file, 'a', function (err) {
console.log('file created');
console.log('userLogs');
callback(err, filename);
});
},
And you can flatten recordLogs using async.
Also, it is bad practice to throw error in synchronous function, you should be passing the error in the callback.
As a last tip, Array.forEach is synchronous, and can hang the process, you should be using async.each
// Rewritten with the async library: waterfall sequences readdir -> append,
// and async.each walks the directory entries without blocking the loop.
recordLogs: function (obj, callback) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
async.waterfall([
(callback) => {
// Step 1: list the directory; pass (err, items) on to the next step.
fs.readdir(dir, (err, items) => {
callback(err, items);
});
},
(items, callback) => {
// Step 2: append to the matching file. Every branch must invoke the
// per-item callback exactly once or async.each never completes.
async.each(items, (file, callback) => {
if(obj.file === file) {
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
callback(err);
});
console.log('filename in records',obj.file);
} else {
callback();
}
}, (err) => {
callback(err);
});
}
], (err, file) => {
// Final callback: propagate any error to the caller when one was supplied
// (callback here refers to recordLogs' own parameter, not the shadowed ones).
if(callback) {
callback(err);
}
});
}

how to make synchronous http calls within async.each in nodejs

I want to make http requests to an API-s to collect for each user it's data and insert into mongodb.
The problem I am having is, it is doing all the requests at once, and seems it gets stuck somewhere and I don't know what is going on.
Although I am using the async library and add the request() call inside each iteration, I don't know if this is the right way; here is the code:
function iterateThruAllStudents(from, to) {
Student.find({status: 'student'})
.populate('user')
.exec(function (err, students) {
if (err) {
throw err;
}
async.forEach(students, function iteratee(student, callback) {
if (student.worksnap.user != null) {
var options = {
url: 'https://api.worksnaps.com/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
headers: {
'Authorization': 'Basic bGhNSVJkVUFwOE1DS2loOFVyZkFyOENEZEhPSXdCdUlHdElWMHo0czo='
}
};
request(options, getTimeEntriesFromWorksnap);
}
callback(); // tell async that the iterator has completed
}, function (err) {
console.log('iterating done');
});
});
}
function getTimeEntriesFromWorksnap(error, response, body) {
console.log(response.statusCode);
if (!error && response.statusCode == 200) {
parser.parseString(body, function (err, results) {
var json_string = JSON.stringify(results.time_entries);
var timeEntries = JSON.parse(json_string);
_.forEach(timeEntries, function (timeEntry) {
_.forEach(timeEntry, function (item) {
saveTimeEntry(item);
});
});
});
}
}
function saveTimeEntry(item) {
Student.findOne({
'worksnap.user.user_id': item.user_id[0]
})
.populate('user')
.exec(function (err, student) {
if (err) {
throw err;
}
student.timeEntries.push(item);
student.save(function (err) {
if (err) {
console.log(err);
} else {
console.log('item inserted...');
}
});
});
}
var from = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 30);
var to = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(from, to);
I am new to JavaScript, especially when dealing with async.
Any help?
Use async.eachLimit() to make batched requests to the API. Try this iterateThruAllStudents() function.
I already had same question before here
See tutorial of limiting here.
Though i am making the limit as 5 but you can do whatever you want(10,50 etc).
// Fetch time entries for every student, at most 5 API requests in flight.
function iterateThruAllStudents(from, to) {
  Student.find({status: 'student'})
    .populate('user')
    .exec(function (err, students) {
      if (err) {
        throw err;
      }
      async.eachLimit(students, 5, function iteratee(student, callback) {
        if (student.worksnap.user != null) {
          var options = {
            url: 'https://api.worksnaps.com/api/projects/' + project_id + '/time_entries.xml?user_ids=' + student.worksnap.user.user_id + '&from_timestamp=' + from + '&to_timestamp=' + to,
            headers: {
              'Authorization': 'Basic bGhNSVJkVUFwOE1DS2loOFVyZkFyOENEZEhPSXdCdUlHdElWMHo0czo='
            }
          };
          // getTimeEntriesFromWorksnap returns a handler that invokes
          // `callback` once the entries are parsed and saved.
          request(options, getTimeEntriesFromWorksnap(callback));
        } else {
          // BUG FIX: the original never called `callback` for students with
          // no worksnap user, so eachLimit stalled after 5 such students and
          // its final callback never fired.
          callback();
        }
      }, function (err) {
        console.log(err);
        console.log('iterating done');
      });
    });
}
// Returns a request() handler that invokes cb exactly once: after all parsed
// time entries are saved on success, or immediately on a failed response.
function getTimeEntriesFromWorksnap(cb) {
  return function (error, response, body) {
    console.log(response.statusCode);
    if (!error && response.statusCode == 200) {
      parser.parseString(body, function (err, results) {
        var json_string = JSON.stringify(results.time_entries);
        var timeEntries = JSON.parse(json_string);
        async.each(timeEntries, function (timeEntry, cb1) {
          async.each(timeEntry, function (item, cb2) {
            saveTimeEntry(item, cb2);
          }, cb1); // forward the inner completion (err or not) directly
        }, cb);    // cb fires once every entry has been saved
      });
    } else {
      // BUG FIX: the original called cb(null) unconditionally after the `if`,
      // so on a successful response cb ran twice (here and via async.each).
      // Signal completion on the failure path only, propagating the error.
      cb(error);
    }
  };
}
// Append a time entry to the matching student document, then signal
// completion through cb2 (error-first).
function saveTimeEntry(item, cb2) {
  Student.findOne({
    'worksnap.user.user_id': item.user_id[0]
  })
    .populate('user')
    .exec(function (err, student) {
      if (err) {
        return cb2(err);
      }
      // BUG FIX: findOne yields null when no student matches; the original
      // crashed here ("Cannot read property 'timeEntries' of null").
      if (!student) {
        return cb2();
      }
      student.timeEntries.push(item);
      student.save(function (err) {
        if (err) {
          console.log(err);
          //return cb2(err);//Do it if you wanna throw an error.
        } else {
          console.log('item inserted...');
        }
        cb2();
      });
    });
}
var from = new Date(startDate).getTime() / 1000;
startDate.setDate(startDate.getDate() + 30);
var to = new Date(startDate).getTime() / 1000;
iterateThruAllStudents(from, to);
In your example you missed iteratee param in the each method of async - iteratee(item, callback). Look at this example here.
You need to call callback each time inside your iteratee function to tell async continue doing its processing.
each(collection, iteratee, [callback])
collection - collection to iterate over.
iteratee(item, callback) - function to apply to each item in coll. The iteratee is passed a callback(err) which must be called once it has completed. If no error has occurred, the callback should be run without arguments or with an explicit null argument. The array index is not passed to the iteratee. If you need the index, use forEachOf.
callback(err) - Optional callback which is called when all iteratee functions have finished, or an error occurs.
If you need synchronous behavior, no probs! There is also eachSeries method with the same signature except every collection item will be iterated synchronously.
UPDATE:
Changes should be implemented:
Pass async callback:
request(options, getTimeEntriesFromWorksnap(callback));
Return necessary for request callback function:
function getTimeEntriesFromWorksnap(callback) {
return function(error, response, body) {
// ...
saveTimeEntry(item, callback);
// ...
}
}
Call callback only after record is saved in database:
function saveTimeEntry(item, callback) {
// ..
student.save(callback);
// ..
}
Refactor nested loops (not sure what timeEntries, timeEntry are, so use appropriate async method to iterate these data structures):
async.each(timeEntries, function (timeEntry, callback) {
async.each(timeEntry, function (item, callback) {
saveTimeEntry(item, callback);
}, callback);
}, callback);

Categories

Resources