node.js async function in loop?

I am having some problems with node.js. What I'm trying to do is get an array of the directories in "./"+req.user.email and loop through them, finding out their size and adding a table row to the output, as you can see in the code. At the end I want to send all the table rows using res.send().
However the only output I am getting is:
<tr></tr>
for each file in the array. It seems that the forEach function is not waiting for readSizeRecursive at all. The readSizeRecursive function is asynchronous, and I believe that is what's causing the problem, but I don't know how I can fix this.
Any help would be greatly appreciated, I have included the readSizeRecursive function too. Thank you!
var output = "";
fs.readdir("./" + req.user.email, function (err, files) {
    files.forEach(function (file) {
        output += "<tr>";
        readSizeRecursive("./" + req.user.email + "/" + file, function (err, total) {
            output += '<td>' + file + '</td><td>' + total + '</td>';
        });
        output += "</tr>";
    });
    res.send(output);
});
readSizeRecursive():
// Function to find the size of a directory
function readSizeRecursive(item, cb) {
    fs.lstat(item, function (err, stats) {
        var total = stats.size;
        if (!err && stats.isDirectory()) {
            fs.readdir(item, function (err, list) {
                async.forEach(
                    list,
                    function (diritem, callback) {
                        readSizeRecursive(path.join(item, diritem), function (err, size) {
                            total += size;
                            callback(err);
                        });
                    },
                    function (err) {
                        cb(err, total);
                    }
                );
            });
        } else {
            cb(err, total);
        }
    });
}

Please use the async module for this kind of pattern. Using async.each will allow you to compute the size of each folder asynchronously, and then return the sizes once every folder has been computed.
var output = [];
fs.readdir('./' + req.user.email, function (err, files) {
    async.each(files, compute, report);
});
function compute (file, done) {
    // calculate size, then callback to signal completion
    // produce a result like below, then invoke done()
    var obj = { files: [
        { name: file, size: size },
        { name: file, size: size },
        { name: file, size: size }
    ]};
    output.push(obj);
    done();
}
// doesn't need to be this awful
function format (list) {
    var result = [];
    list.forEach(function (item) {
        var description = item.files.map(function (file) {
            return util.format('<td>%s</td><td>%s</td>', file.name, file.size);
        });
        result.push(description);
    });
    result.unshift('<tr>');
    result.push('</tr>');
    return result.join('</tr><tr>');
}
function report (err) {
    if (err) { return next(err); }
    var result = format(output);
    res.send(result);
}
This way you can easily swap out the different pieces of functionality, changing the formatting without altering the computing of the file size tree, for example.
Your main issue was control flow: you call res.send while you are still asynchronously looping and computing the sizes.
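For reference, a minimal sketch of that fix applied directly to the original code (it assumes the async module is in scope and that next is an Express error handler, as in the answer above):

var async = require('async');

var rows = [];
fs.readdir('./' + req.user.email, function (err, files) {
    if (err) return next(err);
    async.each(files, function (file, done) {
        readSizeRecursive('./' + req.user.email + '/' + file, function (err, total) {
            // Collect each finished row; completion order may differ from the listing order.
            if (!err) rows.push('<tr><td>' + file + '</td><td>' + total + '</td></tr>');
            done(err);
        });
    }, function (err) {
        // Only send once async.each reports that every iterator has called done().
        if (err) return next(err);
        res.send(rows.join(''));
    });
});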

var fs = require("fs");

var createTableContent = function (p, cb) {
    var read = function (p, cb) {
        // Prevent recursion if error
        if (err) return cb();
        fs.stat(p, function (error, stats) {
            if (error) {
                err = error;
                return cb();
            }
            if (stats.isDirectory()) {
                var dirSize = 0;
                fs.readdir(p, function (error, entries) {
                    if (error) {
                        err = error;
                        return cb();
                    }
                    var pending = entries.length;
                    // Empty dir
                    if (!pending) return cb(0);
                    entries.forEach(function (entry) {
                        read(p + "/" + entry, function (entrySize) {
                            dirSize += entrySize;
                            if (!--pending) return cb(dirSize);
                        });
                    });
                });
            } else {
                cb(stats.size);
            }
        });
    };

    // A lot of errors can be produced, return only the first one
    var err = null;

    // Suppose p is a dir
    fs.readdir(p, function (error, entries) {
        if (error) return cb(error);
        var content = "";
        var pending = entries.length;
        if (!pending) return cb(null, content);
        entries.forEach(function (entry) {
            read(p + "/" + entry, function (totalSize) {
                if (err) return cb(err);
                content += "<tr><td>" + entry + "</td><td>" + totalSize + "</td></tr>";
                if (!--pending) {
                    // End
                    cb(null, content);
                }
            });
        });
    });
};

// Here goes the "email" path
createTableContent(".", function (error, content) {
    if (error) return console.error(error);
    console.log(content);
});

Related

NodeJS - output json array as multiple json files to disk

I am trying to use NodeJS to read a JSON array from a JSON file, and then write each JSON object out as a separate JSON file on disk.
However, I get the error EMFILE: too many open files.
The array has 20,000 objects.
The code:
function main() {
    var clusters_statistics = require("cluster_whole_1.json");
    for (var i = 0; i < clusters_statistics.length; i++) {
        var fs = require('fs');
        var outputFilename = 'cut_json/' + i + '.json';
        fs.writeFile(outputFilename, JSON.stringify(clusters_statistics[i], null, 4), function (err) {
            if (err) {
                console.log(err);
            } else {
                console.log(data);
            }
        });
    }
}
Update:
1. I tried to use the close() function as suggested by Gustavo; unfortunately, it still says there are too many files open ("open" this time).
2. Then I tried recursion inside the close(), and it works now.
The code:
function main() {
    clusters_statistics = require("cluster_whole_1.json");
    call_close(clusters_statistics.length);
}

function call_close(i) {
    var fs = require("fs");
    var path = 'cut_json/' + i + '.json';
    fs.open(path, "w+", function (error, fd) {
        if (error) {
            console.error("open error: " + error.message);
        } else {
            fs.writeFile(path, JSON.stringify(clusters_statistics[i], null, 4), function (err) {
                if (err) {
                    console.log(err);
                }
            });
            fs.close(fd, function (error) {
                if (error) {
                    console.log(error);
                } else {
                    if (i <= 0) {
                        return;
                    } else {
                        if (i % 100 == 0) {
                            console.log(i);
                        }
                        call_close(i - 1);
                    }
                }
            });
        }
    });
}
Close the file after you finish writing the JSON into it.
Right now you are keeping the files open and filling up memory; if you don't close them manually, they will only be closed when your program finishes.
var fs = require('fs');

function main() {
    var clusters_statistics = require("cluster_whole_1.json");
    for (var i = 0; i < clusters_statistics.length; i++) {
        var outputFilename = 'cut_json/' + i + '.json';
        // Synchronous open/write/close guarantees each descriptor is released
        // before the next file is opened.
        var my_file = fs.openSync(outputFilename, 'w+');
        var buffer = new Buffer(JSON.stringify(clusters_statistics[i], null, 4));
        fs.writeSync(my_file, buffer, 0, buffer.length, null);
        fs.closeSync(my_file);
    }
}
Opening and closing the file descriptor is not necessary when using fs.writeFile, but you'll probably want to write the files sequentially so you don't open too many files at once.
function write_cluster_statistics(clusters_statistics, callback, index) {
    index = index || 0;
    if (index >= clusters_statistics.length) {
        return callback();
    }
    var fs = require('fs');
    var path = 'cut_json/' + index + '.json';
    var content = JSON.stringify(clusters_statistics[index], null, 4);
    fs.writeFile(path, content, function (err) {
        if (err) {
            callback(err);
        } else {
            write_cluster_statistics(clusters_statistics, callback, index + 1);
        }
    });
}
function main() {
    var clusters_statistics = require("cluster_whole_1.json");
    write_cluster_statistics(clusters_statistics, function (err) {
        if (err) {
            console.error(err);
        } else {
            console.log('done');
        }
    });
}
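If strictly sequential writes prove too slow, a middle ground is to cap how many files are open at once. The following is only a sketch; it assumes the async module (v2+, which provides eachOfLimit) is installed:

var fs = require('fs');
var async = require('async');

function write_cluster_statistics(clusters_statistics, callback) {
    // Write at most 10 files concurrently; eachOfLimit passes the array index as `index`.
    async.eachOfLimit(clusters_statistics, 10, function (cluster, index, done) {
        var path = 'cut_json/' + index + '.json';
        fs.writeFile(path, JSON.stringify(cluster, null, 4), done);
    }, callback);
}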

How to run code only after query has finished?

I have the following code:
var userFound = false;
let sql = `SELECT box_id, cubby_id, comport, deliveredToUser
           FROM recipients
           WHERE package_password = ?`;
connection.query(sql, [req.session.sessionUserPackagePassword],
    function (err, rows, fields) {
        if (!err) {
            for (var i = 0; i < rows.length; i++) {
                // Make the comparison case insensitive
                if ((rows[i].deliveredToUser).toLowerCase() == `no`) {
                    userFound = true;
                    console.log(userFound);
                    var comport = rows[i].comport;
                    var command = "open" + rows[i].cubby_id;
                    var commandClose = "close" + rows[i].cubby_id;
                    var commandStatus = "status" + rows[i].cubby_id;
                    console.log(command);
                    console.log(comport);
                    var options = {
                        scriptPath: 'python/scripts',
                        // pass arguments to the script here
                        args: [command, comport, commandClose, commandStatus]
                    };
                    PythonShell.run('controlLock.py', options, function (err, results) {
                        if (err) {
                            res.render('errorConnection', {});
                        }
                        console.log('results: %j', results);
                    });
                }
            }
            console.log(userFound);
        // If the query fails to execute
        } else {
            console.log('Error while performing Query.');
            res.render('errorConnection', {});
        }
    });
connection.end();
if (!userFound) {
    //
    res.render('pickup/errorAlreadyDelivered', {});
    connection.end();
}
I want this part at the end to run only once the query has finished:

if (!userFound) {
    //
    res.render('pickup/errorAlreadyDelivered', {});
    connection.end();
}
console.log("connection ended " + userFound);
I placed a console.log outside the query, at the bottom; despite the query changing the value to true, it prints out false, and it prints first, because the console.log doesn't wait for the query to finish before reading the value.
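The fix follows the same pattern as the rest of this page: move everything that depends on userFound into the query callback, since that is the only place the result is known. A sketch based on the code above:

connection.query(sql, [req.session.sessionUserPackagePassword], function (err, rows, fields) {
    if (err) {
        console.log('Error while performing Query.');
        return res.render('errorConnection', {});
    }
    // Determine userFound inside the callback, where the rows actually exist.
    var userFound = rows.some(function (row) {
        return row.deliveredToUser.toLowerCase() === 'no';
    });
    // ...run the PythonShell commands for the matching rows here...
    if (!userFound) {
        res.render('pickup/errorAlreadyDelivered', {});
    }
    connection.end();
    console.log("connection ended " + userFound);
});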

JS Asynchronous execution is causing pain

I am developing a backend server code with NodeJS. What the code does is, periodically connect to a REST API, request updates and then write to my database.
I have no way of getting delta of the data from the API, so I drop my collection from MongoDB and then just insert the newly pulled data.
I implemented promises to make sure that the dependent methods are executed only after the previous ones resolve. This, however, doesn't seem to work as I anticipated.
So, I drop the collection and insert; this works. But the following method seems to execute before the new data is populated. It sometimes works when I add console.log statements, which seem to introduce just enough of a delay to make it all work.
The setTimeout function didn't seem to help. Any suggestions?
Here is a sanitized version of the code: https://jsfiddle.net/ppbfrozg/
var request = require("request");
var q = require('q');

function authenticate() {
    var deferred = q.defer();
    request(options, function (error, response, body) {
        if (error) throw new Error(error);
        deferred.resolve(JSON.parse(body).token);
    });
    return deferred.promise;
}

function getData(token) {
    var deferred = q.defer();
    request(options, function (error, response, body) {
        if (error) throw new Error(error);
        deferred.resolve(JSON.parse(body).token);
    });
    return deferred.promise;
}

function insertDataInMongo(a) {
    var deferred = q.defer();
    var MongoClient = require('mongodb').MongoClient;
    var url = 'mongodb://localhost/myDB';
    var token = a[1];
    MongoClient.connect(url, function (err, db) {
        if (err) return deferred.reject(new Error(err));
        console.log("connected for insert");
        var apiData = JSON.parse(a[0]).data;
        if (JSON.parse(a[0]).data) {
            db.collection('MediaData').insert(apiData);
            console.log("Records Inserted");
        } else {
            db.collection('Details').drop();
            db.collection('Details').insert(JSON.parse(a[0]));
            console.log("Records Inserted");
        }
        deferred.resolve(token);
    });
    return deferred.promise;
}

function getMedia(dataContext) {
    var deferred = q.defer();
    var cursor = dataContext[0];
    var token = dataContext[1];
    if (cursor !== null) {
        console.log("Inside cursor not null");
        cursor.forEach(function (data) {
            insertDataInMongo(data);
        });
    }
    return deferred.promise;
}

function check(array, attr, value) {
    for (var i = 0; i < array.length; i += 1) {
        if (array[i][attr] === value) {
            return false;
        }
    }
    return true;
}

function get_value(array, attr) {
    for (var i = 0; i < array.length; i += 1) {
        if (array[i].hasOwnProperty(attr)) {
            return array[i][attr];
        }
    }
}

function getNames(token) {
    var deferred = q.defer();
    var MongoClient2 = require('mongodb').MongoClient;
    var url = 'mongodb://localhost/myDB';
    console.log("going to get Data");
    MongoClient2.connect(url, function (err, db) {
        if (err) return deferred.reject(new Error(err));
        console.log("connected for select");
        var data = db.collection('Details').find();
        var dataContext = [data, token, 0, 0, 0, 0, 0, 0, 0, null];
        deferred.resolve(dataContext);
    });
    return deferred.promise;
}

function convertDate(date) {
    var yyyy = date.getFullYear().toString();
    var mm = (date.getMonth() + 1).toString();
    var dd = (date.getDate() - 3).toString();
    var mmChars = mm.split('');
    var ddChars = dd.split('');
    return yyyy + '-' + (mmChars[1] ? mm : "0" + mmChars[0]) + '-' + (ddChars[1] ? dd : "0" + ddChars[0]);
}

authenticate()
    .then(getData)
    .then(insertDataInMongo)
    .then(getNames)
    .then(getMedia);
This should work. Let me know if you run into any issues.
function insertDataInMongo(a) {
    var deferred = q.defer();
    var MongoClient = require('mongodb').MongoClient;
    var url = 'mongodb://localhost/myDB';
    var token = a[1];
    MongoClient.connect(url, function (err, db) {
        if (err) return deferred.reject(new Error(err));
        console.log("connected for insert");
        var apiData = JSON.parse(a[0]).data;
        if (JSON.parse(a[0]).data) {
            db.collection('MediaData').insert(apiData, function () {
                console.log("Records Inserted");
                return deferred.resolve(token);
            });
        } else {
            // Callback executes after the drop operation has completed.
            db.collection('Details').drop(function (error, result) {
                if (error) {
                    return deferred.reject(error); // Reject the promise if there was an error
                }
                // Callback executes after the insert operation has completed.
                db.collection('Details').insert(JSON.parse(a[0]), function (err, res) {
                    if (err) {
                        return deferred.reject(err); // Reject the promise if there was an error
                    }
                    console.log("Records Inserted");
                    return deferred.resolve(token);
                });
            });
        }
    });
    return deferred.promise;
}
From what I can see in the Node.js driver API for MongoDB:
https://mongodb.github.io/node-mongodb-native/api-generated/collection.html
hint: db.collection.drop is asynchronous, so you have to use a callback with it
db.collection('Details').drop(function (err, result) {
    // Do anything AFTER you dropped your collection
});
but since you are using promises, you should use something like this:

authenticate()
    .then(getData)
    .then(db.collection('Details').drop)
    .then(insertDataInMongo)
    .then(getNames)
    .then(getMedia);
or if you really want to keep the same code format:

function dropCollectionInMongo() {
    // Return the promise so the chain actually waits for the drop to finish.
    return db.collection('Details').drop();
}

authenticate()
    .then(getData)
    .then(dropCollectionInMongo)
    .then(insertDataInMongo)
    .then(getNames)
    .then(getMedia);

Node.js print directory files

I am currently working on a small project and having a few issues with getting files to print onto the page.
Currently, all files within my /views directory will print; however, I want to expand this so it can print files within folders, for example /views/test/prototype.html.
module.exports = function (router) {
    var fs = require('fs');
    router.get('/file-list', function (req, res) {
        var markup = function (files, callback) {
            var items = [];
            for (i = 0; i < files.length; i++) {
                var q = files[i];
                q = q.slice(0, -5);
                var markup = '<li>' + '' + q + '' + '</li>';
                items.push(markup);
            }
            callback(items);
        };
        var getFiles = function (callback) {
            fs.readdir(__dirname + '/views', function (err, files) { // '/' denotes the root folder
                if (err) throw err;
                markup(files, callback);
            });
        };
        getFiles(function (items) {
            // render markup for items
            res.render('file-list', { 'files': items });
        });
    });
}
Inside your getFiles method you can add some extra logic to see whether the file you are currently parsing is a directory, in which case you can loop over the files in that directory and push them to a result set array (filesArray). My example below only takes into account level-2 folders, as in the /views/test/prototype.html scenario that you provided:
module.exports = function (router) {
    var fs = require('fs');
    router.get('/file-list', function (req, res) {
        var markup = function (files, callback) {
            var items = [];
            for (i = 0; i < files.length; i++) {
                var q = files[i];
                q = q.slice(0, -5);
                var markup = '<li>' + '' + q + '' + '</li>';
                items.push(markup);
            }
            callback(items);
        };
        var getFiles = function (callback) {
            var rootDir = __dirname + '/views';
            fs.readdir(rootDir, function (err, files) { // '/' denotes the root folder
                if (err) throw err;
                // Subdirectory parsing logic START
                var filesArray = [];
                for (var i = 0; i < files.length; i++) {
                    var subdirPath = rootDir + '/' + files[i];
                    fs.stat(subdirPath, function (err, stats) {
                        if (err) {
                            console.log(err);
                            throw err;
                        }
                        if (stats.isDirectory()) {
                            fs.readdir(subdirPath, function (err, subdirFiles) {
                                if (err) throw err;
                                Array.prototype.forEach.call(subdirFiles, function (file) {
                                    filesArray.push(file);
                                });
                            });
                        } else {
                            filesArray.push(files[i]);
                        }
                    });
                }
                // Subdirectory parsing logic END
                markup(filesArray, callback);
            });
        };
        getFiles(function (items) {
            // render markup for items
            res.render('file-list', {
                'files': items
            });
        });
    });
}
The above example is a crude one, as it would be recommended to use a mechanism such as Promises that lets you better control the asynchronous looping going on and make sure that you have parsed all the individual files before calling markup().
UPDATE #1
I've been trying various approaches to crawl all directories and subdirectories using promises, with no success, but I did manage to come across a working solution for your scenario in this Stack Overflow accepted answer - to be more precise, the parallel search mechanism provided in that answer.
Below is an adaptation of the parallel search mechanism blended in with your existing code base - I've tested it and it works as intended:
var fs = require('fs');
var path = require('path');

module.exports = function (router) {
    router.get('/file-list', function (req, res) {
        var getFiles = function (callback) {
            var rootDir = path.join(__dirname, 'views');
            walk(rootDir, function (err, results) {
                if (err) {
                    console.log(err);
                    return;
                }
                markup(results, callback);
            });
        };
        var walk = function (dir, done) {
            var results = [];
            fs.readdir(dir, function (err, list) {
                if (err) {
                    return done(err);
                }
                var pending = list.length;
                if (!pending) {
                    return done(null, results);
                }
                list.forEach(function (file) {
                    file = path.resolve(dir, file);
                    fs.stat(file, function (err, stat) {
                        if (stat && stat.isDirectory()) {
                            walk(file, function (err, res) {
                                results = results.concat(res);
                                if (!--pending) {
                                    done(null, results);
                                }
                            });
                        } else {
                            results.push(file);
                            if (!--pending) {
                                done(null, results);
                            }
                        }
                    });
                });
            });
        };
        var markup = function (files, callback) {
            var items = [];
            for (i = 0; i < files.length; i++) {
                var q = files[i];
                q = q.slice(0, -5);
                var markup = '<li>' + '' + q + '' + '</li>';
                items.push(markup);
            }
            callback(items);
        };
        getFiles(function (items) {
            // render markup for items
            res.render('file-list', {
                'files': items
            });
        });
    });
};
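For completeness, the same recursive walk can be sketched with promises, as recommended earlier. This assumes a Node version that ships fs.promises (10+), which is newer than the rest of the code on this page:

var fs = require('fs');
var path = require('path');

// Recursively collect every file path under dir, descending into subdirectories.
async function walk(dir) {
    var entries = await fs.promises.readdir(dir);
    var nested = await Promise.all(entries.map(async function (entry) {
        var full = path.resolve(dir, entry);
        var stat = await fs.promises.stat(full);
        return stat.isDirectory() ? walk(full) : [full];
    }));
    // Flatten the per-entry arrays into a single list of file paths.
    return nested.reduce(function (flat, list) { return flat.concat(list); }, []);
}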
You need to examine the files array with fs.stat and recursively call getFiles on directories.
This answer solves a similar problem.

Node js child_process.execFile return

I need to run a compiled C++ binary from node.js and get a value back from it.
I tried to use child_process.execFile, but without success.
This is the function I use:
var exec = require('child_process');

Test.prototype.write = function (m) {
    var wRet;
    exec.execFile('./mainCmd', ['-o', '\\!' + m + '.'],
        function (error, stdout, stderr) {
            wRet = stdout;
            console.log("wRet" + wRet);
            return wRet;
        });
}
The problem is that wRet in the console.log contains the text returned from the C++ program, but the wRet returned from write() remains undefined.
Can you help?
Thank you all!
You have to pass a callback to your write() function:
var chproc = require('child_process');

Test.prototype.write = function (m, cb) {
    chproc.execFile(
        './mainCmd',
        ['-o', '\\!' + m + '.'],
        function (error, stdout, stderr) {
            if (error) return cb(error);
            cb(null, stdout);
        }
    );
};

// usage ...
var t = new Test();
t.write('foo', function (err, result) {
    if (err) throw err;
    // use `result`
});
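As an alternative sketch, assuming Node 8+ where util.promisify is available, the same wrapper can return a promise instead of taking a callback; the promisified execFile resolves with an object holding stdout and stderr:

var util = require('util');
var execFile = util.promisify(require('child_process').execFile);

Test.prototype.write = function (m) {
    // Resolves with { stdout, stderr } once the child process exits.
    return execFile('./mainCmd', ['-o', '\\!' + m + '.'])
        .then(function (result) { return result.stdout; });
};

// usage ...
var t = new Test();
t.write('foo').then(function (result) {
    // use `result`
});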
