I'm a beginner with Node.js and I have a problem. I want to display each file's name and last modification date in a list using an EJS view.
But my problem is passing the variables to my view: I want to fill one array with filenames and another with dates, but nothing appears.
here is the code :
// Question code (buggy on purpose — see the answer below):
// fs.readdir and fs.stat are asynchronous, so res.render at the bottom
// runs before any of the callbacks below have pushed into the arrays.
app.get('/', function (req, res) {
res.setHeader('Content-Type', 'text/html');
var filenameArray = [];
var datefileArray = [];
// Asynchronous: this callback fires on a later tick, after render below.
fs.readdir('./PDF/', function (err, files) {
if (err) {
throw err;
}
files.forEach(function (file) {
// Also asynchronous: each stat completes even later.
fs.stat('./PDF/'+file, function (err, stats) {
if (err) {
throw err;
}
// Fill in the array with filename and last date modification
filenameArray.push(file);
datefileArray.push(stats.mtime);
});
});
});
// These two pushes run synchronously, which is why only "test"/"pouet"
// are visible when the view renders.
filenameArray.push("test");
datefileArray.push("pouet");
// BUG: renders immediately, before readdir/stat have completed.
res.render('files.ejs', { filename: filenameArray, dateModification: datefileArray, index: filenameArray.length });
});
and here is my view :
<p> <%= filename.length %></p>
<ul><%
// Fixed off-by-one: with `i <= index` the loop ran one past the last
// element and printed "undefined - undefined" as a final item.
for(var i = 0 ; i < index; i++) {
%>
<li><%= filename[i] + " - " + dateModification[i] %></li>
<% } %></ul>
I only get the "test" item in my array.
Thank you.
Remember: node.js is asynchronous, so when you call render, the fs.readdir and the fs.stat inside it have not returned yet.
You can use the async module to help you with that:
var async = require('async');

// Collect every file in ./PDF/ together with its mtime, and only render
// once all the asynchronous stat calls have completed.
app.get('/', function (req, res) {
  res.setHeader('Content-Type', 'text/html');
  var filenameArray = [];
  var datefileArray = [];
  fs.readdir('./PDF/', function (err, files) {
    // Never `throw` inside an async callback in a request handler:
    // Express cannot catch it and the whole process would crash.
    if (err) return res.status(500).end();
    async.each(files, function (file, callback) {
      fs.stat('./PDF/'+file, function (err, stats) {
        // Propagate the error through async's callback so the final
        // handler below deals with every failure in one place.
        if (err) return callback(err);
        // Fill in the array with filename and last date modification
        filenameArray.push(file);
        datefileArray.push(stats.mtime);
        callback();
      });
    }, function (error) {
      if (error) return res.status(500).end();
      res.render('files.ejs', { filename: filenameArray, dateModification: datefileArray, index: filenameArray.length });
    });
  });
});
The each function will execute the iterator callback for each item in the array, and at the end, when all iterator functions have finished (or an error occurs), it calls the last callback.
Related
I have a problem with my html-pdf document creation. The problem is that the code often runs too fast to complete the process of PDF document creation. The process consists of building an HTML string by replacing placeholders in an HTML file. Below you see the code for what happens afterwards.
// Substitute every placeholder key of `setter` throughout the template.
Object.keys(setter).forEach(function(element, key, _array) {
var regex = new RegExp(element, "g");
data = data.replace(regex, setter[element])
})
var result = data;
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
if(err) {
console.log(err);
return;
} else {
// NOTE(review): readFileSync is synchronous — it HAS completed and
// html2 IS assigned before the next line runs, contrary to the
// question's assumption. The real issues are below.
let html2 = fs.readFileSync(mergeFileRes, 'utf8');
let options = {
format: 'a4' ,
"directory" : "/tmp",
};
if(html2){
pdf.create(html2, options).toStream(function(err, stream2){
// BUG: on error, stream2 may be undefined but is still used below.
if(err) console.log(err);
stream2.pipe(res);
stream2.on('end', function () {
try{
// BUG: fs.unlink is asynchronous and requires a callback; this
// try/catch can never observe its (later) failure.
fs.unlink(mergeFileRes)
console.log(3090, "deleted file");
}
catch (err){
console.log(3090, "Did not delete file");
}
});
});
} else {
}
}
});
My problem is that in many cases the html2 variable is not yet created before the pdf.create process starts. This is probably because the readFileSync takes too long to finish.
I was wondering, how can I fix this. How can I make the pdf.create wait for the readFileSync to finish and the html2 variable to be filled.
You can use fs.readFile to read the file asynchronously and html2 will be available within the callback function.
// Substitute every placeholder key of `setter` throughout the template.
Object.keys(setter).forEach(function(element, key, _array) {
  var regex = new RegExp(element, "g");
  data = data.replace(regex, setter[element])
})
var result = data;
fs.writeFile(mergeFileRes, result, 'utf8', function (err) {
  if(err) {
    console.log(err);
    return;
  }
  // Read the merged file back asynchronously; html2 only exists inside
  // this callback, so everything that needs it nests here.
  fs.readFile(mergeFileRes, 'utf8', function(err, html2){
    // Don't throw inside an async callback (it would crash the process);
    // log and bail out instead.
    if (err) {
      console.log(err);
      return;
    }
    let options = {
      format: 'a4' ,
      "directory" : "/tmp",
    };
    pdf.create(html2, options).toStream(function(err, stream2){
      // On error stream2 is not usable — return so we don't pipe undefined.
      if(err) return console.log(err);
      stream2.pipe(res);
      stream2.on('end', function () {
        // fs.unlink is asynchronous: it requires a callback, and a
        // surrounding try/catch could never see its errors.
        fs.unlink(mergeFileRes, function (err) {
          if (err) {
            console.log(3090, "Did not delete file");
          } else {
            console.log(3090, "deleted file");
          }
        });
      });
    });
  });
});
I am trying to use async with Node.js to handle multiple incoming POST requests that edit a JSON file. No matter how I refactor it, it will always make one of the edits and not the other. I thought that using async.queue would force the operations to be handled sequentially? What am I doing wrong?
My code:
// Queue worker: applies one edit (read JSON, set key, write back) and
// signals completion via `done` so the queue can start the next task.
var editHandler = function(task, done) {
  var req = task.req;
  var res = task.res;
  // Purely informational existence check; the work below does not wait
  // for it or depend on its result.
  fs.stat( "./app//public/json/" + "data.json", function(err, stat) {
    if(err == null) {
      console.log('File exists');
    } else if(err.code == 'ENOENT') {
      console.log("Error");
    } else {
      console.log('Some other error: ', err.code);
    }
  });
  console.log(req.params.id);
  console.log(req.body);
  fs.readFile( "./app//public/json/" + "data.json", 'utf8', function (err, data) {
    // FIX: the original never called `done`, so the queue eventually
    // stalled; also read errors were silently ignored.
    if (err) {
      console.log(err);
      return done(err);
    }
    data = JSON.parse( data );
    data[req.params.id] = req.body.school;
    fs.writeFile("./app//public/json/" + "data.json", JSON.stringify(data), function (err){
      if(err) {
        console.log(err);
        return done(err);
      }
      // Respond (and release the queue slot) only after the write landed.
      res.redirect('/');
      done();
    })
  });
};
//Make a queue for the services
// FIX: concurrency must be 1 to serialize the read-modify-write cycles;
// with 20, two requests can read the same snapshot and one edit is lost.
var serviceQ = async.queue(editHandler, 1);
serviceQ.drain = function() {
  console.log('all services have been processed');
}
app.post('/edit_school/:id', function(req, res) {
  serviceQ.push({req: req, res: res })
})
Thanks in advance for any insights! I am really new to using node.js for anything other than npm/webpack.
I have a program where the user first creates a file; once the file is created, I append data to it that is consistently coming from the client. The code below works as expected. I am new to Node.js, so I just want an expert opinion: when multiple users are creating and recording files on their machines at the same time, will it work asynchronously, or do I need to make some changes to the code?
io.js
socket.on('createlogfile', function() {
logsRecording.userLogs(function(filename) {
socket.emit('filename', filename);
});
});
socket.on('startrecording', function(obj) {
logsRecording.recordLogs(obj);
});
server.js
// Creates a uniquely named .log file and hands the name to the caller.
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
// BUG (see answer below): fs.openSync is synchronous and takes no
// callback — the function passed here is silently ignored, and the
// returned file descriptor is never closed.
fs.openSync(file, 'a',function () {
console.log('file created');
});
console.log('userLogs');
callback(filename);
},
// Appends obj.data to the temp-log file named obj.file, if it exists.
recordLogs: function (obj) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
// NOTE(review): the readdir error is never checked — `items` would be
// undefined on failure and the forEach below would throw.
fs.readdir(dir, function(err, items) {
// Array.forEach is synchronous; the appendFile calls it starts are not.
items.forEach(function(file){
if(obj.file === file){
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
// BUG: throwing inside an async callback crashes the process;
// the error should be passed to a callback instead.
if (err) throw err;
});
console.log('filename in records',obj.file);
}
});
});
}
You are using fs.openSync, which is synchronous and as such can hang the event loop.
You should be using fs.open and callback inside it:
userLogs: function (callback) {
var filename = uuid.v4() + '.log';
var file = filePath + '/' + filename;
fs.open(file, 'a', function (err) {
console.log('file created');
console.log('userLogs');
callback(err, filename);
});
},
And you can flatten recordLogs using async.
Also, it is bad practice to throw an error inside an asynchronous callback; you should pass the error to the callback instead.
As a last tip, Array.forEach is synchronous, and can hang the process, you should be using async.each
recordLogs: function (obj, callback) {
var dir = './app/records/templogs'
var fileAppend = dir + '/'+ obj.file;
console.log('data from recording', obj.data);
async.waterfall([
(callback) => {
fs.readdir(dir, (err, items) => {
callback(err, items);
});
},
(items, callback) => {
async.each(items, (file, callback) => {
if(obj.file === file) {
fs.appendFile(fileAppend, obj.data+ "\r\n", null, 'utf8', function (err) {
callback(err);
});
console.log('filename in records',obj.file);
} else {
callback();
}
}, (err) => {
callback(err);
});
}
], (err, file) => {
if(callback) {
callback(err);
}
});
}
I have some code that loops through a directory and retrieves the file name of each file. The code then retrieves the contents of each file (usually a number or a short text).
var config = {};
config.liveProcValues = {};
var path = require('path');
// NOTE(review): `walk` is an implicit global (no var/let). The quoted
// snippet below is incomplete — its closing braces are cut off by the
// surrounding question text.
walk = function(dir, done) {
var results = {};
fs.readdir(dir, function(err, list) {
if (err) return done(err);
var pending = list.length;
if (!pending) return done(null, results);
list.forEach(function(file) {
file = path.resolve(dir, file);
fs.stat(file, function(err, stat) {
if (stat && stat.isDirectory()) {
walk(file, function(err, res) {
// BUG: `results` is a plain object — it has no .concat method.
results = results.concat(res);
if (!--pending) done(null, results);
});
} else {
// BUG: fs.readFileSync is synchronous and ignores this callback —
// the function passed here never runs, so nothing below executes.
fs.readFileSync(file, 'utf8', function(err, data) {
if (err) {
contents = err;
} else {
contents = data;
}
// BUG: `filename` is read here before it is assigned on the next line.
console.log(filename + " - " + contents);
filename = file.replace(/^.*[\\\/]/, '');
config.liveProcValues[filename] = contents;
});
The console.log line successfully outputs the right information, however when trying to store it into JSON:
config.liveProcValues[filename] = contents;
It simply does not remember the information.
walk("testdirectory", function(err, results) {
if (err) throw err;
});
// Output the configuration
// BUG (see answer below): this runs before the asynchronous walk above
// has finished, so `config` is still empty at this point.
console.log(JSON.stringify(config, null, 2));
You have to make sure that you are accessing the data after the filesystem was traversed. In order to do that you have to move the console.log into the walk callback:
walk("testdirectory", function (err, tree) {
  if (err) {
    throw err;
  }
  // Only log once the traversal has completed and `tree` is populated.
  console.log(JSON.stringify(tree, null, 2));
});
See Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference for more info.
That alone won't solve the issue though, since you have a couple of logic errors in your code. You are trying to treat an object as an array (results.concat) and you are not always calling done when you are done (in particular, you are not calling done after you finished reading the files in a directory).
Here is a version that should come closer do what you want.
This uses Object.assign to merge two objects, which is not available in Node yet, but you can find modules that provide the same functionality.
Note thats I also removed the whole config object. It's cleaner if you work with results.
var path = require('path');
// Recursively walks `dir`, building { basename: fileContents } for every
// regular file in the tree, then calls done(err, results). A `pending`
// counter tracks how many parallel fs callbacks at this directory level
// are still outstanding; `done` fires when it reaches zero.
function walk(dir, done) {
var results = {};
fs.readdir(dir, function(err, list) {
if (err) return done(err);
if (!list.length) return done(null, results);
var pending = list.length;
list.forEach(function(file) {
file = path.resolve(dir, file);
fs.stat(file, function(err, stat) {
if (stat && stat.isDirectory()) {
walk(file, function(err, res) {
if (!err) {
// Merge recursive results
Object.assign(results, res);
}
if (!--pending) done(null, results);
});
} else {
fs.readFile(file, 'utf8', function(err, data) {
// On read failure the Error object itself is stored as the value.
var contents = err || data;
console.log(file + " - " + contents);
// Reduce the absolute path to its basename for the result key.
file = file.replace(/^.*[\\\/]/, '');
// Assign the result to `results` instead of a shared variable
results[file] = contents;
// Need to call `done` if there are no more files to read
if (!--pending) done(null, results);
});
}
});
});
});
}
But instead of writing your own walk implementation, you could also use an existing package.
I'm trying to understand the asynchronous programming Node.js but stalled on this code.
This function in their callback returns an array of files in a directory:
// Lists the files of a directory (relative to this module) and hands
// the caller a fresh array of their names. A missing directory is
// silently ignored, exactly as in the original.
function openDir(path, callback) {
  path = __dirname + path;
  fs.exists(path, function (exists) {
    if (!exists) {
      return;
    }
    fs.readdir(path, function (err, files) {
      if (err) {
        throw err;
      }
      // Copy the listing instead of filling an array index by index.
      var result = files.slice();
      return callback(result);
    });
  });
}
But when I use asynchronous code inside .forEach, it returns nothing:
// Question code (buggy on purpose — see the answers below).
function openDir(path, callback) {
path = __dirname + path;
fs.exists(path, function (exists) {
if (exists) {
fs.readdir(path, function (err, files) {
if (err) {
throw err;
}
var result = [];
files.forEach(function (filename, index) {
// fs.stat is asynchronous: these callbacks run later, after
// callback(result) below has already fired with an empty array.
fs.stat(path + filename, function (err, stats) {
if (err) {
throw err;
}
result[index] = filename;
});
});
// BUG: runs before any fs.stat callback above has completed.
return callback(result);
});
}
});
}
I understand why it happens, but don't understand how to write correct code.
The issue is that fs.stat is also async, but you could probably do something like:
var result = [],
    expectedLoadCount = files.length,
    loadCount = 0;
// Edge case fix: with an empty file list the forEach body never runs,
// so fire the callback immediately instead of never.
if (expectedLoadCount === 0) callback(result);
files.forEach(function (filename, index) {
  fs.stat(path + filename, function (err, stats) {
    if (err) {
      throw err;
    }
    result[index] = filename;
    // Invoke the callback only once the last stat has completed.
    if (++loadCount === expectedLoadCount) callback(result);
  });
});
The other answers may work well, but they are currently quite different semantically from the original code: they both execute stats in parallel, rather than sequentially. The forEach will initiate as many asynchronous stats operation as there are files in the list of files. The completion order of those operations may quite well be different from the original order of the list. This may substantially affect the error handling logic.
The following approach implements a state machine, which is aimed to executes stats asynchronously, yet sequentially (untested):
// Stats each directory entry strictly one after another (a simple state
// machine driven by nextStep), reporting through callback(err, results)
// in Node's (err, result) convention.
function openDir(path, callback) {
  path = __dirname + path;
  fs.exists(path, function (exists) {
    if (!exists)
      callback(null, null); // node (err, result) convention
    else {
      fs.readdir(path, function (err, files) {
        if (err)
          callback(err, null); // node (err, result) convention
        else {
          var results = [];
          var i = 0;
          nextStep(); // process the first file (the first step)
          function nextStep() {
            if (i >= files.length) // no more files?
              // FIX: the original referenced an undefined `result` here.
              callback(null, results); // node (err, result) convention
            else {
              fs.stat(path + files[i], function (err, stats) {
                if (err)
                  callback(err, null); // node (err, result) convention
                else {
                  results[i++] = stats;
                  // proceed to the next file
                  nextStep();
                }
              });
            }
          }
        }
      }); // FIX: the original's closing braces were mismatched here.
    }
  });
}
Promises may help to reduce the nesting level of the famous "Pyramid of Doom" like above.
try this:
// Stats all entries in parallel and invokes callback(result) once the
// completion counter reaches the number of entries.
function openDir(path, callback) {
  path = __dirname + path;
  fs.exists(path, function (exists) {
    var totalFiles = 0;
    if (exists) {
      fs.readdir(path, function (err, files) {
        if (err) {
          throw err;
        }
        var result = [];
        // Edge case fix: an empty directory would otherwise never invoke
        // the callback, because no fs.stat completion could ever fire.
        if (files.length === 0) {
          return callback(result);
        }
        files.forEach(function (filename, index) {
          fs.stat(path + filename, function (err, stats) {
            if (err) {
              throw err;
            }
            result[index] = filename;
            totalFiles++;
            if (totalFiles === files.length) {
              callback(result);
            }
          });
        });
      });
    }
  });
}
you can also use the Async module, to help on these kinds of situations