Error in writing text to a file using Node.js fs? - javascript

I have a folder called errorlogs inside my public folder. I want to append text to a file in that folder without overwriting it. However, for some reason, I am unable to do that using fs.appendFile. Can someone please point out my error? Here is what I tried. It actually creates a file called errorlogsapikey.txt outside my public folder, which is not what I want.
var filepath = __dirname + '/errorlogs';
ensureExists(__dirname + '/errorlogs', 0744, function(err) {
    if (err) {
        // handle folder creation error
    } else {
        console.log("we are all good");
        var data = "url:" + " " + url + " " + "urlstatus:" + " " + urlstatus + " " + "time:" + " " + formatted;
        fs.appendFile(filepath + apikey + '.txt', data, function(err) {
            if (err) throw err;
        });
    }
});

function ensureExists(path, mask, cb) {
    if (typeof mask == 'function') { // allow the `mask` parameter to be optional
        cb = mask;
        mask = 0777;
    }
    fs.mkdir(path, mask, function(err) {
        if (err) {
            if (err.code == 'EEXIST') cb(null); // ignore the error if the folder already exists
            else cb(err); // something else went wrong
        } else cb(null); // successfully created folder
    });
}
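For what it's worth, the symptom described (a file named errorlogsapikey.txt appearing next to the errorlogs folder rather than inside it) is consistent with a missing path separator: filepath + apikey joins the folder name and the api key directly. A minimal sketch of the append call with the slash added, assuming apikey, url, urlstatus and formatted are defined as in the snippet above:

var filepath = __dirname + '/errorlogs';
ensureExists(filepath, 0o744, function(err) {
    if (err) return console.error(err); // folder could not be created
    var data = "url: " + url + " urlstatus: " + urlstatus + " time: " + formatted;
    // note the '/' between the folder name and the file name
    fs.appendFile(filepath + '/' + apikey + '.txt', data, function(err) {
        if (err) throw err;
    });
});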

Related

Using FS to write new files if a certain url is found and remove the file if it's not found anymore

I'm trying to write a script that, when a new url is found, turns the url into a hash. If a file for that hash has already been written, it just ignores it, and if the url is not known from earlier, it should be added.
needle.get(mainUrl, function(err, res) {
    if (err) throw err;
    if (res.statusCode == 200 && !err) {
        var $ = cheerio.load(res.body)
        var href = $('div div a').each(function(index, element) {
            urlList.push($(element).attr("href"))
            var url = ($(element).attr("href"))
            var hash = crypto.createHash('md5').update(url).digest('hex');
            fs.writeFile('./directory/otherdirectory' + `${hash}`, url, (err) => {
                if (err) throw err;
                console.log('Hash created: ' + url + ' saved as ' + hash)
            });
        })
    }
})
This is what I've done so far, but it only writes new files. It doesn't check whether files have already been added and doesn't remove files that are not found anymore.
So what I'm trying to do:
I've written a script that fetches a website for urls.
Hash all the urls.
Make fs check whether a file has already been written; if it has, just ignore it.
If it is not known from earlier, add it as a new file.
If a url isn't found when fetching anymore, delete its file from the list.
I think this might be an X/Y problem and for that I'm still awaiting the answer to my comment.
With that said, you can simply ignore the existing files using fs.existsSync: if it returns true, just skip saving the current file, otherwise save it. And to remove files that are not available anymore, get all the files in the directory using fs.readdir and remove the files whose urls are not in the response using fs.unlink:
needle.get(mainUrl, (err, res) => {
    if (err) throw err;
    if (res.statusCode == 200) {
        let $ = cheerio.load(res.body);
        let hashes = []; // list of hashes for this website (used later to keep only the items that are still available)
        $('div div a').each((index, element) => {
            let url = $(element).attr("href");
            let hash = crypto.createHash('md5').update(url).digest('hex');
            hashes.push(hash); // store the hash of the current url
            if (!fs.existsSync('./directory/otherdirectory/' + hash)) { // if this file doesn't exist (notice the "not" operator ! before fs.existsSync)
                fs.writeFile('./directory/otherdirectory/' + hash, url, err => { // save it
                    if (err) throw err;
                    console.log('Hash created: ' + url + ' saved as ' + hash);
                });
            }
        });
        fs.readdir('./directory/otherdirectory', (err, files) => { // get a list of all the files in the directory
            if (err) throw err;
            files.forEach(file => { // and for each file
                if (!hashes.includes(file)) { // if it was not encountered above (meaning it doesn't exist in the hashes array)
                    fs.unlink('./directory/otherdirectory/' + file, err => { // remove it
                        if (err) throw err;
                    });
                }
            });
        });
    }
});
Another approach:
Since you only seem to want to store the urls, the best way to do so would be to use one single file to store them all instead of storing each url in its own file. Something like this is more efficient:
needle.get(mainUrl, (err, res) => {
    if (err) throw err;
    if (res.statusCode == 200) {
        let $ = cheerio.load(res.body);
        let urls = $('div div a') // get the 'a' elements
            .map((index, element) => $(element).attr("href")) // map each one into its href attribute
            .get(); // and get them as an array
        fs.writeFile('./directory/list-of-urls', urls.join('\n'), err => { // then save all the urls encountered in the file 'list-of-urls' (each on its own line, hence the join('\n'))
            if (err) throw err;
            console.log('saved all the urls to the file "list-of-urls"');
        });
    }
});
That way old urls are removed automatically as the file gets overwritten each time, and new urls are added automatically. There is no need to check whether a url has already been encountered, because it will get re-saved anyway.
And if you want to get the list of urls somewhere else, just read the file and split it by '\n' like so:
fs.readFile('./directory/list-of-urls', 'utf8', (err, data) => {
    if (err) throw err;
    let urls = data.split('\n');
    // use urls here
});

How to implement a for-loop for an SFTP-Server

I am setting up an SFTP server with Node.js, with several functions implemented (read, delete, download…). The problem is that the server only sees the content of the first file in the directory it reads. Meaning, while I can see all files lined up with the help of my WinSCP client, it only opens the content of the first file. When I click on the second file, it shows the content of the first again, although the file is named differently, and so on. When I try to download the files, the same happens: you can download each file, but as soon as you open them, there is the content of the very first file in my directory. I know that it has to do with the for loop I haven't been able to implement for a while now. I hope you guys can help with some experience/expertise.
I used dummy user names and paths for this post.
session.on("readdir", function(files, responder) {
var i = 0;
responder.on("dir", function() {
**if (results[i])** {
console.warn(now + " Returning file: " + results[i]);
responder.file(results[i]);
**return i++;**
} else {
return responder.end();
}
});
return responder.on("end", function() {
return console.warn(now + " Directory has been read");
});
});
session.on('stat', function(path, statkind, statresponder) {
statresponder.is_file();
statresponder.permissions = 0o777;
statresponder.uid = 1;
statresponder.gid = 1;
statresponder.size = 1234;
});
session.on("readfile", function(path, writestream) {
var i = 0;
**if(results[i])** {
console.log(now + " Reading file: " **+ results[i]);**
return fs.createReadStream("KKK_Files/"+results[i]).pipe(writestream);
}
else {
console.warn(now + " no file to read");
}
})
session.on("delete", function(path, writestream) {
session.sftp(function(err, sftp) {
if (err) throw err;
var i = 0;
fs.unlink("KKK_Files/" **+ results[i],** function(err){
if ( err ) {
console.log( "Error, problem starting SFTP: %s", err );
}
else
{
console.log(now + " all Files have been deleted");
}
});
});
session.on("download", function(path, fastGet) {
var moveTo = "C:/Users/User/Downloads/file.DAT";
if (err) throw err;
var i = 0;
fs.fastGet("KKK_Files/" +
**results[i],**moveTo,function
(err) {if ( err ) {
console.log("Error downloading the file", err);
}
else
{
console.log(now + "Successfully downloaded");
}
});
});
files.forEach(files => {
results.push(files);
});
});
});
});
for**(i=0; i<results.length; i++)** {
console.log(now + " Reading file: " + results[i]);
return fs.createReadStream("KKK_Files/" + results[i]).pipe(writestream);
}if (err) throw err;
You are using the local variable i as the index into your results. This local variable is set back to 0 each time the event occurs.
// problem with your variable name; change variable names and try to understand scope
var i = 0;
You may want to keep the index variable outside of the event listener. Try with one event at a time.
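For example, rather than declaring a fresh var i = 0 inside each handler, the readfile, delete and download handlers could key off the path argument they already receive from the client. A minimal sketch of the readfile case, assuming path is the plain file name the client requested and lives under KKK_Files (untested against the particular SFTP library used in the question):

session.on("readfile", function(path, writestream) {
    // stream back the file the client actually asked for,
    // instead of always results[0]
    console.log(now + " Reading file: " + path);
    return fs.createReadStream("KKK_Files/" + path).pipe(writestream);
});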

Is there any way to prevent this error in NodeJS?

I'm trying to extract text from a pdf file and then put it in a string.
I found "pdfreader" and tried to implement it, but I always get an error. At first it begins reading the text normally, then when the pdf file ends it stops and blocks the app.
Code:
var PdfReader = require("pdfreader").PdfReader;

router.get('/adminse', function(req, res, next) {
    aux = '';
    new PdfReader().parseFileItems("D:/bureau/VoguelConsulting/Backend/uploads/cv_url_CV_anglais_20191337991.pdf", function(err, item) {
        if (err)
            callback(err);
        else if (item === 'undefined') {
            console.log('erreur');
        } else if (item.text) {
            aux = item.text;
            console.log(' aux = ' + aux);
        } else {
            console.log('working');
        }
    });
});
Use a falsy check with the not operator (!); this will work with undefined and null as well:
else if (!item) {
    console.log('erreur');
} else if (item.text) {
    ...
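For context, a complete handler with that check in place might look like the sketch below; pdfreader normally invokes the callback one last time without an item once the file has been fully parsed, which is exactly the case the falsy check handles (this is a sketch, not the answer's original code):

var PdfReader = require("pdfreader").PdfReader;

router.get('/adminse', function(req, res, next) {
    var aux = '';
    new PdfReader().parseFileItems("D:/bureau/VoguelConsulting/Backend/uploads/cv_url_CV_anglais_20191337991.pdf", function(err, item) {
        if (err) {
            console.error(err);          // parsing failed
        } else if (!item) {
            console.log('done: ' + aux); // no more items: the file has been fully read
        } else if (item.text) {
            aux += item.text;            // accumulate the extracted text
        }
    });
});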

Node.js: How to call a function again from its callback

I want to create a directory on the server, but first check whether one already exists with the same name. If no directory exists, create one with the name provided. Otherwise, append a random string to the provided name and recheck whether one exists with the new name.
So far, I'm able to write a function that performs the initial check and creates the directory if it doesn't exist, but I have no idea how to run the check again if a directory already exists.
var outputDir = __dirname + '/outputfiles/' + values.boxname;

function ensureExists(path, mask, cb) {
    if (typeof mask == 'function') {
        cb = mask;
        mask = 484;
    }
    fs.mkdir(path, mask, function(err) {
        if (err) {
            cb(err);
        } else cb(null); // successfully created folder
    });
}
and calling the function
ensureExists(outputDir, 484, function(err) {
    if (err) {
        if (err.code == 'EEXIST') {
            var outputDir = outputDir + '-' + Date.now();
            // NEED Help here to call this function again to run the check again
            return console.log("A Folder with same name already exists");
        } else {
            console.err(err);
        }
    } else {
        console.log("Folder created");
    }
});
So, in short, I want to create directories on the server with unique names. Please help me fix this problem, thanks.
function callback(err) {
    if (err) {
        if (err.code == 'EEXIST') {
            // note: no `var` here, so the outer outputDir is updated instead of
            // declaring a new (undefined) local variable
            outputDir = outputDir + '-' + Date.now();
            ensureExists(outputDir, 484, callback); // Call again with the new name
            return console.log("A Folder with same name already exists");
        } else {
            console.error(err);
        }
    } else {
        console.log("Folder created");
    }
}
ensureExists(outputDir, 484, callback); // Call first
Or you could merge the functionality into one:
function ensureExists(path, mask, cb) {
    if (typeof mask == 'function') {
        cb = mask;
        mask = 484;
    }
    fs.mkdir(path, mask, function(err) {
        if (err) {
            if (err.code == 'EEXIST') {
                var newpath = path + '-' + Date.now();
                ensureExists(newpath, mask, cb); // rerun with the new path
                console.log("A Folder with same name already exists");
            } else {
                console.error(err);
            }
        } else cb(path); // successfully created folder
    });
}
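The same retry-until-unique idea can also be written with fs.promises and async/await on newer Node versions. This is only a sketch of that variant; ensureUniqueDir is a hypothetical name, not part of the original answer:

const fsp = require('fs').promises;

async function ensureUniqueDir(basePath, mask) {
    let target = basePath;
    for (;;) {
        try {
            await fsp.mkdir(target, mask); // throws EEXIST if the name is taken
            return target;                 // created successfully
        } catch (err) {
            if (err.code !== 'EEXIST') throw err; // unexpected error, give up
            // name taken: append a timestamp and try again
            // (Date.now() can collide on very fast retries; a random suffix is safer)
            target = basePath + '-' + Date.now();
        }
    }
}

// usage:
// ensureUniqueDir(outputDir, 0o744).then(dir => console.log("Folder created:", dir));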

Walking a directory with Node.js [duplicate]

This question already has answers here:
What is the purpose of the var keyword and when should I use it (or omit it)? (19 answers)
JavaScript closure inside loops – simple practical example (44 answers)
Closed last year.
I've got a problem with this code in node.js. I want to recursively walk through a directory tree and apply the callback action to every file in the tree. This is my code at the moment:
var fs = require("fs");

// General function
var dive = function(dir, action) {
    // Assert that it's a function
    if (typeof action !== "function")
        action = function(error, file) { };
    // Read the directory
    fs.readdir(dir, function(err, list) {
        // Return the error if something went wrong
        if (err)
            return action(err);
        // For every file in the list
        list.forEach(function(file) {
            // Full path of that file
            path = dir + "/" + file;
            // Get the file's stats
            fs.stat(path, function(err, stat) {
                console.log(stat);
                // If the file is a directory
                if (stat && stat.isDirectory())
                    // Dive into the directory
                    dive(path, action);
                else
                    // Call the action
                    action(null, path);
            });
        });
    });
};
The problem is that inside the forEach loop, stat is called for every file via the variable path. When the callback is called, path already holds another value, so it dives into the wrong directories or calls the action for the wrong files.
Probably this problem could easily be solved by using fs.statSync, but that is not the solution I would prefer, since it blocks the process.
var path = dir + "/" + file;
You forgot to make path a local variable. Now it won't be changed behind your back in the loop.
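In context, the fix is that single var inside the forEach callback, so each fs.stat callback closes over its own path; a sketch of the corrected loop body from the question's code:

list.forEach(function(file) {
    // declared with var, so this iteration's callback keeps its own path
    var path = dir + "/" + file;
    fs.stat(path, function(err, stat) {
        if (stat && stat.isDirectory())
            dive(path, action);
        else
            action(null, path);
    });
});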
Use node-dir for this. Because you need a separate action for directories and files, I'll give you 2 simple iterators using node-dir.
Asynchronously iterate the files of a directory and its subdirectories and pass an array of file paths to a callback.
var dir = require('node-dir');

dir.files(__dirname, function(err, files) {
    if (err) throw err;
    console.log(files);
    // we have an array of files now, so now we'll iterate that array
    files.forEach(function(filepath) {
        actionOnFile(null, filepath);
    });
});
Asynchronously iterate the subdirectories of a directory and its subdirectories and pass an array of directory paths to a callback.
var dir = require('node-dir');

dir.subdirs(__dirname, function(err, subdirs) {
    if (err) throw err;
    console.log(subdirs);
    // we have an array of subdirs now, so now we'll iterate that array
    subdirs.forEach(function(filepath) {
        actionOnDir(null, filepath);
    });
});
Another suitable library is filehound. It supports file filtering (if required), callbacks and promises.
For example:
const Filehound = require('filehound');

function action(file) {
    console.log(`process ${file}`);
}

Filehound.create()
    .find((err, files) => {
        if (err) {
            return console.error(`error: ${err}`);
        }
        files.forEach(action);
    });
The library is well documented and provides numerous examples of common use cases.
https://github.com/nspragg/filehound
Disclaimer: I'm the author.
Not sure if I should really post this as an answer, but for your convenience and that of other users, here is a rewritten version of the OP's code which might prove useful. It provides:
Better error management support
A global completion callback which is called when the exploration is complete
The code:
/**
 * dir: path to the directory to explore
 * action(file, stat): called on each file, or until an error occurs. file: path to the file. stat: stat of the file (retrieved by fs.stat)
 * done(err): called one time when the process is complete. err is undefined if everything was ok, otherwise it is the error that stopped the process
 */
var walk = function(dir, action, done) {
    // this flag will indicate if an error occurred (in that case we don't want to go on walking the tree)
    var dead = false;
    // this counter will store the number of pending async operations
    var pending = 0;
    var fail = function(err) {
        if (!dead) {
            dead = true;
            done(err);
        }
    };
    var checkSuccess = function() {
        if (!dead && pending == 0) {
            done();
        }
    };
    var performAction = function(file, stat) {
        if (!dead) {
            try {
                action(file, stat);
            } catch (error) {
                fail(error);
            }
        }
    };
    // this function will recursively explore one directory in the context defined by the variables above
    var dive = function(dir) {
        pending++; // async operation starting after this line
        fs.readdir(dir, function(err, list) {
            if (!dead) { // if we are already dead, we don't do anything
                if (err) {
                    fail(err); // if an error occurred, let's fail
                } else { // iterate over the files
                    list.forEach(function(file) {
                        if (!dead) { // if we are already dead, we don't do anything
                            var path = dir + "/" + file;
                            pending++; // async operation starting after this line
                            fs.stat(path, function(err, stat) {
                                if (!dead) { // if we are already dead, we don't do anything
                                    if (err) {
                                        fail(err); // if an error occurred, let's fail
                                    } else {
                                        if (stat && stat.isDirectory()) {
                                            dive(path); // it's a directory, let's explore recursively
                                        } else {
                                            performAction(path, stat); // it's not a directory, just perform the action
                                        }
                                        pending--; checkSuccess(); // async operation complete
                                    }
                                }
                            });
                        }
                    });
                    pending--; checkSuccess(); // async operation complete
                }
            }
        });
    };
    // start exploration
    dive(dir);
};
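A brief usage sketch for the walk function above; the action and done callbacks here are just illustrative:

walk(__dirname, function(file, stat) {
    // called once per file
    console.log("file:", file, stat.size, "bytes");
}, function(err) {
    // called once, when the walk is finished or has failed
    if (err) return console.error("walk failed:", err);
    console.log("walk complete");
});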
Don't reinvent the wheel - use and contribute to open source instead. Try one of the following:
https://github.com/pvorb/node-dive
https://github.com/coolaj86/node-walk
There is an NPM module for this:
npm dree
Example:
const dree = require('dree');

const options = {
    depth: 5, // To stop after 5 directory levels
    exclude: /dir_to_exclude/, // To exclude some paths with a regexp
    extensions: [ 'txt', 'jpg' ] // To include only some extensions
};

const fileCallback = function (file) {
    action(file.path);
};

let tree;

// Doing it synchronously
tree = dree.scan('./dir', options, fileCallback);

// Doing it asynchronously (returns a promise)
tree = await dree.scanAsync('./dir', options, fileCallback);

// Here tree contains an object representing the whole directory tree (filtered with options)
function loop() {
    var item = list.shift();
    if (item) {
        // content of the loop
        functionWithCallback(loop);
    } else {
        // after the loop has ended
        whatever();
    }
}
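This last snippet is the general callback-driven pattern for processing a list one item at a time (list, functionWithCallback and whatever are placeholders). Applied to the question, it could be used to stat the readdir entries sequentially instead of firing all the fs.stat calls at once; a sketch under that assumption:

fs.readdir(dir, function(err, list) {
    if (err) return action(err);
    (function next() {
        var file = list.shift();
        if (!file) return; // nothing left to process
        var path = dir + "/" + file;
        fs.stat(path, function(err, stat) {
            if (stat && stat.isDirectory())
                dive(path, action);
            else
                action(null, path);
            next(); // only now move on to the next entry
        });
    })();
});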
