critical section in nodejs - writing data from frontend to json file - javascript

My question is: do I have to synchronize requests when multiple users write data to a file at the same time? In Java there is the synchronized keyword, but I don't understand what the equivalent looks like in Node.js.
app.post("/register",function(req,res){
fs.readFile( __dirname + "/" + "users.json", 'utf8', function (err, data) {
var users=JSON.parse(data);
users["users"].push(req.body);
var check=false;
fs.writeFile(__dirname + "/" + "users.json", JSON.stringify(users),'utf8', function (err) {
if(err){
res.send(JSON.stringify("error"));
check=true;
}
})
if(!check){
res.send(JSON.stringify("Created"));
}
})
})

You should use writeFileSync instead of writeFile.
It returns nothing, so if execution reaches the next line, the file was written successfully.
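A minimal sketch of the handler using the synchronous API, assuming the same users.json layout as in the question; the try/catch replaces the check flag:

const fs = require("fs");
const path = require("path");

app.post("/register", function (req, res) {
    const file = path.join(__dirname, "users.json");
    try {
        // Synchronous read/modify/write: no other request handler runs in between.
        const users = JSON.parse(fs.readFileSync(file, "utf8"));
        users["users"].push(req.body);
        fs.writeFileSync(file, JSON.stringify(users), "utf8");
        res.send(JSON.stringify("Created"));
    } catch (err) {
        res.send(JSON.stringify("error"));
    }
});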

Related

Having difficulty with javascript(node.js) that needs to be synchronous

I have an express.js app that needs to run a script on the server in order to derive some values using functions later. Here's the gist of it:
shell.exec(commandString);
readFolder();
renderPage();
Essentially, I need to run a script on the server, then run the second function, then run the third function. These need to happen one after the other, but it seems that JavaScript moves ahead to the second and third functions no matter what I do. I've tried promises, async, and callbacks, all of which I only partially understand, and I seem to get zero progress.
I will admit that I am a JavaScript novice. I am working on a project with others and this task fell to me. I doubt this is the best way to accomplish our ultimate goals, but I am left with little choice. Please help.
I'll put the entire post here for reference:
//Run script when post is rec'd from root and send to results page
app.post("/", (req, res) => {
var commandString;
//take values and create complete command for Astrum script
commandString = 'bash /home/astrum/Main/Astrum.sh -s ' + req.body.speed + ' -h ' + req.body.host + ' -u ' + req.body.username + ' -p ' + req.body.password;
//execute command in shell
shell.exec(commandString);
readFolder();
renderPage();
//Iterate thru filenames to create arrays for links and link labels
function readFolder() {
fs.readdir('./reports/html/', (err, files) => {
//variable & method for links to html records pages
ipAddressesLink = files; //this is initialized earlier, globally
//variable and method to remove file extension for link labels in pug
ipAddresses = files.map(removeExtension); //this is initialized earlier, globally
});
}
//function to remove last five characters of each element
function removeExtension(value) {
return value.substring(0, value.length - 5);
};
//function to render the page
function renderPage() {
res.render("results", {ipAddressesLink, ipAddresses, title: 'Results'});
}
res.end();
});
You could write it this way:
shell.exec(commandString, (error, stdout, stderr) => {
    // Calling the 1st function after shell command is executed
    readFolder();
});

function readFolder() {
    fs.readdir('./reports/html/', (err, files) => {
        // Some stuff
        ...
        // Calls the 2nd function after fs is done reading files
        renderPage();
    });
}

function renderPage() {
    const options = { ... }; // IP addresses etc.
    res.render(
        "results",
        options,
        // Calls the final function after render is finished
        sendResponse
    );
}

function sendResponse(err, html) {
    // Sends the response. It's possible that res.send() is the better solution here
    res.end();
}
It's just the general structure of the callback chain, definitely not the cleanest one. If you want better code structure and readability, try switching to async / await syntax.
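For reference, a rough async / await version of the same chain. This assumes shell.exec takes a Node-style (error, stdout, stderr) callback, as child_process.exec does, and uses the promise-based fs API; the command string is built from req.body as in the question:

const { promisify } = require("util");
const fsp = require("fs").promises;

app.post("/", async (req, res) => {
    const commandString = "bash /home/astrum/Main/Astrum.sh ..."; // built from req.body as in the question
    await promisify(shell.exec)(commandString);          // 1. run the script, wait for it to finish
    const files = await fsp.readdir("./reports/html/");  // 2. then read the folder
    const ipAddresses = files.map(removeExtension);
    res.render("results", { ipAddressesLink: files, ipAddresses, title: "Results" }); // 3. then render
});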
Is shell here the child_process module? If it is then you can pass an optional callback argument and call your functions from there.
shell.exec(commandString, (error, stdout, stderr) => {
    const files = readFolder();
    renderPage(files);
});

function readFolder() {
    ...
    return fs.readdirSync('./reports/html/');
}

function renderPage(files) {
    ...
}

Using FS to write new files if a certain url is found and remove the file if it's not found anymore

I'm trying to write a script: when a new url is found it will turn the url into a hash. If the file has already been written it should just be ignored, and if it isn't known from earlier it should be added.
needle.get(mainUrl, function(err, res) {
    if (err) throw err;
    if (res.statusCode == 200 && !err) {
        var $ = cheerio.load(res.body)
        var href = $('div div a').each(function(index, element) {
            urlList.push($(element).attr("href"))
            var url = ($(element).attr("href"))
            var hash = crypto.createHash('md5').update(url).digest('hex');
            fs.writeFile('./directory/otherdirectory' + `${hash}`, url, (err) => {
                if (err) throw err;
                console.log('Hash created: ' + url + ' saved as ' + hash)
            });
        })
    }
})
This is what I've done so far, but it only writes new files. It doesn't check whether a file has already been added, and it doesn't remove files that aren't found anymore.
So what I'm trying to do:
I've written a script that fetches a website for urls.
Hash all the urls.
Make FS check if the file has already been written; if it has, just ignore it.
If it is not known from earlier, add it as a new file.
If a url isn't found when fetching anymore, delete it from the list.
I think this might be an X/Y problem and for that I'm still awaiting the answer to my comment.
With that said, you can simply ignore the existing files using fs.existsSync: if it returns true, just skip saving the current file, otherwise save it. And to remove files that are not available anymore, get all the files in the directory using fs.readdir and remove the files whose urls are not in the response using fs.unlink:
needle.get(mainUrl, (err, res) => {
    if (err) throw err;
    if (res.statusCode == 200) {
        let $ = cheerio.load(res.body);
        let hashes = []; // list of hashes for this website (used later to keep only the items that are still available)
        $('div div a').each((index, element) => {
            let url = $(element).attr("href");
            let hash = crypto.createHash('md5').update(url).digest('hex');
            hashes.push(hash); // store the hash of the current url
            if (!fs.existsSync('./directory/otherdirectory/' + hash)) { // if this file doesn't exist (notice the "not" operator ! before fs.existsSync)
                fs.writeFile('./directory/otherdirectory/' + hash, url, err => { // save it
                    if (err) throw err;
                    console.log('Hash created: ' + url + ' saved as ' + hash);
                });
            }
        });
        fs.readdir('./directory/otherdirectory', (err, files) => { // get a list of all the files in the directory
            if (err) throw err;
            files.forEach(file => { // and for each file
                if (!hashes.includes(file)) { // if it was not encountered above (meaning it doesn't exist in the hashes array)
                    fs.unlink('./directory/otherdirectory/' + file, err => { // remove it
                        if (err) throw err;
                    });
                }
            });
        });
    }
});
Another approach:
Since you only seem to want to store the urls, the best way to do so would be to use one single file to store them all instead of storing each url in its own file. Something like this is more efficient:
needle.get(mainUrl, (err, res) => {
    if (err) throw err;
    if (res.statusCode == 200) {
        let $ = cheerio.load(res.body);
        let urls = $('div div a') // get the 'a' elements
            .map((index, element) => $(element).attr("href")) // map each one into its href attribute
            .get(); // and get them as an array
        fs.writeFile('./directory/list-of-urls', urls.join('\n'), err => { // then save all the urls encountered in the file 'list-of-urls' (each on its own line, hence the join('\n'))
            if (err) throw err;
            console.log('saved all the urls to the file "list-of-urls"');
        });
    }
});
That way old urls will be removed automatically as the file gets overwritten each time, and new urls will be added automatically. There is no need to check whether a url has already been encountered, because it will get re-saved anyway.
And if you want to get the list of urls somewhere else, just read the file and split it by '\n' like so:
fs.readFile('./directory/list-of-urls', 'utf8', (err, data) => {
    if (err) throw err;
    let urls = data.split('\n');
    // use urls here
});

JavaScript Read text file returns as undefined

I have this code which should read the current text file's contents, start a new line, and write a new sentence on that line. It writes the sentence, but the line before it is just undefined.
const data = msg.author.tag + " bejelentkezett a bottal. " + d.toLocaleTimeString();
const currenttext = fs.readFile('output.txt', 'utf8', function(err, contents) {
    fs.writeFile('output.txt', currenttext + '\n' + data, (err) => {
        if (err) throw err;
    })
});
readFile does not return anything. Instead you will be given the contents in your callback in the variable you named contents.
So change
fs.writeFile('output.txt', currenttext + '\n' + data, (err) => {
to
fs.writeFile('output.txt', contents + '\n' + data, (err) => {
As a side note, your error handling is a mess. Throwing an error from within a callback just leads to losing the error. Consider using the promise API with async/await instead to simplify your code, or update your callbacks. https://nodejs.org/api/fs.html#fs_fs_promises_api
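A minimal sketch of that suggestion, assuming the same output.txt file and data variable; fs.promises lets the read and write be awaited and the error handled in one place:

const fsp = require('fs').promises;

async function appendLine(data) {
    try {
        const contents = await fsp.readFile('output.txt', 'utf8');
        await fsp.writeFile('output.txt', contents + '\n' + data);
    } catch (err) {
        console.error(err); // handle the error here instead of throwing inside a callback
    }
}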

write() does not write sequentially?

I have found that the write() method of the stream.Writable class does not write data sequentially. When I am sending an attachment to the server in chunks, this code assembles the data chunks in the wrong order if no delay occurs. If I put a debug message like console.log() in the middle of the loop (for example to dump the data and watch what is being written), the bug disappears. So, what is the race condition in this code? It looks like I am enforcing a sequential assembly of the file, so I do not understand what is wrong.
My code:
function join_chunks(company_id, attachment_id, num_chunks) {
    var stream;
    var file;
    var output_filename = ATTACHMENTS_PATH + '/comp' + company_id + '/' + attachment_id + '.data';
    var input_filename;
    var chunk_data;
    var chunk_count = 0;
    stream = fs.createWriteStream(output_filename, {flags: 'w+', mode: 0666});
    console.log('joining files:');
    for (var i = 0; i < num_chunks; i++) {
        input_filename = ATTACHMENTS_PATH + '/comp' + company_id + '/' + attachment_id + '-' + (i + 1) + '.chunk';
        console.log(input_filename);
        fs.readFile(input_filename, (err, chunk_data) => {
            if (err) throw err;
            stream.write(chunk_data, function() {
                chunk_count++;
                if (chunk_count == num_chunks) {
                    console.log('join finished. closing stream');
                    stream.end();
                }
            });
        });
    }
}
The console:
joining files:
/home/attachments/comp-2084830518/67-1.chunk
/home/attachments/comp-2084830518/67-2.chunk
/home/attachments/comp-2084830518/67-3.chunk
/home/attachments/comp-2084830518/67-4.chunk
join finished. closing stream
Node version: v6.9.2
stream.write is an asynchronous operation, and so is the fs.readFile that feeds it: the read callbacks fire in whatever order the reads happen to complete, so the chunks can be written out of order.
If you want your writes to happen in order, read the chunks synchronously (for example with fs.readFileSync), or use the callbacks to chain each read and write after the previous one has finished.
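A rough sketch of the callback-chaining option, keeping the question's path layout; chunk i + 1 is only read and written after chunk i has been handed to the stream:

function join_chunks(company_id, attachment_id, num_chunks) {
    var output_filename = ATTACHMENTS_PATH + '/comp' + company_id + '/' + attachment_id + '.data';
    var stream = fs.createWriteStream(output_filename, {flags: 'w+'});

    function writeChunk(i) {
        if (i >= num_chunks) {
            console.log('join finished. closing stream');
            stream.end();
            return;
        }
        var input_filename = ATTACHMENTS_PATH + '/comp' + company_id + '/' + attachment_id + '-' + (i + 1) + '.chunk';
        fs.readFile(input_filename, (err, chunk_data) => {
            if (err) throw err;
            // only move on to the next chunk once this one has been written
            stream.write(chunk_data, () => writeChunk(i + 1));
        });
    }

    writeChunk(0);
}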

Reading multiple files in parallel and writing the data in new files accordingly node.js

I'm trying to handle an asynchronous action that reads multiple files from a folder at the same time and writes new ones to a different folder. The files that I read come in pairs: one file is the data template and the other one holds the data. According to the template, we process the data from the related data file. All the information that I get from both files is inserted into an object that I need to write as JSON to a new file. The code below works perfectly if there is only one pair of these files (1 template and 1 data):
for (var i = 0; i < folderFiles.length; i++)
{
    var objectToWrite = new objectToWrite();
    var templatefileName = folderFiles[i].toString();
    //Reading the Template File
    fs.readFile(baseTemplate_dir + folderFiles[i], { encoding: "utf8" }, function(err, data)
    {
        if (err) throw err;
        //Here I'm manipulating template data
        //Now I want to read the data according to the template read above
        fs.readFile(baseData_dir + folderFiles[i], { encoding: "utf8" }, function(err, data)
        {
            if (err) throw err;
            //Here I'm manipulating the data
            //Once I've got the template data and the data in my object objectToWrite, I'm writing it as JSON to a file
            fs.writeFile(baseOut_dir + folderFiles[i], JSON.stringify(objectToWrite), { encoding: 'utf8' }, function(err)
            {
                if (err) throw err;
                console.log("File written and saved!");
            });
        });
    });
}
Since I have 4 files, that is two template files and two related data files, it crashes. So I believe I have a problem with the callbacks... Maybe someone could help me figure it out! Thanks in advance!
It is happening because readFile is asynchronous, so the for loop does not wait for it to finish and goes on with the next iteration; it gets through all iterations really fast, so by the time a readFile callback is executed, folderFiles[i] contains the last entry and all callbacks operate on that last name only. The solution is to move all this work into a separate function outside the loop, so closures come in handy.
function combineFiles(objectToWrite, templatefileName) {
    //Reading the Template File
    fs.readFile(baseTemplate_dir + templatefileName, { encoding: "utf8" }, function(err, data)
    {
        if (err) throw err;
        //Here I'm manipulating template data
        //Now I want to read the data according to the template read above
        fs.readFile(baseData_dir + templatefileName, { encoding: "utf8" }, function(err, data)
        {
            if (err) throw err;
            //Here I'm manipulating the data
            //Once I've got the template data and the data in my object objectToWrite, I'm writing it as JSON to a file
            fs.writeFile(baseOut_dir + templatefileName, JSON.stringify(objectToWrite), { encoding: 'utf8' }, function(err)
            {
                if (err) throw err;
                console.log("File written and saved!");
            });
        });
    });
}

for (var i = 0; i < folderFiles.length; i++)
{
    var objectToWrite = new objectToWrite();
    var templatefileName = folderFiles[i].toString();
    combineFiles(objectToWrite, templatefileName);
}
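For reference, the same per-pair flow can also be sketched with the promise-based fs API, which avoids the shared loop variable entirely. Directory names come from the question; buildObject is a hypothetical stand-in for the manipulation steps elided there:

const fsp = require("fs").promises;

// Hypothetical helper: reads one template/data pair and writes the combined JSON.
async function combinePair(fileName) {
    const templateData = await fsp.readFile(baseTemplate_dir + fileName, "utf8");
    const data = await fsp.readFile(baseData_dir + fileName, "utf8");
    const objectToWrite = buildObject(templateData, data); // your manipulation logic goes here
    await fsp.writeFile(baseOut_dir + fileName, JSON.stringify(objectToWrite), "utf8");
    console.log("File written and saved!");
}

// Process all pairs in parallel.
Promise.all(folderFiles.map(f => combinePair(f.toString())))
    .catch(err => console.error(err));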
