JavaScript best practice to write to multiple files using fs writeFile - javascript

I have a scenario where I need to create 2 different files from data available in one object. I need these files to be created with data before I perform the next set of actions. While testing, sometimes the data is incomplete in the created files when I try to read from them. Do I need to add await/promise? Doesn't writeFile wait until the callback function returns?
import { writeFile } from 'fs';

const data = {
  peopleList: {..},
  phoneList: {..}
};

writeFile('peopleList.txt', data.peopleList, (err) => {
  if (err) {
    return console.log(err);
  }
  console.log("The people list file was saved!");
});

writeFile('phoneList.txt', data.phoneList, (err) => {
  if (err) {
    return console.log(err);
  }
  console.log("The phone list file was saved!");
});
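If the next set of actions must only run once both files are fully written, one option is the promise-based fs API with Promise.all. A minimal sketch, assuming the objects need to be serialized with JSON.stringify first (writeFile expects a string, buffer, or typed array, not a plain object; the data shape below is a stand-in for the question's object):

import { writeFile } from 'fs/promises';

// Hypothetical data shape, standing in for the object from the question.
const data = {
  peopleList: { alice: 1 },
  phoneList: { alice: '555-0100' }
};

async function saveLists() {
  // Both writes run in parallel; Promise.all resolves only when both finish.
  await Promise.all([
    writeFile('peopleList.txt', JSON.stringify(data.peopleList)),
    writeFile('phoneList.txt', JSON.stringify(data.phoneList))
  ]);
  console.log('Both files were saved!');
  // ...perform the next set of actions here, the files are complete now
}

saveLists().catch(console.error);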

Related

fs.readfile changes scope of global array and it can't be used outside it

I have two sections of code: 1) a function that is called by 2) to populate the array and write it into a file.
async function timeSeries(obj) {
  data = [
    {
      original_value: []
    }
  ]
  // read file named as passed object's _id
  await fs.readFile("./api/assignment_data/" + obj._id + ".json", "utf-8", function read(err, datas) {
    if (err) {
      throw err;
    }
    const filedata = JSON.parse(datas)
    filedata.map(line => data[0].original_value.push(line.original_value))
  })
  setTimeout(() => {
    try {
      fs.writeFileSync("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8', flag: 'w' })
    } catch (error) {
      console.log(error)
    }
  }, 300);
}
The problem is, I can't access the global data array above after using it inside the fs.readFile callback (callback scope hell problem), so I had to use setTimeout before I could write it to a file with fs.writeFileSync (if I return the array, I get a promise, but I want the data). How do I solve this? Instead of writing it into another file and reading it inside another route (below), how can I directly return the array in the second route and pass it as a JSON response?
section 2)
router.route("/api/debug/:num").get((req, res) => {
fs.readFile("./api/assignment_data/metrics.json", "utf8", function read(err, data) {
if (err) {
console.log(err);
}
const objdata = JSON.parse(data)
timeSeries(objdata[req.params.num])
})
fs.readFile("./api/timeseries.json", "utf8", function read(err, data) {
if (err) {
console.log(err);
}
const objdata = JSON.parse(data)
res.json(data)
})
})
If you use fs.readFile and want to perform an action after the file has been read, you must do that action (write and read a file, in your case) inside the callback function. Alternatively, you can use fs.readFileSync if reading synchronously is acceptable.
First off, we need to explain a few things:
fs.readFile() is non-blocking and asynchronous. That means that when you call it, it starts the operation, returns immediately, and starts to execute the code that comes right after it. Then, some time later, it calls its callback.
So, your code is:
Calling fs.readFile()
Then, immediately setting a timer
Then, it's an indeterminate race between the fs.readFile() callback and the timer to see who finishes first. If the timer finishes first, then it will call its callback and you will attempt to access data BEFORE it has been filled in (because the fs.readFile() callback has not yet been called).
You cannot write reliable code this way as you are guessing on the timing of indeterminate, asynchronous operations. Instead, you have to use the asynchronous result from within the callback because that's the only place that you know the timing for when it finished and thus when it's valid. So, one way to solve your problem is by chaining the asynchronous operations so you put the second one INSIDE the callback of the first:
function timeSeries(obj, callback) {
  // read file named as passed object's _id
  fs.readFile("./api/assignment_data/" + obj._id + ".json", "utf-8", function read(err, datas) {
    if (err) {
      console.log(err);
      // tell caller about our error
      callback(err);
      return;
    } else {
      let data = [{ original_value: [] }];
      const filedata = JSON.parse(datas);
      for (let line of filedata) {
        data[0].original_value.push(line.original_value);
      }
      fs.writeFile("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8' }, (err) => {
        if (err) {
          console.log(err);
          callback(err);
          return;
        } else {
          // give our data to the caller (error-first convention: null error, then data)
          callback(null, data);
        }
      });
    }
  });
}
Then, to call this function, you pass it a callback and in the callback you can either see the error or get the data.
In modern nodejs, it's a bit easier to use async/await and the promise-based interfaces in the fs module:
const fsp = require('fs').promises;

async function timeSeries(obj) {
  // read file named as passed object's _id
  try {
    let datas = await fsp.readFile("./api/assignment_data/" + obj._id + ".json", "utf-8");
    const filedata = JSON.parse(datas);
    let data = [{ original_value: [] }];
    for (let line of filedata) {
      data[0].original_value.push(line.original_value);
    }
    await fsp.writeFile("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8' });
    return data;
  } catch (e) {
    console.log(e);
    // handle error here or throw back to the caller
    throw e;
  }
}
For this version, the caller can use await and try/catch to get errors:
try {
  let data = await timeSeries(obj);
  // do something with data here
} catch (e) {
  // handle error here
}
Based on the code you have written, I modified it using simple async/await - hope this helps.
import fs from 'fs'

async function timeSeries(obj) {
  const data = [{
    original_value: []
  }]
  // read file named as passed object's _id
  const assData = fs.readFileSync('./api/assignment_data/' + obj._id + '.json', 'utf8')
  const filedata = JSON.parse(assData)
  filedata.map(line => data[0].original_value.push(line.original_value))
  // no need for timeOut
  fs.writeFileSync('./api/timeseries.json', JSON.stringify(data));
  // return data if you need it
  return data
}

router.route("/api/debug/:num").get(async (req, res) => {
  try {
    const metricData = fs.readFileSync('./api/assignment_data/metrics.json', 'utf8')
    const objdata = JSON.parse(metricData)
    const timeSeriesData = await timeSeries(objdata[req.params.num])
    // returning timeSeriesData
    res.status(200).json(timeSeriesData)
  } catch (error) {
    res.status(500).send(error.message)
  }
})

How to share a variable between two files generated by a function

I am writing an API in NodeJS and I have ran into a brick wall. I am trying to use a function to grab a variable and use module.exports to use said variable in another file. This however keeps coming up as undefined in the console.
I have already tried using return statements in different places in the file but I keep getting undefined.
This is what the code looks like to grab the variable and export it.
File 1 (api.js)
const fs = require('fs');
const homeDir = require('os').homedir();

module.exports = {
  workingDirectory: () => {
    let dir;
    fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
      if (err) throw err;
      fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
        if (readErr) throw readErr;
        let jsonData = JSON.parse(data);
        dir = jsonData.WorkingDirectory;
      });
    });
    return dir;
  }
};
File 2
const api = require('../api');
console.log(api.workingDirectory);
.unitv file
{
  "WorkingDirectory": "/home/user/UniTV",
  "Port": "3000"
}
In the console it turns up as undefined when it should turn up with the value of "WorkingDirectory" in /home/user/.unitv.
Any and all help is appreciated, thanks.
Your current code is problematic: return dir; occurs before fs.access/fs.readFile finish. These are asynchronous functions and require the use of callback-, promise-, or async/await-styled coding. The gist of it is that the code continues executing other code while it waits on I/O (such as reading a file), and the way you have written it, nothing is returned. See https://repl.it/#CodyGeisler/readFileCallback for a working callback example.
workingDirectory: () => {
  let dir;
  fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
    if (err) throw err;
    fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
      if (readErr) throw readErr;
      let jsonData = JSON.parse(data);
      dir = jsonData.WorkingDirectory;
    });
  });
  return dir;
}
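For reference, a minimal callback-style sketch of the same function (names mirror the question; the callback follows Node's error-first convention, and fs.readFile already reports a missing file, so the separate fs.access check can be dropped):

const fs = require('fs');
const homeDir = require('os').homedir();

module.exports = {
  // Takes a callback instead of trying to return the value directly.
  workingDirectory: (callback) => {
    fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
      if (readErr) return callback(readErr);
      try {
        const jsonData = JSON.parse(data);
        callback(null, jsonData.WorkingDirectory);
      } catch (parseErr) {
        callback(parseErr);
      }
    });
  }
};

// Usage in File 2:
// const api = require('../api');
// api.workingDirectory((err, dir) => {
//   if (err) throw err;
//   console.log(dir); // "/home/user/UniTV"
// });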

Store fs.stat while looping through files into an array, in Node JS

I'm looping through files in a directory and storing the file details to an array data. The following code populates the array if I don't attempt to run fs.stat to get things like the file create/edit date:
fs.readdir('../src/templates', function (err, files) {
  if (err) {
    throw err;
  }
  var data = [];
  files.forEach(function (file) {
    try {
      fs.stat('../src/templates/' + file, (error, stats) => {
        data.push({ Name: file, Path: path.join(query, file) });
      });
    } catch (e) {
      console.log(e);
    }
  });
  res.json(data);
});
If I move the data.push(...) outside the fs.stat the array returns with the file data. Inside the fs.stat it returns empty. I assume this is an asynchronous issue in that the for loop is running and finishing before fs.stat runs.
I'm thinking I need to use a promise here but unsure.
If you want or need to be asynchronous:
const fs = require("fs");
const path = require("path");
const { promisify } = require("util");
const asyncStat = promisify(fs.stat);
fs.readdir('../src/templates', async function(err, files) {
if (err) {
throw err;
}
const data = await Promise.all(files.map(async function(file) {
try {
const stats = await asyncStat('../src/templates/' + file);
return { Name: file, Path: path.join(query, file), stats };
} catch (e) {
console.log(e);
}
}));
res.json(data);
});
Note that I used map instead of forEach and then awaited all the promises (an async function returns a promise).
I also needed to wrap fs.stat with util.promisify so it returns a promise.
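On Node 10+, the built-in promise API can replace promisify entirely. A sketch under the same assumptions (templateStats is a hypothetical helper name; query mirrors the variable from the question's surrounding route handler):

const fsp = require('fs').promises;
const path = require('path');

// Reads the directory, stats each file in parallel, and returns the array.
async function templateStats(dir, query) {
  const files = await fsp.readdir(dir);
  return Promise.all(files.map(async (file) => {
    const stats = await fsp.stat(path.join(dir, file));
    return { Name: file, Path: path.join(query, file), stats };
  }));
}

// Usage:
// templateStats('../src/templates', query).then(data => res.json(data));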
You're right about the issue being in the asynchronous call. You could use a promise, or you could use fs.statSync(...), which returns an fs.Stats object and operates synchronously.
files.forEach(function (file) {
  try {
    var fileStats = fs.statSync('../src/templates/' + file);
    data.push({
      Name: file,
      Path: path.join(query, file),
      // store the stats alongside the file details
      Stats: fileStats
    });
  } catch (e) {
    console.log(e);
  }
});

Parse JSON while asynchronously reading files

I am trying to read a few JSON files and store their results into one array. I have:
const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']
In order to read all of them and create a resulting array of the files' contents, I do this:
import { readFile } from 'fs'
import async from 'async'

const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']

function read(file, callback) {
  readFile(file, 'utf8', callback)
}

async.map(files, read, (err, results) => {
  if (err) throw err
  // parse without needing to map over entire results array?
  results = results.map(file => JSON.parse(file))
  console.log('results', results)
})
This post helped me in getting there: Asynchronously reading and caching multiple files in nodejs
What I'm wondering is how to call JSON.parse() in the intermediate step of reading the file instead of having to map over the resulting array. I could also use a clarification on what exactly the callback parameter inside the read function is for, if it isn't just passed in for the sake of calling readFile properly.
Well, perhaps you should move JSON.parse into the read step, then:
import { readFile } from 'fs'
import async from 'async'

const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']

function read(file, callback) {
  readFile(file, 'utf8', function (err, data) {
    if (err) {
      return callback(err);
    }
    let parsed;
    try {
      parsed = JSON.parse(data);
    } catch (parseErr) {
      // JSON.parse failed; report it as the error
      return callback(parseErr);
    }
    // error-first callback: null error on success, then the result
    callback(null, parsed);
  })
}

async.map(files, read, (err, results) => {
  if (err) throw err;
  console.log('results', results)
})
I'd also recommend reading up on how Node-style callback functions work to understand the meaning of the callback parameter.
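In short, the callback parameter is how read hands its result back to async.map. A tiny illustration of the error-first convention (double is a hypothetical function, purely for demonstration):

// Error-first callback in miniature: the first argument is the error
// (or null), the second is the result.
function double(n, callback) {
  if (typeof n !== 'number') {
    return callback(new TypeError('expected a number'));
  }
  callback(null, n * 2);
}

double(21, (err, result) => {
  if (err) throw err;
  console.log(result); // 42
});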

Why is the following fs.writeFile only writing data from the last file?

I want to read the content of two files (same folder) and write them into a single one:
const input = process.argv[2]

fs.readdir(__dirname + `/${input}/`, (err, files) => {
  if (err) {
    return
  }
  files.forEach((file) => {
    fs.readFile(__dirname + `/${input}/` + file, 'utf8', (err, data) => {
      let items = []
      items.unshift(data)
      let result = items.join('\n\n')
      fs.writeFile("untitled2.html", result, (err) => {
        if (err) {
          console.log(err)
        } else {
          console.log(result)
        }
      })
    })
  })
})
console.log(result) outputs the content of the two files:
alex#alex-K43U:~/node/m2n/bin$ node index4.js folder
File 1
File 2
The file, however, has only the content from the second file:
File 2
What's happening here?
Don't use writeFile but appendFile when your goal is to append to a file without replacing the content (appendFile takes care of creating the file when necessary).
You're also not waiting for the appending to be finished, which might lead to errors. You have various solutions here:
promises (see the sketch after the recursive example below)
a recursive function handling files one after the other
use appendFileSync (assuming you're writing an utility rather than a server)
Example with the recursive function:
(function doOneFile() {
  var file = files.shift();
  if (!file) return;
  fs.readFile(__dirname + `/${input}/` + file, 'utf8', (err, data) => {
    let items = []
    items.unshift(data)
    let result = items.join('\n\n')
    fs.appendFile("untitled2.html", result, (err) => {
      if (err) {
        console.log(err)
      } else {
        console.log(result)
      }
      doOneFile();
    })
  })
})();
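And a sketch of the promise-based option, assuming Node's fs.promises API (concatFiles is a hypothetical name):

const fsp = require('fs').promises;
const path = require('path');

// Read every file in the directory, then write the joined contents once,
// avoiding both the append bookkeeping and the concurrent-write race.
async function concatFiles(dir, outFile) {
  const files = await fsp.readdir(dir);
  const contents = await Promise.all(
    files.map((file) => fsp.readFile(path.join(dir, file), 'utf8'))
  );
  await fsp.writeFile(outFile, contents.join('\n\n'));
}

concatFiles(path.join(__dirname, process.argv[2]), 'untitled2.html')
  .catch(console.error);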
With the default options, writeFile erases previous contents every time. It's the "write mode". What you want is switch to "append mode", like so :
fs.writeFile("untitled2.html", result, {flag:"a"}, callbacks...);
In the process, you'll need to take care to erase the possible file contents before your loop, or have the first access be in write mode. Otherwise you'll keep appending to previously existing contents.
Besides, in this case you'll be hitting problems with concurrent accesses. Either use the synchronous forms, or loop through files via a callback.
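For instance, a synchronous sketch of that approach (fine for a small utility script; the names follow the question):

const fs = require('fs');
const path = require('path');

const input = process.argv[2];
const files = fs.readdirSync(path.join(__dirname, input));

// First access in write mode truncates any previous contents...
fs.writeFileSync('untitled2.html', '');
// ...then each file is appended in order, with no concurrency issues.
for (const file of files) {
  const data = fs.readFileSync(path.join(__dirname, input, file), 'utf8');
  fs.writeFileSync('untitled2.html', data + '\n\n', { flag: 'a' });
}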
