Parse JSON while asynchronously reading files - javascript

I am trying to read a few JSON files and store their results into one array. I have:
const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']
In order to read all of them and create a resulting array of the files' contents, I do this:
import { readFile } from 'fs'
import async from 'async'
const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']
// Thin wrapper so async.map can invoke readFile with a fixed encoding.
// `callback` is supplied by async.map and follows the (err, result) convention;
// passing it straight through hands the raw file contents back to async.map.
function read(file, callback) {
readFile(file, 'utf8', callback)
}
// async.map reads every file in parallel and collects the raw strings
// (in input order) into `results`.
async.map(files, read, (err, results) => {
if (err) throw err
// parse without needing to map over entire results array?
results = results.map(file => JSON.parse(file))
console.log('results', results)
})
This post helped me in getting there: Asynchronously reading and caching multiple files in nodejs
What I'm wondering is how to call JSON.parse() in the intermediate step of reading the file instead of having to map over the resulting array. And I suppose a clarification on what exactly the callback parameter inside the read function is used for, if not just passed in for the sake of calling readFile properly.

Well, perhaps you should move JSON.parse into the read step, then:
import { readFile } from 'fs'
import async from 'async'
// the JSON files to read and parse
const files = ['file0.json', 'file1.json', 'file2.json', 'file3.json']
// Read `file` as UTF-8 and parse its contents as JSON.
// Follows the Node error-first callback convention:
//   callback(err)          on read failure OR parse failure
//   callback(null, parsed) on success
function read(file, callback) {
readFile(file, 'utf8', function (err, data) {
if (err) {
return callback(err);
}
let parsed;
try {
// JSON.parse throws synchronously on malformed input
parsed = JSON.parse(data);
} catch (parseErr) {
return callback(parseErr);
}
// BUG FIX: the original called callback(JSON.parse(data)), which put the
// result in the *error* slot, so async.map saw every result as an error.
// Also invoke the callback OUTSIDE the try block, so an exception thrown
// inside the callback itself is not mistaken for a parse failure (the
// original would have invoked the callback a second time in that case).
callback(null, parsed);
})
}
// With parsing moved into `read`, results already contains plain objects —
// no second mapping pass needed here.
async.map(files, read, (err, results) => {
if (err) throw err;
console.log('results', results)
})
I'd recommend you to read this article to understand the meaning of callback function.

Related

Javascript Best practice to write to multiple files using fs writeFile

I have a scenario where I need to create 2 different files from data available in one object. I need these files to be created with data before I perform the next set of actions. While testing sometimes the data is not complete in the created file when I try to read from those. Do I need to add await/promise? Doesn't writeFile wait till the callback function is returned?
import {
writeFile
} from 'fs';
// NOTE(review): `{..}` is the asker's placeholder, not valid JavaScript.
const data = {
peopleList: {..},
phoneList: {..}
};
// NOTE(review): writeFile expects a string/Buffer/TypedArray; passing a plain
// object serializes as "[object Object]" — stringify first (e.g. JSON.stringify).
// Both writes below are started back-to-back and run concurrently; neither
// callback has fired by the time the statements after them execute.
writeFile(
'peopleList.txt',
data.peopleList,
(err) => {
if (err) {
return console.log(err);
}
console.log("The people list file was saved!");
}
);
writeFile(
'phoneList.txt',
data.phoneList,
(err) => {
if (err) {
return console.log(err);
}
console.log("The phone list file was saved!");
}
);

fs.readfile changes scope of global array and it can't be used outside it

I have 2 sections of code 1) that is called by 2nd to populate the array and write it into a file.
async function timeSeries(obj) {
// NOTE(review): missing `let`/`const` — `data` becomes an implicit global
data = [
{
original_value: []
}
]
//read file named as passed object's _id
// NOTE(review): this is the callback-style fs.readFile, which returns
// undefined — `await` here does NOT wait for the read to finish
await fs.readFile("./api/assignment_data/" + obj._id + ".json", "utf-8", function read(err, datas) {
if (err) {
throw err;
}
const filedata = JSON.parse(datas)
// map used purely for its side effect of pushing into `data`
filedata.map(line => data[0].original_value.push(line.original_value))
})
// NOTE(review): the 300 ms timer races against the readFile callback above;
// if the read is slow, `data` is written out before it has been filled in
setTimeout(() => {
try {
fs.writeFileSync("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8', flag: 'w' })
} catch (error) {
console.log(error)
}
}, 300);
}
The problem is that I can't access the global data array after using it inside the fs.readFile callback (the callback-scope problem), so I had to use setTimeout before I could write it to a file with fs.writeFileSync. (If I return the array, I get a Promise, but I want the data itself.) How do I solve this? Instead of writing the array to another file and reading it back in the second route (below), how can I return it directly in the second route and send it as a JSON response?
section 2)
router.route("/api/debug/:num").get((req, res) => {
// First read: load the metrics index and kick off timeSeries for one entry
fs.readFile("./api/assignment_data/metrics.json", "utf8", function read(err, data) {
if (err) {
console.log(err);
}
const objdata = JSON.parse(data)
timeSeries(objdata[req.params.num])
})
// NOTE(review): this second read starts immediately — it does NOT wait for
// the read above or for timeSeries() to finish, so it may read a stale (or
// missing) timeseries.json
fs.readFile("./api/timeseries.json", "utf8", function read(err, data) {
if (err) {
console.log(err);
}
const objdata = JSON.parse(data)
// NOTE(review): sends the raw file string, not the parsed `objdata`
res.json(data)
})
})
If you use fs.readFile and want to do an action after the file has been read, you must do the action (write and read a file in your case) inside the callback function. Also, you can use fs.readFileSync if you can read synchronously.
First off, we need to explain a few things:
fs.readFile() is non-blocking and asynchronous. That means that when you call it, it starts the operation and then returns immediately and starts the execute the code that comes right after it. Then, some time later, it calls its callback.
So, your code is:
Calling fs.readFile()
Then, immediately setting a timer
Then, it's an indeterminate race between the fs.readFile() callback and the timer to see who finishes first. If the timer finishes first, then it will call its callback and you will attempt to access data BEFORE it has been filled in (because the fs.readFile() callback has not yet been called).
You cannot write reliable code this way as you are guessing on the timing of indeterminate, asynchronous operations. Instead, you have to use the asynchronous result from within the callback because that's the only place that you know the timing for when it finished and thus when it's valid. So, one way to solve your problem is by chaining the asynchronous operations so you put the second one INSIDE the callback of the first:
// Build the timeseries for `obj` by chaining the async operations: the write
// happens inside the read callback (no timers, no races), and the caller gets
// the outcome through an error-first callback:
//   callback(err)          on read/parse/write failure
//   callback(null, data)   on success
function timeSeries(obj, callback) {
//read file named as passed object's _id
fs.readFile("./api/assignment_data/" + obj._id + ".json", "utf-8", function read(err, datas) {
if (err) {
console.log(err);
// tell caller about our error
callback(err);
return;
}
let data = [{ original_value: [] }];
let filedata;
try {
// JSON.parse throws on malformed input; surface it instead of crashing
filedata = JSON.parse(datas);
} catch (parseErr) {
callback(parseErr);
return;
}
for (let line of filedata) {
data[0].original_value.push(line.original_value);
}
fs.writeFile("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8' }, (err) => {
if (err) {
console.log(err);
callback(err);
return;
}
// BUG FIX: the original called callback(data), putting the result in the
// error slot; error-first convention is callback(null, data) so the
// caller can tell success apart from failure.
callback(null, data);
});
});
}
Then, to call this function, you pass it a callback and in the callback you can either see the error or get the data.
In modern nodejs, it's a bit easier to use async/await and the promise-based interfaces in the fs module:
const fsp = require('fs').promises;
/**
 * Read the JSON file named after `obj._id`, collect every record's
 * `original_value` into a single array, persist the result to
 * ./api/timeseries.json, and return it.
 *
 * Errors (read, parse, or write) are logged and rethrown to the caller.
 */
async function timeSeries(obj) {
try {
const sourcePath = "./api/assignment_data/" + obj._id + ".json";
const raw = await fsp.readFile(sourcePath, "utf-8");
const records = JSON.parse(raw);
// one accumulator object holding every original_value, in file order
const data = [{ original_value: records.map((record) => record.original_value) }];
await fsp.writeFile("./api/timeseries.json", JSON.stringify(data), { encoding: 'utf8' });
return data;
} catch (e) {
console.log(e);
// handle error here or throw back to the caller
throw e;
}
}
For this version, the caller can use await and try/catch to get errors:
// Usage: `await` requires an async context (an async function body, or the
// top level of an ES module).
try {
let data = await timeSeries(obj);
// do something with data here
} catch(e) {
// handle error here
}
Based on the code you have written, I modified it using simple async/await — hope this helps.
import fs from 'fs'
// Build the timeseries for one object: read the JSON file named after
// obj._id, gather every original_value into one array, persist it to
// ./api/timeseries.json, and return it.
// Throws (rejects) on read/parse/write failure — the caller's try/catch
// turns that into a 500 response.
async function timeSeries(obj) {
const data = [{
original_value: []
}]
// BUG FIX: read the file that belongs to *this* object. The original read
// metrics.json here, which is the index the caller already read to look
// the object up — `obj` was ignored entirely.
const assData = fs.readFileSync(`./api/assignment_data/${obj._id}.json`, 'utf8')
const filedata = JSON.parse(assData)
// forEach (not map): we iterate purely for the side effect of pushing
filedata.forEach(line => data[0].original_value.push(line.original_value))
// no need for timeOut
fs.writeFileSync('./api/timeseries.json', JSON.stringify(data));
//return data if u need
return data
}
// GET /api/debug/:num — look up entry :num in the metrics index, build its
// timeseries, and return it as JSON. Any failure (missing file, bad JSON,
// bad index) is reported as a 500 with the error message.
router.route("/api/debug/:num").get(async (req, res) => {
try {
const metricData = fs.readFileSync('./api/assignment_data/metrics.json', 'utf8')
// BUG FIX: parse the variable we just read — the original parsed `data`,
// which is undefined in this scope.
const objdata = JSON.parse(metricData)
const timeSeriesData = await timeSeries(objdata[req.params.num])
// returning TimeSeriesData
res.status(200).json(timeSeriesData)
} catch (error) {
// BUG FIX: the original's braces were mismatched — the catch block sat
// outside the route handler, which is a syntax error.
res.status(500).send(error.message)
}
})

How to share a variable between two files generated by a function

I am writing an API in NodeJS and I have ran into a brick wall. I am trying to use a function to grab a variable and use module.exports to use said variable in another file. This however keeps coming up as undefined in the console.
I have already tried used return statements in different places in the file but I keep getting undefined.
This is what the code looks like to grab the variable and export it.
File 1 (api.js)
const fs = require('fs');
const homeDir = require('os').homedir();
module.exports = {
workingDirectory: () => {
let dir;
// Both fs.access and fs.readFile are asynchronous: they only *start* the
// work here and invoke their callbacks later.
fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
if(err) throw err;
fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
if(readErr) throw readErr;
let jsonData = JSON.parse(data);
// assigned long after workingDirectory() has already returned
dir = jsonData.WorkingDirectory;
});
});
// NOTE(review): executes before either callback above has run, so this
// always returns undefined — the bug the question is about
return dir;
}
};
File 2
const api = require('../api');
// NOTE(review): this logs the function object itself — it is never invoked
// (missing `()`); and even when called, it returns before the async reads finish
console.log(api.workingDirectory);
.unitv file
{
"WorkingDirectory": "/home/user/UniTV",
"Port": "3000"
}
In the console it will turn up as undefined when it should turn up with the value of the "working directory" in /home/user/.unitv
Any and all help is appreciated, thanks.
Your current code is particularly problematic.
return dir; occurs before fs.access/fs.readFile finishes. These are asynchronous functions and require the use of callback, promise, or async/await styled coding. The gist of it is that the code continues executing other code while it waits on I/O (such as reading a file) and the way you have written it causes nothing to be returned. See https://repl.it/#CodyGeisler/readFileCallback for a working callback example.
workingDirectory: () => {
let dir;
// asynchronous: starts the check and returns immediately
fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
if(err) throw err;
fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
if(readErr) throw readErr;
let jsonData = JSON.parse(data);
// runs only after the file has been read — far too late for `return dir`
dir = jsonData.WorkingDirectory;
});
});
// the problematic line: runs before the callbacks, so `dir` is still undefined
return dir;
}

Get async result in async.filter() array nodejs

Need to parse some XML files from mass array with file_path values.
Try to use async, fs, xml2js.
When I use a single string file_path everything works perfectly. But when I use async.filter() with an array, I can't understand how to return the result from xml.parseString().
const fs = require('fs');
const xml2js = require('xml2js');
const async = require('async');
var mass=['/file1.xml','/fil2.xml','/file3.xml',...]
// NOTE(review): mixing an `async` function with async.filter's callback style —
// the callback is never invoked, so the filter never completes
async.filter(mass, async function(file_path, callback){
if(fs.statSync(file_path)['size']>0){
// NOTE(review): readFileSync does not take a callback — this function
// argument is silently ignored and never runs
fs.readFileSync(file_path, 'utf8', function(err, data) {
// NOTE(review): `xml` is not defined anywhere above — the module was
// required as `xml2js`
xml.parseString(data, function (err, result) {
console.log(Object.keys(result)[0]);
// returning from a nested callback goes nowhere; the result must be
// delivered through async's callback instead
return result; //need get this result to results array
})
})
}
}, function(err, results) {
console.log(results)
});
Can someone explain how this works and what I need to change in my code?
Thanks a lot!
You are trying to map and filter at the same time. Since your filter condition is synchronously available, use the array filter method for that, and then pass that to async.map.
You should then call the callback function, that async.map provides to you, passing it the result. So don't return it, but call the callback.
The readFileSync method does not take a callback like its asynchronous counterpart. It just returns the data.
Also, drop the async keyword, as you are not using the await keyword at all.
// Filter synchronously first (statSync's result is available immediately),
// then let async.map collect the parsed XML documents — in input order —
// through its error-first callback.
async.map(mass.filter((file_path) => fs.statSync(file_path).size > 0),
function(file_path, callback){
var data = fs.readFileSync(file_path, 'utf8');
// BUG FIX: the module was required as `xml2js`; `xml` is undefined here.
// (If the asker's real code had `const xml = new xml2js.Parser()`, keep
// that name instead — confirm against their setup.)
xml2js.parseString(data, function (err, result) {
if (err) {
// BUG FIX: on a parse error `result` is undefined, so the original's
// unconditional Object.keys(result) would throw; report the error
// to async.map instead
return callback(err);
}
console.log(Object.keys(result)[0]);
callback(null, result);
})
}, function(err, results) {
console.log(results)
});
It should be noted however, that since Node now comes with the Promise API, and even the async/await extension to that, the async module has become much less interesting. Consider using Promises.
// One Promise per non-empty file: read synchronously, parse asynchronously,
// and settle with the parsed document. Promise.all preserves input order.
const promises = mass.filter(file_path => {
return fs.statSync(file_path).size > 0
}).map(function(file_path) {
return new Promise((resolve, reject) => {
const data = fs.readFileSync(file_path, 'utf8');
// BUG FIX: `xml` was undefined (the module was required as `xml2js`).
xml2js.parseString(data, function (err, result) {
if (err) {
// BUG FIX: the original ignored `err` and resolved with undefined;
// reject so Promise.all reports the failure
return reject(err);
}
console.log(Object.keys(result)[0]);
resolve(result);
});
});
});
Promise.all(promises).then(results => {
console.log(results);
}).catch(err => {
// surface stat/parse failures instead of leaving an unhandled rejection
console.error(err);
});

Async readFile module.exports in node.js

I'm sorry for what might easily be a naive question, but I'm trying to figure out how Node works, especially for a problem like this:
What I need do is to send an object/file from fs.readFile through require and module.exports. This is what I have tried is this
in one file (call it app.js) the code for reading a file:
var fs = require('fs');
// module-level slot the callback writes into — but nothing tells the
// importer *when* it has been filled
var file_contents = undefined;
var callback_reader = function(err, data) {
if (err) return console.error(err);
// runs only after the async read completes, long after parseFile returned
file_contents = data.toString().split('\n');
}
module.exports = {
// NOTE(review): parseFile returns nothing; callers have no way to receive
// the contents — the bug the question is about
parseFile: function(file_path) {
fs.readFile(file_path.toString(), 'utf-8', callback_reader);
}
}
and in some other file, (call it main.js) I need to use the contents of the file read by the readFile like this
var file_importer = require('./app.js')
file_importer.parseFile(real_path_to_file);
but if I try console.log on this last line I always get an undefined object. Now I know it is because the callback does not execute before the console.log, but I'm unsure how to achieve this communication.
So I changed your code a little bit to use callbacks.
It seems that you can't use "return" from an asynchronous function in module.exports. However, the code below works as expected. Hope it helps.
main.js
// Load the parser module and print the file contents once the asynchronous
// read completes; errors arrive first in the error-first callback.
const fileImporter = require('./app.js');
fileImporter.parseFile('./time.js', (err, data) => {
if (err) {
return console.log(err);
}
console.log(data);
});
app.js
var fs = require('fs');
module.exports = {
parseFile: function(file_path, callback) {
fs.readFile(file_path.toString(), 'utf-8', function(err, data) {
if (err) return callback(err);
callback(null, data);
});
}
}
// much shorter version
// (readFile's callback is already (err, data), so it can be forwarded as-is)
exports.parseFile = function(file_path, callback) {
fs.readFile(file_path.toString(), 'utf-8', callback);
}
This is how JavaScript works: it doesn't wait for the callback to be called before returning.
You should do your console.log in your callback.
Like these :
// Consume the file contents inside the callback — the only place where
// `data` is guaranteed to be available.
fs.readFile(pathToFile, 'utf-8', function(err, data) {
// NOTE(review): returning err from a callback discards it — prefer
// logging or rethrowing
if (err) return err;
console.log(data);
// Continue your process here
})

Categories

Resources