module.exports is returning undefined - javascript

I am currently new to Node JS, and today I was trying to read data from a file data.json.
Here is the JSON file:
{"username":"rahul_v7","password":"9673"} {"username":"7vik","password":"3248"} {"username":"pradypot_2","password":"6824"} {"username":"ad_1","password":"9284"} {"username":"premchand_4","password":"4346"}
And, I was using the below code present in a file GetData.js, to read the data present in the data.json:
'use strict';
const fs = require('fs');
// Raw file text accumulator and the parsed objects collected from it.
let res = '', resObjs = [];
// NOTE(review): the stream is consumed asynchronously — none of the
// handlers below run until after this module's top-level code (and the
// caller's require()) has already finished.
let fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
fin.on('data', data => {
if(data.length > 0) res += data;
}).on('end', () => {
if(res.length > 0) {
// The file holds whitespace-separated JSON objects, not one JSON document.
let resArr = res.trim().split(' ');
for(let i = 0; i < resArr.length; i++) {
resObjs.push(JSON.parse(resArr[i]));
}
// BUG (the subject of this question): this export is assigned only when
// the 'end' event fires, long after require('./GetData') has returned,
// so consumers see `objects` as undefined.
module.exports.objects = resObjs;
}
});
As you can see, I am exporting the resObjs array, which is actually an array of objects, to an another file named AppendData.js, which is given below:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData');
console.log(getObjs.objects);
But, when I run AppendData.js in Node.js 9.3.0 (ia32), it gives the following output:

You're trying to use the objects before they've been read. Remember that your code reading the stream runs asynchronously, and nothing in your code attempts to coordinate it with module loading. So AppendData.js isn't seeing the objects export because it doesn't exist yet as of when that code runs.
Instead, return a promise of the objects that AppendData.js can consume; see *** comments:
'use strict';
const fs = require('fs');
// *** Export a promise that settles once the whole file has been read.
// Consumers use: require('./GetData').objectsPromise.then(objects => ...)
module.exports.objectsPromise = new Promise((resolve, reject) => {
  let res = '';
  const resObjs = [];
  const fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
  fin.on('data', data => {
    if (data.length > 0) res += data;
  }).on('end', () => {
    if (res.length > 0) {
      // The file is a sequence of space-separated JSON objects.
      const resArr = res.trim().split(' ');
      for (let i = 0; i < resArr.length; i++) {
        resObjs.push(JSON.parse(resArr[i]));
      }
    }
    // FIX: resolve unconditionally — the original resolved only inside the
    // `res.length > 0` branch, so an empty file left the promise pending
    // forever and consumers hung. An empty file now yields [].
    resolve(resObjs); // *** Resolve the promise
  }).on('error', error => {
    reject(error); // *** Reject the promise on stream errors
  });
});
Note I added a handler for errors.
And then:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData');
// Wait for GetData.js to finish reading before using the objects.
getObjs.objectsPromise
.then(console.log)
.catch(error => {
// Do something
// NOTE(review): don't leave this handler empty in real code — at least
// log the error, or the failure is silently swallowed.
});
Again note the error handler.

The problem happens because you're trying to use the objects in AppendData.js before they have been loaded in GetData.js, since fs.createReadStream is asynchronous. To fix this, make module.exports a function that expects a callback in GetData.js, like:
'use strict';
const fs = require('fs');
module.exports = function(callback) {
let res = '', resObjs = [];
let fin = fs.createReadStream('F:/RahulVerma/NodeJS/data.json', 'utf-8');
fin.on('data', data => {
if(data.length > 0) res += data;
}).on('end', () => {
if(res.length > 0) {
let resArr = res.trim().split(' ');
for(let i = 0; i < resArr.length; i++) {
resObjs.push(JSON.parse(resArr[i]));
}
callback(resObjs); // call the callback with the array of results
}
});
}
Which you can then use like this in AppendData.js:
'use strict';
const fs = require('fs');
const getObjs = require('./GetData'); // getObjs is now a function
// The callback runs only after GetData.js has finished reading the file.
getObjs(function(objects) {
console.log(objects);
});

Related

How to read and Write multiple files using node js?

In a array I have filenames; I want to first read one file and perform some operation then store result in a separate file. Then read 2nd file, perform operation again and save result in new 2nd file. Do the same procedure for all files. Below I have written code to read and write files.
TextReader.js
var fs = require('fs');
const readline= require('readline');
// NOTE(review): these three are module-level, so they are SHARED across
// every call to readTextFile. Concurrent calls (as in demo.js below)
// interleave their state — this is why "all the data is stored together".
var headerIndex = [];
var isFirstLine = true;
var finalList = [];
module.exports={
// Reads a pipe-delimited text file line by line, maps each data row to an
// object keyed by the header row, then appends a tab-separated summary
// to an output file.
readTextFile: (filename)=>{
console.log('inside textreader')
readline.createInterface({
// NOTE(review): `$(unknown)` looks like a mangled `${filename}` template
// placeholder — confirm against the original source.
input: fs.createReadStream(`./s3/$(unknown)`)
}).on('line', function(line) {
console.log(line);
console.log("-----------------------------");
if (isFirstLine) {
// First line is the pipe-separated header row.
headerIndex = line.split('|');
}
else if (!isFirstLine){
// Data row: pair each header column with the row's value at that index.
let rowValues = line.split('|');
let valueIndex = 0;
var singlePerson = {};
headerIndex.forEach(currentval => {
singlePerson[currentval] = rowValues[valueIndex];
valueIndex++;
});
finalList.push(singlePerson);
}
isFirstLine = false;
}).on('close',function(){
//console.log(finalList);
var data='';
// Reorder the header columns into the output layout.
var header= "Employee ID"+'\t'+headerIndex[0]+'\t'+headerIndex[2]+'\t'+headerIndex[1]+'\t'+headerIndex[4]
+'\t'+headerIndex[3]+'\t'+headerIndex[5]+'\n';
for (var i = 0; i < finalList.length; i++) {
// Turns "First Last" into "First,Last" (no comma for single-word names).
function split(name){
var conv=name.split(' ');
var result=[conv.slice(0, -1).join(' '),conv.slice(-1)[0]].join(conv.length < 2 ? '' : ',');
return result;
}
split(finalList[i].UserName);
data=data+finalList[i].LoginID+'\t'+split(finalList[i].UserName)+'\t'+finalList[i].Email+'\t'
+finalList[i].LoginID+'\t'+'A&G Professional'+'\t'+finalList[i].Title+'\t'+finalList[i].State+'\n';
}
var newFilename= filename.substr(0, filename.lastIndexOf("."))
var alldata= header + data;
//console.log(alldata)
// NOTE(review): appendFile on a .xlsx path writes plain text, not a real
// Excel workbook — presumably tab-separated text is the intent; verify.
fs.appendFile(`./s3/$(unknown).xlsx`,alldata, (err) => {
if (err) throw err;
console.log('File created');
});
});
}
}
I am calling readTextFile(); from another file.
demo.js
const { readTextFile } = require("./textReader");
var array=['UserRoleDetails_12102021063206.txt',
'UserRoleDetails_12102021064706 (1).txt',
'UserRoleDetails_12102021064706.txt',
'UserRoleDetails_12102021070206.txt']
// BUG (the subject of this question): forEach fires all calls immediately
// and readTextFile works asynchronously, so every file is processed
// concurrently against the shared module state, not one after another.
array.forEach(function(currentItem){
readTextFile(currentItem);
})
The problem I am facing is that all the files are processed at the same time, and the data from all of the files ends up stored together.
First, Node.js does not run this work sequentially, as you assumed.
Second, array.forEach is not suitable here for performing sequential operations;
you need to use
const { readTextFile } = require("./textReader");
// Files to process in order, one at a time.
var array=['UserRoleDetails_12102021063206.txt',
'UserRoleDetails_12102021064706 (1).txt',
'UserRoleDetails_12102021064706.txt',
'UserRoleDetails_12102021070206.txt']
// for...of (unlike forEach) can be combined with `await` once readTextFile
// returns a promise, giving true sequential processing.
for (const element of array) {
  // FIX: the loop variable is `element`; `currentItem` was a stray name
  // copied from the forEach version and is undefined here (ReferenceError).
  readTextFile(element);
}
NOTE: readTextFile() is not async, so you may need to make it async (i.e. have it return a promise) before you can await each call.
If anything is unclear, please ask.

Node JS function that return https get request final edited data

Hello everybody I have a problem with the Node JS function that I want it to return https get request final edited data, I know there are a lot of solutions for this async problem but I tried them all and still can't figure out what is wrong with my code?
here is my function without any other solutions editing:
// BUG (the subject of this question): getMovie itself never returns a
// value — the `return finalData` below returns from the "end" event
// listener, not from getMovie — so callers always receive undefined.
function getMovie(apiKey, gen) {
const baseUrl = "https://api.themoviedb.org/3/discover/movie?api_key=" + apiKey + "&language=en-US&include_adult=false&include_video=false&page=1&with_genres=" + gen;
https.get(baseUrl, function (responce) {
console.log(responce.statusCode);
var d = "";
responce.on("data", function (data) {
d += data;
});
responce.on("end", () => {
const finalData = [];
const moviesData = JSON.parse(d);
const result = moviesData.results;
// Pick one random movie from the results page.
const maxx = result.length;
const rand = Math.floor(Math.random() * maxx);
const title = result[rand].title;
const rDate = result[rand].release_date;
const overview = result[rand].overview;
const imageRoot = result[rand].poster_path;
const movieId = result[rand].id;
const movieRating = result[rand].vote_average;
// here will push those variables to finalData array
// then return it
return finalData;
});
}).on('error', (e) => {
console.error(e);
});
}
and want after this finalData returns:
const finalResult = getMovie(apiKey, genre);
it always returns undefined. How can I fix this? Can anyone please help me with this problem?
thanks in advance.
I solved this problem using promises using this code:
const rp = require('request-promise');
/**
 * Fetches `url` and resolves with the number of movies in the response.
 * @param {string} url
 * @returns {Promise<number>}
 */
function getMovie(url) {
  // returns a promise
  return rp(url).then(body => {
    // make the count be the resolved value of the promise
    let responseJSON = JSON.parse(body);
    // FIX: `results` is an array; arrays expose `length`, not `count`, so
    // `responseJSON.results.count` was always undefined.
    return responseJSON.results.length;
  });
}
getMovie(someURL).then(result => {
  // use the result in here
  console.log(`Got result = ${result}`);
}).catch(err => {
  console.log('Got error from getMovie ', err);
});

How to read a file line by line in Javascript and store it in an array

I have a file in which the data is in the form like
abc#email.com:name
ewdfgwed#gmail.com:nameother
wertgtr#gmsi.com:onemorename
I want to store the emails and names in arrays like
email = ["abc#email.com","ewdfgwed#gmail.com","wertgtr#gmsi.com"]
names = ["name","nameother","onemorename"]
Also, guys, the file is a little bit large around 50 MB so also I want to do it without using a lot of resources
I have tried this to work but can't make things done
// read contents of the file
const data = fs.readFileSync('file.txt', 'UTF-8');
// split the contents by new line
const lines = data.split(/\r?\n/);
// print all lines
lines.forEach((line) => {
names[num] = line;
num++
});
} catch (err) {
console.error(err);
}
Maybe this will help you.
Async Version:
const fs = require('fs')
const emails = [];
const names = [];
// Read the whole file asynchronously, then split it into lines and break
// each "email:name" line apart at the first colon.
fs.readFile('file.txt', (err, file) => {
if (err) throw err;
for (const line of file.toString().split('\n')) {
const parts = line.split(':');
emails.push(parts[0]);
names.push(parts[1]);
}
});
Sync Version:
const fs = require('fs')
const emails = [];
const names = [];
// Synchronous variant: load the file up front, then collect the two
// colon-separated columns line by line.
const lines = fs.readFileSync('file.txt').toString().split('\n');
for (const line of lines) {
const [email, name] = line.split(':');
emails.push(email);
names.push(name);
}
console.log(emails)
console.log(names)
You can directly use line-reader :
fileData.js :
const lineReader = require('line-reader');
/**
 * Streams a file of "email:name" lines and collects the two columns.
 *
 * NOTE: lineReader.eachLine is asynchronous — getEmails()/getNames() can
 * return partially-filled arrays if called before reading completes.
 */
class FileData {
constructor(filePath) {
this.emails = [];
this.names = [];
// FIX: use an arrow function so `this` is the FileData instance, and push
// onto this.emails/this.names — the original referenced bare `emails` and
// `names`, which are undefined here and throw a ReferenceError per line.
lineReader.eachLine(filePath, (line) => {
console.log(line);
const splitedLine = line.split(':');
this.emails.push(splitedLine[0]);
this.names.push(splitedLine[1]);
});
}
getEmails(){
return this.emails;
}
getNames(){
return this.names;
}
}
module.exports = FileData
Whrerever You want:
const FileData = require('path to fileData.js');
const fileData = new FileData('test.txt');
// NOTE(review): eachLine is asynchronous, so this likely logs an empty
// array — it runs before the file has finished loading.
console.log(fileData.getEmails())

How to handle axios.all request fails

How can I handle request fails in this example of axios.all requests. I.e. if all servers are responde with JSON all is okay and I have JSON file at end of a cycle. But if one of this servers not responde with JSON or not responde at all I do have nothing in "/data.json" file, even all other servers are working perfectly. How can I catch a server fail and skip it?
var fs = require("fs");
var axios = require('axios');
var util = require('util');
var round = 0;
var tmp = {};
// 'w' truncates: data.json is rewritten from scratch on every round.
var streem = fs.createWriteStream(__dirname + '/data.json', {flags : 'w'});
toFile = function(d) { //
streem.write(util.format(d));
};
start();
setInterval(start, 27000);
// Re-creates the output stream and polls all servers once per interval.
function start(){
streem = fs.createWriteStream(__dirname + '/data.json', {flags : 'w'});
monitor();
}
function monitor(){
// BUG (the subject of this question): axios.all is fail-fast — if ANY
// request rejects, the whole .then() is skipped and nothing at all is
// written to data.json, even though the other servers responded fine.
axios.all([
axios.get('server1:api'),
axios.get('server2:api'),
axios.get('server3:api'),
axios.get('server4:api'),
]).then(axios.spread((response1, response2, response3, response4) => {
tmp.servers = {};
tmp.servers.server1 = {};
tmp.servers.server1 = response1.data;
tmp.servers.server2 = {};
tmp.servers.server2 = response2.data;
tmp.servers.server3 = {};
tmp.servers.server3 = response3.data;
tmp.servers.server4 = {};
tmp.servers.server4 = response4.data;
toFile(JSON.stringify(tmp));
round++;
streem.end();
streem.on('finish', () => {
console.error('Round: ' + round);
});
})).catch(error => {
console.log(error);
});
}
The most standard way to approach this would be a recursive function like below.
// Queue of in-flight requests; monitor() drains it one entry at a time.
let promises = [
axios.get('server1:api'),
axios.get('server2:api'),
axios.get('server3:api'),
axios.get('server4:api'),
];
/**
 * Awaits each queued request in turn, collecting its response — or `{}`
 * on failure — so one bad server cannot sink the whole batch.
 *
 * @param {Array} responses - accumulator; pass [] on the initial call
 * @returns {Promise<Array>} all collected responses, in queue order
 */
async function monitor(responses = []) {
  const nextPromise = promises.shift();
  if (!nextPromise) {
    return responses;
  }
  try {
    // FIX: await the dequeued request itself — the original called an
    // undefined helper `getSentenceFragment(offset)` here.
    responses.push(await nextPromise);
  } catch (error) {
    responses.push({}); // placeholder keeps positions aligned with servers
  }
  // FIX: recurse and return the shared accumulator directly; the original
  // `responses.concat(await monitor(responses))` duplicated every entry
  // (the accumulator was concatenated with itself plus the new items).
  return monitor(responses);
}
// Drain the request queue, then unpack the four responses in queue order.
// Failed servers appear as {} (so `.data` is undefined rather than crashing).
monitor([]).then(([response1, response2, response3, response4]) => {
tmp.servers = {};
tmp.servers.server1 = {};
tmp.servers.server1 = response1.data;
tmp.servers.server2 = {};
tmp.servers.server2 = response2.data;
tmp.servers.server3 = {};
tmp.servers.server3 = response3.data;
tmp.servers.server4 = {};
tmp.servers.server4 = response4.data;
toFile(JSON.stringify(tmp));
round++;
streem.end();
streem.on('finish', () => {
console.error('Round: ' + round);
});
});

NodeJS Waiting for asynchronous function to complete foreach

Hi. I have been trying to make this work: I tried the async module but didn't really know how to adapt my code to it, and I tried promisifying it, which didn't work well either (I think I did it wrong), so I reverted the function to the way it was at first.
Basically, I want to wait until the ReadJson() function is done reading all the JSON files in the array, and only then run other functions (like editjson, etc.).
Code:
App.js
const Reader = require('./Reader');
Reader.ReadJson();
Reader.js
const fsp = require('fs-promise');
const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];
class Reader {
static ReadJson() {
JsonFiles.forEach(name => {
let FileDir = "D:\\Development\\Java\\" + name;
// Asynchronous: each readJson resolves later, after ReadJson returns.
fsp.readJson(FileDir).then(contents => {
if (contents) {
JsonContents.push(contents);
console.log(`Loaded >> ${name} ${Reader.JsonContents.length}/${JsonFiles.length}`);
}
});
});
// BUG (the subject of this question): this line runs before any of the
// promises above have resolved — hence "Done" prints before the files load.
console.log('Done Reading Json Content!');
//Other functions
}
}
Reader.JsonContents = JsonContents;
module.exports = Reader;
So basically the output is:
Done Reading Json Content!
Loaded >> json1.json 1/4
Loaded >> json2.json 2/4
Loaded >> json3.json 3/4
Loaded >> json4.json 4/4
When i need it to be:
Loaded >> json1.json 1/4
Loaded >> json2.json 2/4
Loaded >> json3.json 3/4
Loaded >> json4.json 4/4
Done Reading Json Content!
Thank you :)
Return a promise, track your progress in the forEach and resolve it only when JsonContents length is the same as JsonFiles length.
const fsp = require('fs-promise');
const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];
class Reader {
/**
 * Reads every file in JsonFiles and resolves with JsonContents once all
 * of them have completed (rejecting on the first read error).
 * @returns {Promise<Array>}
 */
static ReadJson() {
return new Promise((resolve, reject) => {
let finished = 0; // reads completed, whether or not they produced content
JsonFiles.forEach(name => {
let FileDir = "D:\\Development\\Java\\" + name;
fsp.readJson(FileDir).then(contents => {
if (contents) {
JsonContents.push(contents);
console.log(`Loaded >> ${name} ${Reader.JsonContents.length}/${JsonFiles.length}`);
}
// FIX: count completions rather than comparing JsonContents.length to
// `JsonFile.length` (a typo for JsonFiles). The original also hung
// forever if any file parsed to a falsy value, since that read never
// incremented JsonContents.
if (++finished === JsonFiles.length) {
resolve(JsonContents);
}
}).catch(err => {
return reject(err);
});
});
});
}
}
Reader.JsonContents = JsonContents;
module.exports = Reader;
And then use it in your app:
const Reader = require('./Reader');
// Logs only after every JSON file has been read (the promise resolves).
Reader.ReadJson().then(() => { console.log('Done Reading Json Content!'); });
Another option is using Promise.all, because you are using fs-promise, but although it can be done with forEach, a regular for loop is better here.
const fsp = require('fs-promise');
const JsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
const JsonContents = [];
class Reader {
// Start one read per file and hand the batch to Promise.all, so the
// returned promise settles only when every file has been processed.
static ReadJson() {
const pending = JsonFiles.map(name => {
const FileDir = "D:\\Development\\Java\\" + name;
return fsp.readJson(FileDir).then(contents => {
if (contents) {
JsonContents.push(contents);
console.log(`Loaded >> ${name} ${Reader.JsonContents.length}/${JsonFiles.length}`);
}
});
});
return Promise.all(pending);
}
}
Reader.JsonContents = JsonContents;
module.exports = Reader;
As an addendum to Ron Dadon's Promise.all method....
The Bluebird promise library provides some helper functions like Promise.map and Promise.filter that can remove a lot of the boiler plate of Promise array processing code.
const Promise = require('bluebird');
const fsp = require('fs-promise');
const path = require('path');
class Reader {
// Read every JSON file under jsonPath; drop falsy contents and log each
// file that made it through.
static readFiles(jsonPath, jsonFiles){
let fileReadCount = 0;
const loaded = Promise.map(jsonFiles, (name) =>
fsp.readJson(path.join(jsonPath, name))
);
return loaded.filter((content, index, length) => {
if (!content) return false;
console.log(`Loaded >> ${jsonFiles[index]} ${index+1} / ${length}`);
return true;
});
}
// Kick off the batch read and cache the results on the class.
static readJson() {
return this.readFiles(this.jsonPath, this.jsonFiles).then(contents => {
console.log('Done Reading Json Content!', contents);
return this.jsonContents = contents;
});
}
}
Reader.jsonFiles = ['json1.json', 'json2.json', 'json3.json', 'json4.json'];
Reader.jsonPath = 'D:\\Development\\Java';
module.exports = Reader;

Categories

Resources