Cannot read property 'map' of undefined Node js - javascript

I am getting the Error, I am writing a function to monitor a path for. I am new to Node.js:
TypeError: Cannot read property 'map' of undefined
at C:\Users\a\Desktop\DL\file\filemonitor.js:15:14
at FSReqWrap.oncomplete (fs.js:149:20)
const Promise = require ('bluebird');
var fs = Promise.promisifyAll(require("fs"));
// BUG REPRODUCTION (asker's code): lists the entries of `monitorpath`.
// The `err` argument of the readdir callback is never checked, so when
// readdir fails `items` is undefined and `items.map` throws the
// "Cannot read property 'map' of undefined" TypeError quoted above.
monitordir(monitorpath) {
var fileList = []; // accumulates entry names pushed from the map callback
return new Promise((resolve, reject) => {
fs.readdir(monitorpath,function(err, items) {
// `err` is ignored here - on failure `items` is undefined (crash site, line 15 of the trace)
items.map((file) => {
fileList.push(file); // map used only for its side effect; forEach would be clearer
});
resolve(fileList); // `reject` is never used, so callers can never observe a failure
});
})
}
Note: I don't see a package.json file either. Do I need a successful run before it appears?

When you run `var fs = Promise.promisifyAll(require("fs"))`, Bluebird does not turn `fs` into a promise - it adds promise-returning `*Async` variants (such as `readdirAsync`) alongside the normal callback methods. The real problem is that your readdir callback ignores `err`: when readdir fails, `items` is undefined, so `items.map` throws.
I believe that you don't need a Promise to resolve fs module, my suggestion is you write something like that.
const Promise = require('bluebird');
const fs = require("fs");
const monitordir = function (dirPath) {
  // Resolve with the directory's entry names; reject on any fs error.
  return new Promise(function (resolve, reject) {
    fs.readdir(dirPath, function (err, entries) {
      if (err) {
        reject(err);
        return;
      }
      resolve(entries);
    });
  });
};

Try following fix, see if it fits your needs:
// Lists the entries of `monitorpath`, resolving with an array of names.
// Fixes in this version:
//  - plain callback-style fs.readdir does NOT return a promise (Bluebird's
//    promisifyAll adds a separate readdirAsync); use fs.promises.readdir.
//  - a fulfilled .then handler receives ONE argument (the result), not an
//    (err, items) pair; the original's `err` parameter captured the items
//    and left `items` undefined, reproducing the asker's TypeError.
//  - method shorthand `monitordir(...) { }` is invalid outside an object
//    literal; use a function declaration.
function monitordir(monitorpath) {
  return fs.promises.readdir(monitorpath)
    .then(function (items) {
      var fileList = items.slice(); // fileList is ready here! do whatever you want before it resolves to caller
      return fileList;
    })
    .catch(function (e) {
      // something bad happened; throw error or handle it as per your needs
      throw new Error(e);
    });
}
For package.json you can run npm init command at your project directory it will create one for you.

Related

Angular reading the value of a js function as undefined, even when the object has value

I created some javascript functions that read and write to a json file, are suppose to be invoked in angular(from typescript code), using jsonfile library.
Here is the code:
// Appends `patient` to patients.json ({flag: 'a'} opens the file in append
// mode); write errors are only logged, never propagated to the caller.
function savePatient(patient){
const jsonfile = require('jsonfile')
const file = 'src/resources/patients.json'
jsonfile.writeFile(file, patient, {flag: 'a'}, function(err){
if(err) console.error(err)
})
}
// BUG: readFile is asynchronous - the `return obj` below returns from the
// inner callback, not from getPatients, so getPatients() itself always
// returns undefined (exactly what the Angular caller observes).
function getPatients(){
const jsonfile = require('jsonfile')
const file = 'src/resources/patients.json'
jsonfile.readFile(file, function(err, obj){
if(err) console.error(err)
console.dir(obj) // logs correctly - the data IS read, just never returned
return obj // discarded: a callback's return value goes nowhere
})
}
Here is the declaration of functions in Angular component:
declare function savePatient(patient: Patient);
declare function getPatients(): Patient[];
I managed to successfully call the savePatient() function, and it does as intended.
When I try to invoke console.log(getPatients()) from inside the Angular component, the output is undefined, but the getPatients() function itself generates a correct console output from the console.dir(obj) line.
How am I suppose to get the correct value of the function inside the Angular component?
Also, this project is inside an electron container, if someone may find that relevant.
I found it interesting that the Angular component is the first one to output information to console, even though it would make sense that the js functions should give output before it, considering that the Angular component should be dependent on the return value of the js function, but I don't know what to make of that.
Your function
// (The asker's function, quoted verbatim in the answer: readFile is async,
// so the `return obj` inside the callback never becomes getPatients' result.)
function getPatients(){
const jsonfile = require('jsonfile')
const file = 'src/resources/patients.json'
jsonfile.readFile(file, function(err, obj){
if(err) console.error(err)
console.dir(obj)
return obj
})
}
works asynchronous (see docs).
You have two options. The first one is to handle the file-reading asynchronously:
function getPatients() {
  const jsonfile = require('jsonfile')
  const file = 'src/resources/patients.json';
  // Adapt the callback-style readFile to a promise the caller can await.
  return new Promise(function (resolve, reject) {
    jsonfile.readFile(file, function (err, obj) {
      if (err) {
        console.error(err)
        reject(err);
        return;
      }
      console.dir(obj)
      resolve(obj);
    });
  });
}
...
// Prints the read object in the console, after the file reading is done
getPatients().then((obj) => {
console.dir(obj);
});
The second options, and in my opinion the best solution for you is using the synchronous way to read a file:
// Synchronous variant: blocks until the file is read, then returns the
// parsed JSON. On failure it logs the error and returns undefined.
function getPatients(){
  const jsonfile = require('jsonfile')
  const file = 'src/resources/patients.json'
  try {
    const obj = jsonfile.readFileSync(file);
    console.dir(obj);
    return obj;
  } catch(e) {
    // best-effort: log and fall through (caller gets undefined)
    console.error(e);
  } // fixed: the original closed this block with `});`, a syntax error
}
Please make sure that your function returns something. In this snippet I added a return statement before jsonfile.readFile().
// Returns a promise for the parsed JSON. The original returned
// `jsonfile.readFile(file, callback)`, which evaluates to undefined:
// `return` statements inside the callback are discarded. Called WITHOUT
// a callback, jsonfile.readFile returns a Promise instead.
function getPatients(){
  const jsonfile = require('jsonfile')
  const file = 'src/resources/patients.json'
  return jsonfile.readFile(file)
    .then(obj => obj)
    .catch(err => err); // mirror the original's intent of yielding the error value
}

Dealing with Promises

I wanted to ask a question about the way I am using Promises, since I'm not sure if I have over complicated things.
Firstly I am trying to create some methods that I want to execute one after another as the rely on the outcome of each previous method.
My project structure is like so
my_project
csv
helpers
FileDownload.js
scripts
getCSVData.js
app.js
Each file at the moment looks like
## FileDownload.js
const fetch = require('node-fetch');
const fs = require('fs');
module.exports = function(url, target) {
return fetch(url)
.then(function(res) {
return new Promise((resolve, reject) => {
var dest = fs.createWriteStream(target);
res.body.pipe(dest)
.on('finish', () => resolve()) // ** Resolve on success
.on('error', reject); // ** Reject on error
});
}).then(result => {
console.log(`File saved at ${target}`)
return result;
});
}
## getCSVData.js
const file_download = require('../helpers/FileDownload')
// BUG: file_download returns a promise, but this wrapper does not `return`
// it - so getPremierLeagueData() evaluates to undefined, and calling
// .then() on that result throws "Cannot read property 'then' of undefined".
function getPremierLeagueData() {
file_download("http://www.football-data.co.uk/mmz4281/1718/E0.csv", "./csv/premier_league/premier_league.csv")
}
module.exports = {
getPremierLeagueData: getPremierLeagueData
}
## app.js
// app.js - this chain only works once getPremierLeagueData returns the
// promise from file_download; otherwise .then is called on undefined.
const premier_league = require('./scripts/getCSVData')
premier_league.getPremierLeagueData()
.then(function(result){ console.log(result)})
When running node app.js i get the error TypeError: Cannot read property 'then' of undefined
If I go back to my FileDownload function and console.log(result) that is undefined.
My confusion here is that because resolve() is called I am thinking that the Promise is resolved? So that should carry through to the .then.
I am clearly misunderstanding something here.
Your getPremierLeagueData function does not return a Promise. It does not return anything…
You just need to return the result of file_download, which is already a Promise:
function getPremierLeagueData() {
return file_download(args)
}
Need to return from getPremierLeagueData
// Fixed: forwarding file_download's promise lets the caller chain .then.
function getPremierLeagueData() {
return file_download("http://www.football-data.co.uk/mmz4281/1718/E0.csv", "./csv/premier_league/premier_league.csv")
}
OR ( Use new ES6 syntax )
const getPremierLeagueData = () => file_download("http://www.football-data.co.uk/mmz4281/1718/E0.csv", "./csv/premier_league/premier_league.csv")
Note :
you can also reduce this
module.exports = {
getPremierLeagueData: getPremierLeagueData
}
with
module.exports = { getPremierLeagueData }
I've done some minor changes in your files . You are neither resolving anything in FileDownload nor are you returning anything in getCSVData . Hence the final console will print nothing .Please try the code below
## FileDownload.js
const fetch = require('node-fetch');
const fs = require('fs');
// Downloads `url` and pipes the response body into a file at `target`,
// wrapping the whole operation in a single promise: resolves with the
// 'finish' event's payload, rejects on a stream error.
module.exports = function(url, target) {
return new Promise(function(resolve,reject){
fetch(url)
// NOTE(review): a rejected fetch is not caught here, leaving the outer
// promise pending forever on a network failure - consider .catch(reject)
.then(function(res) {
var dest = fs.createWriteStream(target);
res.body.pipe(dest)
.on('finish', function(data){
console.log(`File saved at ${target}`)
resolve(data)
})
.on('error', function(){
// NOTE(review): rejects with no reason - callers cannot see why it failed
reject();
}) // ** Reject on error
})
})
}
## getCSVData.js
const file_download = require('../helpers/FileDownload')
// Fixed getCSVData.js - the promise from file_download is now returned,
// so app.js can chain .then on it.
function getPremierLeagueData() {
return file_download("http://www.football-data.co.uk/mmz4281/1718/E0.csv", "./csv/premier_league/premier_league.csv")
}
module.exports = {
getPremierLeagueData: getPremierLeagueData
}
## app.js
const premier_league = require('./scripts/getCSVData')
premier_league.getPremierLeagueData()
.then(function(result){ console.log(result)})

Undefined when returning value

I've node project.
Root file is index.js and file helper.js, here I've some helper functions and it imported to index.js.
I'm trying to get some data, using function in helper.js, but when I calling it in index.js it returning undefined.
But inside helper.js everything is OK: console.log shows the data that I need.
How can I fix this problem?
index.js file content:
const helper = require('./helper');
let data = helper.getData();
console.log(data); // undefined
helper.js file content:
const fs = require('fs');
// BUG: fs.readFile is asynchronous; the `return allData` below returns
// from the inner callback only, so getData() itself returns undefined.
module.exports = {
getData: () => {
fs.readFile('data.json', 'utf8', (err, data) => {
const allData = JSON.parse(data); // `err` unchecked - on failure `data` is undefined here
console.log(allData); // IS OK!
return allData; // discarded: never reaches the caller of getData()
});
}
}
You can use Promise:
const fs = require('fs');
module.exports = {
getData: () => {
return new Promise(function(resolve, reject){
fs.readFile('data.json', 'utf8', (err, data) => {
if(err){
reject(err);
} else {
try {
resolve(JSON.parse(data));
} catch(ex){
reject(ex);
}
}
});
});
}
}
and then:
helper.getData().then(function(data){
console.log(data);
}, function(err){
// here something failed
});
The problem is that the fs.readFile method is asynchronous and will not return any data as its result; check the documentation here.
So one option is to use a Promise as I did, or to use a callback as suggested in the answer by @Tatsuyuki Ishi; you can check the docs about the callback implementation.
The problem is that fs.readFile is an asynchronous function and so doesn't return anything.
If you really need it to return something you can use the synchronous version, fs.readFileSync.
Otherwise - and a better way to do it - would be to have getData return a promise that you can then resolve with allData.
readFile is an asynchronous function, which accepts a callback. You have two options:
1 . Get a callback as parameter in getData().
// Option 1: accept a callback and deliver the parsed data through it.
getData: (callback) => {
fs.readFile('data.json', 'utf8', (err, data) => {
const allData = JSON.parse(data); // NOTE(review): `err` is still unchecked here
console.log(allData); // IS OK!
callback(allData);
});
}
2 . Use the synchronous version.
// Option 2: the synchronous variant blocks and returns the data directly.
getData: () => {
var data = fs.readFileSync('data.json', 'utf8');
const allData = JSON.parse(data);
console.log(allData); // IS OK!
return allData;
}
Of course, you can use Promise which is more beautiful on chaining things, but it's often used with dependencies like Bluebird.
The problem is, you are returning allData from the callback function, not the getData function. And since getData has no explicit return, your helper.getData() function will return undefined and this value would printed instead of what you wanted.
I suggest using Promise to return the data properly, as in #sand's answer.

Jest: resolves return undefined from Promise

I am using jest for testing. I want to test the return of my function:
const xml2js = require('xml2js')
const parser = new xml2js.Parser()
// Parses OPML/XML text, resolving with {} to keep the test case simple.
// NOTE(review): the guard rejects only when BOTH err and data are non-null,
// and resolve({}) then runs unconditionally - `if (err !== null) reject(err);
// else resolve({})` is what the second answer on this page suggests instead.
function opmlParser(xmlData) {
return new Promise((resolve, reject) => {
parser.parseString(xmlData, (err, data) => {
if (err !== null && data !== null) reject(err)
resolve({})
})
})
}
The promise resolves {} in order to be in simple test case.
So, I want to test to following function, and I expect the result to be a promise containing {}.
I have the following code:
const fs = require('fs');
test('minimal-opml', () => {
const xmlData = fs.readFileSync('test/opml-parser/opml/test1.opml')
return expect(opmlParser(xmlData)).resolves.toEqual({})
})
As the Jest documentation says, I should use the resolves statement before matching the result, and use return.
But, I got an issue:
TypeError: Cannot read property 'toEqual' of undefined
resolves returns undefined, so I can't continue to test values.
I tried to add global.Promise = require.requireActual('promise'), but it still does not work.
Do you have any idea what wrong I am doing?
The resolves method is available from Jest 20.0.0+, so it is not available in your version. source
This is why it returns undefined.
You have a few issues in checking for an error in the opmlParser function. You want to check if there is an error and reject, else resolve.
Try this (brackets for clarity, and you may need to refine this to your specific case):
if (err !== null) {
reject(err)
} else {
resolve({})
}

Fastest way to copy a file in Node.js

The project that I am working on (Node.js) implies lots of operations with the file system (copying, reading, writing, etc.).
Which methods are the fastest?
Use the standard built-in way fs.copyFile:
const fs = require('fs');
// File destination.txt will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
if (err) throw err;
console.log('source.txt was copied to destination.txt');
});
If you have to support old end-of-life versions of Node.js - here is how you do it in versions that do not support fs.copyFile:
const fs = require('fs');
fs.createReadStream('test.log').pipe(fs.createWriteStream('newLog.log'));
Same mechanism, but this adds error handling:
// Stream-copy `source` to `target` with error handling; `cb` is invoked
// exactly once - cb(err) on the first failure, cb() on success.
function copyFile(source, target, cb) {
  let callbackFired = false;

  function finish(err) {
    if (!callbackFired) {
      callbackFired = true;
      cb(err);
    }
  }

  const readStream = fs.createReadStream(source);
  readStream.on("error", finish);

  const writeStream = fs.createWriteStream(target);
  writeStream.on("error", finish);
  writeStream.on("close", function () {
    finish();
  });

  readStream.pipe(writeStream);
}
Since Node.js 8.5.0 we have the new fs.copyFile and fs.copyFileSync methods.
Usage example:
var fs = require('fs');
// File "destination.txt" will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
if (err)
throw err;
console.log('source.txt was copied to destination.txt');
});
I was not able to get the createReadStream/createWriteStream method working for some reason, but using the fs-extra npm module it worked right away. I am not sure of the performance difference though.
npm install --save fs-extra
var fs = require('fs-extra');
fs.copySync(path.resolve(__dirname, './init/xxx.json'), 'xxx.json');
Fast to write and convenient to use, with promise and error management:
// Promise-based stream copy. On any error both streams are cleaned up
// and the error is re-thrown so the caller's .catch still fires.
function copyFile(source, target) {
  const reader = fs.createReadStream(source);
  const writer = fs.createWriteStream(target);
  const piped = new Promise((resolve, reject) => {
    reader.on('error', reject);
    writer.on('error', reject);
    writer.on('finish', resolve);
    reader.pipe(writer);
  });
  return piped.catch((error) => {
    reader.destroy();
    writer.end();
    throw error;
  });
}
The same with async/await syntax:
// Same contract expressed with async/await: resolves once writing has
// finished; on failure, cleans both streams up and rethrows.
async function copyFile(source, target) {
  const input = fs.createReadStream(source);
  const output = fs.createWriteStream(target);
  try {
    return await new Promise((resolve, reject) => {
      input.on('error', reject);
      output.on('error', reject);
      output.on('finish', resolve);
      input.pipe(output);
    });
  } catch (error) {
    input.destroy();
    output.end();
    throw error;
  }
}
Well, usually it is good to avoid asynchronous file operations. Here is the short (i.e. no error handling) sync example:
var fs = require('fs');
fs.writeFileSync(targetFile, fs.readFileSync(sourceFile));
If you don't care about it being async, and aren't copying gigabyte-sized files, and would rather not add another dependency just for a single function:
// Synchronous whole-file copy: fine for small files when blocking is OK.
function copySync(src, dest) {
  fs.writeFileSync(dest, fs.readFileSync(src));
}
Mike Schilling's solution with error handling with a shortcut for the error event handler.
// Stream copy with the error handlers passed directly; `cb(err)` fires
// exactly once - with the error on failure, with no argument on success.
function copyFile(source, target, cb) {
  let settled = false;
  const finish = (err) => {
    if (settled) return;
    settled = true;
    cb(err);
  };
  const input = fs.createReadStream(source);
  const output = fs.createWriteStream(target);
  input.on("error", finish);
  output.on("error", finish);
  output.on("close", () => finish());
  input.pipe(output);
}
You may want to use async/await, since node v10.0.0 it's possible with the built-in fs Promises API.
Example:
const fs = require('fs')
// async/await wrapper over the built-in promise-based fs.copyFile.
const copyFile = async (from, to) => {
  await fs.promises.copyFile(from, to)
}
Note:
As of node v11.14.0, v10.17.0 the API is no longer experimental.
More information:
Promises API
Promises copyFile
const fs = require("fs");
fs.copyFileSync("filepath1", "filepath2"); //fs.copyFileSync("file1.txt", "file2.txt");
This is what I personally use to copy a file and replace another file using Node.js :)
For fast copies you should use the fs.constants.COPYFILE_FICLONE flag. It allows (for filesystems that support this) to not actually copy the content of the file. Just a new file entry is created, but it points to a Copy-on-Write "clone" of the source file.
To do nothing/less is the fastest way of doing something ;)
https://nodejs.org/api/fs.html#fs_fs_copyfile_src_dest_flags_callback
let fs = require("fs");
fs.copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE, // copy-on-write clone when the filesystem supports it
  (err) => {
    if (err) {
      // TODO: handle error
      console.log("error");
    } else {
      // fixed: without this `else` the original fell through and printed
      // "success" even after an error had been reported
      console.log("success");
    }
  }
);
Using promises instead:
let fs = require("fs");
let util = require("util");
let copyFile = util.promisify(fs.copyFile);
copyFile(
  "source.txt",
  "destination.txt",
  fs.constants.COPYFILE_FICLONE
)
  // fixed order: .then BEFORE .catch - the original's .catch(...).then(...)
  // recovered the rejection, so "success" was printed even when the copy failed
  .then(() => console.log("success"))
  .catch(() => console.log("error"));
Use Node.js's built-in copy function
It provides both async and sync version:
const fs = require('fs');
// File "destination.txt" will be created or overwritten by default.
fs.copyFile('source.txt', 'destination.txt', (err) => {
if (err)
throw err;
console.log('source.txt was copied to destination.txt');
});
fs.copyFileSync(src, dest[, mode])
You can do it using the fs-extra module very easily:
const fse = require('fs-extra');
let srcDir = 'path/to/file';
let destDir = 'pat/to/destination/directory';
try {
  // fs-extra's moveSync is synchronous and takes no callback; it throws
  // on failure, so wrap it in try/catch (the original passed a callback
  // that would simply be ignored).
  fse.moveSync(srcDir, destDir); // To move a file permanently from a directory
  console.log("success!");
} catch (err) {
  console.error(err);
}
Or
try {
  // copySync is likewise synchronous - no callback parameter; errors are
  // thrown, so handle them with try/catch.
  fse.copySync(srcDir, destDir); // To copy a file from a directory
  console.log("success!");
} catch (err) {
  console.error(err);
}
I wrote a little utility to test the different methods:
https://www.npmjs.com/package/copy-speed-test
run it with
npx copy-speed-test --source someFile.zip --destination someNonExistentFolder
It does a native copy using child_process.exec(), a copy file using fs.copyFile and it uses createReadStream with a variety of different buffer sizes (you can change buffer sizes by passing them on the command line. run npx copy-speed-test -h for more info).
Mike's solution, but with promises:
const FileSystem = require('fs');
exports.copyFile = function copyFile(source, target) {
return new Promise((resolve,reject) => {
const rd = FileSystem.createReadStream(source);
rd.on('error', err => reject(err));
const wr = FileSystem.createWriteStream(target);
wr.on('error', err => reject(err));
wr.on('close', () => resolve());
rd.pipe(wr);
});
};
Improvement of one other answer.
Features:
If the dst folders do not exist, it will automatically create it. The other answer will only throw errors.
It returns a promise, which makes it easier to use in a larger project.
It allows you to copy multiple files, and the promise will be done when all of them are copied.
Usage:
var onePromise = copyFilePromise("src.txt", "dst.txt");
var anotherPromise = copyMultiFilePromise(new Array(new Array("src1.txt", "dst1.txt"), new Array("src2.txt", "dst2.txt")));
Code:
function copyFile(source, target, cb) {
  console.log("CopyFile", source, target);
  // Recursively create the destination directory chain if it is missing;
  // otherwise createWriteStream would fail with a missing-directory error.
  var ensureDirectoryExistence = function (filePath) {
    var dirname = path.dirname(filePath);
    if (fs.existsSync(dirname)) {
      return true;
    }
    ensureDirectoryExistence(dirname);
    fs.mkdirSync(dirname);
  }
  ensureDirectoryExistence(target);

  var finished = false;
  // Invoke the caller's callback exactly once, whichever event fires first.
  function settle(err) {
    if (!finished) {
      finished = true;
      cb(err);
    }
  }

  var reader = fs.createReadStream(source);
  var writer = fs.createWriteStream(target);
  reader.on("error", function (err) { settle(err); });
  writer.on("error", function (err) { settle(err); });
  writer.on("close", function () { settle(); });
  reader.pipe(writer);
}
// Wrap the callback-style copyFile in a promise: the callback receives
// either undefined (success) or an error value (failure).
function copyFilePromise(source, target) {
  return new Promise(function (resolve, reject) {
    copyFile(source, target, function (err) {
      return err === undefined ? resolve() : reject(err);
    });
  });
}
// Copy several [source, target] pairs in parallel; the returned promise
// settles once every copy has finished (or rejects on the first failure).
function copyMultiFilePromise(srcTgtPairArr) {
  const pending = srcTgtPairArr.map(function (pair) {
    return copyFilePromise(pair[0], pair[1]);
  });
  return Promise.all(pending);
}

Categories

Resources