Why is my callback function not being invoked? - javascript

I have a function that gets the contents of a load of files and places them in an array. I need that array to be available in the function that calls it. I've got a callback in the calling function, but it doesn't get invoked.
Any suggestions?
(For purposes of illustration, I have 2 console.log calls on the array I need to get back. The first is inside my callback, which never gets called, and the second is outside it and unsurprisingly shows an empty array.)
const fs = require('fs');
const path = require('path')
const chai = require('chai');
const chaiExpect = chai.expect;
const logger = require('../../../../app/logger/winston');
const pageFolder = 'app/views/pages';
let pages = [];

fdescribe('html pages', function () {
  buildPages(function () {
    console.log(pages);
  });
  console.log(pages)
})

function buildPages() {
  filewalker(pageFolder, function (err, pageList) {
    if (err) {
      throw err;
    }
    pageList.forEach(function (file) {
      if (!fs.file.isDirectory) {
        fs.readFileSync(file.substring(39), 'utf8', function (err, pageContents) {
          if (err) throw err;
          pages.push({ file: file.substring(39), page: pageContents })
        })
      }
    })
  });
}

You should pass a callback to buildPages and call it once the work is done:
const fs = require('fs');
const path = require('path')
const chai = require('chai');
const chaiExpect = chai.expect;
const logger = require('../../../../app/logger/winston');
const pageFolder = 'app/views/pages';
let pages = [];

fdescribe('html pages', function () {
  buildPages(function () {
    console.log(pages);
  });
  console.log(pages)
})

function buildPages(cb) {
  filewalker(pageFolder, function (err, pageList) {
    if (err) {
      throw err;
    }
    pageList.forEach(function (file) {
      if (!fs.file.isDirectory) {
        fs.readFileSync(file.substring(39), 'utf8', function (err, pageContents) {
          if (err) throw err;
          pages.push({ file: file.substring(39), page: pageContents })
        })
      }
    })
    cb(); // <--- HERE
  });
}

You're calling the buildPages function with a function argument even though you didn't configure it to accept any arguments. That's why the function you pass in never gets called.
Simply add a callback parameter to buildPages and call that callback once you're done with all the tasks in buildPages:
const fs = require('fs');
const path = require('path')
const chai = require('chai');
const chaiExpect = chai.expect;
const logger = require('../../../../app/logger/winston');
const pageFolder = 'app/views/pages';
let pages = [];

fdescribe('html pages', function () {
  buildPages(function () {
    console.log(pages);
  });
  console.log(pages)
})

function buildPages(callback) {
  filewalker(pageFolder, function (err, pageList) {
    if (err) {
      throw err;
    }
    pageList.forEach(function (file) {
      if (!fs.file.isDirectory) {
        fs.readFileSync(file.substring(39), 'utf8', function (err, pageContents) {
          if (err) throw err;
          pages.push({ file: file.substring(39), page: pageContents })
        })
      }
    })
    callback();
  });
}
But if you want to get some data out of another function, I would highly suggest not relying on a global variable: your code will be harder to maintain the more things you add to it. Instead, pass your data to the callback as an argument:
function functionWithResult(callback) {
  setTimeout(() => {
    var result = [1,2,3,4,5]
    callback(result)
  }, 1000)
}

functionWithResult(function(result) {
  console.log(result)
});
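Applied to the original code, that advice might look roughly like the sketch below. It is only a sketch: it assumes filewalker behaves as in the question, keeps the substring(39) trimming as-is, and swaps fs.readFileSync (which does not take a callback) for the asynchronous fs.readFile, handing the collected pages to the callback once every file has been read.

function buildPages(callback) {
  filewalker(pageFolder, function (err, pageList) {
    if (err) return callback(err);
    const pages = [];
    let pending = pageList.length;
    if (pending === 0) return callback(null, pages);
    pageList.forEach(function (file) {
      // fs.readFile (unlike readFileSync) actually accepts a callback
      fs.readFile(file.substring(39), 'utf8', function (err, pageContents) {
        if (err) return callback(err);
        pages.push({ file: file.substring(39), page: pageContents });
        if (--pending === 0) callback(null, pages); // all files read
      });
    });
  });
}

buildPages(function (err, pages) {
  if (err) throw err;
  console.log(pages); // populated array, no global needed
});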

Related

Using async/await syntax with node stream

I'm using Node 18.7 on Ubuntu. I'm trying to parse a bunch of CSV files into objects (using csv-parse), ultimately to load into a DB. Because there are large numbers of these, I decided to try streams, and I'd like to use the async/await style.
So far I have:
const fs = require('fs');
const { parse } = require('csv-parse');
const path = __dirname + '/file1.csv';
const opt = { columns: true, relax_column_count: true, skip_empty_lines: true, skip_records_with_error: true };
console.log(path);
const { pipeline } = require('stream');
// const pipeline = stream.pipeline;

async function readByLine(path, opt) {
  const readFileStream = fs.createReadStream(path);
  var csvParser = parse(opt, function (err, records) {
    if (err) throw err;
  });
  await pipeline(readFileStream, csvParser, (err) => {
    if (err) {
      console.error('Pipeline failed.', err);
    } else {
      console.log('Pipeline succeeded.');
    }
  });
  for await (const record of csvParser) {
    console.log(record);
  }
}
readByLine(path, opt)
When I run this I see:
Pipeline succeeded.
But the parsed objects are not sent to the console. What am I doing wrong?
edit1:
I changed the code to:
async function readByLine(path, opt) {
  const readFileStream = fs.createReadStream(path);
  var csvParser = parse(opt, function (err, records) {
    if (err) throw err;
  });
  await pipeline(readFileStream, csvParser, (err) => {
    if (err) {
      console.error('Pipeline failed.', err);
    } else {
      console.log('Pipeline succeeded.');
    }
  });
  // for await (const record of csvParser) {
  //   console.log(record);
  // }
  return csvParser;
}

(async function () {
  const o = await readByLine(path, opt);
  console.log(o);
})();
The result is an object with a huge number of properties, but some of them do look set, as in the screenshot.
You can only usefully await a promise.
The pipeline function you are using doesn't return a promise.
If you look at the documentation you will see:
The pipeline API provides a promise version, which can also receive an options argument as the last parameter with a signal <AbortSignal> property. When the signal is aborted, destroy will be called on the underlying pipeline, with an AbortError.
const { pipeline } = require('node:stream/promises');
const fs = require('node:fs');
const zlib = require('node:zlib');

async function run() {
  await pipeline(
    fs.createReadStream('archive.tar'),
    zlib.createGzip(),
    fs.createWriteStream('archive.tar.gz')
  );
  console.log('Pipeline succeeded.');
}

run().catch(console.error);
Note the different value passed to require. Use that version of pipeline instead.
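As a rough sketch of how that could look for the CSV case in the question (assuming Node 16+ and the same parse(opt) factory from csv-parse), the promise-based pipeline also accepts an async function as its final stage, and the parser can be consumed there with for await:

const fs = require('node:fs');
const { pipeline } = require('node:stream/promises');
const { parse } = require('csv-parse');

async function readByLine(path, opt) {
  const records = [];
  await pipeline(
    fs.createReadStream(path),
    parse(opt),
    async function (source) {
      // the parser output is async-iterable, so the records are consumed here
      for await (const record of source) {
        records.push(record);
      }
    }
  );
  return records; // resolves only after the whole file has been parsed
}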

How to get value inside foreach in nodejs

I'm trying to develop a simple app: if you pass a parameter on the command line, the application searches inside a directory, and if the text matches in some of the files, the file should be saved in a list. But when I put in the console.log, the value is not updated.
Here is my code:
const folder = "./movies/data";
const fs = require("fs");

var args = process.argv.slice(2);
console.log("myArgs: ", args);

var count = 0;
var list = [];

fs.readdir(folder, (err, files) => {
  files.forEach((file) => {
    fs.readFile(`movies/data/${file}`, "utf8", function (err, data) {
      if (err) console.log(err);
      if (data.includes("walt disney")) {
        count++;
        list.push(data);
        console.log("Found in: ", data);
      }
    });
  });
  console.log(`Found ${count} occurrences for the term ${args}.`);
});
Any suggestions about what I'm doing wrong?
For your program to work, you will have to add some Promise / async/await logic. At the moment you try to read from the files, they are still undefined, so the fs.readdir() call will not provide the wanted result.
This should work:
const { resolve } = require('path');
const { readdir } = require('fs').promises;
const fs = require("fs");

var args = process.argv.slice(2);
const pathToFiles = "./movies/";

async function getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  const files = await Promise.all(dirents.map((dirent) => {
    const res = resolve(dir, dirent.name);
    return dirent.isDirectory() ? getFiles(res) : res;
  }));
  return Array.prototype.concat(...files);
}

getFiles(pathToFiles)
  .then(files => {
    console.log(files)
    files.forEach((file) => {
      fs.readFile(file, 'utf8', (err, data) => {
        if (err) console.log(err);
        if (data.includes(args)) {
          console.log(`${args} found in ${file}.`);
        } else {
          console.log(`${args} not found.`);
        }
      });
    })
  })
  .catch(e => console.error(e));
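If the goal is also to print a count only after every file has been checked (the part that failed in the original code), one way to do it, sketched here with fs.promises on top of the getFiles helper above, is to wait for all the reads with Promise.all:

const { readFile } = require('fs').promises;

getFiles(pathToFiles)
  .then(files => Promise.all(files.map(file => readFile(file, 'utf8'))))
  .then(contents => {
    // only counted once every file has been read
    const matches = contents.filter(data => data.includes(args));
    console.log(`Found ${matches.length} occurrences for the term ${args}.`);
  })
  .catch(e => console.error(e));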

Express.js: how to export the asynchronous result of a promise from within my module

Within my Express.js app I have 2 modules:
the first is an initializer module, which I call at the beginning of the second, the main module:
mainModule.js
const result = require('initilalizerModule')
...
initilalizerModule.js:
const soap = require('soap');
const path = require('path');

// passed in params for DAO
const endpoint = 'https://myurl.com';
const url = path.resolve(__dirname, 'mycontract.wsdl');
const soapOptions = {
  forceSoap12Headers: true,
  connection: 'keep-alive',
  disableCache: false
};

function initialize() {
  console.log("test")
  return new Promise((resolve, reject) => {
    soap.createClient(url, soapOptions, function (err, RESULT) {
      if (err) {
        reject('err');
      }
      else {
        client.setEndpoint(endpoint);
        resolve(RESULT);
      }
    });
  })
}

module.exports = {
  myResult : ....
}
I have this asynchronous initialize() method which produces a RESULT.
My question is: how do I export this RESULT object from my initilalizerModule so it can be used afterwards in my mainModule?
You have to understand async programming: require is synchronous and cached by default. For anything that you want to perform after some async task, you have to use a callback. Here is a basic example.
// main.js
const {init} = require("./provider")

init((data) => {
  console.log(data) // somedata
})

// provider.js
const someDelay = () => new Promise(r => {
  setTimeout(() => r("somedata"), 1000)
})

exports.init = (cb) => {
  someDelay().then(cb)
}
If you are using the latest Node.js, you can use async/await.
async/await version
// main.js
const {init} = require("./provider")

async function start() {
  const data = await init()
  console.log(data) // somedata
}

start()

// provider.js
const someDelay = () => new Promise(r => {
  setTimeout(() => r("somedata"), 1000)
})

exports.init = async () => {
  return await someDelay()
}
Hope this will answer your question! Cheers.
On a quick look you can define your initialize() function as a variable, which you can export, e.g. const myResult = function initialize() {...
and then use
module.exports = { myResult }
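One way to make that concrete is the sketch below. Note that it exports the promise returned by calling initialize(), rather than the function itself, so the consumer can .then() or await it; it assumes initialize() is defined as in the question:

// initilalizerModule.js (sketch)
const myResult = initialize(); // a Promise that will resolve to RESULT
module.exports = { myResult };

// mainModule.js (sketch)
const { myResult } = require('./initilalizerModule');

myResult
  .then(result => {
    // RESULT is only available here, once the SOAP client has been created
    console.log(result);
  })
  .catch(err => console.error(err));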

undefined var module.exports

For some reason, I can't get values returned from a module.exports function in a separate custom module. I tried many approaches from many sources across more than ten researched posts. If you want to vote down, please read my bio; if you want to help, I will be happy to accept your answer.
// restapi/index.js
module.exports = function gifs() {
  giphy.search('Pokemon', function (err, res) {
    return res.data[0];
  });
}

// main server.js
var readapi = require('restapi')
console.log(readapi.gifs());

// Output:__________________
TypeError: readapi.gifs is not a function
You are exporting a function, not an object with a function, and you are using a synchronous call (console.log) with an asynchronous operation; it won't work.
You need to write it like this:
module.exports = function gifs(cb) {
  giphy.search('Pokemon', function (err, res) {
    if(err) { cb(err) }
    else { cb(null, res.data[0]) }
  });
}

----

var readapi = require('restapi')
readapi((err, data) => { console.log({err, data}) })
Remember the difference between:
module.exports = {
  hello: () => { console.log('world') }
}
// usage: require('./hello').hello()

module.exports = () => { console.log('world') }
// usage: require('./hello')()
Try this code:
module.exports.gifs = function gifs() {
  return new Promise((resolve, reject) => {
    giphy.search('Pokemon', function (err, res) {
      if (err) reject(err);
      else resolve(res.data[0]);
    });
  });
}

// main server.js
var readapi = require('restapi')
readapi.gifs().then(console.log);
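For what it's worth, the same promise-based gifs() can also be consumed with async/await; a minimal sketch:

// main server.js (sketch)
var readapi = require('restapi');

(async () => {
  try {
    const gif = await readapi.gifs();
    console.log(gif);
  } catch (err) {
    console.error(err);
  }
})();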

How should I download a file in Node? [duplicate]

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
  fs.readdir(markdownFolder, function(err, markdown) {
    if (err) throw err;
    markdown.forEach(function(file) {
      fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
        if (err) throw err;
        fileNoExtension = file.slice(0, file.indexOf('.'));
        if (req.params.markdown_file_noext == fileNoExtension) {
          res.json({
            'title': fileNoExtension,
            'markdown': marked(file_content)
          });
        };
      });
    });
  });
});
But I end up having a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as a promise library:
const Q = require('q');
const fs = require('fs');

const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
  const promises = [];
  markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
  return Q.all(promises);
}).then(files => {
  // Do your magic.
}).catch(error => {
  // Do something with error.
});
You have different options.
Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module (there is a short bluebird sketch at the end of this answer).
For a more advanced option, you can use generators and Promises to make your code look more like synchronous code. Take a look at co or bluebird.coroutine.
With Promises you could do it like this:
const path = require('path');

var apiRouter = express.Router();
const markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
  readdir(markdownFolder)
    .then((files) => {
      const tasks = files.map((file) => {
        const filePath = path.resolve(markdownFolder, file);
        // Keep each file name together with its contents for the next step
        return readFile(filePath).then((content) => ({ file, content }));
      });
      return Promise.all(tasks); // Read all files
    })
    .then((fileContents) => {
      return fileContents
        .filter(({ file }) => {
          const fileNoExtension = file.slice(0, file.indexOf('.'));
          return req.params.markdown_file_noext == fileNoExtension;
        })
        .map(({ file, content }) => ({
          'title': file.slice(0, file.indexOf('.')),
          'markdown': marked(content)
        }));
    })
    .then((results) => {
      // It's better if you aggregate all results in one array and return it,
      // instead of calling res.json for each result
      res.json(results);
    })
    .catch((err) => {
      // All errors are caught here
      console.log(err);
    });
});

function readdir(folderPath) {
  return new Promise((resolve, reject) => {
    fs.readdir(folderPath, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files);
    });
  });
}

function readFile(filePath) {
  return new Promise((resolve, reject) => {
    fs.readFile(filePath, 'utf8', (err, file_content) => {
      if (err) {
        return reject(err);
      }
      resolve(file_content);
    });
  });
}
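And, for completeness, a short sketch of the bluebird option mentioned above (assuming bluebird is installed): promisifyAll adds Async-suffixed variants of the fs methods that return promises.

const Promise = require('bluebird');
const fs = Promise.promisifyAll(require('fs'));

const markdownFolder = './markdown/';

fs.readdirAsync(markdownFolder)
  .then(files => Promise.all(
    files.map(file => fs.readFileAsync(markdownFolder + file, 'utf8'))
  ))
  .then(contents => {
    // contents[i] is the text of files[i]
  })
  .catch(err => console.log(err));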
