I already asked this question, but I feel like I didn't ask it properly.
I'm trying to make a little encryption module in NodeJS, but I'm having a very hard time with its asynchronous nature.
First off, the result variable in my main file is undefined a millisecond after the script is called; I was expecting that.
Also, the file is processed while the encryption occurs, not before.
Note that I do not wish to encrypt the file itself.
The big question:
How can I make everything run smoothly and in order? :D
www.js
var mod = require('./mymodule.js')
var result = mod.doencrypt();
mymodule.js
module.exports.doencrypt = function() {
var content = processFile(); //Open a file, increment counter
var key = generateKey();
var iv = generateIV();
var encrypt = doEncryption(content);
return encrypt;
}
//File manipulation
const fs = require('fs');
const { promisify } = require('util');
const readFile = promisify(fs.readFile); // assumed helper; the post doesn't show where readFile comes from
async function openFile() {
    // Buffer.from replaces the deprecated new Buffer()
    return Buffer.from(await readFile('monolitic.txt', "binary"));
}
async function saveFile(bin) {
await fs.writeFile("monolitic.txt", bin, "binary", function(err) {
if(err) {
console.log(err);
} else {
console.log("The monolitic file was saved!");
return bin;
}
});
}
function processFile() {
console.log("Reading buffer")
openFile().then(function (bin) {
monoCounter = bin;
//Increment
inc(monoCounter);
console.log(monoCounter);
monoCounter = saveMonoCounter(monoCounter);
return monoCounter;
}).catch((err) => {
monoCounter = Buffer.alloc(128);
saveMonoCounter(Buffer.alloc(128));
});
}
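For reference, here is one way to make these steps run in order. This is a minimal sketch, not the asker's exact code: it assumes Node's fs.promises API and folds the undefined saveMonoCounter step into processFile. Everything becomes async, and each step is awaited:
const { readFile, writeFile } = require('fs').promises;
module.exports.doencrypt = async function() {
    const content = await processFile(); // wait for read/increment/save to finish
    const key = generateKey();
    const iv = generateIV();
    return doEncryption(content); // key/iv generation kept as in the original
};
async function processFile() {
    let monoCounter;
    try {
        monoCounter = await readFile('monolitic.txt'); // no encoding: yields a Buffer
        inc(monoCounter);
    } catch (err) {
        monoCounter = Buffer.alloc(128); // file missing or unreadable: start a fresh counter
    }
    await writeFile('monolitic.txt', monoCounter);
    return monoCounter;
}
The caller in www.js then has to consume a promise too, e.g. mod.doencrypt().then(result => ...), since an async function can only deliver its result asynchronously.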
Related
Let's say this is my code (just a sample I wrote up to show the idea):
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");
RunFunc();
function RunFunc() {
extract.Start();
console.log("Extraction complete");
rescore.Start();
console.log("Scoring complete");
}
I don't want rescore.Start() to run until extract.Start() has completely finished. Both scripts contain a spiderweb of functions, so putting a callback directly into the Start() function does not appear viable, as the final function won't return to it, and I am having a lot of trouble understanding how to use promises. What are ways I can make this work?
These are the scripts that extract.Start() begins and ends with. OpenWriter() is reached through multiple other functions and streams, and the actual fileWrite.write() lives in another script attached to this one (though that isn't needed to detect the end of the run). Currently, fileWrite.on('finish') is where I want the script to be considered done.
module.exports = {
Start: function CodeFileRead() {
//this.country = countryIn;
//Read stream of the address components
fs.createReadStream("Reference\\" + postValid.country + " ADDRESS REF DATA.csv")
//Change separator based on file
.pipe(csv({escape: null, headers: false, separator: delim}))
//Indicate start of reading
.on('resume', (data) => console.log("Reading complete postal code file..."))
//Processes lines of data into storage array for comparison
.on('data', (data) => {
postValid.addProper[data[1]] = JSON.stringify(Object.values(data)).replace(/"/g, '').split(',').join('*');
})
//End of reading file
.on('end', () => {
postValid.complete = true;
console.log("Done reading");
//Launch main script, delayed to here in order to not read ahead of this stream
ThisFunc();
});
},
extractDone
}
function OpenWriter() {
//File stream for writing the processed chunks into a new file
fileWrite = fs.createWriteStream("Processed\\" + fileName.split('.')[0] + "_processed." + fileName.split('.')[1]);
fileWrite.on('open', () => console.log("File write is open"));
fileWrite.on('finish', () => {
console.log("File write is closed");
});
}
EDIT: I do not want to simply add the next script onto the end of the previous one and forgo the master file, as I don't know how long it will be, and it's supposed to be designed to accept additional scripts past our development period. I cannot just use a package as things stand, because approval in the company takes up to two weeks and I need this more immediately.
DOUBLE EDIT: This is all my code; every script and function was written by me, so I can make the scripts being called do what's needed.
You can just wrap your function body in a Promise and return that.
module.exports = {
Start: function CodeFileRead() {
return new Promise((resolve, reject) => {
fs.createReadStream(
'Reference\\' + postValid.country + ' ADDRESS REF DATA.csv'
)
// .......some code...
.on('end', () => {
postValid.complete = true;
console.log('Done reading');
resolve('success');
});
});
}
};
And run RunFunc like this:
async function RunFunc() {
await extract.Start();
console.log("Extraction complete");
await rescore.Start();
console.log("Scoring complete");
}
// or, when not inside an async function, chain on the returned promise:
RunFunc().then(() => {
    console.log("All Complete");
});
Note: you can/should also handle errors by calling reject("some error") when an error occurs.
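For instance, a sketch along the lines of the block above: the read stream's 'error' event can be forwarded to reject, so that the caller's await throws:
module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            fs.createReadStream(
                'Reference\\' + postValid.country + ' ADDRESS REF DATA.csv'
            )
                // surface stream failures to the awaiting caller
                .on('error', (err) => reject(err))
                // .......some code...
                .on('end', () => {
                    postValid.complete = true;
                    resolve('success');
                });
        });
    }
};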
EDIT, after learning about ThisFunc():
Making a new event emitter will probably be the easiest solution:
eventEmitter.js
const EventEmitter = require('events').EventEmitter
module.exports = new EventEmitter()
const eventEmitter = require('./eventEmitter');
module.exports = {
Start: function CodeFileRead() {
return new Promise((resolve, reject) => {
//after all of your code
eventEmitter.once('WORK_DONE', ()=>{
resolve("Done");
})
});
}
};
function OpenWriter() {
...
fileWrite.on('finish', () => {
console.log("File write is closed");
eventEmitter.emit("WORK_DONE");
});
}
And run RunFunc as before.
There's no generic way to determine when everything a function call does has finished.
It might accept a callback. It might return a promise. It might not provide any kind of method to determine when it is done. It might have side effects that you could monitor by polling.
You need to read the documentation and/or source code for that particular function.
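If the function takes a Node-style callback, for example, you can adapt it yourself. A sketch using util.promisify from Node's standard library:
const { promisify } = require('util');
const fs = require('fs');
// fs.readdir follows the (err, result) callback convention,
// so promisify can turn it into a promise-returning function
const readdirAsync = promisify(fs.readdir);
readdirAsync('.')
    .then(files => console.log(files))
    .catch(err => console.error(err));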
Use async/await (promises). For example:
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");
RunFunc();
async function extract_start() {
    try {
        // Start() must return a promise for this await to actually wait
        await extract.Start();
    }
    catch(e) {
        console.log(e);
    }
}
async function rescore_start() {
    try {
        await rescore.Start();
    }
    catch(e) {
        console.log(e);
    }
}
async function RunFunc() {
await extract_start();
console.log("Extraction complete");
await rescore_start();
console.log("Scoring complete");
}
I am having a weird issue writing to a file in NodeJS.
I have this code in my FileHandler.js:
module.exports.writeFile = function (arr) {
var fs = require('fs');
console.log(arr);
var file = fs.createWriteStream(__dirname+'\\test.txt',{encoding: 'utf8'});
file.on('error', function (err) {
console.log(err); });
file.on("finish", function() {
console.log("finished");
});
arr.forEach(function (item) {
file.write(item+"\n");
});
file.end();
}
If I append
exports.writeFile(["1","2","3"])
to the end of this file and then run node FileHandler.js, the file is created correctly.
However, if I call the writeFile function from another .js file as:
var R = require("r-script");
const dataHandler = require("./DataHandler");
const fileHandler = require("./FileHandler");
var out = R(__dirname + "\\apriori.R");
exports.getRules = function () {
dataHandler.getListOfPageVisitsBySession(1000781912582,1530781912582,function (result){
//ignored result variable
fileHandler.writeFile(["1","2","3"]);
})
}
and pass the exact same array to the function, it doesn't write anything (though the file is created), and neither the error nor the finish event fires.
If it matters, the DataHandler method contains a request module and a GET to another API.
Any clue what the problem is?
Thanks in advance.
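One thing worth checking, as a sketch rather than a confirmed diagnosis: writes to a createWriteStream complete asynchronously, so if the calling process exits before the stream flushes, nothing reaches disk. Returning a promise that settles on finish/error lets the caller wait for the write:
module.exports.writeFile = function (arr) {
    var fs = require('fs');
    return new Promise(function (resolve, reject) {
        var file = fs.createWriteStream(__dirname + '\\test.txt', {encoding: 'utf8'});
        file.on('error', reject);
        file.on('finish', resolve); // fires once everything is flushed after end()
        arr.forEach(function (item) {
            file.write(item + "\n");
        });
        file.end();
    });
};
The caller can then do fileHandler.writeFile([...]).then(...) inside the getRules callback to find out whether the write actually completed.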
I am new to Node, so I am struggling quite a bit with its async nature.
I am trying to create a script that will parse the PDFs inside a directory and output them in txt format in another directory.
To do this, I am using the fs and pdf2json npm packages. I am passing the parseData function as a callback to the loopingFiles function. The only problem I am having is the async nature of Node.
It loops through all the files at the same time, and the output is then a jumbled mess in the last file index.
I would like to process this sequentially, so that it waits until the data is finished parsing, writes to the txt, and then loops again.
I have tried promises but to no avail. Any help would be much appreciated!
var fs = require('fs'),
PDFParser = require("pdf2json");
let pdfParser = new PDFParser(this,1);
var parseData = function(pdf, index) {
txtFile = "/Users/janet/node/pdf/Destination/".concat(index.toString().concat(".txt"))
pdfFile = "/Users/janet/node/pdf/Source/".concat(pdf);
pdfParser.loadPDF(pdfFile);
// Parsing the pdf file in question
pdfParser.on("pdfParser_dataError", errData => console.error(errData.parserError) );
pdfParser.on("pdfParser_dataReady", pdfData => {
fs.writeFile(txtFile, pdfParser.getRawTextContent());
});
};
var loopingFiles = function(callback) {
fs.readdir("/Users/janet/node/pdf/Source", function (err, files) {
if (err) {
console.log(err);
} else {
files.forEach( function(file, index) {
callback(file, index);
});
};
});
};
loopingFiles(parseData);
Something like this?
var fs = require("fs"),
    PDFParser = require("pdf2json");
var parseData = function(pdfs, index = 0) {
    // finished
    if (index >= pdfs.length) return;
    // create a fresh parser per file so listeners don't pile up across calls
    let pdfParser = new PDFParser(this, 1);
    let pdf = pdfs[index];
    let txtFile = "/Users/janet/node/pdf/Destination/".concat(
        index.toString().concat(".txt")
    );
    let pdfFile = "/Users/janet/node/pdf/Source/".concat(pdf);
// Parsing the pdf file in question
pdfParser.on("pdfParser_dataError", errData => {
console.error(errData.parserError)
// not sure if you want to call this here to keep going or not
parseData(pdfs, index + 1);
});
pdfParser.on("pdfParser_dataReady", pdfData => {
fs.writeFile(txtFile, pdfParser.getRawTextContent(), function() {
// when we're all done, call this function again, with the index of the next pdf
parseData(pdfs, index + 1);
});
});
pdfParser.loadPDF(pdfFile);
};
var loopingFiles = function(callback) {
fs.readdir("/Users/janet/node/pdf/Source", function(err, files) {
if (err) {
console.log(err);
} else {
callback(files, 0);
}
});
};
loopingFiles(parseData);
The main difference is passing the whole array of PDFs to the function along with an index, and only calling the function again, with an incremented index, once the current file has completed.
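An equivalent approach, sketched here assuming Node's fs.promises API and the same pdf2json constructor and events used above, is to wrap one parse in a promise and drive the loop with async/await:
const fs = require("fs").promises;
const PDFParser = require("pdf2json");
// wrap a single pdf2json parse in a promise
function parseOne(pdfFile) {
    return new Promise((resolve, reject) => {
        const parser = new PDFParser(null, 1); // fresh parser per file
        parser.on("pdfParser_dataError", errData => reject(errData.parserError));
        parser.on("pdfParser_dataReady", () => resolve(parser.getRawTextContent()));
        parser.loadPDF(pdfFile);
    });
}
async function processAll() {
    const files = await fs.readdir("/Users/janet/node/pdf/Source");
    for (const [index, file] of files.entries()) {
        // each file fully parses and writes before the next one starts
        const text = await parseOne("/Users/janet/node/pdf/Source/" + file);
        await fs.writeFile("/Users/janet/node/pdf/Destination/" + index + ".txt", text);
    }
}
processAll().catch(console.error);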
I'm a beginner in non-blocking environments such as NodeJS. Below is my simple code, which lists all files in a directory:
var readline = require('readline');
var rl = readline.createInterface(process.stdin, process.stdout);
var fs = require('fs');
var datafolder = './datafolder';
var datafoldername = 'datafolder';
rl.setPrompt('Option> ');
rl.prompt();
rl.on('line', function(line) {
if (line === "right") rl.close();
if (line == '1') {
listFile();
}
rl.prompt();
}).on('close', function() {
process.exit(0);
});
function listFile() {
console.log(`File(s) on ${datafolder}`);
fs.readdirSync(datafolder, (err, files) => {
if (err) {
console.log(err);
} else {
files.forEach(filename => {
console.log(filename);
});
}
});
}
If the user presses 1, it's supposed to execute the listFile method and show all the files inside.
My question is: why is fs.readdirSync not executing? The program works if I use readdir(), but that messes up the output to the user.
You are passing a callback to fs.readdirSync() but *Sync() functions don't take callbacks. The callback is never run (because the function does not take a callback), so you see no output. But fs.readdirSync() does in fact execute.
fs.readdirSync() simply returns its value, which may make the program easier to read, but also means the call will block; that may be OK depending on what your program does and how it is used.
var resultsArray = fs.readdirSync(datafolder);
(You may want to wrap it in a try/catch for error handling.)
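Applied to the question's listFile, that looks like this (a sketch):
function listFile() {
    console.log(`File(s) on ${datafolder}`);
    try {
        // readdirSync returns the file names directly; no callback involved
        var files = fs.readdirSync(datafolder);
        files.forEach(filename => {
            console.log(filename);
        });
    } catch (err) {
        console.log(err);
    }
}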
I am working on a web application that makes use of a file tree. The frontend JavaScript performs an ajax request to my Node.js server which calls my browse2 exported function. This function is then responsible for supplying the correct path to my function, getFolderContents(), that recursively builds the file system hierarchy object structure.
My issue is that I am currently doing things synchronously. Having done research into the inner workings of Node.js, it seems as though I should avoid synchronous operations at all costs. As such, I wanted to convert my code to work asynchronously. However, I couldn't get it working and all of my solutions were convoluted.
I have tried managing the flow using the "async" package. I had no luck with figuring that out. I tried implementing my own system of counters/loops/callbacks to determine when processes had finished executing. Ultimately, I suppose I can't wrap my mind around asynchronous execution flow.
I would like to ask two questions:
1. In this case, would it be detrimental to perform this request synchronously instead of asynchronously?
2. If yes to the first question, how should I go about converting this code to be asynchronous?
Note: When I tried to do things asynchronously, I used each synchronous function's asynchronous counterpart.
Below is my synchronous (working) code:
var path = require('path');
var fs = require('fs');
exports.browse2 = function(request, response) {
var tree = getFolderContents('C:\\Users\\AccountName\\folder1\\folder2\\folder3\\test\\');
response.send(tree);
};
function getFolderContents(route) {
var branch = {};
branch.title = path.basename(route);
branch.folder = true;
branch.children = [];
var files = fs.readdirSync(route);
var size = files.length;
for (var i = 0; i < size; i++) {
var file = files[i];
var concatPath = path.join(route, file);
if (fs.lstatSync(concatPath).isDirectory())
branch.children.push(getFolderContents(concatPath));
else
branch.children.push({
"title" : path.basename(file),
"path" : file
});
}
return branch;
}
I appreciate all input!
Edit:
Added asynchronous code attempt. Not fully working. Only a part of the tree is received.
exports.browse2 = function(request, response) {
getFolderContents(
'C:\\Users\\AccountName\\folder1\\folder2\\folder3\\test\\',
function(tree) {
response.send(tree);
});
};
function getFolderContents(route, callback) {
var branch = {};
branch.title = path.basename(route);
branch.folder = true;
branch.children = [];
fs.readdir(route, function(err, files) {
files.forEach(function(file) {
var concatPath = path.join(route, file);
fs.lstat(concatPath, function(err, stats) {
if (stats.isDirectory())
branch.children.push(getFolderContents(concatPath, callback));
else
branch.children.push({
"title" : path.basename(file),
"path" : file
});
callback(branch);
});
});
});
}
The basic problem you're having is that when you use asynchronous calls, you can't just assign things to the return of the function. The entire point of async is that the function won't wait. So for example:
function get_data(a) {
var data = some_async_call(a);
//at this point, data is undefined because execution won't wait on the calls to finish
data.do_something(); // this breaks because of the above
}
So instead, you pass an anonymous function, called a callback, to the asynchronous function, and the asynchronous function invokes it once the operation actually completes. The above example would become this:
function get_data(a) {
some_async_call(a, function(data) {
data.do_something();
});
}
function some_async_call(variable, callback) {
call_async({
data: variable,
success: callback
});
}
And in your case that would look like this:
exports.browse2 = function(request, response) {
getFolderContents('C:\\Users\\AccountName\\folder1\\folder2\\folder3\\test\\', function(tree) {
response.send(tree);
});
};
function getFolderContents(route, callback) {
var branch = {};
branch.title = path.basename(route);
...
callback(branch);
}
If you're familiar with setTimeout, this is how that works: the design pattern is to pass an anonymous function that does the work, and that function then executes once the data/information is actually available.
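For example, with setTimeout the callback runs later, after the surrounding code has already finished:
setTimeout(function() {
    console.log("second: runs ~1 second later, once the timer fires");
}, 1000);
console.log("first: execution does not wait for the timeout");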
I managed to get it working. Here are my answers to my own questions:
It is better to perform the tasks asynchronously, because doing them synchronously would block the application from responding to other users' requests until the current one has finished.
The way to convert the synchronous code to asynchronous code is to use a parallel loop. The code for my particular case is this:
var path = require('path');
var fs = require('fs');
exports.browse2 = function(request, response) {
getFolderContents(
'C:\\Users\\AccountName\\folder1\\folder2\\folder3\\test\\',
function(err, tree) {
if (err)
throw err;
response.send(tree);
});
};
function getFolderContents(route, callback) {
var branch = {};
branch.title = path.basename(route);
branch.folder = true;
branch.children = [];
fs.readdir(route, function(err, files) {
if (err)
return callback(err);
var pending = files.length;
if (!pending)
return callback(null, branch);
files.forEach(function(file) {
var concatPath = path.join(route, file);
fs.lstat(concatPath, function(err, stats) {
if (stats && stats.isDirectory()) {
getFolderContents(concatPath, function(err, res) {
branch.children.push(res);
if (!--pending)
callback(null, branch);
});
} else {
branch.children.push({
"title" : path.basename(file),
"path" : file
});
if (!--pending)
callback(null, branch);
}
});
});
});
}
Thanks to user "chjj" for his response to a similar question in this thread: node.js fs.readdir recursive directory search
And thanks to user "Dan Smolinske" for directing me to the thread.