I'm a Node.js newbie trying to send back data to the browser by accessing a function load_blocks() in an external file I wrote and calling it with res.send().
I have two simple files:
The first one is the typical app.js file:
// Express entry point: serves the blockchain dump on the root route.
const express = require('express');
const blockchain = require('./load_blockchain.js'); // import my file
const app = express();

// Render whatever load_blocks() hands back inside an <h3>.
app.get('/', (req, res) => {
  res.send(`<h3>${blockchain.load_blocks()}</h3>`);
});

app.listen(3000, () => {
  console.log('Example app listening on port 3000!');
});
load_blockchain.js
This file has a function load_blocks() which is meant to read the lines of a file and then return them as text. But when I load my page, the <h3> tag reads undefined
var fs = require('fs');
var readline = require('readline');
module.exports = {
load_blocks: function (){
//Load The File
var return_string = "";
var rd = readline.createInterface({
input:fs.createReadStream('/home/blockchain_data/1.dat$
output: process.stdout,
console: true
});
rd.on('line', function(line){
console.log(line);
return_string += line;
});
return return_string;
}
};
Shouldn't the h3 tag read the data that is in the file I passed now? What is going wrong?
load_blocks() is asynchronous. It does not return the finished string because the readline code inside that function is asynchronous and has not yet finished when load_blocks() returns, so return_string does not yet have the value you want when you return.
This is a classic asynchronous programming problem in Javascript. Your load_blocks function needs to either return a promise or call a callback when it finishes its work and the caller has to use that returned promise or callback to obtain the final result.
For a lot more info on returning an asynchronous value, see the various options offered here: How do I return the response from an asynchronous call?
You could change the code to something like this:
// Same handler, written with async/await: errors thrown by the awaited
// promise OR by res.send() land in the catch, exactly as the
// .then(...).catch(...) chain behaves.
app.get('/', async function (req, res) {
    try {
        const val = await blockchain.load_blocks();
        res.send('<h3>' + val + '</h3>');
    } catch (err) {
        console.log(err);
        res.status(500).end();
    }
});
And, then change blockchain.load_blocks() to return a promise that resolves when you have the final value.
The code you show for load_blocks appears to just be reading an entire file. I don't understand any reason that you are reading it line by line. If you just want to read a whole file, you can just use fs.readFile() to read the whole file.
module.exports = {
load_blocks: function() {
return new Promise((resolve, reject) => {
fs.readFile('/home/blockchain_data/1.dat$', function(err, data) {
if (err) {
reject(err);
} else {
resolve(data);
}
});
});
}
};
Related
I have a file that has around 1000 lines of data, in which I am appending 'FFFF' to a particular line. The append works fine and the file is changed. But when I re-read the file immediately after appending, the changes do not appear — it shows the old data instead of the new changes.
Below is the code:
// QUESTION CODE: appends 'FFFF' to a line of a text file, then immediately
// re-reads the file — the re-read races with the asynchronous write below.
const fs = require('fs');
const parseTxt = async (txtFile) => {
// NOTE(review): plain `fs` has no readFileAsync — this presumably relies on
// a promisified fs (e.g. bluebird's promisifyAll); confirm how fs is set up.
const data = await fs.readFileAsync(txtFile);
const str = data.toString('utf8');
const lines = str.split('\r\n');
var ff_string = 'FFFF';
// `linedata` and `line_len` come from other functions not shown here (per
// the question text): insert 'FFFF' two characters before the line's end.
var append_FF = linedata.substring(0, line_len - 2) + ff_string + linedata.substring(line_len - 2);
// Fire-and-forget: replace_line reads/writes asynchronously and returns
// before the file has actually been updated.
replace_line(linedata, append_FF, txtFile);
/* Re-Read the File with Changed/Appended data FF */
// Runs before the write above completes, so it sees the OLD contents.
// Also: re_read_file has no return statement, so re_data is undefined
// and re_data.toString() below will throw.
var re_data = re_read_file(txtFile);
const re_str = re_data.toString('utf8');
const re_lines = re_str.split('\r\n');
console.log('Re Lines Data:=========',re_str);
}
parseTxt('file.txt').then(() => {
console.log('parseTxt===');
})
// Replaces `linedata` with `append_FF` inside txtFile. Both the read and
// the write are asynchronous: this function returns immediately, and the
// caller has no way to know when (or whether) the write finished.
function replace_line(linedata, append_FF, txtFile){
fs.readFile(txtFile, 'utf8', function(err,data) {
// NOTE(review): `err` from readFile is ignored — on failure `data` is
// undefined and data.replace() throws inside this callback.
var formatted = data.replace(linedata, append_FF);
fs.writeFile(txtFile, formatted, 'utf8', function (err) {
if (err) return console.log(err);
});
});
return;
}
// Synchronously re-reads `txtFile` and returns its contents as a Buffer.
// On failure the error is logged and undefined is returned.
function re_read_file(txtFile){
try {
const data = fs.readFileSync(txtFile)
console.log('Re-readed File data',data);
// FIX: the original logged `data` but never returned it, so the caller's
// `re_data` was always undefined and re_data.toString() threw.
return data;
} catch (err) {
console.error(err)
}
}
Variables 'linedata' and 'line_len' I am getting it from the different functions which I have not included in this as it is a huge function.
The reason for your issues is that writing to the file is asynchronous. Here's what happens:
// Annotated walkthrough of the asker's code — the (1)/(2) markers tie the
// call sites to the function definitions below.
// you do some magic and create a string that you'd like to write to a file
replace_line(linedata, append_FF, txtFile); // you call "write this to a file (1)
/* Re-Read the File with Changed/Appended data FF */
// you've called `replace_line`, and you get here
// but since reading/writing to the file is asynchronous, it's working somewhere
// and your code continues to execute
// you read the file again, happens immediately! check (2)
var re_data = re_read_file(txtFile);
const re_str = re_data.toString('utf8');
const re_lines = re_str.split('\r\n');
// you print the result
console.log('Re Lines Data:=========',re_str);
// you execute this method from (1)
function replace_line(linedata, append_FF, txtFile) {
// you start reading the file, but it doesn't happen immediately
// you provide a callback to execute when read is done
fs.readFile(txtFile, 'utf8', function(err,data) {
var formatted = data.replace(linedata, append_FF);
// same goes here - you start writing the file, but it's asynchronous
// you pass a callback to execute when it's done
fs.writeFile(txtFile, formatted, 'utf8', function (err) {
if (err) return console.log(err);
});
});
return;
}
function re_read_file(txtFile){ // (2)
try {
// here you use read file SYNC, so it happens immediately!
const data = fs.readFileSync(txtFile)
console.log('Re-readed File data',data);
// NOTE(review): `data` is never returned, so the caller's `re_data`
// is undefined — a second bug, separate from the async ordering.
} catch (err) {
console.error(err)
}
}
So the overall flow is like this:
You do some string manipulation
You try to write that to a file
it makes asynchronous read of the file
it makes asynchronous write of the new data
You read the file synchronously (read immediately)
You write the result of that read
-> at some point the asynchronous reading/writing to the file is done
You should either use sync read/write, or you should somehow wait for the updating of the document to be done and then re-read it again (Promises, await/async, callbacks - whatever you like).
I am having a weird issue writing to a file in NodeJS.
I have this code in my FileHandler.js:
// Writes each element of `arr` to <module dir>/test.txt, one per line,
// via a write stream. Logs stream errors and "finished" on completion.
module.exports.writeFile = function (arr) {
var fs = require('fs');
var path = require('path');
console.log(arr);
// FIX: path.join instead of the hard-coded '\\' separator — the original
// string only formed a valid sub-path on Windows; on POSIX it created a
// file literally named "...\test.txt" in the working directory.
var file = fs.createWriteStream(path.join(__dirname, 'test.txt'), {encoding: 'utf8'});
file.on('error', function (err) {
console.log(err); });
file.on("finish", function() {
console.log("finished");
});
arr.forEach(function (item) {
file.write(item+"\n");
});
// Flushes buffered writes and eventually emits 'finish'.
file.end();
}
If I append
exports.writeFile(["1","2","3"])
To the end of this file and then run node FileHandler.js
The file is created correctly.
However, if I call the writeFile function from another .js file as:
// QUESTION CODE: calls FileHandler.writeFile from another module after an
// async data fetch; the asker reports the file is created but stays empty.
var R = require("r-script");
const dataHandler = require("./DataHandler");
const fileHandler = require("./FileHandler");
// NOTE(review): r-script launches an R process for apriori.R — whether R()
// blocks or runs async is not visible from this snippet; confirm, since it
// may interact with the write that never flushes. `out` is unused here.
var out = R(__dirname + "\\apriori.R");
exports.getRules = function () {
// The write happens inside the callback of an HTTP-backed data fetch
// (DataHandler uses the request module, per the question text).
dataHandler.getListOfPageVisitsBySession(1000781912582,1530781912582,function (result){
//ignored result variable
fileHandler.writeFile(["1","2","3"]);
})
}
and passing the exact same array to the function it doesn't write anything (but the file is created), neither fires err or finish event.
If it matters, the DataHandler method contains a request module and a GET to another API.
Any clue of the problem?
Thanks in advance
I'm a beginner in non-blocking environment, such NodeJS. Below is my simple code, which list all files in directory :
// Interactive prompt: typing "1" lists the files in ./datafolder,
// typing "right" closes the interface and exits.
var readline = require('readline');
var rl = readline.createInterface(process.stdin, process.stdout);
var fs = require('fs');
var datafolder = './datafolder';
var datafoldername = 'datafolder'; // unused in this snippet
rl.setPrompt('Option> ');
rl.prompt();
rl.on('line', function(line) {
if (line === "right") rl.close();
if (line == '1') {
listFile();
}
rl.prompt();
}).on('close', function() {
process.exit(0);
});
function listFile() {
console.log(`File(s) on ${datafolder}`);
// BUG (the subject of this question): fs.readdirSync is synchronous and
// takes no callback — the extra function argument is silently ignored and
// never invoked, so nothing is printed. The directory IS read; the
// returned array of names is simply discarded here.
fs.readdirSync(datafolder, (err, files) => {
if (err) {
console.log(err);
} else {
files.forEach(filename => {
console.log(filename);
});
}
});
}
If the user presses 1, it is supposed to execute the method listFile and show all the files inside.
My question is: why is fs.readdirSync not executed? The program works if I do it with readdir(), but that will mess up the output to the user.
You are passing a callback to fs.readdirSync() but *Sync() functions don't take callbacks. The callback is never run (because the function does not take a callback), so you see no output. But fs.readdirSync() does in fact execute.
fs.readdirSync() simply returns its value (which may make the program easier to read, but also means the call will block, which may be OK depending on what your program does and how it is used).
// Synchronous form: the directory listing is returned directly (errors are thrown).
var resultsArray = fs.readdirSync(datafolder);
(You may want to wrap it in a try/catch for error handling.)
I am hosting Node as a server locally to interact with hardware.
My web application then makes a request to Node to execute 2 executables; whichever executable returns data first will have its response sent back to my web application.
By doing so, it causes the other executable to still be running in the background waiting for response from the hardware.
I am unable to kill off that process either, I have to either manually stop Node and run it again or task kill that executable.
My code are below:
Node.JS
// QUESTION CODE: races two executables; the first one to finish answers
// the HTTP request and the loser should be killed (but isn't, in practice).
var exec = require('child_process').exec;
app.get('/Page', function (req, res) {
var Page = function () {
// exec buffers all output and invokes its callback only after the child
// EXITS — so the kill() calls below can only ever run once one child
// has already finished on its own.
var a = exec('F:/Example1.exe', function (err, data) {
console.log(err);
console.log(data);
// Referencing `b` here is safe: this callback runs asynchronously,
// after both variables have been assigned.
b.kill();
if (!res.headersSent) {
res.send(data);
}
});
var b = exec('F:/Example2.exe', function (err, data) {
console.log(err);
console.log(data);
// kill() sends SIGTERM by default. NOTE(review): the F:/ paths suggest
// Windows, where signal emulation may not terminate the child — which
// matches the asker's reported problem; confirm.
a.kill();
if (!res.headersSent) {
res.send(data);
}
});
}
Page();
});
Apparently, even with the kill command, I am still unable to terminate the process.
I should let you guys know, I am also using AngularJS for my front-end.
I have sourced online for solution, however Google's results are all slowly turning purple.
Thank you so much for those who post their solution, please explain to me the details of the solution as well. I would really want to learn more.
Thank you so much.
The problem with exec is that it will wait until the program has finished executing before its callback runs.
You can use spawn instead, then you have control over the process as it's running.
var spawn = require('child_process').spawn;

app.get('/Page', function (req, res) {
    // Race both executables; reply with whichever finishes first and
    // kill the other one.
    function Page() {
        var procA = spawn('F:/Example1.exe');
        var procB = spawn('F:/Example2.exe');
        var outA = '';
        var outB = '';

        // Accumulate stdout incrementally as each child produces it.
        procA.stdout.on('data', function (chunk) {
            outA += chunk.toString();
        });
        procB.stdout.on('data', function (chunk) {
            outB += chunk.toString();
        });

        // Whichever child closes first wins: terminate the loser and
        // send the winner's output (guarding against a double send).
        procA.on('close', function () {
            procB.kill();
            if (!res.headersSent) {
                res.send(outA);
            }
        });
        procB.on('close', function () {
            procA.kill();
            if (!res.headersSent) {
                res.send(outB);
            }
        });
    }
    Page();
});
I have never used exec in Node.js, but because of JavaScript scoping I think Page is executed on every request, so the a and b processes from previous requests are no longer around.
You could store references to the processes globally so that each request has access to the processes, (real incomplete rough example):
// Rough sketch (explicitly incomplete, per the answer): keep the child
// process handles at module scope so every request sees the same `a`/`b`
// instead of losing the references from previous requests.
var exec = require('child_process').exec;
var a = null;
var b = null;
app.get('/Page', function (req, res) {
var Page = function () {
// `a` persists across requests, so a still-running process from an
// earlier request can be detected (and killed/reused) here.
if (a) {
// a is already a running process? Do something?
} else {
// start a ?
a = exec('your command');
}
if (b) {
// b is already running? Do something?
}
}
Page();
});
I am new to Node.js. All I did here is get the file list in a folder... Everything in Node.js seems to be async, but I want my function to work in a synchronous way.
So, I do as follow.
// QUESTION CODE: tries to collect file paths synchronously with the
// (third-party) `walk` module, which is event-driven and asynchronous.
function getFiles() {
var file = [];
var walker = walk.walk('./files');
// 'file' events fire later, after getFiles() has already returned.
walker.on('file', function (root, stat, next) {
file.push(root + '/' + stat.name);
next();
})
// 'end' fires last; by then `file` is fully populated, which is why the
// console.log below shows the complete list even though the caller only
// ever saw an empty array.
walker.on('end', function () {
console.log(JSON.stringify(file));
})
// Returns immediately — `file` is still empty at this point.
return file;}
It worked as I expected :
["./files/1.mp3","./files/2.mp3","./files/3.mp3","./files/4.mp3","./files/5.mp3","./files/6.mp3","./files/7.mp3","./files/8.mp3"]
but when I assigned that function to variable
// `list` is the array returned synchronously by getFiles(); the walker
// events have not fired yet, so it is still empty here.
var list = getFiles();
response.writeHead(200, {'Content-type':'application/json'});
// Serializes the still-empty array — the response body is "[]".
response.end(JSON.stringify(list));
It always returned nothing. I think that getFiles() runs in another thread, so it cannot receive the value of the data. Thank you for reading.
I can shed some light on the behavior you are experiencing by outlining the flow of the application as it is run:
call to getFiles
declare files array and walker
bind walker event "file" and "end" to callbacks
return files array
walker file event fires
walker end event fires
as you can see the events are firing out of band with the method call. To deal with this the common node.js approach is to setup your code something like the following:
function getFiles(callback) {
    // Collect the file paths as the walker emits them; hand the finished
    // list to the caller only once the walk is complete.
    var collected = [];
    var walker = walk.walk('./files');

    walker.on('file', function (root, stat, next) {
        collected.push(root + '/' + stat.name);
        next();
    });

    walker.on('end', function () {
        callback(collected);
    });
}
now when you go to execute this method you would do something like this:
// Respond only once getFiles delivers the completed list.
getFiles((list) => {
    response.writeHead(200, { 'Content-type': 'application/json' });
    response.end(JSON.stringify(list));
});
obviously this is a little unsightly since the controller now has to create a callback scenario and the getFiles method need to execute that callback in course. Another approach is to use the Promises concept, which I will leave to the discovery of the reader with the following link: https://github.com/kriskowal/q
Async functions return before they are ready.
You can't return data that's not there in your own code.
Async functions often take a callback argument that is executed when they are ready.
Your own code could ask for its own callbacks.
`
function getFiles(callBack) {
    // Gather every path the walker emits, then deliver the completed
    // list through the caller-supplied callback once the walk ends.
    var paths = [];
    var walker = walk.walk('./files');

    walker.on('file', function (root, stat, next) {
        paths.push(root + '/' + stat.name);
        next();
    });

    walker.on('end', function () {
        console.log(JSON.stringify(paths));
        callBack(paths);
    });
}

// assuming server environment, express, connect, etc...
app.get('/list', (req, res) => {
    getFiles((D) => { res.json(D); });
});
`