Javascript/Node/JSON question, why is this not working?

I thought I understood what I was doing until this wasn't going in order. I am running this through Node, not through a browser.
It goes to the prompt at the end of the while loop first. I don't know why.
const fs = require('fs');
const prompt = require('prompt-sync')();

function jsonReader(filepath, cb) {
  fs.readFile(filepath, 'utf-8', (err, fileData) => {
    if (err) { return cb && cb(err); }
    try {
      const object = JSON.parse(fileData);
      console.log(object);
      return cb && cb(null, object);
    } catch (err) {
      return cb && cb(err);
    }
  });
}
var exit = 0;
do {
  jsonReader('./customer.json', (err, customer) => {
    if (err) {
      console.log('Error reading file:', err);
      return;
    }
    //customer.order_count +=1
    const note = prompt("Enter a note for the JSON file: ");
    customer.note = note;
    fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
      if (err) {
        console.log('Error writing file:', err);
      } else {
        console.log('File updated');
      }
    });
  });
  exit = prompt("Do you want to exit?");
} while (exit != 'y');

There are several things wrong with your code. The prompt problem you notice is just the beginning.
The way your code executes is like this:
// happens now
do {
  // happens now
  // ...
  jsonReader('./customer.json', (err, customer) => {
    // happens after readFile
    fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
      // happens after writeFile
      // ...
    });
    // happens after readFile
    // ...
  });
  // happens now
  exit = prompt("Do you want to exit?");
} while (exit != 'y');
// happens now
The time sequence is as follows:
1. Things that happen now
2. Things that happen after readFile
3. Things that happen after writeFile
As you can see, you are outputting the prompt before either readFile or writeFile has run.
However, there is another problem: you have an infinite while loop. In node.js - and in fact in the browser - I/O only happens when there is no javascript left to execute, in other words when the interpreter is idle. The while loop prevents the script from ever reaching the end, therefore the interpreter is never idle.
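You can see this with a tiny demo (my example, not from the question) - the timer callback below can never run because the loop keeps the interpreter busy forever:

setTimeout(() => console.log('this never prints'), 0);
while (true) {} // the event loop never goes idle, so the timer callback is never dispatched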
For your script, what will happen after you fix the prompt issue is:
1. Things that happen now
2. loop into things that happen now
3. loop into things that happen now
4. loop into things that happen now
5. loop into things that happen now
..
∞. loop into things that happen now
Thus readFile and writeFile never execute. You need to replace the while loop either with a recursive asynchronous call, with setTimeout() or setInterval(), or with a while loop that uses async/await.
Here's an implementation with minimal changes to your code:
function doIt() { // <----------- replace the do..while loop
  // ...
  jsonReader('./customer.json', (err, customer) => {
    // ...
    fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
      // ...
      var exit = prompt("Do you want to exit?");
      if (exit !== 'y') {
        doIt(); // <-------------- repeat the process again
      }
    });
  });
}
doIt(); // <--------- don't forget to begin the whole thing
This is admittedly not as easy to read as using async/await, but I leave that implementation as homework. Besides, it requires many more changes to your existing code.
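For reference, a minimal async/await sketch of the same loop (my sketch, assuming the same prompt-sync setup and Node's fs.promises API) might look like this:

const fs = require('fs').promises;
const prompt = require('prompt-sync')();

async function main() {
  let exit;
  do {
    // await makes the read finish before we prompt, restoring the expected order
    const customer = JSON.parse(await fs.readFile('./customer.json', 'utf-8'));
    customer.note = prompt("Enter a note for the JSON file: ");
    await fs.writeFile('./customer.json', JSON.stringify(customer, null, 2));
    console.log('File updated');
    exit = prompt("Do you want to exit?");
  } while (exit !== 'y');
}

main().catch(err => console.log('Error:', err));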

It is because fs.readFile is asynchronous, so your second prompt executes before fs.readFile has finished. You may want to use async functions and await, or put the prompt at the end of the callback for jsonReader.

Related

Recursion & Async function does not respect/wait for promise

Situation:
I have a function, load_content, which runs at the start of my code:
async function load_content() {
  console.log("I GOT HERE 1");
  await load_js_files("./cmds/", "commands");
  console.log("I GOT HERE 2");
  await load_js_files("./events/", "events");
}
This function calls load_js_files twice. load_js_files is a recursive function which calls itself for each directory found in the specified directory, 'requiring' each file found and doing different things depending on whether type = commands or type = events.
The function load_js_files looks like:
function load_js_files(dir, type) {
  fs.readdir(dir, (e, files) => {
    if (e) console.error(e);
    let jsfiles = files.filter(f => f.split(".").pop() === "js");
    if (jsfiles.length <= 0) {
      console.log(`No commands to load from ${dir}!`);
      return;
    }
    for (const file of files) {
      if (fs.lstatSync(dir + file).isDirectory()) {
        load_js_files(dir + file + "/", type);
      }
    }
    if (type === "commands") {
      console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} commands from ${dir} ...`);
      jsfiles.forEach((f, i) => {
        let command = require(`${dir}${f}`);
        console.log(`${i + 1}: ${f} loaded!`);
        bot.commands.set(command.info.name, command);
      });
    } else if (type === "events") {
      console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} events from ${dir} ...`);
      jsfiles.forEach((f, i) => {
        let event = require(`${dir}${f}`);
        console.log(`${i + 1}: ${f} loaded!`);
        let commands = [];
        for (const cmd of bot.commands) {
          if (cmd[1].data) commands.push(cmd[1].data.toJSON());
        }
        if (event.once) {
          bot.once(event.name, (...args) => event.execute(...args, commands));
        } else {
          bot.on(event.name, (...args) => event.execute(...args, commands));
        }
      });
    } else {
      console.log(log_Red, "FUNCTION 'load_js_files' CALLED WITH INCORRECT 'type'.");
    }
  });
  return new Promise((resolve, reject) => resolve("DONE"));
}
I would expect in load_content that the events occur in this order:
1. console logs I GOT HERE 1
2. load_js_files occurs with commands parameter (granted I haven't solved recursion promises yet, it should run once at least)
3. console logs I GOT HERE 2
4. load_js_files occurs again but with events parameter.
Issue:
Upon running load_js_files with type = events, the variable bot.commands is undefined. bot.commands is assigned its values in step 2 above, during the first load_js_files call.
From what I can debug, the initial function load_content does not respect (my understanding of) async/await, so I assume I am doing something incorrectly with promises.
In my console, however, the two console.log statements execute immediately, before the function is finished:
I GOT HERE 1
I GOT HERE 2
Loading 3 commands from ./cmds/ ...
1: createtables.js loaded!
2: ping.js loaded!
3: sqltest.js loaded!
Loading 1 events from ./events/ ...
1: ready.js loaded!
Loading 1 commands from ./cmds/settings/ ...
1: set.js loaded!
What I've tried:
I've tried the code noted above; additionally, I have tried wrapping the second run of load_js_files in a .then(), I've tried a callback function, and I've also tried nesting Promises, but I run into issues as load_js_files calls itself recursively.
I'm having a hard time understanding if these Promises are going to work with this type of recursion (all recursions of load_js_files must finish before the second load_js_files is called within load_content).
Bonus points:
Bonus points if you can help me understand promises within a recursive function. I've read
https://blog.scottlogic.com/2017/09/14/asynchronous-recursion.html
https://www.bennadel.com/blog/3201-exploring-recursive-promises-in-javascript.htm
and
https://medium.com/#wrj111/recursive-promises-in-nodejs-769d0e4c0cf9
But it's not quite getting through.
Attempt at implementing David's answer:
This results in an error, I believe related to fs.readdir(dir) requiring a callback.
Error: TypeError [ERR_INVALID_CALLBACK]: Callback must be a function. Received undefined
async function load_js_files_async_test(dir, type) {
  const files = fs.readdir(dir); // the callback-based fs.readdir is called without a callback here, hence the error above
  for (const file of files) {
    const file_info = await lstat(dir + file);
    if (file_info.isDirectory()) {
      await load_js_files_async_test(dir + file + "/", type);
    } else {
      console.log("Thanks David!");
    }
  }
}
does not respect/wait for promise
Sure it does. This is the Promise it's awaiting:
new Promise((resolve,reject) => resolve("DONE"))
That Promise of course finishes very quickly (and synchronously) and then the code moves on to the next task. But what isn't being awaited anywhere in the code is this asynchronous operation:
fs.readdir(dir, (e, files) => {
  //...
});
This call to readdir is an asynchronous operation, and as such that callback function won't be invoked until after the current thread finishes everything it's doing - which includes the "promises" being awaited (which don't do anything asynchronous), as well as the console.log statements and the next invocation of load_js_files.
Fortunately, Node provides Promise-based versions of these operations as well.
Simplifying the original code a bit, imagine instead this structure:
// assuming the promise-based fs API: const { readdir, lstat } = require('fs/promises');
async function load_js_files(dir, type) {
  const files = await readdir(dir);
  for (const file of files) {
    const fileInfo = await lstat(dir + file);
    if (fileInfo.isDirectory()) {
      await load_js_files(dir + file + "/", type);
    }
  }
  // etc.
}
As you can see, this "reads" a lot more like synchronous operations. The idea here is to remove the usage of callback functions, which are essentially causing you confusion and making "awaiting" much more difficult. Now all of the logic in the load_js_files function is directly in the load_js_files function, not in other anonymous callback functions. And that logic proceeds, step by step, awaiting each asynchronous operation.
Then you can await the calls to load_js_files as expected.
The function fs.readdir is non-blocking, which means the code you inserted in its callback is not being 'awaited'. You could try fs.readdirSync instead and remove your return new Promise().
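A minimal sketch of that synchronous variant (keeping the question's names; the loading logic is elided):

const fs = require('fs');

function load_js_files(dir, type) {
  const files = fs.readdirSync(dir); // blocks until the directory listing is available
  for (const file of files) {
    if (fs.lstatSync(dir + file).isDirectory()) {
      load_js_files(dir + file + "/", type); // the recursive call completes before we move on
    }
  }
  // ...load the commands/events from the js files here, as in the question...
}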

How to assign a variable in callback function in a callback function in javascript

So I have found this question which seems pretty similar, but I do not understand the answer at all. I tried to implement it, but I do not recognize the patterns of the answer in my code: similar question
Now here is my problem, I have this piece of code :
var fs = require('fs');
var index = JSON.parse(fs.readFileSync('../data/7XXX7/index.json', 'utf8'));
window = {};
var indicators = require('./indicators');
var parser = new window.patient.Indicator('tes', 'test');
var i = 0;

function create_indicators() {
  var result = [];
  fs.readdirSync('../data/7XXX7/files/').forEach(file => {
    fs.readFile('../data/7XXX7/files/' + file, 'utf8', function (err, data) {
      if (err)
        throw err;
      let $ = {};
      $.poids = parser.poids(data);
      $.taille = parser.taille(data);
      $.temperature = parser.temperature(data);
      $.tension = parser.tension(data);
      $.pouls = parser.pouls(data);
      $.ps = parser.ps(data);
      $.saturation = parser.saturation(data);
      for (var j in index.files) {
        if (index.files[j].name === file) {
          $.id = index.files[j].name;
          $.date = index.files[j].date;
          $.name = index.files[j].IntituleSession;
          break;
        }
      }
      if ($.poids || $.taille || $.temperature || $.tension || $.pouls || $.ps || $.saturation) {
        result.push($);
        console.log(result); // print the actual state of result
        // console.log(i); prints 0 then 1 then ...
        i++;
      }
    });
    console.log(i); // prints 0
  });
  console.log(result); // prints []
  return result;
}

let result = create_indicators();
console.log(result); // prints []
And it displays:
[]
Why does the callback function in readFile have its own variables? Is it because it's asynchronous? But when I use readFileSync it doesn't work either.
How do I make result get all the values I put into it? When I console.log result after result.push($); it works, so it's not my parser; i is also properly incremented each time.
Your code doesn't wait for the files to get read and have the result pushed to result before moving on. Where you're doing asynchronous operations on items in an array, I would recommend using promises and using Promise.all() to wait for each file to get read and processed before you try using the result. You could do something like this:
function create_indicators() {
  const result = fs.readdirSync('../data/7XXX7/files/').map(file =>
    new Promise((resolve, reject) => {
      fs.readFile('../data/7XXX7/files/' + file, 'utf8', (err, data) => {
        if (err) return reject(err); // return so we don't also resolve below
        // do whatever
        if ($.poids || /* ... */ $.saturation) {
          // ...
          resolve($); // instead of `result.push($);`
        } else {
          resolve(); // can't reject for `Promise.all()` to work
        }
      });
    }));
  return Promise.all(result).then(items => items.filter(item => item));
}

create_indicators().then(indicators => {
  // do something with your list of indicators
}).catch(err => {
  // handle error
});
It creates a promise for each file in your directory that resolves when the file has been processed. Each promise resolves with the item if there is one, or with nothing if your condition is not met, and rejects if there's an error (the promise equivalent of throw). Since you only want the items that meet your condition, you can then do a filter on the result of Promise.all() to get rid of any undefined entries in the array (you could also drop the condition check in the fs.readFile callback and do it in the filter instead, if you'd like). This returns a promise that resolves with your filtered list.
Here's your problem:
fs.readFileSync('../data/7XXX7/files/' + file, 'utf8', function (err, data) {
readFileSync doesn't take a callback as an argument. It returns the data or raises an exception. It is synchronous (as the "Sync" in the name suggests) and you're using it as if it were asynchronous.
See the docs:
https://nodejs.org/api/fs.html
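For contrast, a minimal sketch of the synchronous form (the file path here is a hypothetical stand-in):

const fs = require('fs');

try {
  // readFileSync returns the contents directly; there is no callback argument
  const data = fs.readFileSync('../data/7XXX7/files/somefile.txt', 'utf8');
  // ...parse data here...
} catch (err) {
  console.error(err); // errors are thrown, not passed as `err` to a callback
}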
readFileSync doesn't take a callback; it is synchronous.
Use fs.readdir to get the list of files you want to read. See How do you get a list of the names of all files present in a directory in Node.js?
You need to understand how callbacks work. Since readFileSync doesn't take a callback, it might be helpful to explain how callbacks work in the asynchronous fs.readFile and fs.readdir.
When you are doing an asynchronous operation, you don't know when it is going to finish, so you pass in a function (the callback) as a parameter, and it is run at the end of the operation.
fs.readFile('/etc/passwd', function (err, data) {
  if (err) throw err;
  console.log(data);
});
fs.readFile in the above code will run the function (err, data) when it finishes executing, passing in the data as the second parameter. If an error occurs, it will pass in the error as the first parameter.
Your function can likewise accept a callback defining what to do when the parsing is over. The callback will need to take an error and a result (if you need the error).
Read:
http://fredkschott.com/post/2014/03/understanding-error-first-callbacks-in-node-js/
So your create_indicators function should take a callback function.
var fs = require("fs");

function create_indicators(folderPath, callback) {
  let result = [];
  fs.readdir(folderPath, (err, files) => {
    if (err)
      callback(err, null); // pass the error to callback if there is any
    else {
      files.forEach((file, index, filesArray) => {
        fs.readFile(folderPath + "/" + file, (err, data) => { // join the folder path and file name
          if (err)
            callback(err, null); // pass the error to callback if there is any
          else {
            //.....parse....
            result.push(data);
            // pass data to callback function when it is the last result
            if (result.length == filesArray.length)
              callback(null, result);
          }
        });
      });
    }
  });
}
When you call it, pass in what you want to do with the result and error as a function.
create_indicators(".", function(err,result){
if (err)
console.error("Got error:", err);
else
console.log("Got result:", result);
//do what you want with the final result
})
Once you got the callback working, look into Promise which will make this procedure cleaner and easier. Read: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
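For instance, a rough sketch of the same flow with promises (my sketch, using Node's fs.promises API rather than the callback style above):

const fsp = require('fs').promises;

async function create_indicators(folderPath) {
  const files = await fsp.readdir(folderPath);
  // read every file in parallel and wait for all of them to finish
  const contents = await Promise.all(
    files.map(file => fsp.readFile(folderPath + "/" + file, 'utf8'))
  );
  return contents; // ...parse each entry here as needed...
}

create_indicators(".")
  .then(result => console.log("Got result:", result))
  .catch(err => console.error("Got error:", err));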

How to return from a looped asynchronous function with callback in Node

I am trying to write a function that:
1. Takes an array of URLs
2. Gets files from URLs in parallel (order's irrelevant)
3. Processes each file
4. Returns an object with the processed files
Furthermore, I don't need for errors in #2 or #3 to affect the rest of the execution in my application in any way - the app could continue even if all the requests or processing failed.
I know how to fire all the requests in a loop, then once I have all the data, fire the callback to process the files, by using this insertCollection pattern.
However, this is not efficient, as I shouldn't need to wait for ALL files to download before attempting to process them - I would like to process them as each download finishes.
So far I have this code:
const request = require('request');

const urlArray = [urlA, urlB, urlC];
const results = {};
let count = 0;
let processedResult;

const makeRequests = function (urls, callback) {
  for (let url of urls) {
    request(url, function(error, response, body) {
      if (error) {
        callback(error);
        return;
      }
      processedResult = callback(null, body);
      if (processedResult) {
        console.log(processedResult); // prints correctly!
        return processedResult;
      }
    });
  }
};

const processResult = function(error, file) {
  if (error) {
    console.log(error);
    results.errors.push(error);
  }
  const processedFile = file + `<!-- Hello, Dolly! ${count}-->`;
  results.processedFiles.push(processedFile);
  if (++count === urlArray.length) {
    return results;
  }
};

const finalResult = makeRequests(urlArray, processResult);
console.log(finalResult); // undefined;
In the last call to processResult I manage to send a return, and makeRequests captures it, but I'm failing to "rein it in" in finalResult after that.
My questions are:
1. Why is this not working? I can print a well-formed processedResult on the last iteration of makeRequests, but somehow I cannot return it back to the caller (finalResult).
2. How can this be solved, ideally "by hand", without promises or the help of libraries like async?
The makeRequests function returns undefined to finalResult because it is a synchronous function. Nothing stops the code from executing, so it reaches the end of the function and, because there is no return statement there, it returns undefined by default.
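Since the results only exist after the last response arrives, you have to hand them to a callback instead of returning them. A minimal "by hand" sketch (no promises or libraries; names follow the question, and the completion callback is my addition):

const makeRequests = function (urls, onDone) {
  const results = { errors: [], processedFiles: [] };
  let count = 0;
  for (const url of urls) {
    request(url, function (error, response, body) {
      if (error) {
        results.errors.push(error); // an error doesn't stop the other requests
      } else {
        results.processedFiles.push(body + `<!-- Hello, Dolly! ${count} -->`);
      }
      // when the last response (success or failure) comes in, deliver everything
      if (++count === urls.length) {
        onDone(results);
      }
    });
  }
};

makeRequests(urlArray, function (finalResult) {
  console.log(finalResult); // defined: this runs after all requests have completed
});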

Asynchronously Write Large Array of Objects to Redis with Node.js

I created a Node.js script that creates a large array of randomly generated test data and I want to write it to a Redis DB. I am using the redis client library and the async library. Initially, I tried executing a redisClient.hset(...) command within the for loop that generates my test data, but after some Googling, I learned the Redis method is asynchronous while the for loop is synchronous. After seeing some questions on StackOverflow, I can't get it to work the way I want.
I can write to Redis without a problem with a small array or larger, such as one with 100,000 items. However, it does not work well when I have an array of 5,000,000 items. I end up not having enough memory because the redis commands seem to be queueing up, but aren't executed until after async.each(...) is complete and the node process does not exit. How do I get the Redis client to actually execute the commands, as I call redisClient.hset(...)?
Here is a fragment of the code I am working with.
var redis = require('redis');
var async = require('async');

var redisClient = redis.createClient(6379, '192.168.1.150');
var testData = generateTestData();

async.each(testData, function(item, callback) {
  var someData = JSON.stringify(item.data);
  redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
    console.log("Item was persisted. Result: " + reply);
  });
  callback();
}, function(err) {
  if (err) {
    console.error(err);
  } else {
    console.info("Items have been persisted to Redis.");
  }
});
You could call eachLimit to ensure you are not executing too many redisClient.hset calls at the same time.
To avoid overflowing the call stack you could do setTimeout(callback, 0); instead of calling the callback directly.
edit:
Forget what I said about setTimeout. All you need to do is call the callback at the right place. Like so:
redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
  console.log("Item was persisted. Result: " + reply);
  callback();
});
You may still want to use eachLimit and try out which limit works best.
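For example, a sketch combining eachLimit with the corrected callback placement (the limit of 10 is just a starting point to tune):

async.eachLimit(testData, 10, function(item, callback) {
  var someData = JSON.stringify(item.data);
  redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
    callback(err); // completing here keeps at most 10 hset calls in flight
  });
}, function(err) {
  if (err) console.error(err);
  else console.info("Items have been persisted to Redis.");
});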
By the way - async.each is supposed to be used only on code that schedules the invocation of the callback in the javascript event queue (e.g. timer, network, etc.). Never use it on code that calls the callback immediately, as was the case in your original code.
edit:
You can implement your own eachLimit function that takes a generator as its first argument instead of an array. Then you write a generator function to create the test data. For that to work, node needs to be run with "node --harmony code.js".
function eachLimit(generator, limit, iterator, callback) {
  var isError = false, j;

  function startNextSetOfActions() {
    var elems = [];
    for (var i = 0; i < limit; i++) {
      j = generator.next();
      if (j.done) break;
      elems.push(j.value);
    }
    var activeActions = elems.length;
    if (activeActions === 0) {
      callback(null);
    }
    elems.forEach(function(elem) {
      iterator(elem, function(err) {
        if (isError) return;
        else if (err) {
          callback(err);
          isError = true;
          return;
        }
        activeActions--;
        if (activeActions === 0) startNextSetOfActions();
      });
    });
  }

  startNextSetOfActions();
}

function* testData() {
  while(...) {
    yield new Data(...);
  }
}

eachLimit(testData(), 10, function(item, callback) {
  var someData = JSON.stringify(item.data);
  redisClient.hset('item:' + item.key, 'hashKey', someData, function(err, reply) {
    if (err) callback(err);
    else {
      console.log("Item was persisted. Result: " + reply);
      callback();
    }
  });
}, function(err) {
  if (err) {
    console.error(err);
  } else {
    console.info("Items have been persisted to Redis.");
  }
});

Using Async waterfall in node.js

I have 2 functions that I'm running asynchronously. I'd like to write them using the waterfall model. The thing is, I don't know how.
Here is my code:
var fs = require('fs');

function updateJson(ticker, value) {
  //var stocksJson = JSON.parse(fs.readFileSync("stocktest.json"));
  fs.readFile('stocktest.json', function(error, file) {
    var stocksJson = JSON.parse(file);
    if (stocksJson[ticker] != null) {
      console.log(ticker + " price : " + stocksJson[ticker].price);
      console.log("changing the value...");
      stocksJson[ticker].price = value;
      console.log("Price after the change has been made -- " + stocksJson[ticker].price);
      console.log("printing the JSON.stringify");
      console.log(JSON.stringify(stocksJson, null, 4));
      fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function(err) {
        if (!err) {
          console.log("File successfully written");
        }
        if (err) {
          console.error(err);
        }
      }); // end of writeFile
    } else {
      console.log(ticker + " doesn't exist on the json");
    }
  });
} // end of updateJson
Any idea how I can write it using waterfall, so I'll be able to control this? Please show me some examples because I'm new to node.js.
First identify the steps and write them as asynchronous functions (taking a callback argument)
read the file
function readFile(readFileCallback) {
  fs.readFile('stocktest.json', function (error, file) {
    if (error) {
      readFileCallback(error);
    } else {
      readFileCallback(null, file);
    }
  });
}
process the file (I removed most of the console.log in the examples)
function processFile(file, processFileCallback) {
  var stocksJson = JSON.parse(file);
  if (stocksJson[ticker] != null) {
    stocksJson[ticker].price = value;
    fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function (error) {
      if (error) {
        processFileCallback(error);
      } else {
        console.log("File successfully written");
        processFileCallback(null);
      }
    });
  }
  else {
    console.log(ticker + " doesn't exist on the json");
    processFileCallback(null); // callback should always be called once (and only one time)
  }
}
Note that I did no specific error handling here; I'll take advantage of async.waterfall to centralize error handling in one place.
Also be careful: if you have (if/else/switch/...) branches in an asynchronous function, each path must call the callback once (and only once).
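As a tiny illustration (my example, not from the original code) of how a missed callback stalls the flow:

function badProcessFile(file, processFileCallback) {
  if (file.length > 0) {
    processFileCallback(null, file);
  }
  // BUG: when file is empty no branch calls the callback,
  // so async.waterfall waits forever and the final callback never fires
}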
Plug everything with async.waterfall
async.waterfall([
  readFile,
  processFile
], function (error) {
  if (error) {
    // handle readFile error or processFile error here
  }
});
Clean example
The previous code was excessively verbose to make the explanations clearer. Here is a full cleaned example:
async.waterfall([
  function readFile(readFileCallback) {
    fs.readFile('stocktest.json', readFileCallback);
  },
  function processFile(file, processFileCallback) {
    var stocksJson = JSON.parse(file);
    if (stocksJson[ticker] != null) {
      stocksJson[ticker].price = value;
      fs.writeFile('stocktest.json', JSON.stringify(stocksJson, null, 4), function (error) {
        if (!error) {
          console.log("File successfully written");
        }
        processFileCallback(error);
      });
    }
    else {
      console.log(ticker + " doesn't exist on the json");
      processFileCallback(null);
    }
  }
], function (error) {
  if (error) {
    // handle readFile error or processFile error here
  }
});
I left the function names in because they help readability and help debugging with tools like the Chrome debugger.
If you use underscore (on npm), you can also replace the first function with _.partial(fs.readFile, 'stocktest.json')
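For instance, a sketch of that shortcut (assuming underscore is installed; processFile as defined above):

var _ = require('underscore'),
    fs = require('fs'),
    async = require('async');

async.waterfall([
  _.partial(fs.readFile, 'stocktest.json'), // stands in for the named readFile step
  processFile
], function (error) {
  if (error) {
    // handle readFile error or processFile error here
  }
});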
First and foremost, make sure you read the documentation regarding async.waterfall.
Now, there are a couple of key parts about the waterfall control flow:
1. The control flow is specified by an array of functions for invocation as the first argument, and a "complete" callback for when the flow is finished as the second argument.
2. The array of functions is invoked in series (as opposed to parallel).
3. If an error (usually named err) is encountered at any operation in the flow array, it will short-circuit and immediately invoke the "complete"/"finish"/"done" callback.
4. Arguments from the previously executed function are applied to the next function in the control flow, in order, and an "intermediate" callback is supplied as the last argument. Note: the first function only has this "intermediate" callback, and the "complete" callback will have the arguments of the last invoked function in the control flow (with consideration to any errors), but with an err argument prepended instead of an "intermediate" callback appended.
5. The callbacks for each individual operation (I call this cbAsync in my examples) should be invoked when you're ready to move on: the first parameter will be an error, if any, and the second (third, fourth... etc.) parameter will be any data you want to pass to the subsequent operation.
The first goal is to get your code working almost verbatim alongside the introduction of async.waterfall. I decided to remove all your console.log statements and simplified your error handling. Here is the first iteration (untested code):
var fs = require('fs'),
    async = require('async');

function updateJson(ticker, value) {
  async.waterfall([ // the series operation list of `async.waterfall`
    // waterfall operation 1, invoke cbAsync when done
    function getTicker(cbAsync) {
      fs.readFile('stocktest.json', function(err, file) {
        if ( err ) {
          // if there was an error, let async know and bail
          cbAsync(err);
          return; // bail
        }
        var stocksJson = JSON.parse(file);
        if ( stocksJson[ticker] == null ) { // == null also catches a missing (undefined) ticker
          // if we don't have the ticker, let "complete" know and bail
          cbAsync(new Error('Missing ticker property in JSON.'));
          return; // bail
        }
        stocksJson[ticker] = value;
        // err = null (no error), jsonString = JSON.stringify(...)
        cbAsync(null, JSON.stringify(stocksJson, null, 4));
      });
    },
    function writeTicker(jsonString, cbAsync) {
      fs.writeFile('stocktest.json', jsonString, function(err) {
        cbAsync(err); // err will be null if the operation was successful
      });
    }
  ], function asyncComplete(err) { // the "complete" callback of `async.waterfall`
    if ( err ) { // there was an error with either `getTicker` or `writeTicker`
      console.warn('Error updating stock ticker JSON.', err);
    } else {
      console.info('Successfully completed operation.');
    }
  });
}
The second iteration divides up the operation flow a bit more. It puts it into smaller single-operation oriented chunks of code. I'm not going to comment it, it speaks for itself (again, untested):
var fs = require('fs'),
    async = require('async');

function updateJson(ticker, value, callback) { // introduced a main callback
  var stockTestFile = 'stocktest.json';
  async.waterfall([
    function getTicker(cbAsync) {
      fs.readFile(stockTestFile, function(err, file) {
        cbAsync(err, file);
      });
    },
    function parseAndPrepareStockTicker(file, cbAsync) {
      var stocksJson = JSON.parse(file);
      if ( stocksJson[ticker] == null ) {
        cbAsync(new Error('Missing ticker property in JSON.'));
        return;
      }
      stocksJson[ticker] = value;
      cbAsync(null, JSON.stringify(stocksJson, null, 4));
    },
    function writeTicker(jsonString, cbAsync) {
      fs.writeFile(stockTestFile, jsonString, function(err) {
        cbAsync(err);
      });
    }
  ], function asyncComplete(err) {
    if ( err ) {
      console.warn('Error updating stock ticker JSON.', err);
    }
    callback(err);
  });
}
The last iteration short-hands a lot of this with the use of some bind tricks to decrease the call stack and increase readability (IMO), also untested:
var fs = require('fs'),
    async = require('async');

function updateJson(ticker, value, callback) {
  var stockTestFile = 'stocktest.json';
  async.waterfall([
    fs.readFile.bind(fs, stockTestFile),
    function parseStockTicker(file, cbAsync) {
      var stocksJson = JSON.parse(file);
      if ( stocksJson[ticker] == null ) {
        cbAsync(new Error('Missing ticker property in JSON.'));
        return;
      }
      cbAsync(null, stocksJson);
    },
    function prepareStockTicker(stocksJson, cbAsync) {
      stocksJson[ticker] = value;
      cbAsync(null, JSON.stringify(stocksJson, null, 4));
    },
    fs.writeFile.bind(fs, stockTestFile)
  ], function asyncComplete(err) {
    if ( err ) {
      console.warn('Error updating stock ticker JSON.', err);
    }
    callback(err);
  });
}
Basically, nodejs (and more generally javascript) functions that require some time to execute (be it for I/O or cpu processing) are typically asynchronous, so the event loop (simply put, a loop that continuously checks for tasks to be executed) can invoke the function right below the first one without getting blocked waiting for a response. If you are familiar with other languages like C or Java, you can think of an asynchronous function as a function that runs on another thread (it's not necessarily true in javascript, but the programmer shouldn't care about it); when the execution terminates, this thread notifies the main one (the event loop one) that the job is done and it has the results.
As said, once the first function has ended its job it must be able to notify that its job is finished, and it does so by invoking the callback function you pass to it. To make an example:
var callback = function(data, err) {
  if (!err) {
    // do something with the received data
  } else {
    // something went wrong
  }
};

asyncFunction1(someparams, callback);
asyncFunction2(someotherparams);
The execution flow would call asyncFunction1, asyncFunction2 and every function below, until asyncFunction1 ends; then the callback function, which is passed as the last parameter to asyncFunction1, is called to do something with data if no errors occurred.
So, to make 2 or more asynchronous functions execute one after another, each only when the previous one has ended, you have to call them inside their callback functions:
asyncTask1(data, function(result1, err) {
  if (!err) {
    asyncTask2(data, function(result2, err2) {
      if (!err2) {
        // call maybe a third async function
      } else {
        console.log(err2);
      }
    });
  } else {
    console.log(err);
  }
});
result1 is the value asyncTask1 passes to its callback and result2 is the value passed by asyncTask2. You can nest as many asynchronous functions as you want this way.
In your case if you want another function to be called after updateJson() you must call it after this line:
console.log("File successfully written");
