Iterating with callback/anonymous functions - javascript

I'm new to Node.js and advanced JavaScript in general. I'm trying to build a schedule manager application on my own, and I ran into a problem (detailed below) when executing the following code:
router.get('/', function (req, res) {
var day = new Date(req.query.day);
Location.getLocations(function (err, locations) {
if (locations.length > 0) {
var i;
for (i = 0; i < locations.length; i++) {
var location = locations[i];
Appointment.getAppointments(day, location, function (err, appointments) {
if (err) throw err;
if (appointments.length == 0) {
// CREATE APPOINTMENTS
for (var j = location.available_time_start; j <= location.available_time_end; j += location.appointment_duration) {
var newAppointment = new Appointment();
newAppointment.start_date = new Date(day.getFullYear(), day.getMonth() + 1, day.getDate(), j);
newAppointment.appointment_duration = location.appointment_duration;
newAppointment.location = location.id;
newAppointment.booked = false;
newAppointment.locked = false;
Appointment.createAppointment(newAppointment, function (err, appointment) {
if (err) throw err;
console.log(appointment.location + ' - ' + appointment.start_date);
});
}
}
});
}
} else {
// THERE ARE NO LOCATIONS
}
res.render('appointments', { locations: locations });
});
});
The problem is:
When I iterate over the locations array and then call getAppointments, the callback isn't executed at that exact moment. Later, when it does execute, the location object is always the same (the iteration doesn't carry over), which produces an unexpected result: all appointments end up with the same/last location.
I tried using an IIFE (immediately-invoked function expression) to execute the code instantly, but then I couldn't access the appointments callback object and my logic broke as well.
Thanks in advance!

The problem was solved by using let instead of var as suggested by #JaromandaX.
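For anyone who hits the same thing: with var there is a single binding of the loop variable shared by every callback, while let creates a new binding per iteration. A minimal sketch (using setTimeout as a stand-in for the async database calls above):

var items = ['a', 'b', 'c'];

// var: one shared binding -- by the time the callbacks run, i is already 3
for (var i = 0; i < items.length; i++) {
  setTimeout(function () { console.log('var ->', i, items[i]); }, 0);
}
// logs "var -> 3 undefined" three times

// let: a fresh binding per iteration -- each callback sees the value it was created with
for (let j = 0; j < items.length; j++) {
  setTimeout(function () { console.log('let ->', j, items[j]); }, 0);
}
// logs "let -> 0 a", "let -> 1 b", "let -> 2 c"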

Your code seems to be saving appointments but doesn't do anything with the saved appointments (are you mutating locations?).
When saving an appointment fails, the requestor never knows about it, because createAppointment is asynchronous and by the time its callback fires, res.render('appointments', { locations: locations }); has already executed.
You could try converting your callback-based functions to promises:
const asPromise = (fn,...args) =>
new Promise(
(resolve,reject)=>
fn.apply(undefined,
args.concat(//assuming one value to resolve with
(err,result)=>(err)?reject(err):resolve(result)
)
)
);
const savedAppointmentsForLocation = (day,location,appointments) => {
const savedAppointments = [];
if (appointments.length == 0) {
// CREATE APPOINTMENTS
for (var j = location.available_time_start; j <= location.available_time_end; j += location.appointment_duration) {
var newAppointment = new Appointment();
newAppointment.start_date = new Date(day.getFullYear(), day.getMonth() + 1, day.getDate(), j);
newAppointment.appointment_duration = location.appointment_duration;
newAppointment.location = location.id;
newAppointment.booked = false;
newAppointment.locked = false;
savedAppointments.push(
asPromise(
Appointment.createAppointment.bind(Appointment),
newAppointment
)
);
}
}
//you are not doing anything with the result of the saved appointments;
// collect them as promises so that if something goes wrong it can be
// reported back to the requestor of the API
return Promise.all(savedAppointments);
}
router.get('/', function (req, res) {
var day = new Date(req.query.day);
asPromise(Location.getLocations.bind(Location))
.then(
locations=>
Promise.all(
locations.map(
location=>
asPromise(Appointment.getAppointments.bind(Appointment),day,location)
.then(appointments=>[location,appointments])
)
)
)
.then(
results=>//results should be [ [location,[appointment,appointment]],...]
Promise.all(
results.map(
([location,appointments])=>
savedAppointmentsForLocation(day,location,appointments)
.then(ignoredSavedAppointment=>location)
)
)
)
.then(locations=>res.render('appointments', { locations: locations }))
.catch(
error=>{
console.log("something went wrong:",error);
res.status(500).send("Error in code");
}
)
});
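Side note: if you are on Node 8 or newer, util.promisify does essentially what the asPromise helper above does for standard error-first callbacks, e.g.:

const { promisify } = require('util');

// wrap the callback-style methods once and reuse the promise-returning versions
const getLocations = promisify(Location.getLocations.bind(Location));
const getAppointments = promisify(Appointment.getAppointments.bind(Appointment));

getLocations()
  .then(locations => console.log('found', locations.length, 'locations'))
  .catch(err => console.error(err));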

Related

Why is my code not waiting for the completion of the function?

I am trying to read some data from a file and store it in a database.
This is part of a larger transaction and I need the returned ids for further steps.
async parseHeaders(mysqlCon, ghID, csv) {
var self = this;
var hIDs = [];
var skip = true;
var idx = 0;
console.log("Parsing headers");
return new Promise(async function(resolve, reject) {
try {
var lineReader = require('readline').createInterface({
input: require('fs').createReadStream(csv)
});
await lineReader.on('close', async function () {
console.log("done: ", JSON.stringify(hIDs));
resolve(hIDs);
});
await lineReader.on('line', async function (line) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [°C])
var c = data[2];
hIDs[idx++] = await self.getParamID(mysqlCon, ghID, v, c, data);//, function(hID,sidx) { //add data in case the parameter is not in DB, yet.
}
});
} catch(e) {
console.log(JSON.stringify(e));
reject("some error occured: " + e);
}
});
}
async getParamID(mysqlCon,ghID,variable,category,data) {
return new Promise(function(resolve, reject) {
var sql = "SELECT ID FROM Parameter WHERE GreenHouseID="+ghID+" AND Variable = '" + variable + "' AND Category='" + category + "'";
mysqlCon.query(sql, function (err, result, fields) {
if(result.length === 0 || err) { //apparently not in DB, yet ... add it (Acronym and Machine need to be set manually).
sql = "INSERT INTO Parameter (GreenHouseID,Variable,Category,Control) VALUES ("+ghID+",'"+variable+"','"+category+"','"+data[3]+"')";
mysqlCon.query(sql, function (err, result) {
if(err) {
console.log(result,err,this.sql);
reject(err);
} else {
console.log("Inserting ",variable," into DB: ",JSON.stringify(result));
resolve(result.insertId); //added, return generated ID.
}
});
} else {
resolve(result[0].ID); //found in DB .. return ID.
}
});
});
}
The functions above are in the base class and called by the following code:
let headerIDs = await self.parseHeaders(mysqlCon, ghID, filePath);
console.log("headers:",JSON.stringify(headerIDs));
The sequence of events is that everything in parseHeaders completes except for the calls to self.getParamID, and control returns to the calling function, which prints an empty array for headerIDs.
The console.log statements in self.getParamID are then printed afterward.
What am I missing?
Thank you
Since you want to execute an asynchronous action for every line, we can define a helper that does just that:
const once = (target, evt) => new Promise(res => target.on(evt, res));
function mapLines(reader, action) {
const results = [];
let index = 0;
// action is async, so results collects a pending promise for every line
reader.on("line", line => results.push(action(line, index++)));
// once the reader closes, wait for all of the per-line promises to settle
return once(reader, "close").then(() => Promise.all(results));
}
So now you can solve that easily:
let skip = true; // skip everything until the "Variable" header line is found
const hIDs = [];
await mapLines(lineReader, async function (line, idx) {
line = line.replace(/\"/g, '');
if (line.startsWith("Variable")) { //Variable,Statistics,Category,Control
console.log("found variables");
skip = false; //Ignore all data and skip to the parameter description.
return; //Skip also the header line.
}
if (!skip) {
var data = line.split(",");
if (data.length < 2) { //Variable section done return results.
console.log("Found sub?",line);
return lineReader.close();
}
var v = data[0];
var bidx = data[0].indexOf(" [");
if (bidx > 0)
v = data[0].substring(0, bidx); //[] are disturbing mysql (E.g.; Air temperature [°C])
var c = data[2];
hIDs[idx] = await self.getParamID(mysqlCon, ghID, v, c, data);
}
});

NodeJS Loop issue due to async/synchronicity issues

I am porting an old Ruby script over to JavaScript, setting the function up as a cron instance so it will run on a schedule. The function queries our MySQL database, retrieves inventory information for our products, and then sends requests to a trading partner's API to update our inventory on their site.
Due to Node's asynchronicity I am running into issues. We need to chunk requests into 1000 items per request, and we are sending 10k products. The issue is that each request just sends the last 1000 items every time. The for loop inside the while loop moves on before the JSON request body is finished being crafted. I tried creating anonymous setTimeout functions in the while loop to handle it, as well as creating an object with the request function and the variables to be passed and stuffing it into an array to iterate over once the while loop completes, but I get the same result. I'm not sure what the best way is to handle this so that each request gets the correct batch of items. I also need to wait 3 minutes between each request of 1000 items to avoid hitting the request cap.
query.on('end',()=>{
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({data: writeArray,fields:fields}),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' +(timestamp.getMonth() + 1) + '-' + timestamp.getDate()+ ' '+timestamp.getHours() +':'+timestamp.getMinutes()+':'+timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-'+timestamp+'.csv';
while(itemArray.length > 0){
alteredArray = itemArray.splice(0,999);
for(let i = 0; i < alteredArray.length; i++){
jsonObjectArray.push({
sku: alteredArray[i]['sku'],
quantity: alteredArray[i]["quantity"],
overstockquantity: alteredArray[i]["osInv"],
warehouse: warehouse,
isdiscontinued: alteredArray[i]["disc"],
backorderdate: alteredArray[i]["etd"],
backorderavailability: alteredArray[i]["boq"]
});
}
var jsonObject = {
login: user,
password: password,
items: jsonObjectArray
};
postOptions.url = endpoint;
postOptions.body = JSON.stringify(jsonObject);
funcArray.push({func:function(postOptions){request(postOptions,(err,res,body)=>{if(err){console.error(err);throw err;}console.log(body);})},vars:postOptions});
jsonObjectArray.length = 0;
}
var mili = 180000;
for(let i = 0;i < funcArray.length; i++){
setTimeout(()=>{
var d = JSON.parse(funcArray[i]['vars'].body);
console.log(d);
console.log('request '+ i);
//funcArray[i]['func'](funcArray[i]['vars']);
}, mili * i);
}
});
});
You would need async/await or Promises to handle async actions in Node.js.
I am not sure whether your Node version supports async/await, so I have tried a Promise-based solution.
query.on('end', () => {
connection.release();
writeArray = itemArray.slice(0),
alteredArray = [];
var csv = json2csv({ data: writeArray, fields: fields }),
timestamp = new Date(Date.now());
timestamp = timestamp.getFullYear() + '-' + (timestamp.getMonth() + 1) + '-' + timestamp.getDate() + ' ' + timestamp.getHours() + ':' + timestamp.getMinutes() + ':' + timestamp.getSeconds();
let fpath = './public/assets/archives/opalEdiInventory-' + timestamp + '.csv';
var calls = chunk(itemArray, 1000)
.map(function(chunk) {
var renameditemsArray = chunk.map((item) => new renamedItem(item, warehouse));
var postOptions = {};
postOptions.url = endpoint;
postOptions.body = JSON.stringify({
login: user,
password: password,
items: renameditemsArray
});
return postOptions;
});
sequenceBatch(calls, makeRequest)
.then(function() {
console.log('done');
})
.catch(function(err) {
console.log('failed', err)
});
function sequenceBatch (calls, cb) {
var sequence = Promise.resolve();
var count = 1;
calls.forEach(function (callOptions) {
count++;
sequence = sequence.then(()=> {
return new Promise(function (resolve, reject){
setTimeout(function () {
try {
cb(callOptions);
resolve(`callsequence${count} done`);
}
catch(err) {
reject(`callsequence ${count} failed`);
}
}, 180000);
});
})
});
return sequence;
}
function makeRequest(postOptions) {
request(postOptions, (err, res, body) => {
if (err) {
console.error(err);
throw err;
}
console.log(body)
});
}
function chunk(arr, len) {
var chunks = [],
i = 0,
n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len));
}
return chunks;
}
function renamedItem(item, warehouse) {
this.sku = item['sku']
this.quantity = item["quantity"]
this.overstockquantity = item["osInv"]
this.warehouse = warehouse
this.isdiscontinued = item["disc"]
this.backorderdate = item["etd"]
this.backorderavailability= item["boq"]
}
});
Could you please try this snippet and let me know if it works? I couldn't test it since I made it up on the fly. The core logic is in the sequenceBatch function. The answer is based on another question which explains how timeouts and promises work together.
It turns out this wasn't a closure or async issue at all; the request object I was building was using references to objects instead of shallow copies, resulting in all the data being linked to the same object reference in the final array.
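For anyone curious, a minimal sketch of the difference (the object names here are made up for illustration):

var defaults = { warehouse: 'A', quantity: 0 };

// by reference: both array entries point at the same object,
// so a later mutation shows up in every element
var byRef = [defaults, defaults];
byRef[0].quantity = 10;
console.log(byRef[1].quantity); // 10

// shallow copy: each entry gets its own object
var byCopy = [Object.assign({}, defaults), Object.assign({}, defaults)];
byCopy[0].quantity = 10;
console.log(byCopy[1].quantity); // 0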

Javascript call by reference not working

I read this and tried implementing my function so that the data doesn't change back, but it isn't working for me.
I have an array of objects, where I send them one by one to another function, to add data.
queries.first(finalObject.sectionProjects[i]);
for each one of the sectionProjects, there is a variable achievements, with an empty array.
Upon sending each sectionProject to the queries.first function, I reassign achievements,
finalObject.sectionProjects[i].achievements = something else
When I return from the queries.first function, I lose the data I added.
Am I doing something wrong?
Here's the function:
module.exports = {
first:function(aProject) {
// Latest achievements
var query =
" SELECT ta.description, ta.remarks, ta.expectedECD " +
" FROM project pr, task ta, milestone mi " +
" WHERE pr.ID = mi.project_ID AND mi.ID = ta.milestone_ID " +
" AND ta.achived = ta.percent AND pr.ID = " + aProject.project_id +
" ORDER BY pr.expectedECD " +
" LIMIT 5;"
;
var stringified = null;
pmdb.getConnection(function(err, connection){
connection.query(query, function(err, rows){
if(err) {
throw err;
}else{
var jsonRows = [];
for( var i in rows) {
stringified = JSON.stringify(rows[i]);
jsonRows.push(JSON.parse(stringified));
}
connection.release();
aProject.achievements = jsonRows;
upcomingTasks(aProject);
}
});
});
}
}
This is pmdb.js:
var mysql = require("mysql");
var con = mysql.createPool({
host: "localhost",
user: "user",
password: "password",
database: "database"
});
module.exports = con;
This is the main function that calls queries.first:
// ...Code...
//Number of section projects
var len = jsonRows.length;
console.log("Number of section projects: " + len);
var internal_counter = 0;
function callbackFun(i){
(finalObject.sectionProjects[i]).achievements = [];
queries.first(finalObject.sectionProjects[i]);
if(++internal_counter === len) {
response.json(finalObject);
}
}
var funcs = [];
for (var i = 0; i < len; i++) {
funcs[i] = callbackFun.bind(this, i);
}
for (var j = 0; j < len; j++) {
funcs[j]();
}
Read that answer twice. Objects act as wrappers for their scalar primitive properties. You are passing the objects into the "queries.first" function.
See this Object reference issue
Edited for the sample code
pmdb.getConnection(function(err, connection){
connection.query(query, function(err, rows){
if(err) {
throw err;
}else{
var jsonRows = [];
for( var i in rows) {
stringified = JSON.stringify(rows[i]);
jsonRows.push(JSON.parse(stringified));
}
connection.release();
aProject.achievements = jsonRows;
upcomingTasks(aProject)
}
});
});
That is not a problem; change it like this. "upcomingTasks" is not a callback function; it is executed after the achievements are assigned to aProject.
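One way to make the caller actually wait for the data is to have queries.first signal completion, for example by accepting a callback of its own. A sketch under the assumption that upcomingTasks is synchronous (the error handling here is illustrative, not from the original code):

// queries.js -- call done() once achievements have been assigned
first: function (aProject, done) {
  // ... build the query string as before ...
  pmdb.getConnection(function (err, connection) {
    if (err) return done(err);
    connection.query(query, function (err, rows) {
      if (err) return done(err);
      connection.release();
      aProject.achievements = rows.map(function (row) {
        return JSON.parse(JSON.stringify(row));
      });
      upcomingTasks(aProject);
      done(null, aProject); // tell the caller this project is finished
    });
  });
}

// main function -- respond only after every sectionProject has reported back
queries.first(finalObject.sectionProjects[i], function (err) {
  if (err) return response.status(500).send(err.message);
  if (++internal_counter === len) response.json(finalObject);
});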

Node.js asynchronous coding difficulty

I'm trying to get multiple documents from MongoDB and send all the data in an array, but I'm having serious trouble understanding how this can be done with event-driven Node.js.
The problem is that by the time dataArray.push(tempObject) executes, tempObject["data"] = tempDataArray still has not run.
My code looks like this:
app.post('/api/charts', function(req, res) {
var names = req.body.names;
var categories = req.body.categories;
var dataArray = [];
for (i = 0; i < names.length; i++) {
var tempObject = {};
tempObject["name"] = names[i];
Company.find({ name : names[i] }, function(err, result) {
if (err) {
throw err;
}
var tempDataArray = [];
for (k = 0; k < categories.length; k++) {
var tempDataObject = {};
tempDataObject["name"] = categories[k];
tempDataObject["numbers"] = result[0]["data"][categories[k]]["numbers"];
tempDataObject["dates"] = result[0]["data"][categories[k]]["dates"];
tempDataArray.push(tempDataObject);
}
tempObject["data"] = tempDataArray;
});
dataArray.push(tempObject);
}
res.send(dataArray);
});
Any suggestions on how to properly achieve the desired result would be appreciated.
Use this library:
https://github.com/caolan/async
Using it, your code will look like this:
var async = require("async");
app.post('/api/charts', function(req, res) {
var names = req.body.names;
var categories = req.body.categories;
var dataArray = [];
async.forEach(names, function(name, callback){
var tempObject = {};
tempObject["name"] = name;
Company.find({ name : name }, function(err, result) {
if (err) {
callback(err);
} else {
var tempDataArray = [];
for (k = 0; k < categories.length; k++) {
var tempDataObject = {};
tempDataObject["name"] = categories[k];
tempDataObject["numbers"] = result[0]["data"][categories[k]]["numbers"];
tempDataObject["dates"] = result[0]["data"][categories[k]]["dates"];
tempDataArray.push(tempDataObject);
}
tempObject["data"] = tempDataArray;
dataArray.push(tempObject);
callback();
}
});
}, function(err){
if(err){
res.send(err);
} else {
res.send(dataArray);
}
});
});
The Company.find() method takes a callback function as its second parameter. This callback is called after the company data is retrieved from the database, which could be anywhere from a few milliseconds to a few hundred milliseconds after calling Company.find(). But the code directly after Company.find() is not delayed; it runs straight away. That callback delay is why dataArray.push(tempObject) is always called before tempObject["data"] = tempDataArray.
On top of this, the outer for loop runs synchronously and makes a separate DB call on each iteration. This isn't ideal, so we want to move the iteration into the callback of a single query. We can do something like:
app.post('/api/charts', function(req, res) {
var names = req.body.names;
var categories = req.body.categories;
// we just do one DB query where all the data we need is returned
Company.find({ name : names }, function(err, result) {
if (err) {
throw err;
}
var dataArray = [];
// we iterate through each result in the callback, not outside it since
// that would cause blocking due to synchronous operation
for (i = 0; i < result.length; i++) {
var tempObject = {};
tempObject["name"] = result[i].name;
var tempDataArray = [];
for (k = 0; k < categories.length; k++) {
var tempDataObject = {};
tempDataObject["name"] = categories[k];
tempDataObject["numbers"] = result[i]["data"][categories[k]]["numbers"];
tempDataObject["dates"] = result[i]["data"][categories[k]]["dates"];
tempDataArray.push(tempDataObject);
}
tempObject["data"] = tempDataArray;
dataArray.push(tempObject);
}
res.send(dataArray);
});
});
There are many approaches to abstracting Node's event-driven nature, such as Promises (available natively from ECMAScript 6 onwards, or via a Promise library such as Bluebird, Async, etc.). But the above is a basic callback approach of the kind typically used in Express applications.
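For comparison, a sketch of the same route using async/await (this assumes an environment with async function support and that Company.find returns a thenable when called without a callback, as Mongoose queries do):

app.post('/api/charts', async function (req, res) {
  try {
    const names = req.body.names;
    const categories = req.body.categories;
    // one query for all requested companies
    const result = await Company.find({ name: names });
    const dataArray = result.map(company => ({
      name: company.name,
      data: categories.map(category => ({
        name: category,
        numbers: company.data[category].numbers,
        dates: company.data[category].dates
      }))
    }));
    res.send(dataArray);
  } catch (err) {
    res.status(500).send(err.message);
  }
});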
Simply change this:
tempObject["data"] = tempDataArray;
});
dataArray.push(tempObject);
To:
tempObject["data"] = tempDataArray;
dataArray.push(tempObject);
});

Javascript for loop wait for callback

I have this function:
function tryStartLocalTrendsFetch(woeid) {
var userIds = Object.keys(twitClientsMap);
var isStarted = false;
for (var i = 0; i < userIds.length; i++) {
var userId = userIds[i];
var twitClientData = twitClientsMap[userId];
var isWoeidMatch = (woeid === twitClientData.woeid);
if (isWoeidMatch) {
startLocalTrendsFetch(woeid, twitClientData, function (err, data) {
if (err) {
// Couldn't start local trends fetch for userId: and woeid:
isStarted = false;
} else {
isStarted = true;
}
});
// This obviously will not work because startLocalTrendsFetch is async and the check below runs immediately
if (isStarted) {
break;
}
}
}
console.log("No users are fetching woeid: " + woeid);
}
The gist of this method is that I want the line if (isStarted) { break; } to work. The reason is that if it's started it should not continue the loop and try to start another one.
I'm doing this in NodeJS.
Try using a recursive definition instead:
function tryStartLocalTrendsFetch(woeid) {
var userIds = Object.keys(twitClientsMap);
recursiveDefinition (userIds, woeid);
}
function recursiveDefinition (userIds, woeid, userIndex) {
userIndex = userIndex || 0;
if (userIndex < userIds.length) {
var userId = userIds[userIndex];
var twitClientData = twitClientsMap[userId];
var isWoeidMatch = (woeid === twitClientData.woeid);
if (isWoeidMatch) {
startLocalTrendsFetch(woeid, twitClientData, function (err, data) {
if (err) {
// this user couldn't start the fetch, try the next one
recursiveDefinition(userIds, woeid, userIndex + 1);
}
// else: started successfully, stop recursing
});
} else {
recursiveDefinition(userIds, woeid, userIndex + 1);
}
} else {
console.log("No users are fetching woeid: " + woeid);
}
}
You may also use async (npm install async):
var async = require('async');
async.forEach(row, function(col, callback){
// Do your magic here
callback(); // indicates the end of loop - exit out of loop
}, function(err){
if(err) throw err;
});
More material to help you out: Node.js - Using the async lib - async.foreach with object
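On newer Node versions the same early exit can also be written with async/await and a plain for...of loop, assuming startLocalTrendsFetch is first wrapped into a promise-returning function (a sketch, not the original API):

const { promisify } = require('util');
const startLocalTrendsFetchAsync = promisify(startLocalTrendsFetch);

async function tryStartLocalTrendsFetch(woeid) {
  for (const userId of Object.keys(twitClientsMap)) {
    const twitClientData = twitClientsMap[userId];
    if (woeid !== twitClientData.woeid) continue;
    try {
      await startLocalTrendsFetchAsync(woeid, twitClientData);
      return; // started successfully -- this is the "break" from the question
    } catch (err) {
      // this user couldn't start the fetch, fall through to the next one
    }
  }
  console.log("No users are fetching woeid: " + woeid);
}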
