I have a very long promise chain inside a GET handler that takes quite a few minutes to complete. The part that takes so long is a for loop that iterates through some 160 array elements and runs a long series of socket connect tests. However, around the 84th iteration of the for loop, the whole promise chain (or maybe the GET call) starts all over again while the first one is still running. Then, once the first one finishes, the res.send never goes through while the new chain runs, and this repeats ad infinitum.
router.get('/', function(req, res) {
  fs.readdir('C:\\Temp\\hostPorts', function(err, files) {
    console.log('files', files);
    chooseFile(files).then(response => {
      readTheFile(response).then(async (result) => {
        splitText(result).then(async (final) => {
          console.log('final version', final);
          res.send({file: final});
        })
        // res.send({file: result});
      }).catch(error => {
        console.log(error);
      }) //end catch
    }); //end promise
  }); //end read
}); //end get
This is my GET call; the splitText function is where it gets stuck. I will post the source for the splitText function below, but I'm certain that it's somehow creating two instances, since every time, at the 84th iteration, my terminal reprints the initial console.log('files', files) and then runs through the other promises in the chain.
It does eventually finish the first one, because the console.log('final version', final) does print out, but the res.send never happens, and the second promise chain continues to run. Then the third, and so on.
Here's the code for the long loop:
async function splitText(file){
  let tableData = "<table border=1 cellspacing=0><tr>";
  let splited = file.trim().split(/\s+/);
  //vars for checking connectivity
  for (let i = 0; i < splited.length; i++) {
    console.log(splited[i] + " " + i);
    if (i < 4) {
      //if it's less than 4, print out table headers
      tableData += "<th>" + splited[i] + "</th>";
    }
    else if (i == 4){
      //if it's 4, create a new row for data
      tableData += "</tr><tr><td>" + splited[i] + "</td>";
    }
    else if (i % 3 == 0){
      //if modulo 3 is 0 then it's a port; check connectivity and add it to the row as data after the port,
      //then start a new row
      let host = splited[(i - 1)]; //1 index ago was host
      let port = parseInt(splited[(i)]); //current array index is port
      console.log('host: ' + host);
      console.log('port: ' + port);
      await testPort(port, host).then(async (reachable) => {
        console.log(reachable);
        if (reachable) {
          tableData += "<td>" + splited[i] + "</td><td>" + "<font color=\"GREEN\">UP</font>" + "</tr><tr>";
        }
        else {
          tableData += "<td>" + splited[i] + "</td><td>" + "<font color=\"RED\">DOWN</font>" + "</tr><tr>";
        }
      });
    } //end else if
    else {
      //otherwise add table data
      tableData += "<td>" + splited[i] + "</td>";
    }
  } //end for
  return tableData;
} //end function
And this is the async function that checks if the host/ports are up.
async function testPort(port, host){
  return new Promise(resolve => {
    const socket = new net.Socket();
    const onError = () => {
      socket.destroy();
      resolve(false);
    };
    socket.setTimeout(10000);
    socket.on('error', onError);
    socket.on('timeout', onError);
    socket.connect(port, host, () => {
      socket.end();
      resolve(true);
    });
  }); //end promise
} //end function
I'm not sure if this is an issue of the HTTP GET restarting after it takes too long (though I have the timeout set to 5 minutes), or the promise chain restarting after not getting a response. Either way, I'm really banging my head on this one: I never get data back to my client, and as far as I can see I never re-called a function or created an infinite loop.
I'm not sure which framework you are using and whether it has an internal timeout that retries the request while it is being processed. One problem with the above code is that the network connection tests happen serially; when you have a lot of hosts to check, it is bound to fail or time out. You can test multiple hosts in parallel. For this, the code to split, test, and build the output should be separated. Here is a rudimentary version.
function _splitText(file = '') {
  let ret = {
    header: [],
    hosts: {}
  };
  if (!file.length) {
    return ret;
  }
  //split the content
  let splitted = file.trim().split(/\s+/);
  //expect 4 header fields followed by groups of 3 (name, host, port)
  if (splitted.length % 3 !== 1 || splitted.length < 4) {
    console.log('Invalid data!');
    return ret;
  }
  //get header
  ret.header = splitted.splice(0, 4);
  while (splitted.length) {
    const [name, host, port, ...rest] = splitted;
    ret.hosts[name] = {
      host,
      port,
      isReachable: false
    };
    splitted = rest;
  }
  return ret;
}
async function testPort(name, port, host, timeout = 10000) {
  return new Promise(resolve => {
    const socket = new net.Socket();
    const onError = () => {
      socket.destroy();
      resolve({
        name,
        isReachable: false
      });
    };
    socket.setTimeout(timeout);
    socket.on('error', onError);
    socket.on('timeout', onError);
    socket.connect(port, host, () => {
      socket.end();
      resolve({
        name,
        isReachable: true
      });
    });
  }); //end promise
}
async function testPortsParallel(o, nParallel = 5, timeout = 10000) {
  const hostnames = Object.keys(o.hosts);
  while (hostnames.length) {
    //take the next batch (splice returns [], which is truthy, so test length instead)
    const temp = hostnames.splice(0, nParallel);
    //create promises for the whole batch and wait for them in one go
    await Promise.all(temp.map(v => testPort(v, o.hosts[v].port, o.hosts[v].host, timeout)))
      .then(values => values.forEach(v => o.hosts[v.name].isReachable = v.isReachable));
  }
}
function buildOutput(o) {
  let ret = '<table border=1 cellspacing=0>';
  //add header
  ret += '<tr><th>' + o.header.join('</th><th>') + '</th></tr>';
  //add hosts (including the reachability column the header promises)
  ret += Object.keys(o.hosts).map(v =>
    '<tr><td>' + [v, o.hosts[v].host, o.hosts[v].port, o.hosts[v].isReachable ? 'UP' : 'DOWN'].join('</td><td>') + '</td></tr>'
  ).join('');
  ret += '</table>';
  return ret;
}
async function splitText(s) {
  let data = _splitText(s);
  await testPortsParallel(data);
  return buildOutput(data);
}

splitText('name host port IsReachable 1 a b 2 c d')
  .then(output => console.log(output));
Hope this might be somewhat helpful. You can adjust the number of servers to test in parallel as per your need.
Note: There is a slight change in your testPort fn as well.
You are not helping yourself by mixing three different styles of async syntax. The first step should really be to simplify the code into a single style, which often reveals the issue.
At a glance, I suspect your issue is how you are chaining promises. Some of your then callbacks execute new promises but do not return them, which means you create multiple independent promise chains. You are also losing errors, as they have no catch clauses.
I would refactor into
router.get('/', function(req, res) {
  fs.readdir('C:\\Temp\\hostPorts', function(err, files) {
    sendFile(files)
      .then(result => res.send(result))
      .catch(err => {
        console.error(err);
        res.status(500).send("Boom");
      });
  });
});

async function sendFile(files) {
  const file = await chooseFile(files);
  const contents = await readTheFile(file);
  const splitContents = await splitText(contents);
  return {file: splitContents};
}
This uses async/await, which is easier to read than standard promise chains. With classic promise chains, you always have to remember to return promises from your then clauses or you can get into trouble.
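To illustrate the trap, here is a minimal sketch (stepOne and stepTwo are hypothetical promise-returning functions, not from the question):

// Broken: the inner promise is not returned, so the outer chain moves on
// immediately and any rejection from stepTwo() is silently lost
stepOne().then(value => {
  stepTwo(value).then(result => console.log(result));
});

// Fixed: returning the promise keeps a single chain, so ordering and
// error propagation both work as expected
stepOne()
  .then(value => stepTwo(value))
  .then(result => console.log(result))
  .catch(err => console.error(err));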
A few days ago I did a project and had some problems, which were solved in this question; let me try to summarize it.
I need to insert multiple objects into a SQL Server DB. For that, I wrote a function that loops another function, which opens a connection, inserts, and closes the connection, then repeats over and over again.
It worked fine until today, when it was tested on a colleague's PC against the server at work, and I get these errors:
Error: Requests can only be made in the LoggedIn state, not the LoggedInSendingInitialSql state
Error: Requests can only be made in the LoggedIn state, not the SentLogin7WithStandardLogin state
Here's the code we tested (the same as in my last question); it works on my PC, but not on the other:
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();
  let url = `https://api.openweathermap.org/data/2.5/weather?lat=${offices[index].latjson}&lon=${offices[index].lonjson}&appid=${api_key}&units=metric&lang=sp`;
  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      var myObject = {
        Id_Oficina: offices[index].IdOficina,
        ...
      };
      const request = new Request(
        "EXEC USP_BI_CSL_insert_reg_RegistroTemperaturaXidOdicina @IdOficina, ...",
        function (err) {
          if (err) {
            console.log("Couldn't insert data (" + index + "), " + err);
          } else {
            console.log("Data with ID: " + myObject.Id_Oficina + " inserted successfully (" + index + ").");
          }
        }
      );
      request.addParameter("IdOficina", TYPES.SmallInt, myObject.Id_Oficina);
      ...
      request.on("row", function (columns) {
        columns.forEach(function (column) {
          if (column.value === null) {
            console.log("NULL");
          } else {
            console.log("Product id of inserted item is " + column.value);
          }
        });
      });
      request.on("requestCompleted", function () {
        connection.close();
      });
      connection.execSql(request);
    });
}

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

functionLooper();
So, I thought it would be a good idea to use a setTimeout to:
Run functionLooper().
Open connection, insert and close.
Wait a few seconds.
Repeat.
So, I changed to this:
setTimeout(functionLooper, 2000);
function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}
It works, but, as you can see, it only waits the first time I run it, so I tried to make a function that runs setTimeout(functionLooper, 500); in a loop, the way functionLooper() does, but it didn't work either.
function TimerLooper() {
  for (let i = 0; i < offices.length; i++) {
    setTimeout(functionLooper, 500);
  }
}

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

TimerLooper();
This shows me this error:
Error: Validation failed for parameter 'Descripcion'. No collation was set by the server for the current connection.
file:///...:/.../.../node_modules/node-fetch/src/index.js:95
reject(new FetchError(request to ${request.url} failed, reason: ${error.message}, 'system', error));
^ FetchError: request to https://api.openweathermap.org/data/2.5/weather?lat=XX&lon=XX&appid=XX&units=metric&lang=sp failed, reason: connect ETIMEDOUT X.X.X.X:X
So, I have some questions:
How can I use setTimeout properly? I based this function on what I saw here on SO, but I just can't get it right and I don't know what I'm doing wrong.
Why does it work on my PC but not on the other? Do we have to change some kind of config or something?
Is using setTimeout the correct way to solve this problem? If not, what would you suggest?
Could you do something like:
//edit: not disconnect but end
connection.on("end", function(){
functionLopper(index++)
})
function functionLooper(i) {
if(i<offices.length) insertOffice(i)
}
Edit: according to the tedious docs, there is an 'end' event emitted on connection.close():
Event: 'end'
function () { }
The connection has ended. This may be as a result of the client calling close(), the server closing the connection, or a network error.
My suggestion from above:
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();
  let url = `...`;
  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      ...
    });
  connection.on("end", function () {
    functionLooper(index + 1);
  });
}

function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}

functionLooper(0); //kick off the chain with the first office
So, I'm having a problem with JavaScript asynchronous execution when making an API call to AWS S3.
I have a sequence of nested callbacks that work fine up until a specific S3 call that my code does not wait for. Here's my code:
getThumbUrls(contentIndex, function(data) {
  console.log('Returning from getThumbUrls');
  // let's just display thumbUrls[0] for now...
  console.log('The thumbUrls are ' + data[0]);
});
getThumbUrls() looks like this:
function getThumbUrls(contentIndex, callback) {
  console.log('Entering getThumbUrls');
  var thumbUrls = [];
  JSON.parse(contentIndex).forEach(videoKey => {
    // get the thumbnail: bucket-name/thumbnails/<first-key>
    console.log('videoKey = ' + videoKey);
    getThumbFileName(videoKey, function(thumbFileName) {
      console.log('Returning from getThumbFileName');
      console.log('Returned thumb filename is ' + thumbFileName);
      thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
    });
  });
  callback(thumbUrls);
}
And getThumbFileName() looks like this:
function getThumbFileName(videoKey, callback) {
  console.log('Entering getThumbFileName...');
  const s3 = new AWS.S3({
    apiVersion: '2006-03-01',
    params: {
      Bucket: 'my-bucket-name'
    }
  });
  // Get the name of the file.
  params = {
    Bucket: 'my-bucket-name',
    Delimiter: '/',
    Prefix: videoKey + '/' + THUMBS_FOLDER,
    MaxKeys: 1
  };
  var urlKey;
  //console.log('listObjects params = ' + JSON.stringify(params, null, 4));
  s3.listObjectsV2(params, (err, data) => {
    if (err) {
      console.log(err, err.stack);
      callback(err);
      return;
    }
    var thumbsKey = data.Contents;
    // MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
    thumbsKey.forEach(function (keys) {
      console.log('thumbKey = ' + keys.Key);
      urlKey = keys.Key;
    });
  });
  callback(urlKey);
  //callback('20161111-TheWind.jpg');
}
Obviously, what's happening is that execution doesn't wait for the s3.listObjectsV2 call to finish. I've verified that the entire flow works properly when all getThumbFileName() does is callback with the filename.
Would someone kindly show me how to force execution to wait for s3.listObjectsV2 to complete before calling back with undefined?
As discussed, you should avoid the callback approach when dealing with asynchronous operations over iterations, due to the difficulty involved.
(You can skip this section if you don't want to know the motivation behind the promise approach.)
Just to mention: in a callback approach, you have to wait for all callbacks to complete inside your getThumbUrls(), using an if that checks whether every callback has been called, and only then call callback(thumbUrls); with all responses pushed into your thumbUrls array:
function getThumbUrls(contentIndex, callback) {
  const videoKeys = JSON.parse(contentIndex);
  const thumbUrls = [];
  // counter which will increment by one for every callback
  let counter = 0;
  videoKeys.forEach(videoKey => {
    getThumbFileName(videoKey, function (thumbFileName) {
      thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
      // for each callback response, add 1 to the counter and then
      counter++;
      // check if all callbacks have already been called
      if (counter === videoKeys.length) {
        // right here, thumbUrls is filled with all responses
        callback(thumbUrls);
      }
    });
  });
}
So, you can make use of Promises, and a Promise.all will be enough to handle all the responses from the API. Check your code below, rewritten with the promise approach; I've added some comments to help you understand what is happening.
// when using promises, no callbacks are needed
getThumbUrls(contentIndex)
  .then(function (data) {
    console.log('Returning from getThumbUrls');
    // let's just display thumbUrls[0] for now...
    console.log('The thumbUrls are ' + data[0]);
  })
  .catch(function (err) {
    // errors from any step in the chain end up here
    console.log(err, err.stack);
  });

// when using promises, no callbacks are needed
function getThumbUrls(contentIndex) {
  console.log('Entering getThumbUrls');
  // not needed anymore, Promise.all will return all values
  // var thumbUrls = [];
  // Promise.all receives an array of promises and hands all results to the next .then()
  // changing forEach to map to return promises to my Promise.all
  return Promise.all(JSON.parse(contentIndex).map(videoKey => {
    console.log('videoKey = ' + videoKey);
    // returning a promise
    return getThumbFileName(videoKey)
      .then(function (thumbFileName) {
        console.log('Returning from getThumbFileName');
        console.log('Returned thumb filename is ' + thumbFileName);
        return CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName;
      });
  }));
}

// when using promises, no callbacks are needed
function getThumbFileName(videoKey) {
  console.log('Entering getThumbFileName...');
  const s3 = new AWS.S3({
    apiVersion: '2006-03-01',
    params: {
      Bucket: 'my-bucket-name'
    }
  });
  // Get the name of the file.
  const params = {
    Bucket: 'my-bucket-name',
    Delimiter: '/',
    Prefix: videoKey + '/' + THUMBS_FOLDER,
    MaxKeys: 1
  };
  // urlKey not needed anymore
  // var urlKey;
  // most AWS SDK methods have a .promise() method which returns a promise instead of taking a callback
  return s3.listObjectsV2(params).promise()
    .then(function (data) {
      var thumbsKey = data.Contents;
      // return only the first key's name (MaxKeys is 1)
      return thumbsKey[0].Key;
    })
    .catch(function (err) {
      console.log(err, err.stack);
      // no callback in the promise version; rethrow so the caller's .catch sees it
      throw err;
    });
}
Hope this helps you out in your study.
Would someone kindly show me how to force execution to wait
That's the wrong question. You are not trying to get execution to "wait," or, at least, you shouldn't be. You just need to call the callback in the right place -- inside the callback from s3.listObjectsV2(), not outside.
function getThumbFileName(videoKey, callback) {
  ...
  s3.listObjectsV2(params, (err, data) => {
    if (err) {
      ...
    }
    var thumbsKey = data.Contents;
    // MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
    thumbsKey.forEach(function (keys) {
      console.log('thumbKey = ' + keys.Key);
      urlKey = keys.Key;
    });
    callback(urlKey); // right
  });
  // wrong // callback(urlKey);
}
The way you wrote it, the callback fires after s3.listObjectsV2() begins to run -- not after it finishes (calls its own callback).
I'm trying to use async/await in my Firebase function, but I am getting an error. I have marked the function as async, but when I try to use await inside of it I get the error: Expression has type void. Put it on its own line as a statement.
I find this strange because each await call is already on its own line as a statement, so I'm not sure what to do. Any help would be appreciated.
For clarity, I am making a request with the Cheerio web-scraping library and then trying to make two async function calls during each loop of the .each method.
export const helloWorld = functions.https.onRequest((req, response) => {
  const options = {
    uri: 'https://www.cbssports.com/nba/scoreboard/',
    transform: function (body) {
      return cheerio.load(body);
    }
  };
  request(options)
    .then(($) => {
      $('.live-update').each((i, element) => {
        const homeTeamAbbr = $(element).find('tbody').children('tr').eq(0).find('a').html().split("alt/").pop().split('.svg')[0];
        const awayTeamAbbr = $(element).find('tbody').children('tr').eq(1).find('a').html().split("alt/").pop().split('.svg')[0];
        const homeTeam = $(element).find('tbody').children('tr').eq(0).find('a.team').text().trim();
        const awayTeam = $(element).find('tbody').children('tr').eq(1).find('a.team').text().trim();
        let homeTeamStatsURL = $(element).find('tbody').children('tr').eq(0).find('td').html();
        let awayTeamStatsURL = $(element).find('tbody').children('tr').eq(1).find('td').html();
        const gameTime = $(element).find('.pregame-date').text().trim();
        homeTeamStatsURL = homeTeamStatsURL.match(/href="([^"]*)/)[1] + "roster";
        awayTeamStatsURL = awayTeamStatsURL.match(/href="([^"]*)/)[1] + "roster";
        const matchupString = awayTeamAbbr + "#" + homeTeamAbbr;
        const URLString = "NBA_" + urlDate + "_" + matchupString;
        // var docRef = database.collection('NBASchedule').doc("UpcommingSchedule");
        // var boxScoreURL = "www.cbssports.com/nba/gametracker/boxscore/" + URLString;
        // var setAda = docRef.set({[URLString]:{
        //   homeTeam: homeTeam,
        //   awayTeam: awayTeam,
        //   date: gameTime,
        //   homeTeamAbbr: homeTeamAbbr,
        //   awayTeamAbbr: awayTeamAbbr,
        //   homeTeamStatsURL: homeTeamStatsURL,
        //   awayTeamStatsURL: awayTeamStatsURL,
        //   boxScoreURL: boxScoreURL
        // }}, { merge: true });
        getTeamPlayers(homeTeamStatsURL, matchupString);
        getTeamPlayers(awayTeamStatsURL, matchupString);
        console.log("retrieved schedule for " + matchupString + " on " + urlDate)
      });
      response.send("retrieved schedule");
    })
    .catch(function (err) {
      console.log("error " + err);
    });
});
The function I am calling just makes another request and then I'm trying to log some data.
function getTeamPlayers(playerStatsURL, matchupString) {
  const options = {
    uri: playerStatsURL,
    transform: function (body) {
      return cheerio.load(body);
    }
  };
  console.log(playerStatsURL + " stats url");
  request(options)
    .then(($) => {
      console.log('inside cheerio')
      $('tbody').children('tr').each(function(i, element){
        const playerName = $(element).children('td').eq(1).children('span').eq(1).find('a').text().trim();
        const injury = $(element).children('td').eq(1).children('span').eq(1).children('.icon-moon-injury').text().trim();
        const news = $(element).children('td').eq(1).children('span').eq(1).children('.icon-moon-news').text().trim();
        const playerUrl = $(element).children('td').eq(1).children('span').eq(1).find('a').attr('href');
        const playerLogsUrl = "https://www.cbssports.com" + playerUrl.replace('playerpage', 'player/gamelogs/2018');
        console.log(playerName + ": Inj: " + injury + " News: " + news);
        // database.collection('NBAPlayers').add({[playerName]:{
        //   '01 playerName': playerName,
        //   '03 playerLogsUrl': playerLogsUrl,
        //   '04 inj': injury,
        //   '05 news': news
        // }})
        // .then(docRef => {
        //   console.log("ID " + docRef.id);
        //   //getPlayerLogs(playerLogsUrl, playerName, docRef.id);
        // })
        // .catch(error => console.error("Error adding document: ", error));
      });
    });
}
async/await is supported from Node 8 (which is deployable as Cloud Functions for Firebase); specifically, use 8.6.1 (at the time of this writing).
As for awaiting twice inside a loop: I don't think that is best practice.
Instead, push all these requests into an array, then Promise.all it so the fetches run in parallel.
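Sketched against the question's code (one assumption here: getTeamPlayers() must be changed to return its request(options) promise chain, so there is something to wait on):

const jobs = [];
$('.live-update').each((i, element) => {
  // ...extract homeTeamStatsURL, awayTeamStatsURL, matchupString as before...
  jobs.push(getTeamPlayers(homeTeamStatsURL, matchupString));
  jobs.push(getTeamPlayers(awayTeamStatsURL, matchupString));
});
// respond only once every scrape has finished, so nothing is killed early
return Promise.all(jobs)
  .then(() => response.send("retrieved schedule"))
  .catch(err => {
    console.log("error " + err);
    response.status(500).send("error");
  });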
Just in case someone comes after: as Ron stated before, you shouldn't use async/await inside a traditional for loop if the second call doesn't need the value of the first, as you will increase the run time of the code.
Furthermore, you can't use async/await inside a forEach or map loop, as it won't stop and wait for the promise to resolve.
The most efficient way is to use Promise.all([]). Here is a great YouTube video by an expert that explains async/await and Promises: https://www.youtube.com/watch?v=vn3tm0quoqE&t=6s
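To see why the forEach version misbehaves, a tiny sketch (fetchItem() is a hypothetical promise-returning function):

const ids = [1, 2, 3];

// Broken: forEach fires the async callbacks and returns immediately;
// each await pauses only its own throwaway promise, which nobody waits for
ids.forEach(async id => {
  await fetchItem(id);
});

// Works: map collects the promises so Promise.all can wait for all of them
await Promise.all(ids.map(id => fetchItem(id)));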
As to one of the questions in the comments by DarkHorse:
But all I want is for getTeamPlayers() to execute and write to the database so I don't know why I need to return anything.
In Firebase Functions, all asynchronous work needs to be finished (or returned) before the final response.
For example, in this case he has created an HTTP function. Before he ends it with response.send("retrieved schedule");, every function that was triggered needs to finish, because Firebase Functions cleans up after the final response, stopping anything that is still running. So any function that hasn't finished will be killed before doing its job.
Returning a promise is how Firebase Functions knows when all executions have finished and it can clean up.
Hope it helps :)
I seem to be having an issue when trying to write to a file in a loop: the loop iterates even though the first file has not been created (I think I am either not understanding promises or the asynchronous nature of the script).
So on the command line I run node write_file_script.js premier_league.
// teams.js
module.exports = {
  premier_league: [
    { team_name: "Team 1", friendly_name: "name 1"},
    { team_name: "Team 2", friendly_name: "name 2"}
  ]
}
My Script
const args = process.argv.slice(2);
const TEAM = require('./teams');
const Excel = require('exceljs');
const workbook = new Excel.Workbook();

for (var team = 0; team < TEAM[args].length; team++) {
  console.log("Starting Excel File generation for " + TEAM[args][team]['team_name']);
  var fhcw = require('../data_files/home/fhcw/' + TEAM[args][team]['friendly_name'] + '_home' + '.json');
  fhcw = fhcw.map(Number);
  workbook.xlsx.readFile('./excel_files/blank.xlsx')
    .then(function() {
      var worksheet = workbook.getWorksheet(1);
      // Write FHCW
      for (i=0; i < fhcw.length; i++) {
        col = i+6;
        worksheet.getCell('E'+ col).value = fhcw[i];
      }
      console.log(TEAM[args][team])
      workbook.xlsx.writeFile('./excel_files/' + TEAM[args] + '/' + TEAM[args][team]['friendly_name'] + '.xlsx');
    });
}
The output I get when running this is:
Starting Excel File generation for Team 1
Starting Excel File generation for Team 2
undefined
(node:75393) UnhandledPromiseRejectionWarning: Unhandled promise rejection
(rejection id: 1): TypeError: Cannot read property 'friendly_name' of undefined
So it seems like the file is not being written but the loop continues. How can I ensure each file is written before moving on to the next iteration?
Thanks
If the function returns a Promise and you want to run the calls serially (one at a time) instead of in parallel (all started at the same time), you'll need to wait for each one to finish before starting the next, using then().
Also, note that your TEAM export is an object keyed by league name while args is an array, so indexing it with TEAM[args] only works through string coercion; that is where the other error comes from.
When you have a list of things to do, the best way to do them is to have a queue which you run until it is empty. In this case your team list is the queue, but since it is an export I'd recommend not changing it; instead, copy it to another array which you can alter:
const args = process.argv.slice(2);
const TEAM = require('./teams');
const Excel = require('exceljs');
const workbook = new Excel.Workbook();

const writeNextFile = (queue) => {
  // If we have nothing left in the queue, we're done.
  if (!queue.length) {
    return Promise.resolve();
  }
  const team = queue.shift(); // get the first element, take it out of the array
  console.log("Starting Excel File generation for " + team.team_name);
  var fhcw = require('../data_files/home/fhcw/' + team.friendly_name + '_home' + '.json');
  fhcw = fhcw.map(Number);
  // return this promise chain
  return workbook.xlsx.readFile('./excel_files/blank.xlsx')
    .then(function() {
      var worksheet = workbook.getWorksheet(1);
      // Write FHCW
      for (let i = 0; i < fhcw.length; i++) {
        const col = i + 6;
        worksheet.getCell('E' + col).value = fhcw[i];
      }
      console.log(team);
      // not sure what you thought TEAM[args] would have translated to, but it wouldn't have been a string, so I put ??? for now
      // also, making the assumption that writeFile() returns a Promise.
      return workbook.xlsx.writeFile('./excel_files/' + team.??? + '/' + team.friendly_name + '.xlsx');
    }).then(() => writeNextFile(queue));
}

writeNextFile(TEAM[args[0]].slice(0)) // copy the league's team list so we can alter it
  .then(() => console.log('Done'))
  .catch(err => console.error('There was an error', err));
Basically, our function takes an array, writes the first team, then calls itself recursively to handle the next one. Eventually everything resolves, and what you end up with is a Promise that resolves at the end.
When it comes to bare promise chains, you basically always need to chain them together; you won't be able to use a plain for loop for them, or any other standard loop for that matter.
If you want to write them all at once, it is a little cleaner, because you can just do a non-recursive map over each team and use Promise.all to know when they are done.
const writeTeamFile = (team) => {
  console.log("Starting Excel File generation for " + team.team_name);
  var fhcw = require('../data_files/home/fhcw/' + team.friendly_name + '_home' + '.json');
  fhcw = fhcw.map(Number);
  // return this promise chain
  return workbook.xlsx.readFile('./excel_files/blank.xlsx')
    .then(function() {
      var worksheet = workbook.getWorksheet(1);
      // Write FHCW
      for (let i = 0; i < fhcw.length; i++) {
        const col = i + 6;
        worksheet.getCell('E' + col).value = fhcw[i];
      }
      console.log(team);
      // same ??? placeholder and writeFile() assumption as above
      return workbook.xlsx.writeFile('./excel_files/' + team.??? + '/' + team.friendly_name + '.xlsx');
    });
}

Promise.all(TEAM[args[0]].map(writeTeamFile))
  .then(() => console.log('Done'))
  .catch(err => console.error('Error', err));
Promises are for async work, and when you have a group of tasks you generally want to run them in parallel because it's faster. That's why the parallel version is a little cleaner: we don't have to call anything recursively.
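For completeness: on newer Node versions (7.6+) an async function lets a plain for..of loop serialize the writes too; a minimal sketch reusing writeTeamFile from above:

async function writeAllFiles(queue) {
  for (const team of queue) {
    // each iteration waits for the previous workbook write to finish
    await writeTeamFile(team);
  }
}

writeAllFiles(TEAM[args[0]].slice(0))
  .then(() => console.log('Done'))
  .catch(err => console.error('There was an error', err));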
Since you are not writing to the same file, you can loop through the files and perform the read/write operations, binding the index value in each iteration with an IIFE before moving ahead.
Example code:
var totalFiles = TEAM[args].length;
var filesWrittenCount = 0;
for (var team = 0; team < TEAM[args].length; team++) {
  (function(t){
    // t is bound per iteration, so the async callback below sees the right index
    var fhcw = require('../data_files/home/fhcw/' + TEAM[args][t]['friendly_name'] + '_home' + '.json').map(Number);
    workbook.xlsx.readFile('./excel_files/blank.xlsx').then(function() {
      var worksheet = workbook.getWorksheet(1);
      // Write FHCW
      for (var i = 0; i < fhcw.length; i++) {
        var col = i + 6;
        worksheet.getCell('E' + col).value = fhcw[i];
      }
      console.log(TEAM[args][t])
      workbook.xlsx.writeFile('./excel_files/' + TEAM[args] + '/' + TEAM[args][t]['friendly_name'] + '.xlsx');
      // update number of files written.
      filesWrittenCount++;
      if (totalFiles == filesWrittenCount) {
        // final callback function - this states that all files are updated
        done();
      }
    });
  }(team));
}

function done() {
  console.log('All data has been loaded');
}
Hope this helps.
I'm new to promises and I'm sure there's an answer/pattern out there, but I just couldn't find one that was obviously right to me. I'm using Node.js v4.2.4 and https://www.promisejs.org/
This should be pretty easy, I think. I need to do multiple blocks of async work in a specific order, and one of the middle blocks will loop through an array of HTTP GETs.
//New Promise = asyncblock1 - FTP List, resolve the returned list array
//.then(asynchblock2(list)) - loop through list array and HTTP GET needed files
//.then(asynchblock3(list)) - update local log
I tried creating a new Promise, resolving it, passing the list to the .then, doing the GET loop, then the file update. I tried using a nested Promise.all inside asyncblock2, but it actually runs in reverse order (3, 2, then 1) due to the timing of those events. Thanks for any help.
EDIT: OK, this is the pattern that I'm using, which works; I just need a GET loop in the middle one now.
var p = new Promise((resolve, reject) => {
  setTimeout(() => {
    console.log('2 sec');
    resolve(1);
  },
  2000);
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1.5 sec');
      // instead of this section, here I'd like to do something like:
      // for(var i = 0; i < dynamicarray.length; i++){
      //   globalvar[i] = ftpclient.getfile(dynamicarray[i])
      // }
      // after this loop is done, resolve
      resolve(1);
    },
    1500);
  });
}).then(() => {
  return new Promise((resolve) => {
    setTimeout(() => {
      console.log('1 sec');
      resolve(1);
    },
    1000);
  });
});
EDIT: Here is the almost-working code!
var pORecAlert = (function(){
  var pa;
  var newans = [];
  var anstodownload = [];
  var anfound = false; //anfound in log file
  var nexttab;
  var lastchar;
  var po;
  var fnar = [];
  var antext = '';
  //-->> This section works fine; it's just creating a JSON object from a local file
  try {
    console.log('trying');
    porfile = fs.readFileSync('an_record_files.json', 'utf8');
    if (porfile == null || porfile == '') {
      console.log('No data in log file - uploaded_files_data.json being initialized!');
      plogObj = [];
    }
    else {
      plogObj = JSON.parse(porfile);
    }
  }
  catch (jpfp) {
    console.log('Error parsing log file for PO Receiving Alert: ' + jpfp);
    return endPORecAlertProgram();
  };
  if ((typeof plogObj) === 'object') {
    console.log('an_record_files.json log file found and parsed for PO Receiving Alert!');
  }
  else {
    return mkError(ferror, 'pORecAlert');
  };
  //finish creating JSON Object
  pa = new Client();
  pa.connect(ftpoptions);
  console.log('FTP Connection for FTP Check Acknowledgement begun...');
  pa.on('greeting', function(msg){
    console.log('FTP Received Greeting from Server for ftpCheckAcknowledgement: ' + msg);
  });
  pa.on('ready', function(){
    console.log('on ready');
    //START PROMISE LIST
    var listpromise = new Promise((reslp, rejlp) => {
      pa.list('/public_html/test/out', false, (cerr, clist) => {
        if (cerr) {
          return mkError(ferror, 'pORecAlert');
        }
        else {
          console.log('Resolving clist');
          reslp(clist);
        }
      });
    });
    listpromise.then((reclist) => {
      ftpplist:
      for (var pcl = 0; pcl < reclist.length; pcl++) {
        console.log('reclist iteration: ' + pcl);
        console.log('checking name: ', reclist[pcl].name);
        if (reclist[pcl].name.substring(0, 2) !== 'AN') {
          console.log('Not AN - skipping');
          continue ftpplist;
        }
        else { //found an AN
          for (var plc = 0; plc < plogObj.length; plc++) {
            if (reclist[pcl].name === plogObj[plc].anname) {
              //console.log('Found reclist[pcl].name in local log');
              anfound = true;
            };
          };
          if (anfound === false) {
            console.log('Found AN file to download: ', reclist[pcl].name);
            anstodownload.push(reclist[pcl].name);
          };
        };
      };
      console.log('anstodownload array:');
      console.dir(anstodownload);
      return anstodownload;
    }).then((fnar) => {
      //for simplicity/transparency, here is the array being overwritten
      fnar = new Array('AN_17650_37411.699.txt', 'AN_17650_37411.700', 'AN_17650_37411.701', 'AN_17650_37411.702.txt', 'AN_17650_37411.801', 'AN_17650_37411.802.txt');
      return Promise.all(fnar.map((gfname) => {
        var nsalertnames = [];
        console.log('Getting: ', gfname);
        debugger;
        pa.get(('/public_html/test/out/' + gfname), function(err, anstream){ //THE PROBLEM IS THAT THIS GET GETS TRIGGERED AN EXTRA TIME FOR EVERY OTHER FILE!!!
          antext = '';
          console.log('Get begun for: ', gfname);
          debugger;
          if (err) {
            ferror.nsrest_trace = 'Error - could not download new AN file!';
            ferror.details = err;
            console.log('Error - could not download new AN file!');
            console.log('************************* Exiting *************************');
            logError(ferror, gfname);
          }
          else {
            // anstream.on('data', (anchunk) => {
            //   console.log('Receiving data for: ', gfname);
            //   antext += anchunk;
            // });
            // anstream.on('end', () => {
            //   console.log('GET end for: ', gfname);
            //   //console.log('path to update - gfname ', gfname, '|| end text.');
            //   fs.appendFileSync(path.resolve('test/from', gfname), antext);
            //   console.log('Appended file');
            //   return antext;
            // }); //end end
          };
        }); //get end
      })); //end Promise.all and map
    }).then((res99) => {
      // pa.end();
      // return Promise(() => {
      console.log('end all. res99: ', res99);
      //   //res4(1);
      //   return 1;
      // });
    });
  });
})();
-->> What happens here:
So I added the almost-working code. What is happening is that for every other file, an additional GET request gets made (I don't know how it's being triggered), which fails with an "Unable to make data connection" error.
So for my iteration over this array of 6, there end up being 9 GET requests. Element 1 gets requested (works, expected), then 2 (works, expected), then 2 again (fails, unexpected, don't know why it was triggered). Then 3 (works, expected), then 4 (works, expected), then 4 again (fails, unexpected), etc.
What you need is Promise.all(); sample code for your app:
...
}).then(() => {
  return Promise.all(arry.map(item => ftpclient.getFile(item)))
}).then((resultArray) => {
...
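Fleshed out a little, as a sketch only: ftpList() and updateLog() below are hypothetical stand-ins for the FTP list call and the local log update, and it assumes ftpclient.getFile() returns a promise:

// asyncblock1: list the FTP directory
ftpList('/some/dir')
  .then(list => {
    // asyncblock2: fetch every file in the list in parallel
    return Promise.all(list.map(item => ftpclient.getFile(item)));
  })
  .then(files => {
    // asyncblock3: update the local log once every GET has resolved
    return updateLog(files);
  })
  .catch(err => console.error(err));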
So thanks for the help (and the negative votes with no useful direction!).
I actually reached out to a good Node.js programmer, and he said that there seemed to be a bug in the ftp module I was using: even when using a Bluebird .map, the quick succession of requests somehow kicked off an error. I ended up using promise-ftp, Bluebird, and promiseTaskQueue; the kicker was that I needed an interval. Without it, the quick succession of FTP requests would cause a strange, illogical error in the ftp module.
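A rough sketch of that final shape as described (assuming the promise-ftp and bluebird packages; fileNames and ftpoptions come from the surrounding code):

const PromiseFtp = require('promise-ftp');
const Promise = require('bluebird');

const ftp = new PromiseFtp();
ftp.connect(ftpoptions)
  .then(() => Promise.map(
    fileNames,
    // space the GETs out; firing them back-to-back is what tripped the module
    name => Promise.delay(500).then(() => ftp.get('/public_html/test/out/' + name)),
    { concurrency: 1 } // one data connection at a time
  ))
  .then(() => ftp.end())
  .catch(err => console.error(err));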
You need the async library. Use async.eachSeries in situations where you need to run asynchronous operations within a loop and then execute a function once all of them are complete. There are many variations depending on the flow you want, but this library does it all.
https://github.com/caolan/async
async.eachSeries(theArrayToLoop, function(item, callback) {
  // Perform the async operation on item here.
  doSomethingAsync(item).then(function(){
    // Tell async this item is done; only then does it start the next one
    callback();
  });
}, function(err){
  // All your async calls are finished; continue along here
});