possible nested asynchronous function - javascript

So I have read a lot of the different answers about asynchronous functions on here, but I think I am overthinking my problem, or I have been staring at it for too long and I can't figure it out, so your help is greatly appreciated.
I am parsing a CSV file and then trying to get the lat/long through another API, but I can't access the lat/lng outside of the function. Below is my code; I have commented it to the best of my ability. Please let me know if there are any questions or if there is a much better way to do this.
Thanks,
var location = []
function run() {
http.get(url, function(res) {
if(res.statusCode === 200) {
res.pipe(parse(function(err, data) {
for(var i = 1; i < data.length; i++) {
var info = data[i];
var passLoc = info[6].replace('block ', '')
passLoc = passLoc.replace(/ /g, "+")
getLoc(passLoc, function(loc) {
location.push(loc);
//If I console.log(location) here I get all the info I want, but it is printed 100 times because it runs once for each i in data.length
})
}
console.log(location) // logging this here gives me an empty array
}))
}else {
console.error('The address is unavailable. (%d)', res.statusCode);
}
})
}
function getLoc(x, callback) {
var url = "http://geodata.alleghenycounty.us/arcgis/rest/services/Geocoders/EAMS_Composite_Loc/GeocodeServer/findAddressCandidates?Street=" + x + "&City=Pittsburgh&State=PA&ZIP=&SingleLine=&outFields=&outSR=4326&searchExtent=&f=pjson";
http.get(url, function(res) {
var data = '';
res.on('data', function(chunk) {
data += chunk;
});
res.on('end', function() {
var d = JSON.parse(data);
var obj = d.candidates;
if(obj != '') {
var loc = obj[0].location
var lat = loc.x
var lng = loc.y
var location = [lat, lng];
callback(location)
} else {
callback(x);
}
});
res.on('error', function(err) {
callback("error!")
});
});
}

Your code tries to synchronously consume asynchronous data -- you're synchronously trying to access the results (location) before any of the asynchronous operations have finished.
As you have multiple async operations running in parallel, you can make use of async.parallel to aid in controlling the asynchronous flow:
var async = require('async');
function run() {
http.get(url, function(res) {
if(res.statusCode === 200) {
res.pipe(parse(function(err, data) {
// array of async tasks to execute
var tasks = [];
data.slice(1).forEach(function(info) {
var passLoc = info[6].replace('block ', '').replace(/ /g, '+');
// push an async operation to the `tasks` array
tasks.push(function(cb) {
getLoc(passLoc, function(loc) {
cb(null, loc);
});
});
});
// run all async tasks in parallel
async.parallel(tasks, function(err, locations) {
// consume data when all async tasks are finished
console.log(locations);
});
}));
}else {
console.error('The address is unavailable. (%d)', res.statusCode);
}
});
}
Also, for loops don't create a scope, so I've replaced it with a forEach in order to scope the info and passLoc variables inside each iteration; the short sketch below illustrates the difference.
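For illustration, a minimal standalone sketch of that scoping difference (not tied to the code above):
var results = [];
for (var i = 0; i < 3; i++) {
  // `var` gives the whole loop one shared `i`, so every callback sees the final value
  setTimeout(function () { results.push(i); }, 0); // pushes 3, 3, 3
}
[0, 1, 2].forEach(function (value) {
  // forEach gives each iteration its own `value`, so each callback keeps its own copy
  setTimeout(function () { results.push(value); }, 0); // pushes 0, 1, 2
});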
Here's a slightly more condensed version using ES5's Array#map:
var async = require('async');
function run() {
http.get(url, function(res) {
if(res.statusCode === 200) {
res.pipe(parse(function(err, data) {
async.parallel(
// map data items to async tasks
data.slice(1).map(function(info) {
return function(cb) {
var passLoc = info[6].replace('block ', '').replace(/ /g, '+');
getLoc(passLoc, function(loc) {
cb(null, loc);
});
};
}),
function(err, locations) {
// consume data when all async tasks are finished
console.log(locations);
}
);
}));
} else {
console.error('The address is unavailable. (%d)', res.statusCode);
}
});
}
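For comparison, here is a rough equivalent without the async library, using native Promises. This is only a sketch and assumes the same http, parse, url and getLoc from the question:
function run() {
  http.get(url, function(res) {
    if (res.statusCode === 200) {
      res.pipe(parse(function(err, data) {
        // wrap each getLoc call in a Promise and wait for all of them
        var promises = data.slice(1).map(function(info) {
          var passLoc = info[6].replace('block ', '').replace(/ /g, '+');
          return new Promise(function(resolve) {
            getLoc(passLoc, resolve);
          });
        });
        Promise.all(promises).then(function(locations) {
          // all results, in the same order as the rows
          console.log(locations);
        });
      }));
    } else {
      console.error('The address is unavailable. (%d)', res.statusCode);
    }
  });
}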

Related

Google Custom Search API for NodeJS

I'm trying to get the first 5 pages of search results with google custom search API ...
So far I've tried to achieve the result using nested functions, but with no luck.
I know that I'm messing up the callbacks, but so far I haven't figured out the correct way (without using a promises library) to solve my problem.
Could some of you point me in the right direction?
Thanks.
app.get('/assesment', function(req, res){
console.log('route: /assesment');
var api_key = '';
var customsearch = google.customsearch('v1');
var response = "";
var number_of_pages = 5;
var next_page = 1;
var exit = 0
const CX = 'XXXXX';
const API_KEY = 'XXXXX';
const SEARCH = 'Test Query';
console.log('start');
// console.log('QUERY PAGE: '+pages);
doSearch(CX, SEARCH, API_KEY, next_page, function(resp){
res.send(resp);
});
//
// Functions
//
function doSearch(_cx, _search, _api_key, _start, callback ){
var response = '';
customsearch.cse.list({ cx: _cx, q: _search, auth: _api_key, start: _start }, function (err, resp) {
if (err) {
response = JSON.stringify(err);
} else {
// Got the response from custom search
console.log('Result: ' + resp.searchInformation.formattedTotalResults);
if (resp.items && resp.items.length > 0) {
console.log('First result of '+resp.items.length+' is ' + resp.items[0].title);
for (var i = 0; i < resp.items.length; i++) {
response += resp.items[i].title+"<br>";
response += resp.items[i].link +"<br><hr>";
}
}
res = {
response: response,
next_page: resp.queries.nextPage
}
// res =
}
_start += 1;
if (_start < 6 ) {
doSearch(_cx, _search, _api_key, _start, _start*10+1,
function(resp){
response += resp;
});
}
if (callback && typeof callback === "function") callback(response);
});
};
});
You can use a third-party service like SerpApi to scrape Google and get back structured JSON.
Example using the Node.js library to get the 4th page of results:
var gsr = require('GoogleSearchResults')
let serp = new gsr.GoogleSearchResults("demo")
serp.json({
q: "Coffee",
num: 10,
start: 30,
location: "Portland"
}, (result) => {
console.log(result)
})
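If you would rather stay with the Google Custom Search client and plain callbacks, one approach is to fetch the pages sequentially by recursing and accumulating, and only invoke the outer callback once the last page is in. This is just a sketch that reuses the customsearch client, CX, API_KEY, SEARCH and res from the question:
function fetchPages(startIndex, pagesLeft, accumulated, callback) {
  if (pagesLeft === 0) return callback(null, accumulated);
  customsearch.cse.list({ cx: CX, q: SEARCH, auth: API_KEY, start: startIndex }, function (err, resp) {
    if (err) return callback(err);
    var html = (resp.items || []).map(function (item) {
      return item.title + "<br>" + item.link + "<br><hr>";
    }).join('');
    // each page holds 10 results, so the next page starts 10 further on
    fetchPages(startIndex + 10, pagesLeft - 1, accumulated + html, callback);
  });
}
fetchPages(1, 5, '', function (err, html) {
  if (err) return res.send(JSON.stringify(err));
  res.send(html);
});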

Javascript for loop wait for callback

I have this function:
function tryStartLocalTrendsFetch(woeid) {
var userIds = Object.keys(twitClientsMap);
var isStarted = false;
for (var i = 0; i < userIds.length; i++) {
var userId = userIds[i];
var twitClientData = twitClientsMap[userId];
var isWoeidMatch = (woeid === twitClientData.woeid);
if (isWoeidMatch) {
startLocalTrendsFetch(woeid, twitClientData, function (err, data) {
if (err) {
// Couldn't start local trends fetch for userId: and woeid:
isStarted = false;
} else {
isStarted = true;
}
});
// This obviously will not work, because startLocalTrendsFetch is async and the loop moves on immediately
if (isStarted) {
break;
}
}
}
console.log("No users are fetching woeid: " + woeid);
}
The gist of this method is that I want the line if (isStarted) { break; } to work. The reason is that if it's started it should not continue the loop and try to start another one.
I'm doing this in NodeJS.
Try using a recursive definition instead:
function tryStartLocalTrendsFetch(woeid) {
var userIds = Object.keys(twitClientsMap);
recursiveDefinition (userIds, woeid);
}
function recursiveDefinition(userIds, woeid, userIndex) {
  userIndex = userIndex || 0;
  if (userIndex >= userIds.length) {
    // ran out of users without starting a fetch
    return console.log("No users are fetching woeid: " + woeid);
  }
  var userId = userIds[userIndex];
  var twitClientData = twitClientsMap[userId];
  var isWoeidMatch = (woeid === twitClientData.woeid);
  if (!isWoeidMatch) {
    // not a match, try the next user
    return recursiveDefinition(userIds, woeid, userIndex + 1);
  }
  startLocalTrendsFetch(woeid, twitClientData, function (err, data) {
    if (err) {
      // this user couldn't start the fetch, try the next one
      recursiveDefinition(userIds, woeid, userIndex + 1);
    }
    // on success, simply stop recursing: no further users are tried
  });
}
You may also use async (npm install async):
var async = require('async');
async.forEach(row, function(col, callback){
// Do your magic here
callback(); // signals that this iteration is finished (it does not break out of the loop)
}, function(err){
if(err) throw err;
});
More material to help you out: Node.js - Using the async lib - async.foreach with object
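If the goal is specifically "stop as soon as one user has started the fetch", async.detectSeries is a closer fit, since it visits the items one at a time and stops at the first item whose test passes. A sketch, assuming async 2.x (where the test callback takes an error and a truth value) and the same twitClientsMap and startLocalTrendsFetch as in the question:
var async = require('async');
function tryStartLocalTrendsFetch(woeid) {
  var userIds = Object.keys(twitClientsMap);
  async.detectSeries(userIds, function (userId, callback) {
    var twitClientData = twitClientsMap[userId];
    if (woeid !== twitClientData.woeid) return callback(null, false);
    startLocalTrendsFetch(woeid, twitClientData, function (err, data) {
      callback(null, !err); // true stops the iteration at this user
    });
  }, function (err, startedUserId) {
    if (!startedUserId) console.log("No users are fetching woeid: " + woeid);
  });
}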

How to correctly structure an asynchronous program to ensure correct results?

I have a nodejs program that requests a series of XML files, parses them and then puts the output in an array which is written to disk as a CSV file.
The program mostly works; however, occasionally the files end up in the wrong order in the array.
I want the order of the results to be the same as the order of the URLs. The URLs are stored in an array, so when I get the XML file I check what the index of the URL was in the source array and insert the results at the same index in the destination array.
Can anyone see the flaw that is allowing the results to end up in the wrong order?
addResult = function (url, value, timestamp) {
data[config.sources.indexOf(url)] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
fs.readFile("config.json", function (fileError, data) {
var eachSource, processResponse = function (responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
};
if (fileError) {
console.log(fileError);
} else {
config = JSON.parse(data); //read in config file
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all " " with "%20"
request(config.sources[eachSource], processResponse); //request each source
}
}
});
var writeOutputData, createOutputData, numResults = 0, data = [], eachDataPoint, multipliedFlow = 0;
writeOutputData = function (output, attempts) {
csv.writeToPath(config.outputFile, [ output ], {
headers : false
}).on("finish", function () {
console.log("successfully wrote data to: ", config.outputFile);
}).on("error", function (err) { //on write error
console.log(err);
if (attempts < 2) { //if there has been less than 3 attempts try writing again after 500ms
setTimeout(function () {
writeOutputData(output, attempts + 1);
}, 500);
}
});
};
createOutputData = function () {
var csvTimestamp, output = [];
if (config.hasOwnProperty("timestampFromSource")) {
csvTimestamp = data.filter(function (a) {
return a.url === config.sources[config.timestampFromSource];
})[0].timestamp.format("HHmm");
console.log("timestamp from source [" + config.timestampFromSource + "]:", csvTimestamp);
} else {
csvTimestamp = data.sort(function (a, b) { //sort results from oldest to newest
return a.timestamp.unix() - b.timestamp.unix();
});
csvTimestamp = csvTimestamp[0].timestamp.format("HHmm");//use the oldest date for the timestamp
console.log("timestamp from oldest source:", csvTimestamp);
}
//build array to represent data to be written
output.push(config.plDestVar); //pl var head address first
output.push(config.sources.length + 1); //number if vars to import
output.push(csvTimestamp); //the date of the data
for (eachDataPoint = 0; eachDataPoint < data.length; eachDataPoint++) { //add each data point
if (config.flowMultiplier) {
multipliedFlow = Math.round(data[eachDataPoint].value * config.flowMultiplier); //round to 1dp and remove decimal by *10
} else {
multipliedFlow = Math.round(data[eachDataPoint].value * 10); //round to 1dp and remove decimal by *10
}
if (multipliedFlow > 32766) {
multipliedFlow = 32766;
} else if (multipliedFlow < 0) {
multipliedFlow = 0;
}
output.push(multipliedFlow);
}
console.log(output);
writeOutputData(output, 0); //write the results, 0 is signalling first attempt
};
I think the URL-to-index code needs debugging.
Here is an example that uses an object pre-populated with keys in the for loop.
var http = require('http');
var fs = require("fs");
var allRequestsComplete = function(results){
console.log("All Requests Complete");
console.log(results);
};
fs.readFile("urls.json", function (fileError, data) {
var responseCount = 0;
if (fileError) {
console.log(fileError);
} else {
var allResponses = {};
config = JSON.parse(data); //read in config file
var requestComplete = function(url, fileData){
responseCount++;
allResponses[url] = fileData;
if(responseCount===config.sources.length){
allRequestsComplete(allResponses);
}
};
for (var eachSource = 0; eachSource < config.sources.length; eachSource++) {
(function(url){
allResponses[url] = "Waiting";
http.get({host: url,path: "/"}, function(response) {
response.on('error', function (chunk) {
requestComplete(url, "ERROR");
});
var str = ''
response.on('data', function (chunk) {
str += chunk;
});
response.on('end', function () {
requestComplete(url, str);
});
});
}(config.sources[eachSource].replace(/ /g, "%20").replace("http://", "")));
}
}
});
I agree with @Kevin B: you cannot assume that async callbacks will return in the same order in which you send them. However, you can ensure the order by threading an index through processResponse.
Say you add the following to addResult:
addResult = function (index, url, value, timestamp) {
data[index] = {
value : value,
timestamp : timestamp,
url : url
};
numResults++;
if (numResults === config.sources.length) { //once all results are in build the output file
createOutputData();
}
}
and use an extra function to call your request
function doRequest(index, url) {
request(url, function(responseError, response, body) {
if (responseError) {
console.log(responseError);
} else {
parseXML(body, {
explicitArray : false
}, function (xmlError, result) {
if (xmlError) {
console.log(xmlError);
}
addResult(index, response.request.uri.href, result.Hilltop.Measurement.Data.E.I1, moment(result.Hilltop.Measurement.Data.E.T));
});
}
});
}
then you can also change your loop to:
for (eachSource = 0; eachSource < config.sources.length; eachSource++) {
config.sources[eachSource] = config.sources[eachSource].replace(/ /g, "%20"); //replace all " " with "%20"
doRequest(eachSource, config.sources[eachSource]); //request each source
}
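As an aside, the async library used earlier on this page can express this pattern directly: async.map runs the requests in parallel but delivers the results array in the same order as the input, so the index bookkeeping disappears. A sketch, assuming the same request, parseXML, moment, config, data and createOutputData as in the question:
var async = require('async');
async.map(config.sources, function (source, callback) {
  request(source.replace(/ /g, "%20"), function (responseError, response, body) {
    if (responseError) return callback(responseError);
    parseXML(body, { explicitArray: false }, function (xmlError, result) {
      if (xmlError) return callback(xmlError);
      callback(null, {
        value: result.Hilltop.Measurement.Data.E.I1,
        timestamp: moment(result.Hilltop.Measurement.Data.E.T),
        url: response.request.uri.href
      });
    });
  });
}, function (err, results) {
  if (err) return console.log(err);
  data = results; // same order as config.sources
  createOutputData();
});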

How to handle error of http call when site is down

I would like to replace the if(body.toString().indexOf("404") !== 0) block with some generic error handling code but I can't seem to see where it throws an error when the target host is down. So far, this is the only hacky method I've managed to put together that works.
app.get('/', function(req, res){
var sites = ["foo.com", "bar.com"];
var returnObj = [];
var index = 0;
getSites(index);
// Recursively add data from each site listed in "sites" array
function getSites(index) {
if(index < sites.length) {
var url = sites[index];
var _req = http.get({host: url}, function(_res) {
var bodyChunks = [];
_res.on('data', function(chunk) {
bodyChunks.push(chunk);
}).on('end', function() {
var body = Buffer.concat(bodyChunks);
if(body.toString().indexOf("404") !== 0) {
returnObj.push(JSON.parse(body));
}
getSites(++index);
});
});
_req.on('error', function(e) {
console.log('ERROR: ' + e.message);
});
} else {
res.json(returnObj);
res.end();
}
}
});
You can check the status code of the response inside the http.get callback (it lives on the response object, _res in your code):
if (_res.statusCode === 200) {
  // Response okay.
}
Here's a list of the status codes.
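Putting that together with the 'error' handler you already have, here is a sketch of how getSites from the question could drop the indexOf("404") check (same sites, returnObj and index variables as above):
function getSites(index) {
  if (index >= sites.length) return res.json(returnObj);
  var _req = http.get({ host: sites[index] }, function (_res) {
    if (_res.statusCode !== 200) {
      _res.resume(); // discard the body
      return getSites(++index); // skip this site and carry on
    }
    var bodyChunks = [];
    _res.on('data', function (chunk) {
      bodyChunks.push(chunk);
    }).on('end', function () {
      returnObj.push(JSON.parse(Buffer.concat(bodyChunks)));
      getSites(++index);
    });
  });
  _req.on('error', function (e) {
    // fires when the host is down or unreachable
    console.log('ERROR: ' + e.message);
    getSites(++index);
  });
}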

How to use global error handling code in node.js for entire api call

I have an API call which involves more than one function. Instead of applying error handling for each and every method, is it possible to use global error handling code that sends the error to the UI developers?
The code is given below:
app.post('/billing/pricingdetails', function (req, res) {
console.log('pricing api called');
var workload = req.body;
var resourcelevelPricing = {};
var response = {};
var workloadinfo = {
workloadId: workload.workloadId,
ownerId: workload.ownerId,
uniqueName: workload.uniqueName,
name: workload.name
}
var pricing = {}
var allresourceIdentifiers;
if (workload.elements && workload.elements.length > 0) {
var elementlevelpricingSummary = {};
var elementArray = [];
var allresourceIdentifierArray = [];
var elementinfo = {};
var metadataModified = {};
var elementsParam = workload.elements;
// handle configurable resource
var configurableElementarray = [];
// create array of all the elements in workloadjson - to be used for resourcelevel (instance/image), charamountunitlevel, resourcetypelevel pricing detail
for (var index in elementsParam) {
// if condition skips the uri of configurable resources - handle configurable resource
if(!elementsParam[index].parameters.ResourceParameters)
{
allresourceIdentifierArray.push(elementsParam[index].uri);
if (elementsParam[index].parameters.imageUri) {
allresourceIdentifierArray.push(elementsParam[index].parameters.imageUri);
}
}
}
var allresourceIdentifiers = allresourceIdentifierArray.join(',');
// call the functionalities that gives the each level of pricing detail synchronously to construct the workload json
async.series([
function (callback) {
getpricingSummary(elementsParam, function (err, workloadinfo) {
if(err){
}
else
{
callback(null, workloadinfo);
}
});
},
function (callback) {
getPricingforResourceIdentifiers(allresourceIdentifiers, function (err, pricingDetail) {
pricing.resourceLevel = pricingDetail;
callback(null, pricingDetail);
});
},
function (callback) {
getchargeamountunitlevelPricing(allresourceIdentifiers, function (err, pricingDetail) {
//merge configurable resource with concrete resource pricing details - handle configurable resource
if(configurableElementarray.length > 0)
{
var concatednatedArray = pricingDetail.concat(configurableElementarray);
var finalResult = [];
var i = concatednatedArray.reduce(function (result, o) {
var key = o.chargeAmountUnit + o.currencyCode;
if (!(key in result)) {
result.arr.push(result[key] = o);
finalResult.push(result);
}
else {
result[key].chargeAmount += Number(o.chargeAmount);
}
return result;
}, { arr: [] }).arr;
pricing.chargeamountunitLevel = i;
trace.info(i);
}
else
{
pricing.chargeamountunitLevel = pricingDetail;
}
callback(null, pricingDetail);
});
},
function (callback) {
getresourcetypelevelPricing(allresourceIdentifiers, function (err, pricingDetail) {
if(configurableElementarray.length > 0)
{
var concatednatedArray = pricingDetail.concat(configurableElementarray);
var i = concatednatedArray.reduce(function (result, o) {
var key = o.chargeAmountUnit + o.currencyCode + o.Name;
if (!(key in result)) {
result.arr.push(result[key] = o);
}
else {
result[key].chargeAmount += o.chargeAmount;
}
return result;
}, { arr: [] }).arr;
pricing.resourcetypeLevel = i;
trace.info(i);
}
else
{
pricing.resourcetypeLevel = pricingDetail;
}
callback(null, pricingDetail);
});
}
],
function (err, result) {
workloadinfo.pricing = pricing;
res.send(workloadinfo);
});
// get element level pricing summary for each elements (vm/vs) in the array within workload json - the output to be appended within metadata of workload json
function getpricingSummary(elementsParam, callback) {
async.forEachSeries(elementsParam, createResponse, function (err,result) {
return callback(null, result);
});
};
// this method called by async.forEachSeries passing each elements (vm/vs) of workload
function createResponse(elements, callback) {
var resourceIdentifierArray = [];
elementinfo = elements;
resourceIdentifierArray.push(elements.uri);
if (elements.parameters.imageUri) {
resourceIdentifierArray.push(elements.parameters.imageUri);
}
// build string of resourceIdentifier (instance/image) for input element
var resourceIdentifiers = resourceIdentifierArray.join(',');
console.log(resourceIdentifiers);
if(elements.parameters.ResourceParameters)
{
trace.info('1');
trace.info(elements.parameters.ResourceParameters);
var configJson = JSON.parse(elements.parameters.ResourceParameters);
trace.info(Number(configJson.cpuCount));
metadataModified = elements.metadata;
// TODO : Remove this hard-coding
elementlevelpricingSummary.Name = 'Hardware';
if(configJson.totalUnitPrice)
{
var chargeAmount = configJson.totalUnitPrice;
elementlevelpricingSummary.chargeAmount = Math.round(chargeAmount * 100)/100;
}
if(configJson.ChargeAmountUnit)
{
var chargeAmountUnit = configJson.ChargeAmountUnit;
elementlevelpricingSummary.chargeAmountUnit = configJson.ChargeAmountUnit;
}
if(configJson.CurrencyCode)
{
var currencyCode = configJson.CurrencyCode;
elementlevelpricingSummary.currencyCode = configJson.CurrencyCode;
}
metadataModified.pricingSummary = elementlevelpricingSummary;
configurableElementarray.push(elementlevelpricingSummary);
// delete original metadata from workload json (to be replaced by metadata containing pricing summary)
delete elementinfo.metadata;
elementinfo.metadata = metadataModified;
elementArray.push(elementinfo);
// global workloadinfo variable is appended with array of elements with its pricing summary within metadata of respective elements
workloadinfo.elements = elementArray;
return callback();
}
else
{
// Get element level pricing summary
mysql.elementlevelpricing(resourceIdentifiers, conn, function (result) {
elementlevelpricingSummary = result;
metadataModified = elements.metadata;
metadataModified.pricingSummary = elementlevelpricingSummary;
// delete original metadata from workload json (to be replaced by metadata containing pricing summary)
delete elementinfo.metadata;
elementinfo.metadata = metadataModified;
elementArray.push(elementinfo);
// global workloadinfo variable is appended with array of elements with its pricing summary within metadata of respective elements
workloadinfo.elements = elementArray;
return callback(null,workloadinfo);
});
}
};
function getPricingforResourceIdentifiers(resourceIdentifiers, callback) {
mysql.pricingDetail(resourceIdentifiers, conn, function (result) {
return callback(null, result);
});
};
function getchargeamountunitlevelPricing(resourceIdentifiers, callback) {
mysql.chargeamountunitlevelPricing(resourceIdentifiers, conn, function (result) {
return callback(null, result);
});
};
function getresourcetypelevelPricing(resourceIdentifiers, callback) {
mysql.resourcetypelevelPricing(resourceIdentifiers, conn, function (result) {
return callback(null, result);
});
};
};
});
With Express, you can install an error handler which will be called when an error occurs in any of your routes:
// somewhere at the end of your middleware/route chain
app.use(function(err, req, res, next) {
res.send(500, err.message); // or whatever you want to send back
});
It would still be best to rethrow any errors that occur in your code:
if (err) throw err;
Also, since you're using async, you can always propagate errors back to it:
if (err) return callback(err);
And handle the errors in the final callback.
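For example, a minimal sketch of how those pieces fit together in the route above, assuming the helper functions are changed to call callback(err) on failure as suggested:
app.post('/billing/pricingdetails', function (req, res, next) {
  async.series([
    function (callback) {
      getpricingSummary(elementsParam, callback); // errors flow straight through
    },
    function (callback) {
      getPricingforResourceIdentifiers(allresourceIdentifiers, callback);
    }
  ], function (err, results) {
    if (err) return next(err); // hands the error to the global error handler
    workloadinfo.pricing = pricing;
    res.send(workloadinfo);
  });
});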
