Node.js non-blocking and EventEmitter. Refresh URL - javascript

First of all, I'm not sure if the following is a non-blocking problem.
I'm getting started with https://github.com/sahat/hackathon-starter
Currently I try to read all files out of a folder and later process all files...
I used EventEmitter to kind of manage the workflow.
I want to clear all arrays when the URL is refreshed or loaded again, but somehow if I reload the URL there seems to be something left inside the arrays, which causes multiple outputs of the same data.
At the moment I would just be happy to have a correct console.log output.
/**
 * GET /
 * Home page.
 */
var fs = require('fs');
//XML
var jsxml = require("node-jsxml");
var Namespace = jsxml.Namespace,
    QName = jsxml.QName,
    XML = jsxml.XML,
    XMLList = jsxml.XMLList;
//EventEmitter
var EventEmitter = require('events').EventEmitter;
var dateinamenEE = new EventEmitter();
var dateiinhaltEE = new EventEmitter();
var dateinamen = [];
var dateiinhalt = [];

exports.index = function(req, res) {
  fs.readdir('./data', function (err, files) {
    if (!err) {
      files.forEach(function(value) {
        dateinamen.push(value);
      });
      dateinamenEE.emit('dateinamen_ready');
    } else {
      throw err;
    }
  });

  dateinamenEE.on('dateinamen_ready', function() {
    dateinamen.forEach(function(value) {
      var buf = fs.readFileSync('./data/' + value, "utf8");
      var xml = new XML(buf);
      var list = xml.descendants("suggestion");
      var ergebnis = "";
      var basiswort = "";
      var buchstabe = "";
      var obj = null;
      list.each(function(item, index) {
        ergebnis = item.attribute('data').toString()
        //basiswort = value.replace("%2B", " ");
        //basiswort = basiswort.replace(".xml", "");
        //var pieces = buchstabe.split(" ");
        obj = {k: basiswort, b: buchstabe, e: ergebnis};
        dateiinhalt.push(obj);
      });
    });
    dateiinhaltEE.emit('dateiinhalt_ready');
  });

  dateiinhaltEE.on('dateiinhalt_ready', function() {
    //console.log(dateiinhalt);
    console.log("dateinamen:" + dateinamen.length);
    console.log("dateiinhalt:" + dateiinhalt.length);
  });

  res.render('home', {
    title: 'Home'
  });
};
If I log the length of the two arrays, the output shows the following. First time loading the URL:
Express server listening on port 3000 in development mode
dateinamen:2
dateiinhalt:20
Second time / refreshing the url:
GET / 200 898.198 ms - -
GET /fonts/fontawesome-webfont.woff2?v=4.3.0 304 12.991 ms - -
GET /favicon.ico 200 4.516 ms - -
dateinamen:4
dateiinhalt:60
dateinamen:4
dateiinhalt:60
dateinamen:4
dateiinhalt:100
dateinamen:4
dateiinhalt:100
GET / 200 139.259 ms - -
What causes the code to extend the arrays while reloading the page?

The non-blocking problem is due to your for(...) loops.
Change them to: array.forEach(function(elem, index){});
EDIT
The arrays should be initialized inside the index function:
exports.index = function(req, res) {
  var dateinamen = [];
  var dateiinhalt = [];
  ...
Also, I'm not sure you need the use of EventEmitter.
Something like
fs.readdir('./data', function (err, files) {
  if (!err) {
    files.forEach(function(file) {
      var buf = fs.readFileSync('./data/' + file, "utf8");
      var xml = new XML(buf);
      var list = xml.descendants("suggestion");
      var ergebnis = null;
      var obj = null;
      list.each(function(item, index) {
        ergebnis = item.attribute('data').toString();
        obj = {k: file, v: ergebnis};
        dateiinhalt.push(obj);
      });
    });
    console.log(dateiinhalt);
  } else {
    throw err;
  }
});
could do the job, no?
(I wanted to say this as a comment, but I'm still missing reputation)
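One more observation (an assumption based on the duplicated log output, not something verified against this exact app): dateinamenEE and dateiinhaltEE also live at module level, so every request registers another 'dateinamen_ready' / 'dateiinhalt_ready' listener via .on(), and each emit then fires all the handlers accumulated from earlier reloads. If you do keep the emitters, creating them per request, or registering with once() so a handler detaches after its first run, avoids that. A minimal sketch:
var EventEmitter = require('events').EventEmitter;

exports.index = function(req, res) {
  // request-local state: nothing survives across reloads
  var dateinamenEE = new EventEmitter();
  var dateinamen = [];

  // once() removes the listener after the first 'dateinamen_ready' event
  dateinamenEE.once('dateinamen_ready', function() {
    console.log("dateinamen:" + dateinamen.length);
  });

  // ... fill dateinamen as before, then:
  // dateinamenEE.emit('dateinamen_ready');
};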

Related

Session.Create(FlowFile) Transfers with no content

I am trying to build an ExecuteScript processor for NiFi.
It handles a JSON file, splits it and sends it to the next processor, which is a MongoDB writer.
The logic works so far. The main problem is that I cannot get the processor to create and send a new FlowFile for each new JSON document created out of the input JSON. I got it working a bit, but unfortunately all the FlowFiles come out empty. Is there something wrong with the flow (from creation of a new FlowFile to sending it)?
Here is a code snippet:
var flowFile = session.get();
if (flowFile != null) {
  var StreamCallback = Java.type("org.apache.nifi.processor.io.StreamCallback");
  var IOUtils = Java.type("org.apache.commons.io.IOUtils");
  var StandardCharsets = Java.type("java.nio.charset.StandardCharsets");
  try {
    flowFile = session.write(flowFile,
      new StreamCallback(function (inputStream, outputStream) {
        var content = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
        var json = JSON.parse(content);
        var events = json["events"];
        var mongoEvent = "";
        var flowFileList = [];
        for (var x = 0; x < json["events"].length; x++) {
          try {
            var newFlowFile = session.create();
            mongoEvent = constructJSONEvent(x, json); // Here we will receive our new JSON
            outputStream.write(mongoEvent.getBytes(StandardCharsets.UTF_8));
            session.transfer(newFlowFile, REL_SUCCESS);
          } catch (e) {
            session.transfer(newFlowFile, REL_FAILURE);
          }
        }
      }));
    session.transfer(flowFile, REL_SUCCESS);
  } catch (e) {
    session.transfer(flowFile, REL_FAILURE);
  }
}
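No answer was posted for this one, but here is a hedged reading of the snippet: outputStream in the StreamCallback belongs to the session.write() of the original flowFile, so the bytes written there never end up in the FlowFiles made with session.create(), which would explain why they arrive empty. A minimal sketch of giving each new FlowFile its own write, using NiFi's InputStreamCallback / OutputStreamCallback (constructJSONEvent is the question's own helper; removing the original FlowFile at the end is an assumption about the intended flow):
var InputStreamCallback = Java.type("org.apache.nifi.processor.io.InputStreamCallback");
var OutputStreamCallback = Java.type("org.apache.nifi.processor.io.OutputStreamCallback");
var IOUtils = Java.type("org.apache.commons.io.IOUtils");
var StandardCharsets = Java.type("java.nio.charset.StandardCharsets");

var flowFile = session.get();
if (flowFile != null) {
  // read the input JSON once
  var content = null;
  session.read(flowFile, new InputStreamCallback(function (inputStream) {
    content = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
  }));
  var json = JSON.parse(content);
  for (var x = 0; x < json["events"].length; x++) {
    var mongoEvent = constructJSONEvent(x, json); // helper from the question
    var newFlowFile = session.create(flowFile);   // child FlowFile, keeps provenance
    // write THIS event into THIS FlowFile's own output stream
    newFlowFile = session.write(newFlowFile, new OutputStreamCallback(function (outputStream) {
      outputStream.write(mongoEvent.getBytes(StandardCharsets.UTF_8));
    }));
    session.transfer(newFlowFile, REL_SUCCESS);
  }
  session.remove(flowFile); // the original has been fully split
}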

Is Promise.all not working on the second time through? Why not?

I'm just finishing off this basic web scraper project for a t-shirt website.
It enters through one hardcoded url, the home page. It will search for any product pages and add them to a url set. If it finds another link (remainder), it will scrape that again and find any more product pages. It adds the product pages to urlSet and will then scrape those again, grab the tshirt data (price, img, title), convert it, and then write it to a CSV file.
For some reason, this is not working on the second run-through of the scrape with 'remainder'.
If I remove the second scrape of url, everything works out fine and the file gets written correctly. But if I want to get the other product pages, it seems to be failing somewhere.
Here is my code. I apologise for posting so much of it, but I don't know how it will be understood properly without the right context; hopefully it's been commented okay:
//TASK: Create a command line application that goes to an ecommerce site to get the latest prices.
//Save the scraped data in a spreadsheet (CSV format).
'use strict';

//Modules being used:
var cheerio = require('cheerio');
var json2csv = require('json2csv');
var request = require('request');
var moment = require('moment');
var fs = require('fs');

//hardcoded url
var url = 'http://shirts4mike.com/';
//url for tshirt pages
var urlSet = new Set();
var remainder;
var tshirtArray = [];

const requestPromise = function(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(error, response, html) {
      if(error) return reject(error);
      if(!error && response.statusCode == 200){
        return resolve(html);
      }
    });
  });
}

// Go into webpage via url, load html and grab links shirt in url
function scrape (url) {
  console.log("Currently scraping " + url)
  return requestPromise(url)
    .then(function(html) {
      var $ = cheerio.load(html);
      var links = [];
      //get all the links
      $('a[href*=shirt]').each(function(){
        var a = $(this).attr('href');
        //add into link array
        links.push(url + a);
      });
      // return array of links
      return links;
    });
}

function nextStep (arrayOfLinks) {
  var promiseArray = [];
  console.log(arrayOfLinks);
  for(var i = 0; i < arrayOfLinks.length; i++){
    promiseArray.push(requestPromise(arrayOfLinks[i]));
  }
  //return both the html of pages and their urls
  return Promise.all(promiseArray)
    .then(function(arrayOfHtml){
      return {arrayOfHtml: arrayOfHtml, arrayOfUrls: arrayOfLinks};
    });
}

//go through the html of each url and add to urlSet if there is a checkout button
//add to remainder otherwise to rescrape
function lastStep (obj){
  for(var i = 0; i < obj.arrayOfHtml.length; i++){
    var $ = cheerio.load(obj.arrayOfHtml[i]);
    //if page has a submit it must be a product page
    if($('[type=submit]').length !== 0){
      //add page to set
      urlSet.add(obj.arrayOfUrls[i]);
      console.log(obj.arrayOfUrls[i]);
    } else if(remainder == undefined) {
      //if not a product page, add it to remainder so another scrape can be performed.
      remainder = obj.arrayOfUrls[i];
      console.log("The remainder is " + remainder)
    }
  }
  //return remainder for second run-through of scrape
  return remainder;
}

//iterate through urlSet (product pages) and grab html
function lastScraperPt1(){
  //call lastScraper so we can grab data from the set (product pages)
  //scrape set, product pages
  var promiseArray = [];
  for(var item of urlSet){
    var url = item;
    promiseArray.push(requestPromise(url));
  }
  return Promise.all(promiseArray)
    .then(function(arrayOfHtml){
      return arrayOfHtml;
    });
}

//iterate over the html of the product pages and store data as objects
function lastScraperPt2(html){
  for(var i = 0; i < html.length; i++){
    var $ = cheerio.load(html[i]);
    //grab data and store as variables
    var price = $('.price').text();
    var imgURL = $('.shirt-picture').find('img').attr('src');
    var title = $('body').find('.shirt-details > h1').text().slice(4);
    var tshirtObject = {};
    //add values into tshirt object
    tshirtObject.Title = title;
    tshirtObject.Price = price;
    tshirtObject.ImageURL = imgURL;
    tshirtObject.URL = url;
    tshirtObject.Date = moment().format('MMMM Do YYYY, h:mm:ss a');
    //add the object into the array of tshirts
    tshirtArray.push(tshirtObject);
  }
  convertJson2Csv();
}

//convert tshirt objects and save as CSV file
function convertJson2Csv(){
  //The scraper should generate a folder called `data` if it doesn’t exist.
  var dir = './data';
  if(!fs.existsSync(dir)){
    fs.mkdirSync(dir);
  }
  var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Date'];
  //convert tshirt data into CSV and pass in fields
  var csv = json2csv({ data: tshirtArray, fields: fields });
  //Name of file will be the date
  var fileDate = moment().format('MM-DD-YY');
  var fileName = dir + '/' + fileDate + '.csv';
  //Write file
  fs.writeFile(fileName, csv, {overwrite: true}, function(err) {
    console.log('file saved');
    if (err) throw err;
  });
}

scrape(url) //scrape from original entry point
  .then(nextStep)
  .then(lastStep)
  .then(scrape) //scrape again but with remainder url
  .then(nextStep)
  .then(lastStep)
  .then(lastScraperPt1)
  .then(lastScraperPt2)
  .catch(function(err) {
    // handle any error from any request here
    console.log(err);
  });
I'm console logging the arrayOfLinks in nextStep so I can see that they are being grabbed properly; I just cannot work out why they aren't being passed through to lastStep properly.
Currently scraping http://shirts4mike.com/
[ 'http://shirts4mike.com/shirts.php',
'http://shirts4mike.com/shirts.php',
'http://shirts4mike.com/shirt.php?id=108',
'http://shirts4mike.com/shirt.php?id=107',
'http://shirts4mike.com/shirt.php?id=106',
'http://shirts4mike.com/shirt.php?id=105' ]
The remainder is http://shirts4mike.com/shirts.php
http://shirts4mike.com/shirt.php?id=108
http://shirts4mike.com/shirt.php?id=107
http://shirts4mike.com/shirt.php?id=106
http://shirts4mike.com/shirt.php?id=105
Currently scraping http://shirts4mike.com/shirts.php
[ 'http://shirts4mike.com/shirts.phpshirts.php',
'http://shirts4mike.com/shirts.phpshirt.php?id=101',
'http://shirts4mike.com/shirts.phpshirt.php?id=102',
'http://shirts4mike.com/shirts.phpshirt.php?id=103',
'http://shirts4mike.com/shirts.phpshirt.php?id=104',
'http://shirts4mike.com/shirts.phpshirt.php?id=105',
'http://shirts4mike.com/shirts.phpshirt.php?id=106',
'http://shirts4mike.com/shirts.phpshirt.php?id=107',
'http://shirts4mike.com/shirts.phpshirt.php?id=108' ]
BUT if I choose to only call the first scrape and don't call the second, like this:
scrape(url) //scrape from original entry point
  .then(nextStep)
  .then(lastStep)
  .then(lastScraperPt1)
  .then(lastScraperPt2)
  .catch(function(err) {
    // handle any error from any request here
    console.log(err);
  });
... Then everything works. I just don't get to all the urls.
What is happening here and how can I fix it? Thank you guys
The issue is that tshirtArray is not defined in convertJson2Csv(). In lastScraperPt2, pass tshirtArray to convertJson2Csv():
convertJson2Csv(tshirtArray)
and in convertJson2Csv:
function convertJson2Csv(tshirtArray) {
  // do stuff
}
One problem seems to be in your lastStep. It looks like you mean for remainder to be another array of urls; correct me if I'm wrong there. However, what's happening is that the first time the if($('[type=submit]').length !== 0) condition fails, you automatically go down to the next block, because remainder starts out undefined. Whatever the current url is, you assign it to remainder. For the rest of the iterations of your for-loop, you will never again hit the condition where remainder == undefined. So you will only ever end up with one url assigned to remainder, while any more that you were hoping to get will simply be passed over.
You might want to define remainder as remainder = [];. And then instead of saying else if (remainder == undefined), you would just say
} else {
  remainder.push(obj.arrayOfUrls[i]);
}
However, then you're passing an array of urls to scrape when scrape is only expecting a single url. If this is what you want, and I am right in assuming that you mean for remainder to be an array of urls, you could define a new function as follows:
function scrapeRemainders(remainders) {
  var promises = [];
  remainders.forEach(function (url) {
    promises.push(requestPromise(url));
  });
  return Promise.all(promises).then(function (results) {
    return _.flattenDeep(results);
  });
}
Then, instead of the second scrape in your promise chain, you would replace it with scrapeRemainders. Also, for the _ in the previous function, you would need to npm install lodash and then var _ = require('lodash'). On a side note, lodash has nothing to do with promises, but it is a great tool for data manipulation. You should look into it when you have the chance.
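For illustration, a tiny example of what _.flattenDeep does (the data here is made up):
var _ = require('lodash');

// flattenDeep recursively flattens nested arrays into one flat array
var nested = [['a', 'b'], [['c'], 'd']];
console.log(_.flattenDeep(nested)); // [ 'a', 'b', 'c', 'd' ]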
Also, in lastScraperPt1, you can change
return Promise.all(promiseArray)
  .then(function(arrayOfHtml){
    return arrayOfHtml;
  });
to
return Promise.all(promiseArray);
It does the same thing: a .then that just returns its argument is a no-op.
Hope this helps. If this does not answer your question, comment at me and I can change my answer accordingly.
All fixed, it was grabbing the wrong urls in scrape(). Though I only knew this after I logged the statusCodes to the console:
//TASK: Create a command line application that goes to an ecommerce site to get the latest prices.
//Save the scraped data in a spreadsheet (CSV format).
'use strict';

//Modules being used:
var cheerio = require('cheerio');
var json2csv = require('json2csv');
var request = require('request');
var moment = require('moment');
var fs = require('fs');

//hardcoded url
var urlHome = 'http://shirts4mike.com/';
//url for tshirt pages
var urlSet = [];
var tshirtArray = [];

const requestPromise = function(url) {
  return new Promise(function(resolve, reject) {
    request(url, function(error, response, html) {
      if(error) {
        errorHandler(error);
        return reject(error);
      }
      if(!error && response.statusCode == 200){
        return resolve(html);
      }
      if(response.statusCode !== 200){
        console.log("response code is " + response.statusCode);
      }
      return resolve("");
    });
  });
}

// Go into webpage via url, load html and grab links shirt in url
function scrape (url) {
  console.log("Currently scraping " + url)
  return requestPromise(url)
    .then(function(html) {
      var $ = cheerio.load(html);
      var links = [];
      var URL = 'http://shirts4mike.com/';
      //get all the links
      $('a[href*=shirt]').each(function(){
        var a = $(this).attr('href');
        //add into link array
        links.push(URL + a);
      });
      // return array of links
      return links;
    });
}

function nextStep (arrayOfLinks) {
  var promiseArray = [];
  console.log(arrayOfLinks);
  for(var i = 0; i < arrayOfLinks.length; i++){
    promiseArray.push(requestPromise(arrayOfLinks[i]));
  }
  //return both the html of pages and their urls
  return Promise.all(promiseArray)
    .then(function(arrayOfHtml){
      return {arrayOfHtml: arrayOfHtml, arrayOfUrls: arrayOfLinks};
    });
}

//go through the html of each url and add to urlSet if there is a checkout button
//add to remainder otherwise to rescrape
function lastStep (obj){
  for(var i = 0; i < obj.arrayOfHtml.length; i++){
    var $ = cheerio.load(obj.arrayOfHtml[i]);
    //if page has a submit it must be a product page
    if($('[type=submit]').length !== 0){
      //add page to set
      urlSet.push(obj.arrayOfUrls[i]);
      console.log(obj.arrayOfUrls[i]);
    } else if(remainder == undefined) {
      //if not a product page, add it to remainder so another scrape can be performed.
      var remainder = obj.arrayOfUrls[i];
      console.log("The remainder is " + remainder)
    }
  }
  //return remainder for second run-through of scrape
  return remainder;
}

//iterate through urlSet (product pages) and grab html
function lastScraperPt1(){
  //call lastScraper so we can grab data from the set (product pages)
  //scrape set, product pages
  var promiseArray = [];
  for(var item of urlSet){
    var url = item;
    promiseArray.push(requestPromise(url));
  }
  return Promise.all(promiseArray)
    .then(function(arrayOfHtml){
      return arrayOfHtml;
    });
}

//iterate over the html of the product pages and store data as objects
function lastScraperPt2(html){
  for(var i = 0; i < html.length; i++){
    var $ = cheerio.load(html[i]);
    //grab data and store as variables
    var price = $('.price').text();
    var imgURL = $('.shirt-picture').find('img').attr('src');
    var title = $('body').find('.shirt-details > h1').text().slice(4);
    var tshirtObject = {};
    //add values into tshirt object
    tshirtObject.Title = title;
    tshirtObject.Price = price;
    tshirtObject.ImageURL = urlHome + imgURL;
    tshirtObject.URL = urlSet[i];
    tshirtObject.Date = moment().format('MMMM Do YYYY, h:mm:ss a');
    //add the object into the array of tshirts
    tshirtArray.push(tshirtObject);
  }
  return tshirtArray;
}

//convert tshirt objects and save as CSV file
function convertJson2Csv(tshirtArray){
  //The scraper should generate a folder called `data` if it doesn’t exist.
  var dir = './data';
  if(!fs.existsSync(dir)){
    fs.mkdirSync(dir);
  }
  var fields = ['Title', 'Price', 'ImageURL', 'URL', 'Date'];
  //convert tshirt data into CSV and pass in fields
  var csv = json2csv({ data: tshirtArray, fields: fields });
  //Name of file will be the date
  var fileDate = moment().format('MM-DD-YY');
  var fileName = dir + '/' + fileDate + '.csv';
  //Write file
  fs.writeFile(fileName, csv, {overwrite: true}, function(err) {
    console.log('file saved');
    if (err) errorHandler(err);
  });
}

scrape(urlHome) //scrape from original entry point
  .then(nextStep)
  .then(lastStep)
  .then(scrape)
  .then(nextStep)
  .then(lastStep)
  .then(lastScraperPt1)
  .then(lastScraperPt2)
  .then(convertJson2Csv)
  .catch(function(err) {
    // handle any error from any request here
    console.log(err);
  });

//If the site is down, an error message describing the issue should appear in the console.
//This is to be tested by disabling wifi on your device.
//When an error occurs, log it to a file scraper-error.log. It should append to the bottom of the file with a time stamp and error.
var errorHandler = function (error) {
  console.log(error.message);
  console.log('The scraper could not scrape data from ' + urlHome + '; there is either a problem with your internet connection or the site may be down');
  /**
   * create new date for log file
   */
  var loggerDate = new Date();
  /**
   * create message as a variable
   */
  var errLog = '[' + loggerDate + '] ' + error.message + '\n';
  /**
   * when the error occurs, log that to the error logger file
   */
  fs.appendFile('scraper-error.log', errLog, function (err) {
    if (err) throw err;
    console.log('There was an error. The error was logged to scraper-error.log');
  });
};

Why is this an unreachable code?

I'm calling the function getKeywords from another function and got an "unreachable code detected" section, and I don't understand why. Any help?
var env = require('dotenv').config();
var request = require('request')
var getKeywords = function(){
  request.get('URI', //URI IS CORRECT IN MY CODE
    function(err, httpResponse, body){
      if(err){ //UNREACHABLE CODE DETECTED
        console.error("request.post Error:", err);
        return false;
      } //UNREACHABLE CODE DETECTED
      else{
        console.log('Im here');
        return JSON.parse(httpResponse.body).keywords;
      }
    });
}
module.export = getKeywords;
Here is the calling code.
var getKeywords = require('./getKeywords.js');
var keywords = new getKeywords();
var env = require('dotenv').config();
var difflib = require('difflib');
var postMention = require('./postMention.js');
var detection = function(obj, i){
  var keyword = keywords[i];
  var mentionObject = {
    //some json
    //postMention(mentionObject);
  }
}
module.exports = detection;
Some tools have the ability to analyze every call to your function. It may be that in all the places in your code that call the function, the err parameter is never set to a truthy value, so the tool marks the if(err) branch as unreachable.

Node.js - Looping through array of URLs one at a time

I am a beginner at Node.js and I'm trying to write a web scraping script. I got permission from the site admin to scrape their products if I make fewer than 15 requests a minute. When I started out it used to request all the URLs at once, but after some tooling around I was able to go through each item in the array. However, the script doesn't stop when there are no more items in the array. I'm not really happy with my result and feel like there is a better way to do this.
var express = require('express');
var fs = require('fs');
var request = require('request');
var cheerio = require('cheerio');
var app = express();
var async = require('async');

app.get('/scrape', function(req, res){
  productListing = ['ohio-precious-metals-1-ounce-silver-bar','morgan-1-ounce-silver-bar']
  var i = 0;
  async.eachLimit(productListing, 1, function (product, callback) {
    var getProducts = function () {
      var url = 'http://cbmint.com/' + productListing[i];
      request(url, function(error, response, html) {
        if(!error){
          var $ = cheerio.load(html);
          var title;
          var json = { title : ""};
          $('.product-name').filter(function(){
            var data = $(this);
            title = data.children().children().first().text();
            json.title = title;
          })
        }
        var theTime = new Date().getTime();
        console.log(i);
        console.log(json.title);
        console.log(theTime);
        i++;
      });
    }
    setInterval(getProducts, 10000);
  })
  res.send('Check your console!')
})

app.listen('8081')
console.log('Magic happens on port 8081');
exports = module.exports = app;
You aren't calling callback inside the iterator function. Take a look at the docs for eachLimit.
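A hedged sketch of what that could look like here (assumptions: product is used directly instead of the manual i counter, setInterval is dropped, and a setTimeout before callback is one way to keep the 10-second spacing, not the only one):
app.get('/scrape', function (req, res) {
  var productListing = ['ohio-precious-metals-1-ounce-silver-bar', 'morgan-1-ounce-silver-bar'];
  async.eachLimit(productListing, 1, function (product, callback) {
    var url = 'http://cbmint.com/' + product;
    request(url, function (error, response, html) {
      if (error) return callback(error); // signal failure for this item
      var $ = cheerio.load(html);
      var title = $('.product-name').children().children().first().text();
      console.log(title, new Date().getTime());
      // wait 10s before signalling completion, so requests stay spaced out
      setTimeout(callback, 10000);
    });
  }, function (err) {
    // final callback: runs once, after the whole array has been processed
    if (err) console.error(err);
    else console.log('Done scraping all products.');
  });
  res.send('Check your console!');
});
The final callback is what makes the script "stop": it fires exactly once after the last item, instead of the setInterval firing forever.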

How to execute a function and return the value to the calling function across Node.js files

I have three Node.js files:
mysqlconnection.js to store the database connection properties:
var mysql = require('mysql');
var cjson = require('cjson');
var yaml_config = require('node-yaml-config');
// project files
var config = yaml_config.load(__dirname + '/billingv2.yaml');

exports.execute = function(callback){
  var connection = mysql.createConnection(
    {
      host     : config.host,
      user     : config.user,
      password : config.password,
      database : config.database,
    }
  );
  connection.connect();
  return callback(null, connection);
}
subscriptionRestService.js to handle the REST api calls:
var express = require('express');
var app = express();
app.use(express.bodyParser());
var fs = require('fs');
// Project files
var mysql = require('./mysqlRestService.js');

// Get Resource Subscription data by Resourceuri
app.post('/pricingdetails', function(req, res) {
  var workload = req.body;
  if(workload.elements && workload.elements.length > 0)
  {
    var arr = [];
    for(var index in workload.elements)
    {
      arr[index] = workload.elements[index].uri;
    }
    var resourceIdentifiers = arr.join(',');
  }
  console.log(resourceIdentifiers);
  mysql.getPricingDetail(function(resourceIdentifiers, callback){
  });
});
mysqlRestService.js to handle mysql queries/stored procedures:
// packages
var mysql = require('mysql');
var cjson = require('cjson');
var fs = require('fs');
var yaml_config = require('node-yaml-config');
// project files
var dbconnection = require('./mysqlconnection');

exports.getPricingDetail = function (resourceIdentifiers, callback){
  console.log('entered into mysql function');
  console.log(resourceIdentifiers);
  var pricingDetail = {};
  dbconnection.execute(function(err, response){
    if(err){
      throw err;
    }
    else
    {
      var selectqueryString = "call SP_ExposePricingDetailforUI('" + resourceIdentifiers + "')";
      response.query(selectqueryString, function(err, pricingDetail){
        if(err) {
          throw err;
        }
        else
        {
          console.log(pricingDetail);
          pricingDetail = pricingDetail;
        }
      });
    }
  });
  //console.log('printing pricing details');
  //console.log(pricingDetail);
};
Problems faced:
Unable to send the variable resourceIdentifiers from subscriptionRestService to mysqlRestService.js
Unable to return the pricingdetail from mysqlRestService.js to calling function in subscriptionRestService.
Any guidance greatly appreciated.
Unable to send the variable resourceIdentifiers from subscriptionRestService to mysqlRestService.js
Well, you didn't send it. It currently is a parameter of your callback function in the invocation, not an argument for the parameter of getPricingDetail. Use
mysql.getPricingDetail(resourceIdentifiers, function callback(result){
  // use result here
});
Unable to return the pricingdetail from mysqlRestService.js to calling function in subscriptionRestService.
I've got no idea what pricingDetail = pricingDetail; was supposed to do. You have to call (invoke) the callback here! Use
callback(pricingDetail);
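Putting both fixes together, a minimal sketch (hedged: the err-first callback style and the HTTP response lines are assumptions; the original code throws instead of passing errors back):
// mysqlRestService.js
exports.getPricingDetail = function (resourceIdentifiers, callback) {
  dbconnection.execute(function (err, connection) {
    if (err) return callback(err);
    var selectqueryString = "call SP_ExposePricingDetailforUI('" + resourceIdentifiers + "')";
    connection.query(selectqueryString, function (err, pricingDetail) {
      if (err) return callback(err);
      callback(null, pricingDetail); // hand the rows back to the caller
    });
  });
};

// subscriptionRestService.js, inside the /pricingdetails handler
mysql.getPricingDetail(resourceIdentifiers, function (err, pricingDetail) {
  if (err) return res.send(500, err.message); // Express 3.x style, matching bodyParser above
  res.json(pricingDetail); // respond to the HTTP request with the result
});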

Categories

Resources