var url = require('url');
var http = require('http');
var downloader = require("./downloader");

http.createServer(onRequest).listen(8080);

function onRequest(request, response) {
    if (request.method == 'POST')
        handlePost(request, response);
    else
        handleGet(request, response);
}

function handlePost(request, response) {
    var data = '';
    request.on('data', function(chunk) {
        data += chunk.toString();
    });
    request.on('end', downloadTrainStatus);

    function downloadTrainStatus() {
        var downloadPromise = downloader.download(data);
        downloadPromise.then(function(responses) {
            var total = responses.length;
            var result = [];
            for (var i = 0; i < total; i++)
                result.push(JSON.parse(responses[i][1]));
            response.writeHead(200);
            response.write(JSON.stringify(result));
            response.end();
        }, function(err) {
            console.log(err);
            var result = { status: "error" };
            response.writeHead(200);
            response.write(JSON.stringify(result));
            response.end();
        });
    }
}

function handleGet(request, response) {
    console.log("GET request");
    response.writeHead(200);
    response.write("Get request works !!");
    response.end();
}
The above file, server.js, simply starts the web server. It uses the file downloader.js, included below, which makes 10 to 20 parallel web requests to a URL that returns a JSON response. After running this process for half a day, the memory usage of the process shoots up to 1.5 GB. Is there a memory leak in this code?
var Promise = require('bluebird');
var request = Promise.promisify(require('request'));

function download(json) {
    var requests = [];
    var data = JSON.parse(json);
    var stations = data.stations; // assumes the parsed payload carries a `stations` array
    for (var i = 0; i < data.total; i++) {
        var stationCode = stations[i].StationCode;
        var journeyDay = stations[i].JourneyDayCode;
        requests.push(downloadStatus());
    }
    return Promise.all(requests);
}

function downloadStatus() {
    // Placeholder URL; the real endpoint would presumably be built from stationCode/journeyDay.
    var url = "http://google.com";
    var headers = { 'User-Agent': 'Apache-HttpClient/UNAVAILABLE (java 1.4)' };
    var options = {
        url: url,
        headers: headers,
        timeout: 15 * 1000
    };
    return request(options);
}

module.exports.download = download;
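One way to confirm whether the usage really grows without bound is to log Node's built-in process.memoryUsage() at an interval while the server runs. This is a diagnostic sketch only, not part of the original code; the 60-second interval is an arbitrary choice.

// Log resident set size and heap usage once a minute (diagnostic sketch).
setInterval(function() {
    var usage = process.memoryUsage();
    console.log('rss: ' + (usage.rss / 1048576).toFixed(1) + ' MB, ' +
        'heapUsed: ' + (usage.heapUsed / 1048576).toFixed(1) + ' MB');
}, 60 * 1000);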
I have created a function that calls an API and then extracts data from the response, but sometimes the API I'm using does not return success. I want to check for a successful response from webscrapingapi.com and retry up to 5 times if the response is failing, because the actual API request might fail and we need to track that.
const wrapper = require('./wrapper');
const { save } = require('./api.service');

function hasHex(url) {
    var start = new Date();
    var googleRemarkeging;
    var facbookPixel;
    var googleCheck = 'https://www.googletagmanager.com/gtm.js?id=';
    var facbookCheck = 'https://connect.facebook.net/en_US/fbevents.js';

    /////////////////////////// MAIN FUNCTION //////////////////////////
    const options = {
        "method": "GET",
        "hostname": "api.webscrapingapi.com",
        "port": null,
        "path": `/v1?api_key=${wrapper.api_key}&url=${encodeURIComponent(wrapper.target_url(url))}&render_js=1&timeout=60000`,
    };
    const req = wrapper.http.request(options, function(res) {
        const chunks = [];
        res.on("data", function(chunk) {
            chunks.push(chunk);
        });
        res.on("end", function() {
            const body = Buffer.concat(chunks);
            const response = body.toString();
            const $ = wrapper.cheerio.load(response);
            // console.log($.html())
            if (res.statusCode == 200) {
                $("script").each(function(index, text) {
                    if ($(text).text().includes(googleCheck)) {
                        googleRemarkeging = "Yes";
                        return false;
                    }
                });
                $("script").each(function(index, text) {
                    if ($(text).text().includes(facbookCheck)) {
                        facbookPixel = "Yes";
                        return false;
                    }
                });
                const data = {
                    "status": "success",
                    "result": {
                        googleRemarkeging: googleRemarkeging,
                        facbookPixel: facbookPixel
                    }
                };
                var result = JSON.stringify(data.result);
                var duration = (new Date() - start) / 1000;
                var api_endpoint = '/api/hasHex';
                save(api_endpoint, url, data, duration, result);
                console.log(data);
            }
        });
    });
    req.end();
}

module.exports = hasHex;
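One way to add the retry the question asks for is to wrap a single request in a Promise and re-invoke it on failure. The sketch below is a hypothetical outline, not the original code: requestOnce and requestWithRetry are made-up names, it uses Node's http module directly rather than the wrapper, and it assumes a non-200 status counts as a failed attempt.

var http = require('http');

// Hypothetical helper: performs one request, resolves with the status and body.
function requestOnce(options) {
    return new Promise(function(resolve, reject) {
        var req = http.request(options, function(res) {
            var chunks = [];
            res.on('data', function(chunk) { chunks.push(chunk); });
            res.on('end', function() {
                resolve({ statusCode: res.statusCode, body: Buffer.concat(chunks).toString() });
            });
        });
        req.on('error', reject);
        req.end();
    });
}

// Retry on a transport error or non-200 status, up to `attempts` tries in total.
function requestWithRetry(options, attempts) {
    return requestOnce(options).then(function(res) {
        if (res.statusCode !== 200 && attempts > 1) {
            return requestWithRetry(options, attempts - 1);
        }
        return res;
    }, function(err) {
        if (attempts > 1) {
            return requestWithRetry(options, attempts - 1);
        }
        throw err;
    });
}

// Usage: requestWithRetry(options, 5).then(...).catch(...);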
I keep running into a "Cannot find function forEach in object" error while trying to loop over entries. Is there something I am not seeing? Here is the code. It is supposed to fetch time data from a system via an API, do calculations, and send an email.
function getTime() {
    var range = [5323, 9626, 4998];
    var user = [];
    for (var i = 0; i < range.length; i++) {
        var auth = 'token';
        var from = '2020-01-08';
        var to = '2020-01-09';
        var url = 'https://api.10000ft.com/api/v1/users/' + range[i] + '/time_entries?from=' + from + '&to=' + to + '&auth=' + auth;
        var options = {
            method: 'get',
            headers: {
                Authorization: 'Bearer ' + auth
            }
        };
        var submitted_time_entries = {};
        var response = UrlFetchApp.fetch(url, options);
        response = JSON.parse(response.getContentText());
        var time_entries = response.data;
        time_entries.forEach(function(time_entry) {
            if (time_entry.user_id in submitted_time_entries) {
                submitted_time_entries[time_entry.user_id] += time_entry.hours;
            } else {
                submitted_time_entries[time_entry.user_id] = time_entry.hours;
            }
        });
        submitted_time_entries.forEach(function(user_id) {
            if (submitted_time_entries[user_id] < 3) {
                // send mail
            }
        });
    }
}
response.data is probably not the array you expect. The server may be returning an error or a successful response that isn't parseable as an array. To find out, print response.data to the console and confirm it's the array you expect.
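For example, in Apps Script the check might look like this (a minimal sketch reusing the question's url and options):

var response = JSON.parse(UrlFetchApp.fetch(url, options).getContentText());
Logger.log(JSON.stringify(response.data)); // inspect the actual shape
Logger.log(Array.isArray(response.data)); // true only for a real array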
It seems my API returned an object. I found a way around it by using the Object.keys method, and it worked. Here is the working code.
function getTime() {
    var range = [53, 926, 8098];
    var user = [];
    for (var i = 0; i < range.length; i++) {
        var auth = 'token';
        var from = '2020-01-08';
        var to = '2020-01-09';
        var url = 'https://api.10000ft.com/api/v1/users/' + '449625' + '/time_entries?from=' + from + '&to=' + to + '&auth=' + auth;
        var options = {
            method: 'get',
            headers: {
                Authorization: 'Bearer ' + auth
            }
        };
        var submitted_time_entries = {};
        var response = UrlFetchApp.fetch(url, options);
        response = JSON.parse(response.getContentText());
        var time_entries = response.data;
        // Object.keys yields the keys, so look up each value before using it.
        Object.keys(time_entries).forEach(function(key) {
            var time_entry = time_entries[key];
            if (time_entry.user_id in submitted_time_entries) {
                submitted_time_entries[time_entry.user_id] += time_entry.hours;
            } else {
                submitted_time_entries[time_entry.user_id] = time_entry.hours;
            }
        });
        Object.keys(submitted_time_entries).forEach(function(user_id) {
            if (submitted_time_entries[user_id] < 3) {
                Logger.log(user_id);
                // send mail
            }
        });
    }
}
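The underlying point is that plain objects have no forEach method; iterating Object.keys (or Object.entries, where available) is the usual workaround. A minimal illustration with made-up data:

var totals = { '101': 2.5, '102': 6 }; // hypothetical user_id -> hours map
Object.keys(totals).forEach(function(userId) {
    if (totals[userId] < 3) {
        Logger.log('User ' + userId + ' logged under 3 hours');
    }
});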
I am trying to build a file of JSON data from repeated calls to a REST API. The final file should be the sum of the data received from all the calls. At present the file is written with the contents of the first call, then overwritten with the contents of the first + second call (see the console output below the code).
As I have to make many calls, once the code is working I would like to write the file only once, after the requests have finished and the JSON string has been built. Does anyone know how I would go about doing this? Maybe with a callback (which I still don't have the hang of) that fires once the requests have finished or the JSON string has finished being built.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({delimiter: ','}))
.on('data', function(data) {
csvData.push(data[1]);
})
.on('end',function() {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function(error, response, body) {
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log( '\n' + (Buffer.byteLength(s)/1000).toFixed(2)+
" kilobytes downloaded in: " + (time/1000) + " sec");
console.log(i)
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s .length-1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
fs.writeFile(pathJSON, jsonData, function(err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
128.13 kilobytes downloaded in: 2.796 sec
2
file complete
256.21 kilobytes downloaded in: 3.167 sec
2
file complete
Perhaps writing the file only after all requests are complete will help. In the current code, the writeFile function is called each time a request completes, which overwrites the file each time.
A quick way to fix this is to count requests (and failures) and write the file only after all the requests are complete.
"use strict";
const fs = require('fs');
const request = require('request');
var parse = require('csv-parse');
const path = "../path tocsv.csv";
const pathJSON = "../pathtoJSON.json";
var shapes = "https://url";
var options = {
url: '',
method: 'GET',
accept: "application/json",
json: true,
};
var csvData = [];
var jsonData = "[";
fs.createReadStream(path)
.pipe(parse({
delimiter: ','
}))
.on('data', function (data) {
csvData.push(data[1]);
})
.on('end', function () {
var start = Date.now();
var records = csvData.length //2212 objects
console.log(records);
var dataLength = 2 //set low at moment
var jsonsDownloaded = 0; // Counter to track complete JSON requests
var jsonsFailed = 0; // Counter to handle failed JSON requests
for (var i = 0; i < dataLength; i += 1) {
var url = shapes + csvData[i];
options.url = url; //set url query
request(options, function (error, response, body) {
if(error){
jsonsFailed++;
writeFile(jsonData);
return;
}
jsonsDownloaded++;
var time = Date.now() - start;
var s = JSON.stringify(body.response);
console.log('\n' + (Buffer.byteLength(s) / 1000).toFixed(2) +
" kilobytes downloaded in: " + (time / 1000) + " sec");
console.log(i)
buildJSON(s);
});
}
function buildJSON(s) {
var newStr = s.substring(1, s.length - 1);
jsonData += newStr + ',';
writeFile(jsonData);
}
function writeFile(jsonData) {
if(dataLength - (jsonsDownloaded + jsonsFailed) > 0){
return;
}
fs.writeFile(pathJSON, jsonData, function (err) {
if (err) {
return console.log(err);
} else {
console.log("file complete")
}
});
}
});
Note: firing requests in quick succession (say, 2000 requests in a for loop) does not work well in my experience; try batching them, as in the sketch below. Also, doing it this way does not guarantee order (if that is important in your use case).
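A minimal batching sketch along those lines (the helper name processInBatches and the batch size are illustrative; it reuses the request library from the code above): fire one batch of requests, wait for every callback in it, then start the next batch.

function processInBatches(urls, batchSize, done) {
    var index = 0;
    function nextBatch() {
        if (index >= urls.length) return done();
        var batch = urls.slice(index, index + batchSize);
        index += batchSize;
        var remaining = batch.length;
        batch.forEach(function(url) {
            request({ url: url, json: true }, function(error, response, body) {
                // ...record the error / accumulate body here...
                if (--remaining === 0) nextBatch(); // whole batch done, start the next
            });
        });
    }
    nextBatch();
}

// Usage: processInBatches(listOfUrls, 50, function() { writeFile(jsonData); });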
An alternative would be to open your file in append mode. You can do this by passing an extra options object, with flag set to 'a', to your fs.writeFile call.
fs.writeFile(pathJSON, jsonData, {
    flag: 'a'
}, function (err) {
    if (err) {
        return console.log(err);
    }
});
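Note that append mode only ever adds to the end of the file, so with this approach the opening bracket, separating commas, and closing bracket of the JSON array still have to be managed by the code that builds the string.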
References:
fs.writeFile Docs
File system flags
I am trying to implement multiple data insertions in one call and trigger the response only after all the data is inserted. This is how I am currently doing it:
create: function(req, res) {
    var response = {};
    var num = req.body.num;
    var ret = 0;
    for (var i = 0; i < num; i++) {
        var db = new user();
        db.enabled = false;
        db.save(function(err) {
            if (err) {
                // Handle error
            } else {
                ret++;
                // Do something
            }
        });
    }
    response = {"status": 200, "message": "It's working: " + ret};
    res.json(response);
}
The problem with this approach is that all the callbacks for save are triggered after res.json(response), which is wrong because sometimes I would also like to inform the user how much data was saved. The user always receives the following response:
It's working: 0
because the ret variable is always 0; it only gets incremented after the response has already been sent. What am I doing wrong?
EDIT:
Code after Will's suggestion:
var Q = require('q');

create: function(req, res) {
    var response = {};
    var num = req.body.num;
    var tasks = [];
    for (var i = 0; i < num; i++) {
        var db = new user();
        db.enabled = false;
        tasks.push(db.save());
    }
    Q.all(tasks).then(
        function(results) {
            response = {"status": 200, "message": "It's working!"};
            res.json(response); // respond only once every save has resolved
        },
        function(err) {
            response = {"status": 500, "message": "Not working!"};
            res.json(response);
        }
    );
}
An alternative without promises is to count completed saves and send the response from the last callback:

for (var i = 0; i < num; i++) {
    var db = new user();
    db.enabled = false;
    db.save(function(err) {
        if (err) {
            // Handle error
        } else {
            ret++;
            if (ret == num) {
                response = {"status": 200, "message": "It's working: " + ret};
                res.json(response);
            }
        }
    });
}
After asking a question and getting a very helpful answer on what the 'Async Juggling' assignment in learnyounode was asking me to do, I set out to implement it myself.
The problem is, my setup isn't having any success! Even though I've referred to other solutions out there, my setup simply isn't returning any results when I run learnyounode verify myscript.js.
GIST: jugglingAsync.js
var http = require('http');

var app = (function () {
    // Private variables...
    var responsesRemaining,
        urls = [],
        responses = [];

    var displayResponses = function() {
        for (var iterator in responses) {
            console.log(responses[iterator]);
        }
    };

    // Public scope...
    var pub = {};

    pub.main = function (args) {
        responsesRemaining = args.length - 2;

        // For every argument, push a URL and prep a response.
        for (var i = 2; i < args.length; i++) {
            urls.push(args[i]);
            responses.push('');
        }

        // For every URL, set off an async request.
        for (var iterator in urls) {
            var i = iterator;
            var url = urls[i];
            http.get(url, function(response) {
                response.setEncoding('utf8');
                response.on('data', function(data) {
                    if (response.headers.host == url)
                        responses[i] += data;
                });
                response.on('end', function() {
                    if (--responsesRemaining == 0)
                        displayResponses();
                });
            });
        }
    };

    return pub;
})();

app.main(process.argv);
Question: What am I doing wrong?
This line
for(var iterator in urls) {
doesn't do what you think it does. It actually loops over the enumerable property names of urls (see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for...in), and the loop variables i and url are shared by every callback, so they all see the values from the last iteration. Instead, you have to do something like
for (var i = 0; i < urls.length; i++) {
    var url = urls[i];
    ...
}
or
urls.forEach(function(url, index) {
    ...
});
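Applied to the question's request loop, the forEach form gives each callback its own url and index, so each chunk of data lands in the right slot. This is a sketch reusing the question's variable names; the host check from the original is dropped because the per-iteration closure makes it unnecessary.

urls.forEach(function(url, index) {
    http.get(url, function(response) {
        response.setEncoding('utf8');
        response.on('data', function(data) {
            responses[index] += data; // index is captured per iteration
        });
        response.on('end', function() {
            if (--responsesRemaining == 0)
                displayResponses();
        });
    });
});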
In addition to not properly looping through the arrays inside the app module, I was also not properly concatenating data returned from the response.on('data') event. Originally I was doing...
responses[index] += data;
Instead, the correct thing to do was:
responses[index] = responses[index] + data;
Changing that, as well as the things noted by @arghbleargh, got the 'Async Juggling' to fully verify!
I have tested my code and it all works:
~ $ node juggling_async.js site1 site2 site3 site4 ...
The code is not limited to only three sites.
var http = require('http');

// Process all the site names from the arguments and store them in sites[].
// This way the count is not limited to only 3 sites.
var sites = [];
(function loadSites() {
    for (var i = 2, len = process.argv.length; i < len; ++i) {
        var site = process.argv[i];
        // 'http://' is 7 characters, so compare the first 7 (not 6).
        if (site.substr(0, 7) != 'http://') site = 'http://' + site;
        sites.push(site);
    }
})();

var home_pages = [];
var count = 0;

function httpGet(index) {
    var home_page = '';
    var site = sites[index];
    http.get(site, function(res) {
        res.setEncoding('utf8');
        res.on('data', function(data) {
            home_page += data;
        });
        res.on('end', function() {
            ++count;
            home_pages[index] = home_page;
            if (count == sites.length) {
                // Yahoo! We have reached the last one.
                for (var i = 0; i < sites.length; ++i) {
                    console.log('\n############ Site #' + (+i + 1) + ': ' + sites[i]);
                    console.log(home_pages[i]);
                    console.log('============================================\n');
                }
            }
        });
    })
    .on('error', function(e) {
        console.log('Error at loop index ' + index + ': ' + e.message);
    });
}

for (var i = 0; i < sites.length; ++i) {
    httpGet(i);
}