Create a website uptime monitor in Node.js

I want to create an uptime monitor using Node.js and MongoDB. I want to run a cron job in Node.js and store the data in MongoDB. If the website's response status code is not 200, it should be saved in the database. I want the database entry to look like this:

url: http://www.google.com
status_code: 500
start_time: <start time>
end_time: <end time>

I can run the cron job, but I'm not sure how to save the downtime in the database, as I don't want to store every response. Only when the response status code is something other than 200 should the URL start being tracked (start_time), and the time when the website is back to 200 should be kept as end_time.
cron.js:

var async = require('async');
const Entry = require('../models/health.model.js');
var request = require('request');

module.exports = function getHttpsRequests() {
  Entry.find({}, function(err, entrys) {
    console.log(err);
    if (!err && entrys) {
      async.each(entrys, function(entry, callback) {
        request(entry.url, function(error, response, body) {
          entry.statuscheck = response.statusCode;
          entry.save();
          callback();
        });
      }, function(error) {
      });
    }
  });
};
health.model.js:

const mongoose = require('mongoose');

const EntrySchema = mongoose.Schema({
  url: String,
  statuscheck: String
}, {
  timestamps: true
});

module.exports = mongoose.model('Entry', EntrySchema);

I would do something like this to handle updating the database. I went ahead and put standard arrow functions in, because it was easier for me that way, and I added comments that should clear most questions up. It may not be the most elegant solution because I wrote it in five minutes, but if you follow this general logic flow, you should be much closer to your solution (it's completely untested, mind you).
var async = require('async');
const Entry = require('../models/health.model.js');
var request = require('request');

module.exports = function getHttpsRequests() {
  Entry.find({}, (err, entrys) => {
    if (err) {
      console.log(err);
      return;
    }
    async.each(entrys, (entry, callback) => {
      request(entry.url, (error, response, body) => {
        if (error || !response) {
          // The request itself failed; move on to the next entry.
          return callback();
        }
        // We already have the document from the outer find(), so there is no
        // need to query again. An "open" downtime window is one that has a
        // start_time but no end_time yet.
        const isTracking = entry.start_time && !entry.end_time;
        if (!isTracking) {
          if (response.statusCode === 200) {
            // Site is up and nothing is being tracked; continue the loop.
            callback();
          } else {
            // The status code is not 200, so open a downtime window.
            console.log("Saving object: " + entry.url);
            entry.status_code = response.statusCode;
            entry.start_time = new Date();
            entry.end_time = null;
            entry.save(() => callback());
          }
        } else if (response.statusCode === 200) {
          // The site is back up, so close the downtime window.
          entry.end_time = new Date();
          entry.save((err, object) => {
            // save() hands back the updated entry, so we can put that in the
            // console for easy debugging.
            if (err) {
              console.log(err);
            } else {
              console.log("Object saved: " + object);
            }
            callback();
          });
        } else {
          // Still down; the open window stays as it is.
          callback();
        }
      });
    }, function(error) {
      if (error) console.log(error);
    });
  });
};
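One caveat with the above (and with the question's model): the schema in health.model.js only declares url and statuscheck, and by default Mongoose silently drops any path that isn't in the schema, so status_code, start_time, and end_time would never reach the database. A sketch of the extended model (field names taken from the desired entry format in the question):

// health.model.js -- extended so the downtime fields actually persist.
const mongoose = require('mongoose');

const EntrySchema = mongoose.Schema({
  url: String,
  statuscheck: String,
  status_code: Number, // last non-200 status observed
  start_time: Date,    // when the site was first seen down
  end_time: Date       // when the site came back up
}, {
  timestamps: true
});

module.exports = mongoose.model('Entry', EntrySchema);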

Related

How to wait for a variable to be populated by an api request before passing it to a webpage as an argument?

I'm new to JavaScript and cannot seem to make this work. The topic of the quiz depends on user input: when the user presses next, I get the topic (this also takes the user to the main quiz page), then I have to fetch data from the API with the topic as a parameter, process the result of the fetch operation, and pass that info to the main quiz page. But the variable that is supposed to be populated by the fetch request is still undefined when I pass it to the main quiz page.
var Allquestions;

var sheetdb = require('sheetdb-node');

// create a config file
var config = {
  address: 'https://sheetdb.io/api/v1/9djmf8ydc7hwy',
};

// Create new client
var client = sheetdb(config);

function downloadquestions(topic) {
  console.log(topic);
  client.read({ limit: 2, sheet: topic }).then(function(data) {
    console.log(data + " in client.read func");
    processQuestions(data);
  }, function(err) {
    console.log(err);
  });
}

async function processQuestions(data) {
  console.log(data + "data in process");
  Allquestions = JSON.parse(data);
  console.log(Allquestions[0].Question + " This is defined");
}

app.get("/", (req, res) => {
  res.render("pages/index", { title: "Home" });
});

// app.post("/", urlencodedParser, (req, res) => {
//   console.log(req.body.topic);
// });

app.get("/questions", urlencodedParser, (req, res) => {
  downloadquestions(req.body.topic);
  console.log(Allquestions + " this is undefined");
  res.render("/pages/quizpage", { Allquestions });
});
There are a few issues with your code. You have a broken promise chain: client.read() returns a promise, and that promise is going nowhere. You either return it or await it; to be able to await it, you will also need to mark your route handler (req, res) as async.
Your code is also a little mixed up: you have Allquestions as a global var, which isn't great for multi-user use, as the last topic requested will overwrite it each time.
Also, try to avoid swallowing exceptions in utility functions; keep your exception handling at the top level, e.g. in your case inside your req/res handler.
So with all this in mind, your refactored code could look something like this:
const sheetdb = require('sheetdb-node');

// create a config file
const config = {
  address: 'https://sheetdb.io/api/v1/9djmf8ydc7hwy',
};

// Create new client
const client = sheetdb(config);

async function downloadquestions(topic) {
  const data = await client.read({ limit: 2, sheet: topic });
  return processQuestions(data);
}

function processQuestions(data) {
  return JSON.parse(data);
}

app.get("/", (req, res) => {
  res.render("pages/index", { title: "Home" });
});

app.get("/questions", urlencodedParser, async (req, res) => {
  try {
    const allQuestions = await downloadquestions(req.body.topic);
    res.render("/pages/quizpage", { allQuestions });
  } catch (e) {
    console.error(e);
    res.end('There was an error');
  }
});

Ajax DELETE request path inconsistency

I have set up an Express server to handle different requests, one of which is a DELETE request. It works sometimes and gives a 404 other times. I noticed that the URL it is sending is different, so if I change my server code to handle one path, it works until the client sends a different path. I am unable to understand why it is sending different URLs inconsistently. I am very new to web programming and still a student, so maybe I am missing something very basic.
The request is being sent from the http://localhost:3000/notes page.

Yesterday the request was sent with this path:
http://localhost:3000/api/notes/id

Today the request is:
http://localhost:3000/notes/api/notes/id
This is the client-side request (I have verified that it's calling delete with the correct value):

var deleteNote = function(id) {
  return $.ajax({
    url: "api/notes/" + id,
    method: "DELETE"
  });
};
This is the server code:

app.delete("/api/notes/:id", (req, res) => {
  let chosenNoteToDelete = req.params.id;
  fs.readFile(__dirname + "/db/db.json", (err, data) => {
    if (err) {
      throw err;
    }
    let json = JSON.parse(data);
    for (let i = 0; i < json.length; i++) {
      if (json[i].id === chosenNoteToDelete) {
        json.splice(i, 1);
      }
    }
    fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
      if (err) {
        throw err;
      }
      res.send("Successfully deleted");
    });
  });
});
Can someone help me understand why it's inconsistent, and how to handle it on the server?
Change the client code from this:

var deleteNote = function(id) {
  return $.ajax({
    url: "api/notes/" + id,
    method: "DELETE"
  });
};

to this:

var deleteNote = function(id) {
  return $.ajax({
    url: "/api/notes/" + id,
    method: "DELETE"
  });
};
Your relative path tells jQuery to combine your path with the path from the page's URL. You don't want a relative path here: the request should always go to /api/notes/id, so you need the leading slash.
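To see why the resolved path changed from one day to the next, note that a relative URL resolves against the "directory" of the page URL, so a trailing slash on the page URL changes the result. A quick illustration using Node's built-in URL class (the id 5 is just a placeholder):

const { URL } = require('url');

// Without a trailing slash, "notes" is treated like a file name,
// so the relative path resolves against the root:
console.log(new URL("api/notes/5", "http://localhost:3000/notes").href);
// -> http://localhost:3000/api/notes/5

// With a trailing slash, "notes/" is the base directory:
console.log(new URL("api/notes/5", "http://localhost:3000/notes/").href);
// -> http://localhost:3000/notes/api/notes/5

// A leading slash makes the path absolute, so the page path is ignored:
console.log(new URL("/api/notes/5", "http://localhost:3000/notes/").href);
// -> http://localhost:3000/api/notes/5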
Some other things to clean up in your server code:

- Log all possible errors with console.log(err) or some similar logging mechanism.
- NEVER, EVER write if (err) throw err inside an asynchronous callback on your server. That does you no good, as nobody can catch that error. Instead, always log the error and then HANDLE it by sending an error response.
- When parsing JSON from an external source that can throw an error, wrap it in try/catch.
- When you .splice() an array that you are iterating, you need to either stop the iteration after processing the .splice() (return or break as appropriate), correct the iteration index (the elements after the removed one just moved down, so you would otherwise skip the next item), or iterate the array backwards so that a .splice() won't affect the iteration.
Here's a fixed version of your code:

app.delete("/api/notes/:id", (req, res) => {
  let chosenNoteToDelete = req.params.id;
  fs.readFile(__dirname + "/db/db.json", (err, data) => {
    if (err) {
      console.log(err);
      res.sendStatus(500);
      return;
    }
    let json;
    try {
      json = JSON.parse(data);
    } catch (e) {
      console.log(e);
      res.sendStatus(500);
      return;
    }
    for (let i = 0; i < json.length; i++) {
      if (json[i].id === chosenNoteToDelete) {
        json.splice(i, 1);
        break; // stop scanning after the removal so the write below still runs
      }
    }
    fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
      if (err) {
        console.log(err);
        res.sendStatus(500);
        return;
      }
      res.send("Successfully deleted");
    });
  });
});
And here's a cleaner implementation using fs.promises and async/await, with more centralized error handling and detection of whether the chosen note is not found:

const fsp = require('fs').promises;
const path = require('path');

app.delete("/api/notes/:id", async (req, res) => {
  let chosenNoteToDelete = req.params.id;
  let dataFilename = path.join(__dirname, "/db/db.json");
  try {
    let data = await fsp.readFile(dataFilename);
    let dataArray = JSON.parse(data);
    // iterate the array backwards so .splice() doesn't cause us to miss elements
    let found = false;
    for (let i = dataArray.length - 1; i >= 0; i--) {
      if (dataArray[i].id === chosenNoteToDelete) {
        found = true;
        dataArray.splice(i, 1);
      }
    }
    if (found) {
      await fsp.writeFile(dataFilename, JSON.stringify(dataArray));
      res.send("Successfully deleted");
    } else {
      res.status(404).send(`Note id ${chosenNoteToDelete} not found.`);
    }
  } catch (e) {
    console.log(e);
    res.sendStatus(500);
  }
});

Saving many values in a MongoDB Collection & node

I have an API which I call three times with three different parameters.
https://api.developer.com/${param1} // param2, and param3
This API returns 30,000+ results each time (a total of around 100,000).
I want to store this data in a single collection. Meaning, I want 100,000+ documents in one collection.
I have a small script that extends the npm request module, which looks like this:
```
let _request = (urls, cb) => {
  let results = {}, i = urls.length, c = 0;
  const handler = (err, response, body) => {
    let url = response.request.uri.href;
    results[url] = { err, response, body };
    if (++c === urls.length) {
      cb(results);
    }
  };
  while (i--) {
    request(urls[i], handler);
  }
};
```
But let's exclude that for now. The function which I use to GET a single endpoint and update the database afterwards is this:

function update() {
  request(url, (err, response, body) => {
    if (err) {
      console.log(err);
    } else {
      let json = {};
      try {
        json = JSON.parse(body);
      } catch (e) {
        console.log(e);
      }
      _.forOwn(json, (price, market_hash_name) => {
        Price.update(
          { market_hash_name },
          { $set: { price } },
          { upsert: true },
          err => {
            if (err) {
              console.log(err);
            }
          }
        );
      });
    }
  });
}
The raw data returned looks like this (one price per market_hash_name key):

{
  market_hash_name: price,
  market_hash_name: price,
  ...etc
}
The problem: even though Object.keys(data).length === 30000, my MongoDB collection only ends up with ~10,000 documents, and the other ~20,000 vanish into thin air.
I've checked a thousand times, with Postman, the browser, and even logging the keys in the console, and I am sure there are 30k+ key:value pairs.
Is it something wrong with my code? Is it bad practice to call Price.update for every key:value pair in the JSON (probably)? But I'm stuck. Any help would be much appreciated.
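Calling Price.update once per key:value pair does mean 30,000 independent round trips per API call, each with its own callback; if the process exits before they all complete, pending writes are lost. One common way to cut this down is Mongoose's Model.bulkWrite, which sends the upserts in batches. A minimal sketch, assuming the same Price model and the parsed json object from above (the chunk size of 1000 is an arbitrary choice):

// Batch all upserts so each database round trip carries many operations.
const _ = require('lodash');

function saveAllPrices(json) {
  const ops = _.map(json, (price, market_hash_name) => ({
    updateOne: {
      filter: { market_hash_name },
      update: { $set: { price } },
      upsert: true
    }
  }));
  // Run the chunks sequentially; bulkWrite returns a promise.
  return _.chunk(ops, 1000).reduce(
    (p, chunk) => p.then(() => Price.bulkWrite(chunk)),
    Promise.resolve()
  );
}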

AWS Lambda function processes same dynamodb stream multiple times. What am I missing?

I have written a Node.js Lambda function that triggers off a DynamoDB stream when new records are inserted into a particular table.
The function receives only new events, filters for inserted records, and then, for each record, uses a couple of fields to retrieve data from other tables. Using this combined data, a message is composed and sent via SNS to a specific target ARN.
The function performs correctly: all the relevant data is retrieved, and a push notification is sent out.
However, for some reason the function appears to be invoked several times for the same stream and processes the newly inserted records several times. The result is the target device receiving the same push notification several times.
Should I be placing the callback in a different place, or am I not calling on the context correctly?
This is the function:
'use strict';

var AWS = require("aws-sdk");
var dynamodb = new AWS.DynamoDB();
var sns = new AWS.SNS();

console.log('Loading function');

exports.handler = (event, context, callback) => {
  console.log('Received event:', JSON.stringify(event, null, 2));
  event.Records.forEach((record) => {
    console.log(record.eventID);
    console.log(record.eventName);
    console.log('DynamoDB Record: %j', record.dynamodb);
    if (record.eventName == 'INSERT') {
      var matchId = record.dynamodb.NewImage.eventId.S;
      var match_params = {
        Key: {
          "eventId": {
            S: matchId
          }
        },
        TableName: "xxxxxxxxxxx-mobilehub-xxxxxxx-Event"
      };
      // retrieve the match information from the Event table
      dynamodb.getItem(match_params, function(err, data) {
        var match_description = "";
        if (err) {
          console.log(err, err.stack);
          context.fail('No match event record found in Event table');
        } else {
          match_description = data.Item.description.S;
          var uId = record.dynamodb.NewImage.participantUserId.S; // participantUserId
          var user_params = {
            Key: {
              "userId": {
                S: uId
              }
            },
            TableName: "xxxxxxxxxxx-mobilehub-xxxxxxxxx-User"
          };
          // retrieve the user record from the User table
          dynamodb.getItem(user_params, function(err, data) {
            if (err) {
              console.log(err, err.stack); // an error occurred
              context.fail('Error occurred. See log.');
            } else {
              console.log(data); // successful response
              if (data.length === 0) {
                console.log("No User Record Found.");
                context.fail('No user found for participantUserId.');
              } else {
                var deviceARN = data.Item.device_arn.S;
                if (deviceARN <= 1) {
                  console.log("User has not registered their device for push notifications.");
                  context.fail('User has not registered for notifications');
                } else {
                  var json_message = JSON.stringify({
                    APNS_SANDBOX: JSON.stringify({
                      aps: {
                        alert: "You are playing in an upcoming match " + match_description,
                        badge: 1,
                        sound: 'default'
                      }
                    })
                  });
                  var snsparams = {
                    Message: json_message,
                    MessageStructure: 'json',
                    TargetArn: deviceARN
                  };
                  sns.publish(snsparams, function(err, data) {
                    if (err) {
                      console.log(err); // an error occurred
                      context.fail('SNS send failed. See log.');
                    } else {
                      console.log(data); // successful response
                      context.succeed('Push notification sent to user.');
                    }
                  });
                }
              }
            }
          });
        }
      });
    }
  });
  callback(null, `Successfully processed ${event.Records.length} records.`);
};
In my case, I had added the same event source multiple times. Quote from a conversation with an AWS support engineer:

Using my internal tools, I noticed that the Lambda function xxxxxx has the event source arn:aws:events:my_region:my_acct_id:rule/my_event_target configured twice as a push event source. This means that this might be the cause of why you are seeing two invokes every minute. Would you please confirm on your side whether this event is configured twice for the $LATEST version of your Lambda, and also confirm whether it's intended?

I hope this saves someone else :)
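A quick way to audit this for stream-based triggers is to list the function's event source mappings; duplicates of the same EventSourceArn would explain double processing. A sketch using the same aws-sdk the function already imports (the function name is a placeholder):

// List every event source mapping attached to the function.
var AWS = require("aws-sdk");
var lambda = new AWS.Lambda();

lambda.listEventSourceMappings({
  FunctionName: "my-function-name" // placeholder
}, function(err, data) {
  if (err) {
    console.log(err, err.stack);
  } else {
    data.EventSourceMappings.forEach(function(m) {
      console.log(m.UUID, m.EventSourceArn, m.State);
    });
  }
});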
On your Lambda's configuration page, at the bottom, try setting the "Concurrency" unreserved account concurrency to 1 and the "Asynchronous invocation" retry attempts to 0. As a test, try these and observe the behaviour; it might help.

Node JS, make HTTPS request synchronously from two links

I want to make an HTTPS request to an external link through Node.js. On my first call, I need to fetch user IDs by looping through several users. On my second call, I need to put that user ID into the URL and fetch the user's properties, repeating this process until I have gone through all users. The end goal is to store the data of every user in JSON format. There is no front end involved. Any direction/advice is much appreciated.
I can't share the actual link due to API keys, but here is a hypothetical scenario. I only show 2 users here; I have about 10,000 users in my actual data set.
Link 1
https://www.google.com/all_users

JSON Output
{
  "name": "joe",
  "uri": "/id/UserObject/User/1234-1234",
},
{
  "name": "matt",
  "uri": "/id/UserObject/User/5678-5678",
}
Link 2
https://www.google.com/id/UserObject/User/1234-1234

JSON Output
{
  "name": "joe",
  "uri": "/id/UserObject/User/1234-1234",
  "Property Values": {
    "height": "2",
    "location": "canada"
  },
  "Other Values": {
    "work": "google",
    "occupation": "developer"
  }
}
Nested JSON
{
  "PropertySetClassChildrenResponse": {
    "PropertySetClassChildren": {
      "PropertySetInstances": {
        "totalCount": "1",
        "Elements": [
          {
            "name": "SystemObject",
            "uri": "/type/PropertySetClasses/SystemObject"
          }
        ]
      }
    }
  }
}
Not tested, but this should point you in the right direction. It uses Promises and assumes that it runs in an ES6 environment:

const rp = require('request-promise');
const Promise = require('bluebird');

fetchAllUsers()
  .then(extractUserUris)
  .then(extractUserIds)
  .then(buildUserDetailRequests)
  .then(requests => Promise.all(requests)) // run all the user detail requests in parallel
  .then(allUserData => {
    // allUserData is an array of all users' data
  });

function fetchAllUsers() {
  // json: true makes request-promise parse the JSON response body for us
  return rp({ uri: 'https://api.whatever.com/all_users', json: true });
}

function extractUserUris(users) {
  return users.map(user => user.uri);
}

function extractUserIds(userUris) {
  return userUris.map(userUri => userUri.split('/').pop());
}

function buildUserDetailRequests(userIds) {
  return userIds.map(userId => rp({ uri: "https://api.whatever.com/user/" + userId, json: true }));
}
I'd suggest using the request package to make your HTTP requests easier.
> npm install request
Then you would obtain a list of all users with something like this:
var request = require('request');
request.get({url: "https://example.org/all_users"}, handleUsersResponse);
You'd handle the request response like this:

function handleUsersResponse(err, response, body) {
  if (!err && response.statusCode == 200) {
    // parse json (assuming an array of users)
    var users = JSON.parse(body);
    // iterate through each user and obtain user info
    for (var i = 0; i < users.length; i++) {
      var userUri = users[i].uri;
      obtainUserInfo(userUri);
    }
  }
}
The obtainUserInfo function would be similar to the code above.
One important thing to keep in mind is that HTTP requests are made asynchronously: when you make them in a loop, the next iteration does not wait for the previous request to finish, so the loop starts all the HTTP requests nearly in parallel. This can easily overwhelm both your client and the server. One way around this is a worker queue that enqueues the work and ensures that only a maximum number of HTTP requests are executing at any given time, as sketched below.
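A minimal sketch of that idea using the async library's eachLimit (the base URL and the concurrency limit of 10 are placeholders):

// Process the user URIs with at most 10 requests in flight at once.
var async = require('async');
var request = require('request');

function obtainAllUserInfo(userUris, done) {
  async.eachLimit(userUris, 10, function(userUri, callback) {
    request.get("https://example.org" + userUri, function(err, response, body) {
      if (err) return callback(err);
      // store/process the user info from `body` here
      callback();
    });
  }, done); // called once every URI has been processed, or on the first error
}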
You don't want to make synchronous calls; it defeats the purpose of using Node. So by the Node powers invested in me by the State of Texas, I hereby cast that synchronous way of thinking out of you!
Just kidding :), but let's do this the Node way.
Install these two libraries:
sudo npm install promise
sudo npm install request
And set your code to look like:
var Promise = require('promise');
var request = require('request');

// Get your user data, and print the data as JSON:
getUserData()
  .then(function(userData) {
    console.log(JSON.stringify(userData));
  }).catch(function(err) {
    console.log('Error: ' + err);
  });

/**
 * Prepares an object containing data for all users.
 * @return Promise - Contains object with all user data.
 */
function getUserData() {
  return new Promise(function(fulfill, reject) {
    // Make the first request to get the user IDs:
    var url1 = 'https://www.google.com/all_users';
    get(url1)
      .then(function(res) {
        res = JSON.parse(res);
        // Loop through the object to get what you need,
        // keeping a counter so we know once we are done.
        var counter = 0;
        var returnDataArr = [];
        for (var x = 0; x < res.users.length; x++) {
          var url2 = 'https://www.google.com/id/UserObject/User/';
          url2 = url2 + res.users[x].id; // wherever the individual ID is stored
          get(url2)
            .then(function(res2) {
              // Get what you need from the response from the 2nd URL.
              returnDataArr.push(res2);
              counter++;
              if (counter === res.users.length) {
                // Fulfill an object containing an array of the user data.
                fulfill({ data: returnDataArr });
              }
            }).catch(function(err) {
              // Catch any errors from the 2nd HTTPS request:
              reject('Error: ' + err);
            });
        }
      }).catch(function(err) {
        // Catch any errors from the 1st HTTPS request:
        reject('Error: ' + err);
      });
  });
}

/**
 * Your HTTPS GET request function.
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
  return new Promise(function(fulfill, reject) {
    var options = {
      url: url,
      headers: {
        'Header Name': 'Header Value',
        'Accept': 'application/json',
        'Content-Type': 'application/json'
      }
    };
    request(options, function(err, res, body) {
      if (err) {
        reject(err);
      } else {
        fulfill(body);
      }
    });
  });
}
So what this Promise does is return the value once we actually have it. In the code above, we first get the list of users, and then, as we parse through it, we make a new asynchronous HTTP request to get the additional data for each one. Once we get the user data, we push it to an array.
Finally, once our counter hits its endpoint, we know that we have gotten all the user data, so we call fulfill, which essentially means return, and it returns an object containing an array of the user data.
Let me know if this makes sense.
The answers above helped me go further with my solution and get the desired outcome. However, I spent a lot of time trying to understand Node, promises in Node, making an API call, etc. Hopefully this will help a beginner-level Node developer.

NODE
Node.js® is a JavaScript runtime built on Chrome's V8 JavaScript engine. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient. Node.js' package ecosystem, npm, is the largest ecosystem of open source libraries in the world.
If you are a JavaScript developer, you may prefer to use Node, as you won't have to spend time learning a new language like Java or Python.

GOAL
Make an HTTPS call to an external link to fetch all server URIs. Pass each URI in as a param to create a second link and fetch that server's properties. Loop through all server URIs and properties. Refer to the original post at the top for the data structure. The external link also required basic auth and headers.
CODE
Install the npm modules request (HTTPS calls), bluebird (promises), lodash (utilities), and express (Node framework).

/********************** MODULES/DEPENDENCIES **********************/
var express = require('express');
var request = require('request');
var Promise = require('bluebird');
var _ = require("lodash");
/********************** INITIATE APP **********************/
var app = express();
console.log("Starting node server...");

/**
 * Your HTTPS GET request function.
 * @param url - The url to GET
 * @return Promise - Promise containing the JSON response.
 */
function get(url) {
  return new Promise(function(resolve, reject) {
    // var auth = "Basic " + new Buffer(username + ':' + password).toString("base64");
    var options = {
      url: url,
      headers: {
        // 'Authorization': auth,
        'Content-Type': 'application/json',
        'Accept': 'application/json'
      }
    };
    console.log("Calling GET: ", url);
    if ('development' == app.get('env')) {
      console.log("Rejecting node tls");
      process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
    }
    request(options, function(error, response, body) {
      if (error) {
        reject(error);
      } else {
        // console.log("THIS IS BODY: ", body);
        resolve(body);
      }
    });
  });
}
/********************** GET DATA FUNCTION **********************/
function getServerData() {
  /********************** URI VARIABLES **********************/
  var username = 'username',
      password = 'password',
      role = 'Read-Only',
      url_host = 'https://link.com:10843';

  /********************** URL 1 **********************/
  var url1 = url_host + '/type/PropertySetClasses/SystemObject/Server/?maxResults=1000&username=' + username + '&password=' + password + '&role=' + role;
  console.log("Getting server data...", url1);

  /********************** GET REQUEST 1 **********************/
  return get(url1)
    .then(function(res) {
      console.log("Got response!");
      res = JSON.parse(res);
      res = res.PropertySetClassChildrenResponse.PropertySetClassChildren.PropertySetInstances.Elements;
      // console.log("THIS IS RES: ", res);

      /********** FETCH URI FROM RES NESTED OBJECT **********/
      var server_ids = _.map(res, function(server) {
        return server.uri;
      });
      console.log("Calling server urls", server_ids);

      // Loop through the URIs to get what you need. Promise.map keeps
      // track of when every request has finished.
      return Promise.map(server_ids, function(id) {
        var url2 = url_host + id + '?username=' + username + '&password=' + password + '&role=' + role;
        console.log("Calling URL", url2);
        return get(url2)
          .then(function(res2) {
            res2 = JSON.parse(res2);
            var elements = res2.PropertySetInstanceResponse.PropertySetInstance.PropertyValues.Elements;
            console.log("Got second response", res2, elements);
            return elements;
          });
      })
      .then(function(allUrls) {
        console.log("Got all URLS", allUrls);
        return allUrls;
      });
    })
    .catch(function(err) {
      console.error(err);
      throw err;
    });
}
app.listen(8080, function() {
  console.log("Server listening and booted on: " + 8080);
  app.get("/serverInfo", function(req, res) {
    console.log("Calling server info");
    return getServerData()
      .then(function(userData) {
        userData = JSON.stringify(userData, null, "\t");
        console.log("This is USERDATA Data: ", userData);
        res.send(userData);
      })
      .catch(function(err) {
        console.error(err);
        res.send({
          __error: err,
          message: err.message
        });
      });
  });
});
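Once the server is booted, requesting http://localhost:8080/serverInfo in a browser or any HTTP client should return the combined server data as tab-indented JSON.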
