I have set up an Express server to handle different requests, one of which is a DELETE request. It works sometimes and gives a 404 other times. I noticed that the URL it is sending is different. So if I change my server code to handle one path, it works until the client sends a different path. I am unable to understand why it is sending different URLs and is not consistent. I am very new to web programming; still a student. Maybe I am missing something very basic.
The request is being sent from
http://localhost:3000/notes
page.
Yesterday the request was sent with this path:
Today the request is :
Just in case the images do not load, These are the urls:
http://localhost:3000/api/notes/id
http://localhost:3000/notes/api/notes/id
This is the client side request: (I have verified that it's calling the delete with the correct value)
// Client-side helper that sends an HTTP DELETE for the note with the given id.
var deleteNote = function(id) {
return $.ajax({
// NOTE(review): relative url — the browser resolves it against the
// current page's path, so the final request path depends on which
// page issued the call (this is the inconsistency asked about).
url: "api/notes/" + id,
method: "DELETE"
});
};
This is the server code:
// DELETE /api/notes/:id — removes the matching note from db/db.json.
app.delete("/api/notes/:id", (req, res) => {
// Route parameter captured as a string (req.params values are strings).
let chosenNoteToDelete = req.params.id;
fs.readFile(__dirname + "/db/db.json", (err, data) => {
if(err){
// NOTE(review): throwing inside an async callback cannot be caught by
// Express; the client never receives a response.
throw err;
}
let json = JSON.parse(data);
// NOTE(review): splicing while iterating forward skips the element that
// slides into the removed slot.
for(let i=0; i<json.length; i++){
if(json[i].id === chosenNoteToDelete){
json.splice(i,1);
}
}
// Persist the modified array back to the same file.
fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
if(err){
throw err;
}
res.send("Successfully deleted");
})
})
});
Can someone help me understand why it's inconsistent? And how do I handle it on the server?
Change the client code from this:
// BEFORE — relative url (resolved against the current page's path):
var deleteNote = function(id) {
return $.ajax({
url: "api/notes/" + id,
method: "DELETE"
});
};
to this:
// AFTER — the leading slash makes the url absolute from the site root:
var deleteNote = function(id) {
return $.ajax({
url: "/api/notes/" + id,
method: "DELETE"
});
};
Your relative path tells jQuery to combine your path with the path from the page's URL. You don't want a relative path. You always want it to be /api/notes/id so you need the leading slash.
Some other things to cleanup in your server code.
Log all possible errors with console.log(err) or some similar logging mechanism.
NEVER, EVER write if (err) throw err in your server inside an asynchronous callback. That does you no good as nobody can catch that error. Instead, you must always log the error and then HANDLE the error by sending an error response.
When parsing JSON from an external source that can throw an error use try/catch around it.
When you .splice() an array that you are iterating, you need to either return after processing the .splice() or you need to correct the iterating index (because you just moved the array elements down that are after it so you will miss the next item in the array) or you need to iterate the array backwards so a .splice() operation won't affect the iteration.
Here's a fixed version of your code:
// DELETE /api/notes/:id — remove the matching note(s) from db/db.json.
// Fixes from review:
//  * `json` was declared with `let` INSIDE the try block, so it was out of
//    scope (ReferenceError) by the time the for loop ran;
//  * the catch block logged `err` instead of its own parameter `e`;
//  * `return` inside the for loop exited the whole handler before
//    fs.writeFile ran, so the deletion was never persisted.
app.delete("/api/notes/:id", (req, res) => {
    let chosenNoteToDelete = req.params.id;
    fs.readFile(__dirname + "/db/db.json", (err, data) => {
        if (err) {
            console.log(err);
            res.sendStatus(500);
            return;
        }
        // Declared outside the try block so the array is visible below.
        let json;
        try {
            json = JSON.parse(data);
        } catch (e) {
            console.log(e);
            res.sendStatus(500);
            return;
        }
        // Iterate backwards so .splice() cannot skip the element that
        // slides into the removed slot.
        for (let i = json.length - 1; i >= 0; i--) {
            if (json[i].id === chosenNoteToDelete) {
                json.splice(i, 1);
            }
        }
        fs.writeFile(__dirname + "/db/db.json", JSON.stringify(json), (err) => {
            if (err) {
                console.log(err);
                res.sendStatus(500);
                return;
            }
            res.send("Successfully deleted");
        });
    });
});
And, here's a cleaner implementation using fs.promises and async/await with more centralized error handling and detection if the chosenNote is not found:
const fsp = require('fs').promises;
const path = require('path');

// DELETE /api/notes/:id — remove every note whose id matches; respond with
// 404 when nothing matched, 500 on any read/parse/write failure.
app.delete("/api/notes/:id", async (req, res) => {
    const targetId = req.params.id;
    const dbFile = path.join(__dirname, "/db/db.json");
    try {
        const raw = await fsp.readFile(dbFile);
        const notes = JSON.parse(raw);
        // Walk the array backwards so .splice() never skips an element.
        let removed = false;
        for (let i = notes.length - 1; i >= 0; i--) {
            if (notes[i].id === targetId) {
                removed = true;
                notes.splice(i, 1);
            }
        }
        if (!removed) {
            res.status(404).send(`Note id ${targetId} not found.`);
            return;
        }
        await fsp.writeFile(dbFile, JSON.stringify(notes));
        res.send("Successfully deleted");
    } catch (e) {
        // Any failure along the way funnels into a single 500 reply.
        console.log(e);
        res.sendStatus(500);
    }
});
Related
// GET /students/:record_id — scans every file in students/ for the record id.
app.get('/students/:record_id', function (req, res) {
var found =false;
var record_id = req.params.record_id;
var dirname = "students/";
// NOTE(review): `filenames` is assigned without var/let/const — an implicit global.
filenames = fs.readdirSync(dirname);
filenames.forEach(filename => {
// fs.readFile is asynchronous: these callbacks run AFTER the forEach
// loop — and the `found` check below — have already finished.
fs.readFile(dirname + filename, "utf-8", function (err, data ) {
if (err) {
onError(err);
return;
}
if (data.includes(record_id)) {
found = true;
return res.status(200).send(data);
}
});
});
// NOTE(review): this executes before any readFile callback, so `found`
// is still false even when the student exists (the bug asked about).
if(found == false){
return res.status(500).send({ "message": "error - no student found" });
}
});
I am using GET searching by a student name to retrieve a students file. If I remove the if condition it is successful. But if the student does not exist then it does not work.
The if condition fixes this by making found true when it finds it in the file. The problem is that the if condition executes before the file is read.
I tried using a callback, but after a couple of hours of research I cannot figure out how to implement it here, since the readFile callback is already being used to retrieve the student info.
I tried using a promise but the promise is then only fulfilled if the student is found and I do not know how to implement it where it is fulfilled when it is not found.
Since you've tried with Promises, I'll show you how to do it with Promises
Using async/await especially makes this really easy
Note: since I don't know how you are using fs the import/require only gets the promise versions of readdir and readFile, and uses it like readdir rather than fs.readdir - so other existing code won't break
const {readFile, readdir} = require('fs/promises');
// if you use import (modules) instead of require (commonjs)
// import { readFile, readdir } from 'fs/promises';
// GET /students/:record_id — reply with the first file in students/ whose
// contents mention the record id; 404 when no file matches.
app.get('/students/:record_id', async function (req, res) {
    const record_id = req.params.record_id;
    const dirname = "students/";
    try {
        for (const filename of await readdir(dirname)) {
            const contents = await readFile(dirname + filename, "utf-8");
            if (contents.includes(record_id)) {
                // Found it — send the file and stop scanning.
                return res.status(200).send(contents);
            }
        }
        res.status(404).send({"message": "error - student not found"});
    } catch (err) {
        // readdir/readFile failure: report it and answer with a 500.
        onError(err);
        res.status(500).send({"message": "internal server error"});
    }
});
Note: I wouldn't send a 500 (Internal Server Error) if a student is not found - I changed the logic to send 404 (not Found) instead - which is more appropriate
If you want pure promise (no async/await) - I believe the following is one way to do it, probably not the nicest code, but it should work (haven't tested though)
const {readFile, readdir} = require('fs/promises');
// if you use import (modules) instead of require (commonjs)
// import { readFile, readdir } from 'fs/promises';
// GET /students/:record_id — promise-only version (no async/await).
// Fixes from review: `filenames` was never defined (readdir was never
// called), and the trailing `.then(...)` was detached from any expression,
// which is a syntax error. The chain is now built from readdir's result
// and the final handlers are attached to that chain.
app.get('/students/:record_id', function (req, res) {
    var record_id = req.params.record_id;
    var dirname = "students/";
    readdir(dirname)
        .then(filenames => {
            // Check one file at a time; once `found` carries file data,
            // the remaining steps pass it straight through unread.
            let p = Promise.resolve(false);
            for (let filename of filenames) {
                p = p.then(found =>
                    found
                        ? found
                        : readFile(dirname + filename, "utf-8").then(data =>
                            data.includes(record_id) ? data : false
                        )
                );
            }
            return p;
        })
        .then(data => data ? res.status(200).send(data) : res.status(404).send({"message": "error - student not found"}))
        .catch(err => {
            onError(err);
            res.status(500).send({"message": "internal server error"});
        });
});
And, finally - without using Promises at all
// GET /students/:record_id — callback-only version: examine one file per
// step, recursing to the next until a match, an error, or the end of the list.
app.get('/students/:record_id', function (req, res) {
    const record_id = req.params.record_id;
    const dirname = "students/";
    const filenames = fs.readdirSync(dirname);
    const scanFrom = position => {
        if (position >= filenames.length) {
            // Every file was checked without a hit.
            res.status(404).send({"message": "error - student not found"});
            return;
        }
        fs.readFile(dirname + filenames[position], "utf-8", function (err, contents) {
            if (err) {
                onError(err);
                res.status(500).send({"message": "internal server error"});
            } else if (contents.includes(record_id)) {
                res.status(200).send(contents);
            } else {
                scanFrom(position + 1);
            }
        });
    };
    scanFrom(0);
});
I am using NodeJS and MongoDB as a back-end service. In my collection I have several documents having fields named _id and Name, but Node is returning only the first document and showing an error in the console. I want to fetch only the Name field of each document.
Error:
Here is my code:
var express = require('express');
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/";
var bodyParser = require('body-parser');
var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended:true}));
// POST /offers — intended to return the Name field of every Offers document.
app.post('/offers',(req, res) => {
MongoClient.connect(url, (err, db) => {
// NOTE(review): throwing inside an async callback cannot be caught; the
// client gets no response on a connect failure.
if(err) throw err;
var dbo = db.db('Tiffino_db');
dbo.collection("Offers")
// Projection drops _id so only Name remains in each document.
.find({},{ projection: { _id: 0 } })
.toArray((err, result) => {
if (err) {
// NOTE(review): the unary `+` coerces err to a number (NaN);
// string concatenation was presumably intended.
console.log("Error:", +err);
}
else {
// NOTE(review): res.send() inside a loop — a response can only be
// sent once, which is the error the question asks about.
for(var i = 0;i<result.length;i++){
res.send(result[i].Name);
}
db.close();
}
});
});
});
Please let me know what I did wrong in above code.
THANKS
It looks like you're trying to send multiple responses in a loop using res.send(), but a response can only be sent once.
This won't work; you'll need to create an array of names and then call res.send() once.
Using res.send(result.map(r => r.Name)) would probably work
You should aggregate the results into a variable and then call res.send() only once:
// Build one newline-separated string of names, then respond exactly once.
// Fix: initialize resultString to '' — starting from `undefined` would
// prefix the response body with the literal text "undefined".
let resultString = '';
for (var i = 0; i < result.length; i++) {
    resultString += result[i].Name + '\n';
}
res.send(resultString);
If you want to stream the results, you can use res.write() and when done call res.end()
You cannot use res.send() multiple times in a for loop; try something like this
// Gather every Name into a single array and answer with one res.send().
var data = result.map(function (doc) {
    return doc.Name;
});
res.send(data);
// POST /offers — responds with a JSON array holding the Name of every offer.
app.post('/offers', (req, res) => {
    MongoClient.connect(url, (err, db) => {
        if (err) {
            // Report the failure instead of throwing inside the callback,
            // where nothing could catch it — and answer the client.
            console.log(err);
            return res.sendStatus(500);
        }
        var dbo = db.db('Tiffino_db');
        dbo.collection("Offers")
            .find({}, { projection: { _id: 0 } })
            .toArray((err, results) => {
                if (err) {
                    // Fix: was `console.log("Error:", +err)` — the unary `+`
                    // coerced the error to NaN. Also reply so the request
                    // doesn't hang.
                    console.log("Error:", err);
                    res.sendStatus(500);
                } else {
                    // Fix: `response` was assigned without a declaration,
                    // creating an implicit global.
                    const response = results.map(result => result.Name);
                    res.json(response);
                }
                db.close();
            });
    });
});
...
...
else {
output = result.map(offer => offer.name);
res.send({output});
db.close();
}
This should work as you can only send response once on a single response object and you are calling that for results.length times.
I'm pretty sure you're not supposed to be calling res.send(...) multiple times. Since you're calling it inside a loop, it will send the first document and fail in the next as expected.
I am trying to get an image from S3 using this backend function below:
// Streams an S3 object identified by ?bucket=...&key=... to the response.
exports.getImage = function (req, res) {
let request = req.query;
let key = request.key;
let bucket = request.bucket;
if (!key) {
return res.status(400).end("missing key");
}
if (!bucket) {
return res.status(400).end("missing bucket name");
}
let options = {
Bucket: bucket,
Key: key
};
res.setHeader('Content-Type', content_map['jpg']);
let nimage = null;
try {
// NOTE(review): `await` inside a non-async function is a syntax error.
// Even with async, try/catch would not catch errors the stream emits
// later (e.g. NoSuchKey) — attach an 'error' handler to the stream.
nimage = await s3.getObject(options).createReadStream().pipe(res);
}
catch (err) {
console.log(err);
}
};
As one can see, there are measures against the lack of a key or a bucket within the request: a status 400 is sent back. However, I want to handle situations where the wrong key or the wrong bucket is specified. The situation where a wrong key is added to the request is handled erroneously in the code above.
I also handled it using the code below, but did not end up getting any results:
// Second attempt from the question: callback plus stream on the same request.
s3.getObject(options, function(err, data) {
if (err) {
// NOTE(review): res.status(400).end(err) passes an Error object where
// .end expects a string/Buffer — presumably err.message was intended.
return res.status(400).end(err);
}
}).createReadStream().pipe(res);
The error that pops up is:
4:15:32 PM - error: caught an unhandled exception! NoSuchKey: The specified key does not exist.
NoSuchKey: The specified key does not exist.
I want to create a uptime monitor using NodeJS and MongoDB. I want to run a cron job in NodeJS and store the data into MongoDB. If the website response status code is not equal to 200 then it will be saved in the database. I want to make a database entry like this,
url : http://www.google.com
status_code : 500
start_time :- start time
end_time :- end time
I can run the cron job but am not sure how to save the downtime in the database, as I don't want to store every response in the database. Only when the response status code is other than 200 should it start tracking (start_time) the URL, and it should record the time when the website is back to 200 as end_time.
cron.js :-
var async=require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');
// Cron task body: probes every stored URL and records its HTTP status.
module.exports = function getHttpsRequests () {
Entry.find({},function(err,entrys){
console.log(err);
if(!err && entrys){
// Probe each entry's URL in parallel; async.each waits for every callback.
async.each(entrys,function(entry,callback){
request(entry.url, function (error, response, body) {
// NOTE(review): when the request fails, `response` is undefined and
// reading response.statusCode throws — check `error` first.
entry.statuscheck=response.statusCode;
entry.save();
callback();
});
},function (error) {
});
}
});
}
health.model.js :-
const mongoose = require('mongoose');
const EntrySchema = mongoose.Schema({
url: String,
statuscheck: String
}, {
timestamps: true
});
module.exports = mongoose.model('Entry', EntrySchema);
I would do something like this to handle updating the database. I went ahead and put standard arrow functions in, because it was easier for me that way. I put some comments in so that should clear most questions up. It may not be the most elegant solution because I wrote it in 5 minutes, but if you follow this general logic flow, you should be much closer to your solution (its completely untested mind you.)
var async=require('async');
const Entry = require('../models/health.model.js');
var https = require('https');
var request = require('request');
module.exports = function getHttpsRequests () {
Entry.find({}, (err,entrys) => {
console.log(err);
if (!err && entrys) {
async.each(entrys, (entry,callback) => {
request(entry.url, (error, response, body) => {
//first check if the url has a document in the db.
Entry.find({ url: entry.url }, (err, entry) => {
if(!entry) {
//since the document does not exist, check the statusCode.
if(response.statusCode===200) { //if the statusCode is 200, continue the loop.
callback();
} else { //if the status code is not 200, lets save this to the db.
console.log("Saving object: " + entry)
entry.status_code = response.statusCode;
entry.start_time = new Date();
entry.save();
callback();
}
} else if (entry) {
//since the document exists, lets check the statusCode.
if(response.statusCode===200) { //if the statusCode is 200, update the stop_time.
entry.end_time = new Date();
Entry.findOneAndUpdate({ url: entry.url }, entry, (err, object) => { //this returns the entry after update, so we can put that in the console for easy debug.
if (err) {
console.log(err);
callback();
} else {
console.log("Object saved: " + object);
callback();
}
});
}
} else { //there was an error finding the document in the db, just go to the next one.
callback();
});
});
});
}
});
}
I have the following server.js get route
// GET / — probes every endpoint and should reply with the reachable urls.
app.get('/', function(req, res) {
var url;
var final_res = [];
endpoints.forEach(function(url){
// request() is asynchronous: the handler returns before any of these
// callbacks fire, so nothing here ever sends final_res to the client.
request(url, function(error,response,body){
if(!error && response.statusCode == 200){
final_res.push(url.url);
console.log(url.url);
}else{
// NOTE(review): `err` is undefined here (the parameter is `error`),
// and res.send() can be reached multiple times across callbacks.
res.send(err);
console.log(err);
}
});
});
});
And this is my client js where I fetch this exact same get with jQuery
// Once the DOM is ready, fetch the url list from the server and drop it
// into #body. ($(fn) is jQuery's shorthand for $(document).ready(fn).)
$(function () {
    $.get('http://localhost:3000/', function (data) {
        console.log(data);
        $("#body").text(data);
    });
});
When I open my index.html it displays the user interface correctly, and the terminal where my server.js is executing correctly displays the url. What I can't accomplish is how to use the data that my jQuery receives in order to populate a table inside my HTML. My table will be populated with urls that are fetched from my endpoints.
I have some background in Node.js but I can't wrap this up.
Since you need to know when multiple requests are done, I'd suggest you switch to using the request-promise library so you can use promises to track when all the requests are done. That library also checks the statusCode for you automatically. So, you can do this:
const rp = require('request-promise');

// GET / — probe every endpoint in parallel and reply with the urls that
// answered successfully (request-promise rejects on non-2xx statuses).
app.get('/', function(req, res) {
    const probes = endpoints.map(function (url) {
        return rp(url)
            .then(function () { return url.url; })
            .catch(function () { return null; });  // failed probe → placeholder
    });
    Promise.all(probes).then(function (results) {
        // Drop the placeholders, then send the surviving urls as JSON.
        res.json(results.filter(function (item) { return item !== null; }));
    }).catch(function (err) {
        console.log(err);
        res.sendStatus(500);
    });
});
This will run all the requests in parallel, but collect the results in order which should result in the fastest overall run time.
If you wanted to run them one a time, you could use async/await like this:
const rp = require('request-promise');

// GET / — probe the endpoints one at a time (serial), collecting the url of
// each endpoint whose request yields a truthy body; failures are skipped.
app.get('/', async function(req, res) {
    let results = [];
    for (const endpoint of endpoints) {
        try {
            const body = await rp(endpoint);
            if (body) {
                results.push(endpoint.url);
            }
        } catch (e) {
            // ignore error — a failed endpoint simply isn't listed
        }
    }
    res.json(results);
});
EDIT Jan, 2020 - request() module in maintenance mode
FYI, the request module and its derivatives like request-promise are now in maintenance mode and will not be actively developed to add new features. You can read more about the reasoning here. There is a list of alternatives in this table with some discussion of each one. I have been using got() myself and it's built from the beginning to use promises and is simple to use.
You must wait for all requests to resolve before sending the final_res array back to the client. You can do this using async/await and Promise.all. If you don't want to use those, then you'll need to count and wait for all the requests manually, using a counter to know when all requests have finished, as below:
// GET / — counter-based version: decrement respCounter as each request
// finishes and send the collected urls exactly once when it reaches zero.
// Fixes from review: the error branch referenced an undefined `err` (the
// callback parameter is `error`, so it threw a ReferenceError) and called
// res.send() per failure, which errors with "headers already sent" after
// the first response goes out.
app.get('/', function(req, res) {
    var final_res = [];
    var respCounter = endpoints.length;
    endpoints.forEach(function(url) {
        request(url, function(error, response, body) {
            respCounter--;
            if (!error && response.statusCode == 200) {
                final_res.push(url.url);
                console.log(url.url);
            } else {
                // Log the failure but keep going; the single response is
                // sent below once every request has reported back.
                console.log(error);
            }
            if (respCounter == 0) {
                res.send(final_res);
            }
        });
    });
});