I'm trying to get a Node.js script to display the contents of a text file whenever the contents of the file change.
Here's what I've got so far:
var http = require('http');
var fs = require("fs");

http.createServer(function (req, res) {
    fs.watch("/Users/{username}/Desktop/data.txt", function (event, file) {
        fs.readFile('/Users/{username}/Desktop/data.txt', 'utf8', function (err, data) {
            if (err) {
                return console.log(err);
            }
            res.writeHead(200, {'Content-Type': 'text/plain'});
            res.end(data);
            //^ this doesn't work, just returns a blank page
        });
    });
}).listen(1337, '0.0.0.0');
Unfortunately, it only displays a blank page. What am I doing wrong...?
How about this:
var http = require('http');
var fs = require("fs");

function dataHandler(data) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(data);
}

http.createServer(function (req, res) {
    fs.watch("/Users/{username}/Desktop/data.txt", function (event, file) {
        fs.readFile('/Users/{username}/Desktop/data.txt', 'utf8', function (err, data) {
            if (err) {
                return console.log(err);
            }
            dataHandler(data);
        });
    });
}).listen(1337, '0.0.0.0');
It's very likely that whatever process changed the file did so by opening it in write mode and then rewriting it. When that happens you'll get the notification right after the other process has opened the file, at which point it will be empty because it hasn't been written to yet.
It's probably best to set a timeout that's triggered every time you get a change notification (i.e. each time you get notified while a timeout is pending, you clear the timeout and start a new one) and only read the file when the timeout completes.
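A minimal sketch of that approach, keeping the same file path and port as your code (the 100 ms delay and the latest variable are arbitrary choices of mine):

var http = require('http');
var fs = require('fs');

var latest = '';      // most recent contents read from the file
var debounce = null;  // pending timeout

fs.watch('/Users/{username}/Desktop/data.txt', function () {
    // Every change notification resets the timer; the file is only read once
    // notifications have stopped for 100 ms, i.e. after the writer has finished.
    if (debounce) clearTimeout(debounce);
    debounce = setTimeout(function () {
        fs.readFile('/Users/{username}/Desktop/data.txt', 'utf8', function (err, data) {
            if (err) return console.log(err);
            latest = data;
        });
    }, 100);
});

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end(latest); // serve whatever was read most recently
}).listen(1337, '0.0.0.0');

Note this also moves the fs.watch call out of the request handler, so you don't register a new watcher on every incoming request.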
Can I bypass Cloudflare or use another method from this page?
My code:
index.js:
const request = require('request');

request('https://cs.money/load_bots_inventory?hash=1521879336025', { json: true }, (err, res, body) => {
    if (err) { return console.log(err); }
    console.log(body);
    console.log(body.explanation);
});
server.js
const http = require('http');
var fs = require('fs');

// file is included here:
eval(fs.readFileSync('index.js') + '');

http.createServer((req, res) => {
    res.writeHead(200, {'Content-type': 'text/plain'});
    res.end('Node.js');
}).listen(3000, () => console.log('done'));
Based on your question I assume that you don't control the cs.money domain.
If that's the case, I don't think you can find out what the origin server is. Also, the owners of cs.money use Cloudflare for a specific reason and probably don't want you to bypass it. Chances are that what you're trying to do is against the terms of use of the website you're trying to game.
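If you just want to confirm that responses are being served through Cloudflare rather than directly by the origin, you can inspect the response headers; a quick sketch using the same request module as your index.js (Cloudflare-fronted responses normally carry a server: cloudflare and a cf-ray header):

const request = require('request');

request('https://cs.money/', (err, res) => {
    if (err) { return console.log(err); }
    // These headers are added by Cloudflare's edge, not by the origin server.
    console.log(res.headers['server']);  // typically "cloudflare"
    console.log(res.headers['cf-ray']);  // Cloudflare request id
});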
I am trying with the following code:
const http = require('http');
const fs = require('fs');

var hServer = http.createServer((req, res) => {
    console.log("Received Connection..");
    fs.readFile('./index.html', function (err, page) {
        res.writeHeader(200, {"Content-Type": "text/html"});
        res.write(page);
        res.end();
    });
});

hServer.listen(8989);
When I connect from a browser to http://localhost:8989, I see the console print "Received Connection.." twice. Why?
const http = require('http');
const fs = require('fs');

var hServer = http.createServer((req, res) => {
    console.log("Received Connection...");
    console.log('URL: ' + req.url);
    fs.readFile('./index.html', function (err, page) {
        res.writeHeader(200, {"Content-Type": "text/html"});
        res.write(page);
        res.end();
    });
});

hServer.listen(8989);
Will print:
Received Connection...
URL: /
Received Connection...
URL: /favicon.ico
It is because the browser automatically asks for the favicon, the little icon you see in your browser tabs.
If you fire your request from Postman, wget, curl, or other HTTP tools, you'll only see one request.
This can be traced by logging the request object with console.log(req).
Looking at the raw requests, we see that the browser additionally requests /favicon.ico alongside each page request:
url: '/',
url: '/favicon.ico',
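If the extra request bothers you, one option (not part of the original code) is to short-circuit favicon requests before doing any work:

const http = require('http');
const fs = require('fs');

const hServer = http.createServer((req, res) => {
    if (req.url === '/favicon.ico') {
        // Nothing to serve here; 204 No Content ends the request immediately.
        res.writeHead(204);
        return res.end();
    }
    console.log('Received Connection...');
    fs.readFile('./index.html', (err, page) => {
        res.writeHead(200, {'Content-Type': 'text/html'});
        res.write(page);
        res.end();
    });
});

hServer.listen(8989);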
I have partially written a Node.js file to update a JSON file with data received from the client. The POST works successfully; the GET does not. I was wondering if there's a better way to do this. I have about six different callbacks to write for, all different. Is there an existing Node.js script that already does all of the things I need, or a different language that would make this easier?
Here's the Node code:
var http = require('http');
var fs = require('fs');

http.createServer(function (req, res) {
    console.log('Request received: ');
    if (req.method == 'POST') {
        req.on('data', function (chunk) {
            fs.writeFile("comments-data.json", chunk, function (err) {
                if (err) {
                    return console.log(err);
                }
                console.log("The file was saved!");
            })
        });
        res.end('{"msg": "success"}');
    };
    if (req.method == 'GET') {
        req.on('data', function (chunk) {
            fs.readFile('comments-data.json', 'utf8', function (err, data) {
                if (err) throw err;
                obj = JSON.parse(data);
                return data;
            });
        });
        res.end(data);
    };
}).listen(8080, '127.0.0.1');

console.log('Server running at http://127.0.0.1:8080/');
Here's the AJAX call:
postComment: function(commentJSON, success, error) {
    $.ajax({
        type: 'post',
        url: 'http://127.0.0.1:8080',
        data: commentJSON,
        success: function(comment) {
            success(comment)
        },
        error: error
    });
},
But the jQuery plugin I'm using makes AJAX calls for all sorts of things. I need to GET, POST, PUT, and DELETE, and sometimes make multiple requests within one callback.
Here's a full list of all of the callbacks I'm using:
http://viima.github.io/jquery-comments/#link-3-6
Using Express you can do this much more easily.
const express = require('express');
const fs = require('fs');

const app = express();
app.use(express.json());                         // parse JSON bodies into req.body
app.use(express.urlencoded({ extended: true })); // parse form-encoded bodies (what $.ajax sends by default)

// POST request: save the posted comment data
app.post('/', (req, res, next) => {
    fs.writeFile('comments-data.json', JSON.stringify(req.body), function (err) {
        if (err) {
            return next(err);
        }
        console.log('The file was saved!');
        res.json({ 'status': 'Success' });
    });
});

// GET request: return the stored comment data
app.get('/', (req, res, next) => {
    fs.readFile('comments-data.json', 'utf8', function (err, data) {
        if (err) {
            return next(err);
        }
        res.json({ 'status': 'Success', 'data': JSON.parse(data) });
    });
});

app.listen(8080, () => console.log('Server running at http://127.0.0.1:8080/'));
As for your question about structuring this differently: that depends on the pattern you adopt. There are various Node.js patterns available, e.g. controller-based or class-based. It all depends on what you find comfortable.
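For instance, if you want the route handlers in their own module (the file names here are just an example), you can export an express.Router and mount it on the app:

// routes/comments.js (example file name): a handler exported as a Router
const express = require('express');
const fs = require('fs');
const router = express.Router();

router.get('/', (req, res, next) => {
    fs.readFile('comments-data.json', 'utf8', (err, data) => {
        if (err) return next(err);
        res.json({ 'status': 'Success', 'data': JSON.parse(data) });
    });
});

module.exports = router;

and in the main file:

// app.js: mount the router
const express = require('express');
const app = express();

app.use(express.json());
app.use('/', require('./routes/comments'));
app.listen(8080);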
So I am sending an HTTP POST request to a Node.js HTTP server that's running locally. I wish to extract the JSON object from the HTTP body and use the data it holds to do some stuff on the server side.
Here is my client app, which issues the request:
var requester = require('request');

requester.post(
    'http://localhost:1337/',
    {body: JSON.stringify({"someElement": "someValue"})},
    function (error, response, body) {
        if (!error)
        {
            console.log(body);
        }
        else
        {
            console.log(error + response + body);
            console.log(body);
        }
    }
);
Here's the server that is supposed to receive that request:
http.createServer(function (req, res) {
    var chunk = {};
    req.on('data', function (chunk) {
        chunk = JSON.parse(chunk);
    });
    if (chunk.someElement)
    {
        console.log(chunk);
        // do some stuff
    }
    else
    {
        // report error
    }
    res.writeHead(200, {'Content-Type': 'text/plain'});
    res.end('Done with work \n');
}).listen(1337, '127.0.0.1');

console.log('Server running at http://127.0.0.1:1337/');
Now the issue is that since req.on() extracts the POST data asynchronously via its callback, the if(chunk.someElement) check is evaluated before that extraction is done, so it always goes to the else clause and I am unable to do anything at all.
Is there a simpler way to handle this issue (by simpler, I mean: not using any other fancy libraries or modules, just pure node)?
Is there a synchronous function that performs the same tasks as req.on() and returns the contents of the body before I do the if(chunk.someElement) check?
You need to buffer the request data and parse/use the JSON on the request's 'end' event instead, because there is no guarantee that all of the data will be received as a single chunk:
http.createServer(function (req, res) {
    var buffer = '';
    req.on('data', function (chunk) {
        buffer += chunk;
    }).on('end', function () {
        var result;
        try {
            result = JSON.parse(buffer);
        } catch (ex) {
            res.writeHead(400);
            return res.end('Bad JSON');
        }
        if (result && result.someElement)
        {
            console.log(result);
            // do some stuff
        }
        else
        {
            // report error
        }
        res.writeHead(200, {'Content-Type': 'text/plain'});
        res.end('Done with work \n');
    }).setEncoding('utf8');
}).listen(1337, '127.0.0.1');

console.log('Server running at http://127.0.0.1:1337/');
I'm writing a Node.js script that converts HTML files to ENML (Evernote Markup Language).
This script correctly converts an existing HTML file to the desired ENML output. Now I have the following question:
The client will be sending an HTML file in JSON format. How do I listen for all incoming requests, take the JSON object, convert it to ENML, and write the response back to the original request?
My code for this is as follows:
var fs = require('fs');
var path = require('path');
var html = require('enmlOfHtml');

var contents = '';
var contents1 = '';

fs.readFile(__dirname + '/index.html', 'utf8', function (err, html1) {
    html.ENMLOfHTML(html1, function (err, ENML) { //using Enml-js npm
        contents1 = ENML;
    });
});

var http = require('http');
http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write(contents1);
}).listen(4567, "127.0.0.1");
Thanks!
I guess that the client will make POST requests to your server. Here is how you could get the sent information:
var http = require('http');
var qs = require('querystring');
var html = require('enmlOfHtml'); // using Enml-js npm

var processRequest = function (req, callback) {
    var body = '';
    req.on('data', function (data) {
        body += data;
    });
    req.on('end', function () {
        callback(qs.parse(body));
    });
};

http.createServer(function (req, res) {
    processRequest(req, function (clientData) {
        html.ENMLOfHTML(clientData, function (err, ENML) {
            res.writeHead(200, {'Content-Type': 'application/json'});
            res.end(JSON.stringify(ENML));
        });
    });
}).listen(4567, "127.0.0.1");
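Since your question says the client sends JSON rather than URL-encoded form data, you may want to swap qs.parse for JSON.parse; a sketch of just the processRequest helper with that change:

var processRequest = function (req, callback) {
    var body = '';
    req.on('data', function (data) {
        body += data;
    });
    req.on('end', function () {
        try {
            callback(JSON.parse(body)); // parse a JSON body instead of form data
        } catch (e) {
            callback({});               // fall back to an empty object on bad JSON
        }
    });
};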
You can use Node's request module.
request('http://www.example.com', function (error, response, body) {
    if (!error && response.statusCode == 200) {
        console.log(body);
    }
});