Can I bypass cloudflare or use another method from this page
My code:
index.js:
// Fetch the bot inventory JSON from cs.money and log it.
const request = require('request');

const inventoryUrl = 'https://cs.money/load_bots_inventory?hash=1521879336025';

// json:true makes `request` parse the response body for us.
request(inventoryUrl, { json: true }, function (err, res, body) {
  if (err) {
    console.log(err);
    return;
  }
  console.log(body);
  console.log(body.explanation);
});
server.js
const http = require('http');
var fs = require('fs');

// Run index.js once at startup.
// Bug fix: the original used eval(fs.readFileSync('index.js')+'') to "include"
// the file. eval executes arbitrary text in the current scope, breaks stack
// traces, and is a well-known security hazard; require() runs the file in its
// own module scope and is the idiomatic Node way to include another script.
require('./index.js');

http.createServer((req, res) => {
  res.writeHead(200, {'Content-type': 'text/plain'});
  res.end('Node.js');
}).listen(3000, () => console.log('done'));
Based on your question I assume that you don't control the cs.money domain.
Based on this, I don't think you can find out what the origin server is. Also, the owners of cs.money use Cloudflare for a specific reason and probably don't want you to do this. Chances are that what you're trying to do is against the terms of use of the website you're trying to game.
Related
I have a very basic question about a node application, and a question about HTTP requests. It's the first time I create a node app with server, and I just can't seem to get the different components to work together.
This is my server.js
var express = require('express');
var multer = require('multer');
const request = require('request');

// Store uploaded images on disk under ./uploads/images.
const upload = multer({dest: __dirname + '/uploads/images'});
const app = express();
const PORT = 3000;

// Serve index.html, app.js and other static assets from ./public.
app.use(express.static('public'));

// Accept a single multipart file under the field name "photo".
app.post('/upload', upload.single('photo'), (req, res) => {
  if (req.file) {
    res.json(req.file);
  } else {
    // Bug fix: the original did `throw 'error'` — throwing a bare string
    // inside a route handler crashes the request without ever answering
    // the client. Respond with a proper 400 instead.
    res.status(400).json({ error: 'no file received in field "photo"' });
  }
});

app.listen(PORT, () => {
  console.log('Listening at ' + PORT );
});
Then I have a file app.js with a motion-detection system. Every time motion is detected, a picture is taken. This all works fine.
Then the picture should be sent to the server. This is what I can't figure out.
I created a function toServer() that should post the detected data to the server
const request = require('request');

/**
 * Upload detected image data to the local server as multipart form data.
 * Pass data via Buffers. Optional meta-data can be attached with an
 * 'options' object in the style {value: DATA, options: OPTIONS} — for some
 * stream types you must provide "file"-related information manually.
 * See the `form-data` README for details:
 * https://github.com/form-data/form-data
 */
function toServer(data) {
  const payload = {
    my_buffer: data,
  };

  request.post(
    { url: 'http://localhost:3000/upload', formData: payload },
    (err, httpResponse, body) => {
      if (err) {
        console.error('Upload failed:', err);
        return;
      }
      console.log('Upload successful! Server responded with:', body);
    }
  );
}
Problem 1: when running the server.js on localhost:3000, it doesn't find any of the scripts loaded in index.html nor my app.js.
Problem 2: when running the index.html on live-server, all scripts are found, but I get the error "request is not defined".
I am pretty sure there is some basic node setup thing I'm missing.
The solution for toServer() might be more complicated.
Thanks for your time,
Mustard Shaper
Problem 1:
this could happen because you have not specified to render your index.html.
for example:
res.render('index')
if it's not because of the single quotes in upload.single('photo') try double quotes.
Another possible error could be that you are missing a default display engine setting.
an example: https://www.npmjs.com/package/hbs
Problem 2:
it may be because you are missing the header
var request = require('request');

// Send a urlencoded POST; the explicit content-type header tells the
// receiving server how to parse the body.
var postOptions = {
  headers: {'content-type' : 'application/x-www-form-urlencoded'},
  url: 'http://localhost',
  body: "example"
};

request.post(postOptions, function (error, response, body) {
  console.log(body);
});
See more at https://expressjs.com/
Client code:
var data = new FormData();
data.append(fileName, blob, 'test.html');

fetch('http://localhost:3000/', {
  method: 'POST',
  // Deliberately empty: fetch sets the multipart/form-data Content-Type
  // (including the boundary) automatically for a FormData body.
  headers: {
  },
  body: data
}).then(
  response => {
    // Bug fix: the original chained a second .then(success => ...) after
    // this one; its argument was always `undefined` because this handler
    // returns nothing. Handle the response in a single .then instead.
    console.log(response)
  }
).catch(
  error => {
    console.log(error)
  }
);
Server code:
// Echo the posted form fields back to the client.
router.post('/', urlencodedParser, function(req, res, next) {
  const body = req.body;
  console.log(body);
  // Bug fix: interpolating the object directly produced "[object Object]";
  // serialize it so the client actually sees the submitted fields.
  res.send(`You sent: ${JSON.stringify(body)} to Express`);
});
I am sending a blob in the body of a post request. When I send it to the server I want the server to download the file from the body of the request. How can i download this file? Or is there a simpler way to upload from client?
If you can utilize an NPM package formidable, there appears to be a solution at: https://www.w3schools.com/nodejs/nodejs_uploadfiles.asp
Once you have the file received, you can use the fs module to save and store in server
It may solve your problem.
const fs = require('fs');

// Destination for the uploaded body.
// NOTE(review): fs.writeFile needs a file path, not a directory — confirm
// '/temp/data' is intended as a file name.
let directory = '/temp/data'; // where you want to save data file

router.post('/', urlencodedParser, function(req, res, next) {
  const body = req.body;
  console.log(body);
  fs.writeFile(directory, body, function(err) {
    if(err) {
      console.log(err);
      // Bug fix: the original only logged the error and left the request
      // hanging; tell the client the save failed.
      return res.status(500).send('Could not save file');
    }
    console.log("File has been saved");
    // Bug fix: respond only after the write completes. The original sent
    // the response synchronously, before (and regardless of) the async
    // write finishing.
    res.send(`You sent: ${body} to Express`);
  });
});
This solved my answer - https://attacomsian.com/blog/uploading-files-nodejs-express, which basically uses a middleware to do the upload.
This was basically like:
const x = 6;
console.log(x);
Error: value is f'd up
const x = 6;
magic.valueParse(x);
console.log(x);
6
Also, I would like to point out that bodyParser cannot be used for multipart data. It is mentioned in the official docs, but even the responses I get seem to point to bodyParser. So I thought I'd reiterate that.
i am trying with following code:
const http = require('http');
const fs = require('fs');

// Serve ./index.html for every request.
var hServer = http.createServer( (req, res) => {
  console.log ("Received Connection..");
  fs.readFile('./index.html', function(err, page) {
    // Bug fix: the original ignored `err`; if index.html is missing,
    // `page` is undefined and res.write(page) throws. Answer 500 instead.
    if (err) {
      res.writeHeader(500, {"Content-Type": "text/plain"});
      res.end('Could not read index.html');
      return;
    }
    res.writeHeader(200, {"Content-Type": "text/html"});
    res.write(page);
    res.end();
  });
});

hServer.listen(8989);
When I connect from a browser to http://localhost:8989,
the console message "Received Connection.." is printed twice. Why?
const http = require('http');
const fs = require('fs');

// Serve ./index.html and log each request's URL (shows the extra
// /favicon request browsers make automatically).
var hServer = http.createServer( (req, res) => {
  console.log ("Received Connection...");
  console.log('URL: ' + req.url);
  fs.readFile('./index.html', function(err, page) {
    // Bug fix: handle a readFile failure instead of writing `undefined`.
    if (err) {
      res.writeHeader(500, {"Content-Type": "text/plain"});
      res.end('Could not read index.html');
      return;
    }
    res.writeHeader(200, {"Content-Type": "text/html"});
    res.write(page);
    res.end();
  });
});

hServer.listen(8989);
Will print:
Received Connection...
URL: /
Received Connection...
URL: /favicon
It is because the browser automatically asks for the favicon, the little icon you see in your tabs.
If you fire your request from POSTMan, wget, curl, or other http tools, you'll only see one request.
This can be traced down by logging out req using console.log(req).
Looking at the raw request we see that the browser additionally requests /favicon for each request.
url: '/',
url: '/favicon.ico',
I have partially written a NODE.JS file to update the JSON file with data received from the client. The post works successfully. The Get command does not. I was wondering if there's a better way to do this? I have about 6 different callback options to write for. All different. I was wondering if there's a node.JS script already done that has all of the things I need. Or if there's a different language that would make it easier.
Here's the NODE:
var http = require('http');
var fs = require('fs');

// Minimal JSON comment store: POST overwrites comments-data.json with the
// request body; GET returns the file's current contents.
http.createServer(function (req, res) {
  console.log('Request received: ');

  if (req.method == 'POST') {
    // NOTE(review): each 'data' chunk overwrites the whole file; bodies
    // larger than one chunk should be accumulated first — confirm payload
    // sizes before relying on this.
    req.on('data', function (chunk) {
      fs.writeFile("comments-data.json", chunk, function(err) {
        if(err) {
          return console.log(err);
        }
        console.log("The file was saved!");
      })
    });
    res.end('{"msg": "success"}');
  };

  if (req.method == 'GET') {
    // Bug fix: GET requests have no body, so the original's req.on('data')
    // handler never fired; `return data` inside the readFile callback went
    // nowhere, and res.end(data) referenced an undefined variable and
    // crashed the server. Read the file and respond inside its callback.
    fs.readFile('comments-data.json', 'utf8', function (err, data) {
      if (err) {
        res.writeHead(500, {'Content-Type': 'application/json'});
        return res.end('{"msg": "error"}');
      }
      res.end(data);
    });
  };
}).listen(8080, '127.0.0.1');

console.log('Server running at http://127.0.0.1:8080/');
Here's the AJAX call:
// Forward a new comment to the local Node server and relay the result to
// the plugin's success/error callbacks.
postComment: function(commentJSON, success, error) {
  var ajaxOptions = {
    type: 'post',
    url: 'http://127.0.0.1:8080',
    data: commentJSON,
    success: function(comment) { success(comment); },
    error: error
  };
  $.ajax(ajaxOptions);
},
But there's an ajax call for all sorts of things with the jquery plugin that i'm using. I need to GET, POST, PUT, DELETE, and sometimes multiple within the call.
Here's a full list of all of the callbacks i'm using:
http://viima.github.io/jquery-comments/#link-3-6
Using express you can do this much easily.
const express = require('express');
// Bug fix: the original used fs without requiring it.
const fs = require('fs');

const app = express.Router();

// Parse JSON bodies so req.body is populated in the POST handler below.
app.use(express.json());

//POST Request
app.post('/',(req, res, next)=>{
  // Bug fix: the original wrote an undefined variable `chunk`; persist the
  // parsed request body instead, and report failures to the client.
  fs.writeFile("comments-data.json", JSON.stringify(req.body), function(err) {
    if(err) {
      console.log(err);
      return res.status(500).json({'status': 'Error'});
    }
    console.log("The file was saved!");
    res.json({'status': 'Success'})
  })
})

//GET Request
app.get('/',(req, res, next)=>{
  fs.readFile('comments-data.json', 'utf8', function (err, data) {
    // Bug fix: `throw err` inside an async callback crashes the process
    // and never answers the request; send an error response instead.
    if (err) {
      return res.status(500).json({'status': 'Error'});
    }
    res.json({'status': 'Success', 'data':data})
  });
})
As for your question regarding writing it in a different module. That is based on the pattern adopted by you. There are various nodejs patterns available eg. Controller based or classes based. It all depends on what you find comfortable.
I'm trying to get a node.js script to display the contents of a text file if the contents of the file changes.
Here's what I've got so far:
// Serve the contents of data.txt whenever the file changes.
var http = require('http');
var fs = require("fs");
http.createServer(function (req, res) {
// NOTE(review): a new fs.watch watcher is registered for EVERY request and
// never closed, so watchers accumulate; each one also captures this
// request's `res`, which can legally be ended only once.
fs.watch("/Users/{username}/Desktop/data.txt",function(event,file) {
// Re-read the watched file on each change notification.
fs.readFile('/Users/{username}/Desktop/data.txt', 'utf8', function (err,data) {
if (err) {
return console.log(err);
}
// NOTE(review): if the writer truncates the file before rewriting it, this
// read can race the write and observe an empty file — the likely cause of
// the blank page described below.
res.writeHead(200, {'Content-Type': 'text/plain'});
res.end(data);
//^ this doesn't work, just returns a blank page
});
});
}).listen(1337, '0.0.0.0');
Unfortunately, it only displays a blank page. What am I doing wrong...?
How about this:
var http = require('http');
var fs = require("fs");

// Write the watched file's contents to the given response as plain text.
// Bug fix: the original dataHandler(data) referenced `res`, which exists
// only inside the createServer callback, so the first change notification
// threw a ReferenceError. Pass the response in explicitly instead.
function dataHandler(res, data) {
  res.writeHead(200, {'Content-Type': 'text/plain'});
  res.end(data);
}

http.createServer(function (req, res) {
  fs.watch("/Users/{username}/Desktop/data.txt",function(event,file) {
    fs.readFile('/Users/{username}/Desktop/data.txt', 'utf8', function (err,data) {
      if (err) {
        return console.log(err);
      }
      dataHandler(res, data);
    });
  });
}).listen(1337, '0.0.0.0');
It's very likely that whatever process changed the file did so by opening it in write mode and then rewriting it. When that happens you'll get the notification right after the other process has opened the file, at which point it will be empty because it hasn't been written to yet.
It's probably best to set a timeout that's triggered every time you get a change notification (i.e. each time you get notified while a timeout is pending, you clear the timeout and start a new one) and only read the file when the timeout completes.