Streaming JSON with Node.js

I have been given a task: I'm trying to make routes/endpoints using just Node to better demonstrate what is happening under the covers with Express. But I have to use streams.
I have two routes, GET /songs and GET /refresh-songs, with the following requirements:
We need the following two endpoints:
- GET /songs
- GET /refresh-songs
All endpoints should return JSON with the correct headers.
/songs should...
- stream data from a src/data/songs.json file
- not crash the server if that file or directory doesn't exist
- bonus points for triggering the refresh code in this case
- bonus points for compressing the response
/refresh-songs should...
- return immediately; it should not hold the response while songs are being refreshed
- return a 202 status code with a status JSON response
- continue getting songs from iTunes
The frontend should have:
- a UI/button to trigger this endpoint
This is what I have so far in my server.js file where my endpoints will live.
const { createServer } = require('http');
const { parse: parseUrl } = require('url');
const { createGzip } = require('zlib');
const { songs, refreshSongs } = require('./songs');
const fs = require('fs');

const stream = fs.createReadStream('./src/data/songs.jso')

const PORT = 4000;

const server = createServer(({ headers, method, url }, res) => {
  const baseResHeaders = {
    // CORS stuff is gone. :(
    'content-type': 'application/json'
  };

  // Routing ¯\_(ツ)_/¯
  var path = url.parseUrl(url).pathname;

  function onRequest(request, response) {
    response.writeHead(200, 'Content-Type': baseResHeaders['content-type']);
    stream.on('data', function(err, data) {
      if (err) {
        response.writeHead(404);
        response.write('File not found');
        refreshSongs();
      } else {
        response.write(createGzip(data))
      }
      response.end()
    })
  }

  switch (path) {
    case '/songs':
      onRequest()
      break;
    case '/refresh-songs':
      onRequest()
      break;
  }
server.on('listening', () => {
console.log(`Server running at http://localhost:${PORT}/`);
});
I am wondering if I am wiring the onRequest method I created correctly, and whether the switch statement is correctly going to intercept those URLs.
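For reference, here is a minimal sketch of one way the two routes could be wired with plain http, streams, and zlib. It is a sketch under assumptions, not a verified answer: it assumes ./songs exports a refreshSongs() that can run in the background, creates the read stream per request (a single module-level stream is consumed after the first response), listens for the stream's 'error' event so a missing file or directory doesn't crash the process, pipes the file through createGzip() instead of gzipping individual chunks, and calls server.listen(PORT), which the snippet above never does.

// Sketch under stated assumptions, not a drop-in answer. It assumes that
// ./songs exports refreshSongs() and that the data lives at ./src/data/songs.json.
const { createServer } = require('http');
const { parse: parseUrl } = require('url');
const { createGzip } = require('zlib');
const fs = require('fs');
const { refreshSongs } = require('./songs');

const PORT = 4000;

const server = createServer((req, res) => {
  const path = parseUrl(req.url).pathname;

  if (path === '/songs') {
    // A fresh stream per request; a module-level stream would be
    // exhausted after the first response.
    const fileStream = fs.createReadStream('./src/data/songs.json');

    fileStream.on('error', () => {
      // Missing file or directory: don't crash, report it, and kick off
      // a refresh (the "bonus points" case from the requirements).
      Promise.resolve(refreshSongs()).catch(console.error);
      if (!res.headersSent) {
        res.writeHead(404, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'songs.json not found, refresh triggered' }));
      } else {
        // Error after the response started streaming: just terminate it.
        res.end();
      }
    });

    fileStream.once('open', () => {
      // Headers are written only once the file is known to exist.
      res.writeHead(200, {
        'Content-Type': 'application/json',
        'Content-Encoding': 'gzip'
      });
      // Pipe file -> gzip -> response instead of gzipping chunk by chunk.
      fileStream.pipe(createGzip()).pipe(res);
    });
  } else if (path === '/refresh-songs') {
    // Respond immediately with 202; the refresh keeps running in the background.
    Promise.resolve(refreshSongs()).catch(console.error);
    res.writeHead(202, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'refresh started' }));
  } else {
    res.writeHead(404, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'not found' }));
  }
});

server.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}/`);
});

Compression could also be made conditional on the request's Accept-Encoding header; the sketch always gzips for brevity, and it only writes the 200 header once the file has actually opened, so the 404 branch can still set its own status.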

Related

Why is my Remote Server not appropriately routing?

I am having trouble getting my VPS set up correctly. I copied all the files recursively from my local server to the remote server, but POST and GET requests to the server are not working (404 Not Found errors). The routing works perfectly on my localhost server, but unfortunately it doesn't on the remote.
Server Code:
// getting datastores
const datastore = require('nedb');
const customerRecordsdb = new datastore("CustomerRecords.db");
customerRecordsdb.loadDatabase();

// importing express
const port = 3000;
const express = require('express');
const application = express();
const path = require('path')
const newPath = path.join(__dirname + '../../..')

application.listen(port, () => console.log("Listening at " + port));
application.use(express.json( {limit: '1mb'} ));

application.get('/RetrieveCustomerInformation', (request, response) => {
  console.log("SUCCESS!");
  customerRecordsdb.find({}, (error, data) => {
    if (error) {
      response.end();
      return;
    }
    response.json(data);
  })
})
Client Code:
// loading data
let numberRecords = 0;

async function loadClientRecords() {
  const response = await fetch('/RetrieveCustomerInformation');
  const data = await response.json();
  for (let i = 0; i < data.length; i++) {
    numberRecords++;
    insertNewRecord(data[i]);
  }
}

loadClientRecords()
  .then(response => {
    // was successful
  })
  .catch(error => {
    //console.log(error);
  })
[VPS page output] https://i.stack.imgur.com/ghoIa.png (the server is not outputting anything)
[Localhost page output] https://i.stack.imgur.com/I6mqE.png (the server outputs "SUCCESS!" on every refresh)
As previously mentioned, I simply copied the directory over to the remote server.
Any help to what the problem could be will be greatly appreciated!
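Nothing in the question pins down the cause, so the following is only a guess: if, on the VPS, the HTML/JS is served by a different web server (or a different port) than the Node process, the relative fetch('/RetrieveCustomerInformation') never reaches Express and returns 404. One way to rule that out is to have the same Express app serve the client files, so the relative path resolves against the same host and port; the public directory name below is an assumption.

// Hypothetical addition to the server code above: serve the frontend from
// this Express app so relative fetch() URLs hit the same server.
// The 'public' directory name is an assumption.
application.use(express.static(path.join(__dirname, 'public')));

application.get('/', (request, response) => {
  response.sendFile(path.join(__dirname, 'public', 'index.html'));
});

If a separate web server (Apache/nginx) has to serve the page, it would instead need to reverse-proxy these API routes to port 3000.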

How to make an HTTP/HTTPS request within a Node.js server?

I'm just starting to learn Node.js. Right now, I'm trying to use a Node.js application through cPanel to provide a JSON response when the app's URL is accessed.
When visiting the app's URL, it's evident that the Node.js server is working as it should: after editing the main JS file and restarting the Node.js app, changes are reflected when visiting the URL again.
My problem:
Within the callback of https.createServer(function (req, res) {}), I want to make an HTTPS request to a PHP file elsewhere, which returns a JSON response. At the moment, I can't even get a response or an error from any type of request to the PHP file.
var https = require('https');

var server = https.createServer(function (req, res) {
  var message = "";

  res.writeHead(200, {
    'Content-Type': 'text/plain'
  });

  var options = {
    host: "mydomain.com",
    path: '/myPhpScript.php'
  };

  https.get(options, function(res) {
    var bodyChunks = [];
    res.on('data', function(chunk) {
      bodyChunks.push(chunk);
    }).on('end', function() {
      var body = Buffer.concat(bodyChunks);
      message += body;
    })
  }).on('error', function(e) {
    message += e;
  });

  res.end(message);
});

server.listen();
As you can see, message would be what's displayed to the browser window, but it's empty. Nothing appears when visiting the App URL. Is it possible to make an HTTPS request with a Node.js HTTPS server?
Note:
I've also tried with native-request and axios and have experienced the same issue.
Server code:
var http = require('http');
var https = require("https");

var server = http.createServer(function (req, res) {
  let call = new Promise((resolve, reject) => {
    var options = {
      host: "jarrenmorris.com",
      port: 443,
      path: '/gamesense/r6_db/1.json'
    };
    https.get(options, function (res) {
      var bodyChunks = [];
      res.on('data', function (chunk) {
        bodyChunks.push(chunk);
      }).on('end', function () {
        resolve(Buffer.concat(bodyChunks));
      });
    }).on('error', function (e) {
      reject(e);
    });
  });

  call.then((data) => {
    // do something here with the successful request/json
    res.writeHead(200, {
      'Content-Type': 'text/plain'
    });
    res.end(data);
  }).catch((err) => {
    // do something here with the failure request/json
    // res.write("ERROR:");
    res.end(String(err)); // res.end() needs a string or Buffer, not an Error object
  });
});

server.listen(8081, "127.0.0.1", () => {
  console.log(`Server listen on ${server.address().address}:${server.address().port} `);
});
Response:
{"name":"tim","age":"42"}
The first thing I noticed when I tried to run your code was that I couldn't establish a connection to your Node.js server.
The reason is that you use the https module but don't specify any certificates/key files. Skip that and work with http until you get the result you want.
I then wrapped your https request to the external API/file in a promise.
This allows simple chaining and better readability of the code.
When the promise resolves/fulfills, we answer the request on the http server with the data we received from the external request.
The res.end in your code (where you put it) made no sense, since you didn't wait for the external request to complete. That's why nothing is shown in the browser window.
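For readability, the same flow can also be written with async/await on top of a small promise helper. This is only an equivalent sketch of the approach above (same host, path, and port), not a different fix:

var http = require('http');
var https = require('https');

// Helper that wraps https.get in a promise and resolves with the body buffer.
function fetchBody(options) {
  return new Promise((resolve, reject) => {
    https.get(options, (res) => {
      var chunks = [];
      res.on('data', (chunk) => chunks.push(chunk));
      res.on('end', () => resolve(Buffer.concat(chunks)));
    }).on('error', reject);
  });
}

var server = http.createServer(async function (req, res) {
  try {
    var data = await fetchBody({
      host: 'jarrenmorris.com',
      port: 443,
      path: '/gamesense/r6_db/1.json'
    });
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end(data);
  } catch (err) {
    // The external request failed; report it instead of hanging.
    res.writeHead(502, { 'Content-Type': 'text/plain' });
    res.end(String(err));
  }
});

server.listen(8081, '127.0.0.1');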

Connect node app and server + post image to server

I have a very basic question about a Node application, and a question about HTTP requests. It's the first time I've created a Node app with a server, and I just can't seem to get the different components to work together.
This is my server.js
var express = require('express');
var multer = require('multer');
const request = require('request');

const upload = multer({dest: __dirname + '/uploads/images'});
const app = express();
const PORT = 3000;

app.use(express.static('public'));

app.post('/upload', upload.single('photo'), (req, res) => {
  if (req.file) {
    res.json(req.file);
  }
  else throw 'error';
});

app.listen(PORT, () => {
  console.log('Listening at ' + PORT );
});
Then I have a file app.js with a motion-detection system. Every time motion is detected, a picture is taken. This all works fine.
Then the picture should be sent to the server. This is what I can't figure out.
I created a function toServer() that should post the detected data to the server
const request = require('request');

function toServer(data) {
  const formData = {
    // Pass data via Buffers
    my_buffer: data,
    // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
    // Use case: for some types of streams, you'll need to provide "file"-related information manually.
    // See the `form-data` README for more information about options: https://github.com/form-data/form-data
  };
  request.post({url:'http://localhost:3000/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {
    if (err) {
      return console.error('Upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
};
Problem 1: when running server.js on localhost:3000, it doesn't find any of the scripts loaded in index.html, nor my app.js.
Problem 2: when running index.html on live-server, all scripts are found, but I get the error "request is not defined".
I am pretty sure there is some basic node setup thing I'm missing.
The solution for toServer() might be more complicated.
Thanks for your time,
Mustard Shaper
Problem 1:
This could happen because you have not specified how to render your index.html.
For example:
res.render('index')
If it's not that, try replacing the single quotes in upload.single('photo') with double quotes.
Another possible cause is that you are missing a default view engine setting.
An example: https://www.npmjs.com/package/hbs
Problem 2:
It may be because you are missing the header:
var request = require('request');

request.post({
  headers: {'content-type' : 'application/x-www-form-urlencoded'},
  url: 'http://localhost',
  body: "example"
}, function(error, response, body){
  console.log(body);
});
See more at https://expressjs.com/
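To tie this back to the upload route in the first snippet, which expects a multipart field named photo (upload.single('photo')), a Buffer usually needs file metadata as well. Here is a sketch along the lines of the form-data README linked in the question; the filename and content type are assumptions about the captured image:

const request = require('request');

// Sketch only: posts an in-memory image buffer to the /upload route above.
function toServer(data) {
  const formData = {
    // The field name must match upload.single('photo') on the server.
    photo: {
      value: data, // Buffer containing the captured image
      options: {
        filename: 'capture.jpg',   // assumed name
        contentType: 'image/jpeg'  // assumed type
      }
    }
  };
  request.post({ url: 'http://localhost:3000/upload', formData: formData }, function (err, httpResponse, body) {
    if (err) {
      return console.error('Upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
}

Note that the request package only exists in Node; if this code runs in the browser (the "request is not defined" error from Problem 2), the upload would have to go through fetch with a FormData object instead.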

How can I add my web app name to the URL using Node.js?

How can I add my web app name to the URL using Node.js?
My web app runs at http://127.0.0.1:8080
I want to add the pathname /myapp, like:
http://127.0.0.1:8080/myapp
If you are using Express, follow this documentation:
https://expressjs.com/en/guide/routing.html
In vanilla Node.js you can do this:
const http = require('http');
const url = require('url');

// Routes
const router = {
  /* The '/myapp' route will be handled here and the following function will be executed */
  'myapp': function (callback) {
    // we expect a callback function as an argument
    callback(200, { name: 'sample handler' });
    // execute the callback function and pass a 200 status code and sample data,
    // which will be sent as the response payload to the user
  },
  'login': function (callback) {
    callback(200, { name: 'Login' }); // You can add your routes in this object and handle them
  }
};

const server = http.createServer((req, res) => {
  // Get URL
  const parsedURL = url.parse(req.url, true);
  // Get Path
  const path = parsedURL.pathname; // e.g. /myapp for http://127.0.0.1:8080/myapp
  const trimmedPath = path.replace(/^\/+|\/+$/g, '');
  // This will get the route specified after the base URL, in our case myapp

  req.on('data', (data) => {
    // streamed data. The browser sends data as a stream; this function runs for each chunk
  });

  // When the request has ended, the user request can be handled in this event
  req.on('end', () => {
    // Handle Routes
    const choosenHandler = router[trimmedPath]; // Check in the route object
    // Call the chosen handler and pass it a callback
    choosenHandler(function (statusCode, payload) {
      // Send response to user
      const payloadString = JSON.stringify(payload);
      res.setHeader('Content-Type', 'text/html');
      res.writeHead(statusCode);
      res.write('<h1>Data: ' + payloadString + '</h1>'); // This will be sent to the user
      console.log('returning response');
      res.end();
    });
  });
});

// Start the server
server.listen(3000, () => {
  console.log('Server Listening to 3000 port ');
});
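One caveat worth adding to the sketch above (this is an addition, not part of the original answer): for any path that is not a key in router, choosenHandler is undefined and calling it throws. A small fallback with a hypothetical notFound handler keeps the server from crashing on unknown URLs:

// Inside the 'end' handler: fall back to a default handler when the
// trimmed path has no entry in the router object.
const notFound = function (callback) {
  callback(404, { error: 'Not found' });
};
const choosenHandler = typeof router[trimmedPath] === 'function'
  ? router[trimmedPath]
  : notFound;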

Node JS HTTP Proxy hanging up

I have an http-proxy to proxy any website and inject a custom JS file before serving the HTML back to the client. Whenever I try to access the proxied website, it hangs, or the browser seems to load indefinitely. But when I check the HTML source, I can see that my custom JavaScript file was injected successfully. Here is the code:
const cheerio = require('cheerio');
const http = require('http');
const httpProxy = require('http-proxy');
const { ungzip } = require('node-gzip');

_initProxy(host: string) {
  let proxy = httpProxy.createProxyServer({});
  let option = {
    target: host,
    selfHandleResponse: true
  };

  proxy.on('proxyRes', function (proxyRes, req, res) {
    let body = [];
    proxyRes.on('data', function (chunk) {
      body.push(chunk);
    });
    proxyRes.on('end', async function () {
      let buffer = Buffer.concat(body);
      if (proxyRes.headers['content-encoding'] === 'gzip') {
        try {
          let $ = null;
          const decompressed = await ungzip(buffer);
          const scriptTag = '<script src="my-customjs.js"></script>';
          $ = await cheerio.load(decompressed.toString());
          await $('body').append(scriptTag);
          res.end($.html());
        } catch (e) {
          console.log(e);
        }
      }
    });
  });

  let server = http.createServer(function (req, res) {
    proxy.web(req, res, option, function (e) {
      console.log(e);
    });
  });

  console.log("listening on port 5051");
  server.listen(5051);
}
Can someone please tell me if I am doing anything wrong? It looks like node-http-proxy dies a lot, and I can't rely on it much, since the proxy can work sometimes and then die on the next run, depending on how many times I ran the server.
Your code looked fine, so I was curious and tried it.
Although you do log a few errors, you don't handle several cases:
- The server returns a response with no body (cheerio will generate an empty HTML body when this happens)
- The server returns a response that is not gzipped (your code will silently discard the response)
I made a few modifications to your code.
Change the initial options:
let proxy = httpProxy.createProxyServer({
  secure: false,
  changeOrigin: true
});
- secure: false (don't verify TLS certificates)
- changeOrigin: true (send the correct Host header)
Remove the if statement and replace it with a ternary:
const isCompressed = proxyRes.headers['content-encoding'] === 'gzip';
const decompressed = isCompressed ? await ungzip(buffer) : buffer;
You can also remove the two awaits on cheerio; Cheerio is not async and doesn't return an awaitable.
Final code
Here's the final code, which works. You mentioned that "it looks like node-http-proxy is dying a lot [...] depending on how many times I ran the server." I experienced no such stability issues, so if that is happening, your problems may lie elsewhere (bad RAM?).
const cheerio = require('cheerio');
const http = require('http');
const httpProxy = require('http-proxy');
const { ungzip } = require('node-gzip');

const host = 'https://github.com';

let proxy = httpProxy.createProxyServer({
  secure: false,
  changeOrigin: true
});

let option = {
  target: host,
  selfHandleResponse: true
};

proxy.on('proxyRes', function (proxyRes, req, res) {
  console.log(`Proxy response with status code: ${proxyRes.statusCode} to url ${req.url}`);
  if (proxyRes.statusCode == 301) {
    throw new Error('You should probably do something here, I think there may be an httpProxy option to handle redirects');
  }
  let body = [];
  proxyRes.on('data', function (chunk) {
    body.push(chunk);
  });
  proxyRes.on('end', async function () {
    let buffer = Buffer.concat(body);
    try {
      let $ = null;
      const isCompressed = proxyRes.headers['content-encoding'] === 'gzip';
      const decompressed = isCompressed ? await ungzip(buffer) : buffer;
      const scriptTag = '<script src="my-customjs.js"></script>';
      $ = cheerio.load(decompressed.toString());
      $('body').append(scriptTag);
      res.end($.html());
    } catch (e) {
      console.log(e);
    }
  });
});

let server = http.createServer(function (req, res) {
  proxy.web(req, res, option, function (e) {
    console.log(e);
  });
});

console.log("listening on port 5051");
server.listen(5051);
I ended up writing a small Python server using CherryPy and proxied the web app with mitmproxy. Everything is now working smoothly. Maybe I was doing it wrong with node-http-proxy, but I also became sceptical about using it in a production environment.
