D3.json Unexpected token with Node.js Server - javascript

Trying to learn D3, I wrote the following local server:
const http = require('http');
const fs = require('fs');

function onRequest(request, response) {
  response.writeHead(200, {'Content-Type': 'text/html'});
  fs.readFile('./index.html', null, function(error, data) {
    if (error) {
      response.writeHead(404);
      // response.write('file not found');
    } else {
      response.write(data);
    }
    response.end();
  });
}

http.createServer(onRequest).listen(8000, '127.0.0.1');
I then go to http://127.0.0.1:8000/ to render this index.html:
<html>
<body>
  <script src="https://d3js.org/d3.v5.min.js"></script>
  <script>
    var stringit = `[{"coin_val": 4999999,"coin_lab": "#PAX"},{"coin_val": 1100000,"coin_lab": "#USDC"}]`;
    console.log('working.')
    d3.json('./data.json', function(err, data) {
      console.log(err)
      console.log(data)
    });
  </script>
</body>
</html>
but I receive the following error in Chrome console:
Uncaught (in promise) SyntaxError: Unexpected token < in JSON at
position 1
at Go (d3.v5.min.js:2) Go # d3.v5.min.js:2
What am I doing wrong? Is it in the D3 code, or do I just not have the server right? How can I get D3 to read a JSON file from a Node.js server?
I suspect the JSON itself is not the issue; is something going wrong on the server side so that the HTML is served instead?

I wrote the following local server
Which serves up the contents of index.html in response to any request it gets.
d3.json('./data.json',
So your JavaScript asks for data.json and it gets the contents of index.html.
Since the contents of index.html are not JSON, and start with a <, it throws the error message. A JSON file cannot start with a <.
You need to fix your server so it gives the browser what it asks for instead of blindly sending index.html.
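For example, here is a minimal sketch (reusing the file names and port from the question) that checks request.url and serves data.json with a JSON Content-Type instead of always returning index.html:

const http = require('http');
const fs = require('fs');

function onRequest(request, response) {
  // serve data.json when the browser asks for it, otherwise fall back to index.html
  const isJson = request.url === '/data.json';
  const file = isJson ? './data.json' : './index.html';
  const type = isJson ? 'application/json' : 'text/html';

  fs.readFile(file, function(error, data) {
    if (error) {
      response.writeHead(404, {'Content-Type': 'text/plain'});
      response.end('file not found');
    } else {
      response.writeHead(200, {'Content-Type': type});
      response.end(data);
    }
  });
}

http.createServer(onRequest).listen(8000, '127.0.0.1');

Note also that d3.json in v5 returns a Promise, so once the server responds correctly the data is read with d3.json('./data.json').then(data => ...) rather than a node-style callback.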

Your problem seems to be that your code doesn't know how to serve anything but index.html. It is really frustrating working with a pure Node server, because most resources on the internet assume that users are going to employ Express or another framework.
Below is a server that can serve static websites and handle requests for a few common types of media. You can add other types by adding their MIME types to the getContentType function.
I hope this helps.
'use strict'

// Step 1: Declare Constants and Require External Resources
const port = "8888", landing = 'index.html', hostname = "127.0.0.1";
const path = require('path');
const http = require('http');
const fs = require('fs');
const qs = require('querystring');

// Step 2: Create getContentType Function that Returns the Requested MimeType for the Browser
/**
 * getContentType :: str -> str
 *
 * Function returns the content type that matches the resource being
 * requested by the server controller
 */
function getContentType(url){
  const mimeTypes = {
    '.html' : 'text/html'             , '.js'   : 'text/javascript' ,
    '.css'  : 'text/css'              , '.json' : 'application/json' ,
    '.png'  : 'image/png'             , '.jpg'  : 'image/jpg' ,
    '.gif'  : 'image/gif'             , '.svg'  : 'image/svg+xml' ,
    '.wav'  : 'audio/wav'             , '.mp4'  : 'video/mp4' ,
    '.woff' : 'application/font-woff' , '.ttf'  : 'application/font-ttf' ,
    '.otf'  : 'application/font-otf'  , '.eot'  : 'application/vnd.ms-fontobject' ,
    '.wasm' : 'application/wasm'
  };
  // If the requested url's extension is a known mime type, the dict object returns its value,
  // otherwise octet-stream is returned instead
  return mimeTypes[path.extname(url).toLowerCase()] || 'application/octet-stream';
}

// Step 3: Create sendFile Function that Delivers Requested Files to the Response Stream
/**
 * sendFile :: (str, str, str, stream, stream) -> void
 *
 * Function delivers any requested resource to the stream
 */
function sendFile(file, url, contentType, request, response){
  fs.readFile(file, (error, content) => {
    if (error) {
      response.writeHead(404)
              .write(`404 Error: '${url}' Was Not Found!`);
      response.end();
      // include the file path for easy debugging, tabs added to make it distinct
      console.log(`\t${request.method} Response: 404 Error, '${file}' Was Not Found!`);
    } else {
      response.writeHead(200, {'Content-Type': contentType})
              .write(content);
      response.end();
      console.log(`\t${request.method} Response: 200, ${url} Served`);
    }
  });
}

// Step 4: Create serverController Function to Initialize the Server and Run the Request Loop
/**
 * serverController :: str -> void
 *
 * Function creates a server and uses sendFile and getContentType to serve
 * requested resources
 */
function serverController(hostname) {
  const server = http.createServer((request, response) => {
    // Creates space around .html requests so that they stand out more in the console
    if (path.extname(request.url) == '.html' || request.url == '/') {
      console.log(`\nPage Requested: ${request.url}\n`);
    } else {
      if (request.method == "GET") {
        console.log(`${request.method} Request: ${request.url}`);
      } else {
        console.log(`Request came: ${request.url}`);
      }
    }
    // Sends the requested resource to the response stream
    if (request.url == '/') {
      var file = path.join(__dirname, landing); // delivers index.html by default
      sendFile(file, landing, 'text/html', request, response);
    } else {
      var file = path.join(__dirname, request.url); // delivers the requested resource
      sendFile(file, request.url, getContentType(request.url), request, response);
    }
  });
  // Gives the server a port to listen to and an IP address to find it
  server.listen(port, hostname, () => {
    console.log(`Server running at ${hostname}:${port}\n`);
  });
}

// Step 5: Create launch IIFE that Starts the Server upon Instantiation
(function launch() {
  serverController(hostname);
})();
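Once this server is running (node server.js, or whatever you name the file), a quick way to confirm that JSON requests now come back with the right Content-Type is a small check script (this assumes a data.json file sits in the same directory as index.html):

// request data.json and print the status code and Content-Type the server reports
const http = require('http');

http.get('http://127.0.0.1:8888/data.json', (res) => {
  console.log(res.statusCode, res.headers['content-type']); // expect 200 'application/json'
  res.resume(); // discard the body; we only care about the headers here
});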

Related

Connect node app and server + post image to server

I have a very basic question about a node application, and a question about HTTP requests. It's the first time I've created a node app with a server, and I just can't seem to get the different components to work together.
This is my server.js
var express = require('express');
var multer = require('multer');
const request = require('request');

const upload = multer({dest: __dirname + '/uploads/images'});
const app = express();
const PORT = 3000;

app.use(express.static('public'));

app.post('/upload', upload.single('photo'), (req, res) => {
  if (req.file) {
    res.json(req.file);
  }
  else throw 'error';
});

app.listen(PORT, () => {
  console.log('Listening at ' + PORT);
});
Then I have a file app.js with a motion-detection system. Every time motion is detected, a picture is taken. This all works fine.
Then the picture should be sent to the server. This is what I can't figure out.
I created a function toServer() that should post the detected data to the server
const request = require('request');

function toServer(data) {
  const formData = {
    // Pass data via Buffers
    my_buffer: data,
    // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
    // Use case: for some types of streams, you'll need to provide "file"-related information manually.
    // See the `form-data` README for more information about options: https://github.com/form-data/form-data
  };
  request.post({url: 'http://localhost:3000/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {
    if (err) {
      return console.error('Upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
}
Problem 1: when running server.js on localhost:3000, it doesn't find any of the scripts loaded in index.html, nor my app.js.
Problem 2: when running index.html on live-server, all scripts are found, but I get the error "request is not defined".
I am pretty sure there is some basic Node setup thing I'm missing.
The solution for toServer() might be more complicated.
Thanks for your time,
Mustard Shaper
Problem 1:
This could happen because you have not told Express to render your index.html, for example:
res.render('index')
If it's not because of the single quotes in upload.single('photo'), try double quotes.
Another possible cause is that you are missing a default view engine setting.
An example: https://www.npmjs.com/package/hbs
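For instance, a minimal sketch of wiring up a view engine so that res.render('index') works (this assumes the hbs package is installed and a views/index.hbs template exists):

var express = require('express');
var app = express();

app.set('view engine', 'hbs'); // use Handlebars templates from the ./views folder

app.get('/', function(req, res) {
  res.render('index'); // renders views/index.hbs
});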
Problem 2:
It may be because you are missing the content-type header:
var request = require('request');

request.post({
  headers: {'content-type': 'application/x-www-form-urlencoded'},
  url: 'http://localhost',
  body: "example"
}, function(error, response, body) {
  console.log(body);
});
See more at https://expressjs.com/
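As for toServer() itself: the server uses upload.single('photo'), so multer will only pick up a form field named photo. A minimal sketch with the request package, assuming the captured picture has been saved to disk at a hypothetical imagePath:

const fs = require('fs');
const request = require('request');

function toServer(imagePath) {
  const formData = {
    // the field name must match upload.single('photo') on the server
    photo: fs.createReadStream(imagePath)
  };
  request.post({url: 'http://localhost:3000/upload', formData: formData}, function(err, httpResponse, body) {
    if (err) {
      return console.error('Upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
  });
}

If the picture only exists as a Buffer in memory, the {value: DATA, options: OPTIONS} style mentioned in the question's comments lets you attach the file-related metadata (filename, content type) manually.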

Streaming JSON with Node.js

I have been given a task: I'm trying to make routes/endpoints using just Node to better demonstrate what is happening under the covers with Express. But I have to use streams.
I have two routes, GET /songs and GET /refresh-songs, with the following requirements:
We need the following two endpoints:
  GET /songs
  GET /refresh-songs
All endpoints should return JSON with the correct headers.
/songs should...
  stream data from a src/data/songs.json file
  not crash the server if that file or directory doesn't exist
  bonus points for triggering refresh code in this case
  bonus points for compressing the response
/refresh-songs should...
  return immediately
  it should not hold the response while songs are being refreshed
  return a 202 status code with a status JSON response
  continue getting songs from iTunes
frontend should have
  UI/button to trigger this endpoint
This is what I have so far in my server.js file where my endpoints will live.
const { createServer } = require('http');
const { parse: parseUrl } = require('url');
const { createGzip } = require('zlib');
const { songs, refreshSongs } = require('./songs');
const fs = require('fs');

const stream = fs.createReadStream('./src/data/songs.jso')

const PORT = 4000;

const server = createServer(({ headers, method, url }, res) => {
  const baseResHeaders = {
    // CORS stuff is gone. :(
    'content-type': 'application/json'
  };

  // Routing ¯\_(ツ)_/¯
  //
  var path = url.parseUrl(url).pathname;

  function onRequest(request, response) {
    response.writeHead(200, 'Content-Type': baseResHeaders['content-type']);
    stream.on('data', function(err, data) {
      if (err) {
        response.writeHead(404);
        response.write('File not found');
        refreshSongs();
      } else {
        response.write(createGzip(data))
      }
      response.end()
    })
  }

  switch (path) {
    case '/songs':
      onRequest()
      break;
    case '/refresh-songs':
      onRequest()
      break;
  }

server.on('listening', () => {
  console.log(`Server running at http://localhost:${PORT}/`);
});
I am wondering if I am wiring up the onRequest method I created correctly, and whether the switch statement will correctly intercept those URLs.
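For what it's worth, here is a rough sketch of that wiring under the stated requirements, reusing the question's imports. The main points: parse is imported as parseUrl so it is called directly, the handler has to be passed to createServer, and a read stream should be created per request and piped through gzip rather than calling createGzip on a chunk:

const { createServer } = require('http');
const { parse: parseUrl } = require('url');
const { createGzip } = require('zlib');
const { refreshSongs } = require('./songs'); // same module the question imports
const fs = require('fs');

const PORT = 4000;

const server = createServer((req, res) => {
  const path = parseUrl(req.url).pathname;

  if (path === '/songs') {
    // a fresh stream per request; a single shared stream is drained after the first response
    const stream = fs.createReadStream('./src/data/songs.json');
    stream.on('error', () => {
      // missing file or directory: answer with JSON instead of crashing
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: 'songs.json not found' }));
      refreshSongs();
    });
    stream.once('open', () => {
      res.writeHead(200, { 'Content-Type': 'application/json', 'Content-Encoding': 'gzip' });
      stream.pipe(createGzip()).pipe(res); // gzip is a transform stream, so pipe through it
    });
  } else if (path === '/refresh-songs') {
    // respond immediately with 202 and let the refresh run in the background
    res.writeHead(202, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ status: 'refresh started' }));
    refreshSongs();
  } else {
    res.writeHead(404, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'not found' }));
  }
});

server.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}/`);
});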

REST API node.js

I am trying to retrieve data from a REST API on the server side (.js) and display it in my view (.jade).
I was able to get the data, but I was not able to send it to the view.
This is what my code looks like:
var BugData = 'initial data';
var https = require('https');

var optionsget = {
  rejectUnauthorized: false,
  host: 'My host', // here only the domain name (no http/https !)
  port: 443,
  path: 'Mypath', // the rest of the url with parameters if needed
  method: 'GET'   // do GET
};

console.info('Options prepared:');
console.info(optionsget);
console.info('Do the GET call');

// do the GET request
var reqGet = https.request(optionsget, function(res) {
  console.log("statusCode: ", res.statusCode);
  res.on('data', function(d) {
    console.info('GET result:\n');
    BugData = d;
    console.log('Show Data : ***** \n' + d);
  });
});

reqGet.end();

reqGet.on('error', function(e) {
  console.error(e);
});

res.render('index', { ab: BugData });
BugData (which was defined before) is the variable I am trying to send to the view, but for some reason it is empty and does not contain the variable 'd'.
Does anyone know why, or how I can solve this?
Thanks
There is no need to write that much code. The root cause is that the GET request is asynchronous: res.render runs before any 'data' event has fired, so BugData is still empty at that point. Keep it simple and follow these steps:
1) Install the request package:
npm install --save request
2) Outside of the router, add:
var request = require('request');
process.env.NODE_TLS_REJECT_UNAUTHORIZED = 0;
3) Use this code inside the router:
request.get({url: 'https://my-host/Mypath'}, function(err, response, body) {
  var data = {};
  if (err) {
    console.error(err);
    data.err = err;
  }
  data.ab = body;
  console.log('Data: ', data);
  res.render('index', data);
});
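If you would rather keep the plain https module from the original code, the same principle applies: collect the chunks and only call res.render once the response has ended. A sketch reusing the optionsget object from the question, with the inner response renamed to apiRes so it doesn't shadow the Express res:

var https = require('https');

var reqGet = https.request(optionsget, function(apiRes) {
  var body = '';
  apiRes.on('data', function(d) {
    body += d; // 'data' can fire many times; collect every chunk
  });
  apiRes.on('end', function() {
    // only now is the full payload available
    res.render('index', { ab: body });
  });
});

reqGet.on('error', function(e) {
  console.error(e);
  res.render('index', { ab: 'request failed' });
});

reqGet.end();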

Parsing post data in phantomjs

I am working with the POSTMAN extension for Chrome and am trying to send a POST request to PhantomJS. I have managed to send a POST request to a PhantomJS server script by setting up Postman as in the attached screenshot.
My phantomjs script is as follows:
// import the webserver module, and create a server
var server = require('webserver').create();
var port = require('system').env.PORT || 7788;

console.log("Start Application");
console.log("Listen port " + port);

// Create server and listen on port
server.listen(port, function(request, response) {
  console.log("request method: ", request.method); // request.method POST or GET
  if (request.method == 'POST') {
    console.log("POST params should be next: ");
    console.log(request.headers);
    code = response.statusCode = 200;
    response.write(code);
    response.close();
  }
});
when I run phantomjs at the command line , here is the output:
$ phantomjs.exe myscript.js
Start Application
Listen port 7788
null
request method: POST
POST params should be next:
[object Object]
POST params: 1=bill&2=dave
So, it does appear to work. My question now is how to parse the post body into variables, so I can access it in the rest of the script.
To read post data, you should not use request.headers, as that holds the HTTP headers (encoding, cache, cookies, ...).
As said here, you should use request.post or request.postRaw.
request.post is a JSON object, and you are writing it straight to the console; that's why you get [object Object]. Apply JSON.stringify(request.post) when logging.
Since request.post is a JSON object, you can also read properties directly using indexers (do not forget to add a basic check in case a property was not posted).
Here is an updated version of your script:
// import the webserver module, and create a server
var server = require('webserver').create();
var port = require('system').env.PORT || 7788;

console.log("Start Application");
console.log("Listen port " + port);

// Create server and listen on port
server.listen(port, function (request, response) {
  console.log("request method: ", request.method); // request.method POST or GET
  if (request.method == 'POST') {
    console.log("POST params should be next: ");
    console.log(JSON.stringify(request.post)); // dump
    console.log(request.post['1']);            // key is '1'
    console.log(request.post['2']);            // key is '2'
    code = response.statusCode = 200;
    response.write(code);
    response.close();
  }
});
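If the client posts JSON rather than form-encoded data, request.post will not be a parsed object. In that case the raw text is what you need (request.postRaw when PhantomJS has already decoded request.post, otherwise request.post itself holds the raw body), and you can parse it yourself; a sketch, where someField is a hypothetical key in the posted JSON:

// inside the listener, for a JSON body
var raw = request.postRaw || request.post;
var parsed = null;
try {
  parsed = JSON.parse(raw);
} catch (e) {
  // not JSON; fall back to the form-decoded object in request.post
}
console.log(parsed ? parsed.someField : request.post['1']);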

Processing PLUpload uploads with node.js

I'm attempting to create a server-side upload component in node.js, but I'm having trouble interpreting the information sent from PLUpload. From what I can tell, PLUpload (in HTML5 mode) sends files as binary information, which creates problems for the node.js packages I've been attempting to use so far (node-formidable and node-express), as they expect normal HTML uploads with multipart content types.
For what it's worth, this is the code I've been attempting to use...
var http = require('http');
var formidable = require('formidable');
var sys = require('sys');

http.createServer(function(req, res) {
  console.log('request detected');
  if (req.url == '/upload/') {
    console.log('request processing');
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
      res.writeHead(200, {
        'Access-Control-Allow-Origin': 'http://tksync.com',
        'Access-Control-Allow-Methods': 'GET,PUT,POST,DELETE',
        'Access-Control-Allow-Headers': '*',
        'content-type': 'text/plain'
      });
      res.write('received upload:\n');
      res.end(sys.inspect({
        fields: fields,
        files: files
      }));
    });
  }
}).listen(8080);
I have no problem using plupload (in HTML5 mode) with node.js with the code below:
module.exports.savePhoto = (req, res) ->
  if req.url is "/upload" and req.method.toLowerCase() is "post"
    console.log 'savePhoto: req.url=', req.url, 'req.method=', req.method
    form = new formidable.IncomingForm()
    files = []
    fields = []
    form.uploadDir = config.PATH_upload
    form.on("field", (field, value) ->
      console.log field, value
      fields.push [ field, value ]
    ).on("file", (field, file) ->
      console.log field, file
      files.push [ field, file ]
    ).on "end", ->
      console.log "-> upload done: fields=", fields
      console.log "received fields:", util.inspect(fields)
      console.log "received files:", util.inspect(files)
      size = files[0][1].size
      pathList = files[0][1].path.split("/")
      #console.log 'pathList=', pathList
      file = pathList[pathList.length - 1]
      console.log "file=" + file
      ......
I created node-pluploader to handle this, as I found elife's answer didn't work for chunked uploads, since those chunks come in on separate requests.
Express-based usage example:
var Pluploader = require('node-pluploader');

var pluploader = new Pluploader({
  uploadLimit: 16
});

/*
 * Emitted when an entire file has been uploaded.
 *
 * @param file {Object} An object containing the uploaded file's name, type, buffered data & size
 * @param req {Request} The request that carried in the final chunk
 */
pluploader.on('fileUploaded', function(file, req) {
  console.log(file);
});

/*
 * Emitted when an error occurs
 *
 * @param error {Error} The error
 */
pluploader.on('error', function(error) {
  throw error;
});

// This example assumes you're using Express
app.post('/upload', function(req, res) {
  pluploader.handleRequest(req, res);
});
