How to pass object parameters to functions in JavaScript

My server.js is:
// server.js - the outer server loop
var http = require('http')
  , php = require("./phpServer");

function start() {
    function onRequest(request, response) {
        php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
        response.write('Ending');
        response.end();
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}
exports.start = start;
That calls php.phpServer on every request, with response as the third parameter.
phpServer contains:
//
// phpServer.js - a generic server to serve static files and
//
var fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes');

function phpServer(root, request, response) {
    // serve static or pass to php.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) { // error or not a file.
            console.log('404');
            response.writeHead(404);
            response.write('Not Found');
            return;
        }
        // exists - serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        response.write('Something to serve');
        // fs.createReadStream(root+request.url).pipe(response);
    });
}
exports.phpServer = phpServer;
As I see it, response is an object and is passed by reference, so the response.write() here should write to the response.
It doesn't. response here appears NOT to be the same as response in onRequest, so nothing in phpServer is sent to the browser - neither status code nor content.
The console.logs come out and show what I would expect.
How can I get the response object passed in so that I can call write() on it?
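(Aside, not from the original post: the premise about object passing is easy to verify in isolation. Objects in JavaScript are passed by sharing, so a callee can mutate the caller's object through the parameter:)
// quick check: a function can mutate an object passed to it,
// and the caller sees the change through its own reference
function mutate(obj) {
    obj.touched = true;
}
var thing = {};
mutate(thing);
console.log(thing.touched); // true - so the premise is correct; the bug is elsewhere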
------------- added later -------------------
I've tried to apply the answers given, and the code for server.js is now:
// server.js - the outer server loop
var http = require('http')
  , fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes');

function phpServer(root, request, res) {
    // code adapted from page 118 of Smashing Node.js by Guillermo Rauch
    // res is the response provided to onRequest.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    res.write('Start reply');
    fs.stat(root + request.url, function (err, stat) {
        // delayed callback - response in scope
        if (err || !stat.isFile()) { // error or not a file.
            console.log('404');
            res.writeHead(404);
            res.write('Not Found');
            res.end
            return;
        }
        // exists, so serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        res.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        res.write('The file contents');
        res.end;
    }); // end fs.stat call.
} // end phpServer

function start() {
    function onRequest(request, response) {
        phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}
exports.start = start;
This does not reply at all - it times out. However, the call to res.writeHead will either
fail, if res is out of scope/does not exist/is undefined, or succeed, if res is the param passed in.
It succeeds, and is followed by write and end, so please - what have I got wrong?
If the file does not exist I get 'Start reply' and then a timeout.
At the res.write('Start reply'), res is the response param, yet it isn't later in the fs.stat callback.
Why not?
Damn - this is frustrating.

The call to response.end should be moved from the onRequest function into phpServer. As it stands, phpServer cannot write anything, because the stream has already been closed by the time its fs.stat callback runs.
function onRequest(request, response) {
    php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    // response.end(); // move this to phpServer
}
As explained in the documentation for response.end:
This method signals to the server that all of the response headers and body have been sent; that server should consider this message complete.
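A minimal sketch of phpServer owning the end of the response (assuming the same requires as in the question's phpServer.js; both the 404 path and the success path must close it):
function phpServer(root, request, response) {
    var ext = pathfuncs.extname(url.parse(request.url).pathname);
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) {
            response.writeHead(404);
            response.end('Not Found');   // close the 404 path here
            return;
        }
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        fs.createReadStream(root + request.url)
            .pipe(response);             // pipe() ends the response by default
    });
}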

Your problem is not with parameter passing, it's with basic asynchronous control flow. The stat() function does not do its work immediately. Its callback parameter is called when it's done. You basically cannot structure the code the way you've done it. Instead, your "phpServer" code will need to take a callback parameter of its own, and call it after it does its work.
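For instance (a sketch only, reusing the question's names), phpServer can take a done callback, and onRequest ends the response only when it fires:
// phpServer.js - signal completion instead of assuming synchronous flow
function phpServer(root, request, response, done) {
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) {
            response.writeHead(404);
            response.write('Not Found');
            return done();               // tell the caller we are finished
        }
        var ext = pathfuncs.extname(url.parse(request.url).pathname);
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        response.write('The file contents');
        done();
    });
}

// server.js - end the response only after phpServer reports completion
function onRequest(request, response) {
    php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response, function () {
        response.end();                  // safe to end now
    });
}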

Related

Simple Azure Function in Javascript to use HTTP trigger body in queue

So I'm messing about with "Use Azure Functions to automate SQL DW compute level" from the MS website... and I've created an HTTP trigger that will send a msg to the queue when hit. I'm very new to JavaScript, and I'm wondering how to use the HTTP request 'body' in place of the variable I have below:
module.exports = function (context, res) {
    var timeStamp = new Date().toISOString();
    context.log('JavaScript timer trigger function ran!', timeStamp);
    context.res = { status: 201, body: "Resuming Datawarehouse" };
    var operation = {
        "operationType": "ResumeDw"
    };
    context.bindings.operationRequest = operation;
    context.done(null, res);
};
Quite simply, I will have some JSON in my HTTP request body that will hit this trigger; I then want to use only what's in that body for my queue. In the case above, it would replace the var operation = block.
Any ideas, please?
Just an FYI, I want it to replace what I already have:
var operation = {
    "operationType": "ResumeDw"
};
context.bindings.operationRequest = operation;
This is static inside the Function, but I want whatever the HTTP request sends as its body to be sent to my queue.
The second argument of your function is req, not res, and it gives you access to the HTTP request, including its body:
module.exports = function (context, req) {
    // req.body is a thing
    var operation = {
        "operationType": req.body.operationType
    };
    context.bindings.operationRequest = operation;
    context.res = { status: 201, body: "Resuming Datawarehouse" };
    context.done();
};
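If the request might arrive without a body, a small guard avoids a TypeError (a sketch, not from the original answer; the "ResumeDw" fallback is an assumption):
module.exports = function (context, req) {
    var operation = {
        // fall back to the original static value when no body is posted
        "operationType": (req.body && req.body.operationType) || "ResumeDw"
    };
    context.bindings.operationRequest = operation;
    context.res = { status: 201, body: "Resuming Datawarehouse" };
    context.done();
};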

Node returning error - "route is not defined"

I'm brand new to Node and just running through a tutorial I found. I punched in the code as written, which, as I understand it, ought to import the route function from the router file. However, the server encounters an error when trying to load the page, returning the error in the title.
Here is my code:
// index.js; my main file I run through cmd
var server = require("./server");
var router = require("./router");
server.start(router.route);

// router.js; the route function is stored here
function route(pathname) {
    console.log("About to route request for " + pathname);
}
exports.route = route;
// server.js; the meat
var http = require("http");
var url = require("url");

function start() {
    function onRequest(request, response) {
        var pathname = url.parse(request.url).pathname;
        console.log("Request for " + pathname + " received");
        route(pathname);
        response.writeHead(200, { "Content-Type": "text/plain" });
        response.write("Hello World!");
        response.end();
    }
    http.createServer(onRequest).listen(8888);
    console.log("Server has started.");
}
exports.start = start;
While I'm asking, can anyone explain how the server.start(router.route) line is working? I thought that start function didn't take arguments.
While I'm asking, can anyone explain how the server.start(router.route) line is working? I thought that start function didn't take arguments.
^ That's why it isn't working.
route is undefined in the start function. Change the start function to:
function start(route) { ...
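Putting it together, server.js becomes (only the signature and the wiring change; onRequest now calls the route function passed in from index.js):
// server.js - now accepts the route function as a parameter
var http = require("http");
var url = require("url");

function start(route) {
    function onRequest(request, response) {
        var pathname = url.parse(request.url).pathname;
        console.log("Request for " + pathname + " received");
        route(pathname);                 // the function passed in via server.start(router.route)
        response.writeHead(200, { "Content-Type": "text/plain" });
        response.write("Hello World!");
        response.end();
    }
    http.createServer(onRequest).listen(8888);
    console.log("Server has started.");
}
exports.start = start;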

Node.js - Check if stream has error before piping response

In Node.js, say that I want to read a file from somewhere and stream the response (e.g., from the filesystem using fs.createReadStream()).
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    var mimeType = getMimeTypeSomehow(request.params.id);
    if (mimeType === 'application/pdf') {
        response.set('Content-Range', ...);
        response.status(206);
    } else {
        response.status(200);
    }
    readStream.pipe(response);
});
However, I want to detect if there is an error with the stream before sending my response headers. How do I do that?
Pseudocode:
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    readStream.on('ready', function () {
        var mimeType = getMimeTypeSomehow(request.params.id);
        if (mimeType === 'application/pdf') {
            response.set('Content-Range', ...);
            response.status(206);
        } else {
            response.status(200);
        }
        readStream.pipe(response);
    });
    readStream.on('error', function () {
        response.status(404).end();
    });
});
The write stream is ended when the read stream ends or errors. You can prevent this default behaviour by passing end: false to pipe() and ending the write stream manually.
So even if an error occurs, your write stream is still open, and you can do other things with it (e.g. send a 404 status) in the error callback.
var readStream = fs.createReadStream('/saved-files/' + request.params.id);
readStream.on('error', function () {
    res.status(404).end();
});
readStream.on('end', function () {
    res.end(); // end the write stream manually when the read stream ends
});
readStream.pipe(res, { end: false }); // prevent the default behaviour
Update 1: For file streams, you can listen for the 'open' event to check whether the file is ready to be read:
readStream.on('open', function () {
    // set response headers and status here
});
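Combining this with the question's handler might look roughly like the following (a sketch; getMimeTypeSomehow remains the question's placeholder, and the Content-Range header from the question is omitted here):
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    readStream.on('error', function () {
        // note: an error after 'open' (mid-read) would need separate handling,
        // since headers may already have been sent by then
        response.status(404).end();
    });
    readStream.on('open', function () {
        var mimeType = getMimeTypeSomehow(request.params.id);
        response.status(mimeType === 'application/pdf' ? 206 : 200);
        readStream.pipe(response);       // safe now: the file is open
    });
});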
Update 2: As the OP mentioned, there may be no 'open' event for other streams; we can use the following if the stream inherits from Node's stream module. The trick is to write the data manually instead of using pipe(), so we can do some 'initialization' on the writable before the first byte is written.
So we bind once('data') first and then bind on('data'). The first one is called before the on('data') callback, since listeners fire in registration order, so it is safe to set headers there.
readStream
    .on('error', function (err) {
        res.status(404).end();
    })
    .once('data', function () {
        // called once, before the on('data') callback,
        // so it's safe to set headers here
        res.set('Content-Type', 'text/html');
    })
    .on('data', function (chunk) {
        // now start writing data
        res.write(chunk);
    })
    .on('end', res.end.bind(res)); // end the writable when the readable ends

Node js working on second or third call

I have written a Node.js server which creates a server and prints the output when an asynchronous function is done. While I always get the correct output in console.log, the same is not reflected in my response. Here is my code snippet:
var request = require('request');
var http = require('http');
var cheerio = require('cheerio');
var url = require('url');
var Curl = require('node-libcurl').Curl;
var sleep = require('sleep');

isDone = 0;
globalImage = "";

http.createServer(function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/plain'});
    var url_parts = url.parse(req.url, true);
    var query = url_parts.query;
    var main_url = query["link"];
    if (req.url != '/favicon.ico') {
        res.writeHead(200);
        if (main_url != undefined) {
            var position = parseInt(query["position"]);
            // web_scrap()
            web_scrap(main_url, position, function (image) {
                console.log("Console log : " + image);
                globalImage = image;
            });
            res.write(globalImage);
            res.end("HEY");
        }
    } else { // for favicon and other requests just write 404s
        res.writeHead(404);
        res.write('This URL does nothing interesting');
        res.end();
    }
}).listen(3000, '127.0.0.1');
console.log('Server running at http://127.0.0.1:3000/');

function web_scrap(url, position, callback) {
    // do something
    callback(JSON.stringify(product));
}
Now, on starting the server and accessing it in the browser with the link and position GET parameters, I get output only on the second or third refresh. I get perfect output in console.log, though!
Can anyone help or guide me in this regard?
Thanks!
From what I understand, you're loading an image from an external source, asynchronously.
Thus, your function continues to run even though the load is not finished yet. And as your globalImage is a global variable, once it is loaded it stays in memory; that's why you get the data after a few tries.
Just move your res.write and res.end into the callback function; this way the content will be sent once the image is loaded.
web_scrap(main_url, position, function (image) {
    console.log("Console log : " + image);
    globalImage = image;
    res.write(globalImage);
    res.end("HEY");
});
Anyway, unless you want to cache your image, you should not have a globalImage variable, as it would stay in memory even when you want it to be garbage collected. You can remove the variable and just do this:
web_scrap(main_url, position, function (image) {
    console.log("Console log : " + image);
    res.write(image);
    res.end("HEY");
});

Node.JS where to put the response.end()

I'm developing a simple Node.js application. First I create an httpServer using the http module. Then I route the request to the requestHandlers.js page. The response parameter comes from the creation of the httpServer. The objective of this app is that process1, process2 and process3 each write their respective text to the page.
requestHandlers.js
var process1 = require("./process1");
var process2 = require("./process2");
var process3 = require("./process3");

function iniciar(response) {
    console.log("Request handler 'iniciar' was called.");
    response.writeHead(200, {"Content-Type": "text/html"});
    process1.fc1(response);
    process2.fc2(response);
    process3.fc3(response);
    //response.end() // WHERE DO I PLACE IT?
}
As you can see, the response parameter is passed to process1.js, which, after parsing some data, should echo some information.
process1.js
var request = require('request');

function fc1(response) {
    var url = 'http://webpagethatreturnsJSONfile.com/';
    // download webpage data and parse it
    request(url, function (err, resp, body) {
        if (err)
            throw err;
        var jsonResult = JSON.parse(body);
        response.write("Number:" + jsonResult.number + '');
        //response.end() // WHERE DO I PLACE IT?
    });
}
exports.fc1 = fc1;
The point is that I don't know where to put response.end(). Each process takes some time, and I want to end the response when all processes have echoed their text.
How could I do it?
I don't know if the code I've attached is enough to help me.
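One common pattern is to hand each process a completion callback and call response.end() only when the last one reports back. A minimal sketch, assuming the same modules and names as in the question:
// requestHandlers.js - count completions, end once all three report back
function iniciar(response) {
    response.writeHead(200, {"Content-Type": "text/html"});
    var pending = 3;
    function done() {
        pending -= 1;
        if (pending === 0) {
            response.end(); // every process has written its text
        }
    }
    process1.fc1(response, done);
    process2.fc2(response, done);
    process3.fc3(response, done);
}

// process1.js - invoke done() once the write has happened
function fc1(response, done) {
    var url = 'http://webpagethatreturnsJSONfile.com/';
    request(url, function (err, resp, body) {
        if (!err) {
            var jsonResult = JSON.parse(body);
            response.write("Number:" + jsonResult.number + '');
        }
        done(); // report completion even on error, so the response still ends
    });
}
exports.fc1 = fc1;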
