Simple Azure Function in JavaScript to use HTTP trigger body in queue

So I'm messing about with the 'Use Azure Functions to automate SQL DW compute level' walkthrough from the MS website, and I've created an HTTP trigger that will send a message to the queue when hit. I'm very new to JavaScript, and I'm wondering how to use the HTTP request 'body' in place of the variable I have below:
module.exports = function (context, res,) {
    var timeStamp = new Date().toISOString();
    context.log('JavaScript timer trigger function ran!', timeStamp);
    context.res = { status: 201, body: "Resuming Datawarehouse" };
    var operation = {
        "operationType": "ResumeDw"
    };
    context.bindings.operationRequest = operation;
    context.done(null, res);
};
Quite simply, I will have some JSON in my HTTP request body that hits this trigger, and I then want to use only what's in that body for my queue. In the case above, it would replace var operation =.
Any ideas, please?
Just an FYI, I want it to replace what I already have:
var operation = {
    "operationType": "ResumeDw"
};
context.bindings.operationRequest = operation;
This is static inside the Function, but I want whatever the HTTP request sends as its body to be sent to my queue.

The second argument of your function is req, not res, which gives you access to the HTTP request, including its body:
module.exports = function(context, req) {
    // req.body is a thing
    var operation = {
        "operationType": req.body.operationType
    };
    context.bindings.operationRequest = operation;
    context.res = { status: 201, body: "Resuming Datawarehouse" };
    context.done();
};
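And if the goal is to forward whatever JSON the caller sends, untouched, to the queue, a minimal sketch (assuming the queue output binding is still named operationRequest in function.json) is to assign req.body directly:
module.exports = function(context, req) {
    // forward the parsed request body as-is to the queue output binding
    context.bindings.operationRequest = req.body;
    context.res = { status: 201, body: "Request queued" };
    context.done();
};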

Related

How do I use part of a returned object from an API request as what the function returns?

I have been researching and testing how to do API calls in Node.js. I'm currently trying to use part of a JSON object that gets returned from an API call in a module to return a token.
var request = require("request");
var timestamp = require("unix-timestamp");
var jwt = require("jsonwebtoken");
var EventEmitter = require("events").EventEmitter;

timestamp.round = true;
//create current unix timestamp
var current = timestamp.now();
//create unix experation time
var experation = timestamp.add(current, "+5m");
//create header
var header = {"header"}
//create payload
var payload = {
    "iss": process.env.CKEY,
    "aud": "https://iformbuilder.com/exzact/api/oauth/token",
    "exp": experation,
    "iat": current
};
var signature = process.env.SKEY;
//Create assertion
var assert = jwt.sign(payload, signature);
var grant = 'urn:ietf:params:oauth:grant-type:jwt-bearer';
//set the options
var options = {
    method: 'POST',
    url: 'https://iformbuilder.com/exzact/api/oauth/token',
    qs: {
        grant_type: grant,
        assertion: assert
    },
    headers: {
        'content-type': 'application/x-www-form-urlencoded',
        'cache-control': 'no-cache'
    }
};
var data = {};
var tkn = new EventEmitter();

module.exports = {
    token: function() {
        request(options, function (error, response, body) {
            if (error) throw new Error(error);
            console.log(body);
            tkn.body = body;
            tkn.emit('update');
        });
        tkn.on('update', function() {
            data = JSON.parse(tkn.body);
            return data.access_token;
        });
    }
}
The problem is that I can only use the returned item within the scope of tkn.on. I can nest another API call within it to use the token, but I would like to use it without having to repeat the same code over again. The only solution that I can get to work is writing to a file. I'm wondering if I'm even going about this the right way. I can't seem to find any good source online to help me with this, and maybe I'm asking the wrong question.
You have to use a callback (or promise) to get the value. Below is an example with callbacks; I'll let you research promises. I would say, understand it with callbacks first, then move on to promises.
someOtherModule.js
var tokenGetter = require('./tokenGetter');

function doSomethingWithToken(accessToken) {
    // Do what you want with the token here
}

tokenGetter.token(doSomethingWithToken);
tokenGetter.js
module.exports = {
    token: function(callback) {
        request(options, function (error, response, body) {
            if (error) throw new Error(error);
            console.log(body);
            tkn.body = body;
            tkn.emit('update');
        });
        tkn.on('update', function() {
            data = JSON.parse(tkn.body);
            callback(data.access_token);
        });
    }
}
The reason you have to do this is that with asynchronous operations, you do not know when you will get a response. The code is no longer linear in the way it was before.
You have to adjust your code to continue its work WHEN you get a response, rather than immediately AFTER you start the operation.
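For completeness, a minimal promise-based sketch of the same tokenGetter module (reusing the request/options setup from the question); callers then chain .then() instead of passing a callback:
module.exports = {
    token: function() {
        return new Promise(function(resolve, reject) {
            request(options, function (error, response, body) {
                if (error) return reject(error);
                // resolve with just the access token from the JSON response
                resolve(JSON.parse(body).access_token);
            });
        });
    }
};

// usage:
// require('./tokenGetter').token().then(function(accessToken) { /* use it here */ });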

Node.js - Check if stream has error before piping response

In Node.js, say that I want to read a file from somewhere and stream the response (e.g., from the filesystem using fs.createReadStream()).
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    var mimeType = getMimeTypeSomehow(request.params.id);
    if (mimeType === 'application/pdf') {
        response.set('Content-Range', ...);
        response.status(206);
    } else {
        response.status(200);
    }
    readStream.pipe(response);
});
However, I want to detect if there is an error with the stream before sending my response headers. How do I do that?
Pseudocode:
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);
    readStream.on('ready', function () {
        var mimeType = getMimeTypeSomehow(request.params.id);
        if (mimeType === 'application/pdf') {
            response.set('Content-Range', ...);
            response.status(206);
        } else {
            response.status(200);
        }
        readStream.pipe(response);
    });
    readStream.on('error', function () {
        response.status(404).end();
    });
});
The write stream is ended when the read stream ends or has an error. You can prevent this default behaviour by passing end: false during pipe and ending the write stream manually.
So even if an error occurs, your write stream is still open and you can do other stuff (e.g. sending a 404 status) with the write stream in the error callback.
var readStream = fs.createReadStream('/saved-files/' + request.params.id);

readStream.on('error', function () {
    res.status(404).end();
});
readStream.on('end', function () {
    res.end(); // end write stream manually when readstream ends
});
readStream.pipe(res, { end: false }); // prevent default behaviour
Update 1: For file streams, you can listen for the open event to check whether the file is ready to read:
readStream.on('open', function () {
    // set response headers and status
});
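Putting the error handler and the open listener together, a minimal sketch of the original route (keeping the question's hypothetical getMimeTypeSomehow helper, and leaving out the elided Content-Range value) could look like this:
application.get('/files/:id', function (request, response) {
    var readStream = fs.createReadStream('/saved-files/' + request.params.id);

    readStream.on('open', function () {
        // the file exists and is readable, so it is safe to commit to a status now
        var mimeType = getMimeTypeSomehow(request.params.id);
        if (mimeType === 'application/pdf') {
            response.status(206); // set Content-Range here as in the question
        } else {
            response.status(200);
        }
        readStream.pipe(response);
    });

    readStream.on('error', function () {
        // open failed (e.g. file missing); no headers have been sent yet
        response.status(404).end();
    });
});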
Update 2: As the OP mentioned, there may be no open event for other streams; we may use the following if the stream inherits from Node's stream module. The trick is to write the data manually instead of using the pipe() method. That way we can do some 'initialization' on the writable before writing the first byte.
So we bind once('data') first and then bind on('data'). The first one will be called once, before any actual writing has happened.
readStream
    .on('error', function (err) {
        res.status(404).end();
    })
    .once('data', function () {
        // will be called once, before the on('data') callback,
        // so it's safe to set headers here
        res.set('Content-Type', 'text/html');
    })
    .on('data', function (chunk) {
        // now start writing data
        res.write(chunk);
    })
    .on('end', res.end.bind(res)); // ending writable when readable ends

express.js: how to use value returned by http.request in app.get

I want to use app.get to deliver the data from an API on another domain. I can write the data to the console, but nothing is appearing on the page ('~/restresults').
This is the code I have so far:
app.get('/restresults', function (req, res) {
    var theresults;
    var http = require('http');
    var options = {
        port: '80',
        hostname: 'restsite',
        path: '/v1/search?format=json&q=%22foobar%22',
        headers: { 'Authorization': 'Basic abc==' }
    };
    callback = function(res) {
        var content;
        res.on('data', function (chunk) {
            content += chunk;
        });
        res.on('end', function () {
            console.log(content);
            theresults = content;
        });
    };
    http.request(options, callback).end();
    res.send(theresults);
});
How can I bind the result of the http.request to a variable and return it when '/restresults' is requested?
Move res.send(theresults); to here:
callback = function(res2) {
    var content;
    res2.on('data', function (chunk) {
        content += chunk;
    });
    res2.on('end', function () {
        console.log(content);
        theresults = content;
        res.send(theresults); // Here
    });
};
Note: You'll have to rename res to something else inside the callback, as you want the express res, not the request's res.
The callback is an asynchronous call. You're sending the response before you get a result from the request.
You'll also want to handle the case in which there is an error, otherwise the client's request may hang.
You are currently sending the response before the callback (from the http request) is done.
The http.request call is async; the script will not wait until it's done before sending the data back to the client.
You will have to wait for the request to be done and then send the result back to the client (preferably in the callback function).
Example:
http.request(options, function(httpRes) {
// Notice that i renamed the 'res' param due to one with that name existing in the outer scope.
/*do the res.on('data' stuff... and any other code you want...*/
httpRes.on('end', function () {
res.send(content);
});
}).end();
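For completeness, a sketch that also covers the error case mentioned above, and initialises content (in the original code content is never initialised, so content += chunk would produce a string starting with "undefined"). The hostname and Authorization header are kept as the question's placeholders:
var http = require('http'); // as in the question, though usually required once at the top of the file

app.get('/restresults', function (req, res) {
    var options = {
        port: '80',
        hostname: 'restsite',
        path: '/v1/search?format=json&q=%22foobar%22',
        headers: { 'Authorization': 'Basic abc==' }
    };
    var apiReq = http.request(options, function (apiRes) {
        var content = ''; // initialise, so += doesn't prepend "undefined"
        apiRes.on('data', function (chunk) {
            content += chunk;
        });
        apiRes.on('end', function () {
            res.send(content); // respond only once the upstream response is complete
        });
    });
    apiReq.on('error', function (err) {
        res.status(500).send('Upstream request failed'); // so the client's request doesn't hang
    });
    apiReq.end();
});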

Node.js sending http request in a loop

I'm actually facing a problem with my JavaScript code executed with Node.js.
I need to send HTTP requests in a loop to a distant server (I set www.google.ca in the code).
Here is my code:
var http = require('http');

var options = {
    hostname: 'www.google.ca',
    port: 80,
    path: '/',
    method: 'GET'
};

function sendRequest(options) {
    console.log('hello');
    var start = new Date();
    var req = http.request(options, function(res) {
        console.log('Request took:', new Date() - start, 'ms');
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
    });
    req.end();
}

for (var i = 0; i < 10; i++) {
    sendRequest(options);
}
The problem I have is that, no matter how many times I go through my loop, I only get a response for the first 5 of them. For the rest of the requests, the function sendRequest() is called but I get neither a response nor an error message, and then the program terminates.
However, it works fine when I set localhost as the host.
Would anyone have a solution to this problem?
Thanks in advance!
Perhaps either your machine or the remote machine is getting overwhelmed by the 10 simultaneous requests you make. Try sending them one at a time; you will have to wait until the first request completes before continuing. One easy way to do so is with async.timesSeries:
var http = require('http');
var async = require('async');

var options = {
    hostname: 'www.google.ca',
    port: 80,
    path: '/',
    method: 'GET'
};

function sendRequestWrapper(n, done) {
    console.log('Calling sendRequest', n);
    sendRequest(options, function(err) {
        done(err);
    });
}

function sendRequest(options, callback) {
    //console.log('hello');
    var start = new Date();
    var req = http.request(options, function(res) {
        // I don't know if this callback is called for error responses.
        // I have only used the `request` library, which slightly simplifies this.
        // Under some circumstances you can accidentally cause problems by calling
        // your callback function more than once (e.g. both here and in on('error')).
        console.log('Request took:', new Date() - start, 'ms');
        callback(null);
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        callback(e); // pass the actual error object (the original snippet referenced an undefined 'err' here)
    });
    req.end();
}

async.timesSeries(10, sendRequestWrapper);
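A small usage note on the sketch above: async.timesSeries also accepts a final callback that runs once all iterations have finished (or as soon as one reports an error). It is also worth draining each response with res.resume(); an unconsumed response body keeps the socket occupied, and older Node versions capped the default agent at 5 concurrent sockets per host, which may be why exactly 5 of the 10 parallel requests completed. A variant of sendRequest with that change, plus the final callback (a sketch, not tested against the original setup):
function sendRequest(options, callback) {
    var start = new Date();
    var req = http.request(options, function(res) {
        console.log('Request took:', new Date() - start, 'ms');
        res.resume(); // drain the body so the socket is released for the next request
        callback(null);
    });
    req.on('error', function(e) {
        console.log('problem with request: ' + e.message);
        callback(e);
    });
    req.end();
}

async.timesSeries(10, sendRequestWrapper, function(err) {
    if (err) {
        console.log('Stopped early:', err.message);
    } else {
        console.log('All 10 requests finished');
    }
});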

How to pass object parameters to functions in JavaScript

My server.js is
// server.js - the outer server loop
var http = require('http')
  , php = require("./phpServer");

function start() {
    function onRequest(request, response) {
        php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
        response.write('Ending');
        response.end();
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}

exports.start = start;
That calls php.phpServer on every request, with response as the 3rd param.
phpServer contains:
//
// phpServer.js - a generic server to serve static files and
//
var fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes')

function phpServer(root, request, response) {
    // serve static or pass to php.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    fs.stat(root + request.url, function(err, stat) {
        if (err || !stat.isFile()) { // error or not file.
            console.log('404');
            response.writeHead(404);
            response.write('Not Found');
            return;
        }
        // exists - serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        response.write('Somethign to serve');
        // fs.createReadStream(root+request.url).pipe(response);
    });
}

exports.phpServer = phpServer
As I see it, response is an object and is passed by reference, therefore the response.write() here should write to the response.
It doesn't. Response here is NOT the same as response in onRequest, so nothing in phpServer is sent to the browser - not code nor content.
The console.logs come out and show what I would expect.
How can I get the object response passed so I can call write on it?
------------- added later -------------------
I've tried to apply the answers given, and the code for server.js is now
// server.js - the outer server loop
var http = require('http')
  , fs = require('fs')
  , pathfuncs = require('path')
  , url = require('url')
  , mimetypes = require('./mimetypes')

function phpServer(root, request, res) {
    // code adapted from page 118 of Smashing Node.js by Guillermo Rauch
    // res is response provided to onRequest.
    var data = url.parse(request.url);
    var ext = pathfuncs.extname(data.pathname);
    res.write('Start reply');
    fs.stat(root + request.url, function(err, stat) {
        // delayed callback - response in scope
        if (err || !stat.isFile()) { // error or not file.
            console.log('404');
            res.writeHead(404);
            res.write('Not Found');
            res.end
            return;
        }
        // exists so serve.
        console.log("serve(" + root + request.url + ", mimetypes.mimetype(" + ext + "))");
        res.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        res.write('The file contents');
        res.end;
    }); // end fs.stat call.
} // end phpServer

function start() {
    function onRequest(request, response) {
        phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    }
    http.createServer(onRequest).listen(80);
    console.log("Server started.");
}

exports.start = start;
This does not reply at all - it times out. However, the call to res.writeHead will either fail, if res is out of scope/does not exist/is undefined, or succeed if res is the param passed in.
It succeeds, and is followed by write and end, so please - what have I got wrong?
If the file does not exist, I get 'Start reply' and then a timeout.
At the res.write('Start reply'), res is the response param, yet it isn't later in the fs.stat callback.
Why not?
Damn - this is frustrating.
The call to response.end should be moved from the onRequest function to phpServer. As it stands phpServer cannot write anything else since the stream has been closed.
function onRequest(request, response) {
    php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response);
    // response.end(); // move this to phpServer
}
As explained in the documentation for response.end
This method signals to the server that all of the response headers and body have been sent; that server should consider this message complete.
Your problem is not with parameter passing, it's with basic asynchronous control flow. The stat() function does not do its work immediately. Its callback parameter is called when it's done. You basically cannot structure the code the way you've done it. Instead, your "phpServer" code will need to take a callback parameter of its own, and call it after it does its work.
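A minimal sketch of that suggestion, combining both answers (the done callback name is made up here): phpServer ends the response itself once fs.stat has finished, and onRequest no longer writes or ends anything:
// phpServer.js - sketch only: signal completion via a callback instead of
// relying on the caller to end the response
function phpServer(root, request, response, done) {
    var ext = pathfuncs.extname(url.parse(request.url).pathname);
    fs.stat(root + request.url, function (err, stat) {
        if (err || !stat.isFile()) {
            response.writeHead(404);
            response.end('Not Found'); // note: end() must actually be called, with parentheses
            return done();
        }
        response.writeHead(200, {'Content-Type': mimetypes.mimetype(ext)});
        fs.createReadStream(root + request.url).pipe(response); // pipe() ends the response
        done();
    });
}

// server.js - onRequest leaves the response entirely to phpServer
function onRequest(request, response) {
    php.phpServer('D:/websites/coachmaster.co.uk/htdocs', request, response, function () {
        console.log('finished handling', request.url);
    });
}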
