Node.js - Asynchronous JSON Query - javascript

I apologize if this is a stupid question, but I am new to Javascript and Node.js really hurts my head because it is asynchronous.
My goal is to query for a JSON object from an API and be able to work with it. I have tried to look for questions and answers on what I should be doing but none of it really makes sense to me, so I am hoping to learn by just seeing proper code.
var request = require('request');
var url = 'url here';
var temp;
var done = false;
// NOTE(review): the callback below runs asynchronously, only after the
// HTTP response arrives — not in source order with the code that follows.
request(url, function (error, response, body) {
if (!error) {
temp = body;
done = true;
console.log(temp);
} else {
console.log(error);
}
});
// NOTE(review): this executes immediately after request() returns, before
// the callback above has run, so `done` is still false here (this is the
// bug the answer below explains).
if (done){
console.log(temp);
}
Can someone please walk me through the proper way to restructure my code?

The function you are creating with the line
request(url, function (error, response, body) {
is not executed until the response is received. The rest of your code continues to run. Think of the flow something like this:
var request = require('request');
var url = 'url here';
var temp;
var done = false;
request(url, XXX);
if (done){
console.log(temp);
}
then when the response is received (perhaps much later on) the function XXX is executed.
As you can see, done will always be false when the line
if (done){
is executed.

Related

Override post requests

I have this code that I put in my console:
XMLHttpRequest.prototype.send = function(body) {
// modifies inputted request
// NOTE(review): `newBody` has no var/let/const, so it leaks as an
// implicit global (and throws in strict mode).
newBody = JSON.parse(body);
newBody.points = 417;
// sends modified request
// NOTE(review): `realSend` is not a standard XHR method — presumably the
// original send was saved under that name earlier; that code is not shown,
// and the answers below suggest this is where the problem lies.
this.realSend(JSON.stringify(newBody));
}
It is supposed to make the points 417 every time it sends a request, but when I look at the request body, it still says the original amount of points. Any help?
Try to add an alert() or console.log() into your modified XMLHttpRequest.prototype.send to check if it actually works. There is a way to prevent this kind of modifications silently.
As others have noted, the error you are experiencing is hard to diagnose exactly without seeing how you created this.realSend.
However, this code will work:
// Keep the original send in a closure variable (not on the prototype),
// then delegate to it with the correct `this` via send.call().
const send = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function (body) {
// NOTE(review): assumes every outgoing body is JSON — a non-JSON or
// empty body would make JSON.parse throw. TODO confirm for your app.
const newBody = JSON.parse(body);
newBody.points = 417;
send.call(this, JSON.stringify(newBody));
};
Note that instead of storing the original send method on XMLHttpRequest.prototype, I've kept in a separate variable and simply invoked it with the correct this value through send.call(). This seems like a cleaner implementation with less chance for conflicts with other code.
See this codesandbox for a working example.
If your function is not being called, it is possible that fetch is being used to make the AJAX requests instead.
So you can wrap both functions, like this
// Save the pristine implementations before patching, so the wrappers can
// delegate to them.
const send = XMLHttpRequest.prototype.send;
const _fetch = window.fetch;

// Rewrite the JSON body of every XHR so `points` is always 417.
XMLHttpRequest.prototype.send = function (body) {
  const newBody = JSON.parse(body);
  newBody.points = 417;
  send.call(this, JSON.stringify(newBody));
};

// Apply the same rewrite to fetch()-based requests.
window.fetch = function (url, options) {
  // Guard: fetch(url) with no options object (or with no body, e.g. a
  // plain GET) must still work — the original `options.body` check threw
  // a TypeError when options was undefined.
  if (options && options.body) {
    const newBody = JSON.parse(options.body);
    newBody.points = 417;
    options.body = JSON.stringify(newBody);
  }
  // Bug fix: the original dropped the return value, so any caller doing
  // `await fetch(...)` or `fetch(...).then(...)` received undefined
  // instead of the response Promise.
  return _fetch.call(this, url, options);
};

Variable inside request

I want to parse body outside of request. But i can not figure out a way to get it outside of the request function. Can you please show me how? Or at least give me an example. I do not understand it.
"use strict"
var robotsParser = require('robots-parser');
var request = require('request');
var fs = require('fs')
let url = 'http://google.de/robots.txt'
request(url, function(error, response, body) {
//console.log(body)
// NOTE(review): `robots` is never declared; under "use strict" this
// assignment throws a ReferenceError instead of creating a global.
robots = robotsParser(url, body)
var reserveisDisallowed = robots.isDisallowed('http://google.de/maps/reserve/api/', '*')
console.log(reserveisDisallowed)
})
Use jQuery deferred, if you want to execute this instruction after success callback as:
// Resolve a jQuery Deferred from the request callback; the .done()
// handler then runs once the body is available.
var $deferred = $.Deferred(),
robots,
globalBody;
$deferred.done(function(body){
robots = robotsParser(url, body);
globalBody = body; //After here globalBody object has body available. Do whatever you want to do now.
var reserveisDisallowed = robots.isDisallowed('http://google.de/maps/reserve/api/', '*')
console.log(reserveisDisallowed)
});
request(url, function(error, response, body) {
//console.log(body)
// Hand the body to the deferred; this fires the done() handler above.
$deferred.resolve(body);
})
Or, if you just want the body available outside, simply use the 'globalBody' variable in your code.
As:
request(url, function(error, response, body) {
//console.log(body)
// NOTE(review): `robots` and `globalBody` are assigned without
// declarations here — they must be declared in an enclosing scope, and
// they are only populated after this callback fires, not before.
robots = robotsParser(url, body);
globalBody = body;
var reserveisDisallowed = robots.isDisallowed('http://google.de/maps/reserve/api/', '*')
console.log(reserveisDisallowed)
})

JavaScript return not working as expected

This is a total newbie questions, and I'm trying to learn JavaScript and node.js. I'm attempting to use request to grab information from an api, and then store the information in an array that I can then manipulate.
Here is my code so far:
const request = require('request');
var url = "https://www.predictit.org/api/marketdata/ticker/CHINA.INAUGURAL.2017";
// NOTE(review): `info` receives request()'s immediate return value (the
// Request object), not the parsed JSON — a `return` inside the callback
// does not flow back to this assignment.
var info = request(url, function(err, res, body){
var json = JSON.parse(body);
return json;
})
However, the info variable seems to only store something that I think is related to the request call, but I'm not sure. If I replace return json with console.log(json) then it prints the array immediately, though I can't get it to store it.
I would like to tell node info['ID'] and have it return 2835
Node does not work like this. Node is asynchronous.
You can try this,
var info;
request(url, function(err, res, body){
// `info` is only populated once the response arrives; reading it
// before this callback has run yields undefined.
info = JSON.parse(body);
});
It looks like you're requesting an XML file from that URL. You can install the xml2js library for Node.js by typing npm install xml2js and let it parse the XML for you. After that:
var parseString = require('xml2js').parseString;
// Fixed URL: the path segment was garbled ("tick /") — it should read
// "ticker/", matching the endpoint used elsewhere in this thread.
var url = "https://www.predictit.org/api/marketdata/ticker/CHINA.INAUGURAL.2017";
var info;
request(url, function (err, res, body) {
  parseString(body, function (err, result) {
    // Round-trip through JSON to get a plain-object copy of the parse.
    info = JSON.parse(JSON.stringify(result));
    // Bug fix: the original did `return info;` from the request callback,
    // which ran before parseString assigned anything and whose return
    // value was discarded anyway. Consume the data here, inside the
    // callback, once it actually exists.
    console.log(info);
  });
});
I hope this one works:
request('https://www.predictit.org/api/marketdata/ticker/CHINA.INAUGURAL.2017',
  function (err, res) {
    if (err) {
      console.log('ERROR: Something went wrong');
    }
    else {
      // Bug fix: the original referenced an undeclared `responce`
      // variable (the parameter is named `res`), which threw a
      // ReferenceError before anything was parsed.
      parseString(res.body, function (err, result) {
        // Bug fix: `res` is the request library's response object — it
        // has no Express-style .json() method — so log the parsed
        // result instead of calling res.json(result).
        console.log(result);
      });
    }
  });
I think that data is stored into 'info' variable but you're using it before data is stored.
Here it takes some time for the API call to complete and return data, which is then stored in the 'info' variable. Add a timeout of 5s and then try console.log(info[1]); it would return the expected value.
This is due to asynchronous nature of nodejs. Here your code might be executing info[1] before it is set by API call.

Request Stream Get + Post edited JSON body in Node.js

I'm new to Node.js and am working on a project and I'd like to use Request to stream from one endpoint to another. My goal is to use Request to get and post an edited JSON body using a pipe stream. I know that when doing so, content-type and content-length will be preserved in the POST headers. However, I would like to apply .forEach to all JSON objects in the body from the first url, and post them to the second url.
I'm not sure about the correct format, and I'd appreciate some clarification.
I know the basic syntax is this:
request.get('URL').pipe(request.post('URL'));
And so far my best guess is something like this:
request('FIRST_URL', function (error, response, body) {
body = JSON.parse(body);
body.forEach( function(arg) {
//return edited body
});
// NOTE(review): reassigning the callback's `body` parameter does not
// change the bytes flowing through the pipe below — .pipe() streams the
// raw response, so these edits never reach SECOND_URL (the answer below
// addresses this with a Transform stream).
}).pipe(request.post('SECOND_URL'));
Am I missing something? Is there a better way to do this?
You could write your own transform stream. For example:
var Transform = require('stream').Transform;
var inherits = require('util').inherits;

// Transform stream that buffers the whole piped payload, parses it as
// JSON once input ends, applies any edits, and re-emits the serialized
// result downstream.
function JSONTransform() {
  Transform.call(this);
  // Accumulates incoming chunks until _flush. Bug fix: the original
  // initialized `this._bufffer` (three f's), so `_buffer` started out
  // undefined and the first `+=` produced the string "undefined<chunk>",
  // guaranteeing a JSON.parse failure in _flush.
  this._buffer = '';
}
inherits(JSONTransform, Transform);

JSONTransform.prototype._transform = function(chunk, enc, cb) {
  this._buffer += chunk;
  cb();
}; // bug fix: was `});` — a syntax error (stray closing paren)

JSONTransform.prototype._flush = function(cb) {
  try {
    var result = JSON.parse(this._buffer);
    this._buffer = null; // release the buffered text; we're done with it
    // Do whatever transformations
    // ...
    this.push(JSON.stringify(result));
    cb();
  } catch (ex) {
    // Surface parse failures as a stream 'error' event.
    cb(ex);
  }
}; // bug fix: was `});` — a syntax error (stray closing paren)
// Then just pipe
// The GET body streams through JSONTransform (which buffers, parses,
// edits, and re-serializes it) and on into the POST request.
request.get('FIRST_URL')
.pipe(new JSONTransform())
.pipe(request.post('SECOND_URL'));
One other slightly different solution that may be worth considering would be to use a third-party streaming JSON parser module, which may or may not work for your use case.

Node.JS where to put the response.end()

I'm developing a simple NODE.JS application. First I create an httpServer using http module. Then I route the request to the requestsHandlers.js page. 'Response' parameter cames from the creation of the httpServer. Process1, process2 and process3 should write an answer to the page. This is the objective of this app, that process1, process2 and process3 write its respective text.
requestHandlers.js
var process1 = require("./process1");
var process2 = require("./process2");
var process3 = require("./process3");

// Request handler: writes the HTTP header, then lets each process module
// append its own text to the same shared response stream.
function iniciar(response) {
  // Encoding fix: the log string was mojibake ("peticiĆ³n") from a bad
  // encoding round-trip; restored to "petición".
  console.log("Manipulador de petición 'iniciar' fue llamado.");
  response.writeHead(200, {"Content-Type": "text/html"});
  // Each call kicks off an asynchronous request and writes to `response`
  // when it finishes; all three run concurrently, so ending the response
  // here would cut them off — hence the question below.
  process1.fc1(response);
  process2.fc2(response);
  process3.fc3(response);
  //response.end() //WHERE DO I PLACE IT?
}
As you can see, the response parameter is passed to process1.js, which after parsing some data shoud echo some information.
process1.js
var request = require('request')
// fc1: downloads a JSON document and writes one line derived from it to
// the supplied HTTP response stream.
function fc1 (response){
var url = 'http://webpagethatreturnsJSONfile.com/'
//Download webpage data and parses it
request(url, function(err, resp, body) {
if (err)
// NOTE(review): throwing inside an async callback cannot be caught by
// the caller; unhandled, it will crash the process.
throw err;
var jsonResult = JSON.parse(body);
response.write("Number:" + jsonResult.number + '');
// NOTE(review): ending here would cut off process2/process3, which write
// to this same response object — see requestHandlers.js above.
//response.end() //WHERE DO I PLACE IT?
});
}
exports.fc1 = fc1;
The point is that I don't know where to put 'response.end()'. Each process takes some time and I want to 'end' when all processes have echo their text.
How could I do it?
I don't know if the code I've attached is enough for helping me.

Categories

Resources