Chaining HTTP requests in Electron using request-promise - javascript

UPDATE
OK, so I worked it out: when using highland.js I needed a .done() to finish the stream.
var requests = [];
_(fs.createReadStream("small.txt", { encoding: 'utf8' }))
    .splitBy('-----BEGIN-----\n')
    .splitBy('\n-----END-----\n')
    .filter(chunk => chunk !== '')
    .each(function (x) {
        requests.push(function (next) {
            Helpers.Authenticate()
                .then(function (response1) {
                    return Helpers.Retrieve();
                })
                .then(function (response2) {
                    return Helpers.Retrieve();
                })
                .then(function () {
                    next();
                });
        });
    })
    .done(function () {
        async.waterfall(requests);
    });
The request array is now working.
I'm having some issues using Electron and chained promises. Here's my code that's being run in the main process.
var request = require('request-promise');

request.post(tppAuthenticate)
    .then(function (responseFromFirstPost) {
        var newoptions = tppRetrieveCertificate(responseFromFirstPost.APIKey);
        return request.post(newoptions); // The return is important
    })
    .then(function (responseFromSecondPost) {
        console.log(responseFromSecondPost);
    });
The entire code block executes several thousand times, as it's called while iterating over each line of a file. The first request fires continuously, but this seems to significantly block/throttle the second request, which only gets called periodically.
I was hoping that the entire block would get called in sequence but this does not seem to be happening.
Here's my complete code block, including the iteration:
const _ = require('highland');
const request = require('request-promise');

_(fs.createReadStream(files[0], { encoding: 'utf8' }))
    .splitBy('-----BEGIN -----\n')
    .splitBy('\n-----END -----\n')
    .filter(chunk => chunk !== '')
    // .each(_.log);
    .each(function (x) {
        request.post(tppHelpers.Authenticate)
            .then(function (responseFromFirstPost) {
                const newoptions = tppHelpers.tppRetrieveCertificate(responseFromFirstPost.APIKey);
                console.log(newoptions);
                return request.post(newoptions); // The return is important
            })
            .then(function (responseFromSecondPost) {
                console.log(responseFromSecondPost);
                event.sender.send('selected-directory', responseFromSecondPost);
            });
    });

If you don't want to fire every request at once, which seems to be the case judging by the comments you made, then instead of running everything in parallel with:
.each(function (x) {
// ...
});
You can instead create an empty array before you run fs.createReadStream:
var requests = [];
and in your each callback create functions to add to your array:
.each(function (x) {
    requests.push(function (next) {
        // ...
        next();
    });
});
and then you can run it in series with:
async.series(requests);
using the async module.
Just make sure that the next() is called at the right moment, e.g. in the last .then() callback of the given chain of promises.
Another way would be to use async.queue:
var queue = async.queue(function (x, callback) {
    //
    callback();
}, 1);
(Here, make sure that callback() is called when it should be. Instead of 1 at the end, you could use some other number to have a certain number of requests run in parallel.)
And then in your each callback:
.each(function (x) {
queue.push(x);
});
See the async.queue docs for more info. (Thanks to robertklep for mentioning async.queue() in the comments.)
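For instance, here is a minimal sketch of the queue approach wired to the request chain from your question; it assumes the tppHelpers helpers and a stream named yourStream as in the examples below, and concurrency 1 so only one chain runs at a time:
const async = require('async');
const request = require('request-promise');

var queue = async.queue(function (x, callback) {
    request.post(tppHelpers.Authenticate)
        .then(response1 => request.post(tppHelpers.tppRetrieveCertificate(response1.APIKey)))
        .then(response2 => callback(null, response2))
        .catch(callback); // always finish the task, even on error
}, 1);

yourStream.each(function (x) {
    queue.push(x);
});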
By the way: do you even use the x in your iterations, or are you just making a bunch of identical requests for each line of your input?
Example
To answer your question from the comments, here is a way to construct the array of functions.
If this was your original code:
yourStream.each(function (x) {
    doRequest1()
        .then(function (response1) {
            return doRequest2();
        })
        .then(function (response2) {
            return doRequest3();
        });
});
Then you could construct that array of functions with something like:
var requests = [];
yourStream.each(function (x) {
    requests.push(function (next) {
        doRequest1()
            .then(function (response1) {
                return doRequest2();
            })
            .then(function (response2) {
                return doRequest3();
            })
            .then(function () {
                next();
            });
    });
});
And you could run them with:
async.series(requests);
Hope it helps.

Related

How to return the response from a Nodejs' HTTPS GET request?

I am still learning node.js, so please be kind.
I am struggling to get my head around some of the basics without having a book open.
I have written a function to go out and get some JSON from a URL. This works, but how do I return the data from the function and then pick bits out? Here is my code. SerialNumber is one of the JSON elements being returned.
const https = require('https');

function authenticate(uuid, cdcloc) {
    let url = cdcloc + "/api.php?uuid=" + uuid + '&auth';
    https.get(url, (res) => {
        let body = "";
        res.on("data", (chunk) => {
            body += chunk;
        });
        res.on("end", () => {
            try {
                let cdcResponse = JSON.parse(body);
                // do something with JSON
                return cdcResponse[0];
            } catch (error) {
                console.error(error.message);
            }
        });
    }).on("error", (error) => {
        console.error(error.message);
    });
}
const connection = authenticate( 'DATATOBEPASSED' , 'https://URLHERE');
console.log(connection.SerialNumber);
node-style callbacks
The answer from O.Jones is correct but it goes against Node's convention of error-first callbacks. I think it is also a mistake to reach for https.get every single time you need to make a request. It is a low-level function and because it asks you to connect so many bits and pieces, it is likely you will make easily-avoidable mistakes.
We can write a generic getString function that wraps https.get -
const https = require('https')

function getString(url, options, callback)
{ https
    .get(url, options, res => {
      let s = "";
      res.on("data", d => s += d)
      res.on("end", _ => callback(null, s)) // error-first callback
    })
    .on("error", e => callback(e)) // error-first callback
}
Now that we have a generic function to fetch a string, we don't need to write res.on("data", ...) and res.on("end", ...) in every function that makes a request. But don't stop here. You will often want to JSON.parse the result -
function getJSON(url, options, callback)
{ getString(url, options, function(err, data)
  { if (err) callback(err)                        // getString error
    else try { callback(null, JSON.parse(data)) } // JSON.parse success
    catch (e) { callback(e) }                     // JSON.parse error
  })
}
Now we can write authenticate without having to touch the bare https.get or worry about parsing JSON each time -
function authenticate(uuid, cdcloc, callback) // callback
{ const url = cdcloc + "/api.php?uuid=" + uuid + '&auth'
  getJSON(url, {}, function(err, json)
  { if (err)
      callback(err)                     // getJSON error
    else if (json.length == 0)
      callback(Error("empty response")) // empty response error
    else
      callback(null, json[0])           // success
  })
}
promises
But all of this is pretty painful still, isn't it? Enter Promises. Node-style callbacks were designed at a time when we didn't have access to async control flow primitives. We've come a long way since then. To see how promises work, we will re-implement the functions above, but this time without the need to pass callback and error-check everywhere -
const https = require('https')

function getString(url, options) // no callback
{ return new Promise((resolve, reject) => // return Promise
  { https
      .get(url, options, res => {
        let s = "";
        res.on("data", d => s += d)
        res.on("end", _ => resolve(s)) // success, resolve
      })
      .on("error", e => reject(e)) // failure, reject
  })
}
We immediately see the benefits of our new implementation when we rewrite getJSON -
function getJSON(url, options = {}) // no callback
{ return getString(url, options) // return promise
.then(s => JSON.parse(s)) // errors auto bubble up
}
And more benefits when we write authenticate -
function authenticate(uuid, cdcloc) // no callback
{ const url = `${cdcloc}/api.php?uuid=${uuid}&auth`
  return getJSON(url) // return promise
    .then(data => {
      if (data.length == 0)
        throw Error("empty response") // local error
      else
        return data[0] // success
    }) // no error-check
}
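For completeness, a minimal usage sketch of this promise-returning version (the uuid and URL are the placeholders from the question):
authenticate('DATATOBEPASSED', 'https://URLHERE')
  .then(connection => console.log(connection.SerialNumber)) // success path
  .catch(err => console.error(err.message))                 // any error in the chain lands here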
async/await
Even Promises have been around for a long time now, and we've learned a lot since their native inclusion in ECMAScript. Remembering to return promises and having to sequence all of the data through .then calls is tedious, the same way writing those initial res.on("data", ...) and res.on("end", ...) handlers felt. The async and await keywords allow us to work with asynchronous control flow without having to sacrifice a synchronous programming style -
async function getJSON(url, options = {}) // async
{ const s = await getString(url, options) // await
return JSON.parse(s) // auto wrapped in Promise
}
Writing authenticate is easy and feels natural -
async function authenticate(uuid, cdcloc) // async
{ const url = `${cdcloc}/api.php?uuid=${uuid}&auth`
const data = await getJSON(url) // await
if (data.length == 0)
throw Error("empty response") // throw if empty
else
return data[0] // return first
}
Using it is easy and feels natural too -
async function connect()
{ const connection = await authenticate( 'DATATOBEPASSED' , 'https://URLHERE')
console.log(connection.SerialNumber)
// ...
return "done" // or whatever
}
connect().then(console.log, console.error) // errors bubble all the way up
URL
I should also mention that building URLs with string concatenation is tedious and prone to a host of errors. You should develop a sense for this pain and know that it means there's room for relief. Take a look at the URL module that can safely build/manipulate URLs in pretty much every way imaginable.
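As a small illustration (not part of the original answer), the same URL could be built with the WHATWG URL class; note that searchParams encodes values for you, so the bare &auth flag comes out as auth=:
const url = new URL("/api.php", "https://URLHERE") // second argument is the base
url.searchParams.set("uuid", "DATATOBEPASSED")     // values are URL-encoded automatically
url.searchParams.set("auth", "")                   // serialized as auth= rather than a bare flag
console.log(url.href) // https://urlhere/api.php?uuid=DATATOBEPASSED&auth=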
Ah, the joys of learning Javascript's asynchronous programming model!
This line
const connection = authenticate( 'DATATOBEPASSED' , 'https://URLHERE');
returns to its caller before either event handler -- res.on("data", ...) or res.on("end", ...) -- gets called with the results of your get operation.
You need to use a callback from your authenticate() function to deliver the results to its caller.
function authenticate(uuid, cdcloc, callback) {
    let url = cdcloc + "/api.php?uuid=" + uuid + '&auth';
    https.get(url, (res) => {
        let body = "";
        res.on("data", (chunk) => {
            body += chunk;
        });
        res.on("end", () => {
            try {
                let cdcResponse = JSON.parse(body);
                // do something with JSON
                callback(cdcResponse[0]);
            } catch (error) {
                console.error(error.message);
            }
        });
    }).on("error", (error) => {
        console.error(error.message);
    });
}

authenticate('DATATOBEPASSED', 'https://URLHERE',
    function (connection) {
        console.log(connection.SerialNumber);
    }
);
There are language features known as Promises and async / await to help escape the confusing mess of nested callbacks we get into when we write significant code.

What are ways to run a script only after another script has finished?

Let's say this is my code (just a sample I wrote up to show the idea):
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");

RunFunc();

function RunFunc() {
    extract.Start();
    console.log("Extraction complete");
    rescore.Start();
    console.log("Scoring complete");
}
I want rescore.Start() not to run until the entire extract.Start() has finished. Both scripts contain a spiderweb of functions inside them, so putting a callback directly into the Start() function doesn't appear viable, as the final function won't return to it, and I am having a lot of trouble understanding how to use Promises. What are ways I can make this work?
These are the functions that extract.Start() begins and ends with. OpenWriter() is reached through multiple other functions and streams, with the actual fileWrite.write() being in another script that's attached to this (although it's not needed to detect the end of the run). Currently, fileWrite.on('finish') is where I want the script to be determined as done.
module.exports = {
    Start: function CodeFileRead() {
        //this.country = countryIn;
        //Read stream of the address components
        fs.createReadStream("Reference\\" + postValid.country + " ADDRESS REF DATA.csv")
            //Change separator based on file
            .pipe(csv({escape: null, headers: false, separator: delim}))
            //Indicate start of reading
            .on('resume', (data) => console.log("Reading complete postal code file..."))
            //Processes lines of data into storage array for comparison
            .on('data', (data) => {
                postValid.addProper[data[1]] = JSON.stringify(Object.values(data)).replace(/"/g, '').split(',').join('*');
            })
            //End of reading file
            .on('end', () => {
                postValid.complete = true;
                console.log("Done reading");
                //Launch main script, delayed to here in order to not read ahead of this stream
                ThisFunc();
            });
    },
    extractDone
}
function OpenWriter() {
    //File stream for writing the processed chunks into a new file
    fileWrite = fs.createWriteStream("Processed\\" + fileName.split('.')[0] + "_processed." + fileName.split('.')[1]);
    fileWrite.on('open', () => console.log("File write is open"));
    fileWrite.on('finish', () => {
        console.log("File write is closed");
    });
}
EDIT: I do not want to simply add the next script onto the end of the previous one and forego the master file, as I don't know how long it will be and it's supposed to be designed to accept additional scripts beyond our development period. I cannot just use a package as it stands, because approval time in the company takes up to two weeks and I need this more immediately.
DOUBLE EDIT: This is all my code; every script and function was written by me, so I can make the scripts being called do what's needed.
You can just wrap your function in a Promise and return that.
module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            fs.createReadStream(
                'Reference\\' + postValid.country + ' ADDRESS REF DATA.csv'
            )
                // .......some code...
                .on('end', () => {
                    postValid.complete = true;
                    console.log('Done reading');
                    resolve('success');
                });
        });
    }
};
And run RunFunc like this:
async function RunFunc() {
    await extract.Start();
    console.log("Extraction complete");
    await rescore.Start();
    console.log("Scoring complete");
}

//or IIFE
RunFunc().then(() => {
    console.log("All Complete");
});
Note: You can/should also handle errors by calling reject("some error") when an error occurs.
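For example, a minimal sketch of wiring the read stream's 'error' event to reject (this event name is standard for fs streams; the rest mirrors the snippet above):
return new Promise((resolve, reject) => {
    fs.createReadStream('Reference\\' + postValid.country + ' ADDRESS REF DATA.csv')
        .on('error', (err) => reject(err)) // e.g. a missing file now surfaces to the caller
        // .......some code...
        .on('end', () => {
            postValid.complete = true;
            resolve('success');
        });
});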
EDIT: After knowing about ThisFunc():
Making a new event emitter will probably be the easiest solution:
eventEmitter.js
const EventEmitter = require('events').EventEmitter
module.exports = new EventEmitter()
const eventEmitter = require('./eventEmitter');

module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            //after all of your code
            eventEmitter.once('WORK_DONE', () => {
                resolve("Done");
            });
        });
    }
};
function OpenWriter() {
    ...
    fileWrite.on('finish', () => {
        console.log("File write is closed");
        eventEmitter.emit("WORK_DONE");
    });
}
And run RunFunc as before.
There's no generic way to determine when everything a function call does has finished.
It might accept a callback. It might return a promise. It might not provide any kind of method to determine when it is done. It might have side effects that you could monitor by polling.
You need to read the documentation and/or source code for that particular function.
Use async/await (promises), example:
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");
RunFunc();
async function extract_start() {
    try {
        await extract.Start();
    }
    catch (e) {
        console.log(e);
    }
}

async function rescore_start() {
    try {
        await rescore.Start();
    }
    catch (e) {
        console.log(e);
    }
}

async function RunFunc() {
    await extract_start();
    console.log("Extraction complete");
    await rescore_start();
    console.log("Scoring complete");
}

The best way to summarize http returns on nodejs

I have to make multiple SOAP requests and need to gather all the responses together.
What I do now is:
for (let this_target of list_of_target)
{
    const req = http.request(conn, (res) => {
        let data = '';
        res.on('data', (chunk) =>
        {
            data += chunk;
        });
        res.on('end', () =>
        {
            ... do some stuff with the result of the SOAP request ...
            my_external_array.concat(myData);
        });
    });

    var om_req = {
        'CT_Get' : {
            ...some extra stuff...
            'target': this_target
            ...some extra stuff...
        }
    };
    var builder = new xml2js.Builder();
    var om_req_xml = builder.buildObject(om_req);
    req.write(om_req_xml);
    req.end();
}
OK, I know that I can't retrieve the data outside this loop because of the callback.
Curiously (or not), if I add a console.log(my_external_array) just after my_external_array.concat(myData);, the script shows the items being added after each request is processed... However, if I put the same console.log outside the callback (anywhere), I don't get anything back... :(
What would be the best way to process all those requests, join the results into an array, and pass it to some other function? I need all the values from these requests so I can summarize them and save the summarized result into a database.
Sorry for not being more detailed about what I'm doing...
This is a common problem with asynchronous operations. You could use Promises to solve your issue.
Here is a code snippet to give you an idea of how to use Promises:
const promisesList = [];

for (let this_target of list_of_target) {
    promisesList.push(new Promise((resolve, reject) => {
        // Your request code...
        // Make sure to resolve the promise with your transformed data, eg.:
        /*
        res.on('end', () => {
            // ... do some stuff with the result of the SOAP request ...
            resolve(myData);
        });
        */
    }));
}
// Process all the promises
Promise.all(promisesList).then(results => {
    // `results` is an array which contains all the resolved values.
    // This would be equivalent to `my_external_array` from your old code.
    console.log(results);
});
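To make that concrete, here is a minimal sketch of the whole loop wired into promises; it assumes the conn options, list_of_target and xml2js builder from your code, and myData / summarizeAndSave are stand-ins for your own processing and follow-up function:
const promisesList = list_of_target.map(this_target => new Promise((resolve, reject) => {
    const req = http.request(conn, (res) => {
        let data = '';
        res.on('data', (chunk) => data += chunk);
        res.on('end', () => {
            // ... do some stuff with the result of the SOAP request ...
            resolve(myData); // whatever your processing produces for this target
        });
    });
    req.on('error', reject); // a failed request makes Promise.all reject
    const om_req = { 'CT_Get': { 'target': this_target } }; // plus your extra stuff
    req.write(new xml2js.Builder().buildObject(om_req));
    req.end();
}));

Promise.all(promisesList)
    .then(results => summarizeAndSave(results)) // all responses, in request order
    .catch(err => console.error(err));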

Retry nodejs http.request (post,put,delete)

What is the correct way to implement a retry on error/condition without using any third party modules in nodejs, please?
I'm not sure how to call the same function on the error and how to then pass the original callback/data to the newly called function?
Do I need to destroy/end the sockets?
I've tried looking for examples but have only found reference to third party modules and http.get samples which don't seem to work. How does one test this?
I have attempted the below without success:
async pingApi(cb) {
    let options = {
        "method": "post",
        "path": `/API/pingAPI?${this.auth}`, // ect do I reference this path?
    };
    let request = await http.request(options, (response) => {
        let body = new Buffer(0);
        response.on('data', (chunk) => {
            body = Buffer.concat([body, chunk]);
        });
        response.on('end', function () {
            if (this.complete) {
                let decoded = new Buffer(body, 'base64').toString('utf8');
                let json = JSON.parse(decoded);
                if (json.result != 'OK') {
                    setTimeout(pingApi, 1000); //cant pass callback
                } else {
                    cb(null, json.result) //works
                }
            }
        });
    });
    request.end(); //does the socket need to be closed if retry is this function?
}
Any help, pointing in the right direction or criticism will be greatly appreciated as I think this is a very important learning curve for me.
Thank you in advance,
I'm not sure how to call the same function on the error and how to then pass the original callback/data to the newly called function?
I don't know for sure that everything else in your function is correct, but you can fix the recursion that you're asking about by changing this:
setTimeout(pingApi, 1000); //cant pass callback
to this:
setTimeout(() => {
this.pingApi(cb);
}, 1000);
You aren't showing the whole context here, but if pingApi() is a method, then you also need to keep track of the this value so you can call this.pingApi(cb). You can preserve the value of this by using arrow function callbacks like this:
response.on('end', () => { ... });
Other things I notice that look off here:
There's no reason to use await http.request(). http.request() does not return a promise so using await with it does not do anything useful.
Without the await, there's then no reason for your function to be declared async since nobody is using a returned promise from it.
It's not clear what if (this.complete) is meant to do. Since it is inside a regular function callback, the value of this won't be your pingApi object. You should either save this higher in the scope (typically with const self = this), or make all the internal callbacks arrow functions so the value of this is preserved.
You should probably put try/catch around JSON.parse() because it can throw if the input is not perfect JSON.
You should probably not retry forever. Servers really hate clients that retry forever because if something goes wrong, the client may just be bashing the server every second indefinitely. I'd suggest a certain number of max retries and then give up with an error.
Do I need to destroy/end the sockets?
No, that will happen automatically after the request ends.
How does one test this?
You have to create a test route in your server that returns the error condition for the first few requests and then returns a successful response and see if your code works with that.
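For instance, a minimal sketch of such a test server using Node's http module (the port, retry threshold, and result strings are made up for illustration; the body is base64-encoded JSON to match the client's decoding):
const http = require('http');

let hits = 0;
http.createServer((req, res) => {
    hits++;
    const result = hits <= 2 ? 'RETRY' : 'OK'; // fail the first two requests, then succeed
    const body = Buffer.from(JSON.stringify({ result })).toString('base64');
    res.end(body);
}).listen(8080);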
Here's an attempt at a code fixup (untested):
const maxRetries = 10;

pingApi(cb, cnt = 0) {
    let options = {
        "method": "post",
        "path": `/API/pingAPI?${this.auth}`, // ect do I reference this path?
    };
    let request = http.request(options, (response) => {
        let body = new Buffer(0);
        response.on('data', (chunk) => {
            body = Buffer.concat([body, chunk]);
        });
        response.on('end', () => {
            if (this.complete) {
                let decoded = new Buffer(body, 'base64').toString('utf8');
                try {
                    let json = JSON.parse(decoded);
                    if (json.result != 'OK') {
                        if (cnt < maxRetries) {
                            setTimeout(() => {
                                this.pingApi(cb, ++cnt);
                            }, 1000);
                        } else {
                            cb(new Error("Exceeded maxRetries with error on pingApi()"));
                        }
                    } else {
                        cb(null, json.result); // works
                    }
                } catch (e) {
                    // illegal JSON encountered
                    cb(e);
                }
            }
        });
    });
    request.end();
}
Remaining open questions about this code:
What is this.complete doing and what this should it be referencing?
Why is there no request.write() to send the body of the POST request?
I know you asked for no external modules, but my preferred way of doing this would be to use promises and the request-promise wrapper around http.request(), because it handles a lot of this code for you (checks response.status for you, parses JSON for you, uses a promise interface, etc...). You can see how much cleaner the code is:
const rp = require('request-promise');
const maxRetries = 5;

pingApi(cnt = 0) {
    let options = {
        method: "post",
        url: `http://somedomain.com/API/pingAPI?${this.auth}`,
        json: true
    };
    return rp(options).then(result => {
        if (result.result === "OK") {
            return result;
        } else {
            throw "try again"; // advance to .catch handler
        }
    }).catch(err => {
        if (cnt < maxRetries) {
            return pingApi(++cnt);
        } else {
            throw new Error("pingApi failed after maxRetries");
        }
    });
}
And, then sample usage:
pingApi().then(result => {
    console.log(result);
}).catch(err => {
    console.log(err);
});
Your use of async/await with the core Node server intrigued me, and I've tried to use as many of the new async features as possible.
This is what I end up with: https://runkit.com/marzelin/pified-ping
const pify = require("util").promisify;
const http = require("http");

const hostname = "jsonplaceholder.typicode.com";
const failEndpoint = "/todos/2";
const goodEndpoint = "/todos/4";

let options = {
    method: "get",
    path: `${failEndpoint}`,
    hostname
};

async function ping(tries = 0) {
    return new Promise((res) => {
        const req = http.request(options, async (response) => {
            let body = new Buffer(0);
            response.on("data", (chunk) => {
                body = Buffer.concat([body, chunk]);
            });
            const on = pify(response.on.bind(response));
            await on("end");
            let decoded = new Buffer(body, 'base64').toString('utf8');
            let json = JSON.parse(decoded);
            if (json.completed) {
                return res("all good");
            }
            if (tries < 3) {
                console.log(`retrying ${tries + 1} time`);
                return res(ping(tries + 1));
            }
            return res("failed");
        });
        req.on('error', (e) => {
            console.error(`problem with request: ${e.message}`);
        });
        // write data to request body
        req.end();
    });
}

const status = await ping();
"status: " + status

How do I wait until an asynchronous process inside a loop is finished before exiting the loop?

I have some asynchronous code running inside a JavaScript forEach loop. I want to wait until the code inside the asynchronous process has finished running before proceeding after the loop.
Example below:
ids is an array of strings. db is a node module I created to work with MongoDB
var appIdsNotFound = "";
var count = 0;
ids.forEach(function(id) {
    output[count] = {};
    //console.log(id);
    db.findApp(id, function(error, result) {
        if (error) {
            fatalError = true;
            console.log(error);
        } else {
            if (result) {
                output[count] = result;
                //console.log(output[count]);
                count++;
            } else {
                appNotFound = true;
                appIdsNotFound += id + ", ";
                console.log(appIdsNotFound);
            }
        }
    });
});
//more code that we want to wait before executing
Is there a way to wait before executing the rest of the code that is outside the loop, and if so, how would I go about doing that?
1. Assuming db is some module to access your DB, try to look for the synchronous version. This assumes you are ok with synchronous, since you're attempting to write it that way, waiting for everything before proceeding.
2. If your db library uses promises, you can use it in conjunction with Promise.all. Fire a request for each item, collect all their promises in an array, feed them to Promise.all. The promise from Promise.all will resolve when all promises resolve.
const promises = ids.map(id => db.promiseReturningFindApp(id));
const allRequests = Promise.all(promises).then(responses => {
    // responses is an array of all results
});
3. If you don't have a promise-returning version of your API, wrap db.findApp in a promise yourself, then do suggestion #2.
function promiseReturningFindApp(id) {
    return new Promise((resolve, reject) => {
        db.findApp(id, (error, result) => {
            if (error) reject(error);
            else resolve(result);
        });
    });
}
Options 2 and 3 are asynchronous, and as such, you technically don't "wait". Therefore, code that needs to execute after can only reside in a callback.
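For instance, here is a minimal sketch of option 2 applied to your variables, assuming promiseReturningFindApp from above and that a missing app resolves to a falsy result:
Promise.all(ids.map(id => promiseReturningFindApp(id)))
    .then(results => {
        const output = results.filter(Boolean);                    // apps that were found
        const appIdsNotFound = ids.filter((id, i) => !results[i]); // falsy result means not found
        // ...the code that had to wait for the loop goes here...
        console.log(output.length, appIdsNotFound.join(", "));
    })
    .catch(error => console.log(error)); // any db.findApp error lands here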
You could make every item into a function and use the async module:
var async = require('async');
var output = [], appsNotFound = [];

var appRequests = ids.map((id) => (cb) => {
    db.findApp(id, (error, result) => {
        if (error) {
            appsNotFound.push(id);
            return cb();
        }
        output.push(id);
        return cb();
    });
});

async.parallel(appRequests, () => {
    console.log('N# of Apps found', output.length);
    console.log("Ids not found:", appsNotFound.join(','));
    console.log("N# Apps not found:", appsNotFound.length);
});
If the DB can't handle the parallel load, try using async.series instead.
You can do something similar with promises if you prefer, but this requires fewer lines of code.
