XMLHttpRequest inside forEach loop doesn't work - javascript

Hi, I'm writing a short Node.js app that sends an XMLHttpRequest to an API each time it iterates over an array. The issue is that it continues the forEach loop before the request returns because of the async nature. I'm probably overlooking something big, but I've spent the better part of this afternoon trying to rack my brain over this. I've tried using await to no avail; any solutions would be appreciated.
Thanks in advance.
Node.js app
const mongoose = require("mongoose");
const fs = require("fs");
const ajax = require("./modules/ajax.js");

// Bring in models
let Dictionary = require("./models/dictionary.js");

//=============================
// MongoDB connection
//=============================
// Opens a connection to the "bookCompanion" database
mongoose.connect("mongodb://localhost/bookCompanion");
let db = mongoose.connection;

// If the connection encounters an error, output it to the console.
db.on("error", (err) => {
    console.error("Database connection failed.", err);
});

// Runs once, when the connection to the database is open.
db.once("open", () => {
    console.info("Connected to MongoDB database...");
    fs.readFile("./words.json", "utf8", (err, data) => {
        if (err) {
            console.log(err);
        } else {
            data = JSON.parse(data);
            data.forEach((word, index) => {
                let search = ajax.get(`LINK TO API?=${word}`);
                search.then((response) => {
                    let newWord = new Dictionary({
                        Word: response.word,
                        phonetic: response.phonetic,
                        meaning: response.meaning
                    }).save();
                    console.log(response);
                }).catch((err) => {
                    console.log(err);
                });
            });
        }
    });
});
XMLHttpRequest module
// GET request module utilising promises
const XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;

const get = (url) => {
    // This function returns a promise. The resolved value is accessed
    // through .then and the rejection reason through .catch.
    return new Promise((resolve, reject) => {
        // Create a new XMLHttpRequest (AJAX) request
        let xhr = new XMLHttpRequest();
        // Set up an asynchronous GET request pointing at the given URL
        xhr.open("GET", url, true);
        xhr.onload = () => {
            if (xhr.status == 200) {
                // When loaded, pass the parsed response over to the .then method
                resolve(JSON.parse(xhr.responseText));
            } else {
                // On a non-200 response, pass the status text to the .catch method for error handling
                reject(xhr.statusText);
            }
        };
        xhr.onerror = () => {
            // On a network error, pass the status code to the .catch method for error handling
            reject(xhr.status);
        };
        // Send the request (handlers are attached first so none are missed)
        xhr.send();
    });
};

module.exports.get = get;

You should use Promise.all to wait for all the promises to complete. Promise.all takes an array of promises as input and waits until all of them resolve; it rejects with the reason of the first promise that rejects. If you use Promise.all, your code will look something like this:
const mongoose = require("mongoose");
const fs = require("fs");
const ajax = require("./modules/ajax.js");

// Bring in models
let Dictionary = require("./models/dictionary.js");

//=============================
// MongoDB connection
//=============================
// Opens a connection to the "bookCompanion" database
mongoose.connect("mongodb://localhost/bookCompanion");
let db = mongoose.connection;

// If the connection encounters an error, output it to the console.
db.on("error", (err) => {
    console.error("Database connection failed.", err);
});

// Runs once, when the connection to the database is open.
db.once("open", () => {
    console.info("Connected to MongoDB database...");
    fs.readFile("./words.json", "utf8", (err, data) => {
        if (err) {
            console.log(err);
        } else {
            data = JSON.parse(data);
            // Build one promise per word, then wait for all of them.
            var promiseArr = data.map((word) =>
                ajax.get(`LINK TO API?=${word}`)
                    .then((response) => {
                        console.log(response);
                        return new Dictionary({
                            Word: response.word,
                            phonetic: response.phonetic,
                            meaning: response.meaning
                        }).save();
                    })
                    .catch((err) => {
                        console.log(err);
                    })
            );
            Promise.all(promiseArr).then((responses) => {
                // whatever you want to do after completion of all the requests
            });
        }
    });
});

It seems my code works fine with smaller arrays; the real issue I'm having is the blocking nature of the forEach loop and memory. The array I need to loop through contains over 400,000 words, and the app runs out of memory before the forEach loop can finish and free up the call stack for the HTTP requests to resolve.
Any information on how I can create a sequential loop that doesn't block the call stack would be very appreciated.
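One possible way to keep memory bounded (a sketch only, reusing the ajax.get helper and Dictionary model from above; BATCH_SIZE is an illustrative number, not from the original post) is to process the words in fixed-size batches inside an async function, so only a handful of requests are ever in flight at once:
const BATCH_SIZE = 100; // illustrative value, tune to taste

async function processWords(words) {
    for (let i = 0; i < words.length; i += BATCH_SIZE) {
        const batch = words.slice(i, i + BATCH_SIZE);
        // Wait for the whole batch to finish before starting the next,
        // so at most BATCH_SIZE requests are pending at any time.
        await Promise.all(batch.map((word) =>
            ajax.get(`LINK TO API?=${word}`)
                .then((response) => new Dictionary({
                    Word: response.word,
                    phonetic: response.phonetic,
                    meaning: response.meaning
                }).save())
                .catch((err) => console.log(err))
        ));
    }
}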

Related

How to return the results of a MySQL query using Express.js?

I am trying to get the results of my simple SELECT command into the index.js file, where I would like to have all records in an array. If I print the results in database.js, the JSON.parse works fine. But if I return them and try to use them in index.js where I need them, I always get undefined when I print it.
index.js CODE
const express = require('express');
const app = express();
const database = require('./database');

app.use(express.json());
app.use(express.urlencoded());
app.use(express.static('public'));

app.get('/form', (req, res) => {
    res.sendFile(__dirname + '/public/index.html');
    console.log(req.url);
    console.log(req.path);
})

app.listen(4000, () => {
    console.log("Server listening on port 4000");
    database.connection;
    database.connected();
    //console.log(database.select());
    let results = [];
    //results.push(database.select('username, password'));
    let allPlayer = database.select('username');
    console.log(allPlayer);
});
database.js CODE
let mysql = require('mysql');

const connection = mysql.createConnection({
    host: 'localhost',
    database: 'minigames',
    user: 'root',
    password: 'root'
});

function connected() {
    connection.connect((err) => {
        if (err) throw err;
        console.log("Connected...");
    })
}

function select(attribute) {
    let allPlayer = [];
    let sql = `SELECT ${attribute} FROM player`;
    let query = connection.query(sql, (err, result, field) => {
        if (err) throw err;
        return Object.values(JSON.parse(JSON.stringify(result)));
    })
}

module.exports = {connection, connected, select};
Understand that one of the main things that makes JavaScript different from other languages is that it's asynchronous; in simple terms, code doesn't "wait" for the code before it to finish executing. Because of this, when you query a database, which takes some time, the code after it gets impatient and executes regardless of how the query is doing. To solve this problem, the mysql package utilizes callbacks, which let you pass in a function to execute once the query is finished, receiving the query's result.
Because the library operates on callbacks, it doesn't return anything; that seems quite problematic for using it somewhere else, doesn't it?
To solve this problem, we can make our own callback. Or better yet, use the newer JavaScript feature called promises, where you can basically "return" anything from a function, even when you're in a callback.
Let's implement it with the query:
function select(attribute) {
    return new Promise((resolve, reject) => {
        let sql = `SELECT ${attribute} FROM player`;
        let query = connection.query(sql, (err, result, field) => {
            if (err) return reject(err);
            resolve(Object.values(JSON.parse(JSON.stringify(result))));
        });
    });
}
To "return" from a promise, we pass a value to the resolve function. To throw an error, we call the reject function with the error as the argument.
Our new function is rather easy to use.
select("abcd").then(result => {
console.log("Result received:", result);
}).catch(err => {
console.error("Oops...", err);
});
You might look at this code and go, "Wait a minute, we're still using callbacks. This doesn't solve my problem!"
Introducing async/await, a feature that lets you do just that. We can call the function like this instead:
// all 'await's must be wrapped in an 'async' function
async function query() {
    const result = await select("abcd"); // woah, this new await keyword makes life so much easier!
    console.log("Result received:", result);
}
query(); // yay!!
To implement error catching, you can wrap your code inside a try {...} catch {...} block.
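For instance, a minimal sketch of the query function above with error catching added, reusing the same select call:
async function query() {
    try {
        const result = await select("abcd");
        console.log("Result received:", result);
    } catch (err) {
        // any reject(err) from the promise lands here
        console.error("Oops...", err);
    }
}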

async/await with Limiter for sending requests

I'm trying to limit the number of requests I send to an API.
I'm using Limiter and it's working just like I need; the only issue is that I can't find a way to use it with await (I need all the responses before rendering my page).
Can someone give me a hand with it?
Btw, the log returns a boolean.
const RateLimiter = require('limiter').RateLimiter;
const limiter = new RateLimiter(50, 5000);

for (let i = 0; i < arrayOfOrders.length; i++) {
    const response = limiter.removeTokens(1, async (err, remainingRequests) => {
        console.log('request');
        return await CoreServices.load('updateOrder', {
            "OrderNumber": arrayOfOrders[i],
            "WorkFlowID": status
        });
    });
    console.log('response', response);
}
console.log('needs to log after all the request');
console.log('needs to log after all the request');
This is logging:
response true
response true
response false
needs to log after all the request
request
request
request
...
Promisifying .removeTokens will help; see if this code works.
const RateLimiter = require('limiter').RateLimiter;
const limiter = new RateLimiter(50, 5000);

const tokenPromise = n => new Promise((resolve, reject) => {
    limiter.removeTokens(n, (err, remainingRequests) => {
        if (err) {
            reject(err);
        } else {
            resolve(remainingRequests);
        }
    });
});

(async () => { // this line required only if this code is top level, otherwise use in an `async function`
    const results = await Promise.all(arrayOfOrders.map(async (order) => {
        await tokenPromise(1);
        console.log('request');
        return CoreServices.load('updateOrder', {
            "OrderNumber": order,
            "WorkFlowID": status
        });
    }));
    console.log('needs to log after all the request');
})(); // this line required only if this code is top level, otherwise use in an `async function`
Explanation
Firstly:
const tokenPromise = n => new Promise((resolve, reject) => {
    limiter.removeTokens(n, (err, remainingRequests) => {
        if (err) {
            reject(err);
        } else {
            resolve(remainingRequests);
        }
    });
});
promisifies limiter.removeTokens for use with async/await. In Node.js you could use the built-in promisifier (util.promisify), but lately I've had too many instances where that fails, so a manual promisification works just as well.
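For reference, a minimal sketch of the built-in approach, assuming a limiter version where removeTokens takes a standard error-first callback as in the code above:
const util = require('util');
// bind so `this` inside removeTokens still points at the limiter
const tokenPromise = util.promisify(limiter.removeTokens.bind(limiter));
// usage: const remaining = await tokenPromise(1);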
Now the code is easy: you can use arrayOfOrders.map rather than a for loop to create an array of promises that all run in parallel, as much as the rate limiting allows (the rate limiting is done inside the callback).
await Promise.all(...) will wait until all the CoreServices.load calls have completed (or one has failed; you could use await Promise.allSettled(...) instead if you want, as sketched below).
The code in the map callback is tagged async, so:
await tokenPromise(1);
will wait until the removeTokens callback is called, and only then is the request
return CoreServices.load
made.
Note: this was originally return await CoreServices.load, but the await is redundant, since return await somepromise in an async function behaves the same as return somepromise; adjust your code accordingly.
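For completeness, a sketch of the Promise.allSettled variant mentioned above (available in Node 12.9+), which waits for every request even if some fail:
const results = await Promise.allSettled(arrayOfOrders.map(async (order) => {
    await tokenPromise(1);
    return CoreServices.load('updateOrder', {
        "OrderNumber": order,
        "WorkFlowID": status
    });
}));
// each entry is { status: "fulfilled", value } or { status: "rejected", reason }
const failed = results.filter(r => r.status === "rejected");
console.log(`${failed.length} of ${results.length} requests failed`);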

How can I extract values from a promise and send them to the client via a Node.js server?

I have a promise that returns data, and I want to pass the promise's values as a response to the client (web browser). I know I should probably use asynchronous JS, but I'm not sure how to do that. Could you please give me some advice?
Here is what it looks like:
if (req.url === "/api/posts") {
    res.writeHead(200, {"Content-Type": "application/json"});
    let db = new AppDAO('./db/db.sqlite3');
    const postsDb = new PostsRepository(db);
    let posts = postsDb.getAll();
    db.close();
    console.log(posts);
    res.end();
}
What you need is to build the response when the DB promise resolves:
postsDb.getAll().then(posts => {
    console.log(posts);
    res.send(posts);
}).finally(() => db.close());
Or if you want to use the modern syntax, and can declare the surrounding function as async:
try {
    const posts = await postsDb.getAll();
    console.log(posts);
    res.send(posts);
} catch (e) {
    // Handle database error
} finally {
    db.close();
}
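Put together, a sketch of the whole handler declared async. This assumes an Express-style route; the AppDAO and PostsRepository names are from the question, and the question's raw http server would use res.writeHead/res.end instead of res.json:
app.get("/api/posts", async (req, res) => {
    const db = new AppDAO("./db/db.sqlite3");
    const postsDb = new PostsRepository(db);
    try {
        const posts = await postsDb.getAll();
        res.json(posts);
    } catch (e) {
        res.status(500).send("Error retrieving posts");
    } finally {
        db.close();
    }
});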

Value retrieved from MongoDB is undefined when fetched by Express.js server

Basically what I'm trying to accomplish is pulling the data from a MongoDB database and feeding it into my Express.js server. Both are run on localhost with different addresses. I have tried numerous different methods and can't seem to get it to work.
Express portion
app.get('/products', function(request, response) {
    var query = request.query.keywords;
    var promise = new Promise(async (resolve, reject) => {
        var productList = await db.getProducts(query);
        resolve(productList);
    });
    promise.then(function(productList) {
        console.log(productList); // <--- productList is undefined here
        response.json({
            productList
        });
    }).catch('Error retrieving data');
    // var productList = Promise.resolve(db.getProducts(query));
    // productList.then(products => response.json({products}));
});
MongoDB portion
StoreDB.prototype.getProducts = function(queryParams) {
    return this.connected.then(function(db) {
        // TODO: Implement functionality
        const collection = db.collection('products');
        var productList;
        if (!queryParams) {
            console.log('no queryParams');
            collection.find({}).toArray(function(err, docs) {
                productList = convertToObject(docs);
                // console.log(productList); // <--- productList is defined here
                return productList;
            });
            ////////////
Some extra information:
- StoreDB is a functional class with only one property, connected, which is a promise that gets resolved once the connection to the database has been established.
- The Express.js server calls getProducts, which then resolves the promise.
- Within the getProducts call, productList is defined, but when it returns, the value is undefined.
With this being said, I thought that by putting getProducts within a promise, it would wait for the function call to complete, then resolve it. However, that is not the case. Some insights would be greatly appreciated. Thanks.
SOLVED
Thanks to @Renan Le Caro:
app.get('/products', function(request, response, next) {
    db
        .getProducts(request.query.keywords)
        .then(productList => response.json({productList}))
        .catch(err => next(err));
});

StoreDB.prototype.getProducts = function(queryParams) {
    return this.connected.then(db =>
        new Promise((resolve, reject) => {
            db
                .collection('products')
                .find({})
                .toArray((err, docs) => err ? reject(err) : resolve(docs));
        })
    );
};
getProducts already returns a Promise; you shouldn't need to wrap it in another Promise, and doing so just opens the code up to more places where things can go wrong. Also, Promise.resolve does not wait for a promise to resolve; instead, it immediately returns a new promise that has already resolved with the value you pass it. That's not what you want. Finally, .catch takes a function to run when the promise fails, not a string.
Your code should be as simple as this:
app.get('/products', function(request, response) {
    var query = request.query.keywords;
    db.getProducts(query).then(function(productList) {
        console.log(productList);
        response.json({
            productList
        });
    }).catch(function() {
        response.send('Error retrieving data');
    });
});
Or, if you prefer the async/await syntax:
app.get('/products', async function(request, response) {
    var query = request.query.keywords;
    try {
        const productList = await db.getProducts(query);
        console.log(productList);
        response.json({
            productList
        });
    } catch (err) {
        response.send('Error retrieving data');
    }
});
This is just a promises misuse problem. Here's a fix (not tested).
app.get('/products', function(request, response, next) {
    db
        .getProducts(request.query.keywords)
        .then(productList => response.json({productList}))
        .catch(err => next(err));
});

StoreDB.prototype.getProducts = function(queryParams) {
    return this.connected.then(db =>
        new Promise((resolve, reject) => {
            db
                .collection('products')
                .find({})
                .toArray((err, docs) => err ? reject(err) : resolve(docs));
        })
    );
};

Promisify writing a file to the filesystem

I am interested in understanding how to promisify this block of code:
const http = require('http');
const fs = require('fs');

const download = function(url, dest, cb) {
    let file = fs.createWriteStream(dest);
    const request = http.get(url, function(response) {
        response.pipe(file);
        file.on('finish', function() {
            file.close(cb); // close() is async, call cb after close completes.
        });
    }).on('error', function(err) { // Handle errors
        fs.unlink(dest); // Delete the file async. (But we don't check the result)
        if (cb) cb(err.message);
    });
};
My first take on this was something to the extent of:
const http = require('http');
const fs = require('fs');
const download = async (url, dest, cb) => {
let file = fs.createWriteStream(dest);
const request = http.get(url, function(response) {
response.pipe(file);
file.on('finish', function() {
const closed = await file.close(cb); // close() is async, await here?
if (closed) {
// handle cleanup and retval
}
});
}).on('error', function(err) { // Handle errors
const deleted = await fs.unlink(dest); // Delete the file async.
if (!deleted) { ... }
});
};
The implementation above is clearly wrong. What is the right way to approach this, removing the callbacks and just using async/await?
Here's a way to manually wrap the pipe operation in a promise. Unfortunately, most of this is just error handling to cover all the possible places an error can occur:
const http = require('http');
const fs = require('fs');

const download = function(url, dest) {
    return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(dest);

        // centralize error cleanup function
        function cleanup(err) {
            reject(err);
            // cleanup partial results when aborting with an error
            file.on('close', () => {
                fs.unlink(dest, () => {}); // fire and forget; result not checked
            });
            file.end();
        }

        file.on('error', cleanup).on('finish', resolve);

        const request = http.get(url, function(response) {
            if (response.statusCode < 200 || response.statusCode >= 300) {
                cleanup(new Error(`Unexpected Request Status Code: ${response.statusCode}`));
                return;
            }
            response.pipe(file);
            response.on('error', cleanup);
        }).on('error', cleanup);
    });
};
download(someURL, someDest).then(() => {
    console.log("operation complete");
}).catch(err => {
    console.log(err);
});
This does not wait for files to be closed or removed in the error conditions before rejecting (figuring that there's typically nothing constructive to do if those cleanup operations fail anyway). If that is desired, it could be added easily by calling reject(err) from the asynchronous callbacks for those cleanup operations, or by using the fs.promises version of those functions and awaiting them.
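If waiting on that cleanup is desired, here is a sketch of cleanup() reworked to do so (assuming Node 10+ for the fs.promises API; file, dest and reject come from the enclosing closure above):
function cleanup(err) {
    file.on('close', async () => {
        try {
            // remove the partial file before surfacing the error
            await fs.promises.unlink(dest);
        } catch (unlinkErr) {
            // nothing constructive to do here; report the original error
        }
        reject(err);
    });
    file.end();
}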
A few things to note. This is mostly error handling, because there are three possible places errors can occur, and some of those errors require cleanup work:
- Added the required error handling.
- In the OP's original code, they called file.close(), but file is a stream and there is no .close() method on a write stream. You call .end() to close a write stream.
- You also probably need to check for an appropriate response.statusCode, because http.get() still returns a response object and stream even if the status is something like 4xx or 5xx.
Here's how I'd re-write your node-style callback API as an asynchronous function:
const http = require('http');
const fs = require('fs');

async function download(url, dest) {
    const response = await new Promise((resolve, reject) => {
        http.get(url, resolve).once('error', reject);
    });

    if (response.statusCode < 200 || response.statusCode >= 300) {
        throw new Error(`${response.statusCode} ${http.STATUS_CODES[response.statusCode]}`);
    }

    const file = await fs.promises.open(dest, 'w');

    try {
        for await (const data of response) {
            await file.write(data);
        }
    } catch (error) {
        await file.close();
        await fs.promises.unlink(dest);
        throw error;
    }

    await file.close();
}
Note that this approach uses the FileHandle class in the fs.promises namespace, as well as the Symbol.asyncIterator interface defined on the Readable stream class, which allows you to consume the 'data' events of the response with a for await...of loop and propagate error handling from the 'error' event of the response to the catch block, by implicitly rejecting the promise returned by the underlying asynchronous iterator.
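As a standalone illustration of that iteration pattern (a sketch; printBodySize and its url argument are placeholders, not part of the answer above):
const http = require('http');

async function printBodySize(url) {
    const response = await new Promise((resolve, reject) => {
        http.get(url, resolve).once('error', reject);
    });
    let total = 0;
    // Readable streams implement Symbol.asyncIterator, so each 'data'
    // chunk arrives through for await...of; an 'error' event rejects
    // the iterator and surfaces in the caller's catch block.
    for await (const chunk of response) {
        total += chunk.length;
    }
    console.log(`received ${total} bytes`);
}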
