Running a synchronous call after awaiting an asynchronous function - javascript

I am writing a node js program as follows. The purpose of this code is to parse from multiple pages of an API (variable number of pages thus scraping the first page to see how many pages are to be scraped) followed by uploading all the pages to MongoDB and then "analysing the pages" with a function in another file (manipulate keyword):
const MongoClient = require('mongodb').MongoClient
const fetch = require('node-fetch')
const config = require('./config.json')
const manipulate = require('./manipulateDB')
// Repeatedly downloads every auction-house page, stores the result in MongoDB,
// and hands the data to the analysis module, then re-schedules itself.
async function startAHLoop() {
// Fetches a single API page and parses it as JSON.
// On a network error this resolves to undefined (the .catch only logs).
async function getAuctionPage(page = 0) {
return fetch(`https://website.net/page/${page}`).then(res => {
return res.json()
}).catch (error => console.error("Faced an error: " + error))
}
// Downloads page 0 to learn the page count, then fires off all remaining
// page requests in parallel and polls until every one has finished.
async function getFullAH() {
try {
let ah = []
let completedPages = 0
let firstPage = await getAuctionPage(0)
for (let i = 1; i <= firstPage.totalPages; i++) {
getAuctionPage(i).then((page) => {
// Count every settled page except the final one, which is counted below.
if (completedPages !== firstPage.totalPages - 1) {
completedPages++
}
if (page.success) {
// NOTE(review): `auction` has no let/const — an implicit global
// (ReferenceError in strict mode).
for (auction of page.auctions) {
ah.push(auction)
if (completedPages == firstPage.totalPages - 1) {
completedPages++
}
}
} else if (completedPages == firstPage.totalPages - 1) {
completedPages++
}
})
}
// Wait for the whole ah to download
// NOTE(review): 10 ms polling on a hand-rolled counter; Promise.all over
// the page promises would be simpler and less fragile.
while (completedPages !== firstPage.totalPages)
await new Promise((resolve) => setTimeout(resolve, 10))
return ah
} catch (e) {
console.log('Failed to update auctions', e)
// Resolves to undefined on failure; callers must handle that shape.
return
}
}
// One update cycle: fetch everything, replace the collection, run analysis.
async function main() {
let startTime = Date.now()
if (!db.isConnected()) await connectToDB()
// NOTE(review): `data` is not defined anywhere in this file — the connect
// callback stores handles in the globals `db` / `skyblock`, so this was
// presumably meant to be `skyblock.collection('auctions')`; verify.
let auctionCollection = data.collection('auctions')
let ah = await getFullAH()
let timeTaken = Date.now() - startTime
// getFullAH() resolves to an array on success, undefined on failure, so this
// `ah.ok` probe is a roundabout success check (and throws when ah is undefined).
if (typeof ah.ok == 'undefined') {
auctionCollection.drop()
auctionCollection.insertMany(ah)
console.log(`Auction update complete in ${timeTaken} ms ${Date().toLocaleString()}`)
console.log("Starting analysis")
// NOTE(review): the same data is inserted a second time here, duplicating
// every document — only one (awaited) insertMany should remain.
await auctionCollection.insertMany(ah)
manipulate.printAHInfos()
} else {
console.log(`Auction update failed in ${timeTaken} ms ${Date().toLocaleString()}`)
}
// This essentially is the delay instead of every 60000 ms
setTimeout(main, 60000 - timeTaken)
}
main()
}
// Opens the MongoDB connection, retrying forever on error, and stores the
// handles in the implicit globals `db` and `skyblock`.
// `isFirstConnect` kicks off the main loop exactly once, on the initial call.
async function connectToDB(isFirstConnect) {
console.log('Connecting to db...')
// Callback-style connect; completion is observed by polling `db` below.
MongoClient.connect(
config.mongoSRV,
{ useNewUrlParser: true, useUnifiedTopology: true },
(err, DB) => {
if (err) return connectToDB()
// NOTE(review): `db` and `skyblock` are implicit globals (never declared
// in this file) — a ReferenceError in strict mode.
db = DB
skyblock = DB.db('skyblock')
}
)
// Poll every 10 ms until the callback above has assigned `db`.
while (typeof db == 'undefined') {
await new Promise((resolve) => setTimeout(resolve, 10))
}
if (!db.isConnected()) {
console.log('Something weird happened... re-starting db connection')
return connectToDB()
}
console.log('Successful connection to database')
if (isFirstConnect) startAHLoop()
return db
}
connectToDB(true)
I am looking for a way to wait until collection.insertMany(ah) has finished before doing manipulate.AHdata
The issue I get is that manipulate.AHdata is invoked before collection.insertMany(ah) is finished. Resulting as follows when manipulate.AHdata outputs "Invoked":
Invoked
Connecting to db...
I tried using the following:
collection.insertMany(ah)
await collection.insertMany(ah)
manipulate.AHdata
But it doesn't work, though.
Any idea of what I could do?
Thanks for the help and have a great day!

Following up with all my comments and points, here's what I believe is a better (but obviously untested) version of the code :
const MongoClient = require('mongodb').MongoClient
const fetch = require('node-fetch')
const config = require('./config.json')
const manipulate = require('./manipulateDB')
let auctionCollection
// Fetches one auction-house page and parses the JSON body.
// On any network/parse error the error is logged and the promise
// resolves to undefined, so callers must handle that shape.
async function getAuctionPage(page = 0) {
  return fetch(`https://website.net/page/${page}`)
    .then((response) => response.json())
    .catch((err) => console.error("Faced an error: " + err))
}
// Downloads the complete auction house: page 0 reveals the total page count,
// then all remaining pages are fetched in parallel (Promise.all preserves
// page order). Pages that failed to download (getAuctionPage resolves to
// undefined on error) are skipped instead of crashing the whole run.
// Resolves to an array of auctions on success, undefined on failure.
async function getFullAH() {
  try {
    const firstPage = await getAuctionPage(0)
    // Build [1 .. totalPages] and fetch the pages concurrently instead of
    // one-by-one; this is much faster for many pages.
    const pageNumbers = []
    for (let i = 1; i <= firstPage.totalPages; i++) pageNumbers.push(i)
    const pages = await Promise.all(pageNumbers.map((i) => getAuctionPage(i)))
    const ah = []
    for (const page of pages) {
      // A failed fetch yields undefined; guard so one bad page doesn't throw.
      if (page && page.auctions) ah.push(...page.auctions)
    }
    return ah
  } catch (e) {
    console.log('Failed to update auctions', e)
    return
  }
}
// One refresh cycle: download the full auction house, replace the stored
// copy, run the analysis, then schedule the next cycle.
async function main() {
  const startTime = Date.now()
  const ah = await getFullAH()
  const timeTaken = Date.now() - startTime
  if (Array.isArray(ah)) {
    // drop() rejects when the collection does not exist yet (first run);
    // that is fine — insertMany() recreates it.
    await auctionCollection.drop().catch(() => {})
    // Insert exactly once and wait for it, so the analysis below only runs
    // after the data is fully stored. (The original called insertMany twice,
    // duplicating every document.)
    await auctionCollection.insertMany(ah)
    console.log(`Auction update complete in ${timeTaken} ms ${new Date().toLocaleString()}`)
    console.log("Starting analysis")
    manipulate.printAHInfos()
  } else {
    // getFullAH() resolved to undefined — the download failed this cycle.
    console.log(`Auction update failed in ${timeTaken} ms ${new Date().toLocaleString()}`)
  }
  // This essentially is the delay instead of every 60000 ms; clamp so a slow
  // cycle doesn't produce a negative delay.
  setTimeout(main, Math.max(0, 60000 - timeTaken))
}
// Connects to MongoDB, retrying until it succeeds, then caches the auctions
// collection in the module-level `auctionCollection` and starts the loop.
async function connectToDB() {
  console.log('Connecting to db...')
  let client
  try {
    client = await MongoClient.connect(
      config.mongoSRV,
      { useNewUrlParser: true, useUnifiedTopology: true });
  } catch (err) {
    // Brief pause so a down database is not hammered in a tight retry loop.
    await new Promise((resolve) => setTimeout(resolve, 1000))
    return connectToDB()
  }
  // MongoClient.connect resolves with a *client*, not a Db — select the
  // database first (the original code used the 'skyblock' database).
  auctionCollection = client.db('skyblock').collection('auctions');
  console.log('Successful connection to database')
  main() // If the connection had failed, it would have been caught above.
}
connectToDB()

Related

Async function returns 200 without executing

I have a Cloud Function written in Node JS that accesses data from BigQuery, converts this to CSV and exports to a Google Storage bucket.
Currently it executes and returns a 200, but does not run any of the code within my try/catch.
When testing it just returns:
Function execution took x ms. Finished with status code: 200
I've attempted to debug by adding console logs at various points, but it doesn't log anything - it just returns a 200.
// HTTP Cloud Function: exports BigQuery rows as CSV files into a GCS bucket,
// one file per id in the [start, end) range taken from the request body.
exports.run_checks = (req, res) => {
"use strict";
let parsedBody = req.body;
let startCount = parsedBody.start;
let endCount = parsedBody.end;
// NOTE(review): this async IIFE is neither awaited nor returned, so the
// platform considers the handler finished immediately — which is why the
// function reports 200 before any of the code below has run.
(async function () {
try {
for (let i = startCount; i < endCount; i += 1) {
//Exclude overly large files here
if (i != 100) {
const query =
`SELECT *
FROM \`bq_dataset\`
WHERE id_number = ${i}`;
const options = {
query: query,
location: "europe-west2",
};
// Run the query as a BigQuery job and wait for its result rows.
const [job] = await bigquery.createQueryJob(options);
console.log(`Job ${job.id} started.`);
const [rows] = await job.getQueryResults();
let id = rows[0].id;
const createFile = storage.bucket(bucketName).file(`${id}.csv`);
const csv = parse(rows, { fields });
// Feed the CSV text through a PassThrough stream into the GCS file.
const dataStream = new stream.PassThrough();
dataStream.push(csv);
dataStream.push(null);
// Promise wrapper so the loop blocks until this upload finishes.
await new Promise((resolve, reject) => {
console.log(`Writing ${id} to GCS`);
dataStream
.pipe(
createFile.createWriteStream({
resumable: false,
validation: false,
metadata: { "Cache-Control": "public, max-age=31536000" },
})
)
.on("error", (error) => {
console.error("Stream failed", error);
reject(error);
})
.on("finish", () => {
resolve(true);
});
});
}
}
res.status(200).send();
} catch (err) {
res.send(err);
}
})();
};
Your function is not async, so the host has no idea that you are still doing something inside it — it returns without any error.
Modify your arrow function to be async, and there is no need for the IIFE: remove it, or await it — that is also important!
// Fixed version: the handler itself is async, so the platform waits for the
// body (and the response) instead of returning 200 immediately.
exports.run_checks = async (req, res) => {
"use strict";
let parsedBody = req.body;
let startCount = parsedBody.start;
let endCount = parsedBody.end;
try {
for (let i = startCount; i < endCount; i += 1) {
//Exclude overly large files here
if (i != 100) {
const query =
`SELECT *
FROM \`bq_dataset\`
WHERE id_number = ${i}`;
const options = {
query: query,
location: "europe-west2",
};
// Run the query as a BigQuery job and wait for its result rows.
const [job] = await bigquery.createQueryJob(options);
console.log(`Job ${job.id} started.`);
const [rows] = await job.getQueryResults();
let id = rows[0].id;
const createFile = storage.bucket(bucketName).file(`${id}.csv`);
const csv = parse(rows, { fields });
// Feed the CSV text through a PassThrough stream into the GCS file.
const dataStream = new stream.PassThrough();
dataStream.push(csv);
dataStream.push(null);
// Promise wrapper so the loop blocks until this upload finishes.
await new Promise((resolve, reject) => {
console.log(`Writing ${id} to GCS`);
dataStream
.pipe(
createFile.createWriteStream({
resumable: false,
validation: false,
metadata: { "Cache-Control": "public, max-age=31536000" },
})
)
.on("error", (error) => {
console.error("Stream failed", error);
reject(error);
})
.on("finish", () => {
resolve(true);
});
});
}
}
res.status(200).send();
} catch (err) {
// NOTE(review): res.send(err) replies with the default 200 status —
// consider res.status(500) so clients can tell the export failed.
res.send(err);
}
};

how can I use async/await or promises to make my code run synchronously

Whenever I run this code, the content_2 function runs before content_1. The code below runs asynchronously, and the second function depends on a variable set in the first function (shared via "node.js store"), so I need content_2 to wait for content_1 to finish before it starts running — I want them to run sequentially.
// Writes the posted HTML content to a temp file, moves it into ./uploads,
// and streams it into MongoDB GridFS, stashing the resulting _id in `store`.
const content_1 = function main_Content(req, res, callback) {
const assert = require('assert');
const fs = require('fs');
const mongodb = require('mongodb');
const mv = require('mv');
// Random suffix so concurrent uploads of the same Filename don't collide.
var filename = req.body.Filename + Math.ceil((Math.random() * 1000000000000) + 10);
console.log(req.body.Filename)
//CREATE A FILE
// NOTE(review): writeFile is asynchronous and not awaited, so the move below
// can run before the file exists — part of the ordering problem in question.
fs.writeFile(filename + '.html', req.body.Content, (err) => {
if (err) throw err;
console.log('File was created successfully...');
});
//MOVE TO UPLOADS
// NOTE(review): `path` is not required in this function — presumably it is
// in scope from the surrounding module; verify.
const currentPath = path.join(__dirname, "../", filename + ".html");
const destinationPath = path.join(__dirname, "../uploads", filename + ".html");
mv(currentPath, destinationPath, function(err) {
if (err) {
throw err
} else {
console.log("Successfully moved the file!");
}
});
const uri = 'mongodb://localhost:27017';
const dbName = 'registration';
const client = new mongodb.MongoClient(uri);
client.connect(function(error) {
assert.ifError(error);
const db = client.db(dbName);
var bucket = new mongodb.GridFSBucket(db);
//UPLOAD FILE TO DB THROUGH STREAMING
fs.createReadStream('./uploads/' + filename + '.html').
pipe(bucket.openUploadStream(filename + ".html")).
on('error', function(error) {
assert.ifError(error);
}).
on('finish', function(res) {
// Save the GridFS file id so content_2 can reference it later.
var result = res._id
store.set('id', result);
//process.exit(0);
});
});
}
// Reads the GridFS id stored by content_1 and pushes an article entry that
// references it onto the Key document given in the request.
const content_2 = function metaData(req, res, callback) {
const obj = new ObjectId()
var filename = req.body.Filename + Math.ceil((Math.random() * 1000000000000) + 10);
const slice = require('array-slice')
// NOTE(review): this runs before content_1 has finished, so `id` can be
// stale or undefined — the ordering problem described in the question.
var id = store.get('id');
console.log(id)
// Extracts a substring of the stringified id — presumably the hex portion
// of the ObjectId; verify the 14..24 indices against the actual format.
var objID = slice(id, 14, 24)
console.log(objID + '2nd')
Key.findByIdAndUpdate(req.body.id, {$push: {Articles:{Title: req.body.Title, Desc:req.body.Desc, Content: {_id: `ObjectId("${objID}")`}}} }, (err, docs) => {
if(err){
console.log(err)
}else{
console.log('done' + obj)
}
});
}
I will give you two examples. First one will be using promises. Second one will be using async await. The output though is exactly the same.
Supose those three methods. Someone cleans a room, "then" someone checks if the room is clean, "then" a payment is done.
Promises version
// Demo: three sequential steps chained with promises.
// Note cleanRoom starts immediately — a Promise executor runs at construction.
const cleanRoom = new Promise((res, rej) => {
console.log("Cleaning room...");
setTimeout(() => {
console.log("Room clean!");
res();
}, 2000);
});
// Returns a fresh promise per call that resolves after a 2 s checkup.
const cleanCheckup = () => {
return new Promise((res, rej) => {
console.log("Clean checkup...");
setTimeout(() => {
console.log("Checkup complete!");
res();
}, 2000);
});
}
// Resolves with the payment amount after 2 s.
const payMoney = () => {
console.log("Open Wallet!");
return new Promise((res, rej) => {
setTimeout(() => {
res("50€");
}, 2000);
});
}
// Each .then waits for the previous step; the final .then logs "50€".
cleanRoom
.then(cleanCheckup)
.then(payMoney)
.then(console.log);
Async/Await version
// Promise-based sleep so setTimeout can be awaited.
const sleep = mils => {
return new Promise(r => setTimeout(r, mils));
};
// Same three steps as the promise version, written with async/await.
const cleanRoomAW = async () => {
console.log("Cleaning room...");
await sleep(2000);
console.log("Room clean!");
};
const cleanCheckupAW = async () => {
console.log("Clean checkup...");
await sleep(2000);
console.log("Checkup complete!");
};
const payMonney = async () => {
console.log("Open Wallet!");
await sleep(2000);
return "50€";
};
// Awaiting each call makes the steps run strictly one after another.
async function run() {
await cleanRoomAW();
await cleanCheckupAW();
const value = await payMonney();
console.log(value);
};
run();
Please be aware of the helper method sleep, since you can't directly await setTimeout in the browser yet. (In Node.js >= 16, I believe you don't need this helper method.)
You can copy/paste any of the two versions into the browser's console and confirm that both versions run synchronously despite the asynchronous nature of the setTimeout.

(JavaScript) How to return result from another function?

imagem printscreen ---------
Platform: NodeJS
I have two roles in the nodejs project
I'm having trouble inserting the result of the second function into the first function.
How to insert the result of the RUN function within the first function
start (client)?
function start (client)
...
.sendText (message.from, 'Example' + result.rows + 'Text text')
...
function run ()
...
Console.log (result.rows);
...
Full code
'use strict';
const venom = require('venom-bot');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');
// Boot the WhatsApp client and hand it to start().
venom
.create()
.then((client) => start(client))
.catch((erro) => { console.log(erro); });
// Replies to "Vendas" direct messages. The reply is meant to contain the
// query result, but `result` is not defined in this scope — exactly the
// problem the question asks about (run() never hands its result over).
function start(client)
{
client.onMessage((message) =>
{
if (message.body === 'Vendas' && message.isGroupMsg === false)
{ client
.sendText(message.from, 'Example text' + result.rows + 'Etc etc')
.then((result) => {console.log('Result: ', result); })
.catch((erro) => { console.error('Error when sending: ', erro); });
}});
}
//----------------------------------------------------------------
// Runs a test query against Oracle and logs the rows; the connection is
// always closed in the finally block. Nothing is returned, which is why
// start() cannot see the result.
async function run() {
let connection;
try {
connection = await oracledb.getConnection(dbConfig);
const sql =`SELECT 'Testeee' FROM dual`;
let result;
result = await connection.execute(sql);
// NOTE(review): `Console` (capital C) is a ReferenceError — should be console.log.
Console.log(result.rows);
} catch (err) {
console.error(err);
} finally {
if (connection) {
try {
await connection.close();
} catch (err) {
console.error(err);
}
}
}
}
Try making start() async as well and use await.
// Make start() async so it can await the value run() resolves with.
async function start(client){
const result = await run();
// the rest of your code
}
In run(), have the method return the value.
async function run() {
let connection;
try {
connection = await oracledb.getConnection(dbConfig);
const sql =`SELECT 'Testeee' FROM dual`;
return await connection.execute(sql);
} // rest of your code

Async/Await in for loop NodeJS Not blocking the loop execuation

I know that old school for loop works in the traditional way - that it waits for the await to finish getting results.
But in my use case, I need to read a file from local/s3 and process it line by line, and for each line I need to call an External API.
Generally I use await inside the loop because all are running inside a lambda and I don't want to use all memory for running it parallelly.
Here I am reading the file using a stream.on() method, and in order to use await inside that, I need to add async in read method, like so:
stream.on('data',async () =>{
while(data=stream.read()!==null){
console.log('line');
const requests = getRequests(); // sync code,no pblms
for(let i=0;i<requests.length;i++){
const result = await apiCall(request[i);
console.log('result from api')
const finalResult = await anotherapiCall(result.data);
}
}
});
This is working but order in which the lines are processed is not guaranteed. I need all in a sync manner. Any help?
Complete Code
// Reads the SOI file line by line and, for each data line, sends the derived
// XML requests to the LoST server, tallying success/failure counts.
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
// Header/trailer lines (UHL/UTL) are skipped.
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
// NOTE(review): this 'readable' callback is not async, so the `await`s
// below are a syntax error in modern Node — presumably the asker's actual
// code marked it async (which is what breaks the line ordering); confirm.
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
console.log('Read line');
console.log(id);
try {
for (let i = 0;i < XMLRequests.length;i++) {
totalRequestsCount++;
console.log('Sending request');
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log("got response");
const responseObj = await xml2js.
parseStringPromise(response.data);
// An 'errors' key in the parsed XML marks a failed request.
if (Object.keys(responseObj).indexOf('errors') !== -1) {
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
failedRequestsCount++;
} else {
successRequestsCount++;
console.log('Response from the Lost Server');
console.log(response[i].data);
}
}
} catch (err) {
console.log(err);
}
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
});
}
// POSTs one XML payload to the LoST server via the injected axios instance.
// Resolves with the axios response promise, or null when no URL was supplied.
async function sendLoSTRequest (options, params) {
  const { axios } = options;
  const { url, data } = params;
  // Guard clause: without a target URL there is nothing to send.
  if (!url) {
    console.log('URL is not found');
    return null;
  }
  return axios.post(url, data);
}
Code needs to flow like so:
read a line in a sync way
process the line and transform the line into an array of two members
for every member call API and do stuff
once line is complete, look for another line, all done in order
UPDATE: Got a workaround..but it fires stream.end() without waiting stream to finish read
// Workaround version: reads lines synchronously but serialises the async API
// calls through a promise queue so they run in file order.
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const { ERR_DIR, fs, xml2js, LOST_URL, byline, event } = options;
const { key } = params;
const responseObject = {};
let totalRecordsCount = 0;
let totalRequestsCount = 0;
let failedRequestsCount = 0;
let successRequestsCount = 0;
let dataObject = {};
// Tiny FIFO: each call appends `fn` to a single promise chain, so queued
// jobs run one at a time, in the order they were queued.
const queue = (() => {
let q = Promise.resolve();
return fn => (q = q.then(fn));
})();
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
// Header/trailer lines (UHL/UTL) are skipped.
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
// eslint-disable-next-line no-loop-func
queue(async () => {
try {
for (let i = 0;i < XMLRequests.length;i++) {
console.log('Sending request');
console.log(id);
totalRequestsCount++;
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log('got response');
const responseObj = await xml2js.
parseStringPromise(response.data);
// An 'errors' key in the parsed XML marks a failed request.
if (Object.keys(responseObj).indexOf('errors') !== -1) {
// console.log('Response have the error:');
// await handleError(options, { err: responseObj, id });
failedRequestsCount++;
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
} else {
console.log('Response from the Lost Server');
console.log(response[i].data);
successRequestsCount++;
}
}
} catch (err) {
console.log(err);
}
});
}
}
})
// NOTE(review): 'end' fires when the *file* has been read, not when the
// queued API calls have drained — hence the premature summary the asker
// mentions. Awaiting the final queue promise before summarising would fix it.
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
Object.assign(responseObject, {
failedRequestsCount,
successRequestsCount,
totalRecordsCount,
totalRequestsCount
});
});
}
Thank You
The sample code at the top of your question could be rewritten like
// Serialise the per-line async work: queue() appends each job to a single
// promise chain, so jobs run one at a time in the order they were queued.
const queue = (() => {
  let q = Promise.resolve();
  return (fn) => (q = q.then(fn));
})();
stream.on('data', () => {
  // The assignment must be parenthesized: without the parentheses `data`
  // would be set to the boolean result of `stream.read() !== null`
  // (comparison binds tighter than assignment).
  let data;
  while ((data = stream.read()) !== null) {
    console.log('line');
    const requests = getRequests(); // sync code,no pblms
    queue(async () => {
      for (let i = 0; i < requests.length; i++) {
        // `requests` (plural) — the original referenced an undefined `request[i]`.
        const result = await apiCall(requests[i]);
        console.log('result from api');
        const finalResult = await anotherapiCall(result.data);
      }
    });
  }
});
Hopefully that will be useful for the complete code
If anyone want a solution for synchronisely process the file, ie, linebyline read and execute some Async call, it's recommended to use inbuilt stream transform. There we can create a transform function and return a callback when finishes.
That's will help of any one face this issues.
Through2 is a small npm library that also can be used for the same.

How to achieve recursive Promise calls in Node.js

I am calling an API where I can only fetch 1000 records per request,
I was able to achieve this using recursion.
I am now trying to achieve the same using promises, I am fairly new to Node.js and JavaScript too.
I tried adding the recursion code in an if else block but failed
var requestP = require('request-promise');
const option = {
url: 'rest/api/2/search',
json: true,
qs: {
//jql: "project in (FLAGPS)",
}
}
const callback = (body) => {
// some code
.
.
.//saving records to file
.
//some code
if (totlExtractedRecords < total) {
requestP(option, callback).auth('api-reader', token, true)
.then(callback)
.catch((err) => {
console.log('Error Observed ' + err)
})
}
}
requestP(option).auth('api-reader', token, true)
.then(callback)
.catch((err) => {
console.log('Error Observed ' + err)
})
I want to execute the method using promise and in a synchronous way,
i.e. I want to wait until the records are all exported to a file and continue with my code
I think it's better to create your own promise and simply resolve it when you're done with your recursion. Here's a simple example just so you can understand the approach.
// Performs one unit of "work" (a 1 s delay), then either recurses for the
// next unit or — after five iterations — resolves the externally supplied
// promise so the caller knows the whole job is done.
async function myRecursiveLogic(resolveMethod, ctr = 0) {
  // This is where you do the logic
  await new Promise((res) => setTimeout(res, 1000)); // wait - just for example
  ctr++;
  console.log('counter:', ctr);
  if (ctr !== 5) {
    await myRecursiveLogic(resolveMethod, ctr); // recursion - continue work
  } else {
    resolveMethod(); // Work done, resolve the promise
  }
}
// Run the method with a single promise
new Promise((res) => myRecursiveLogic(res)).then(r => console.log('done'));
Here's a clean and nice solution using the latest NodeJS features.
The recursive function will continue executing until a specific condition is met (in this example asynchronously getting some data).
// Promisified setTimeout: await sleep(ms) pauses for ms milliseconds.
const sleep = require('util').promisify(setTimeout)
// Polls getDataViaPromise() once a second until it yields a truthy value,
// then resolves with that value. (Loop form of the original recursion —
// identical behaviour, no growing call chain.)
const recursive = async () => {
  while (true) {
    await sleep(1000)
    const data = await getDataViaPromise() // you can replace this with request-promise
    if (data) {
      return data // job done, return the data
    }
  }
}
The recursive function can be used as follows:
// Awaits the recursive poll and then works with the final data.
const main = async () => {
const data = await recursive()
// do something here with the data
}
Using your code, I'd refactored it as shown below. I hope it helps.
const requestP = require('request-promise');
const option = {
url: 'rest/api/2/search',
json: true,
qs: {
//jql: "project in (FLAGPS)",
}
};
/*
NOTE: Add async to the function so you can use await inside the function
*/
// Saves one page of results, then keeps requesting pages (recursively via
// itself) until all records have been extracted.
const callback = async (body) => {
// some code
//saving records to file
//some code
try {
const result = await requestP(option, callback).auth('api-reader', token, true);
if (totlExtractedRecords < total) {
return callback(result);
}
return result;
} catch (error) {
// The catch binding is `error` — the original logged `err`, which is
// undefined here and would itself throw a ReferenceError.
console.log('Error Observed ' + error);
return error;
}
}
Created this code using feed back from Amir Popovich
// npm package names are lowercase and case-sensitive on case-sensitive file
// systems — the published package is 'request-promise', not 'Request-Promise'.
const rp = require('request-promise')
const fs = require('fs')
// Jira returns at most `pageSize` issues per request; qs.startAt pages through.
const pageSize = 200
const options = {
url: 'https://jira.xyz.com/rest/api/2/search',
json: true,
qs: {
jql: "project in (PROJECT_XYZ)",
maxResults: pageSize,
startAt: 0,
fields: '*all'
},
auth: {
user: 'api-reader',
pass: '<token>',
sendImmediately: true
}
}
// Placeholder for persisting a single issue to disk.
const updateCSV = (elment) => {
//fs.writeFileSync('issuedata.json', JSON.stringify(elment.body, undefined, 4))
}
// Fetches one page of issues, then recurses for the next page until the
// reported total is reached; settles the promise created by the caller.
async function getPageinatedData(resolve, reject, ctr = 0) {
var total = 0
let body
try {
body = await rp(options)
} catch (error) {
// In the original, `return` sat inside the .catch() *callback*, so it only
// left that callback — the function then carried on and could even call
// resolve() after the promise had already been rejected. Returning here
// actually stops the recursion on error.
reject(error)
return
}
let a = body.issues
console.log(a)
a.forEach(element => {
console.log(element)
//updateCSV(element)
});
total = body.total
// Advance the paging cursor for the next request.
ctr = ctr + pageSize
options.qs.startAt = ctr
if (ctr >= total) {
resolve();
} else {
await getPageinatedData(resolve, reject, ctr);
}
}
new Promise((resolve, reject) => getPageinatedData(resolve, reject))
.then(() => console.log('DONE'))
.catch((error) => console.log('Error observed - ' + error.name + '\n' + 'Error Code - ' + error.statusCode));

Categories

Resources