Wait for event in node js - javascript

How can I wait for an event in Node.js? I'm developing a BPMN workflow and I have to execute the process step by step. The server is composed of several scripts, and each script is an event, like this:
'use strict';
const Bpmn = require('bpmn-engine');

const processXml = `
<?xml version="1.0" encoding="UTF-8"?>
<definitions xmlns="http://www.omg.org/spec/BPMN/20100524/MODEL"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <process id="theProcess" isExecutable="true">
    <startEvent id="start" />
    <exclusiveGateway id="decision" />
    <endEvent id="RFID_ERRATO" />
    <endEvent id="RFID=M1" />
    <sequenceFlow id="flow1" sourceRef="start" targetRef="decision" />
    <sequenceFlow id="flow2" sourceRef="decision" targetRef="RFID_ERRATO">
      <conditionExpression xsi:type="tFormalExpression" language="JavaScript"><![CDATA[
        this.variables.input != "M1"
      ]]></conditionExpression>
    </sequenceFlow>
    <sequenceFlow id="flow3" sourceRef="decision" targetRef="RFID=M1">
      <conditionExpression xsi:type="tFormalExpression" language="JavaScript"><![CDATA[
        this.variables.input = "M1"
      ]]></conditionExpression>
    </sequenceFlow>
  </process>
</definitions>`;

const engine = new Bpmn.Engine({
  name: 'exclusive gateway example1',
  source: processXml
});

engine.once('end', (definition) => {
  if (definition.getChildActivityById('RFID_ERRATO').taken) {
    throw new Error('<RFID_ERRATO> was not supposed to be taken, check your input');
  }
  console.log('TAKEN RFID=M1', definition.getChildActivityById('RFID=M1').taken);
});

function sendEvent(value) {
  engine.execute({
    variables: {
      input: value
    }
  }, (err, definition) => {
    console.log(engine.getState());
  });
}

var i = 0;

// hello.js
module.exports = (req, res, next) => {
  // res.header('X-Hello', 'World')
  // console.log(req);
  if (!i++) {
    sendEvent(req.body.rfid);
  }
  console.log(engine.getState());
  next();
};
(I'm using these modules: https://www.npmjs.com/package/bpmn-engine and https://www.npmjs.com/package/json-server.) The server is started from the command line with "json-server db.json --middlewares ./script1.js ./script2.js", and then I send a single POST request with the data. The problem is that all the events respond to that first request, one after another. I want the first script/event to handle the first request while the second event waits; when the second request is sent, the next script should handle it, and so on. Is this possible?

To wait and then do something, you need to run the code asynchronously, and there are several good approaches for that.
The most common is the promise: a promise gives you the result, or the error, of an asynchronous operation. Basic example (from the Mozilla Developer docs):
let myFirstPromise = new Promise((resolve, reject) => {
  // We call resolve(...) when what we were doing asynchronously was successful, and reject(...) when it failed.
  // In this example, we use setTimeout(...) to simulate async code.
  // In reality, you will probably be using something like XHR or an HTML5 API.
  setTimeout(function () {
    resolve("Success!"); // Yay! Everything went well!
  }, 250);
});

myFirstPromise.then((successMessage) => {
  // successMessage is whatever we passed in the resolve(...) function above.
  // It doesn't have to be a string, but if it is only a succeed message, it probably will be.
  console.log("Yay! " + successMessage);
});
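If you prefer, the same promise can be consumed with async/await, which is just another way of writing the .then() above:
async function run() {
  // pauses here until myFirstPromise resolves, then continues with its value
  const successMessage = await myFirstPromise;
  console.log("Yay! " + successMessage);
}
run();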
The "thing" in asynchronous is that we'll do something and then we'll do something, this then is doing what we need and don't have in a sync code.
There's a lot of npm packages that can help us to do that too, like async-waterfall that will run the functions in series, example from their github:
/* basic - no arguments */
waterfall(myArray.map(function (arrayItem) {
  return function (nextCallback) {
    // same execution for each item, call the next one when done
    doAsyncThingsWith(arrayItem, nextCallback);
  };
}));

/* with arguments, initializer function, and final callback */
waterfall([function initializer (firstMapFunction) {
  firstMapFunction(null, initialValue);
}].concat(myArray.map(function (arrayItem) {
  return function (lastItemResult, nextCallback) {
    // same execution for each item in the array
    var itemResult = doThingsWith(arrayItem, lastItemResult);
    // results carried along from each to the next
    nextCallback(null, itemResult);
  };
})), function (err, finalResult) {
  // final callback
});
It runs an Array.map of functions in series, avoiding a familiar enemy of async code: callback hell.
So asynchronous code does let you wait for an event, because it lets you do something and then do another thing with the result.
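For the specific case of waiting for a single event on an EventEmitter (which is essentially what the engine and the middleware are doing), Node's built-in events.once() wraps that pattern in a promise. A minimal sketch, not tied to the bpmn-engine API:
const { EventEmitter, once } = require('events');

async function main() {
  const emitter = new EventEmitter();
  setTimeout(() => emitter.emit('ready', 'payload'), 250); // simulate an event arriving later
  const [payload] = await once(emitter, 'ready');          // resolves when 'ready' is emitted
  console.log('Got event with:', payload);
}
main();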

Related

Recursion & Async function does not respect/wait for promise

Situation:
I have a function, load_content, which runs at the start of my code:
async function load_content() {
  console.log("I GOT HERE 1");
  await load_js_files("./cmds/", "commands");
  console.log("I GOT HERE 2");
  await load_js_files("./events/", "events");
}
This function calls load_js_files twice. load_js_files is a recursive function that calls itself for each subdirectory of the specified directory, 'requiring' each file it finds and doing different things depending on whether type is "commands" or "events".
The function load_js_files looks like:
function load_js_files(dir, type) {
  fs.readdir(dir, (e, files) => {
    if (e) console.error(e);
    let jsfiles = files.filter(f => f.split(".").pop() === "js");
    if (jsfiles.length <= 0) {
      console.log(`No commands to load from ${dir}!`);
      return;
    }
    for (const file of files) {
      if (fs.lstatSync(dir + file).isDirectory()) {
        load_js_files(dir + file + "/", type);
      }
    }
    if (type === "commands") {
      console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} commands from ${dir} ...`);
      jsfiles.forEach((f, i) => {
        let command = require(`${dir}${f}`);
        console.log(`${i + 1}: ${f} loaded!`);
        bot.commands.set(command.info.name, command);
      });
    } else if (type === "events") {
      console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} events from ${dir} ...`);
      jsfiles.forEach((f, i) => {
        let event = require(`${dir}${f}`);
        console.log(`${i + 1}: ${f} loaded!`);
        let commands = [];
        for (const cmd of bot.commands) {
          if (cmd[1].data) commands.push(cmd[1].data.toJSON());
        }
        if (event.once) {
          bot.once(event.name, (...args) => event.execute(...args, commands));
        } else {
          bot.on(event.name, (...args) => event.execute(...args, commands));
        }
      });
    } else {
      console.log(log_Red, "FUNCTION 'load_js_files' CALLED WITH INCORRECT 'type'.");
    }
  });
  return new Promise((resolve, reject) => resolve("DONE"));
}
I would expect the events in load_content to occur in this order:
1. console logs I GOT HERE 1
2. load_js_files runs with the commands parameter (granted I haven't solved the recursion promises yet, it should run at least once)
3. console logs I GOT HERE 2
4. load_js_files runs again, but with the events parameter.
Issue:
When load_js_files runs with type = "events", the variable bot.commands is undefined. bot.commands is assigned its values during step 2 above, inside the load_js_files call for commands.
As far as I can debug, the initial function load_content does not respect (my understanding of) async/await, so I assume I am doing something incorrectly with the promises.
In my console, the two console.log statements execute immediately, before the function has finished:
I GOT HERE 1
I GOT HERE 2
Loading 3 commands from ./cmds/ ...
1: createtables.js loaded!
2: ping.js loaded!
3: sqltest.js loaded!
Loading 1 events from ./events/ ...
1: ready.js loaded!
Loading 1 commands from ./cmds/settings/ ...
1: set.js loaded!
What I've tried:
I've tried the code noted above. I've also tried wrapping the second run of load_js_files in a .then(), tried a callback function, and tried nesting Promises, but I run into issues because load_js_files calls itself recursively.
I'm having a hard time understanding whether these Promises can work with this kind of recursion (all recursive calls of load_js_files must finish before the second load_js_files is called within load_content).
Bonus points:
Bonus points if you can help me understand promises within a recursive function. I've read
https://blog.scottlogic.com/2017/09/14/asynchronous-recursion.html
https://www.bennadel.com/blog/3201-exploring-recursive-promises-in-javascript.htm
and
https://medium.com/@wrj111/recursive-promises-in-nodejs-769d0e4c0cf9
But it's not quite getting through.
Attempt at implementing David's answer:
This results in an error, I believe because fs.readdir(dir) (the callback form) still requires a callback:
Error: TypeError [ERR_INVALID_CALLBACK]: Callback must be a function. Received undefined
async function load_js_files_async_test(dir, type) {
  const files = fs.readdir(dir);
  for (const file of files) {
    const file_info = await lstat(dir + file);
    if (file_info.isDirectory()) {
      await load_jsfiles_async_test(dir + file + "/", type);
    } else {
      console.log("Thanks David!");
    }
  }
}
does not respect/wait for promise
Sure it does. This is the Promise it's awaiting:
new Promise((resolve,reject) => resolve("DONE"))
That Promise of course finishes very quickly (and synchronously) and then the code moves on to the next task. But what isn't being awaited anywhere in the code is this asynchronous operation:
fs.readdir(dir, (e, files) => {
  //...
});
This call to readdir is an asynchronous operation, and as such that callback function won't be invoked until after the current thread finishes everything it's doing. Which includes the "promises" being awaited (which don't do anything asynchronous) as well as the console.log statements and the next invocation of load_js_files.
Fortunately, Node provides Promise-based versions of these operations as well.
Simplifying the original code a bit, imagine instead this structure:
async function load_js_files(dir, type) {
  const files = await readdir(dir);
  for (const file of files) {
    const fileInfo = await lstat(dir + file);
    if (fileInfo.isDirectory()) {
      await load_js_files(dir + file + "/", type);
    }
  }
  // etc.
}
As you can see, this "reads" a lot more like synchronous operations. The idea here is to remove the usage of callback functions, which are essentially causing you confusion and making "awaiting" much more difficult. Now all of the logic in the load_js_files function is directly in the load_js_files function, not in other anonymous callback functions. And that logic proceeds, step by step, awaiting each asynchronous operation.
Then you can await the calls to load_js_files as expected.
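For completeness, the promise-based versions referred to above live on Node's fs.promises API; a minimal sketch of the imports and the calling code (the directory names are just the ones from the question):
// Promise-based fs API (Node 10+); no callbacks involved
const { readdir, lstat } = require('fs').promises;

async function load_content() {
  await load_js_files("./cmds/", "commands");  // fully finishes before the next line runs
  await load_js_files("./events/", "events");
}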
The function fs.readdir is a non-blocking function, which means the code you put inside its callback is not being awaited. You could try fs.readdirSync instead and remove your return new Promise().
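A rough sketch of what that suggestion could look like, keeping only the directory-scanning part of the original function (the loading logic stays the same):
const fs = require('fs');

function load_js_files(dir, type) {
  const files = fs.readdirSync(dir);            // blocks until the directory listing is ready
  const jsfiles = files.filter(f => f.split(".").pop() === "js");
  for (const file of files) {
    if (fs.lstatSync(dir + file).isDirectory()) {
      load_js_files(dir + file + "/", type);    // recursion finishes before we return
    }
  }
  // ...same "commands" / "events" loading logic as before, no Promise wrapper needed
}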

node async/await not working for me (when using Postgres / Node - working with DB updates before going to next call) [duplicate]

This question already has answers here:
Using async/await with a forEach loop
(33 answers)
Closed 3 years ago.
await is not blocking as expected when a block of code updates the db (using postgres / node).
https://node-postgres.com
I have a list of async function calls; each call updates a database, and each subsequent call works on data updated by the previous call.
There are about eight calls in a row, and each call must update the complete set of data it is working with, 100% to completion, before going to the next.
I tried to make everything not async, but it appears I am forced to make everything async/await because of the library I am using (postgres / node).
Each function call must complete 100% before going on to the next function call, because the next step does a select on rows where a field is not null (where the previous step fills in a value).
I have an await in front of each call, and each call does something (see the code below):
1. load the db from a csv,
2. then select all the rows just inserted, call an API, and update the database,
3. and so on.
But at one point, when the next function executes, NONE of the rows have been updated (as I trace through and verify, a SQL statement returns nothing back); the code seems to pass right through to the second function call without blocking or honoring the await, and completes its code block.
If I comment out some of the latter rows (dependent on the previous), and let the program run to completion, the database gets updated.
There is nothing functionally wrong with the code, everything works, just not from beginning to completion.
After running two function calls at the beginning, letting that run, I can then comment out those rows, uncomment the later rows in the flow, and run again, and everything works as expected, but I cannot run to completion with both uncommented.
What can I do to make sure each function call completes 100%, has all updates completed in the database, before going to the next step?
async/await is not working for me.
This is not pseudo-code; it's the actual, executing code I am working with (only the function names have been changed), cut-and-pasted directly from my IDE.
// these are functions I call below (each in their own .js)
const insert_rows_to_db_from_csv = require('./insert_rows_to_db_from_csv')
const call_api_using_rows_from_function_above = require('./call_api_using_rows_from_function_above')
const and_so_on = require('./and_so_on')
const and_so_on_and_on = require('./and_so_on_and_on')
const and_so_on_and_on_and_on = require('./and_so_on_and_on_and_on')
// each of the above exports a main() function where I can call func.main(), just like this one defined below (this is my main() entry point)
module.exports = {
  main: async function (csvFilePath) {
    console.log('service: upload.main()')
    try {
      const csvList = []
      let rstream = fs.createReadStream(csvFilePath)
        .pipe(csv())
        .on('data', (data) => csvList.push(data))
        .on('end', async () => {
          let num_rows = csvList.length
          // step one (if I run these two, with the step two calls below commented out, this works)
          await insert_rows_to_db_from_csv.main(csvList);
          await call_api_using_rows_from_function_above.main();
          // step two
          // blows up here, on the next function call,
          // no rows selected in the sql statements; must comment out, let the above run to
          // completion, then comment out the rows above, and let these run separately
          await work_with_rows_updated_in_previous_call_above.main(); // sets
          await and_so_on.main();
          await and_so_on_and_on.main();
          await and_so_on_and_on_and_on.main();
        })
    } catch (err) {
      console.log(err.stack)
    } finally {
    }
  }
};
here is the one liner I am using to call the insert/update to the DB:
return await pool.query(sql, values);
that's it, nothing more. This is from using:
https://node-postgres.com/
npm install pg
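For context, the pool behind that one-liner would typically be set up once, along the lines of the sketch below; the connection details here are placeholders, not from the original post:
const { Pool } = require('pg');

// connection settings are illustrative only
const pool = new Pool({ connectionString: process.env.DATABASE_URL });

async function run(sql, values) {
  return await pool.query(sql, values); // resolves once Postgres has executed the statement
}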
PART 2 - continuing on.
I think the problem might be here. This is where I do each API call, then the insert (which the next function call depends on); there is some code smell here that I can't sort out.
processBatch(batch) is called, which calls the API, gets a response back, and then within there calls handleResponseDetail(response), where the insert happens. I think the problem is here, if there are any ideas?
this is a code block inside:
await call_api_using_rows_from_function_above.main();
It completes with no errors, inserts rows, and commits; then the next function is called, and that next function finds none of the rows inserted here. But the await on that module's entire main() blocks and waits, so I don't understand.
/**
 * API call, and within it call handleResponseDetail which does the DB insert.
 * @param batch
 * @returns {Promise<*>}
 */
async function processBatch(batch) {
  console.log('Processing batch');
  return await client.send(batch).then(res => {
    return handleResponseDetail(res);
  }).catch(err => handleError(err));
}

// should this be async?
function handleResponseDetail(response) {
  response.lookups.forEach(async function (lookup) {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup);
    }
  });
}
Given the code block from your Part 2 edit, the problem is now clear: all of your insert()s are being scheduled outside of the blocking context of the rest of your async/await code! This is because of that .forEach, see this question for more details.
I've annotated your existing code to show the issue:
function handleResponseDetail(response) { // synchronous function
  response.lookups.forEach(async function (lookup) { // asynchronous function
    // these async functions all get scheduled simultaneously
    // without waiting for the previous one to complete - that's why you can't use forEach like this
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup); // this ONLY blocks the inner async function, not the outer `handleResponseDetail`
    }
  });
}
Here is a fixed version of that function which should work as you expect:
async function handleResponseDetail(response) {
  for (const lookup of response.lookups) {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup); // blocks handleResponseDetail until done
    }
  }
}
Alternatively, if the order of insertion doesn't matter, you can use Promise.all for efficiency:
async function handleResponseDetail(response) {
  await Promise.all(response.lookups.map(async lookup => {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup);
    }
  })); // waits until all insertions have completed before returning
}
To reiterate, you cannot easily use .forEach() with async/await because .forEach() simply calls the given function for each element of the array synchronously, with no regard for awaiting each promise before calling the next. If you need the loop to block between each element, or to wait for all elements to complete processing before returning from the function (this is your use case), you need to use a different for loop or alternatively a Promise.all() as above.
What your main function currently does is merely create the stream, assign the listeners, and return instantly. It does not wait for the listeners to resolve, as you are trying to have it do.
You need to extract your file-reading logic into another function that returns a Promise which resolves only when the entire file has been read, and then await that Promise inside main:
function getCsvList(csvFilePath) {
  return new Promise((resolve, reject) => {
    const csvList = []
    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('data', (data) => csvList.push(data))
      .on('end', () => {
        resolve(csvList)
      })
      .on('error', (e) => reject(e))
  })
}
module.exports = {
  main: async function (csvFilePath) {
    try {
      const csvList = await getCsvList(csvFilePath)
      await insert_rows_to_db_from_csv.main(csvList);
      await call_api_using_rows_from_function_above.main();
      await work_with_rows_updated_in_previous_call_above.main();
      await and_so_on.main();
      await and_so_on_and_on.main();
      await and_so_on_and_on_and_on.main();
    } catch (err) {
      console.log(err.stack)
    } finally {
    }
  }
};

Run async code from command line, node js

I have a function that generates some test data and inserts it to a mongodb:
'use strict';
const CvFaker = require('./cv-faker');
const mongoose = require('mongoose');
require('../models/cv_model.js');

module.exports.init = function () {
  var cvfaker = new CvFaker();
  cvfaker.genCvs(100);
  mongoose.model('cv').create(cvfaker.cvs, (err, createdCvs) => {
    if (err) {
      console.log('something went wrong');
    }
  })
};
I want to execute this code from the command line:
node -e 'require("./create-mock-db").init()'
The function executes, but it does not wait for the function to complete since it is async. How do I make it wait for the function to complete?
This is not working either:
module.exports.init = function(cb){ ...
..
cb();
node -e 'require("./create-mock-db").init(function(){})'
As this answer might come up for more people…
// test.js
const request = require('request');
const rp = require('request-promise');

const demo = module.exports.demo = async function () {
  try {
    const res = await rp.post({
      uri: 'https://httpbin.org/anything',
      body: { hi: 'there', },
    }, function (error, response, body) {
      return error ? error : body;
    })
    console.log(res)
    return res;
  }
  catch (e) {
    console.error(e);
  }
};
Call it like this:
$ node -e 'require("./test").demo()'
Sidenote:
it does not wait for the function to complete since it is async
It's not async. You might call asynchronous functions, but you are not treating them as such and not awaiting any result.
The node process will not exit until the event queue is empty. The event loop uses the event queue to make asynchronous execution possible.
It's pretty simple to verify that this is not an issue with executing asynchronous code.
node -e "setTimeout(() => console.log('done'), 5000)"
This example takes 5 seconds to run, as you would expect.
The problem with your code is that you never establish a connection with the database. The model.create method doesn't do anything until there is a connection, therefore nothing ever gets queued and the process is free to exit.
This means your code needs to change to do two things (see the sketch after this list):
1. Connect to the database so that the model.create method can execute.
2. Disconnect from the database when model.create is complete so the process is free to exit.
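A minimal sketch of what init() could look like with that connect/disconnect cycle; the connection string is a placeholder and the promise-returning mongoose calls are an assumption about your mongoose version:
'use strict';
const mongoose = require('mongoose');
const CvFaker = require('./cv-faker');
require('../models/cv_model.js');

module.exports.init = async function () {
  await mongoose.connect('mongodb://localhost:27017/mydb'); // 1. connect first (URI is illustrative)
  const cvfaker = new CvFaker();
  cvfaker.genCvs(100);
  await mongoose.model('cv').create(cvfaker.cvs);           // runs only once connected
  await mongoose.disconnect();                              // 2. let the process exit cleanly
};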
To add to Kaiser's answer, if you are on Windows using cmd, the single/double quotes are important. Put the double quotes on the outside, i.e.
node -e "require('./test').demo()"

bookshelf.js locking transactions

Is it possible to create atomic database transactions with Bookshelf? I'm having a problem with duplicates in the database. The problematic code is below:
bookshelf.transaction(function (t) {
  var modelLocation = new Models.Location({'name': event.venue});
  modelLocation.fetch({transacting: t})
    .then(function (fetchedLocation) {
      if (!fetchedLocation) {
        modelLocation.save(null, {transacting: t}).then(function (savedModel) {
          t.commit(savedModel)
        }).catch(function (err) {
          t.rollback(err)
        });
      }
      else {
        t.commit(fetchedLocation)
      }
    })
})
I call the method containing this code almost simultaneously and asynchronously 20 times. Of these 20 calls, 5 carry duplicate datasets, which results in around 2-3 duplicates in the database. The current workaround is to wrap the whole thing in a setTimeout with a random timeout between 0 and 10 seconds, which almost never gives me duplicates, but that is obviously not a production-ready solution.
OK, so in the end I decided to go with the async.js library and its queue.
The queue guarantees that at most n async tasks are executed concurrently; in this case, 1.
I made a module which exports a queue instance so I can use it across multiple modules. It simply waits for the promise to fulfill.
var async = require('async');

module.exports = async.queue(function (task, callback) {
  task().then(function () {
    callback();
  });
}, 1);
Then in the module, where I need an "atomic" transaction I have the following code:
var queue = require('./transactionQueue');
...
...
queue.push(function () {
  return bookshelf.transaction(function (t) {
    var modelLocation = new Models.Location({'name': event.venue});
    return modelLocation
      .fetch({transacting: t})
      .then(function (fetchedLocation) {
        if (!fetchedLocation) {
          return modelLocation
            .save(null, {transacting: t});
        }
      });
  });
});
It's important to wrap the transaction in a function so it won't get executed right away; see the sketch below.
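To illustrate why the wrapper matters, here is a tiny sketch with a plain promise-returning job standing in for the transaction (doWork is purely illustrative):
var queue = require('./transactionQueue'); // the async.queue module shown above

function doWork() {
  return new Promise(function (resolve) {
    setTimeout(resolve, 100); // stand-in for a real bookshelf transaction
  });
}

queue.push(doWork);      // correct: the queue worker calls task() only when a slot is free
// queue.push(doWork()); // wrong: runs immediately, defeating the one-at-a-time queue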
Since Bookshelf transactions are promises, you do not need to explicitly call commit() or rollback(). Just let the fulfilled promise commit itself, or force a rollback by throwing an exception.
In your code there was apparently a small bug that could be causing the trouble: an argument was missing from the fetch()'s then() -- this argument is the result of the fetch() invocation, an instance if the object was found or null if not.
bookshelf.transaction(function (t) {
  var modelLocation = new Models.Location({'name': event.venue});
  return modelLocation
    .fetch()
    .then(function (fetchedLocation) {
      if (!fetchedLocation) {
        modelLocation
          .save(null, {transacting: t});
      }
    });
});
I am not able to test that now, but I hope it helps.

How to ensure asynchronous code is executed after a stream is finished processing?

I have a stream that I process by listening for the data, error, and end events, and I call a function to process each data event. Naturally, the function processing the data calls other callbacks, making it asynchronous. So how do I start executing more code once the data in the stream has been processed? Listening for the end event on the stream does NOT mean the asynchronous data-processing functions have finished.
How can I ensure that the stream data processing functions are finished when I execute my next statement?
Here is an example:
function updateAccountStream (accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
  var self = this;
  var promises = [];
  accountStream
    .on('data', function (account) {
      migrateAccount.bind(self)(account, finishMigration);
    })
    .on('error', function (err) {
      return console.log(err);
    })
    .on('end', function () {
      console.log("Finished updating account stream (but finishMigration is still running!!!)");
      callThisOnlyAfterAllAccountsAreMigrated() // finishMigration is still running!
    });
}

var migrateAccount = function (oldAccount, callback) {
  executeSomeAction(oldAccount, function (err, newAccount) {
    if (err) return console.log("error received:", err);
    return callback(newAccount);
  });
}

var finishMigration = function (newAccount) {
  // some code that is executed asynchronously...
}
How do I ensure that callThisOnlyAfterAllAccountsAreMigrated is called AFTER the stream has been processed?
Can this be done with promises? Can it be done with through streams? I am working with Nodejs, so referencing other npm modules could be helpful.
As you said, listening for the end event on the stream is useless on its own. The stream doesn't know or care what you're doing with the data in your data handler, so you would need to write some code to keep track of your own migrateAccount state.
If it were me, I would rewrite this whole section. If you use the readable event with .read() on your stream, you can read as many items at a time as you feel like dealing with. If that's one, no problem. If it's 30, great. The reason you do this is so that you won't overrun whatever is doing work with the data coming from the stream. As-is right now, if accountStream is fast, your application will undoubtedly crash at some point.
When you read an item from a stream and start work, take the promise you get back (use Bluebird or similar) and throw it into an array. When the promise is resolved, remove it from the array. When the stream ends, attach a .done() handler to .all() (basically making one big promise out of every promise still in the array).
You could also use a simple counter for jobs in progress.
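A rough sketch of that "collect the promises, wait for all of them on end" idea, using native Promises (Bluebird's .all()/.done() would look similar); the way migrateAccount is wrapped here is illustrative only:
function updateAccountStream(accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
  var pending = [];
  accountStream
    .on('data', function (account) {
      // track each migration as a promise that resolves when its callback fires
      pending.push(new Promise(function (resolve) {
        migrateAccount(account, function (newAccount) {
          finishMigration(newAccount);
          resolve();
        });
      }));
    })
    .on('error', function (err) {
      console.log(err);
    })
    .on('end', function () {
      // 'end' only says the stream is done; the work may still be running,
      // so wait for every tracked promise before firing the final callback
      Promise.all(pending).then(callThisOnlyAfterAllAccountsAreMigrated);
    });
}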
Using a through stream (the npm through2 module), I solved this problem using the following code that controls the asynchronous behaviour:
var through = require('through2').obj;

function updateAccountStream (accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
  var self = this;
  var promises = [];
  accountStream.pipe(through(function (account, _, next) {
      migrateAccount.bind(self)(account, finishMigration, next);
    }))
    .on('data', function (account) {
    })
    .on('error', function (err) {
      return console.log(err);
    })
    .on('end', function () {
      console.log("Finished updating account stream");
      callThisOnlyAfterAllAccountsAreMigrated();
    });
}

var migrateAccount = function (oldAccount, callback, next) {
  executeSomeAction(oldAccount, function (err, newAccount) {
    if (err) return console.log("error received:", err);
    return callback(newAccount, next);
  });
}

var finishMigration = function (newAccount, next) {
  // some code that is executed asynchronously, but using the 'next' callback when migration is finished...
}
It is a lot easier when you handle streams via promises.
Copied from here, an example that uses spex library:
var spex = require('spex')(Promise);
var fs = require('fs');

var rs = fs.createReadStream('values.txt');

function receiver(index, data, delay) {
  return new Promise(function (resolve) {
    console.log("RECEIVED:", index, data, delay);
    resolve(); // ok to read the next data;
  });
}

spex.stream.read(rs, receiver)
  .then(function (data) {
    // streaming successfully finished;
    console.log("DATA:", data);
  }, function (reason) {
    // streaming has failed;
    console.log("REASON:", reason);
  });
