I am new to Node development.
I am having trouble understanding the asynchronous nature of JS and Node. I am building a microservices backend web server, running Express in the gateway service, which translates REST requests into a series of messages published over RabbitMQ (using the amqplib asynchronous messaging module). Other services can, in turn, subscribe to these messages, process the request, and then respond.
My service which processes the asynchronous request from the gateway looks like this:
amqp.connect('amqp://172.17.0.2', function(err, conn) {
    console.log("connection created");
    conn.createChannel(function(err, ch) {
        console.log("channel created");
        var exchange = 'main';
        ch.assertExchange(exchange, 'topic', {durable: true});
        ch.assertQueue('', {exclusive: true}, function(err, q) {
            console.log(' [*] Waiting for logs. To exit press CTRL+C');
            ch.bindQueue(q.queue, exchange, "VENUE_TOPIC.PENDING_STATUS.*.*");
            ch.consume(q.queue, function(msg) {
                console.log(" [x] %s:'%s'", msg.fields.routingKey, msg.content.toString());
                var pending_event = JSON.parse(msg.content.toString());
                console.log(pending_event.payload.id == 2);
                console.log(pending_event.payload.id);
                if (pending_event.payload.id == 1) {
                    var venue = getVenueByID(pending_event.payload.id);
                    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
                    var key = approved_event.getMetaAsTopics();
                    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
                    ch.assertExchange(exchange, 'topic', {durable: true});
                    ch.publish(exchange, key, msg, {persistent: true});
                    console.log(" [x] Sent '%s'", msg);
                } else if (pending_event.payload.id == 2) {
                    sleep(10000); // this function checks the OS's clock to count time in ms
                    var venue = getVenueByID(pending_event.payload.id);
                    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
                    var key = approved_event.getMetaAsTopics();
                    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
                    ch.assertExchange(exchange, 'topic', {durable: true});
                    ch.publish(exchange, key, msg, {persistent: true});
                    console.log(" [x] Sent '%s'", msg);
                }
            }, {noAck: true});
        });
    });
});
Let's say I have 2 requests: one takes a long time to complete, and the other is shorter. The longer request comes in before the shorter one.
In my example the long process is ID === 2 and the short process is ID === 1.
Now, if I send a request where ID is 2 and then immediately send a request where ID is 1, I have to wait 10 seconds for the first to complete, and only then does the other complete.
I cannot figure out whether it is possible to process both requests concurrently, without the long process blocking the short process until it completes.
I cannot comment since I don't have enough SO reputation, but I'll try to provide as much insight as possible.
Assuming sleep is this npm package, this is expected, so-called blocking behavior. If you only make non-blocking calls (like web requests) inside your consume function you will be fine, and Node will be able to consume further messages while those calls are in flight. However, if you need to do blocking/CPU-heavy work and still be able to process incoming messages, you will need to offload that work to a worker (a sketch of that follows the example below).
Try this code instead, it will "sleep" without blocking:
setTimeout(() => {
    var venue = getVenueByID(pending_event.payload.id);
    const approved_event = new Event("VENUE_TOPIC", "APPROVED_STATUS", false, "READ_METHOD", {"venue": venue});
    var key = approved_event.getMetaAsTopics();
    var msg = Buffer.from(JSON.stringify(approved_event.getEventAsJSON()));
    ch.assertExchange(exchange, 'topic', {durable: true});
    ch.publish(exchange, key, msg, {persistent: true});
    console.log(" [x] Sent '%s'", msg);
}, 10000);
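If the delay in the real system is CPU-bound work rather than a simulated sleep, the non-blocking setTimeout above won't help by itself; as mentioned, the work has to be offloaded to a worker. A minimal sketch using Node's built-in worker_threads module (Node 12+); the heavyTask.js file and the runHeavyTask helper are hypothetical names, not part of the original code:
const { Worker } = require('worker_threads');

// Runs the CPU-heavy part in a separate thread so the event loop stays free
// to consume further RabbitMQ messages in the meantime.
function runHeavyTask(payload) {
    return new Promise((resolve, reject) => {
        // './heavyTask.js' is a hypothetical worker script that does the heavy
        // lifting on workerData and calls parentPort.postMessage(result) when done.
        const worker = new Worker('./heavyTask.js', { workerData: payload });
        worker.on('message', resolve);
        worker.on('error', reject);
    });
}

// Inside ch.consume(...) you could then do something like:
// runHeavyTask(pending_event.payload).then(function(result) {
//     ch.publish(exchange, key, Buffer.from(JSON.stringify(result)), { persistent: true });
// });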
Related
My problem is that the code does not seem to be running in order, as seen below.
This code is for my discord.js bot that I am creating.
var Discord = require("discord.js");
var bot = new Discord.Client();
var yt = require("C:/Users/username/Documents/Coding/Discord/youtubetest.js");
var youtubetest = new yt();
var fs = require('fs');
var youtubedl = require('youtube-dl');
var prefix = "!";
var vidid;

var commands = {
    play: {
        name: "!play ",
        fnc: "Gets a Youtube video matching given tags.",
        process: function(msg, query) {
            youtubetest.respond(query, msg);
            var vidid = youtubetest.vidid;
            console.log(typeof(vidid) + " + " + vidid);
            console.log("3");
        }
    }
};

bot.on('ready', () => {
    console.log('I am ready!');
});

bot.on("message", msg => {
    if (!msg.content.startsWith(prefix) || msg.author.bot || (msg.author.id === bot.user.id)) return;
    var cmdraw = msg.content.split(" ")[0].substring(1).toLowerCase();
    var query = msg.content.split("!")[1];
    var cmd = commands[cmdraw];
    if (cmd) {
        var res = cmd.process(msg, query, bot);
        if (res) {
            msg.channel.sendMessage(res);
        }
    } else {
        let msgs = [];
        msgs.push(msg.content + " is not a valid command.");
        msgs.push(" ");
        msgs.push("Available commands:");
        msgs.push(" ");
        msg.channel.sendMessage(msgs);
        msg.channel.sendMessage(commands.help.process(msg));
    }
});

bot.on('error', e => { console.error(e); });

bot.login("mytoken");
The youtubetest.js file:
var youtube_node = require('youtube-node');
var ConfigFile = require("C:/Users/username/Documents/Coding/Discord/json_config.json");
var mybot = require("C:/Users/username/Documents/Coding/Discord/mybot.js");

function myyt () {
    this.youtube = new youtube_node();
    this.youtube.setKey(ConfigFile.youtube_api_key);
    this.vidid = "";
}

myyt.prototype.respond = function(query, msg) {
    this.youtube.search(query, 1, function(error, result) {
        if (error) {
            msg.channel.sendMessage("There was an error finding requested video.");
        } else {
            vidid = 'http://www.youtube.com/watch?v=' + result.items[0].id.videoId;
            myyt.vidid = vidid;
            console.log("1");
        }
    });
    console.log("2");
};

module.exports = myyt;
As the code shows, I have an object for the commands that the bot will be able to process, and I have a function to run said commands when a message is received.
Throughout the code I have put three console.logs with 1, 2 and 3 showing the order in which I expect the parts of the code to run. When the code is run and a query is found, the output is this:
I am ready!
string +
2
3
1
This shows that the code is not running in the order I expect it to.
All help is very highly appreciated :)
*Update! Thank you all very much for helping me understand why it isn't working. I found a solution: in the main file, at vidid = youtubetest.respond(query, msg), the variable is not assigned until the function is done, so the code moves on to the rest of my code without the variable. To fix it I simply put in an if statement checking whether the variable is undefined and waiting until it is defined.*
As mentioned before, a lot of stuff in JavaScript runs asynchronously, hence the callback handlers. The reason it runs asynchronously is to avoid the rest of your code being "blocked" by remote calls. To avoid ending up in callback hell, most of us JavaScript developers are moving more and more over to Promises. So your code could then look more like this:
myyt.prototype.respond = function(query, msg) {
    var self = this; // "this" is not the myyt instance inside the Promise callback, so keep a reference
    return new Promise(function(resolve, reject) {
        self.youtube.search(query, 1, function(error, result) {
            if (error) {
                reject("There was an error finding requested video."); // passed down to the ".catch" statement below
            } else {
                vidid = 'http://www.youtube.com/watch?v=' + result.items[0].id.videoId;
                myyt.vidid = vidid;
                console.log("1");
                resolve(vidid); // resolve marks the promise as successfully completed and passes the id along to the ".then" method
            }
        });
    }).then(function(vidid) {
        // vidid is now the same as myyt.vidid above; return it so callers can chain on this promise
        console.log(vidid);
        return vidid;
    }).catch(function(err) {
        // err contains the rejection reason from above
        msg.channel.sendMessage(err);
    });
};
This would naturally require a change in anything that uses this process, but creating your own prototypes seems a bit odd.
This promise resolves with the vidid, so you'd then set vidid = youtubetest.respond(query, msg);, and whenever that function gets called, you do:
vidid.then(function(id) {
// id is now the vidid.
});
JavaScript runs async by design, and trying to hack your way around that leads you to dark places fast. As far as I can tell, you're also targeting Node.js, which means that once you start running something synchronously, you'll kill performance for other users, as everyone has to wait for that sync call to finish.
Some suggested reading:
http://callbackhell.com/
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
https://stackoverflow.com/a/11233849/3646975
I'd also suggest looking up ES6 syntax, as it shortens your code and makes life a hell of a lot easier (native promises were only introduced in ES6, which NodeJS 4 and above supports, more or less).
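For example, with native Promises and ES6 arrow functions (which also avoid the "this" binding pitfall inside callbacks), the respond method could be sketched roughly like this; treat it as an illustration rather than a drop-in replacement:
myyt.prototype.respond = function(query, msg) {
    // Arrow functions keep "this" pointing at the myyt instance.
    return new Promise((resolve, reject) => {
        this.youtube.search(query, 1, (error, result) => {
            if (error) {
                reject(new Error("There was an error finding requested video."));
            } else {
                resolve('http://www.youtube.com/watch?v=' + result.items[0].id.videoId);
            }
        });
    });
};

// usage:
// youtubetest.respond(query, msg)
//     .then(vidid => console.log(vidid))
//     .catch(err => msg.channel.sendMessage(err.message));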
In JavaScript, please remember that any callback function you pass to an asynchronous function is called asynchronously, i.e. the calls to the callback function may not happen "in order". "In order" in this case means the order in which they appear in the source file.
The callback function is simply called on a certain event:
when there is data to be processed,
on error,
in your case, for example, when the YouTube search results are ready,
when the 'ready' event is received or a 'message' is received,
etc.
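A minimal, self-contained illustration of the same effect, using fs.readFile in place of the YouTube search (the file read here is arbitrary):
var fs = require('fs');

fs.readFile(__filename, function (err, data) {
    console.log("1"); // runs last, only once the file data is actually available
});
console.log("2"); // runs immediately, without waiting for the read to finish
console.log("3"); // runs right after "2"

// prints: 2, 3, 1 (the same "out of order" output seen in the question)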
I have been building a YouTube video conversion app which streams YouTube videos using youtube-dl and saves them. Everything was working fine until I attempted to stream a video that was over an hour long. Whenever the task was between 50% and 100% complete (40-80 seconds in), the entire block of code would get re-run, resulting in multiple simultaneous streams. The response can therefore never be sent, as it waits for the pipe to finish. Adding next(); outside the stream function allowed the conversion to complete without any interruption or re-runs of the code block, but it resulted in the following error when attempting to send the response:
throw new Error('Can\'t set headers after they are sent.');
This is the Node.js code block in question:
app.post('/convert/', function (req, res, next) {
    var url = 'http://www.youtube.com/' + req.body.url;
    var quality = req.body.quality;
    var socketId = req.body.socketId;

    var stream = youtubedl(url,
        ['-f ' + quality],
        // Additional options can be given for calling `child_process.execFile()`.
        { cwd: __dirname });

    stream.on('info', function(info) {
        console.log('Download started');
        console.log('filename: ' + info._filename);
        console.log('size: ' + info.size);
        console.log('format: ' + info.format);

        var fileName = info._filename;
        var videoId = info.id;
        var videoTitle = info.title;
        videoTitle = videoTitle.replace(/[^a-zA-Z0-9\s]/g, '');
        console.log(videoTitle);
        var mp4 = 'downloads/' + videoTitle + '-' + info.format_id + '.mp4';

        fs.writeFile(mp4, "file", function(err) {
            if (err) {
                return console.log(err);
            }
            console.log("The file was saved!");

            var stat = fs.statSync(mp4);
            var str = progress({
                length: info.size,
                time: 100
            });

            str.on('progress', function(progress) {
                io.to(global.socketid).emit('progressVideo', {progress: progress.percentage});
                console.log(info.size);
                console.log(progress.transferred);
                console.log(progress.percentage);
                console.log(progress.remaining);
            });

            var pipe = stream.pipe(str).pipe(fs.createWriteStream(mp4));
            pipe.on('finish', function () {
                console.log("stream finished.");
                res.json(videoTitle + '-' + info.format_id + '.mp4');
            });
        });
    });
    // next();
});
Called by some Angular code.
// Sends youtube link to backend
$scope.getVideo = function(youtubeLink, resolution){
var cleanedLink = youtubeLink.substring(24);
var url = {url: cleanedLink, quality: resolution};
$http.post('/convert/', url).success(function (response){
// Do some stuff
});
}
Confused as to why it was getting run more than once, I slowly removed more and more code until I was left with this simple test.
app.post('/convert/', function (req, res, next) {
    console.log('hello!');
});
This was called by an ng-click event, and after waiting a minute or so the console printed a second and then a third hello! statement. I am completely lost as to why this happens. If anyone could shed some light on this for me it would be greatly appreciated.
So after getting down to a very basic post route that logs some text but does not return a response, I came to the conclusion that the issue resides with Node. I recorded the length of time between the console.log statements, which turned out to be 2 minutes, and from that I found out that Node has a default timeout of 2 minutes if a response is not sent back to the client.
I was able to set the response to never timeout with the following code:
res.connection.setTimeout(0);
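In the handler above, that means disabling the timeout before the long-running stream starts. A sketch of the placement, with the rest of the handler left as it was:
app.post('/convert/', function (req, res, next) {
    // Disable the default 2-minute response timeout so Node does not give up
    // on the request while the conversion is still running.
    res.connection.setTimeout(0);

    var url = 'http://www.youtube.com/' + req.body.url;
    var stream = youtubedl(url, ['-f ' + req.body.quality], { cwd: __dirname });

    stream.on('info', function (info) {
        // ... pipe to disk as above and call res.json(...) when the pipe finishes ...
    });
});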
I hope this helps anyone else that needs to hold connections open for long periods of time for file conversions/transfers etc.
So I'm working on a text-based game where users send POST requests to the server, which is written in Node.js. I need to match 2 players together by letting the first player wait for the 2nd player before a response is sent. The problem right now is that after I call the POST request once, I cannot send another POST request and get any response; it seems like the first request is blocking all future requests. What should I do in terms of using callbacks and async? I do not want to use sockets if possible, as that would limit the languages players can code in.
var newGame=0;
//joins new game (username,password)
app.post('/game/join', function(req, res) {
    var username = req.body.username;
    var password = req.body.password;
    if (newGame == 1) {
        res.send('Game Started');
        newGame = 0;
    }
    else {
        newGame = 1;
        wait(username, function() {
            res.send('Game Started' + username);
        });
    }
});

function wait(username) {
    while (newGame == 1) {
        console.log(username + newGame);
    }
}
So you may have found that once your wait function starts, nothing else happens. This is because an infinite while loop (even one that only calls console.log) prevents the event loop from doing anything else until it is "done". Instead, you can wait with setTimeout, setInterval, and clearInterval, which respectively run a function at some point in the future, run a function on an interval, and stop running a function on an interval.
var newGame = 0;
app.post('/game/join', function(req, res) {
    var username = req.body.username;
    var password = req.body.password;
    if (newGame === 1) {
        // someone is already waiting: answer this player and release the waiting one
        res.send('Game Started' + username);
        newGame = 0;
    } else {
        // nobody waiting yet: flag it and poll until a second player resets the flag
        newGame = 1;
        var wait = setInterval(function() {
            if (newGame === 0) {
                clearInterval(wait);
                res.send('Game Started');
            }
        }, 5000); // retry every 5 seconds
    }
});
Node.js is single-threaded, so your code is preventing your server from processing additional requests. Blocking the event loop is definitely something you don't want to be doing.
I was going to suggest using something like Socket.IO or WebRTC, but I saw your constraint. I'm not really sure what you meant by "limit the languages that players can code in", though.
It's not clear to me how you pick 2 users to start a match. If it works like this: one player is connected, another one joins, and then the match is started between them, I would do something like this.
var waitingResponses = [];
//joins new game (username,password)
app.post('/game/join', function(req, res) {
    var username = req.body.username;
    var password = req.body.password;
    if (waitingResponses.length) {
        var waitingPlayerResponse = waitingResponses.shift();
        waitingPlayerResponse.send('Game Started' + username);
        res.send('Game Started');
    }
    else {
        waitingResponses.push(res);
    }
});
This is the relevant code:
This sends a request to the server; it takes a userName as an argument.
function send_CLIENT_LOOKUP(userName)
{
    send(
        auth.AOCP.CLIENT_LOOKUP,
        [ [ 'S', userName.toString() ] ]
    )
}
This part handles the response from the server.
handle[auth.AOCP.CLIENT_LOOKUP] = function (data, u)
{
    console.log('CLIENT_LOOKUP')
    var userId = u.I()
    var userName = u.S()
    u.done()
    console.log({ userId : userId, userName : userName }) // this works properly
    var idResult = userId; // I could use userId directly, but I assigned it to idResult to make the code easier to understand.
}
The above 2 functions work as they are supposed to; nothing to change or fix there.
Now I have a function that receives a request from one user and sends a request to another user. It takes 2 arguments: the first is the userId of the user that sends the request, and the second is the userName of the user the request will be sent to. However, the server/game only works with userIds, so the userName has to be converted:
var commands = {
    invite: function(userId, userName) {
        send_CLIENT_LOOKUP(userName); // send a request to the server to find out the userId of the userName
        send_PRIVGRP_INVITE(idResult);
    }
}
The problem is that idResult is undefined, unless I call cmd.invite() again; then idResult holds the previous response, like this:
cmd.invite(user1);
cmd.invite(user2);
cmd.invite(user3);
And the output of idResult is:
idResult == undefined
idResult == 'info for user1'
idResult == 'info for user2'
I tried defining idResult outside the response handler and updating it inside. To make sure it's not some sort of delay from the server, I did a massive invite spam and the result was the same: one step behind, no matter how fast I sent the invites. Any suggestions are welcome :)
The problem is rather hard to solve, as sending and collecting replies are isolated. It seems that send() sends a packet and handle() receives a reply, and there can be many possible unrelated packets between sending and receiving.
If that's the case, then a special global map should be created to link requests and responses. To also handle cases where there are 2 concurrent requests for the same username, EventEmitter from the events module is a good fit, as it's essentially a multimap of strings to callbacks:
var events = require('events')
var outstandingLookups = new events.EventEmitter()
So when you send a lookup you register a one-time listener, so there's no need for manual cleanup:
var commands = {
    invite: function(userName) {
        outstandingLookups.once(userName, function (idResult)
        {
            send_PRIVGRP_INVITE(idResult);
        })
        // send a request to the server to find out the userId of the userName
        send_CLIENT_LOOKUP(userName);
    }
}
In the response handler, emit conveniently calls for you all the callbacks expecting a userId for the received userName:
handle[auth.AOCP.CLIENT_LOOKUP] = function (data, u)
{
    console.log('CLIENT_LOOKUP')
    var userId = u.I()
    var userName = u.S()
    u.done()
    console.log({ userId : userId, userName : userName }) // this works properly
    // I could use userId directly, but I assigned it to idResult
    // to make the code easier to understand.
    var idResult = userId;
    // this is the only addition to the original handler code
    outstandingLookups.emit(userName, idResult)
}
The need to use outstandingLookups.once is actually an implementation detail, and it should be properly encapsulated/hidden if you want to look up userIds in more than one place in the code.
I will use promises from the q npm library as I remember its interface, but the similar ES6 promises should be used in production code, as they are the modern standard.
var Q = require('q')

var commands = {
    lookupUserName: function (userName)
    {
        var result = Q.defer()
        outstandingLookups.once(userName, function (idResult)
        {
            result.resolve(idResult);
        })
        // send a request to the server to find out the userId of the userName
        send_CLIENT_LOOKUP(userName);
        return result.promise
    },
    invite: function(userName) {
        commands.lookupUserName(userName).then(function (idResult)
        {
            send_PRIVGRP_INVITE(idResult);
        })
    }
}
The code for commands.invite is much cleaner now.
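Since ES6 promises are mentioned above as the production choice, the same lookupUserName can also be sketched with a native Promise instead of Q; the rest stays identical:
var commands = {
    lookupUserName: function (userName) {
        return new Promise(function (resolve) {
            // resolve is itself a one-argument callback, so it can be registered directly
            outstandingLookups.once(userName, resolve);
            // send a request to the server to find out the userId of the userName
            send_CLIENT_LOOKUP(userName);
        });
    },
    invite: function (userName) {
        commands.lookupUserName(userName).then(function (idResult) {
            send_PRIVGRP_INVITE(idResult);
        });
    }
};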
It looks like you're making an async request but updating the variable idResult synchronously.
So you should put the call to send_PRIVGRP_INVITE(idResult) into the callback or response handler of send_CLIENT_LOOKUP(userName)
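In its simplest form that could look like the sketch below; unlike the EventEmitter answer above, it assumes only one lookup is in flight at a time, and the pendingLookup variable is a name introduced here purely for illustration:
var pendingLookup = null; // callback to run when the next CLIENT_LOOKUP reply arrives

var commands = {
    invite: function (userName) {
        pendingLookup = function (idResult) {
            send_PRIVGRP_INVITE(idResult); // runs only once the server has replied
        };
        send_CLIENT_LOOKUP(userName);
    }
};

handle[auth.AOCP.CLIENT_LOOKUP] = function (data, u) {
    var userId = u.I();
    var userName = u.S();
    u.done();
    if (pendingLookup) {
        pendingLookup(userId);
        pendingLookup = null;
    }
};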
I need to handle a websocket timeout when the socket is trying to connect to a server that is available but busy.
More in depth: I have a web application that can connect to several single-threaded remote socket servers in charge of post-processing data passed as input.
The goal is to collect data from the web GUI and then submit it to the first socket that is available and listening.
If a server socket daemon is already processing data, it cannot serve the user request, which instead has to be addressed to the second socket in the list.
Basically, as a practical example, I have the following sockets ready to accept a call coming from a web browser:
ws://10.20.30.40:8080/
ws://10.20.30.40:8081/
ws://10.20.30.40:8082/
ws://10.20.30.40:8083/
ws://192.192.192.192:9001/
When the first socket receives the call and performs the handshake, it starts to process the data (and this process may take some minutes).
When another client makes the same request on a different set of data, I would like that request to be served by the 2nd available socket, and so on.
So the question is... how can I contact the socket (the first of the list), wait 250 milliseconds and (in case of timeout) skip to the next one?
I've started from the following approach:
$.interval = function(func, wait, times) {
    var interv = function(w, t) {
        return function() {
            if (typeof t === "undefined" || t-- > 0) {
                setTimeout(interv, w);
                try {
                    func.call(null);
                } catch(e) {
                    t = 0;
                    throw e.toString();
                }
            } else {
                alert('stop');
            }
        };
    }(wait, times);

    setTimeout(interv, wait);
}

handler = new WebSocket(url);
loop = 1;
$.interval(function() {
    rs = handler.readyState;
    console.log("(" + loop++ + ") readyState: " + rs);
}, 25, 10); // loop 10 times every 25 milliseconds
But how can I stop in case socket.readyState becomes 1 (OPEN) or the limit is reached?
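One way to do that, keeping the polling approach from the question: stop the loop as soon as readyState is OPEN, and once the retry limit is reached close the socket and move on to the next URL. The names below (connectWithTimeout, onOpen, onFail) are placeholders introduced here for illustration:
function connectWithTimeout(urls, index, onOpen, onFail) {
    if (index >= urls.length) { onFail(); return; } // no socket in the list answered in time

    var handler = new WebSocket(urls[index]);
    var attempts = 0;

    var poll = setInterval(function () {
        attempts++;
        if (handler.readyState === WebSocket.OPEN) {
            clearInterval(poll); // stop polling: the handshake completed
            onOpen(handler);
        } else if (attempts >= 10) { // limit reached (10 x 25 ms = 250 ms)
            clearInterval(poll);
            handler.close(); // give up on this server...
            connectWithTimeout(urls, index + 1, onOpen, onFail); // ...and try the next one
        }
    }, 25);
}

// connectWithTimeout(['ws://10.20.30.40:8080/', 'ws://10.20.30.40:8081/'], 0,
//     function (ws) { /* submit the data on ws */ },
//     function () { /* no server available */ });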