Running a node.js script every 10 seconds - javascript

I just started using Node.js and I'm trying to make my script run in the background like a daemon, checking the database every 10 seconds for something to do. When there is something to run, it reads the output from the spawned program and performs certain tasks depending on that output.
This is what I've been able to do so far. It works just as I intended, but it only runs once, even in the background. How can I make it run like a daemon every 10 seconds?
Code:
var spawn = require('child_process').spawn;
var mysql = require('mysql');
var JSFtp = require('jsftp');
var check = require('node-validator').check;
var sanitize = require('node-validator').sanitize;
//Setup the db connection
var db = mysql.createConnection({
host : 'db',
port : 3306,
database: 'db',
user : 'db',
password : 'db'
});
//Make the connection
db.connect(function(err){
if(err != null) {
die('Error connecting to mysql: ' + err); //there is no `res` here; report the error and exit instead
}
});
var die = function(msg){
console.error(msg);
process.exit(1);
}
function ip2long ( ip_address ) {
var output = false;
var parts = [];
if (ip_address.match(/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/)) {
parts = ip_address.split('.');
output = ( parts[0] * 16777216 +
( parts[1] * 65536 ) +
( parts[2] * 256 ) +
( parts[3] * 1 ) );
}
return output;
}
db.query("SELECT * FROM queue WHERE cooldown > UNIX_TIMESTAMP(NOW()) AND simulated=0 ORDER BY cooldown DESC LIMIT 1", function(err, rows){
if(err != null){
die("Query error: " + err);
}
if(rows.length === 0){
die("No rows");
}
//Set the vars from the query
var name = rows[0]['name'];
var ip = rows[0]['ip'];
var iterations = rows[0]['runs'];
var bin = "/home/hoar/sum/run"
var args = ['arg='+name, 'arg2='+iterations, 'path=/var/www/upload/'+name+'.html', 'output=log.log'];
var proc = spawn(bin, args);
var time = "/.*/";
var pct = "/^\d/";
var name = rows[0]['name'];
var ip = rows[0]['ip'];
var iterations = rows[0]['runs'];
proc.stdout.setEncoding('utf8');
proc.stdout.on('data', function(data) {
var str = data.toString();
var s = str.split("|");
var p = s[0].split("/");
var t = (s[1] == null) ? "" : s[1];
if(p != null && s[0] != "#"){ //Check for "#" because the program prints it as its first line only, so the query in the else branch below runs just once.
//Check the return numbers from simc to see how many sims it has done
if(parseInt(p[0]) < parseInt(p[1])){
//Check if the 6th match is a number and the 7th only contains letters
if(t != null){
var time = t.replace(/(\r\n|\n|\r)/gm,""); //Remove any line disturbers for db
//Update the database with the amount of time left on the simulation
db.query("UPDATE `queue` SET `status`=" + db.escape(time) + " WHERE (`name`=" + name + ")");
//console.log(p[0]+"/"+p[1] + " - " + t + " left");
}
//console.log(p[0]+"/"+p[1] + " iterations done");
}
}else{
//If the stdout is null run this query since we don't want to run this more than once.
db.query("UPDATE `queue` SET `simulated`='2' WHERE (`name`=" + name + " AND simulated!='2')");
//console.log("Updated db to 2");
}
});
proc.stderr.on('data', function (data) {
var str = data.toString();
//If the program returns stderr we want to make sure it stops and we update the database to let the user know.
if(str.indexOf("ERROR! Setup failure...") !== -1){ //indexOf returns -1 when the string is not found, which is still truthy
//Update the database with the amount of time left on the simulation
db.query("UPDATE `queue` SET `simulated`='3' WHERE (`name`=" + name + ")");
//Kill the DB connection
db.destroy();
die("There was an error: " + data);
}
});
proc.on('exit', function (code) {
//Setup the ftp connection
var ftp = new JSFtp({
host: "ftp",
port: 21,
user: "ftp",
pass: "ftp"
});
//Simulation ended with success update the database and kill.
db.query("UPDATE `queue` SET `simulated`='1' WHERE (`name`=" + name + " AND simulated='2')");
ftp.put('/var/www/upload/'+rows[0]['name']+'.html', 'public_html/mysite/'+ip2long(rows[0]['ip'])+'/'+rows[0]['name']+'.html', function(hadError) {
if (hadError)
console.log("FTP error");
ftp.raw.quit();
});
db.destroy();
//die("Simulation is done");
});
});//end sql

Put your db query in a function that takes a callback, and have the callback fire the function again 10 seconds later:
function mydbquery(callback) {
db.query("SELECT * FROM queue WHERE cooldown > UNIX_TIMESTAMP(NOW()) AND simulated=0 ORDER BY cooldown DESC LIMIT 1", function(err, rows){
if(err != null){
die("Query error: " + err);
}
if(rows.length === 0){
//Don't kill the process when the queue is empty; just let the timer fire the next check
return callback();
}
//Set the vars from the query
var name = rows[0]['name'];
var ip = rows[0]['ip'];
var iterations = rows[0]['runs'];
var bin = "/home/hoar/sum/run"
var args = ['arg='+name, 'arg2='+iterations, 'path=/var/www/upload/'+name+'.html', 'output=log.log'];
var proc = spawn(bin, args);
var time = "/.*/";
var pct = "/^\d/";
var name = rows[0]['name'];
var ip = rows[0]['ip'];
var iterations = rows[0]['runs'];
proc.stdout.setEncoding('utf8');
proc.stdout.on('data', function(data) {
var str = data.toString();
var s = str.split("|");
var p = s[0].split("/");
var t = (s[1] == null) ? "" : s[1];
if(p != null && s[0] != "#"){ //Check for "#" because the program prints it as its first line only, so the query in the else branch below runs just once.
//Check the return numbers from simc to see how many sims it has done
if(parseInt(p[0]) < parseInt(p[1])){
//Check if the 6th match is a number and the 7th only contains letters
if(t != null){
var time = t.replace(/(\r\n|\n|\r)/gm,""); //Remove any line disturbers for db
//Update the database with the amount of time left on the simulation
db.query("UPDATE `queue` SET `status`=" + db.escape(time) + " WHERE (`name`=" + name + ")");
//console.log(p[0]+"/"+p[1] + " - " + t + " left");
}
//console.log(p[0]+"/"+p[1] + " iterations done");
}
}else{
//If the stdout is null run this query since we don't want to run this more than once.
db.query("UPDATE `queue` SET `simulated`='2' WHERE (`name`=" + name + " AND simulated!='2')");
//console.log("Updated db to 2");
}
});
proc.stderr.on('data', function (data) {
var str = data.toString();
//If the program returns stderr we want to make sure it stops and we update the database to let the user know.
if(str.indexOf("ERROR! Setup failure...") !== -1){ //indexOf returns -1 when the string is not found, which is still truthy
//Update the database with the amount of time left on the simulation
db.query("UPDATE `queue` SET `simulated`='3' WHERE (`name`=" + name + ")");
//Kill the DB connection
db.destroy();
die("There was an error: " + data);
}
});
proc.on('exit', function (code) {
//Setup the ftp connection
var ftp = new JSFtp({
host: "ftp",
port: 21,
user: "ftp",
pass: "ftp"
});
//Simulation ended with success update the database and kill.
db.query("UPDATE `queue` SET `simulated`='1' WHERE (`name`=" + name + " AND simulated='2')");
ftp.put('/var/www/upload/'+rows[0]['name']+'.html', 'public_html/mysite/'+ip2long(rows[0]['ip'])+'/'+rows[0]['name']+'.html', function(hadError) {
if (hadError)
console.log("FTP error");
ftp.raw.quit();
});
//Keep the db connection open so the next 10-second cycle can reuse it
//die("Simulation is done");
//NEW CODE!!!
callback();
//END OF NEW CODE
});
});//end sql
}
//NEW CODE!!!
function wait10sec(){
setTimeout(function(){
mydbquery(wait10sec);
}, 10000);
}
mydbquery(wait10sec);
//END OF NEW CODE
So it will run your query, then wait 10 seconds before firing the next one.
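Stripped of the simulation-specific logic, the pattern this answer relies on boils down to the skeleton below (just a sketch; processQueue is a hypothetical stand-in for the db.query/spawn work above):
function mydbquery(callback) {
    processQueue(function() {
        //the whole async chain for one queue item has finished
        callback();
    });
}
function wait10sec() {
    setTimeout(function() {
        mydbquery(wait10sec); //run the next cycle, then schedule another
    }, 10000);
}
mydbquery(wait10sec); //the first cycle starts immediately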

Just have your program run continuously and use setTimeout to re-execute the main logic on a timer. There is also setInterval, which is tempting, but with it you risk starting a new run before the prior run completes. Here's the basic pattern:
function doMainStuff() {
//do all your stuff
lastAsyncThing(function (error) {
//When your final async thing is done, start the timer
if (error) {
//log error. Maybe exit if it's irrecoverable.
}
setTimeout(doMainStuff, 10 * 1000);
});
}
//when your program starts, do stuff right away.
doMainStuff();
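For contrast, a minimal sketch of why setInterval is risky here (doAsyncWork is a hypothetical placeholder): the timer keeps firing on schedule whether or not the previous run has finished, so slow runs start to overlap.
setInterval(function () {
    //if doAsyncWork takes longer than 10 seconds, a second run starts before the first completes
    doAsyncWork(function (error) {
        if (error) { /* handle or log it */ }
    });
}, 10 * 1000);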

Run this script with the forever package - https://npmjs.org/package/forever
$ forever start script.js
This will run the script in the background, detached from the console.
For a 10-second timeout you can use
setInterval(function(){...}, 10*1000);
More info here - http://www.w3schools.com/jsref/met_win_setinterval.asp
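Putting the two together, a minimal sketch might look like this (checkQueue is a hypothetical wrapper around the db.query logic from the question):
// script.js
function checkQueue() {
    //query the queue table and spawn the simulation here
}
checkQueue(); //run once at startup
setInterval(checkQueue, 10 * 1000); //then check again every 10 seconds
Then start it with forever as shown above.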

Related

Server crash, sending message at restart (Node.js / Socket.io)

Hey guys, I am building a message system and everything is working fine, but now I want to add a feature: if the server crashes and restarts, the messages that were sent during that time should be delivered when the server comes back up. I am trying to save the message information on the client side and build a "waiting" system that waits for a server response. So I wanted to know how I can build that "waiting" system, because right now I am doing it like this:
while (socket.connected === false) {
}
But that makes the client freeze because the infinite loop spins way too fast... so is it possible to set a timer? (I already tried, but I couldn't figure out how to build a proper timer inside a loop.)
Or maybe I'm totally wrong and I shouldn't build a waiting system at all but do something else instead, so tell me if my technique won't work or if there is a better one :)
So here is my code:
Client.js (startTchat is called when someone connects)
(function($){
var socket = io.connect('http://localhost:1337');
var lastmsg = [];
var me_id = [];
var friend_ = [];
var conv_ = [];
var isPlace_ = [];
var isLocation_ = [];
var me_ = [];
var my_id;
startTchat = function(user_id, username, friend_id, conv_id, isPlace, isLocalisation) {
my_id = user_id;
socket.emit('login_chat', {
id : user_id,
username : username,
friend : friend_id,
conv : conv_id,
isPlace : isPlace,
isLocalisation : isLocalisation,
})
};
/**
* Error
*/
socket.on('error', function(err){
alert(err);
});
/**
* Messages
*/
$('#chat_form').submit(function(event){
var a = 0;
while (socket.connected === false) {
}
event.preventDefault();
console.log('ME', my_id, 'TAB', me_id);
socket.emit('new_msg', {message: $('#message').val() }, me_id[my_id], friend_[my_id], conv_[my_id], isPlace_[my_id], isLocation_[my_id], me_[my_id]);
if (a === 1) {
console.log('HEYYYYYYYYYY', my_id);
}
$('#message').val('');
$('#message').focus();
});
socket.on('new_msg', function(message, me, id_receiver, id_transmiter){
if (me.id === id_receiver || me.id === id_transmiter) {
if (lastmsg != message.user.id) {
$('#new_message').append('<span class="time_date"> ' + message.h + ' : ' + message.m + ' | ' + message.y + '-' + message.m + '-' + message.d + ' | ' + message.user.username + '</span>'
+ '<p>' + message.message + '</p>\n'
);
lastmsg = message.user.id;
} else {
$('#new_message').append('<p>' + message.message + '</p>'
);
}
}
});
/**
* Login
*/
socket.on('new_user', function(user, friend, conv, isPlace, isLocation){
me_id[user.id] = user.id;
friend_[user.id] = friend;
conv_[user.id] = conv;
isPlace_[user.id] = isPlace;
me_[user.id] = user;
isLocation_[user.id] = isLocation;
$('#new_user').append('<div class="chat_list active_chat" id="' + user.id + '">\n' +
' <div class="chat_people">\n' +
' <div class="chat_img"> <img src="https://ptetutorials.com/images/user-profile.png" alt="sunil"> </div>\n' +
' <div class="chat_ib">\n' +
' <h5>' + user.username + ' <span class="chat_date">Id : ' + user.id + '</span></h5>\n' +
' </div>\n' +
' </div>\n' +
' </div>');
});
/**
* Disconnect
*/
socket.on('disc_user', function(user){
$('#' + user.id).remove();
})
})(jQuery);
And server.js :
var http = require('http');
var MongoClient = require('mongodb').MongoClient;
// Connection URL
const url = 'mongodb://localhost:27017';
// Database Name
const dbName = 'msg';
MongoClient.connect(url, function(err, client) {
if (err)
throw err;
console.log('MongoDB connected ...');
httpServer = http.createServer(function(req, res) {
console.log('This is a test');
res.end('Hello World');
});
httpServer.listen(1337);
var io = require('socket.io').listen(httpServer);
var users = {};
var messages = [];
io.sockets.on('connection', function (socket) {
const collection = client.db(dbName).collection('MessageUser');
var me = false;
var friend = false;
var conv = false;
var isPlace = false;
var room = false;
var isLocalisation = false;
for(var k in users) {
socket.emit('new_user', users[k]);
}
/**
* Login
*/
socket.on('login_chat', function (user) {
me = user;
friend = user.friend;
isPlace = user.isPlace;
conv = user.conv;
isLocalisation = user.isLocalisation;
if (isPlace === 0) {
room = user.conv;
} else {
room = user.conv + '-Place';
}
socket.join(room);
//console.log('New user : ', me.username, ' - id : ', me.id);
users[me.id] = me;
io.sockets.emit('new_user', me, friend, conv, isPlace, isLocalisation);
});
/**
* Disconnect
*/
socket.on('disconnect', function() {
if (!me) {
return false;
}
delete users[me.id];
io.sockets.emit('disc_user', me);
});
/**
* Message receive
*/
socket.on('new_msg', function(message, me_id, friend_, conv_, isPlace_, isLocalisation_, me_){
if (message.message !== '') {
message.user = me;
date = new Date();
message.h = date.getHours();
message.m = date.getMinutes();
message.y = date.getFullYear();
message.m = date.getMonth();
message.d = date.getDate();
console.log(message);
messages.push(message);
msg = {};
msg.content = message.message;
msg.sendAt = new Date();
msg.idTransmitter = me.id;
if (isPlace === 0) {
msg.idReceiver = friend;
} else {
msg.idReceiver = conv;
}
msg.idConversation = conv;
msg.isPlace = isPlace;
msg.isLocalisation = isLocalisation;
collection.insertOne(msg);
console.log('---1---', msg.idReceiver, '---2---', msg.idTransmitter, '---3---', me);
io.to(room).emit('new_msg', message, me, msg.idReceiver, msg.idTransmitter);
}
});
});
});
PS: Tell me if you need more info; sorry if I forgot something, it's my first time using JS, Node and socket.io :)
while (socket.connected === false) {
}
Don't do that, it will block your page and peg your processor at 100%.
Instead, use setTimeout. It's the closest thing JavaScript has to sleep. You need to refactor your code to call setTimeout recursively, and count the number of "retries" (if you want to give up at some point).
Code:
$('#chat_form').submit(function(event){
event.preventDefault(); //must run synchronously, before any setTimeout retries, or the form submits anyway
var retries = 0, max_retries = 10;
function tryNewMessage() {
if (socket.connected === false) {
if (retries >= max_retries) return; //handle max_retries properly in your code
//this is where you sleep for 1 second, waiting for the server to come online
setTimeout(tryNewMessage, 1000);
retries++;
}
else {
var a = 0;
console.log('ME', my_id, 'TAB', me_id);
socket.emit('new_msg', {message: $('#message').val() }, me_id[my_id], friend_[my_id], conv_[my_id], isPlace_[my_id], isLocation_[my_id], me_[my_id]);
if (a === 1) {
console.log('HEYYYYYYYYYY', my_id);
}
$('#message').val('');
$('#message').focus();
}
}
tryNewMessage();
});

Node JS UDP Datagram API hangup when scaling

So I'm new to Node.js and I'm trying to port over an existing API. The function I'm working on simply sends a UDP query to a remote server and waits for a response, but when I loop over a ton of randomly generated queries, a lot of them start timing out.
When I check the server logs, I notice that not all of the query requests make it through. This only happens when I loop it several hundred times or more, very rarely when it's a smaller number.
var dgram = require("dgram");
var randomstring = require("randomstring");
var xml2js = require("xml2js");
var naTools = require("./NATools.js")
var ipVersion = 5;
var apiId = 7;
/**
*
* @param queryParamArray query params
* @param callback function provided by user
*/
exports.queryServer = function(queryParamArray, callback) {
//set socket type and query string
var client = dgram.createSocket("udp4");
var transactionId = randomstring.generate({
length : 20,
charset : "alphanumeric"
});
var queryString = queryParamArray[0] + ";" + queryParamArray[1] + ";" + queryParamArray[2] + ";" + ipVersion + ";" + apiId + ";" + transactionId + ";";
var responseObject;
var bufferMsg = new Buffer(queryString);
//if the request times out, close the client and call the callback function with the response
var timeoutObject = setTimeout(function() {
client.close();
responseObject = "Error : Request timed out after " + queryParamArray[4] + " milliseconds for transaction : " + transactionId;
callback(responseObject);
}, queryParamArray[4]);
//parses the message received from the netacuity server and calls a function that generates the response objects and calls the callback function
client.on("message", function(message) {
client.close();
clearTimeout(timeoutObject);
var msg = message.toString();
var delimitedArray = msg.split(";");
//find the transactionId section, followed by the error, and then the fields - sometimes netacuity server pads with an extra field
var index = 0;
while(delimitedArray[index] != transactionId && index<delimitedArray.length){
index++;
}
if(index >= delimitedArray.length) {
responseObject = "Error : transaction id from response does not match transaction id of request.";
callback(responseObject);
return;
}
if(delimitedArray[index+1] == '') { //make sure error field is empty
var responseArray = delimitedArray.slice(index+2, delimitedArray.length-1);
var paramArray = [queryParamArray[0], transactionId, queryParamArray[2]];
naTools.generateResponseObject(paramArray, responseArray, callback);
} else {
responseObject = "Error : " + delimitedArray[index+1];
callback(responseObject);
return;
}
});
//send the request
client.send(bufferMsg, 0, bufferMsg.length, 5400, queryParamArray[3], function() {
console.log("Querying Server : " + queryString);
});
}
naTools.generateResponseObject is just a switch statement on paramArray[0]; it builds an object from the data and calls the passed-in callback with it.
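For readers, a purely hypothetical sketch of what such a helper might look like, based only on that description (the feature codes and field names here are made up):
exports.generateResponseObject = function(paramArray, responseArray, callback) {
    var obj = { transactionId: paramArray[1] }; //paramArray = [featureCode, transactionId, ip]
    switch (paramArray[0]) {
        case 3: //hypothetical feature code
            obj.country = responseArray[0];
            break;
        default:
            obj.fields = responseArray;
    }
    callback(obj);
};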
Here is the test case : (Assume all the required imports are there)
var databaseEnums = {
xxx : 3,
xxx : 4,
xxx : 5,
xxx : 6,
xxx : 7,
xxx : 8,
xxx : 9,
xxx : 10,
xxx : 11,
xxx : 12,
xxx : 14,
xxx : 15,
xxx : 17,
xxx : 18,
xxx : 19,
xxx : 24,
xxx : 25,
xxx : 26
};
var incompleteIp = "2.242.71."; //put some c class ip here
var randomIp = incompleteIp;
for(var i = 0; i < 500; i++) { //running x test queries
for(var j = 0; j < 3; j++) {
var randomNumber = j == 0 ? Math.floor(Math.random() * 3) : Math.floor(Math.random() * 6);
randomIp += randomNumber;
}
var propertyList = Object.keys(databaseEnums);
var randomPropertyName = propertyList[Math.floor(Math.random()*propertyList.length)];
var randomFeatureCode = databaseEnums[randomPropertyName];
api.queryServer([randomFeatureCode, 64, randomIp, "192.168.11.28", 2000, 5400], function(response) {
console.log(response);
});
randomIp = incompleteIp;
}
I don't understand why some of the queries never make it to the server and time out once the for loop gets past about 300 iterations. Sometimes it happens, sometimes they all complete. Is there something I did wrong that's blocking the event loop?
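One thing worth ruling out (a sketch, not a confirmed diagnosis): the test fires all 500 sends in one synchronous loop, so every socket and datagram is queued before the event loop gets a chance to read any replies. Capping the number of in-flight queries makes it easy to test that theory; buildRandomQuery() below is a hypothetical stand-in for the random feature-code/IP construction above.
function runThrottled(total, limit) {
    var inFlight = 0, started = 0;
    function next() {
        while (inFlight < limit && started < total) {
            inFlight++;
            started++;
            api.queryServer(buildRandomQuery(), function (response) {
                console.log(response);
                inFlight--;
                next(); //a slot freed up, start another query
            });
        }
    }
    next();
}
runThrottled(500, 20); //same 500 queries, but only 20 outstanding at a time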

Node.js Async not working properly with whilst or forever

I have data in a database that I would like to accumulate. Therefore I need a function that is called repeatedly, several times, but not in parallel: each call should wait for the previous one to finish.
I came across async and tried to call the function multiple times, but the loop only iterates once...
function callDocAccumulation() {
var db = openSQL();
db.serialize(function() {
db.get(batchSQL.queryProgramLineCount, function(err, row) {
var count = 0;
console.log('count rows ' + row.size);
var async = require('async');
async.whilst(function() {
return count < row.size
}, function(next) {
docAccumulation();
console.log('counter whilst ' + count)
count++;
/*added function*/
setTimeout(next, 1000);
}, function(err) {
console.log(err);
});
});
});
db.close();
}
function callDocAccumulation2() {
console.log('callDocAccumulation2')
var async = require('async');
var err = function(err) {
console.log(err);
};
var call = function() {
console.log('first call of docAccumulation')
docAccumulation();
};
async.forever(call, err);
}
docAccumulation is a bit more complex. I check whether a line exists that has not been processed yet. Then I check whether it can be accumulated, i.e. whether an entry already exists in the other table; otherwise it is the start of a new accumulation.
function docAccumulation(){
var db = openSQL();
db.serialize(function() {
db.get(batchSQL.queryProgramLine, function(err, row) {
var idProgram = row.idProgram;
var Name = row.Name;
var ProgramName = row.ProgramName;
var ProgramPath = row.ProgramPath;
var DocumentPath = row.DocumentPath;
var StartDate = new Date(row.StartDate);
var EndDate = new Date(row.EndDate);
var seconds = (EndDate.getTime() - StartDate.getTime())/1000;
console.log(seconds);
console.log(batchSQL.queryProcessedLine(row.Name,row.ProgramName,row.DocumentPath));
db.get(batchSQL.queryProcessedLine(Name,ProgramName,DocumentPath), function(err, row) {
if(row === undefined || row === null){
var stmt = db.prepare(batchSQL.insertRowProc);
stmt.run([Name, ProgramName, ProgramPath, DocumentPath, seconds], function(error){
console.log('lastInsert ' + this.lastID);
var stmt = db.prepare(batchSQL.updateRowProgram);
stmt.run(this.lastID, idProgram);
stmt.finalize();
});
stmt.finalize();
}else{
console.log('seconds before ' + seconds);
console.log('seconds row.time ' + row.Time);
seconds += row.Time;//increment time
console.log('seconds after ' + seconds);
var stmt = db.prepare(batchSQL.updateRowProcessed);
stmt.run([seconds, row.idProcessed], function(error){
console.log('lastInsert ' + row.idProcessed);
var stmt = db.prepare(batchSQL.updateRowProgram);
stmt.run(row.idProcessed, idProgram);
stmt.finalize();
});
stmt.finalize();
}
});
});
});
}
Where is my error? No matter which function I call, the loop body only runs once, even though the log shows that the number of rows is higher.
Would recursion be smarter here?
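For what it's worth, a common reason this kind of loop only runs once is that docAccumulation() is asynchronous but never signals completion, so the loop has nothing to wait for. A minimal sketch of the callback-passing variant (it assumes docAccumulation is changed to accept a done callback and to call it after its last statement finishes):
var async = require('async');
function callDocAccumulationSerial(rowCount) {
    var count = 0;
    async.whilst(
        function() { return count < rowCount; },
        function(next) {
            docAccumulation(function done(err) {
                count++;
                console.log('finished accumulation ' + count + ' of ' + rowCount);
                next(err); //only now does async.whilst start the next iteration
            });
        },
        function(err) {
            if (err) console.log(err);
        }
    );
}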

Can't send fetched data to my socket.io stream?

I'm trying to switch from single MySQL queries to a MySQL connection pool, so users can share connections, but I'm not familiar with this at all (I'm also new to Node.js/socket.io).
The following code is what I've done so far to send data to the socket every second as an array:
var
port = process.env.OPENSHIFT_NODEJS_PORT || 8000,
ip = process.env.OPENSHIFT_NODEJS_IP || '127.0.0.1',
app = require('http').createServer(handler),
fs = require('fs'),
request = require('request'),
mysql = require('mysql'),
moment = require('moment'),
tz = require('moment-timezone'),
pool = mysql.createPool({
connectionLimit: 100,
host: 'xxx',
user: 'xxx',
password: 'xxx',
database: 'xxx',
debug: false,
port: 3306}),
socketArray = [],
POLLING_INTERVAL = 1000,
pollingTimer;
moment.tz.setDefault("Europe/Berlin");
var io = require('socket.io').listen(app);
io.set('origins', '*:*');
function time()
{
output = new Date();
output = moment().format('(H:mm:ss.SS) ');
return output;
}
function handler(req,res)
{
res.setHeader("Access-Control-Allow-Origin", "*");
res.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
res.statusCode = 200;
res.connection.setTimeout(0);
res.end();
}
app.listen(port,ip);
function pollingLoop () {
if (socketArray.length === 0) {
// no connections, wait and try again
setTimeout(pollingLoop, POLLING_INTERVAL);
return; // continue without sending mysql query
}
pool.getConnection(function(err,connection){
if (err) { console.log({"code" : 100, "status" : "connection-db error"}); return; }
console.log('connected as id ' + connection.threadId);
console.log('socketArray length: ' + socketArray.length);
var selection =
"SELECT\
a.`id`,a.`product_id` AS pid,a.`random` AS nr,a.`price`,a.`price_end` AS pe,\
TIMESTAMPDIFF(SECOND,NOW(),a.`datetime`) AS duration,\
ABS(TIMESTAMPDIFF(SECOND,NOW(),b.`date`)) AS hb\
FROM `auctions` AS a\
LEFT JOIN `auctions_bids` AS b ON b.`auction_id` = a.`id`\
WHERE TIMESTAMPDIFF(SECOND,NOW(),a.`datetime`) > '-1'\
GROUP BY a.`id`\
ORDER BY `duration` DESC,`id` DESC LIMIT 15";
var streamArray = [], lg = '';
var query = connection.query(selection, function(err, results, rows){
lg += ('id: '+results[0].id+' ('+results[0].duration+') ');
if
(
((results[0].duration < 2 || results[0].duration <= results[0].nr) && (results[0].price <= results[0].pe))
||
((results[0].duration < 2 || results[0].duration <= results[0].nr) && (results[0].hb > 0 && results[0].hb < 30))
)
{
min = 3;
max = 5;
rand = Math.floor(Math.random()*(max-min+1)+min);
price = results[0].price+0.01;
price = price.toFixed(2);
pool.query('UPDATE `auctions` SET `random` = ?,`price` = ?, `datetime` = DATE_ADD(`datetime`,INTERVAL(17-TIMESTAMPDIFF(SECOND,NOW(),`datetime`))SECOND) WHERE `id` = ?',[rand, price, results[0].id]);
console.log(time()+'UPDATED id '+results[0].id+': random ('+rand+') price ('+price+'€)');
}
streamArray.push(results[0]);
updateSockets({ streamArray: streamArray });
console.log("auctions pushed: " + streamArray);
connection.release();
setTimeout(pollingLoop, POLLING_INTERVAL);
});
console.log(time()+lg+' C: '+socketArray.length);
});
}
pollingLoop();
io.sockets.on('connection', function(socket) {
socket.on('disconnect', function() {
clearTimeout(pollingTimer);
var socketIndex = socketArray.indexOf(socket);
console.log(time()+'SOCKET-ID = %s DISCONNECTED', socketIndex);
if (~socketIndex) { socketArray.splice(socketIndex, 1); }
});
console.log(time()+'NEW SOCKET CONNECTED!');
socketArray.push(socket);
});
var updateSockets = function(data) {
socketArray.forEach(function(tmpSocket) { tmpSocket.volatile.emit('stream', data); });
};
console.log(time()+'server.js executed\n');
But this doesn't send any data to the WebSocket. Is this approach (code structure) even correct? Previously I used query.on('result') to get data like this:
var selection = "SELECT * FROM auctions";
var query = mysql.query(selection), auctions = [];
query.on('result', function(auction) {
console.log('id: '+auction.id+' ('+auction.duration+') ');
});
This worked fine, showing the data via the auction rows, but how do I do this with my MySQL pool connection?
Also, after some seconds I'm getting an error that release() isn't even defined, even though it's listed in the mysql module documentation... so I think my whole logic is somehow incorrect.
Should I use connection.end() and .release() at all? Because the connection should never end.
Should I still use setInterval(function () { mysql.query('SELECT 1'); }, 5000); as answered in another Stack Overflow question (nodejs mysql Error: Connection lost The server closed the connection) to keep the connection alive here?
(I appreciate any tips or answers to even some of my questions! Better some answers than none, because I've found that this topic isn't covered much at all.)
EDIT:
Updated my whole code (see above). Output looks like this now: http://s21.postimg.org/avsxa87rb/output.jpg
So the stream gets the data, but nothing shows up in the console.log and there's this JavaScript error?
You should be creating a pool and using getConnection on that pool. Then, when you're done with the connection, release it. Additionally, you do not need to stop the pollingLoop or start one per socket connection; one loop is enough.
I didn't understand the if statement with the conditions, so I omitted it. It likely needs to go somewhere else.
var socketArr = [];
function handler(req, res) {
res.statusCode = 200;
res.connection.setTimeout(0);
res.end();
}
app.listen(port, ip);
var pool = mysql.createPool({
host : 'example.org',
user : 'bob',
password : 'secret'
});
function pollingLoop () {
if (socketArr.length === 0) {
// no connections, wait and try again
setTimeout(pollingLoop, 1000);
return; // continue without sending mysql query
}
pool.getConnection(function (err, connection) {
if (err) {
console.log({
"code": 100,
"status": "Error in connection database"
});
return;
}
console.log('connected as id ' + connection.threadId);
var selection = "SELECT * FROM auctions";
var streamArray = [],
lg = '';
var query = connection.query(selection, function (err, results, fields) {
lg += ('id: ' + results[0].id + ' (' + results[0].duration + ') ');
/*if (conditions) {
var query_update = connection.query('UPDATE `auctions` SET `price` = ? WHERE `id` = ?', [price, auction.id]);
console.log(time() + 'UPDATED id ' + auction.id + ': price (' + price + '€)');
}*/
streamArray.push(results);
updateSockets({
streamArray: streamArray
});
console.log("auctions pushed: " + streamArray);
connection.release();
setTimeout(pollingLoop, 1000);
});
console.log(time() + lg + ' C: ' + socketArr.length);
});
}
// start loop
pollingLoop();
io.sockets.on('connection', function (socket) {
socket.on('disconnect', function () {
var socketIndex = socketArr.indexOf(socket);
console.log(time() + 'SOCKET-ID = %s DISCONNECTED', socketIndex);
if (~socketIndex) {
socketArr.splice(socketIndex, 1);
}
});
console.log(time() + 'NEW SOCKET CONNECTED!');
socketArr.push(socket);
});
var updateSockets = function (data) {
socketArr.forEach(function (tmpSocket) {
tmpSocket.volatile.emit('stream', data);
});
};
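As for the keep-alive question: it isn't clear you still need the SELECT 1 trick once everything goes through the pool, since each cycle acquires a connection via getConnection. If you do keep seeing idle disconnects, the periodic ping mentioned in the question would look roughly like this with the pool (a sketch):
setInterval(function () {
    pool.query('SELECT 1', function (err) {
        if (err) console.log('keep-alive ping failed: ' + err.code);
    });
}, 5000);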

Apigee Usergrid: Mass delete option missing

I am using Usergrid to store data for a customer project. It has two collections, carShowrooms and cars. So far I am good. But I have a scenario where I have to refresh the master data of the cars collection. Every time I do this, I have to delete all the existing data in cars and replace it with the incoming car data from the master inventory system.
Now, from the docs at https://www.npmjs.org/package/usergrid, I see that I can only destroy one car at a time.
car.destroy(function(err){
if (err){
//error - car not deleted
//winston log - tbd
} else {
//success - car deleted
}
});
This is OK for smaller showrooms, but bigger multi-brand showrooms carry a variety of cars - sometimes up to 50 different varieties (8 car brands * approx. 8 different options).
Is there a mass delete option? Can someone please point me to the docs if I am missing something here?
P.S. I am new to Usergrid; if this is a duplicate question, please mark it so and point me to the right URL.
If you're so inclined, I've written a Node.js bulk deleter that runs delete requests in parallel. It takes approximately 3 minutes to delete 1000 entities.
Here's an always up-to-date gist, and a copy for SO:
// Installation
// 1. Install Node.js http://nodejs.org/download/
// 2. In Terminal, cd (navigate) to the directory where you saved this file
// 3. Run 'npm install request async'
// 4. Edit the script config below with your token, org, app, and collection name.
// 5. To run the script, at the Terminal prompt, run 'node api_baas_deleter.js'
// Config
var access_token = "{token}";
var as_basepath = "http://api.usergrid.com/{org}/{app}/"; // You need the trailing slash!
var collection = "{collection_name}";
// End Config
var request = require('request');
var async = require('async');
var authstring = "access_token=" + access_token;
var total = 0;
var startTime = Date.now();
function deleteRecords(callback) {
request.get({
url: as_basepath + collection + "?" + authstring,
json: true
}, function(e, r, body) {
if (body.count === undefined) {
var err = "Error: invalid endpoint. Check your basepath and collection name.";
console.log(err);
if (typeof(callback) === 'function') {
callback(err)
}
} else {
// console.log("Found " + body.count + " entities");
if (body.count > 0) {
var deletes = [];
for (var i = 0; i < body.count; i++) {
deletes.push({
url: as_basepath + collection + "/" + body.entities[i].uuid + "?" + authstring,
json: true
});
console.log("Deleting " + body.entities[i].uuid)
}
async.each(deletes, function(options, callback) {
request.del(options, function(e, r, body) {
if (r.statusCode === 200) {
total++;
}
callback(e);
});
}, function(err) {
setTimeout(function() {
deleteRecords(function(e) { //deleteRecords takes only a callback; passing `collection` here meant this callback never ran
callback(e);
});
}, 600); // Mandatory, since it seems to not retrieve entities if you make a request in < 600ms
});
} else {
var timeInMinutes = minutesFromMs(Date.now() - startTime);
console.log("Deleted " + total + " entities in " + timeInMinutes + " minute" + ((timeInMinutes > 1 || timeInMinutes < 1) ? "s" : ""));
if (typeof(callback) === 'function') {
callback()
}
}
}
});
}
function minutesFromMs(time) {
return Math.round(((time % 86400000) % 3600000) / 60000).toString();
}
deleteRecords();
There currently isn't a mass delete function in the Usergrid Node SDK, but you can create one. This is how I added a monkey-patched delete-by-query function into the Node SDK:
var _ = require('underscore'); //needed for _.isFunction below (lodash works as well)
Usergrid.client.prototype.delete = function(opts, callback) {
if (_.isFunction(opts)) { callback = opts; opts = undefined; }
if (!opts.qs.q) { opts.qs.q = '*'; }
var options = {
method: 'DELETE',
endpoint: opts.type,
qs: opts.qs
};
var self = this;
this.request(options, function (err, data) {
if (err && self.logging) {
console.log('entities could not be deleted');
}
if (typeof(callback) === 'function') {
callback(err, data);
}
});
};
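Usage would then look something like this (a sketch based on the function above; client is assumed to be your existing Usergrid.client instance and 'cars' is just a placeholder collection):
client.delete({ type: 'cars', qs: { q: '*' } }, function (err, data) {
    if (err) {
        console.log('bulk delete failed');
    } else {
        console.log('bulk delete request completed');
    }
});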
Hope that helps!
Scott
