I would like to use AngularJS promises to fill a grid with data, loading it row by row: as soon as the Node.js server (which uses the "mssql" module with streaming enabled) reads a single row from the DB, it should be sent back to the client.
On the client side I use these functions:
function asyncGreet() {
    var deferred = $q.defer();
    var _url = 'http://localhost:1212/test';
    $http.get(_url).
        then(function(result) {
            deferred.resolve(result);
        }, function(error) {
            deferred.reject(error);
        }, function(value) {
            deferred.notify(value); //<<-- In "value" I would like to get every single row
        });
    return deferred.promise;
}

$scope.btnTest = function () {
    var promise = asyncGreet();
    promise.then(function(res) {
        console.log('Success: ' + res.data + "\n");
    }, function(reason) {
        console.log('Failed: ' + reason);
    }, function(update) {
        console.log('Got notification: ' + update); //<<--
    });
};
On the Node.js server, this:
app.get('/test', function (req, res) {
    // sql for test
    var _query = 'select top 50 * from tb_test';
    var sql = require('mssql');
    var connection;
    var config = {
        user: 'testUser',
        password: '12345',
        server: 'localhost\\test',
        database: 'testDB',
        stream: true
    };

    connection = new sql.Connection(config, function (err) {
        var request = new sql.Request(connection);
        request.query(_query);

        request.on('recordset', function(columns) {
            // Emitted once for each recordset in a query
            //res.send(columns);
        });

        request.on('row', function(row) {
            res.write(JSON.stringify(row)); //<<-- I would like to intercept this event on the client side
                                            //     and get the result in my angularJS function via deferred.notify
        });

        request.on('error', function(err) {
            // May be emitted multiple times
            console.error(err);
        });

        request.on('done', function(returnValue) {
            // Always emitted as the last one
            res.end('DONE');
        });
    });
});
Can anyone help me with this?
Thanks!
I've solved it using socket.io :)
On the AngularJS side:
// count the rows, for test only
$scope.count = 0;
$scope.prova = function () {
    mySocket.emit('getTableByRow', {});
    mySocket.on('resRow', function (data) {
        if (data.event == 'ROW') {
            $scope.count += 1;
        } else {
            $scope.count += " !!DONE!! ";
        }
    });
};
On the Node.js side:
[ ... connection with DB ... ]

io.on('connection', function (socket) {
    socket.on('getTableByRow', function (data) {
        _getTableByRow(socket, data);
    });
});
The _getTableByRow function:
var _getTableByRow = function (socket, data) {
    // `request` is the sql.Request created on the DB connection established above
    var _query = 'select top 50 * from tb_test';
    request.query(_query);

    request.on('row', function(row) {
        // return only the ids for test
        socket.emit('resRow', {event: 'ROW', data: row.id.toString()});
    });

    request.on('done', function(returnValue) {
        socket.emit('resRow', {event: 'DONE'});
    });

    request.on('recordset', function(columns) {
        console.log(columns);
    });

    request.on('error', function(err) {
        socket.emit('resRow', {event: 'ERROR', data: err});
    });
}
This way, as soon as a row is read from the DB, it is immediately sent to the client :)
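One caveat: if mySocket is a plain socket.io client rather than an Angular-aware wrapper (e.g. from angular-socket-io), the resRow callback fires outside AngularJS's digest cycle, so $scope.count may not refresh in the view. A minimal sketch of wrapping the update in $scope.$apply:

mySocket.on('resRow', function (data) {
    // socket.io invokes this outside Angular's digest, so apply manually
    $scope.$apply(function () {
        if (data.event == 'ROW') {
            $scope.count += 1;
        }
    });
});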
Related
I am currently trying to make a 2-player HTML/JS board game and am using PeerJS to connect the two players' sessions together, but I can't get it to work.
Here is a quick test that I haven't been able to get to send/receive data, even though connecting works.
On the sending end:
var peer = new Peer();
var con;

function c() {
    con = peer.connect('id');
    con.on('error', function(err) { alert(err); });
    con.on('data', function(data) { console.log(data) });
};

function send() {
    con.on('open', function() {
        con.send('HELLO WORLD')
    });
}
and on the receiving end:
var peer = new Peer('id');

peer.on('connection', function(con) {
    console.log('connected')
    con.on('error', function(err) { alert(err) });
    con.on('open', () => {
        con.on('data', (data) => {
            console.log('Incoming data', data);
            con.send('REPLY');
        });
    });
});
You need to configure STUN and TURN servers for Peer. Here is a complete working example.
<script src="https://unpkg.com/peerjs#1.3.1/dist/peerjs.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.6.0/jquery.min.js" integrity="sha512-894YE6QWD5I59HgZOGReFYm4dnWc1Qt5NtvYSaNcOP+u1T9qYdvdihz0PPSiiqn/+/3e7Jo4EaG7TubfWGUrMQ==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script type="text/javascript">
var peer = new Peer({
config: {
'iceServers': [
{ url: 'stun:stun.l.google.com:19302' },
]
} /* Sample servers, please use appropriate ones */
});
peer.on("open", function (id) {
$("#chat").hide()
$("#peerid").text(id)
$("form#connect").submit(function () {
var remoteID = $(this).find('input[type="text"]').val()
console.log("connect to", remoteID);
var conn = peer.connect(remoteID)
gotConnection(conn)
return false;
})
})
peer.on("connection", gotConnection)
function gotConnection(conn) {
conn.on("error", function (err) {
console.error("connection error", err, conn)
})
conn.on("open", function () {
console.log("conn open", conn)
$("#remoteid").text(conn.peer)
$("form#connect").hide()
$("#chat").show()
$("#chat form").submit(function () {
var message = $(this).find('input[type="text"]').val()
console.log("send", message);
conn.send(message)
$(this).find('input[type="text"]').val("")
$("#messages").append($('<li>' + peer.id + ': ' + message + '</li>'))
return false;
})
conn.on("data", function (data) {
console.log("got", data);
$("#messages").append($('<li>' + conn.peer + ': ' + data + '</li>'))
})
})
}
</script>
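The script above expects some matching markup; the exact layout isn't part of the original example, but a minimal sketch reusing the same element IDs could look like this:

<!-- hypothetical markup matching the IDs the script expects -->
<p>Your ID: <span id="peerid"></span></p>
<form id="connect">
    <input type="text" placeholder="remote peer id">
    <input type="submit" value="Connect">
</form>
<div id="chat">
    <p>Chatting with <span id="remoteid"></span></p>
    <form>
        <input type="text" placeholder="message">
        <input type="submit" value="Send">
    </form>
    <ul id="messages"></ul>
</div>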
I have stored 2775 URLs in my mLab database, and I then fetch each URL to gather more information. I store all of the URLs in an array and pass them into a function for processing. However, the code only runs through about 1700 URLs, processes them, and then stops. Here is my code (sorry about the code, this is my first time using Stack Overflow):
Product.find({}, (err, foundProducts) => {
    if (err) {
        console.log("err " + err);
    } else {
        foundProducts.forEach(function(foundProduct) {
            var updateProduct = service.updateTikiProduct(foundProduct.url);
        });
    }
});
updateTikiProduct: function(url) {
    const options = {
        url: url,
        json: true
    };

    request(options, function(err, res, body) {
        // SOME code to crawl data
        Product.findOneAndUpdate({
            url: options.url
        }, {
            $set: {
                name: name,
                brand: brand,
                store: store,
                location: location,
                base_category: categoryType,
                top_description: topDescription,
                feature_description: featureDescription
            }
        }, {
            upsert: true,
            new: true
        }, (err, createdProduct) => {
            if (err) {
                reject(err);
            } else {
                var currentDate = new Date();
                if (!createdProduct.hasOwnProperty("price")) {
                    createdProduct.price.push({
                        current: currentPrice,
                        origin: originPrice
                    });
                    createdProduct.save();
                } else if (createdProduct.hasOwnProperty("price") &&
                           createdProduct.price[0].date.getDate() != currentDate.getDate()) {
                    createdProduct.price.push({
                        current: currentPrice,
                        origin: originPrice
                    });
                    createdProduct.save();
                    console.log("Update price");
                }
                counter++;
                console.log("url : " + options.url);
                console.log("Created product " + counter + " success!");
            }
        });
    });
}
I guess Mongo has limits on fetching items from the DB; you should try findAll or https://stackoverflow.com/a/3705615/4187058
I think your code is not processing all the elements because you are processing them all in parallel, which will stop processing at some point when memory gets full.
foundProducts.forEach(function(foundProduct) {
    var updateProduct = service.updateTikiProduct(foundProduct.url);
});
What you should do is process them in series. You can use async/await for that; make the following changes and it will work (see the note after the snippet):
for (let foundProduct of foundProducts) {
    var updateProduct = await service.updateTikiProduct(foundProduct.url);
}
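Two things are implied here and worth spelling out: await is only legal inside an async function, and updateTikiProduct must return a Promise for the loop to actually wait. A rough sketch of both changes (the promise wrapper is an assumption; keep your crawl logic where indicated):

Product.find({}, async (err, foundProducts) => {
    if (err) return console.log("err " + err);
    // process one URL at a time instead of all 2775 in parallel
    for (let foundProduct of foundProducts) {
        await service.updateTikiProduct(foundProduct.url);
    }
});

// in the service: wrap the request in a Promise so callers can await it
updateTikiProduct: function(url) {
    return new Promise((resolve, reject) => {
        request({ url: url, json: true }, (err, res, body) => {
            if (err) return reject(err);
            // ... crawl the data and update the Product as before, then:
            resolve(body);
        });
    });
}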
I have an application which needs a data.json file in order to draw a d3 graph. However, I need to update that file on a click event:
d3.select("#updatebutton").on("click", function(e) {
try{
$.get('https://localhost:4444/data', function(data) {
});
}
catch (e) {
alert('Error: ' + e);
}
});
Above is the update button with the jQuery call. In my app.js file I am using it like this:
app.get('/data', function(req, res, next) {
    try {
        getJSON();
    }
    catch(e) {
        alert('Error');
    }
});
The getJSON() function receives data over an HTTPS request, processes it, and saves it to data.json:
function getJSON() {
    var req = https.get(options, function(response) {
        // handle the response
        var res_data = '';
        response.on('data', function(chunk) {
            res_data += chunk;
        });
        response.on('end', function() {
            // process data
            // save to file
            fs.writeFile(filePath, JSON.stringify(finalJson), function(err) {
                if (err)
                    throw err;
            });
        });
    });
}
However, if I click my update button repeatedly after a few seconds, it seems that data.json is not overwritten; the file gets bigger and bigger, meaning that data is appended to the file instead of overwriting it.
What am I doing wrong?
Thanks for the help.
Since you use app.get as your route, I guess you are using Express.
In your routes definition:
var getData = (function() {
    var callbacks = [];

    function executeCallbacks(err, data) {
        for (var i = 0; i < callbacks.length; i++) {
            callbacks[i](err, data);
        }
        callbacks = [];
    }

    return function(cb) {
        callbacks.push(cb);
        // only start a new request if one isn't already in flight
        if (callbacks.length === 1) {
            https.get(options, function(response) {
                // handle the response
                var res_data = '';
                response.on('data', function(chunk) {
                    res_data += chunk;
                });
                response.once('end', function() {
                    // process res_data into finalJson here
                    // save to file
                    fs.writeFile(filePath, JSON.stringify(finalJson), function(err) {
                        if (err) {
                            // call error handler
                            return executeCallbacks(err);
                        }
                        executeCallbacks(null, finalJson);
                    });
                });
                response.once('error', function(err) {
                    return executeCallbacks(err);
                });
            });
        }
    };
})();
app.get('/data', function(req, res, next) {
    getData(function(err, data) {
        if (err) {
            return next(err);
        }
        res.json(data); // actually send the data back to the browser
    });
});
In your browser js file:
d3.select("#updatebutton").on("click", function(e) {
$.get( 'https://localhost:4444/data', function(data) {
alert( "success" );
var json = JSON.parse(data);
})
.fail(function() {
alert( "error" );
});
});
I see you use try/catch around callback functions. A callback fires after the original function has completed, so a try/catch around it won't catch errors thrown inside the callback.
Read: https://strongloop.com/strongblog/async-error-handling-expressjs-es7-promises-generators/
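A quick illustration of why (a minimal sketch): the catch block has already been passed by the time the callback runs, so an error thrown inside the callback never reaches it:

var fs = require('fs');

try {
    fs.readFile('data.json', function(err, data) {
        // runs later, on another tick; throwing here crashes the process
        if (err) throw err;
    });
} catch (e) {
    // never reached for errors thrown inside the callback above;
    // handle `err` inside the callback (or call next(err)) instead
    console.log('caught: ' + e);
}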
I have db.js with DB-related functions. I want to make a call to db.js and wait until it returns the query result.
But my code runs on before the DB call has produced its result. Can anyone please help me solve this?
Code sample:
var Q = require('q');
var db = require("./dbaccess.js");

function waitfor(ms) {
    var deferred = Q.defer();
    setTimeout(function() {
        deferred.resolve(db);
    }, 5000);
    return deferred.promise;
}

waitfor(2000).done(function(dbcall) {
    console.log('contrived example ' + dbcall.query1());
});
dbaccess.js:
var sql = require('mssql');

var config = {
    user: 'xx',
    password: 'xxx',
    server: 'aaa',
    database: 'RequestCenter',
    stream: true,
}

this.query1 = function() {
    sql.connect(config, function(err) {
        var result;
        var request = new sql.Request();
        request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
        request.on('row', function(row) {
            console.log(row.Name);
            result = row.Name;
        });
        request.on('error', function(err) {
            console.log("err : " + err);
        });
        request.on('done', function(returnValue) {
            console.log("done");
        });
        return result;
    });
    sql.on('error', function(err) {
        console.log("sql err : " + err);
    });
}
Output:
contrived example undefined
in db: AutomationClient
Expected output:
in db: AutomationClient
contrived example AutomationClient
Not sure why your main code passes 2000 for the ms argument and then does a 5000 ms timeout. In fact, why are you doing a timeout at all? If it was an attempt to wait for the db function to complete, you don't need it.
If you must use promises (personally I'd use a simple callback for such simple code), I get that you want to learn how to use them.
Your original code looked like it was attempting to return the value of the last row.Name; this code returns an array of row.Name values. Not knowing the type of data you'd be getting, I don't know which is correct.
dbaccess.js
var Q = require('q');
var sql = require('mssql');

var config = {
    user: 'xx',
    password: 'xxx',
    server: 'aaa',
    database: 'RequestCenter',
    stream: true,
}

this.query1 = function() {
    var deferred = Q.defer();
    sql.connect(config, function(err) {
        var result = []; // return all rows - modify as required
        var request = new sql.Request();
        request.query("select * from dbo.AcAccount where Name like 'AutomationCli%' ");
        request.on('row', function(row) {
            console.log(row.Name);
            result.push(row.Name);
        });
        request.on('error', function(err) {
            console.log("err : " + err);
            deferred.reject(err);
        });
        request.on('done', function(returnValue) {
            deferred.resolve(result);
        });
    });
    sql.on('error', function(err) {
        console.log("sql err : " + err);
        deferred.reject(err);
    });
    return deferred.promise;
}
Code sample:
db = require("./dbaccess.js");

db.query1().then(function(result) {
    console.log('contrived example ' + result);
});
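Since query1 can reject (on request or connection errors), it's worth passing a rejection handler too; a minimal sketch:

db.query1().then(function(result) {
    console.log('contrived example ' + result);
}, function(err) {
    // reached when query1 calls deferred.reject
    console.log('query failed: ' + err);
});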
function die(err) {
    console.log('Uh oh: ' + err);
    process.exit(1);
}

var box, cmds, next = 0,
    cb = function(err) {
        if (err)
            die(err);
        else if (next < cmds.length)
            cmds[next++].apply(this, Array.prototype.slice.call(arguments).slice(1));
    };

cmds = [
    function() { imap.connect(cb); },
    function() { imap.openBox('INBOX', false, cb); },
    function(result) {
        box = result;
        imap.search([ 'UNSEEN', ['SINCE', 'April 5, 2011'] ], cb);
    },
    function(results) {
        var msgCache = {},
            fetch = imap.fetch(results, { request: { headers: ['from', 'to', 'subject', 'date'] } });
        console.log('Now fetching headers!');
        fetch.on('message', function(msg) {
            msg.on('end', function() {
                msgCache[msg.id] = { headers: msg.headers };
                console.log(msg.headers.date[0]);
                console.log(msg.headers.to[0]);
                console.log(msg.headers.from[0]);
                console.log(msg.headers.subject[0]);
                var from = /(.*)?<(.*?)>/.exec(msg.headers.from[0]);
                console.log(from[1]); // from name
                console.log(from[2]); // from address
            });
        });
        fetch.on('end', function() {
            console.log('Done fetching headers!');
            console.log('Now fetching bodies!');
            fetch = imap.fetch(results, { request: { headers: false, body: '1' } });
            fetch.on('message', function(msg) {
                msg.data = '';
                msg.on('data', function(chunk) {
                    msg.data += chunk;
                });
                msg.on('end', function() {
                    msgCache[msg.id].body = msg.data;
                    console.log(msg.data);
                });
            });
            fetch.on('end', function() {
                console.log('Done fetching bodies!');
                cb(undefined, msgCache);
            });
        });
    },
    function(msgs) {
        // Do something here with msgs, which contains the headers and
        // body (parts) of all the messages you fetched
        // console.log(msgs);
        //imap.logout(cb);
        imap.on('mail', function () {
            console.log("New Email Has Arrived!");
            next = 0;
            cb();
        });
    }
];

cb();
When a new e-mail arrives (imap.on('mail', ...)), I want it to run the cb() chain again. However, it doesn't do anything after the console.log.
What am I doing wrong?
Thanks
Reset your next counter, and your imap.on('mail', ...) registration should be outside of cmds so that the handler isn't bound again, and again, and again...
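Concretely, that means something like this outside the cmds array (a sketch reusing the imap, next, and cb variables from the question):

// register once, at startup, instead of inside the last command
imap.on('mail', function() {
    console.log("New Email Has Arrived!");
    next = 0; // reset so cb() walks cmds from the beginning again
    cb();
});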
There are some modules around for "flattening" async operations so they don't end in callback hell, e.g. async.
Maybe this could help you.
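For instance, the first steps of the chain could look like this with async.series (a rough sketch, assuming the same imap object as above; each step gets a callback to signal completion):

var async = require('async');

async.series([
    function(done) { imap.connect(done); },
    function(done) { imap.openBox('INBOX', false, done); }
], function(err, results) {
    if (err) return die(err);
    var box = results[1]; // the opened mailbox
    // continue with imap.search / imap.fetch from here
});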