Got the code working, but got into a minor problem. When it prints
${this.lastID.creator}$,${this.lastID.Team1}$,${this.lastID.Team2}$.
It comes out as undefined!
They are getting printed as undefined. Meanwhile it gets saved correctly in the database. What should I do or change? Thanks in advance!
c=config.slots.length;while(c)b=Math.random()*c--|0,d=config.slots[c],config.slots[c]=config.slots[b],config.slots[b]=d
var db = new sqlite3.Database('Matches');
var dt = new Date();
var n = dt.toLocaleTimeString();
db.run(`INSERT INTO Match VALUES(?,?,?,?,?)`, [`${config.slots[0]}, ${config.slots[1]}, ${config.slots[2]} |`, ` ${config.slots[3]}, ${config.slots[4]}, ${config.slots[5]} |`, " |", n , ` ${config.creator[0]}`], function(err) {
if (err) {
return console.log(err.message);
} // get the last insert id
console.log(`A row has been inserted with rowid ${this.lastID}`);
message.channel.send({embed: {
color: 3447003,
title: "Game has started!",
fields: [{
name: `Match Lobby #${this.lastID} creator:`,
value: `${this.lastID.creator}`
},
{
name: "**Team 1:**",
value: `${this.lastID.Team1}`
},
{
name: "**Team 2:**",
value: `${this.lastID.Team2}`
}
],
}
});
});
Related
I'm using the DataTable plugin, populating the table with data from Firebase Firestore database.
But I'm getting multiple data-set arrays
which continues till it reaches the length of data in the db.
Also, getting some errors
DataTables warning: table id=dataTable - Requested unknown parameter '5' for row 0, column 5. For more information about this error, please see http://datatables.net/tn/4
and
DataTables warning: table id=dataTable - Cannot reinitialise DataTable. For more information about this error, please see http://datatables.net/tn/3
But after I skip through all the errors, I get the Firestore data in the table.
Here is the code I'm using to get the data from Firestore and populating in the table;
$(document).ready(function () {
var dataSet = new Array();
var query = db.collection('news').orderBy('timestamp');
let observer = query.onSnapshot(snapshot => {
let changes = snapshot.docChanges();
changes.forEach(change => {
if (change.type == 'added') {
dataSet.push([change.doc.data().newsTitle,
change.doc.data().newsContent,
change.doc.data().newsImage,
change.doc.data().newsPublisher,
change.doc.data().timestamp.toDate()]);
console.log("data", dataSet);
const dataTable = $('#dataTable').DataTable({
data: dataSet,
order: [0, 'desc'],
columns: [
{ title: 'Title' },
{ title: 'Content' },
{ title: 'Picture' },
{ title: 'Publisher' },
{ title: 'Date' },
{ title: 'Action' }
]
});
dataTable.clear();
dataTable.rows.add(dataSet);
dataTable.draw();
}
})
});
})
What can I do to resolve this?
Figured it out, just removed the dataTable.rows.add(dataSet) from the loop.
$(document).ready(function () {
var dataSet = new Array();
var query = db.collection('news').orderBy('timestamp');
let observer = query.onSnapshot(snapshot => {
let changes = snapshot.docChanges();
changes.forEach(change => {
if (change.type == 'added') {
dataSet.push([change.doc.data().newsTitle,
change.doc.data().newsContent,
change.doc.data().newsImage,
change.doc.data().newsPublisher,
change.doc.data().timestamp.toDate()]);
}
})
console.log("data", dataSet);
const dataTable = $('#dataTable').DataTable({
data: dataSet,
order: [0, 'desc'],
columns: [
{ title: 'Title' },
{ title: 'Content' },
{ title: 'Picture' },
{ title: 'Publisher' },
{ title: 'Date' },
{ title: 'Action' }
]
});
dataTable.clear();
dataTable.rows.add(dataSet);
dataTable.draw();
});
})
But I'm still getting the alert error below twice, any ideas?
DataTables warning: table id=dataTable - Requested unknown parameter '5' for row 0, column 5. For more information about this error, please see http://datatables.net/tn/4
I have loaded a saved search in my beforeLoad userevent script.
When I try to run the search after applying one filter, Netsuite throws an UNEXPECTED_ERROR. Can anyone suggest any reason for this error?
var poRec = context.newRecord;
var countItem = poRec.getLineCount({ sublistId: 'item' });
var reqValue = poRec.getValue({ fieldId:'custbody_reqfield' });
var itemSearch = search.load({
id: 'customsearch_anis_item_search'
});
log.error('itemSearch', itemSearch);
if (!!reqValue) {
itemSearch.filters.push(
search.createFilter({
name: "custitem_an_test_field",
operator: search.Operator.ANYOF,
values: reqValue
})
);
}
var results = itemSearch.run().getRange({ start: 0, end: 1000 });//ERROR ON THIS LINE(line:181)
The Error is as follows:
{
"type":"error.SuiteScriptError",
"name":"UNEXPECTED_ERROR",
"message":null,
"stack":
[
"getRange(N/searchObject)",
"myAfterSubmit(/SuiteScripts/sample_ue.js:181)",
"afterSubmit(/SuiteScripts/sample_ue.js:35)"
],
"cause":
{
"type":"internal error",
"code":"UNEXPECTED_ERROR",
"details":null,
"userEvent":"aftersubmit",
"stackTrace":
[
"getRange(N/searchObject)",
"myAfterSubmit(/SuiteScripts/sample_ue.js:181)",
"afterSubmit(/SuiteScripts/sample_ue.js:35)"
],
"notifyOff":false
},
"id":"f414b115-c840-40de-8cf8-c0148da0c506-2d323032302e30322e3035",
"notifyOff":false,
"userFacing":false
}
You need to replace the search.filters array or use a filter expression.
var newFilters = [];
newFilters.push(
search.createFilter({
name: "custitem_an_test_field",
operator: search.Operator.ANYOF,
values: reqValue
}));
itemSearch.filters = newFilters;
// OR
itemSearch.filters = ["custitem_an_test_field", "ANYOF", reqValue];
I have got a WebSocket client running, that gets the streams of data from the Binance WebSocket API. The client accesses the stream and updates a Datatable with the stream data. But due to the big amount of data being updated to the table, it has made the DataTable very slow. And due to the amount of memory being used by the WebSocket it causes the page to reload if it is left open for too long
let trackedStreams = [];
var table = $('#example').DataTable( {
columns: [
{ title: "Pair", data: "pair" },
{ title: "Last Price", data: "lprice" },
{ title: "24h Change", data: "24change" },
{ title: "24h High", data: "24high" },
{ title: "24h Low", data: "24low" }
]
});
let ws = new WebSocket("wss://stream.binance.com:9443/ws/!ticker#arr");
ws.onopen = function() {
console.log("Binance connected...");
};
ws.onmessage = function(evt) {
try {
let msgs = JSON.parse(evt.data);
if (Array.isArray(msgs)) {
for (let msg of msgs) {
handleMessage(msg);
}
} else {
handleMessage(msgs)
}
} catch (e) {
console.log('Unknown message: ', e);
}
}
ws.onclose = function() {
console.log("Binance disconnected");
}
function handleMessage(msg) {
const stream = msg.s;
if (trackedStreams.indexOf(stream) === -1) {
//if symbol doesnt exist in array
var tmpArray = [msg.s, msg.c, msg.P, msg.h, msg.l];
trackedStreams.push(stream);
table.rows.add([ {
"pair": msg.s,
"lprice": msg.c,
"24change": msg.P,
"24high": msg.h,
"24low": msg.l
} ])
.draw()
.nodes()
.to$()
.addClass( msg.s );
} else {
var selectedRow = table.rows('.' + msg.s);
console.log(selectedRow);
table.row( selectedRow ).data( {
"pair": msg.s,
"lprice": msg.c,
"24change": msg.P,
"24high": msg.h,
"24low": msg.l
} ).draw();
}
My question is, is there a better approach to this method to make it more efficient, to make the DataTables more responsive and to make sure the WebSocket is not using too much memory?
Here is my function used to retrieve data from the database depending on the parameter idCats:
this.getSubcat = function(){
//Load products on scroll.
this.subscribe('SubcatIndex', () => [ $stateParams.idCats, self.loaded ], {
onReady: function() {
Meteor.call('allSubcats', $stateParams.idCats, function(err, count) {
self.allsubcats = count;
self.limit = self.loaded;
console.log("Test Log: " + $stateParams.idCats);
self.subcats = Products.find({
catLinkID : $stateParams.idCats
},{
fields: {
_id: true,
name: true,
catLinkID: true,
idCat: true,
image: true,
listingsCount: true,
productOffersCount: true,
productSoldCount: true
}
}).fetch();
window.localStorage.setItem('subcats', JSON.stringify(self.subcats) );
self.contentLoaded = true;
self.noPosts = 'No posts available.';
$ionicLoading.hide();
return;
});
},
onStop: function(err){
if(err){
self.contentLoaded = true;
self.noPosts = "No internet connection.";
console.log(JSON.stringify(err));
return;
}
}
});
}
this.getSubcat();
When i change this line:
self.subcats = Products.find({
catLinkID : $stateParams.idCats
}
To:
self.subcats = Products.find({
catLinkID : 7 // 7 for example
}
It is working well ! But as soon as I replace it with $stateParams.idCats, I receive this message coming from the function: No posts available.
Note that there are products using the idCats: 7.
When I log it:
console.log("Test Log: " + $stateParams.idCats);
This returns the same number: Test Log: 7.
If you have any suggestion or a starting point to solve this issue, it will be welcome !
Notice that there are no error in the Console (Both server and client side).
Thank you.
I am using cron based schedular to hit an external api that gives me json data every 2mins. I write data to the file and then read it, clean the data and add it to a collection in mongodb. It works fine the first time but second time I get an error like this:-
C:\smatserver\smatserver\deals.js:74
throw err;
^
SyntaxError: ./files/localdeals.json: Unexpected end of input
at Object.parse (native)
at C:\smatserver\smatserver\node_modules\jsonfile\index.js:31:18
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:380:3)
Here is the deals.js code where I am hitting external API and saving in file and reading it and trying to push it to mongodb:-
var schedule=require('node-schedule');
var request=require('request');
var jsonfile = require('jsonfile');
var _=require('underscore');
var LocalDeals=require('./models/localdeal.js');
//run job every 2 minutes
var dataSchedular=schedule.scheduleJob('*/2 * * * *', function(){
var local_deals_file='./files/localdeals.json';
var local_deals_url='http://api.8coupons.com/v1/getdeals?key=API_KEY&mileradius=10&limit=100&orderby=radius';
request({url:local_deals_url,
json:true
}, function (error, response, body) {
if (!error && response.statusCode === 200) {
jsonfile.writeFile(local_deals_file, body, {spaces: 2}, function(err) {
console.error(err);
});
console.log('File write success for '+local_deals_file);
//problem area
jsonfile.readFile(local_deals_file, function(err, local_deals_obj) {
//save in mongodb collection
if(err){ //this error gets thrown on 2nd read
throw err;
}
local_deals_obj.forEach(function(local_deal_obj){
var local_deal_filtered=_.pick(local_deal_obj, 'name', 'address', 'storeID','chainID','phone','state','city','ZIP','URL','storeURL',
'dealTitle','dealinfo','expirationDate','postDate','showImageStandardBig','showImageStandardSmall','providerName','DealTypeID','categoryID',
'lat','lon','distance','dealOriginalPrice','dealPrice','dealSavings','dealDiscountPercent');
var new_local_deal=new LocalDeals({
name : local_deal_filtered.name,
address : local_deal_filtered.address,
storeID : local_deal_filtered.storeID,
chainID : local_deal_filtered.chainID,
phone : local_deal_filtered.phone,
state : local_deal_filtered.state,
city : local_deal_filtered.city,
ZIP : local_deal_filtered.ZIP,
URL : local_deal_filtered.URL,
storeURL : local_deal_filtered.storeURL,
dealTitle : local_deal_filtered.dealTitle,
dealinfo : local_deal_filtered.dealinfo,
expirationDate:local_deal_filtered.expirationDate,
postDate : local_deal_filtered.postDate,
showImageStandardBig: local_deal_filtered.showImageStandardBig,
showImageStandardSmall: local_deal_filtered.showImageStandardSmall,
providerName: local_deal_filtered.providerName,
DealTypeID: local_deal_filtered.DealTypeID,
categoryID: local_deal_filtered.categoryID,
lat: local_deal_filtered.lat,
lon: local_deal_filtered.lon,
distance: local_deal_filtered.distance,
dealOriginalPrice: local_deal_filtered.dealOriginalPrice,
dealPrice: local_deal_filtered.dealPrice,
dealSavings: local_deal_filtered.dealSavings,
dealDiscountPercent: local_deal_filtered.dealDiscountPercent
});
LocalDeals.saveLocalDeals(new_local_deal,function(err,deal){
if(err){
throw err;
}else{
//console.log("local deals added to mongodb");
}
});
});
});
}
else{
console.log(error);
}
});
});
module.exports=dataSchedular;
And here is my model file called localdeal.js:-
var mongoose=require('mongoose');
mongoose.connect('mongodb://localhost/smat');
var db=mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function() {
console.log('Successfully connected to mongodb');
});
var localdealsSchema=mongoose.Schema({
name: {
type: String
},
address:{
type: String
},
storeID:{
type: String
},
chainID:{
type: String
},
phone:{
type: String
},
state:{
type: String
},
city:{
type: String
},
ZIP:{
type: String
},
URL:{
type: String
},
storeURL: {
type: String
},
dealTitle:{
type: String
},
dealinfo:{
type: String
},
expirationDate:{
type: String
},
postDate:{
type: String
},
showImageStandardBig:{
type: String
},
showImageStandardSmall:{
type: String
},
providerName:{
type: String
},
DealTypeID:{
type: Number
},
categoryID:{
type: Number
},
lat:{
type: Number
},
lon:{
type: Number
},
distance:{
type: Number
},
dealOriginalPrice:{
type: Number
},
dealPrice:{
type: Number
},
dealSavings:{
type: Number
},
dealDiscountPercent:{
type: Number
}
});
var LocalDeals=module.exports=mongoose.model('LocalDeals',localdealsSchema);
module.exports.saveLocalDeals=function(newLocalDeals, callback){
newLocalDeals.save(callback);
}
Never use setTimeout in this case !
writeFile return callback ! ^^
Simply put the readFile in the writeFile callback
jsonfile.writeFile(local_deals_file, body, {spaces: 2}, function(err) {
if(err){
// error case
}
else
{
jsonfile.readFile(local_deals_file, function(err, local_deals_obj) {
if(err){
// error case
}
else
{
// do something
}
});
}
});
But while reading the file ? You can simply parse the response body :
jsonfile.writeFile(local_deals_file, body, {spaces: 2}, function(err) {
if(err){
// error case
}
else
{
var obj = null;
try
{
obj = JSON.parse(body);
}
catch(err){
obj = null;
}
if(obj === null)
{
// error case
}
else
{
// insert in mongo
}
}
});
In the part where I am reading the file, I needed a setTimeout because it takes some time to write to the file. So here is my change:-
setTimeout(function(){
jsonfile.readFile(local_deals_file, function(err, local_deals_obj) {
//the code same as above
})
}, 1000); //waiting a second to read file