I have a topic created on the Kafka server. Now I am creating a consumer to read the topic's messages from the server, but I don't see any data when I try to consume messages using consumer.on('message'). Any idea what is implemented wrong in the code below? Do I need to set an offset?
consumer.js
// consumer.js
//
// kafka-node HighLevelConsumer that connects through ZooKeeper and logs the
// topic of every message received from 'test-1'.
var kafka = require('kafka-node');
var config = require('./config.js');
var zk = require('node-zookeeper-client');
var kafkaConn = config.kafkaCon.dit;
var HighLevelConsumer = kafka.HighLevelConsumer;
var Client = kafka.Client;

// Builds the client + consumer and wires up the handlers.
// No parameters; side effect only (starts consuming).
function start() {
    // Bug fix: `topics` was assigned without `var`, leaking an implicit global.
    var topics = [{
        topic: 'test-1'
    }];
    var groupId = 'push';
    var clientId = "consumer-" + Math.floor(Math.random() * 10000);
    var options = {
        autoCommit: true,
        fetchMaxWaitMs: 100,
        fetchMaxBytes: 10 * 1024 * 1024,
        groupId: groupId
    };
    console.log("Started consumer: ", clientId);
    // Bug fix: the original created two Client instances, the first only to
    // read its connectionString back out. A single client is sufficient.
    var client = new Client(kafkaConn, clientId);
    var consumer = new HighLevelConsumer(client, topics, options);
    consumer.on('message', function(message) {
        var topic = message.topic;
        console.log('Message', topic);
    });
    // Surface connection/consumer errors instead of hanging silently —
    // a silent hang here is the usual symptom of a broker/ZK misconfiguration.
    consumer.on('error', function(err) {
        console.error('Consumer error:', err);
    });
};
start();
// node-rdkafka consumer example (CloudKarafka-style configuration).
const Kafka = require("node-rdkafka");

// librdkafka configuration; the SASL/SSL settings are left commented out
// for a plain localhost broker.
const kafkaConf = {
  "group.id": "cloudkarafka-example",
  "metadata.broker.list": ["localhost:9092"],
  "socket.keepalive.enable": true,
  //"security.protocol": "SASL_SSL",
  //"sasl.mechanisms": "SCRAM-SHA-256",
  //"sasl.username": process.env.CLOUDKARAFKA_USERNAME,
  //"sasl.password": process.env.CLOUDKARAFKA_PASSWORD,
  "debug": "generic,broker,security",
  "enable.auto.commit": false,
};
//const prefix = process.env.CLOUDKARAFKA_USERNAME;
const topics = ["topicName"];

// Start from the earliest offset when no committed offset exists.
const consumer = new Kafka.KafkaConsumer(kafkaConf, {
  "auto.offset.reset": "beginning",
});

// Register every handler before connecting so no events are missed.
consumer.on("error", (err) => {
  console.error(err);
});
consumer.on("ready", (arg) => {
  console.log(`Consumer ${arg.name} ready`);
  consumer.subscribe(topics);
  consumer.consume();
});
consumer.on("data", (m) => {
  console.log(m.value.toString());
});
consumer.on("disconnected", (arg) => {
  process.exit();
});
consumer.on("event.error", (err) => {
  console.error(err);
  process.exit(1);
});
consumer.on("event.log", (log) => {
  console.log(log);
});

consumer.connect();
A Kafka Consumer can be written with the kafka-node npm module. For my use case, my consumer was a separate Express server which listened to events and stored them in a database.
// kafka-node consumer that persists incoming web events to a database via a
// Sequelize model (`Events`, defined elsewhere in the project).
import kafka from "kafka-node"

const client = new kafka.Client("http://localhost:2181");
const topics = [
  {
    topic: "webevents.dev"
  }
];
const options = {
  autoCommit: true,
  fetchMaxWaitMs: 1000,
  fetchMaxBytes: 1024 * 1024,
  encoding: "buffer"
};
const consumer = new kafka.HighLevelConsumer(client, topics, options);

consumer.on("message", function(message) {
  // Read string into a buffer.
  // Bug fix: `new Buffer(...)` is deprecated (and unsafe); use Buffer.from.
  var buf = Buffer.from(message.value, "binary");
  var decodedMessage = JSON.parse(buf.toString());
  //Events is a Sequelize Model Object.
  return Events.create({
    id: decodedMessage.id,
    type: decodedMessage.type,
    userId: decodedMessage.userId,
    sessionId: decodedMessage.sessionId,
    data: JSON.stringify(decodedMessage.data),
    createdAt: new Date()
  });
});

consumer.on("error", function(err) {
  console.log("error", err);
});

// Graceful shutdown: commit offsets and close the consumer on Ctrl-C.
process.on("SIGINT", function() {
  consumer.close(true, function() {
    process.exit();
  });
});
more info in https://nodewebapps.com/2017/11/04/getting-started-with-nodejs-and-kafka/
Related
I am new to GTM + GA. I am trying to display Google Analytics (GA4) reports on my web page. I created an OAuth client ID in the Google Cloud console and completed the other settings there. Through JavaScript code I am trying to get an access token from the Google API, and I am getting the exception below.
After successful authentication I will integrate GA reports with my web page. Below is my JavaScript code for getting an access token.
// Obtains an OAuth2 access token via a local-redirect browser flow and runs a
// GA4 report (active users by city) with the Analytics Data API.
function main(propertyId = 'YOUR-GA4-PROPERTY-ID') {
  propertyId = '347415282';
  const {
    OAuth2Client
  } = require('google-auth-library');
  const {
    grpc
  } = require('google-gax');
  // Bug fix: BetaAnalyticsDataClient was referenced below but never
  // required, which throws a ReferenceError when building the client.
  const {
    BetaAnalyticsDataClient
  } = require('@google-analytics/data');
  const http = require('http');
  const url = require('url');
  const open = require('open');
  const destroyer = require('server-destroy');
  const keys = require('./oauth2.keys.json');
  const SCOPES = ['https://www.googleapis.com/auth/analytics.readonly'];

  // Wraps the authenticated OAuth2 client in gRPC channel credentials so the
  // Data API client uses user (rather than service-account) credentials.
  function getAnalyticsDataClient(authClient) {
    const sslCreds = grpc.credentials.createSsl();
    const credentials = grpc.credentials.combineChannelCredentials(
      sslCreds,
      grpc.credentials.createFromGoogleCredential(authClient));
    return new BetaAnalyticsDataClient({
      sslCreds: credentials,
    });
  }

  // Opens the consent page in a browser and resolves with an authorized
  // OAuth2 client once the local redirect server receives the auth code.
  function getOAuth2Client() {
    return new Promise((resolve, reject) => {
      const oAuth2Client = new OAuth2Client(
        keys.web.client_id,
        keys.web.client_secret,
        'http://localhost:3000/oauth2callback');
      const authorizeUrl = oAuth2Client.generateAuthUrl({
        access_type: 'offline',
        scope: SCOPES.join(' '),
      });
      const server = http
        .createServer(async (req, res) => {
          try {
            if (req.url.indexOf('/oauth2callback') > -1) {
              const qs = new url.URL(req.url, 'http://localhost:3000')
                .searchParams;
              const code = qs.get('code');
              console.log(`Code is ${code}`);
              res.end(
                'Authentication successful! Please return to the console.');
              server.destroy();
              // Exchange the one-time code for access/refresh tokens.
              const r = await oAuth2Client.getToken(code);
              oAuth2Client.setCredentials(r.tokens);
              console.info('Tokens acquired.');
              resolve(oAuth2Client);
            }
          } catch (e) {
            reject(e);
          }
        })
        .listen(3000, () => {
          console.info(`Opening the browser with URL: ${authorizeUrl}`);
          open(authorizeUrl, {
            wait: false
          }).then(cp => cp.unref());
        });
      // server-destroy lets us tear the server down after the redirect.
      destroyer(server);
    });
  }

  // Runs the report and prints one row per city.
  async function runReport() {
    const oAuth2Client = await getOAuth2Client();
    const analyticsDataClient = getAnalyticsDataClient(oAuth2Client);
    const [response] = await analyticsDataClient.runReport({
      property: `properties/${propertyId}`,
      dateRanges: [{
        startDate: '2020-03-31',
        endDate: 'today',
      },
      ],
      dimensions: [{
        name: 'city',
      },
      ],
      metrics: [{
        name: 'activeUsers',
      },
      ],
    });
    console.log('Report result:');
    response.rows.forEach(row => {
      console.log(row.dimensionValues[0], row.metricValues[0]);
    });
  }

  runReport();
  process.on('unhandledRejection', err => {
    console.error(err.message);
    process.exitCode = 1;
  });
  // Bug fix: the original snippet was missing this closing brace for main(),
  // leaving the snippet syntactically invalid.
}
main(...process.argv.slice(2));
Please let me know how to get rid of this issue.
Regards,
Prabhash
I'm trying to deploy a Node.js app on my host's cPanel, but whenever I try to run it, the HTML opens but the data is not being sent from Node.js to the client side; I just get this error in the console:
index.js:83
GET http://streamedbot.xyz/socket.io/?EIO=3&transport=polling&t=OFQdTfs 404 (Not Found)
this is my app.js
const fetchP2PData = require("./fetchP2PData.js");
const app = require("express")();
const path = require("path");
const Datastore = require("nedb");
// const http = require("http").createServer(app);
// const io = require("socket.io")(http);
const { createServer } = require('http')
const { Server } = require('socket.io')
const server = createServer(app)
const io = new Server(server, {
cors: {
origin: "*",
methods: ["GET", "POST"],
credentials: false,
transports: ['websocket', 'polling'],
},
allowEIO3: true
})
app.get('/bkpapp/', function(req,res){
res.sendFile(__dirname + '/index.html');
});
let prices = [];
let openingPrice = 0 ;
const priceDb = new Datastore("./price.db");
priceDb.loadDatabase();
priceDb.find({}, { time: 1, value: 1, _id: 0 }, function (err, docs) {
docs.reverse();
prices = docs;
console.log(prices);
});
setInterval(function () {
(async function () {
let totalPrices = [];
const firstPage = await fetchP2PData(1, "ETB", "BUY", "USDT", null);
if (firstPage && firstPage.success) {
const totalPages = Math.ceil(firstPage.total / 20);
const pagesToRun = new Array(totalPages - 1).fill(null);
const totalElements = await pagesToRun.reduce(async (prev, _, idx) => {
const accData = await prev;
const page = idx + 2;
const pageResult = await fetchP2PData(page, "ETB", "BUY", "USDT", null);
if (pageResult && pageResult.success) {
return [...accData, ...pageResult.data];
}
return accData;
}, Promise.resolve(firstPage.data));
totalElements.map((obj) => {
totalPrices.push(parseFloat(obj.adv.price));
});
}
let currentPrice = ((totalPrices[0]+totalPrices[1]+totalPrices[2]+totalPrices[3]+totalPrices[4]+totalPrices[5]+totalPrices[6]+totalPrices[7]+totalPrices[8]+totalPrices[9]+totalPrices[10])/11)-3.95;
const d = new Date();
if(d.getHours() == 00 && d.getMinutes() == 00){
openingPrice = currentPrice;
}
let dailyPercentage = ((currentPrice - openingPrice)/openingPrice)*(100)
prices.push({ time: d.getTime()/1000, value: (((totalPrices[0]+totalPrices[1]+totalPrices[2]+totalPrices[3]+totalPrices[4]+totalPrices[5]+totalPrices[6]+totalPrices[7]+totalPrices[8]+totalPrices[9]+totalPrices[10])/11)-3.95)})
io.on('connection', sock => {
})
if(totalPrices != null){
io.sockets.emit('price', prices)
priceDb.insert({ time: d.getTime()/1000, value: (((totalPrices[0]+totalPrices[1]+totalPrices[2]+totalPrices[3]+totalPrices[4]+totalPrices[5]+totalPrices[6]+totalPrices[7]+totalPrices[8]+totalPrices[9]+totalPrices[10])/11)-3.95)});
io.sockets.emit('dailyPercentage', dailyPercentage)
}
// io.sockets.on('connection', function (socket){
// io.sockets.emit('price', { time: d.getTime(), value: totalPrices[0] });
// })
console.log(prices);
})();
}, 10000);
server.listen(3000, function(){
console.log('listening to 80')
})
It seems necessary to merge the io instance and the server instance.
According to the socket.io docs you can merge the two instances by adding line io.attach(server);.
socket.io docs
I found a great project, node-opcua, on GitHub. Using this project's simple example I can create an OPC server and client and access them locally. I now need to access a remote OPC server, given a few parameters: the node's IP address (e.g. 10.195.156.150), the OPC service (e.g. ArchestrA.FSGateway), the OPC group (e.g. ArchestrA), and the OPC tag name (e.g. HXi1_P201_00.Ia). Can I access it using node-opcua?
Thank you!
sample_server code:
// node-opcua sample server exposing three Double variables under a site
// folder: hook_weight (a counter), rpm (constant 10) and free_memory
// (percentage of free RAM on this machine).
var opcua = require("node-opcua");
var os = require("os");
// Let's create an instance of OPCUAServer
var server = new opcua.OPCUAServer({port: 4334});
var siteName = 'site01';
// Variable descriptors; their `value` getters are attached in
// construct_my_address_space below.
var var1 = {nodeId: 'ns=4;s=hook_weight', browseName: 'hook_weight', dataType: 'Double'};
var var2 = {nodeId: 'ns=4;s=rpm', browseName: 'rpm', dataType: 'Double'};
var var3 = {nodeId: 'ns=1;s=free_memory', browseName: 'free_memory', dataType: 'Double'};
// Runs once the server's internal structures are ready: builds the address
// space, then starts listening.
function post_initialize(){
console.log("server initialized");
// Creates a folder for the site and registers the three variables in it.
function construct_my_address_space(server) {
server.engine.createFolder("RootFolder",{ browseName: siteName});
// emulate variable1 changing every 1000 ms
var variable1 = 1;
setInterval(function(){ variable1 += 1; }, 1000);
var1.value = {
get: function(){
return new opcua.Variant({dataType: opcua.DataType.Double, value: variable1 });
}
};
server.var1 = server.engine.addVariableInFolder(siteName, var1);
// rpm is a constant 10 in this sample.
var2.value = {
get: function(){
return new opcua.Variant({dataType: opcua.DataType.Double, value: 10});
}
};
server.var2 = server.engine.addVariableInFolder(siteName, var2);
/**
* returns the percentage of free memory on the running machine
* @return {double}
*/
function available_memory() {
// var value = process.memoryUsage().heapUsed / 1000000;
var percentageMemUsed = os.freemem() / os.totalmem() * 100.0;
return percentageMemUsed;
}
var3.value = {
get: function(){
return new opcua.Variant({dataType: opcua.DataType.Double, value: available_memory()});
}
};
server.var3 = server.engine.addVariableInFolder(siteName, var3);
}
construct_my_address_space(server);
server.start(function() {
console.log("Server is now listening ... ( press CTRL+C to stop)");
console.log("port ", server.endpoints[0].port);
var endpointUrl = server.endpoints[0].endpointDescription().endpointUrl;
console.log(" the primary server endpoint url is ", endpointUrl );
});
}
server.initialize(post_initialize);
sample_client code:
// node-opcua sample client: connect, open a session, subscribe to the
// server's free_memory variable for ~10 seconds, then close the session.
var opcua = require("node-opcua");
var async = require("async");
var client = new opcua.OPCUAClient();
// Endpoint of the sample server started on this machine (port 4334).
var endpointUrl = "opc.tcp://" + require("os").hostname().toLowerCase() + ":4334/UA/SampleServer";
var session, subscription;
// Steps run sequentially; each step's callback(err) advances the series, and
// any error skips straight to the final handler.
async.series([
// step 1 : connect to
function(callback) {
client.connect(endpointUrl,function (err) {
if(err) { console.log(" cannot connect to endpoint :" , endpointUrl ); }
else { console.log("connected !"); }
callback(err);
});
},
// step 2 : createSession
function(callback) {
client.createSession( function(err, _session) {
if(!err) { session = _session; }
callback(err);
});
},
// step 3 (labelled "step 5" in the original): install a subscription and a
// monitored item for 10 seconds. callback() only fires on "terminated",
// which is triggered by the setTimeout below — that is what moves the
// series on to the session-close step.
function(callback) {
subscription=new opcua.ClientSubscription(session,{
requestedPublishingInterval: 1000,
requestedLifetimeCount: 10,
requestedMaxKeepAliveCount: 2,
maxNotificationsPerPublish: 10,
publishingEnabled: true,
priority: 10
});
subscription.on("started",function(){
console.log("subscriptionId=", subscription.subscriptionId);
}).on("keepalive",function(){ console.log(); })  // prints a blank line per keepalive
.on("terminated",function(){ callback(); });
// End the subscription (and hence this step) after 10 seconds.
setTimeout(function () {
subscription.terminate();
}, 10000);
// install monitored item
var monitoredItem = subscription.monitor({
nodeId: opcua.resolveNodeId("ns=1;s=free_memory"),
attributeId: opcua.AttributeIds.Value
},
{
samplingInterval: 100,
discardOldest: true,
queueSize: 100
},
opcua.read_service.TimestampsToReturn.Both
);
console.log("-------------------------------------");
// Fires every time the server reports a new sampled value.
monitoredItem.on("changed", function (dataValue) {
console.log(" % free mem = ", dataValue.value.value);
});
},
// close session
function (callback) {
// _"closing session"
session.close(function (err) {
if (err) {
console.log("session closed failed ?");
}
callback();
});
}
],
// Final handler: reached after all steps succeed or on the first error.
function(err) {
if (err) { console.log(" failure ",err); }
else { console.log('done!'); }
client.disconnect(function(){});
});
I am afraid Archestra doesn't have an OPC UA Server, so to access it from node-opcua, you will need to use an OPC UA Gateway, such as https://www.prosysopc.com/products/opc-ua-gateway/, which can convert OPC DA to OPC UA.
Can you please explain how to use the KafkaClient in Node.js with the broker port 9092 instead of the ZooKeeper port 2181?
// kafka-node producer example.
// NOTE(review): `new kafka.Client()` connects through ZooKeeper (default
// localhost:2181) and is deprecated in newer kafka-node releases — prefer
// `new kafka.KafkaClient({kafkaHost: 'host:9092'})` to talk to Kafka directly.
var kafka = require('kafka-node');
var Producer = kafka.Producer;
var KeyedMessage = kafka.KeyedMessage;
var client = new kafka.Client();
var producer = new Producer(client);
var km = new KeyedMessage('key', 'message');
// Two payloads: a plain message pinned to partition 0, and a batch mixing
// plain strings with a keyed message.
var payloads = [
    { topic: 'topic1', messages: 'hi', partition: 0 },
    { topic: 'topic2', messages: ['hello', 'world', km] }
];
producer.on('ready', function () {
    producer.send(payloads, function (err, data) {
        console.log(data);
    });
});
// Bug fix: errors were silently swallowed by an empty handler; log them so
// connection/send failures are visible.
producer.on('error', function (err) {
    console.error('Producer error:', err);
})
Newer Kafka clients no longer need to talk directly to ZooKeeper; they only need to talk to Kafka.
Using the new KafkaClient is basically like using the old Client, but talking directly to Kafka.
// kafka-node Consumer that talks directly to a Kafka broker (no ZooKeeper),
// reading 'myTopic' from the latest offset without auto-committing.
const kafka = require('kafka-node');

const brokerAddress = '172.30.33.181:9092';
const Consumer = kafka.Consumer;
const client = new kafka.KafkaClient({ kafkaHost: brokerAddress });

// Topic list and consumer options split out for readability.
const topicList = [
  { topic: 'myTopic' },
];
const consumerOptions = {
  autoCommit: false,
  fromOffset: 'latest',
};
const consumer = new Consumer(client, topicList, consumerOptions);

// Log each message, plus any errors or out-of-range offsets.
consumer.on('message', (message) => console.log(message));
consumer.on('error', (err) => console.log('Error:', err));
consumer.on('offsetOutOfRange', (err) => console.log('offsetOutOfRange:', err));
I am doing unit testing with mocha + unit.js + proxyquire. Right now I am stuck on a mocked emitter. The first mocked emitter, which fakes the 'row' signal, is done. I followed the same approach for the second one. Since there is not much difference, I have a question here.
Why does the emitter not emit 'end'?
userHandler_post.js :
var pg = require('pg');
var connectionString = require('./postgres.ini');
var pgdown = require('./pgdown.js');
exports.post = function(req,res, callback){
var cb_is_func = (typeof(callback)==='function');
// Grab data from http request
var adata = [req.body.username,
req.body.password,
req.body.privilege,
req.body.firstname,
req.body.middlename,
req.body.lastname,
req.body.streetaddress, //rename
req.body.subdistrict,
req.body.district,
req.body.province,
req.body.country,
req.body.zipcode,
req.body.email,
req.body.phonecountrycode,
req.body.phoneareacode,
req.body.phonenumber, //rename
req.body.faxcountrycode,
req.body.faxareacode,
req.body.faxnumber]; //rename
// Get a Postgres client from the connection pool
pg.connect(connectionString, function(err, client, done) {
// Handle Errors
if(err) {
done();
pgdown.pgdown(res);
if(cb_is_func){callback();}
}else{
// SQL Query > Insert Data
var func_ = 'SELECT Dugong.Users_Add($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19)';
var addUser_ = client.query(func_, adata);
addUser_.on('error', function(error){
var data = {success : false,
username : req.body.username,
reason : {errmsg : 'Unable to add user',
errid : 'addUser_' }};
done();
res.json(data);
if(cb_is_func){callback(data);}
});
addUser_.on('end',function(result){
var data = {success : true, username : req.body.username};
done();
res.json(data);
if(cb_is_func){callback(data);}
});
}
});
pg.end();
};
userHandler_post.test.js :
var httpMocks = require('node-mocks-http');
var test = require('unit.js');
var real_userHandler = require('../routes/userHandler_post.js');
var proxyquire = require('proxyquire'),
pgStub = { };
var events = require('events');
describe('userHandler_post : ', function () {
var request;
beforeEach(function (){
request = httpMocks.createRequest({
method: 'POST',
url: '/users',
body : { Username : 'Yoda',
... //Omit to save question lines.
FaxNum: '',
}
});
});
it('should show connetion error. If cannot connect to database.', function (done) {
pgStub.connect = function (aconnectionString, cb){
cb('Connection refused',null,null);
};
var response = httpMocks.createResponse();
var userHandler_post = proxyquire('../routes/userHandler_post.js', { 'pg' : pgStub} );
userHandler_post.post(request,response); //Let it update response
var answer = response._getData();
test.value(answer).match('Connection refused');
done(); //Need
});
it('should show query error. If database rejects query.', function (done) {
pgStub.connect = function (aconnectionString, cb){
var client = {};
client.query = function(querystr){
var emitter = new events.EventEmitter();
setTimeout(function() {
emitter.emit('error', {detail: 'Query error'})
}, 0);
return emitter;
};
var done = function(){};
cb(null,client,done);
};
var response = httpMocks.createResponse();
var userHandler_post = proxyquire('../routes/userHandler_post.js', { 'pg' : pgStub} );
userHandler_post.post(request, response, function(){
var answer = response._getData();
test.value(answer).match('Query error');
});
done(); //Need
});
it('should show several records. If database processes query.', function (done) {
pgStub.connect = function (aconnectionString, cb){
var client = {};
client.query = function(querystr){
var emitter = new events.EventEmitter();
console.log('below emitter');
setTimeout(function() {
console.log('-*************--');
emitter.emit('end', {a : 1});
}, 0); //
return emitter;
};
var done = function(){};
cb(null, client, done);
};
var response = httpMocks.createResponse();
var userHandler_post = proxyquire('../routes/userHandler_post.js', { 'pg' : pgStub} );
userHandler_post.post(request, response, function(){
var answer = response._getData();
console.log(answer);
// test.value(answer).match('Connection refused');
});
// console.log(response._getData());
done(); //Need
});
});
Terminal :
mocha testing/userHandler_post.test.js
userHandler_post :
✓ should show connetion error. If cannot connect to database.
✓ should show query error. If database rejects query.
below emitter
✓ should show several records. If database processes query.
3 passing (10ms)
If it had really emitted the 'end' signal, then it should have said 'Insert record completed'.
I found it.
Upgrade node from v0.10.29 to v0.12.6
npm cache clean -f
npm install -g n
aptitude install curl
n stable