Monitor AWS instance using CloudWatch on Node.js - javascript

I'm trying to build an app that monitors AWS instances given an instance IP.
I managed to get a response from the instance, but the Datapoints array comes back empty.
Constraints:
the Dimensions are unknown to me
the Namespace is unknown as well; I used EC2 as an arbitrary value
The code reaching out to the AWS servers:
const winston = require('winston');
const { CloudWatchClient, GetMetricStatisticsCommand, ListMetricsCommand } = require("@aws-sdk/client-cloudwatch");
const config = require('config');

const region = config.get('region');
const cwClient = new CloudWatchClient({ region: region });

const getCPUUtilization = async (ip, timePeriod, interval) => {
    let today = new Date();
    let dateToStart = new Date();
    let dateOffset = (24 * 60 * 60 * 1000) * timePeriod;
    dateToStart.setTime(today.getTime() - dateOffset);

    const params = {
        Dimensions: [
            {
                Name: "InstanceId",
                Value: "i-test",
            }
        ],
        Namespace: `AWS/EC2`,
        MetricName: 'CPUUtilization',
        Period: interval,
        Unit: "Percent",
        StartTime: dateToStart,
        EndTime: today,
        Statistics: ["Maximum", "Minimum", "Average"],
    };

    try {
        return await cwClient.send(new GetMetricStatisticsCommand(params));
    } catch (err) {
        console.log(err);
    }
}

module.exports = getCPUUtilization;
The output I'm getting:
{
    '$metadata': {
        httpStatusCode: 200,
        requestId: '192bce9c-42b0-4820-9b66-ca0c8a4e0159',
        extendedRequestId: undefined,
        cfId: undefined,
        attempts: 1,
        totalRetryDelay: 0
    },
    Label: 'CPUUtilization',
    Datapoints: []
}
I would really appreciate your help,
thank you in advance
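Since the Namespace and the Dimensions are the open questions here, one way to pin them down is to resolve the IP to an InstanceId and then ask CloudWatch which metrics actually exist for it. A sketch, not from the original post: it assumes the given IP is the instance's private IP, and it reuses region, cwClient, and the already-imported ListMetricsCommand from the snippet above.
const { EC2Client, DescribeInstancesCommand } = require("@aws-sdk/client-ec2");
const ec2Client = new EC2Client({ region });

// Hypothetical helper: map an IP to its InstanceId. Swap the filter Name to
// 'ip-address' if the IP you are given is the public one.
const getInstanceIdByIp = async (ip) => {
    const res = await ec2Client.send(new DescribeInstancesCommand({
        Filters: [{ Name: "private-ip-address", Values: [ip] }],
    }));
    return res.Reservations?.[0]?.Instances?.[0]?.InstanceId;
};

// List the metrics CloudWatch actually has for that instance, which answers
// the "which Namespace/Dimensions do I use?" question empirically.
const listInstanceMetrics = async (instanceId) => {
    return cwClient.send(new ListMetricsCommand({
        Namespace: "AWS/EC2",
        Dimensions: [{ Name: "InstanceId", Value: instanceId }],
    }));
};
If Datapoints still comes back empty for a real InstanceId, the usual suspects are a Period that does not match the instance's monitoring granularity (basic monitoring publishes one datapoint every 5 minutes) or querying the wrong region.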

Related

Invalid signature when signing EIP-712 data

I am trying to call transferWithAuthorization on the USDC contract (which implements EIP-3009). To do this I need to sign the transaction data and then execute the transaction; however, I am getting the error: Fail with error 'FiatTokenV2: invalid signature'
Example transaction error:
https://goerli.etherscan.io/tx/0x3d2cee8414682bcf55bc8adf86c2deaf14eb77448824c417629cca0d3f9bea94
Any ideas why?
const { ethers } = require('ethers');
const abi = require('./abi');

async function main() {
    /* Generate Signature */
    const sendingWallet = ethers.Wallet.createRandom();
    const sendingAddress = await sendingWallet.getAddress();

    const types = {
        TransferWithAuthorization: [
            { name: 'from', type: 'address' },
            { name: 'to', type: 'address' },
            { name: 'value', type: 'uint256' },
            { name: 'validAfter', type: 'uint256' },
            { name: 'validBefore', type: 'uint256' },
            { name: 'nonce', type: 'bytes32' },
        ],
    };

    const domain = {
        name: 'testapp',
        version: '1',
        chainId: '5',
        verifyingContract: '0x07865c6E87B9F70255377e024ace6630C1Eaa37F', // USDC Contract
    };

    const message = {
        from: sendingAddress,
        to: '0x1bC152F3E47CC7baDF5629bc77CBEf9DaE813843', // Receiver wallet
        value: 100000,
        validAfter: 0,
        validBefore: Math.floor(Date.now() / 1000) + 3600, // Valid for an hour
        nonce: ethers.utils.hexValue(ethers.utils.randomBytes(32)), // 32 byte hex string
    };

    const signature = await sendingWallet._signTypedData(domain, types, message);
    console.log(signature);

    const { v, r, s } = ethers.utils.splitSignature(signature);
    console.log(v, r, s);

    /* Execute transaction */
    // Throw away wallet :)
    const wallet = ethers.Wallet.fromMnemonic(
        'youth tool amount venue exact birth opinion derive lend charge roof food',
    );
    const connection = new ethers.providers.InfuraProvider(
        'goerli', // or 'ropsten', 'rinkeby', 'kovan', 'goerli'
        'x', // Infura API Key
    );
    const signer = wallet.connect(connection);

    const contract = new ethers.Contract(
        '0x07865c6E87B9F70255377e024ace6630C1Eaa37F',
        abi,
        signer,
    );

    const res = await contract.transferWithAuthorization(
        message.from,
        message.to,
        message.value,
        message.validAfter,
        message.validBefore,
        message.nonce,
        v,
        r,
        s,
        { gasLimit: 1000000 },
    );
    console.log(res);

    const receipt = await res.wait();
    console.log(receipt);
}

main();
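One likely culprit: the contract recovers the signer using its own EIP-712 domain separator, and FiatTokenV2 USDC deployments typically advertise the domain name 'USD Coin' with version '2', so signing under an app-chosen domain like 'testapp' produces a digest the contract cannot verify. A sketch of the corrected domain; this is an assumption, so verify it against the deployed contract (e.g. via its name() and version() getters):
const domain = {
    name: 'USD Coin', // must match the token contract's own EIP-712 domain, not your app's name
    version: '2',     // FiatTokenV2 typically uses version "2" - check the deployed contract
    chainId: 5,       // Goerli
    verifyingContract: '0x07865c6E87B9F70255377e024ace6630C1Eaa37F',
};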

Better way to write CSV from Parquet in JavaScript

I am converting from Parquet to CSV using JavaScript.
The example below works, but I am storing the whole array of values read from Parquet in memory, in records.
The Parquet library uses an AsyncIterator while the CSV library uses the Node Stream API.
I would like to know how to implement a more elegant solution, leveraging streams and reducing the memory footprint. TIA
libraries -
Parquet: https://github.com/ironSource/parquetjs
CSV: https://csv.js.org/
import pts from 'parquets'
let { ParquetSchema, ParquetWriter, ParquetReader } = pts
import * as fs from 'fs'
import stringify from 'csv-stringify'

// declare a schema for the `PI` table
let schema = new ParquetSchema({
    Source: { type: 'UTF8' },
    TagID: { type: 'UTF8' },
    Timestamp: { type: 'TIMESTAMP_MILLIS' },
    Value: { type: 'DOUBLE' },
});

const WriterParquet = async () => {
    // create new ParquetWriter that writes to 'pi.parquet'
    let writer = await ParquetWriter.openFile(schema, 'pi.parquet')

    // append a few rows to the file
    await writer.appendRow({ Source: 'PI/NO-SVG-PISRV01', TagID: 'OGP8TI198Z.PV', Timestamp: new Date(), Value: 410 })
    await writer.appendRow({ Source: 'PI/NO-SVG-PISRV01', TagID: 'OGP8TI198Z.PV', Timestamp: new Date(), Value: 420 })

    await writer.close()
}

const WriterCSV = async () => {
    // create new ParquetReader that reads from 'pi.parquet'
    let reader = await ParquetReader.openFile('pi.parquet')

    // create a new cursor
    let cursor = reader.getCursor()

    // read all records from the file and print them
    let records = []
    let record = null
    while (record = await cursor.next()) {
        console.log(record)
        records.push(record)
    }
    await reader.close()

    // write to CSV
    stringify(records, {
        header: true
    }, function (err, output) {
        console.log(output)
        fs.writeFile('./pi.csv', output, () => {})
    })
}

const Main = async () => {
    console.log('writing parquet...')
    await WriterParquet()
    console.log('reading parquet and writing csv...')
    await WriterCSV()
}

Main()
Instead of using the cursor, I used Readable.from(reader) to create a readable stream; after that, it was easy to pipe into csv-stringify:
import { Readable } from 'stream'

const WriterCSV = async () => {
    // create new ParquetReader that reads from 'pi.parquet'
    let reader = await ParquetReader.openFile('pi.parquet')

    // stream all records from the file into the stringifier
    const readStream = Readable.from(reader)
    readStream.pipe(
        stringify({
            header: true,
            columns: {
                Source: 'Source',
                TagID: 'TagID',
                Timestamp: 'Timestamp',
                Value: 'Value'
            }
        }, function (error, output) {
            fs.writeFile('./pi.csv', output, () => {})
        }))
    readStream.on('end', async function () {
        await reader.close()
    })
}
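The callback form above still buffers the full CSV output string before writing it. For a fully streaming pipeline, csv-stringify can also be used without a callback, in which case it returns a Transform stream that can be piped straight into a file write stream. A sketch under the same libraries and schema as above (WriterCSVStreaming is a hypothetical name):
import { Readable } from 'stream'

const WriterCSVStreaming = async () => {
    const reader = await ParquetReader.openFile('pi.parquet')

    // Readable.from turns the reader's AsyncIterator into a stream;
    // stringify() with no callback returns a Transform stream.
    const readStream = Readable.from(reader)
    readStream
        .pipe(stringify({ header: true }))
        .pipe(fs.createWriteStream('./pi.csv'))

    readStream.on('end', () => reader.close())
}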

Hyperledger Sawtooth JavaScript SDK: submitted batches are invalid

I am trying to implement a Hyperledger Sawtooth transaction through the JavaScript SDK, following https://sawtooth.hyperledger.org/docs/core/releases/1.0/_autogen/sdk_submit_tutorial_js.html#encoding-your-payload.
/*
 * Create the transaction header
 */
const createTransactionHeader = function createTransactionHeader(payloadBytes) {
    return protobuf.TransactionHeader.encode({
        familyName: 'intkey',
        familyVersion: '1.0',
        inputs: [],
        outputs: [],
        signerPublicKey: '02cb65a26f7af4286d5f8118400262f7790e20018f2d01e1a9ffc25de1aafabdda',
        batcherPublicKey: '02cb65a26f7af4286d5f8118400262f7790e20018f2d01e1a9ffc25de1aafabdda',
        dependencies: [],
        payloadSha512: createHash('sha512').update(payloadBytes).digest('hex')
    }).finish();
}

/*
 * Create the transactions
 */
const createTransaction = function createTransaction(transactionHeaderBytes, payloadBytes) {
    const signature = signer.sign(transactionHeaderBytes)
    return transaction = protobuf.Transaction.create({
        header: transactionHeaderBytes,
        headerSignature: Buffer.from(signature, 'utf8').toString('hex'),
        payload: payloadBytes
    });
}
While submitting the transaction I am getting the following error from the REST API:
{
    "error": {
        "code": 30,
        "message": "The submitted BatchList was rejected by the validator. It was poorly formed, or has an invalid signature.",
        "title": "Submitted Batches Invalid"
    }
}
I found the following issue similar to my problem:
Sawtooth Invalid Batch or Signature
But it is implemented in Java, and that solution does not work for my case.
This should work, try this:
const cbor = require('cbor');
const { createContext, CryptoFactory } = require('sawtooth-sdk/signing');
const { createHash } = require('crypto');
const { protobuf } = require('sawtooth-sdk');
const request = require('request');
const crypto = require('crypto');

const context = createContext('secp256k1');
const privateKey = context.newRandomPrivateKey();
const signer = new CryptoFactory(context).newSigner(privateKey);

// Here's how you can generate the input/output address
const FAMILY_NAMESPACE = crypto.createHash('sha512').update('intkey').digest('hex').toLowerCase().substr(0, 6);
const address = FAMILY_NAMESPACE + crypto.createHash('sha512').update('foo').digest('hex').toLowerCase().substr(0, 64);

const payload = {
    Verb: 'set',
    Name: 'foo',
    Value: 42
};
const payloadBytes = cbor.encode(payload);

const transactionHeaderBytes = protobuf.TransactionHeader.encode({
    familyName: 'intkey',
    familyVersion: '1.0',
    inputs: [address],
    outputs: [address],
    signerPublicKey: signer.getPublicKey().asHex(),
    batcherPublicKey: signer.getPublicKey().asHex(),
    dependencies: [],
    payloadSha512: createHash('sha512').update(payloadBytes).digest('hex')
}).finish();

const transactionHeaderSignature = signer.sign(transactionHeaderBytes);

const transaction = protobuf.Transaction.create({
    header: transactionHeaderBytes,
    headerSignature: transactionHeaderSignature,
    payload: payloadBytes
});

const transactions = [transaction];

const batchHeaderBytes = protobuf.BatchHeader.encode({
    signerPublicKey: signer.getPublicKey().asHex(),
    transactionIds: transactions.map((txn) => txn.headerSignature),
}).finish();

const batchHeaderSignature = signer.sign(batchHeaderBytes);

const batch = protobuf.Batch.create({
    header: batchHeaderBytes,
    headerSignature: batchHeaderSignature,
    transactions: transactions
});

const batchListBytes = protobuf.BatchList.encode({
    batches: [batch]
}).finish();

request.post({
    url: 'http://rest.api.domain/batches',
    body: batchListBytes,
    headers: { 'Content-Type': 'application/octet-stream' }
}, (err, response) => {
    if (err) {
        return console.log(err);
    }
    console.log(response.body);
});
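For reference, the two differences that seem to matter compared with the question's snippet: signer.sign() already returns the signature as a hex string, so re-encoding it with Buffer.from(signature, 'utf8').toString('hex') mangles the header signature, and inputs/outputs cannot be left empty; they must list the state addresses the transaction reads and writes (here the intkey address derived above).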

cex.io websocket authentication timestamp error

I'm currently trying to connect to the CEX.IO bitcoin exchange's websocket. The websocket connection is OK, but at authentication time I get the error: Timestamp is not in 20sec range. I don't know what causes this error.
Test cases 1 & 2 for createSignature are OK (https://cex.io/websocket-api#authentication).
Code for calculating the signature and the request params:
const WebSocket = require('ws');

const cexioWs = new WebSocket(
    'wss://ws.cex.io/ws/',
    {
        perMessageDeflate: false
    }
);

function createAuthRequest(apiKey, apiSecret) {
    let curTime = Math.floor(Date.now() / 1000);
    let hmac = crypto.createHmac('sha256', apiSecret);
    hmac.update(curTime.toString());
    hmac.update(apiKey);

    let args = {
        e: "auth",
        auth: {
            key: apiKey,
            signature: hmac.digest('hex'), //createSignature(curTime, apiKey, apiSecret),
            timestamp: curTime
        }
    };
    let authMessage = JSON.stringify(args);
    console.log(args);
    return authMessage;
}

cexioWs.on('message', (mess, error) => {
    //console.log("connected");
    console.log("cexio message");
    console.log(mess);
    let JSONMess = JSON.parse(mess);
    if (JSONMess.e === "connected") {
        cexioWs.send(createAuthRequest(key, secret));
        cexioWs.send(JSON.stringify({
            e: "subscribe",
            roomss: [
                "tickers"
            ]
        }));
    }
    if (JSONMess.e === "ping") {
        console.log("pong message");
        cexioWs.send(JSON.stringify({ e: "pong" }));
    }
});
Here is working code:
const crypto = require('crypto')
const WebSocket = require('ws')

var apiKey = ''
var apiSecret = ''

const cexioWs = new WebSocket('wss://ws.cex.io/ws/', { perMessageDeflate: false });

function createSignature(timestamp, apiKey, apiSecret) {
    var hmac = crypto.createHmac('sha256', apiSecret);
    hmac.update(timestamp + apiKey);
    return hmac.digest('hex');
}

function createAuthRequest(apiKey, apiSecret) {
    var timestamp = Math.floor(Date.now() / 1000);
    var args = {
        e: 'auth',
        auth: {
            key: apiKey,
            signature: createSignature(timestamp, apiKey, apiSecret),
            timestamp: timestamp
        }
    };
    var authMessage = JSON.stringify(args);
    return authMessage;
}

cexioWs.on('message', (mess, error) => {
    console.log("cexio message");
    console.log(mess);
    let JSONMess = JSON.parse(mess);
    if (JSONMess.e === "connected") {
        cexioWs.send(createAuthRequest(apiKey, apiSecret));
        cexioWs.send(JSON.stringify({
            e: "subscribe",
            rooms: [
                "tickers"
            ]
        }));
    }
    if (JSONMess.e === "ping") {
        console.log("pong message");
        cexioWs.send(JSON.stringify({ e: "pong" }));
    }
});
Don't know if this helps, but I had the same problem for two days. I checked everything and the code looked absolutely fine. Then I compared the actual time my machine reported against Internet time: my computer's clock was 4 minutes ahead, and the 'update time from Internet' setting was off.
After syncing my computer's time with the Internet, I ran the script and it worked perfectly.
Moral of the story: make sure your PC's time and the Internet's time are the same.
Good luck!
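If you want to detect such drift from code before authenticating, a rough check is to compare the local clock with the Date header of an HTTPS response. A sketch, not from either answer above; the Date header is only second-accurate, but that is plenty for a 20-second window:
const https = require('https');

// Rough clock-skew check: compare local time with the server's Date header.
// Any reliable HTTPS endpoint works; cex.io itself is used here.
https.get('https://cex.io/', (res) => {
    const serverTime = new Date(res.headers.date).getTime();
    const skewSeconds = Math.round((Date.now() - serverTime) / 1000);
    console.log(`Local clock is ~${skewSeconds}s ahead of the server`);
    if (Math.abs(skewSeconds) > 20) {
        console.warn('Clock skew exceeds the 20s auth window - sync your system time');
    }
    res.resume(); // discard the response body
});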

How to create a consumer for a Kafka topic?

I have a topic created on a Kafka server. Now I am creating a consumer to read the topic's messages from the server, but I don't see any data when I try to consume them using consumer.on('message'). Any idea what is implemented wrong in the code below? Do I need to set an offset?
consumer.js
var kafka = require('kafka-node');
var config = require('./config.js');
var zk = require('node-zookeeper-client');

var kafkaConn = config.kafkaCon.dit;
var HighLevelConsumer = kafka.HighLevelConsumer;
var Client = kafka.Client;

function start() {
    topics = [{
        topic: 'test-1'
    }];
    var groupId = 'push';
    var clientId = "consumer-" + Math.floor(Math.random() * 10000);
    var options = {
        autoCommit: true,
        fetchMaxWaitMs: 100,
        fetchMaxBytes: 10 * 1024 * 1024,
        groupId: groupId
    };
    console.log("Started consumer: ", clientId);
    var consumer_client = new kafka.Client(kafkaConn, clientId);
    var client = new Client(consumer_client.connectionString, clientId);
    var consumer = new HighLevelConsumer(client, topics, options);
    consumer.on('message', function(message) {
        var topic = message.topic;
        console.log('Message', topic);
    });
};

start();
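One alternative is the node-rdkafka client, which speaks the broker protocol directly instead of going through ZooKeeper. The consumer below subscribes to a topic and logs every message; the commented-out SASL settings are only needed for hosted brokers such as CloudKarafka: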
const Kafka = require("node-rdkafka");

const kafkaConf = {
    "group.id": "cloudkarafka-example",
    "metadata.broker.list": ["localhost:9092"],
    "socket.keepalive.enable": true,
    //"security.protocol": "SASL_SSL",
    //"sasl.mechanisms": "SCRAM-SHA-256",
    //"sasl.username": process.env.CLOUDKARAFKA_USERNAME,
    //"sasl.password": process.env.CLOUDKARAFKA_PASSWORD,
    "debug": "generic,broker,security",
    'enable.auto.commit': false,
};

//const prefix = process.env.CLOUDKARAFKA_USERNAME;
const topics = ['topicName'];

const consumer = new Kafka.KafkaConsumer(kafkaConf, {
    "auto.offset.reset": "beginning"
});

consumer.on("error", function(err) {
    console.error(err);
});
consumer.on("ready", function(arg) {
    console.log(`Consumer ${arg.name} ready`);
    consumer.subscribe(topics);
    consumer.consume();
});
consumer.on("data", function(m) {
    console.log(m.value.toString());
});
consumer.on("disconnected", function(arg) {
    process.exit();
});
consumer.on('event.error', function(err) {
    console.error(err);
    process.exit(1);
});
consumer.on('event.log', function(log) {
    console.log(log);
});

consumer.connect();
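Note that with 'enable.auto.commit': false and no explicit commit calls, this consumer never stores offsets, so combined with "auto.offset.reset": "beginning" it will re-read the topic from the start on every run.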
A Kafka Consumer can be written with the kafka-node npm module. For my use case, my consumer was a separate Express server which listened to events and stored them in a database.
import kafka from "kafka-node"

const client = new kafka.Client("http://localhost:2181");

const topics = [
    {
        topic: "webevents.dev"
    }
];
const options = {
    autoCommit: true,
    fetchMaxWaitMs: 1000,
    fetchMaxBytes: 1024 * 1024,
    encoding: "buffer"
};

const consumer = new kafka.HighLevelConsumer(client, topics, options);

consumer.on("message", function(message) {
    // Read string into a buffer.
    var buf = Buffer.from(message.value, "binary");
    var decodedMessage = JSON.parse(buf.toString());

    // Events is a Sequelize Model Object.
    return Events.create({
        id: decodedMessage.id,
        type: decodedMessage.type,
        userId: decodedMessage.userId,
        sessionId: decodedMessage.sessionId,
        data: JSON.stringify(decodedMessage.data),
        createdAt: new Date()
    });
});

consumer.on("error", function(err) {
    console.log("error", err);
});

process.on("SIGINT", function() {
    consumer.close(true, function() {
        process.exit();
    });
});
More info at https://nodewebapps.com/2017/11/04/getting-started-with-nodejs-and-kafka/
