How to send a transaction on Solana? - javascript

So I literally copied this from the docs
(https://docs.solana.com/developing/clients/javascript-api)
I need to send a transaction from one wallet to another, easy right? But this doesn't work here; I get an error.
Specifically, the transfer method cannot create an instruction. I've tried everything I can, but it hasn't worked. Please help, dear programmers.
MY CODE:
async function transfer() {
  const provider = await IsConnected();
  console.log(provider.publicKey.toString());
  const connection = new solanaWeb3.Connection(
    "https://api.devnet.solana.com",
    "confirmed"
  );
  var recieverWallet = new solanaWeb3.PublicKey(
    "3uePV7kJcT3w5qMLPTHVAdLc72SF2iDRwNcxDE713EMf"
  );
  // var airdropSignature = await connection.requestAirdrop(
  //   provider.publicKey,
  //   solanaWeb3.LAMPORTS_PER_SOL * 0.4
  // );
  // await connection
  //   .confirmTransaction(airdropSignature)
  //   .then(console.log("Airdropped"));
  var transaction = new solanaWeb3.Transaction();
  transaction.feePayer = await provider.publicKey;
  let blockhashObj = await connection.getRecentBlockhash();
  transaction.recentBlockhash = await blockhashObj.blockhash;
  transaction.add(
    await solanaWeb3.SystemProgram.transfer({
      fromPubkey: provider.publicKey,
      toPubkey: recieverWallet,
      lamports: 10000,
    })
  );
  if (transaction) {
    console.log("Txn created successfully");
  }
  let signed = await provider.signTransaction(transaction).serialize();
  let signature = await connection.sendRawTransaction(signed);
  console.log(signature);
  let bConfirmed = await connection.confirmTransaction(signature);
  console.log(bConfirmed);
}
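For reference, a minimal sketch of how the signing and sending part might look, assuming a Phantom-style provider whose signTransaction() resolves to a signed Transaction (in the code above, .serialize() is called on the Promise rather than on the resolved transaction):

// A minimal sketch, not necessarily the exact fix for the transfer() error above.
async function sendSol(provider, connection, recieverWallet) {
  const transaction = new solanaWeb3.Transaction();
  transaction.feePayer = provider.publicKey;
  transaction.recentBlockhash = (await connection.getRecentBlockhash()).blockhash;

  // SystemProgram.transfer() is synchronous; it only builds the instruction.
  transaction.add(
    solanaWeb3.SystemProgram.transfer({
      fromPubkey: provider.publicKey,
      toPubkey: recieverWallet,
      lamports: 10000, // plain number; web3.js converts it with BigInt() internally
    })
  );

  // Await the signing first, then serialize the signed transaction.
  const signed = await provider.signTransaction(transaction);
  const signature = await connection.sendRawTransaction(signed.serialize());
  await connection.confirmTransaction(signature);
  return signature;
}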
CODE WHERE THE PROBLEM IS (from @solana/web3.js and its layout helpers):
static transfer(
  params: TransferParams | TransferWithSeedParams,
): TransactionInstruction {
  let data;
  let keys;
  if ('basePubkey' in params) {
    const type = SYSTEM_INSTRUCTION_LAYOUTS.TransferWithSeed;
    data = encodeData(type, {
      lamports: BigInt(params.lamports),
      seed: params.seed,
      programId: toBuffer(params.programId.toBuffer()),
    });
    keys = [
      {pubkey: params.fromPubkey, isSigner: false, isWritable: true},
      {pubkey: params.basePubkey, isSigner: true, isWritable: false},
      {pubkey: params.toPubkey, isSigner: false, isWritable: true},
    ];
  } else {
    const type = SYSTEM_INSTRUCTION_LAYOUTS.Transfer;
    ***THERE ->*** data = encodeData(type, {lamports: BigInt(params.lamports)});
    keys = [
      {pubkey: params.fromPubkey, isSigner: true, isWritable: true},
      {pubkey: params.toPubkey, isSigner: false, isWritable: true},
    ];
  }
  return new TransactionInstruction({
    keys,
    programId: this.programId,
    data,
  });
}
export function encodeData<TInputData extends IInstructionInputData>(
  type: InstructionType<TInputData>,
  fields?: any,
): Buffer {
  const allocLength =
    type.layout.span >= 0 ? type.layout.span : Layout.getAlloc(type, fields);
  const data = Buffer.alloc(allocLength);
  const layoutFields = Object.assign({instruction: type.index}, fields);
  type.layout.encode(layoutFields, data);
  return data;
}
encode(src, b, offset = 0) {
  const firstOffset = offset;
  let lastOffset = 0;
  let lastWrote = 0;
  for (const fd of this.fields) {
    let span = fd.span;
    lastWrote = (0 < span) ? span : 0;
    if (undefined !== fd.property) {
      const fv = src[fd.property];
      if (undefined !== fv) {
        lastWrote = fd.encode(fv, b, offset);
        if (0 > span) {
          /* Read the as-encoded span, which is not necessarily the
           * same as what we wrote. */
          span = fd.getSpan(b, offset);
        }
      }
    }
    lastOffset = offset;
    offset += span;
  }
const bigInt =
  (length: number) =>
  (property?: string): Layout<bigint> => {
    const layout = blob(length, property);
    const {encode, decode} = encodeDecode(layout);
    const bigIntLayout = layout as Layout<unknown> as Layout<bigint>;
    bigIntLayout.decode = (buffer: Buffer, offset: number) => {
      const src = decode(buffer, offset);
      return toBigIntLE(Buffer.from(src));
    };
    bigIntLayout.encode = (bigInt: bigint, buffer: Buffer, offset: number) => {
      const src = toBufferLE(bigInt, length);
      return encode(src, buffer, offset);
    };
    return bigIntLayout;
  };
function toBufferLE(num, width) {
  {
    const hex = num.toString(16);
    const buffer = Buffer.from(hex.padStart(width * 2, '0').slice(0, width * 2), 'hex');
    buffer.reverse();
    return buffer;
  }
  // Allocation is done here, since it is slower using napi in C
  return converter.fromBigInt(num, Buffer.allocUnsafe(width), false);
}
exports.toBufferLE = toBufferLE;
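For intuition, a small illustration (values chosen only for this example) of what the encoding path above does with the lamports value; note that BigInt(params.lamports) in SystemProgram.transfer would already throw if lamports were undefined:

// Illustration only: roughly what the bigInt/toBufferLE path produces for lamports = 10000.
const lamports = 10000;
const asBigInt = BigInt(lamports);   // 10000n; BigInt(undefined) would throw a TypeError
const width = 8;                     // the lamports field is a u64, i.e. 8 bytes
const hex = asBigInt.toString(16);   // "2710"
const buf = Buffer.from(hex.padStart(width * 2, '0'), 'hex'); // <Buffer 00 00 00 00 00 00 27 10>
buf.reverse();                       // little-endian: <Buffer 10 27 00 00 00 00 00 00>
console.log(buf);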

Related

Tensorflow.js classification model, getting error: the graph model has 2 placeholders, while there are 1 input tensors

const NSFWNET_WEIGHTS_PATH = 'models/model.json';
const IMAGE_SIZE = 224;
const IMAGE_CROP_SIZE = 224;
const TOPK_PREDICTIONS = 4;
const NSFW_CLASSES = {
  0: 'Hentai',
  1: 'Neural',
  2: 'Porn',
  3: 'Sexy',
};
let nsfwnet;
const nsfwnetDemo = async () => {
  nsfwnet = await tf.loadGraphModel(NSFWNET_WEIGHTS_PATH);
  nsfwnet.predict(tf.zeros([1, IMAGE_CROP_SIZE, IMAGE_CROP_SIZE, 3])).dispose();
  console.log('Model Warm complete');
  const image_Element = document.getElementById('test_draw');
  if (image_Element.complete && image_Element.naturalHeight !== 0) {
    predict(image_Element);
    image_Element.style.display = '';
  }
  document.getElementById('file-container').style.display = '';
};
async function predict(imgElement) {
  const logits = tf.tidy(() => {
    const img = tf.browser.fromPixels(imgElement).toFloat();
    const crop_image = tf.slice(img, [16, 16, 0], [224, 224, -1]);
    const img_reshape = tf.reverse(crop_image, [-1]);
    let imagenet_mean = tf.expandDims([103.94, 116.78, 123.68], 0);
    imagenet_mean = tf.expandDims(imagenet_mean, 0);
    const normalized = img_reshape.sub(imagenet_mean);
    const batched = normalized.reshape([1, IMAGE_CROP_SIZE, IMAGE_CROP_SIZE, 3]);
    return nsfwnet.predict(batched);
  });
  const classes = await getTopKClasses(logits, TOPK_PREDICTIONS);
  display(classes);
}
async function getTopKClasses(logits, topK) {
  const values = await logits.data();
  const sortArray = Array.from(values).map((value, index) => {
    return {
      value: value,
      index: index
    };
  }).sort((a, b) => {
    return b.value - a.value;
  }).slice(0, topK);
  return sortArray.map(x => {
    return {
      className: NSFW_CLASSES[x.index],
      probability: x.value
    };
  });
}
function display(classes) {
  console.log(classes);
}
nsfwnetDemo();
I get
Uncaught (in promise) Error: Input tensor count mismatch, the graph model has 2 placeholders, while there are 1 input tensors
This is an NSFW classifier and I am trying to classify images; the tensorflow.js version is 3.20. If possible, please help me find a solution.
The model is split into separate shard files, so the path is set accordingly.
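The error means the converted graph expects two feeds but predict() was given only one tensor. A sketch of how you might inspect the model's placeholders and feed both of them; the input names below ('input_1', 'keras_learning_phase') are hypothetical and must be replaced with whatever nsfwnet.inputs actually reports:

// List the placeholders the converted graph actually expects.
console.log(nsfwnet.inputs.map(t => ({ name: t.name, shape: t.shape, dtype: t.dtype })));

// Feed every placeholder by name (names here are hypothetical examples).
const logits = tf.tidy(() => {
  const batched = tf.zeros([1, IMAGE_CROP_SIZE, IMAGE_CROP_SIZE, 3]); // stand-in for the preprocessed image
  return nsfwnet.execute({
    'input_1': batched,                               // the image placeholder
    'keras_learning_phase': tf.scalar(false, 'bool'), // if the second placeholder is a training-phase flag
  });
});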

Result won't update VAR

I am trying to run a query inside an Axios call which gets data from a third-party URL, then uses some of that data to search our MongoDB database.
However, it seems it won't update var total = 0.
While the query below does run correctly, the returned result won't let me set total from it.
Promise.all(arr.forEach(async (id, index) => {
  // (CODE REMOVED JUST TO GET THIS FUNCTION TO WORK)
  const search = await geoLocation.find({
    'location': {
      '$geoWithin': {
        '$box': [
          [-35.2418503, -13.5076852], [112.8656697, 129.0020486]
        ]
      }
    }
  }).toArray();
  total = search.length;
}));
See the full code below
var array = [];
var pointarray = [];
var total = 0;
areas.forEach((id, index) => {
  if (id.type == "Point") {
    pointarray[index] = "N" + id.id;
  } else {
    array[index] = "R" + id.id;
  }
});
var arraySearch = "https://nominatim.openstreetmap.org/lookup?osm_ids=" + array.toString() + "&polygon_geojson=1&bbox=1&format=json";
var pointSearch = "https://nominatim.openstreetmap.org/lookup?osm_ids=" + pointarray.toString() + "&polygon_geojson=1&bbox=0&format=json";
const requestOne = axios.get(arraySearch);
const requestTwo = axios.get(pointSearch);
axios.all([requestOne, requestTwo])
  .then(axios.spread((...responses) => {
    const responseOne = responses[0];
    const responseTwo = responses[1];
    /*
      process the responses and return in an array accordingly.
    */
    return [
      responseOne.data,
      responseTwo.data,
    ];
  }))
  .then(arr => {
    Promise.all(arr.forEach(async (id, index) => {
      // const middleIndex = id[index].boundingbox.length / 2;
      // const firstHalf = id[index].boundingbox.splice(0, middleIndex);
      // const secondHalf = id[index].boundingbox.splice(-middleIndex);
      // res.send(secondHalf[0]);
      const query = [{
        $match: {
          location: {
            $geoWithin: {
              $box: [[Number(firstHalf[0]), Number(firstHalf[1])], [Number(secondHalf[0]), Number(secondHalf[1])]]
            }
          }
        }
      }, {
        $count: 'id'
      }];
      const search = await geoLocation.find({
        'location': {
          '$geoWithin': {
            '$box': [
              [-35.2418503, -13.5076852], [112.8656697, 129.0020486]
            ]
          }
        }
      }).toArray();
      total = search.length;
      // total = search.length;
      // const search = geoLocation.aggregate(query).toArray.length;
    }));
  })
  .catch(errors => {
    console.log("ERRORS", errors);
  })
  .then(function () {
    res.send(total);
  });
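A sketch of one way to restructure this, assuming geoLocation is a connected MongoDB collection: Array.prototype.forEach returns undefined, so Promise.all has nothing to wait on and res.send(total) fires before any query finishes; map() returns the array of promises instead, and the combined result is awaited before sending.

// Sketch only: count matches for each response before replying.
async function countMatches(arr, geoLocation) {
  const counts = await Promise.all(arr.map(async (id, index) => {
    const search = await geoLocation.find({
      location: {
        $geoWithin: {
          $box: [[-35.2418503, -13.5076852], [112.8656697, 129.0020486]]
        }
      }
    }).toArray();
    return search.length;
  }));
  return counts.reduce((sum, n) => sum + n, 0); // combine the per-response counts
}

// usage inside the axios chain:
// .then(arr => countMatches(arr, geoLocation)).then(total => res.send(total));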

AWS SDK in Lambda function

I am working on a Lambda function, creating a method for an AWS SDK historical metrics report using Node.js, and I am getting the following errors. Have a look at a screenshot of the error.
Here is my code:
function getKeyByValue(object, value) {
  return Object.keys(object).find(key =>
    object[key] === value);
}
exports.handler = async (event) => {
  const AWS = require('aws-sdk');
  var connect = new AWS.Connect({ apiVersion: '2017-08-08' });
  let queueARN = event.queueARN || null;
  const connectInstanceId = process.env.instanceID;
  let flag = 0, nextToken = null;
  let queueARNsObject = {}, queueARNsArray = [], queueTypeObject = {};
  do {
    let listQueuesParams = {
      InstanceId: connectInstanceId, /* required */
      QueueTypes: [
        "STANDARD",
      ],
      NextToken: nextToken,
    };
    let listQueuesPromise = connect.listQueues(listQueuesParams).promise();
    listQueuesResult = await listQueuesPromise;
    // console.log(listQueuesResult);
    listQueuesResult.QueueSummaryList.forEach(queue => {
      if (queueARN != null) {
        if (queue.Arn == queueARN) {
          queueARNsArray = [queue.Arn];
          queueARNsObject[queue.Name] = queue.Arn;
          queueTypeObject[queue.QueueType] = queue.Arn;
          flag = 1;
          return;
        }
      } else {
        queueARNsObject[queue.Name] = queue.Arn;
        queueTypeObject[queue.QueueType] = queue.Arn;
        queueARNsArray.push(queue.Arn);
        nextToken = listQueuesResult.NextToken;
      }
    });
  } while (flag=0 && nextToken != null);
  const HistoricalMetricsList = [
    {
      Name: 'CONTACTS_HANDLED',
      Unit: 'COUNT',
      Statistic: 'SUM'
    },
    {
      Name: 'CONTACTS_ABANDONED',
      Unit: 'COUNT',
      Statistic: 'SUM'
    },
  ];
  // Metrics params
  var getHistoricalMetricsParams = {
    InstanceId: connectInstanceId,
    StartTime: 1592993700,
    EndTime: 1593039900,
    Filters: {
      Channels: ["VOICE"],
      Queues: queueARNsArray
    },
    HistoricalMetrics: HistoricalMetricsList,
    Groupings: ["QUEUE"]
  };
  // get current metrics by queues
  var getHistoricalMetricsPromise = connect
    .getMetricData(getHistoricalMetricsParams)
    .promise();
  var getHistoricalMetricsResult = await getHistoricalMetricsPromise;
  // console.log("current |||||||| 1 metrics:", JSON.stringify(getCurrentMetricsResult));
  let queueMetricsArray = [];
  if (getHistoricalMetricsResult.MetricResults.length) {
    getHistoricalMetricsResult.MetricResults.forEach(queue => {
      let queueMetrics = {
        "Queue_Name": getKeyByValue(queueARNsObject, queue.Dimensions.Queue.Arn),
        "CallsHandled": queue.Collections[0].Value,
        "CallsAbanoded": queue.Collections[1].Value,
      };
      queueMetricsArray.push(queueMetrics);
      console.log("TYPE||||", getKeyByValue(queueTypeObject, queue.Dimensions.Queue.Arn));
    });
  } else {
    keys.forEach(key => {
      let queueMetrics = {
        "Queue_Name": getKeyByValue(event, queue.Dimensions.Queue.Arn),
        "CONTACTS_HANDLED": 0,
        "CONTACTS_ABANDONED": 0
      };
      queueMetricsArray.push(queueMetrics);
    });
  }
  const response = {
    responseCode: 200,
    metricResults: queueMetricsArray
  };
  return response;
};
I don't have any idea what this error is about. If any of you know, please help me fix it. Thanks. I've done the same thing with the real-time metrics report and that runs perfectly.
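One thing stands out regardless of the screenshot: the loop condition while (flag=0 && nextToken != null) assigns 0 to flag instead of comparing, so the do/while always exits after the first pass. A simplified sketch of how the pagination loop might be written, assuming the intent is to stop when the requested ARN is found or Connect stops returning a NextToken:

// Sketch only; variable names follow the code above.
let flag = 0;
let nextToken = null;
do {
  const listQueuesParams = {
    InstanceId: connectInstanceId,
    QueueTypes: ["STANDARD"],
  };
  if (nextToken) listQueuesParams.NextToken = nextToken; // only send the token when we have one
  const listQueuesResult = await connect.listQueues(listQueuesParams).promise();

  listQueuesResult.QueueSummaryList.forEach(queue => {
    queueARNsObject[queue.Name] = queue.Arn;
    queueTypeObject[queue.QueueType] = queue.Arn;
    queueARNsArray.push(queue.Arn);
    if (queueARN !== null && queue.Arn === queueARN) {
      flag = 1; // found the requested queue; stop after this page
    }
  });

  nextToken = listQueuesResult.NextToken || null;
} while (flag === 0 && nextToken !== null); // note ===, not =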

Bitfinex API websocket orderbook

I am trying to get the order book through the Bitfinex WebSocket API, but I get an error in the console.
Error in the console: Cannot read property '0' of undefined, on the line if (bidsKeys[i]) {
var ws = new WebSocket("wss://api-pub.bitfinex.com/ws/2");
ws.onopen = function () {
  // ws.send(JSON.stringify({ event: 'conf', flags: 131072 }))
  let msg = JSON.stringify({
    event: 'subscribe',
    channel: 'book',
    pair: 'tBTCUSD',
    freq: 'F1',
    len: '25',
    prec: 'P0'
  });
  ws.send(msg);
  ws.onmessage = function (msg) {
    const BOOK = {};
    BOOK.bids = {};
    BOOK.asks = {};
    BOOK.psnap = {};
    BOOK.mcnt = 0;
    msg = JSON.parse(msg.data);
    // var response = JSON.parse(msg.data);
    // console.log(msg.data);
    const csdata = [];
    const bidsKeys = BOOK.psnap['bids'];
    const asksKeys = BOOK.psnap['asks'];
    for (let i = 0; i < 25; i++) {
      if (bidsKeys[i]) {
        const price = bidsKeys[i];
        const pp = BOOK.bids[price];
        csdata.push(pp.price, pp.amount);
      }
      if (asksKeys[i]) {
        const price = asksKeys[i];
        const pp = BOOK.asks[price];
        csdata.push(pp.price, -pp.amount);
      }
    }
    const csStr = csdata.join(':');
    const csCalc = CRC.str(csStr);
    if (csCalc !== checksum) {
      console.error('CHECKSUM_FAILED');
    }
  };
};
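The psnap object is recreated empty on every message and never filled, so BOOK.psnap['bids'] is undefined when the loop indexes it. A rough sketch of the missing bookkeeping, loosely following Bitfinex's published checksum example; the message-shape assumptions are noted in the comments, and checksum/heartbeat frames are not handled here:

// Keep the book outside onmessage so it survives between messages.
const BOOK = { bids: {}, asks: {}, psnap: {}, mcnt: 0 };

function applyBookMessage(msg) {
  // Assumes msg is the parsed array frame [CHANNEL_ID, payload]; event objects,
  // heartbeats and checksum frames should be filtered out before calling this.
  if (!Array.isArray(msg) || !Array.isArray(msg[1])) return;
  const payload = msg[1];
  const entries = Array.isArray(payload[0]) ? payload : [payload]; // snapshot vs single update
  for (const [price, count, amount] of entries) {
    const side = amount >= 0 ? 'bids' : 'asks';
    if (count > 0) {
      BOOK[side][price] = { price, count, amount: Math.abs(amount) };
    } else {
      delete BOOK[side][price]; // count 0 removes the level
    }
  }
  // Rebuild the sorted price lists that the checksum loop walks.
  BOOK.psnap.bids = Object.keys(BOOK.bids).sort((a, b) => b - a);
  BOOK.psnap.asks = Object.keys(BOOK.asks).sort((a, b) => a - b);
  BOOK.mcnt++;
}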

Redis: Node returns true instead of the value in an async/await function

Forgive me the question; I'm not used to Node and async/await.
I have the following function which queries MongoDB and returns JSON, and I'm saving that result in Redis.
So far so good.
findLightnings: async (request, h) => {
  const q = request.query.q.split(':');
  const index = q[0];
  const value = q[1].split(',');
  const dateInit = new Date(request.query.dateInit);
  const dateEnd = new Date(request.query.dateEnd);
  const page = request.query.page;
  const distance = request.query.distance;
  const redis = require('redis');
  const client = redis.createClient();
  let limit = 300;
  let filter = {
    $and: [{
      data: {
        $gte: dateInit.toISOString(),
        $lte: dateEnd.toISOString()
      }
    }]
  };
  if (index === 'latlng') {
    filter['$and'][0]['geo.coordinates'] = {
      $near: {
        $geometry: {
          type: 'Point',
          coordinates: value.map(Number),
          $minDistance: 0,
          $maxDistance: distance
        }
      }
    };
    limit = 100;
  } else {
    filter['$and'][0][`cidade.${index}`] = {
      $in: value
    };
  }
  return client.get('elektro', async (err, reply) => {
    let resp = null;
    if (reply) {
      console.log(reply); // <<<<<<<< Returns the JSON OK
      resp = reply;       // <<<<<<<<<< Returns TRUE in the JSON's place
    } else {
      console.log('db');
      const query = await Lightning.find(filter).sort('data').skip(page * limit).limit(limit).exec();
      client.set('elektro', JSON.stringify(query));
      client.expire('elektro', 3600);
      resp = query;
    }
    return JSON.stringify(resp);
  });
}
The problem comes when it's time to recover this data from Redis.
In the console log the JSON appears normal, but when I try to return that value to the main function it comes back as 'true' and not the JSON saved in Redis.
Can someone give me a helping hand on this? I really need this function.
const redis = require('redis');
const client = redis.createClient(6379);
const bluebird = require("bluebird");
bluebird.promisifyAll(redis.RedisClient.prototype);
bluebird.promisifyAll(redis.Multi.prototype);
const redisdata = await client.getAsync("user:photos");
if (redisdata) {
  console.log(`cache EXISTS`);
  return res.json({ source: 'cache', data: JSON.parse(redisdata) });
}
I was able to solve the problem with the Redis client.getAsync(), which works as a native async function (source: node-redis).
The final code is as follows:
findLightnings: async (request, h) => {
  const q = request.query.q.split(':');
  const index = q[0];
  const value = q[1].split(',');
  const dateInit = new Date(request.query.dateInit);
  const dateEnd = new Date(request.query.dateEnd);
  const page = request.query.page;
  const distance = request.query.distance;
  let limit = 300;
  let filter = {
    $and: [{
      data: {
        $gte: dateInit.toISOString(),
        $lte: dateEnd.toISOString()
      }
    }]
  };
  if (index === 'latlng') {
    filter['$and'][0]['geo.coordinates'] = {
      $near: {
        $geometry: {
          type: 'Point',
          coordinates: value.map(Number),
          $minDistance: 0,
          $maxDistance: distance
        }
      }
    };
    limit = 100;
  } else {
    filter['$and'][0][`cidade.${index}`] = {
      $in: value
    };
  }
  return getAsync('findLightnings' + '/' + request.query.q + '/' + request.query.dateInit + '/' + request.query.dateEnd).then(async (res) => {
    if (res) {
      console.log('Fonte Dados => redis');
      return res;
    } else {
      console.log('Fonte Dados => db');
      try {
        const query = await Lightning.find(filter).sort('data').exec(); // .skip(page*limit).limit(limit).exec();
        client.set('findLightnings' + '/' + request.query.q + '/' + request.query.dateInit + '/' + request.query.dateEnd, JSON.stringify(query));
        return query;
      } catch (err) {
        return Boom.badData(err);
      }
    }
    client.close();
  });
},
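For anyone hitting the same thing: with callback-style node_redis (v3), client.get(key, callback) itself returns a boolean indicating whether the command was queued, which is why the original handler resolved to true instead of the cached JSON. A small sketch of the same cache-then-db pattern using Node's built-in util.promisify instead of bluebird; the key name and loader are placeholders:

const util = require('util');
const redis = require('redis');

const client = redis.createClient();
const getAsync = util.promisify(client.get).bind(client);

// Cache-aside helper: return the cached JSON if present,
// otherwise run the loader, cache its result, and return it.
async function cached(key, loader, ttlSeconds = 3600) {
  const hit = await getAsync(key);
  if (hit) return JSON.parse(hit);
  const fresh = await loader(); // e.g. the Lightning.find(filter) query
  client.set(key, JSON.stringify(fresh), 'EX', ttlSeconds);
  return fresh;
}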
