Get Invite_link by getUpdates method Telegram-API - javascript

I want to know which invite link a user used to join the group, so I am trying to read the invite_link field of the ChatMemberUpdated object.
But when I use the getUpdates() method to receive incoming updates (Update is the object that contains ChatMemberUpdated), the result does not include invite_link.
This is the code (nodejs):
await client.getUpdates({ offset: 0, allowed_updates: 'chat_member' }).then(async function(update) {
    console.log(update)
    if (update.length != 0) {
        fs.appendFile('update.txt', JSON.stringify(update), function (err) {
            if (err) throw err
            console.log('Saved!')
        })
    }
})
And this is the result:
[{"update_id":xx96xxxxx,"message":{"message_id":2xxxx,"from":{"id":x669xxxxxx,"is_bot":false,"first_name":"xx.nwxxx","language_code":"es"},"chat":{"id":-xxxx4506xxxxx,"title":"xxxxxxx","type":"supergroup"},"date":xxx195xxxx,"new_chat_participant":{"id":xxx9660xxx,"is_bot":false,"first_name":"xxxnwxxx","language_code":"es"},"new_chat_member":{"id":xxx9660xxx,"is_bot":false,"first_name":"xxxnwxxx","language_code":"es"},"new_chat_members":[{"id":xxx9660xxx,"is_bot":false,"first_name":"xxxnwxxx","language_code":"es"}]}}]

When you create the invite link, you need to specify "creates_join_request" as true. Otherwise, the bot cannot detect which link was used.
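For reference, here is a minimal sketch against the raw Bot API over HTTPS (not tied to any particular Node client library, using the global fetch available in Node 18+; BOT_TOKEN and the chat id are placeholders). It creates a link with creates_join_request set to true and then polls getUpdates, reading invite_link from the chat_join_request (or chat_member) update:

const BOT_TOKEN = process.env.BOT_TOKEN; // placeholder - your bot token
const API = `https://api.telegram.org/bot${BOT_TOKEN}`;

async function createTrackedLink(chatId) {
    // creates_join_request: true makes joins arrive as chat_join_request updates,
    // which carry the ChatInviteLink that was used
    const res = await fetch(`${API}/createChatInviteLink`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ chat_id: chatId, creates_join_request: true })
    });
    return (await res.json()).result; // contains result.invite_link
}

async function pollJoins() {
    // allowed_updates must be a JSON-serialized array; "chat_member" and
    // "chat_join_request" updates are only delivered when explicitly requested
    const allowed = encodeURIComponent(JSON.stringify(['chat_member', 'chat_join_request']));
    const res = await fetch(`${API}/getUpdates?allowed_updates=${allowed}`);
    const updates = (await res.json()).result;
    for (const u of updates) {
        if (u.chat_join_request) {
            console.log('joined via', u.chat_join_request.invite_link.invite_link);
        } else if (u.chat_member && u.chat_member.invite_link) {
            console.log('joined via', u.chat_member.invite_link.invite_link);
        }
    }
}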

Related

Azure CosmosDb Stored Procedure IfMatch Predicate

In a DocDb stored procedure, as the first step in a process retrieving data that I'm mutating, I read and then use the data iff it matches the etag like so:
collection.readDocument(reqSelf, function(err, doc) {
    if (doc._etag == requestEtag) {
        // Success - want to update
    } else {
        // CURRENTLY: Discard the read result I just paid lots of RUs to read
        // IDEALLY: check whether response `options` or similar indicates retrieval
        // was skipped due to doc not being present with that etag anymore
        ...
        // ... Continue with an alternate strategy
    }
});
Is there a way to pass options to the readDocument call such that the callback will be informed "It's changed, so we didn't get it, as you requested"?
(My real problem here is that I can't find any documentation other than the readDocument undocumentation in the js-server docs)
Technically you can do that by creating a responseOptions object and passing it to the call.
function sample(selfLink, requestEtag) {
    var collection = getContext().getCollection();
    var responseOptions = { accessCondition: { type: "IfMatch", condition: requestEtag } };

    var isAccepted = collection.readDocument(selfLink, responseOptions, function(err, doc, options) {
        if (err) {
            throw new Error('Error thrown. Check the status code for PreconditionFailed errors');
        }

        var response = getContext().getResponse();
        response.setBody(doc);
    });

    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}
However, even if the etag you provide is not the one the document currently has, you won't get an error; you will simply get the document itself back. The access condition just isn't honored by the readDocument function in a stored procedure.
Thanks to some pushing from @Nick Chapsas, and this self-answer from @Redman, I worked out that in my case I can achieve my goal (either read the current document via the self-link, or the newer one that has replaced it bearing the same id) by instead generating an alt link within the stored procedure like so:
var docId = collection.getAltLink() + "/docs/" + req.id;

var isAccepted = collection.readDocument(docId, {}, function (err, doc, options) {
    if (err) throw err;

    // Will be null or not depending on whether it exists
    executeUpsert(doc);
});

if (!isAccepted) throw new Error("readDocument not Accepted");

MongoDB - Mongoose - TypeError: save is not a function

I am attempting to perform an update to a MongoDB document (using mongoose) by first using .findById to get the document, then updating the fields in that document with new values. I am still a bit new to this so I used a tutorial to figure out how to get it working, then I have been updating my code for my needs. Here is the tutorial: MEAN App Tutorial with Angular 4. The original code had a schema defined, but my requirement is for a generic MongoDB interface that will simply take whatever payload is sent to it and send it along to MongoDB. The original tutorial had something like this:
exports.updateTodo = async function(todo){
    var id = todo.id

    try {
        // Find the old Todo object by the id
        var oldTodo = await ToDo.findById(id);
    } catch(e) {
        throw Error("Error occurred while finding the Todo")
    }

    // If no old Todo object exists, return false
    if (!oldTodo) {
        return false;
    }

    console.log(oldTodo)

    // Edit the Todo object
    oldTodo.title = todo.title
    oldTodo.description = todo.description
    oldTodo.status = todo.status

    console.log(oldTodo)

    try {
        var savedTodo = await oldTodo.save()
        return savedTodo;
    } catch(e) {
        throw Error("An error occurred while updating the Todo");
    }
}
However, since I don't want a schema and want to allow anything through, I don't want to assign static values to specific field names like title, description, status, etc. So, I came up with this:
exports.updateData = async function(update){
    var id = update.id

    // Check the existence of the query parameters; if they don't exist then assign a default value
    var dbName = update.dbName ? update.dbName : 'test'
    var collection = update.collection ? update.collection : 'testing';
    const Test = mongoose.model(dbName, TestSchema, collection);

    try {
        // Find the existing Test object by the id
        var existingData = await Test.findById(id);
    } catch(e) {
        throw Error("Error occurred while finding the Test document - " + e)
    }

    // If no existing Test object exists, return false
    if (!existingData) {
        return false;
    }

    console.log("Existing document is " + existingData)

    // Edit the Test object
    existingData = JSON.parse(JSON.stringify(update))

    // This was another way to overwrite existing field values, but
    // it performs a "shallow copy" so it's not desirable
    //existingData = Object.assign({}, existingData, update)
    //existingData.title = update.title
    //existingData.description = update.description
    //existingData.status = update.status

    console.log("New data is " + existingData)

    try {
        var savedOutput = await existingData.save()
        return savedOutput;
    } catch(e) {
        throw Error("An error occurred while updating the Test document - " + e);
    }
}
My original problem with this was that I had a lot of issues getting the new values to overwrite the old ones. Now that that's been solved, I am getting the error of "TypeError: existingData.save is not a function". I am thinking the data type changed or something, and now it is not being accepted. When I uncomment the static values that were in the old tutorial code, it works. This is further supported by my console logging before and after I join the objects, because the first one prints the actual data and the second one prints [object Object]. However, I can't seem to figure out what it's expecting. Any help would be greatly appreciated.
EDIT: I figured it out. Apparently Mongoose has its own data type of "Model" which gets changed if you do anything crazy to the underlying data by using things like JSON.stringify. I used Object.prototype.constructor to figure out the actual object type like so:
console.log("THIS IS BEFORE: " + existingData.constructor);
existingData = JSON.parse(JSON.stringify(update));
console.log("THIS IS AFTER: " + existingData.constructor);
And I got this:
THIS IS BEFORE: function model(doc, fields, skipId) {
model.hooks.execPreSync('createModel', doc);
if (!(this instanceof model)) {
return new model(doc, fields, skipId);
}
Model.call(this, doc, fields, skipId);
}
THIS IS AFTER: function Object() { [native code] }
Which showed me what was actually going on. I added this to fix it:
existingData = new Test(JSON.parse(JSON.stringify(update)));
On a related note, I should probably just use the native MongoDB driver at this point, but it's working, so I'll just put it on my to do list for now.
You've now found a solution, but I would suggest using the MongoDB driver, which would make your code look something along the lines of this and would make the original issue disappear:
// MongoDB Settings
const MongoClient = require(`mongodb`).MongoClient;
const mongodb_uri = `mongodb+srv://${REPLACE_mongodb_username}:${REPLACE_mongodb_password}@url-here.gcp.mongodb.net/test`;
const db_name = `test`;
let client;
let db; // allows us to reuse the database connection once it is opened

// Open MongoDB Connection
const open_database_connection = async () => {
    try {
        client = await MongoClient.connect(mongodb_uri);
    } catch (err) { throw new Error(err); }
    db = client.db(db_name);
};

exports.updateData = async update => {
    // open database connection if it isn't already open
    try {
        if (!db) await open_database_connection();
    } catch (err) { throw new Error(err); }

    // update document
    let savedOutput;
    try {
        savedOutput = await db.collection(`testing`).updateOne( // .save() is being deprecated
            { // filter
                _id: update.id // the '_id' might need to be 'id' depending on how you have set your collection up, usually it is '_id'
            },
            { // update document
                $set: update // I've assumed that you are overwriting the fields you are updating, hence the '$set' operator;
                             // this also assumes that the update object only contains fields that should be updated
            }
            // If you want to add a new document if the id isn't found, add the line below
            // , { upsert: true }
        );
    } catch (err) { throw new Error(`An error occurred while updating the Test document - ${err}`); }

    if (savedOutput.matchedCount !== 1) return false; // if you add in '{ upsert: true }' above, then remove this line as it will create a new document

    return savedOutput;
}
The collection testing would need to be created before this code runs, but that is a one-time thing and is very easy - if you are using MongoDB Atlas then you can use MongoDB Compass or go into your online admin to create the collection without a single line of code...
As far as I can see you shouldn't need to duplicate the update object. The above reduces the database calls from two to one and allows you to reuse the database connection, potentially anywhere else in the application, which would help to speed things up. Also, don't store your MongoDB credentials directly in the code.
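For example, the credentials can be read from environment variables instead (a minimal sketch; the MONGODB_USERNAME / MONGODB_PASSWORD names are placeholders you would set in your environment or load from a .env file with a package such as dotenv):

// Read credentials from the environment instead of hard-coding them.
// The env var names below are placeholders - use whatever your deployment provides.
const mongodb_username = encodeURIComponent(process.env.MONGODB_USERNAME);
const mongodb_password = encodeURIComponent(process.env.MONGODB_PASSWORD);
const mongodb_uri = `mongodb+srv://${mongodb_username}:${mongodb_password}@url-here.gcp.mongodb.net/test`;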

For Loop not working inside Node.js while using mongoose

I am trying to fetch records from MongoDB through Mongoose:
router.route('/testaa')
    .get(function(req, res){
        fisSite.find(function(err, retVal) {
            var x = [];
            for (var i = 0; i < retVal.length; i++){
                x.push(retVal[i].site);
            }
            res.json(x)
        });
    })
The above code is giving undefined values.
undefined
undefined
undefined
.....
Please help me run the for loop inside Node.js so that I can extract the data from the array.
Currently you are fetching every field of every document and then using only one of them. You can optimize the query using the select method, which will retrieve only the particular field you need rather than all fields:
[Note: the _id field is included by default, so to exclude it in select we specifically mention it using -_id. You can remove -_id if you want to keep the id in the response documents.]
fisSite.find({}).select('site -_id').exec(function(err, docs){
    if (err) {
        return res.json({ status : false, message : 'Error occurred.' });
    }
    else if (!docs) {
        return res.json({ status : false, message : 'No documents found.' });
    }
    else {
        return res.json({ status : true, message : 'Documents found.', data : docs });
    }
});
Imagine each document has 10 fields and find returns 100 documents, but you only need the site field from those 100 documents; you would still be fetching the remaining 900 field values unnecessarily.
I have used lean().exec() within Mongoose:
router.route('/site')
    .get(function(req, res){
        fisSite.find().lean().exec(function(err, retVal) {
            var x = [];
            for (var i = 0; i < retVal.length; i++){
                x.push(retVal[i].site);
            }
            res.json(x)
        })
    });
Try to add an empty search query to select all documents
Try to change
From this:
fisSite.find(function(err, retVal)
To this:
fisSite.find({}, function(err, retVal)
Also try to do a console.log(retVal) to check if your find actually returns any values.
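Put together, the route would look something like this (a sketch under the same assumptions as the question, i.e. a fisSite model whose documents have a site field):

router.route('/testaa')
    .get(function(req, res){
        // An empty query object selects all documents
        fisSite.find({}, function(err, retVal) {
            if (err) return res.status(500).json({ error: err.message });

            console.log(retVal); // check that the query actually returns documents

            var x = [];
            for (var i = 0; i < retVal.length; i++){
                x.push(retVal[i].site);
            }
            res.json(x);
        });
    });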

Preventing a function from calling until variable is defined in javascript/node.js

I'm guessing I'm doing this the wrong way and should be using promises or something, but I'm trying to create a CMS where you can edit a MongoDB document which is then turned into a blog post (or any other template) with the variables filled out.
My CMS creates a list of blogs which only have the blog image, author, title, date modified, and the mongodb document _id as a data-id attribute.
When you click on one of the blogs, it passes the blogId via socket.io to the server. The server looks up the blog, renders the blogTemplateForm.pug file, and sends that HTML back to the client, where the client checks whether there is already HTML in the #editor container, deletes it, and then appends the new HTML inside the edit container so the user can edit it.
The document lookup is handled by a Mongoose model. I set res.blog to the blog returned by that lookup via the callback function and generate some forms for use at a later date, but otherwise I use that res.blog object to render the HTML I want from the blog template and send that HTML to the client.
This works great, but for some unknown reason the res.blog object is sometimes undefined inside the next function, even when it really shouldn't be. app.render() then returns an error and null for the HTML, so I made a loop to check whether blog is defined before rendering the template. But even this doesn't work, as html sometimes still gets passed as null... What gives?!
(Screenshots omitted: one showing the failure case, one showing the success case.)
If you have a look at my loop checking whether res.blog is defined it really makes no sense that any undefined res.blog object is making its way through.
socket.on('edit blog', function(blogId){
    var res = {};
    Blog.findById(blogId, function(err, blog){
        res.blog = blog
        res.form = Form(blog)
    }).then(function(){
        res.filledForm = Bridge(res.blog, res.form).getForm()
        delete res.form

        if (res.blog !== (undefined & null)) {
            app.render(blogTemplateFormPath, {blog: res.blog}, function(err, html){
                console.log(err);
                console.log(html);
                socket.emit('blog form', html)
            })
        } else while (res.blog == (undefined | null)) {
            if (res.blog !== (undefined & null)) {
                app.render(blogTemplateFormPath, {blog: res.blog}, function(err, html){
                    console.log(err);
                    console.log(html);
                    socket.emit('blog form', html)
                })
            }
        }
    })
})
I've tried using different operators, but to no avail; it still returns null about 5% of the time:
if (res.blog !== (undefined | null)) {
    app.render(blogTemplateFormPath, {blog: res.blog}, function(err, html){
        console.log(err);
        console.log(html);
        socket.emit('blog form', html)
    })
} else while (res.blog == (undefined | null)) {
    if (res.blog !== (undefined | null)) {
        app.render(blogTemplateFormPath, {blog: res.blog}, function(err, html){
            console.log(err);
            console.log(html);
            socket.emit('blog form', html)
        })
    }
}
Thank you for any help. The screenshots are high res 1080p x 2 so I think you'll be able to see the code.
This looks fishy:
Blog.findById(blogId, function(err, blog){
    res.blog = blog
    res.form = Form(blog)
}).then(function(){
    ...
});
You're passing a callback to findById() and are also treating it as a promise. I can imagine that this may cause all sorts of unexpected issues.
So use just one method. My suggestion would be to use the promise:
Blog.findById(blogId).then(function(blog) {
    if (!blog) {
        ...handle "blogId not found" here...
        return;
    }
    ...
}).catch(function(err) {
    ...handle errors here...
});
This will remove the need for res as well.
That happens because res.blog !== (undefined | null) is not doing what you think it is doing.
console.log( (undefined | null) ) // 0
There is no shortcut, you need to check each one.
if (res.blog !== undefined && res.blog !== null)
or do a falsy check
if (!res.blog)

Proper way to findOne document in Template Event?

I am trying to findOne a document in my Template.admin.events code. I have a form, and on click I want to verify whether the ObjectID entered matches an existing document in my collection and fetch that result to show it in the template.
My event code on the client :
Template.admin.events({
    'click #btnAjouterObjet'(event) {
        let objetIdInput = $('#object_id').val().toString();
        Meteor.subscribe('objetsFindOne', objetIdInput, {
            onReady: function () {
                let obj = Objets.findOne();
                if (obj) {
                    console.log("found");
                    console.log(obj);
                    objetsArr.push(objetIdInput);
                }
                else {
                    console.log("not found");
                    console.log(obj);
                }
            }
        });
    }
});
In my Objets api :
Meteor.publish('objetsFindOne', function objetsFindOne(param_id){
    return Objets.find({_id : param_id});
})
I have verified that my objetIdInput always changes on click when a different id is entered, but the subscription always returns the first id entered. I also added the onReady callback because otherwise it returned undefined.
I am new to Meteor. I have also tried subscribing to the whole collection and doing the find on the client, but I don't think that is the best idea as my collection has about 22000 documents.
Just to elaborate a little bit on the first answer, as to how to change this pattern:
(1) you should place your Meteor.subscribe() call in your Template.admin.onCreated() function.
(2) the subscription reads from a reactive value, for example, new ReactiveVar().
(3) now, anytime the reactive value changes, the subscription will re-run. So, in your template event, you set the reactive value to the id and let the subscription handle the rest.
Discover Meteor and other resources should be helpful on the details; a rough sketch of the pattern follows below.
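A sketch of that pattern, assuming the same Objets collection, publication and template names as in the question (reactiveObjectId is a hypothetical name for the ReactiveVar):

import { ReactiveVar } from 'meteor/reactive-var';

Template.admin.onCreated(function () {
    // Holds the id the user typed; changing it re-runs the subscription below
    this.reactiveObjectId = new ReactiveVar(null);

    this.autorun(() => {
        const id = this.reactiveObjectId.get();
        if (id) {
            // Template-level subscription: re-subscribes whenever the reactive value changes
            this.subscribe('objetsFindOne', id);
        }
    });
});

Template.admin.events({
    'click #btnAjouterObjet'(event, template) {
        let objetIdInput = $('#object_id').val().toString();
        // Just set the reactive value; the autorun above handles the subscription
        template.reactiveObjectId.set(objetIdInput);
    }
});

Template.admin.helpers({
    objet() {
        // Reactive: re-runs when the subscription delivers the document
        return Objets.findOne(Template.instance().reactiveObjectId.get());
    }
});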
You are going about this all wrong. I suggest you take a look at Template-Level Subscriptions
I opted for the use of a method :
Client side:
'click #btnAjouterObjet'(event) {
    let objetIdInput = $('#object_id').val().toString();
    let result = Meteor.call('findObj', objetIdInput, function (error, result) {
        if (error) {
            console.log(error.reason);
            return;
        }
        console.log(result);
    });
}
On the server side :
Meteor.methods({
    findObj: function (param_id) {
        console.log(Objets.find({ _id: param_id }).fetch());
        return Objets.find({ _id: param_id }).fetch();
    },
});
