MongoDB find query giving weird response - javascript

I am trying to search data in MongoDB with Node.js. This is my query:
collection.find({ age: { '$gt': 20 } });
It works fine in Robomongo, but it gives me this response in my application:
Readable {
  pool: null,
  server: null,
  disconnectHandler:
   { s: { storedOps: [], storeOptions: [Object], topology: [Object] },
     length: [Getter] },
  bson: {},
  ns: 'versioncontrol.Branch/contacts',
  cmd:
   { find: 'versioncontrol.Branch/contacts',
     limit: 0,
     skip: 0,
     query: { age: [Object] },
     slaveOk: true,
     readPreference: { preference: 'primary', tags: undefined, options: [Object] } }
Now I don't know how to get my data out of it.

The cursor returned from the find method is a Readable stream. You have to read items from it to get the actual result. Look at this example:
var cursor = collection.find({ age: { '$gt': 20 } });
cursor.each(function (err, doc) {
  if (err) {
    console.log(err);
  } else if (doc !== null) {
    // each() calls back with a null doc once the cursor is exhausted
    console.log('Fetched:', doc);
  }
});

I got it working using:
var cursor = collection.find({ age: { '$gt': 20 } }).toArray();
cursor.then(function (docs) {
  console.log(docs);
});
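With more recent versions of the Node.js driver the same thing can also be written with async/await. A minimal sketch, assuming collection comes from an already connected MongoClient (the function name is just illustrative):

// Minimal sketch, assuming `collection` comes from an already connected MongoClient.
async function findOlderThan20(collection) {
  // toArray() drains the cursor and resolves with all matching documents
  const docs = await collection.find({ age: { $gt: 20 } }).toArray();
  console.log('Fetched:', docs);
  return docs;
}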

Related

How to update a document in MongoDB with Node.js?

I am using this command to update the document:
order = await db.collection("orders").findOneAndUpdate(
  { order_id: req.body.ORDERID },
  { $set: { payment_status: "Paid", paymentInfo: JSON.stringify(myrequest) } }
);
console.log(order);
But the document is not updated; instead it returns this in the console:
lastErrorObject: { n: 0, updatedExisting: false },
value: null,
ok: 1,
'$clusterTime': {
  clusterTime: new Timestamp({ t: 1663565745, i: 13 }),
  signature: {
    hash: new Binary(Buffer.from("5165sd1vdsvds651vds5vvs5dvdsvdskvjdsv", "hex"), 0),
    keyId: new Long("1451151132154123165")
  }
}
To update a document in MongoDB, two things are required: a filter and the update information.
const myquery = { orderId: req.body.ORDER_ID };
const newvalues = { $set: { payment_status: "Paid", paymentInfo: JSON.stringify('your Payment Info in JSON format') }};
await db.collection("orders").updateOne(myquery, newvalues);
Please go through the documentation for detailed information.
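Since the output in the question shows n: 0 and updatedExisting: false, the filter did not match any document, so it is worth checking the result before treating the update as successful. A rough sketch, reusing the field names from the question's code (returnDocument requires a reasonably recent driver):

// Rough sketch: confirm the filter matched before assuming the order was updated.
// `order_id` and `req.body.ORDERID` are taken from the question's code.
const result = await db.collection("orders").findOneAndUpdate(
  { order_id: req.body.ORDERID },
  { $set: { payment_status: "Paid", paymentInfo: JSON.stringify(myrequest) } },
  { returnDocument: "after" } // return the document as it looks after the update
);

if (!result.value) {
  // n: 0 / updatedExisting: false means nothing matched the filter
  console.log("No order found for", req.body.ORDERID, "- check the field name and value");
}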

Calling a JSON key that has a non-letter character

I am trying to return the value of #microsoft.graph.downloadUrl from the JSON object below:
[
  {
    '#microsoft.graph.downloadUrl': 'https://public.bl.files.1drv.com/XXXX',
    createdDateTime: '2021-07-10T06:14:31.03Z',
    cTag: 'QQQQ',
    eTag: 'SSSS',
    id: 'FFFF',
    lastModifiedDateTime: '2021-07-12T09:27:21.69Z',
    name: 'FILE_NAME',
    size: 98580,
    webUrl: 'https://1drv.ms/b/SSSS',
    reactions: { commentCount: 0 },
    createdBy: { application: [Object], user: [Object] },
    lastModifiedBy: { user: [Object] },
    parentReference: {
      driveId: 'XXX',
      driveType: 'personal',
      id: 'YYYY!YYY',
      name: 'Documents',
      path: '/drive/root:/Documents'
    },
    file: { mimeType: 'application/pdf', hashes: [Object] },
    fileSystemInfo: {
      createdDateTime: '2021-07-10T06:14:31.03Z',
      lastModifiedDateTime: '2021-07-12T09:27:21.69Z'
    }
  }
]
I want to use something like what I did to extract the name, since I need to be able to get the #microsoft.graph.downloadUrl from each JSON object (referred to as f below) in 'files'.
var fileName = (JSON.stringify(files[f].name));
I tried both:
var fileURL = (JSON.stringify(files[f]."#microsoft.graph.downloadUrl"));
var fileURL = (JSON.stringify(files[f].#microsoft.graph.downloadUrl));
but neither works; any help would be much appreciated!
You should just use files[f]["#microsoft.graph.downloadUrl"].
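Dot notation only works when the property name is a valid JavaScript identifier; keys containing characters such as # and . have to be read with bracket notation. A minimal sketch using the files array from the question:

// Minimal sketch: bracket notation for keys that are not valid identifiers.
// `files` is the array of drive items from the question.
for (const f in files) {
  const fileName = files[f].name;                             // ordinary key: dot notation works
  const fileURL = files[f]['#microsoft.graph.downloadUrl'];   // special characters: bracket notation
  console.log(fileName, fileURL);
}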

Why is insertMany not working when using Mongoose with transactions?

I am trying to insert data using insertMany, but I am not able to insert the data. Why? I am using a Mongoose session so that if any error occurs I can roll back the changes.
https://codesandbox.io/s/dreamy-bell-9u0bz
app.get("/saveData", async (req, res, next) => {
const session = await mongoose.startSession();
session.startTransaction();
try {
const data = [
{
empid: "Ad",
id: 4,
date: "19-Jul-2019"
},
{
empid: "Bc",
id: 56,
date: "18-Jul-2019"
},
{
empid: "C",
id: 6,
date: "11-Jul-2019"
}
];
console.log("before save");
let saveBlog = await BlogPostModel.insertMany(data, { session }); //when fail its goes to catch
await session.commitTransaction();
return res.send(saveBlog);
} catch (error) {
console.log(error);
await session.abortTransaction();
return res.status(400).send(error);
}
});
Since you don't appear to have understood the marked duplicate or the comment on your last question, here's a direct demonstration:
const { Schema } = mongoose = require('mongoose');

const uri = 'mongodb://localhost:27017/test';
const opts = { useNewUrlParser: true, useUnifiedTopology: true };

mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useCreateIndex', true);
mongoose.set('useFindAndModify', false);

const blogPostSchema = new Schema({
  id: { type: Number, unique: true },
  empid: String,
  date: Date
});

const BlogPost = mongoose.model('BlogPost', blogPostSchema);

const sampleData = [
  { empid: "test13", id: 6, date: '11-Jul-2019' },
  { empid: "test123", id: 4, date: '19-Jul-2019' },
  { empid: "test13", id: 4, date: '18-Jul-2019' }
];

const log = data => console.log(JSON.stringify(data, undefined, 2));

(async function() {
  try {
    const conn = await mongoose.connect(uri, opts);

    // Clean data
    await Promise.all(
      Object.values(conn.models).map(m => m.deleteMany())
    );

    // Collections must exist before they can be used in transactions
    await Promise.all(
      Object.values(conn.models).map(m => m.createCollection())
    );

    // With Transaction
    log("With Transaction");
    let session = await conn.startSession();
    session.startTransaction();
    try {
      await BlogPost.insertMany(sampleData, { session });
      await session.commitTransaction();
    } catch (e) {
      // Show the error and abort
      log({ err: e.errmsg, result: e.result.result.writeErrors });
      await session.abortTransaction();
    }
    log({ results: (await BlogPost.find()) });

    // No transaction
    log("Without Transaction");
    try {
      await BlogPost.insertMany(sampleData);
    } catch (e) {
      // Show the error
      log({ err: e.errmsg, result: e.result.result.writeErrors });
    }
    log({ results: (await BlogPost.find()) });

  } catch (e) {
    console.error(e);
  } finally {
    mongoose.disconnect();
  }
})();
And the output:
Mongoose: blogposts.createIndex({ id: 1 }, { unique: true, background: true })
Mongoose: blogposts.deleteMany({}, {})
"With Transaction"
Mongoose: blogposts.insertMany([ { _id: 5d8f28ac462a1e1a8c6838a2, empid: 'test13', id: 6, date: 2019-07-10T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a3, empid: 'test123', id: 4, date: 2019-07-18T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a4, empid: 'test13', id: 4, date: 2019-07-17T14:00:00.000Z, __v: 0 } ], { session: ClientSession("650da06d23544ef8bc1d345d93331d1e") })
{
"err": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"result": [
{
"code": 11000,
"index": 2,
"errmsg": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"op": {
"_id": "5d8f28ac462a1e1a8c6838a4",
"empid": "test13",
"id": 4,
"date": "2019-07-17T14:00:00.000Z",
"__v": 0
}
}
]
}
Mongoose: blogposts.find({}, { projection: {} })
{
"results": []
}
"Without Transaction"
Mongoose: blogposts.insertMany([ { _id: 5d8f28ac462a1e1a8c6838a5, empid: 'test13', id: 6, date: 2019-07-10T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a6, empid: 'test123', id: 4, date: 2019-07-18T14:00:00.000Z, __v: 0 }, { _id: 5d8f28ac462a1e1a8c6838a7, empid: 'test13', id: 4, date: 2019-07-17T14:00:00.000Z, __v: 0 } ], {})
{
"err": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"result": [
{
"code": 11000,
"index": 2,
"errmsg": "E11000 duplicate key error collection: test.blogposts index: id_1 dup key: { id: 4 }",
"op": {
"_id": "5d8f28ac462a1e1a8c6838a7",
"empid": "test13",
"id": 4,
"date": "2019-07-17T14:00:00.000Z",
"__v": 0
}
}
]
}
Mongoose: blogposts.find({}, { projection: {} })
{
"results": [
{
"_id": "5d8f28ac462a1e1a8c6838a5",
"empid": "test13",
"id": 6,
"date": "2019-07-10T14:00:00.000Z",
"__v": 0
},
{
"_id": "5d8f28ac462a1e1a8c6838a6",
"empid": "test123",
"id": 4,
"date": "2019-07-18T14:00:00.000Z",
"__v": 0
}
]
}
Note that when the transaction is in use there are no items inserted into the collection. Using insertMany() with its default behavior of ordered: true will insert batched items up until the point where any error is encountered.
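As an aside, if the intent were to keep whatever documents do not conflict rather than stopping at the first error, a rough sketch (outside of a transaction, reusing the sampleData and error-reporting style from the demonstration above) would pass ordered: false:

// Rough sketch: ordered: false attempts every document and reports the failures,
// instead of stopping at the first duplicate-key error.
try {
  await BlogPost.insertMany(sampleData, { ordered: false });
} catch (e) {
  // Documents that did not conflict are still inserted; only the duplicates fail.
  log({ err: e.errmsg, result: e.result.result.writeErrors });
}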
Note also, as stated, that since you are indeed expecting an error you must wrap such a statement in its very own try..catch or similar error handler. Otherwise any error (which is expected in the example case) would simply fall through to the outer catch, which in the demonstration simply exits the program.
Not actually in the question itself, but something not mentioned in the demonstrations of How to use MongoDB transaction using Mongoose?, is that whilst a transaction is active you must also include the session option on any subsequent reads in order to see the changes made within that transaction.
For instance, the following would show no content in a collection:
let session = await conn.startSession();
session.startTransaction();
try {
  await BlogPost.insertMany(sampleData, { session });
  let documents = await BlogPost.find(); // This would return nothing
  await session.commitTransaction();
} catch (e) {
  // Show the error and abort
  log({ err: e.errmsg, result: e.result.result.writeErrors });
  await session.abortTransaction();
}
However, including the session within a find() will actually show what is inserted:
try {
  await BlogPost.insertMany(sampleData, { session });
  // Actually includes the session (passed in the options argument)
  // and therefore sees the state within the transaction
  let documents = await BlogPost.find({}, null, { session });
  await session.commitTransaction();
} catch (e) {
  // Show the error and abort
  log({ err: e.errmsg, result: e.result.result.writeErrors });
  await session.abortTransaction();
}
And of course that read would in this case be dependent on the insertMany() not failing for any reason, since any error would result in exiting to the catch before the next request was made.
Once a transaction is committed, it is of course available to the global state of the connection. But whilst in progress only operations which include the same session information on which the transaction was started will have visibility of any changes implemented within that transaction.
For anyone who gets the error "Cannot read property 'map' of undefined" while passing the session as an option to insertMany: this error occurs because your MongoDB is running as a standalone server, and transactions require a replica set. To fix this you can refer to the npm package run-rs or follow this answer: https://stackoverflow.com/a/60603587/9611273
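As a rough sketch, once a local replica set is available (for example one started with run-rs), the connection string needs to list the members and the replica set name; the ports and set name below are assumptions for a local setup and should be adjusted to match yours:

// Assumed local three-member replica set on ports 27017-27019 with set name "rs0";
// adjust to match how your replica set was actually started.
const uri = 'mongodb://localhost:27017,localhost:27018,localhost:27019/test?replicaSet=rs0';
await mongoose.connect(uri, { useNewUrlParser: true, useUnifiedTopology: true });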

Storing data from a JSON object received from the Google People API

So I have a server that receives contact data from the Google People API, and the received object has the following structure:
{ connections:
   [ { resourceName: 'people/c3904925882068251400',
       etag: '%EgYBAgkLNy4aDQECAwQFBgcICQoLDA0iDFZUOUE0NkRBZW0wPQ==',
       names:
        [ { metadata: { primary: true, source: [Object] },
            displayName: 'Mihai Vrincut',
            familyName: 'Vrincut',
            givenName: 'Mihai',
            displayNameLastFirst: 'Vrincut, Mihai' },
          { metadata: { source: [Object] },
            displayName: 'Mihai Vrincut',
            familyName: 'Vrincut',
            givenName: 'Mihai',
            displayNameLastFirst: 'Vrincut, Mihai' } ],
       emailAddresses:
        [ { metadata: { primary: true, source: [Object] },
            value: 'mihai.vrincut#gmail.com' } ] },
     { resourceName: 'people/c3275206487406036814',
       etag: '%EgYBAgkLNy4aDQECAwQFBgcICQoLDA0iDHBFVzBUMm8wWU5nPQ==',
       names:
        [ { metadata: { primary: true, source: [Object] },
            displayName: 'aaaaaaaaa',
            givenName: 'aaaaaaaaa',
            displayNameLastFirst: 'aaaaaaaaa' } ] },
     { resourceName: 'people/c5777943907795350059',
       etag: '%EgYBAgkLNy4aDQECAwQFBgcICQoLDA0iDGxOeGYwblg3bFUwPQ==',
       names:
        [ { metadata: { primary: true, source: [Object] },
            displayName: 'costin',
            givenName: 'costin',
            phoneticFamilyName: 'cancius',
            phoneticGivenName: 'costin',
            displayNameLastFirst: 'costin' } ],
       emailAddresses: [ { metadata: { primary: true, source: [Object] }, value: 'hj' } ],
       phoneNumbers:
        [ { metadata: { primary: true, source: [Object] },
            value: '07543532512',
            canonicalForm: '+40754353251' } ] } ],
  totalPeople: 3,
  totalItems: 3 }
In order to get this object I used the util.inspect() method. However, when I try to access the names for example, I get undefined:
var response = util.inspect(responses, { depth: 5 });
console.log(response.connections[0].names);
What is wrong?
So, given the situation and the information you've given in the comment sections:
I assume that responses is already an object, but util.inspect turns it into a string with JSON-like syntax, only without the quotes (") around the key names. That's why you get
{ connections:
  ^
SyntaxError: Unexpected token c in JSON at position 2
So, try going over the responses object.
console.log(responses)
And get the names of the keys. With them:
console.log(responses.sth.sthElse.anotherSth.anotherSthElse.lastSth.connections)
And see if you get the expected result :)
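Put differently, if responses is already a plain object (as assumed above), the simplest fix is to keep util.inspect only for printing and to read the property from the object itself. A minimal sketch based on the structure shown in the question:

// Minimal sketch, assuming `responses` is the object shown in the question.
// util.inspect() returns a string meant for printing, not for property access.
const util = require('util');

console.log(util.inspect(responses, { depth: 5 })); // pretty-print for debugging
const names = responses.connections[0].names;       // read from the object directly
console.log(names);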
You should convert the response to a JSON object.
Try this:
console.log(JSON.parse(response).connections[0].names);
(I am assuming you are working in JavaScript.)
What I would do is check whether the response is a string:
console.log(typeof response)
If it is a string, convert it to an object:
let responseObject = JSON.parse(response);
Finally, see if you can access the object:
console.log(responseObject.connections[0].names);
Let me know how it goes :)

PubNub history API call always returning timetoken

I'm using the PubNub API in my React Native app and making a call to get message history as follows:
pubnub.history(
  {
    channel: channel,
    count: 15,
    includeTimetoken: false,
    start: props.lastMessageTimestamp
  },
  function (status, response) {
    console.log(response);
    if (!status.error) {
      props.addHistory(response);
    }
  }
);
For some reason I'm always getting the timetoken returned with each message, both by default and even when I explicitly specify not to return the timetoken. Any idea what's going on here? Here's a sample response:
{ messages:
   { messages:
      [ { timetoken: null,
          entry:
           { text: 'This is a message',
             user: { _id: 1 },
             createdAt: '2016-10-25T18:56:50.205Z',
             _id: 'temp-id-958468' } },
        { timetoken: null,
          entry:
           { text: 'Message',
             user: { _id: 1 },
             createdAt: '2016-10-25T18:57:45.810Z',
             _id: 'temp-id-322242' } },
        { timetoken: null,
          entry:
           { text: 'Test',
             user: { _id: 1 },
             createdAt: '2016-10-25T21:25:43.290Z',
             _id: 'temp-id-806299' } },
        { timetoken: null,
          entry:
           { text: 'Text',
             user: { _id: 1 },
             createdAt: '2016-10-25T21:34:41.204Z',
             _id: 'temp-id-399187' } } ],
     startTimeToken: 14774207865431668,
     endTimeToken: 14774312812443264 },
  timestamp: undefined }
PubNub History Timetokens
This is by design: the new SDK v4 response payload always returns the timetoken key; it just doesn't have a value if includeTimetoken is false (the default). It's probably never necessary to exclude the timetokens, so we'll likely just deprecate includeTimetoken.
The Future of Storage add-on & History API
We are redesigning the Storage architecture and the history API to greatly enhance features and usability (ease of use), and we will be returning the timetoken with each message by default, with no option to exclude it. It's just typically something you need more often than not.
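If the null timetoken keys get in the way on the client side, one option is to strip them out when handling the response. A minimal sketch based on the nested messages.messages shape in the sample response above (extractEntries is just an illustrative helper name):

// Minimal sketch: ignore the timetoken keys and keep only the message payloads.
// The nested `messages.messages` shape matches the sample response above.
function extractEntries(response) {
  return response.messages.messages.map(function (m) {
    return m.entry; // the original published message
  });
}

// usage inside the history callback:
// props.addHistory(extractEntries(response));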
