Update MongoDB in Mongoose with two arrays - javascript

I am new to MongoDB and couldn't find anything about this in the documentation (maybe I looked in the wrong places).
I have two arrays, say:
ids = [1, 2, 3, 4]
values = [12, 33, 44, 11]
Currently I loop through the ids list and update the DB for each entry, which seems incredibly inefficient.
For that, I build an update object on each iteration (simplified):
update['values']['duration'] = values[i];
This is how I write the update to the DB:
await CollectionName.updateOne({ids: ids[i]}, {$set: update});
Any pointers? Thanks! :)
Edit:
Example 2:
ids = [4, 7, 9]
values = [
  {"array": {
    "preference": "test",
    "1Duration": 55,
    "2Duration": 66
  }},
  {"array": {
    "preference": "test",
    "1Duration": 22,
    "2Duration": 33
  }},
  {"array": {
    "preference": "test",
    "1Duration": 78,
    "2Duration": 11
  }}
]

Your code should look something like this:
User.findOneAndUpdate(
  {}, // put your filter here to target a specific record
  { $push: { ids, values } },
  (err) => {
    if (err) throw err;
    console.log('record created');
  }
);

You can use the $in operator in this case.
await CollectionName.updateMany({ids: {$in: ids}}, {$set: update});
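Note that $in applies the same update document to every match, so it fits when all matched documents get the same value. If each id must be paired with its own value, a single bulkWrite is one option. A minimal sketch, assuming CollectionName is a Mongoose model, ids[i] pairs with values[i], and the nested path matches the question's update object:

// One updateOne operation per id/value pair, sent in a single round trip
const ops = ids.map((id, i) => ({
  updateOne: {
    filter: { ids: id },
    update: { $set: { 'values.duration': values[i] } }
  }
}));
await CollectionName.bulkWrite(ops);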

Related

Mongoose "Can't save() the same doc multiple times in parallel" on related data push when calling save function in same instance of model [duplicate]

I need to push multiple values into an array in Mongoose using one call. I tried doing it with a smaller array, but the array gets inserted as a sub-array.
var kittySchema = new mongoose.Schema({
  name: String,
  values: [Number]
});
var Kitten = db.model('Kitten', kittySchema);
Kitten.update({name: 'fluffy'}, {$push: {values: [2, 3]}}, {upsert: true}, function(err){
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully added");
  }
});
Calling the above code three times gives the following result:
{ "_id" : ObjectId("502b0e807809d79e84403606"), "name" : "fluffy", "values" : [ [ 2, 3 ], [ 2, 3 ], [ 2, 3 ] ] }
Whereas what I want is something like this:
{ "_id" : ObjectId("502b0e807809d79e84403606"), "name" : "fluffy", "values" : [ 2, 3 ,2 ,3, 2, 3] }
Another thing I noticed: since the type in the array (values) is specified as Number, shouldn't the 'strict' option ensure that anything other than Numbers is rejected? In this case another array is allowed to be inserted.
(Dec 2014 update) Since MongoDB 2.4 you should use:
Kitten.update({name: 'fluffy'}, {$push: {values: {$each: [2, 3]}}}, {upsert: true}, function(err){
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully added");
  }
});
Deprecated: see the other solution below using $push with $each.
Your example is close, but you want $pushAll rather than $push so that each value is added separately (rather than pushing another array onto the values array):
var Kitten = db.model('Kitten', kittySchema);
Kitten.update({name: 'fluffy'}, {$pushAll: {values: [2, 3]}}, {upsert: true}, function(err){
  if (err) {
    console.log(err);
  } else {
    console.log("Successfully added");
  }
});
Or use the $each modifier with $addToSet:
https://docs.mongodb.com/manual/reference/operator/update/addToSet/#each-modifier
// Existing tags array
{ _id: 2, item: "cable", tags: [ "electronics", "supplies" ] }
// Add "camera" and "accessories" to it
db.inventory.update(
  { _id: 2 },
  { $addToSet: { tags: { $each: [ "camera", "accessories" ] } } }
)
The current docs no longer list $pushAll; it was deprecated and has since been removed.
Now the right choice is the combination of $push and $each. An example:
// User schema: { uid: String, transactions: [Object] }
const filter = { uid: uid };
const update = {
  $push: {
    transactions: { $each: dataarr }
  }
};
User.updateOne(filter, update, { upsert: true }, (err) => {
  if (err) {
    console.log(err);
  }
});
Pass {upsert: true} in the options to insert a new document if the filter matches nothing.

batchSize field name ignored in Field Projection

I have a user_batch collection. It contains the following documents:
[{
  _id: ObjectId("594baf96256597ec035df23c"),
  name: "Batch 1",
  batchSize: 30,
  users: []
},
{
  _id: ObjectId("594baf96256597ec035df234"),
  name: "Batch 2",
  batchSize: 50,
  users: []
}]
In the find query I want to project only name and batchSize. But when I execute the find query from Node.js, I get the entire document in the query result. Query:
db.collection('user_batch').find({}, {name: 1, batchSize: 1}).toArray((err, result) => {
  if (err)
    console.log(err)
  else
    console.log(result)
})
If I pass just {name: 1}, it projects _id and name. But if I include batchSize, it returns the entire document.
Note: I'm not facing this issue when executing this query in the Mongo shell.
You are correct: the driver incorrectly interprets this as the batchSize cursor option and ignores the projection statement.
The correct way to do this in modern driver releases is to use the .project() "cursor method" instead. This is more consistent with other language driver implementations.
db.collection('collection').find()
  .project({ name: 1, batchSize: 1 })
  .toArray();
As a full demonstration:
const mongodb = require('mongodb'),
      MongoClient = mongodb.MongoClient;

(async function() {
  let db;
  try {
    db = await MongoClient.connect('mongodb://localhost/test');

    // New form uses .project() as a cursor method
    let result = await db.collection('collection').find()
      .project({ name: 1, batchSize: 1 })
      .toArray();
    console.log(JSON.stringify(result, undefined, 2));

    // Legacy form confuses this as being a legacy "cursor option"
    let other = await db.collection('collection')
      .find({}, { name: 1, batchSize: 1 })
      .toArray();
    console.log(JSON.stringify(other, undefined, 2));
  } catch(e) {
    console.error(e)
  } finally {
    db.close()
  }
})()
Produces the output:
[
  {
    "_id": "594baf96256597ec035df23c",
    "name": "Batch 1",
    "batchSize": 30
  },
  {
    "_id": "594baf96256597ec035df234",
    "name": "Batch 2",
    "batchSize": 50
  }
]
[
  {
    "_id": "594baf96256597ec035df23c",
    "name": "Batch 1",
    "batchSize": 30,
    "users": []
  },
  {
    "_id": "594baf96256597ec035df234",
    "name": "Batch 2",
    "batchSize": 50,
    "users": []
  }
]
Where the first output form is the corrected one, using .project()
The syntax of Find has changed. Below is what I needed to know to solve this problem. This is excerpted from https://github.com/mongodb/node-mongodb-native/blob/master/CHANGES_3.0.0.md#find
Find
find and findOne no longer support the fields parameter. You can achieve the same results as the fields parameter by using Cursor.prototype.project or by passing the projection property in on the options object. Additionally, find does not support individual options like skip and limit as positional parameters. You must either pass in these parameters in the options object, or add them via Cursor methods like Cursor.prototype.skip.
2.x syntax:
const cursor = coll.find({ a: 42 }, { someField: 1 });
3.x syntax:
const cursor = coll.find({ a: 42 }).project({ someField: 1 });
/* OR */
const cursor = coll.find({ a: 42 }, { projection: { someField: 1 } });
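The same migration applies to the positional skip and limit parameters the excerpt mentions. A short sketch (the collection, field names, and values are illustrative):

// 3.x: pass skip/limit in the options object...
const viaOptions = coll.find({ a: 42 }, { projection: { someField: 1 }, skip: 10, limit: 5 });
/* OR chain the cursor methods */
const viaCursor = coll.find({ a: 42 }).project({ someField: 1 }).skip(10).limit(5);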

combo of async map, recursion and callback in a function - nodejs

I have a function that computes a value and, after the computation, returns the value via a callback to another function.
The situation gets pretty messy where I have two nested loops plus recursion inside them.
The error is: uncaughtException: Callback was already called.
First, here is a sample of the code:
functionTest(array, function (err, result) {
  if (err) {
    nresponse.error(err);
  } else {
    nresponse.success(res, result);
  }
});

function dependicies(array, callback) {
  async.map(array, function(item, outerNext) {
    async.map(item.members, function(value, innerNext) {
      dependicies(array, callback);
      innerNext(); // should I call this after or before the recursive call?
      outerNext(); // where should I call outerNext?
    });
  }, function(err, result) {
    // **
    if (err) {
      callback(err);
    } else {
      callback(null, sthComputedInMap);
    }
  });
}
As the comments say: should I call innerNext after or before the recursive call? And should I call outerNext after the scope of the second map?
Things are pretty messed up. How do I clean this up? I'm reading the documentation.
** Isn't this the end of the first async.map? I think the problem is that this is called for every async.map in the recursion. What I want is to call it once, as a break.
In the code I loop through an array. Assume I am collecting the name fields of one document's descendants, starting from itself down to its last grand-grandchildren. The structure of the array I traverse looks like this:
[
  {id: 1, childrenIds: [3, 4, 5], name: ""},
  {id: 3, childrenIds: [8, 5], name: ""},
  {id: 21, childrenIds: [5], name: ""},
  {id: 7, childrenIds: [5], name: ""},
  {id: 5, childrenIds: [], name: ""}
]
The first async.map traverses each document; the other traverses its children array.
I hope this helps :)
You can mix loops, recursion and callbacks using SynJS. Below is a working example to illustrate; setTimeout is just used as a stand-in for an asynchronous function that returns its result via a callback.
global.SynJS = global.SynJS || require('synjs');

function processOneDoc(modules, documents, doc, childNames) {
  for (var i = 0; doc.childrenIds && i < doc.childrenIds.length; i++) {
    var currDoc = documents[doc.childrenIds[i]];
    if (currDoc) {
      childNames.push(currDoc.name);
      var chNames = [];
      setTimeout(function(){
        SynJS.run(modules.processOneDoc, null, modules, documents, currDoc, chNames, function(){
          SynJS.resume(_synjsContext);
        });
        if (chNames.length)
          childNames.push(chNames);
      }, 2000);
      SynJS.wait();
    }
  }
};

function processAll(modules, documents) {
  for (var d in documents) {
    var childNames = [];
    SynJS.run(modules.processOneDoc, null, modules, documents, documents[d], childNames, function(){
      SynJS.resume(_synjsContext);
    });
    SynJS.wait();
    console.log(new Date().toISOString(), d, childNames);
  }
}

var modules = {
  SynJS: SynJS,
  processOneDoc: processOneDoc,
};

var documents = {
  1:  {id: 1,  childrenIds: [3, 4, 5], name: "name of 1st"},
  3:  {id: 3,  childrenIds: [8, 5],    name: "name of 3rd"},
  21: {id: 21, childrenIds: [5],       name: "name of 21st"},
  7:  {id: 7,  childrenIds: [5],       name: "name of 7th"},
  5:  {id: 5,  childrenIds: [],        name: "name of 5th"}
};

SynJS.run(processAll, null, modules, documents, function () {
  console.log('done');
});
It would produce the following output:
2017-01-06T18:50:57.750Z 1 [ 'name of 3rd', [ 'name of 5th' ], 'name of 5th' ]
2017-01-06T18:50:59.785Z 3 [ 'name of 5th' ]
2017-01-06T18:50:59.800Z 5 []
2017-01-06T18:51:01.831Z 7 [ 'name of 5th' ]
2017-01-06T18:51:03.863Z 21 [ 'name of 5th' ]
done
Based on my understanding of what you have provided in your question, your recursive call is not in the right location. One way to solve your problem:
function dependencies(array, callback) {
  outerResult = async.map(array, function(item, outerNext) {
    var innerResult = async.map(item.members, function(value, innerNext) {
      // recursion condition: for instance, check whether value has a nested array
      if (value.members.length)
        dependencies(value.members, callback);
      else
        innerNext(null, innerResult);
    });
    outerNext(null, outerResult);
  }, function(err, finalResult) {
    // `finalResult` is your result
    callback(finalResult);
  });
}
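For contrast, the same traversal can be written without the async library at all, which sidesteps the "Callback was already called" class of bugs. A minimal sketch over the array shape from the question (the names here are hypothetical; the lookups are synchronous, so swap in await on any real async call):

// Collect the names of every descendant of `doc`, guarding against
// missing ids and cycles in childrenIds.
function collectNames(byId, doc, seen = new Set()) {
  const names = [];
  for (const childId of doc.childrenIds || []) {
    const child = byId.get(childId);
    if (!child || seen.has(childId)) continue; // skip unknown or already-visited ids
    seen.add(childId);
    names.push(child.name, ...collectNames(byId, child, seen));
  }
  return names;
}

const docs = [
  { id: 1, childrenIds: [3, 4, 5], name: 'first' },
  { id: 3, childrenIds: [8, 5], name: 'third' },
  { id: 5, childrenIds: [], name: 'fifth' }
];
const byId = new Map(docs.map(d => [d.id, d]));
docs.forEach(d => console.log(d.id, collectNames(byId, d)));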

Rethinkdb append to array if exists

In RethinkDB, I have a table authors with the following layout:
{
  id: 12,
  videos: [1, 2, 3]
}
Now I get new authors with objects like this:
{
  id: 12,
  videos: [4, 5]
}
If the author already exists, I want to append the new videos 4 and 5 to the list of videos.
If the author does not exist, just insert the document as-is.
My approach was the following, but it didn't work:
r.table('authors').getAll(4, 3, 2, 1, {index: 'id'})
  .replace(function(author, index) {
    return r.branch(
      author.eq(null),
      {
        id: index,
        videos: [1, 2, 3]
      },
      author
    );
  })
-> Response:
{
  "deleted": 0,
  "errors": 3,
  "first_error": "Expected 2 arguments but found 1.",
  "inserted": 0,
  "replaced": 0,
  "skipped": 0,
  "unchanged": 0
}
Thanks!
Your logic is good; it's just a syntax issue.
Given an author: if the author doesn't exist, insert it; otherwise, append to the video array. Here is what I came up with, using your logic:
var author = {
  id: 12,
  videos: [9, 10]
};

r.table('authors').insert(author).do(
  function (doc) {
    return r.branch(doc('inserted').ne(0),
      r.expr({inserted: 1}),
      r.table('authors').get(author["id"]).update(function(doc) {
        return {videos: doc('videos').union(author["videos"])};
      })
    );
  }
)
If the insert succeeds, it means no document with the same id exists, and we don't have to do anything else. Otherwise, we update the document and append the videos to it.
To update an array of multiple authors, we can use forEach and expr to turn the array into a ReQL object. In this case we use bracket notation to get a field, instead of [] as on a JavaScript object:
var authors = [{
  id: 12,
  videos: [90, 91]
}, {
  id: 14,
  videos: [1, 2]
}];

r.expr(authors).forEach(function(author) {
  return r.table('authors').insert(author).do(
    function (doc) {
      return r.branch(doc('inserted').ne(0),
        r.expr({inserted: 1}),
        r.table('authors').get(author("id")).update(function(doc) {
          return {videos: doc('videos').union(author("videos"))};
        })
      );
    }
  );
})
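Newer RethinkDB releases (2.3+) also accept a conflict resolution function directly on insert, which collapses the insert-then-update pattern above into a single call; a sketch under that assumption:

r.table('authors').insert(
  { id: 12, videos: [4, 5] },
  // On a duplicate primary key, merge the old and new video arrays
  { conflict: (id, oldDoc, newDoc) =>
      oldDoc.merge({ videos: oldDoc('videos').union(newDoc('videos')) })
  }
)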
Something like this should do it:
r.expr([4, 3, 2, 1]).forEach(function(id) {
  return r.table('authors').get(id).replace(function(row) {
    return r.branch(row.eq(null), {id: id, videos: [1, 2, 3]}, row);
  });
});
The lambda for the replace method takes only one argument, but you are trying to pass two (author, index), hence the "Expected 2 arguments but found 1" error.

RethinkDB - Updating nested array

I have a survey table that looks like so:
{
  id: Id,
  date: Date,
  clients: [{
    client_id: Id,
    contacts: [{
      contact_id: Id,
      score: Number,
      feedback: String,
      email: String
    }]
  }]
}
I need to update the score and feedback fields under a specific contact. Currently, I run the update like this:
function saveScore(obj) {
  var dfd = q.defer();
  var survey = surveys.get(obj.survey_id);
  survey
    .pluck({ clients: 'contacts' })
    .run()
    .then(results => {
      results.clients.forEach((item, outerIndex) => {
        item.contacts.forEach((item, index, array) => {
          if (Number(item.contact_id) === Number(obj.contact_id)) {
            array[index].score = obj.score;
            console.log(outerIndex, index);
          }
        });
      });
      return survey.update(results).run();
    })
    .then(results => dfd.resolve(results))
    .catch(err => dfd.resolve(err));
  return dfd.promise;
};
The update documentation shows how to update nested key:value pairs, but I can't find any examples of updating an individual item in an array.
Is there a better and hopefully cleaner way to update items in a nested array?
You might need to get the array, filter out the desired value, and then append the updated value again. Then you can pass the updated array to the update method.
Example
Let's say you have a document with two clients that both have a name and a score and you want to update the score in one of them:
{
  "clients": [
    {
      "name": "jacob",
      "score": 200
    },
    {
      "name": "jorge",
      "score": 57
    }
  ],
  "id": "70589f08-284c-495a-b089-005812ec589f"
}
You can get that specific document, run the update command with an anonymous function, and then pass the new, updated array into the clients property.
r.table('jacob').get("70589f08-284c-495a-b089-005812ec589f")
  .update(function (row) {
    return {
      // Get all the clients, except the one we want to update
      clients: row('clients').filter(function (client) {
        return client('name').ne('jorge');
      })
      // Append a new client, with the updated information
      .append({ name: 'jorge', score: 57 })
    };
  });
I do think this is a bit cumbersome and there's probably a nicer, more elegant way of doing this, but this should solve your problem.
Database Schema
Maybe it's worth creating a contacts table for all your contacts and then doing some sort of join on your data. Then the contacts property in your clients array would look something like this:
{
  id: Id,
  date: Date,
  clients: [{
    client_id: Id,
    contact_scores: {
      Id: score(Number)
    },
    contact_feedbacks: {
      Id: feedback(String)
    }
  }]
}
Database schema:
{
  "clients": [
    {
      "name": "jacob",
      "score": 200
    },
    {
      "name": "jorge",
      "score": 57
    }
  ],
  "id": "70589f08-284c-495a-b089-005812ec589f"
}
Then you can do it like this, using a map and branch query:
r.db('users').table('participants').get('70589f08-284c-495a-b089-005812ec589f')
  .update({"clients": r.row('clients').map(function(elem) {
    return r.branch(
      elem('name').eq("jacob"),
      elem.merge({ "score": 100 }),
      elem
    );
  })
});
It works for me:
r.table(...).get(...).update({
  contacts: r.row('contacts').changeAt(0,
    r.row('contacts').nth(0).merge({feedback: "NICE"}))
})
ReQL solution
Creating a query to update a JSON array of objects in place is a rather complicated process in RethinkDB (and most query languages). The best (and only) ReQL solution that I know of uses a combination of the update, offsetsOf, do, changeAt, and merge functions. This solution retains the order of objects in the array, and only modifies values on the objects matched by the offsetsOf calls.
The following code (or something similar) can be used to update an array of objects (i.e. clients) which contain an array of objects (i.e. contacts).
Here '%_databaseName_%', '%_tableName_%', '%_documentUUID_%', '%_clientValue_%', and '%_contactValue_%' must be provided.
r.db('%_databaseName_%').table('%_tableName_%').get('%_documentUUID_%').update(row =>
  row('clients')
    .offsetsOf(client => client('client_id').eq('%_clientValue_%'))(0)
    .do(clientIndex =>
      row('clients')(clientIndex)('contacts')
        .offsetsOf(contact => contact('contact_id').eq('%_contactValue_%'))(0)
        .do(contactIndex => ({
          clients: row('clients').changeAt(clientIndex,
            row('clients')(clientIndex).merge({
              contacts: row('clients')(clientIndex)('contacts').changeAt(contactIndex,
                row('clients')(clientIndex)('contacts')(contactIndex).merge({
                  score: 0,
                  feedback: 'xyz'
                }))
            }))
        }))
    )
)
Why go through the trouble of forming this into ReQL?
survey
  .pluck({ clients: 'contacts' }).run()
  .then(results => {
    results.clients.forEach((item, outerIndex) => {
      item.contacts.forEach((item, index, array) => {
        if (Number(item.contact_id) === Number(obj.contact_id)) {
          array[index].score = obj.score;
          console.log(outerIndex, index);
        }
      });
    });
    return survey.update(results).run();
  })
While the code provided by Jacob (the user who asked the question here on Stack Overflow - shown above) might look simpler to write, the performance is probably not as good as the ReQL solution.
1) The ReQL solution runs on the query server (i.e. the database side), so the work is done during the database write (higher performance). The code above does not make full use of the query server: it makes both a read and a write request (pluck().run() and update().run()), and the data is processed on the client side (i.e. in Node.js) after the pluck() query runs (lower performance).
2) The code above also requires the query server to send all the data back to the client side, so the response payload (bandwidth usage / download size) can be several megabytes. The ReQL solution is processed on the query server, and its response typically just confirms that the write completed; only a few bytes are sent back, in a single request.
ReQL is too complicated
However, ReQL (and especially SQL) seems overly complicated for working with JSON, and it seems to me that JSON should be used when working with JSON.
I've also proposed that the ReThinkDB community adopt an alternative to ReQL that uses JSON instead (https://github.com/rethinkdb/rethinkdb/issues/6736).
The solution to updating nested JSON arrays should be as simple as...
r('database.table').update({
  clients: [{
    client_id: 0,
    contacts: [{
      contact_id: 0,
      score: 0,
      feedback: 'xyz',
    }]
  }]
});
tfmontague is on the right path, but I think his answer can be improved a lot. Because he uses ...(0), his answer can throw errors when no match is found.
zabusa also provides a ReQL solution using map and branch, but doesn't show the complete nested update. I will expand on this technique.
ReQL expressions are composable so we can isolate complexity and avoid repetition. This keeps the code flat and clean.
First write a simple function mapIf:
const mapIf = (rexpr, test, f) =>
  rexpr.map(x => r.branch(test(x), f(x), x));
Now we can write the simplified updateClientContact function:
const updateClientContact = (doc, clientId, contactId, patch) =>
  doc.merge(
    { clients:
        mapIf(
          doc('clients'),
          c => c('client_id').eq(clientId),
          c => c.merge(
            { contacts:
                mapIf(
                  c('contacts'),
                  c => c('contact_id').eq(contactId),
                  c => c.merge(patch)
                )
            }
          )
        )
    }
  );
Use it like this:

// fetch the document to update
const someDoc =
  r.db(...).table(...).get(...);

// create patch for client id [1] and contact id [12]
const patch =
  updateClientContact(someDoc, 1, 12, { name: 'x', feedback: 'z' });

// apply the patch
someDoc.update(patch);
Here's a concrete example you can run in reql> ...
const testDoc =
  { clients:
      [ { client_id: 1
        , contacts:
            [ { contact_id: 11, name: 'a' }
            , { contact_id: 12, name: 'b' }
            , { contact_id: 13, name: 'c' }
            ]
        }
      , { client_id: 2
        , contacts:
            [ { contact_id: 21, name: 'd' }
            , { contact_id: 22, name: 'e' }
            , { contact_id: 23, name: 'f' }
            ]
        }
      , { client_id: 3
        , contacts:
            [ { contact_id: 31, name: 'g' }
            , { contact_id: 32, name: 'h' }
            , { contact_id: 33, name: 'i' }
            ]
        }
      ]
  };

updateClientContact(r.expr(testDoc), 2, 23, { name: 'x', feedback: 'z' });
The result will be:
{ clients:
    [ { client_id: 1
      , contacts:
          [ { contact_id: 11, name: 'a' }
          , { contact_id: 12, name: 'b' }
          , { contact_id: 13, name: 'c' }
          ]
      }
    , { client_id: 2
      , contacts:
          [ { contact_id: 21, name: 'd' }
          , { contact_id: 22, name: 'e' }
          , { contact_id: 23, name: 'x', feedback: 'z' } // <--
          ]
      }
    , { client_id: 3
      , contacts:
          [ { contact_id: 31, name: 'g' }
          , { contact_id: 32, name: 'h' }
          , { contact_id: 33, name: 'i' }
          ]
      }
    ]
}
Better late than never:
I had the same problem and solved it in two ways.
With a specific client_id:
r.db('nameDB').table('nameTable').get('idRegister')
  .update({'clients': r.row('clients')
    .map(elem => {
      return r.branch(
        elem('client_id').eq('your_specific_client_id'),
        elem.merge({
          contacts: elem('contacts').map(elem2 =>
            r.branch(
              elem2('contact_id').eq('idContact'),
              elem2.merge({
                score: 99999,
                feedback: 'yourString'
              }),
              elem2
            )
          )
        }),
        elem
      );
    })
  })
Without a specific client_id:
r.db('nameDB').table('nameTable').get('idRegister')
  .update({'clients': r.row('clients')
    .map(elem =>
      elem.merge({
        contacts: elem('contacts').map(elem2 =>
          r.branch(
            elem2('contact_id').eq('idContact'),
            elem2.merge({
              score: 99999,
              feedback: 'yourString'
            }),
            elem2
          )
        )
      })
    )
  })
I hope this works for you, even though the question was asked quite a while ago.
