How to prepare JSON for export to CSV - javascript

I want to parse JSON loaded from a file — const notCleanData = JSON.parse(fs.readFileSync('db.json')); — to be able to export to CSV using json2csv. I loaded the file and learned how to export, but I can't figure out how to strip the unnecessary part of the JSON, because it makes the CSV export come out wrong. Instead of having the data from the array in separate columns, I get all the data under one column with "group" as the header. How do I convert A.json to B.json so I can export clean JSON to CSV?
A.json
{
"group" : [
{
"A" : "1",
"B" : "2"
},
{
"A" : "3",
"B" : "4"
}
],
"profile" : {
"C" : "5"
}
}
B.json
{
"A" : "1",
"B" : "2"
},
{
"A" : "3",
"B" : "4"
}
In short: How to extract data only from "group" and add it to variable?

You can use jsonpath for that:
// Load the jsonpath package (npm: jsonpath) for querying JSON structures.
let jp = require('jsonpath');

// Sample input document (the contents of A.json).
let A = {
  group: [
    { A: '1', B: '2' },
    { A: '3', B: '4' },
  ],
  profile: { C: '5' },
};

// '$.group' selects the "group" property; query() always wraps its
// matches in an outer array, hence the nested-array output below.
let B = jp.query(A, '$.group');
console.log(B);
Output:
[ [ { A: '1', B: '2' }, { A: '3', B: '4' } ] ]

Related

Transform an explicit JSON payload into an array driven generic payload?

I am currently working in a project that has insisted in explicitly defining over 1,700 questions into a JSON data schema for an API and its getting out of control. I have been suggesting a more generic structure to the schema and let the data do the talking to tell you what the context is.
Whilst there are debates happening around which schema to use, we have decided on our internal systems, to go ahead and use a more generic model even if the externally facing model is the explicit one. This means we need an adapter to convert from one to the other, until such time as we can just use the one we wanted in the first place.
The business is a Java shop, I don't know whether to advise to build the adapter in Java or whether we can incorporate some lightweight JavaScript to do the work, maybe in the form of a configuration.
My question is: How would you approach converting the first JSON example into the second JSON example? It maps from explicitly defined objects to generic objects in arrays. Thanks for considering my question.
Example One
{
"nested_object" : {
"department_one" : {
"floor" : "4",
"product_one" : {
"quantity" : 10,
"size" : "L"
},
"product_two" : {
"quantity" : 5,
"size" : "S"
}
},
"department_two" : {
"floor" : "2",
"product_thirteen" : {
"quantity" : 1,
"size" : "M"
},
"product_eleven" : {
"quantity" : 8,
"size" : "L"
}
}
}
}
Example Two
{
"departments" : [
{
"department_name" : "department_one",
"floor" : "4",
"products" : [
{
"product_name" : "product_one",
"quantity" : 10,
"size" : "L"
},
{
"product_name" : "product_two",
"quantity" : 5,
"size" : "S"
}
]
},
{
"department_name" : "department_two",
"floor" : "2",
"products" : [
{
"product_name" : "product_thirteen",
"quantity" : 1,
"size" : "M"
},
{
"product_name" : "product_eleven",
"quantity" : 8,
"size" : "L"
}
]
}
]
}
You could use a combination of Object.keys (to grab product and department names) and Array.prototype.map. Below is a quick implementation.
// Source document (Example One shape): departments keyed by name, each
// holding a "floor" plus products keyed by product name.
const obj1 = {
  "nested_object" : {
    "department_one" : {
      "floor" : "4",
      "product_one" : { "quantity" : 10, "size" : "L" },
      "product_two" : { "quantity" : 5, "size" : "S" }
    },
    "department_two" : {
      "floor" : "2",
      "product_thirteen" : { "quantity" : 1, "size" : "M" },
      "product_eleven" : { "quantity" : 8, "size" : "L" }
    }
  }
};

// Target document (Example Two shape).
const transformedObj = {
  departments: [],
};

// Department names are the keys of the nested object.
const departmentKeys = Object.keys(obj1.nested_object);

const departmentsArr = departmentKeys.map((key) => {
  const department = obj1.nested_object[key];
  const floor = department.floor;

  // Every key other than "floor" names a product. Filtering here (instead
  // of the original `delete department.floor`) leaves the source object
  // untouched, so obj1 can still be used after the transform.
  const productsKeysArr = Object.keys(department).filter((k) => k !== 'floor');

  // Build one product object per product key for this department.
  const productsArr = productsKeysArr.map((product) => ({
    product_name: product,
    quantity: department[product].quantity,
    size: department[product].size
  }));

  return {
    department_name: key,
    floor: floor,
    products: productsArr
  };
});

// Assign the departments array to the transformed object.
transformedObj.departments = departmentsArr;
console.log(transformedObj);
This would be my take on this. I like conciseness and expressiveness in implementations:
// Same input as Example One (the original snippet's `{ ... }` placeholder
// was not valid JavaScript, and the mapped result was discarded).
const data = {
  nested_object: {
    department_one: {
      floor: '4',
      product_one: { quantity: 10, size: 'L' },
      product_two: { quantity: 5, size: 'S' }
    },
    department_two: {
      floor: '2',
      product_thirteen: { quantity: 1, size: 'M' },
      product_eleven: { quantity: 8, size: 'L' }
    }
  }
};

// Each department entry is [name, value]; destructuring peels "floor" off
// and gathers the remaining (product) keys into `ps`.
const departments = Object.entries(data.nested_object).map(([department_name, { floor, ...ps }]) => ({
  department_name,
  floor,
  // Each product entry is [name, props]; re-attach the name as a field.
  products: Object.entries(ps).map(([product_name, p]) => ({ product_name, ...p }))
}));
console.log(departments);

Count in JavaScript

If I have an object like this in javascript, how can I count the number of elements in [A, B, C, D, etc.]?
{ "test" : [ "A", "B", "C", "D", ... ] }
And if i have:
{ "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } }
{ "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E", "F" ] } }
How can I convert it to the following structure?
{ "_id" : "1", "value" : 4 }
{ "_id" : "2", "value" : 6 }
Thanks
Try the following:
var data = [{ "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } },{ "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E","F" ] } }];
// Build brand-new { _id, value } objects instead of mutating the records
// inside `data` (mutating inside a map callback is an anti-pattern and
// destroys the original nested arrays).
var arr = data.map((d) => ({
  _id: d._id,
  value: d.value.test.length,
}));
console.log(arr);
EDIT
The more smart way is to simply use a forEach loop in the following way
// In-place variant: overwrite each record's nested `value` object with the
// length of its "test" array.
var data = [{ "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } },{ "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E","F" ] } }];
for (const record of data) {
  record.value = record.value.test.length;
}
console.log(data);
You can always call .length on the array; check the example below:
// An array exposes its element count through its .length property.
var obj = { "test" : [ "A", "B", "C", "D", "E", "F"] };
const elementCount = obj.test.length;
console.log(elementCount);
EDIT:
For your second question
you can do
obj.value = obj.value.test.length;
var obj = { "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E", "F" ] } };
// Swap the nested object for the count of its "test" array.
const { test } = obj.value;
obj.value = test.length;
console.log(obj);
As the answer changed from generic javascript to mongodb, I have to update the answer to reflect the question:
Instead of using map/reduce in mongo, consider using the aggregation framework mongodb provides, it allows for very powerful querying.
In this case, you want the $size operator
// MongoDB aggregation alternative to map/reduce: $size replaces the nested
// "value.test" array with its element count. The document _id is carried
// through automatically unless explicitly excluded.
db.yourCollection.aggregate([
{ $project: { value: { $size: "$value.test" } } }
])
Unless explicitly removed, mongodb will always include the document _id so for this use case we only need to address the value, if you need any other value from the documents in your result-set, add them like:
// Same $size projection, but also carrying extra fields through the
// pipeline — each must be referenced explicitly (e.g. foo: '$foo') to
// appear in the result set.
db.yourCollection.aggregate([
{ $project: {
value: { $size: "$value.test" },
foo: '$foo',
bar: '$bar',
baz: '$baz',
} }
])
Original answer (pure js, no mongodb)
I suspect the example is actually part of an array
{ "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } }
{ "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E" ] } }
So I assume you want to map a large set of records this way, and you don't want to (accidentally) destroy the original value property (as other answers do), here's how I'd do this:
const list = [
  { "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } },
  { "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E" ] } }
];
// Produce fresh { _id, value } objects so the original records (and their
// nested `value` property) are left untouched.
const mapped = list.map(({ _id, value }) => ({
  _id,
  // assuming there's only test and it's an array
  value: value.test.length,
}));
If value inside each record is an object which may contain zero or more arrays and you wish to count the total amount of items inside all of them, you can reduce the arrays into a total number.
const list = [
  { "_id" : "1", "value" : { "test" : [ "A", "B", "C", "D" ] } },
  { "_id" : "2", "value" : { "test" : [ "A", "B", "C", "D", "E" ] } },
  { "_id" : "3", "value" : { } },
  { "_id" : "4", "value" : { "test" : [ "A", "B" ], "test2" : [ "C", "D", "E" ] } },
];
// Sum the lengths of every array stored under `value`, skipping any
// non-array members; a record with no arrays counts as 0.
const mapped = list.map((record) => {
  let total = 0;
  for (const member of Object.values(record.value)) {
    if (Array.isArray(member)) {
      total += member.length;
    }
  }
  return { _id: record._id, value: total };
});
console.log(mapped);
You can transform your array with a simple forEach loop - like below:
// Build a parallel list of { _id, value } records, where value is the
// length of each record's nested "test" array.
var data = [
  { "_id": "1", "value": { "test": ["A", "B", "C", "D"] } },
  { "_id": "2", "value": { "test": ["A", "B", "C", "D", "E"] } }
];
var dataCount = [];
for (const element of data) {
  dataCount.push({
    _id: element._id,
    value: element.value.test.length,
  });
}
console.log(dataCount);

Unwind Multiple Document Arrays Into New Documents

Today I run into a situation I need to sync a mongoDB collection to vertica (SQL Database) where my object keys will be the columns of the table in SQL.
I use mongoDB aggregation framework, first to query, manipulate and project the wanted result document and then I sync it to vertica.
The schema I want to aggregate looks like this:
{
userId: 123
firstProperty: {
firstArray: ['x','y','z'],
anotherAttr: 'abc'
},
anotherProperty: {
secondArray: ['a','b','c'],
anotherAttr: 'def'
}
}
Since array values are not related with other arrays value, what I need is that each value of nested array, will be in a separate result document.
For that I use the following aggregation pipe:
// Pipeline from the question — kept as-is; review notes below explain why
// it produces a cartesian product instead of one document per array value.
db.collection('myCollection').aggregate([
{
$match: {
$or: [
// NOTE(review): the sample schema nests the second array under
// "anotherProperty", not "secondProperty" — confirm these field paths.
{'firstProperty.firstArray.1': {$exists: true}},
{'secondProperty.secondArray.1': {$exists: true}}
]
}
},
{
$project: {
userId: 1,
firstProperty: 1,
secondProperty: 1
}
}, {
// FIXME: typo — 'firstAray' should be 'firstArray'.
$unwind: {path:'$firstProperty.firstAray'}
}, {
// Two consecutive $unwind stages over independent arrays repeat every
// element of the first for each element of the second — the cartesian
// product shown under "Instead I get".
$unwind: {path:'$secondProperty.secondArray'},
}, {
$project: {
userId: 1,
firstProperty: '$firstProperty.firstArray',
firstPropertyAttr: '$firstProperty.anotherAttr',
secondProperty: '$secondProperty.secondArray',
// NOTE(review): typo — 'seondPropertyAttr' should be 'secondPropertyAttr'.
seondPropertyAttr: '$secondProperty.anotherAttr'
}
}, {
$out: 'another_collection'
}
])
What I expect is the following result:
{
userId: 'x1',
firstProperty: 'x',
firstPropertyAttr: 'a'
}
{
userId: 'x1',
firstProperty: 'y',
firstPropertyAttr: 'a'
}
{
userId: 'x1',
firstProperty: 'z',
firstPropertyAttr: 'a'
}
{
userId: 'x1',
secondProperty: 'a',
firstPropertyAttr: 'b'
}
{
userId: 'x1',
secondProperty: 'b',
firstPropertyAttr: 'b'
}
{
userId: 'x1',
secondProperty: 'c',
firstPropertyAttr: 'b'
}
Instead I get something like that:
{
userId: 'x1',
firstProperty: 'x',
firstPropertyAttr: 'b'
secondProperty: 'a',
secondPropertyAttr: 'b'
}
{
userId: 'x1',
firstProperty: 'y',
firstPropertyAttr: 'b'
secondProperty: 'b',
secondPropertyAttr: 'b'
}
{
userId: 'x1',
firstProperty: 'z',
firstPropertyAttr: 'b'
secondProperty: 'c',
secondPropertyAttr: 'b'
}
What exactly am I missing, and how can I fix it?
This is actually a much "curlier" problem than you might think it is, and it all really boils down to "named keys", which are generally a real problem and your data "should" not be using "data points" in the naming of such keys.
The other obvious problem in your attempt is called a "cartesian product". This is where you $unwind one array and then $unwind another, which results in the items from the "first" $unwind being repeated for every value present in the "second".
Addressing that second problem, the basic approach is to "combine the arrays" in order that you only $unwind from a single source. This is pretty common to all remaining approaches.
As for the approaches, these differ in the MongoDB version you have available and the general practicality of application. So let's step through them:
Remove the named keys
The most simple approach here is to simply not expect named keys in the output, and instead mark them as a "name" identifying their source in the final output. So all we want to do is specify each "expected" key within the construction of an initial "combined" array, and then simply $filter that for any null values resulting from named paths not existing in the present document.
// Approach 1 — drop the "named keys": build one combined array with a
// {name, array, attr} entry per expected property, $filter out entries
// whose array is absent (null), then $unwind the single combined source
// (outer entries first, then each entry's values) to avoid the cartesian
// product of unwinding two independent arrays.
db.getCollection('myCollection').aggregate([
{ "$match": {
"$or": [
{ "firstProperty.firstArray.0": { "$exists": true } },
{ "anotherProperty.secondArray.0": { "$exists": true } }
]
}},
{ "$project": {
"_id": 0,
"userId": 1,
"combined": {
"$filter": {
"input": [
{
// $literal stops "first" being interpreted as a field path.
"name": { "$literal": "first" },
"array": "$firstProperty.firstArray",
"attr": "$firstProperty.anotherAttr"
},
{
"name": { "$literal": "another" },
"array": "$anotherProperty.secondArray",
"attr": "$anotherProperty.anotherAttr"
}
],
"cond": {
"$ne": ["$$this.array", null ]
}
}
}
}},
{ "$unwind": "$combined" },
{ "$unwind": "$combined.array" },
{ "$project": {
"userId": 1,
"name": "$combined.name",
"value": "$combined.array",
"attr": "$combined.attr"
}}
])
From the data included in your question this would produce:
/* 1 */
{
"userId" : 123.0,
"name" : "first",
"value" : "x",
"attr" : "abc"
}
/* 2 */
{
"userId" : 123.0,
"name" : "first",
"value" : "y",
"attr" : "abc"
}
/* 3 */
{
"userId" : 123.0,
"name" : "first",
"value" : "z",
"attr" : "abc"
}
/* 4 */
{
"userId" : 123.0,
"name" : "another",
"value" : "a",
"attr" : "def"
}
/* 5 */
{
"userId" : 123.0,
"name" : "another",
"value" : "b",
"attr" : "def"
}
/* 6 */
{
"userId" : 123.0,
"name" : "another",
"value" : "c",
"attr" : "def"
}
Merge Objects - Requires MongoDB 3.4.4 minimum
To actually use "named keys" we need the $objectToArray and $arrayToObject operators that were only available since MongoDB 3.4.4. Using these and the $replaceRoot pipeline stage we can simply process to your desired output without explicitly naming the keys to output at any stage:
// Approach 2 — requires MongoDB 3.4.4+: $objectToArray / $arrayToObject
// treat the "named keys" as data ({k, v} pairs), so output documents can
// carry different root key names without listing them explicitly.
db.getCollection('myCollection').aggregate([
{ "$match": {
"$or": [
{ "firstProperty.firstArray.0": { "$exists": true } },
{ "anotherProperty.secondArray.0": { "$exists": true } }
]
}},
{ "$project": {
"_id": 0,
"userId": 1,
"data": {
"$reduce": {
"input": {
"$map": {
"input": {
"$filter": {
// Turn the root document into [{k, v}] pairs and drop the
// constant fields, leaving only the "named" properties.
"input": { "$objectToArray": "$$ROOT" },
"cond": { "$not": { "$in": [ "$$this.k", ["_id", "userId"] ] } }
}
},
"as": "d",
"in": {
"$let": {
"vars": {
// Same transform one level down: the inner keys become
// {k, v} pairs, renamed to "<outer>" when the key contains
// "Array" and "<outer>Attr" otherwise.
"inner": {
"$map": {
"input": { "$objectToArray": "$$d.v" },
"as": "i",
"in": {
"k": {
"$cond": {
"if": { "$ne": [{ "$indexOfCP": ["$$i.k", "Array"] }, -1] },
"then": "$$d.k",
"else": { "$concat": ["$$d.k", "Attr"] }
}
},
"v": "$$i.v"
}
}
}
},
"in": {
// For each element of the inner array, build an object
// pairing the value with its sibling "Attr" value.
"$map": {
"input": {
"$arrayElemAt": [
"$$inner.v",
{ "$indexOfArray": ["$$inner.k", "$$d.k"] }
]
},
"as": "v",
"in": {
"$arrayToObject": [[
{ "k": "$$d.k", "v": "$$v" },
{
"k": { "$concat": ["$$d.k", "Attr"] },
"v": {
"$arrayElemAt": [
"$$inner.v",
{ "$indexOfArray": ["$$inner.k", { "$concat": ["$$d.k", "Attr"] }] }
]
}
}
]]
}
}
}
}
}
}
},
// $reduce + $concatArrays flattens the per-property array-of-arrays
// into a single "data" array per document.
"initialValue": [],
"in": { "$concatArrays": [ "$$value", "$$this" ] }
}
}
}},
{ "$unwind": "$data" },
// $replaceRoot is what permits different key names at the root of each
// output document; userId is re-attached as a {k, v} pair first.
{ "$replaceRoot": {
"newRoot": {
"$arrayToObject": {
"$concatArrays": [
[{ "k": "userId", "v": "$userId" }],
{ "$objectToArray": "$data" }
]
}
}
}}
])
Which gets pretty monstrous from converting the "keys" into an array, then the "sub-keys" into an array and mapping the values from those inner arrays onto the pair of keys in output.
The key parts being $objectToArray is essentially needed to "transform" your "nested key" structures into arrays of "k" and "v" representing the "name" of the key and the "value". This gets called twice, being once for the "outer" parts of the document and excluding the "constant" fields such as "_id" and "userId" into such an array structure. Then the second call is processed on each of those "array" elements in order to make those "inner keys" a similar "array".
Matching is then done using $indexOfCP to work out which "inner key" is the one for the value and which is the "Attr". The keys are then renamed here to the "outer" key value, which we can access because that's a "v" courtesy of $objectToArray.
Then for the "inner value" which is an "array", we want to $map each entry into a combined "array" which basically has the form:
[
{ "k": "firstProperty", "v": "x" },
{ "k": "firstPropertyAttr", "v": "abc" }
]
This happens for each "inner array" element, for which $arrayToObject reverses the process and turns each "k" and "v" into "key" and "value" of an object respectively.
Since the output is still an "array of arrays" of the "inner keys" at this point, the $reduce wraps that output and applies $concatArrays while processing each element in order to "join" into a single array for "data".
All that remains is to simply $unwind the array produced from each source document, and then apply $replaceRoot, which is the part that actually allows "different key names" at the "root" of each document output.
The "merging" here is done by supplying an array of objects of the same "k" and "v" construction notated for "userId", and "concatenating" that with the $objectToArray transform of the "data". Of course this "new array" is then converted to an object via $arrayToObject one final time, which forms the "object" argument to "newRoot" as an expression.
You do something like that when there is a large number of "named keys" that you can't really name explicitly. And it actually gives you the result you want:
/* 1 */
{
"userId" : 123.0,
"firstProperty" : "x",
"firstPropertyAttr" : "abc"
}
/* 2 */
{
"userId" : 123.0,
"firstProperty" : "y",
"firstPropertyAttr" : "abc"
}
/* 3 */
{
"userId" : 123.0,
"firstProperty" : "z",
"firstPropertyAttr" : "abc"
}
/* 4 */
{
"userId" : 123.0,
"anotherProperty" : "a",
"anotherPropertyAttr" : "def"
}
/* 5 */
{
"userId" : 123.0,
"anotherProperty" : "b",
"anotherPropertyAttr" : "def"
}
/* 6 */
{
"userId" : 123.0,
"anotherProperty" : "c",
"anotherPropertyAttr" : "def"
}
Named Keys without MongoDB 3.4.4 or Greater
Without the operator support as shown in the above listing, it's simply not possible for the aggregation framework to output documents with different key names.
So though it's not possible to instruct the "server" to do this via $out, you can of course simply iterate the cursor and write a new collection
// Fallback for MongoDB < 3.4.4: iterate the cursor client-side, build the
// per-array-value documents in JavaScript, and bulk-insert them into the
// target collection in batches of up to 1000.
var ops = [];
db.getCollection('myCollection').find().forEach( d => {
// For every non-constant key: locate its "...Array" member, emit one
// object per array value paired with the sibling "...Attr" value, then
// flatten the per-key arrays and tag each result with the userId.
ops = ops.concat(Object.keys(d).filter(k => ['_id','userId'].indexOf(k) === -1 )
.map(k =>
d[k][Object.keys(d[k]).find(ki => /Array$/.test(ki))]
.map(v => ({
[k]: v,
[`${k}Attr`]: d[k][Object.keys(d[k]).find(ki => /Attr$/.test(ki))]
}))
)
.reduce((acc,curr) => acc.concat(curr),[])
.map( o => Object.assign({ userId: d.userId },o) )
);
// Flush a full batch to keep client memory bounded.
if (ops.length >= 1000) {
db.getCollection("another_collection").insertMany(ops);
ops = [];
}
})
// Write any remaining partial batch after the cursor is exhausted.
if ( ops.length > 0 ) {
db.getCollection("another_collection").insertMany(ops);
ops = [];
}
Same sort of thing as is being done in the earlier aggregation, but just "externally". It essentially produces and array of documents for each document matching the "inner" arrays, like so:
[
{
"userId" : 123.0,
"firstProperty" : "x",
"firstPropertyAttr" : "abc"
},
{
"userId" : 123.0,
"firstProperty" : "y",
"firstPropertyAttr" : "abc"
},
{
"userId" : 123.0,
"firstProperty" : "z",
"firstPropertyAttr" : "abc"
},
{
"userId" : 123.0,
"anotherProperty" : "a",
"anotherPropertyAttr" : "def"
},
{
"userId" : 123.0,
"anotherProperty" : "b",
"anotherPropertyAttr" : "def"
},
{
"userId" : 123.0,
"anotherProperty" : "c",
"anotherPropertyAttr" : "def"
}
]
These get "cached" into a big array; when that array reaches a length of 1000 or more, it is written to the new collection via .insertMany(). Of course that requires "back and forth" communication with the server, but it does get the job done in the most efficient way possible if you don't have the features available for the previous aggregation.
Conclusion
The overall point here is that unless you actually have a MongoDB that supports it, then you are not going to get documents with "different key names" in the output, solely from the aggregation pipeline.
So when you do not have that support, you either go with the first option and then use $out discarding having named keys. Or you do the final approach and simply manipulate the cursor results and write back to the new collection.

How can I generate combinations of objects?

I have a page which allows a user to create option groups, and for each option group create a series of options.
E.g.
Colour
* Green
* Blue
* Red
Size
* Small
* Medium
* Large
Where colour and size are the option groups.
The user can create any number of groups and options.
What I'm struggling to find is a way of creating a list of all the possible variations based on the number of options available.
Ideally what I want is an object with a property called "options" which is an array containing the options that this variation is composed of e.g.
[
{ options : [ { name: "Green" } , { "name" : "Small" } ] },
{ options : [ { name: "Green" } , { "name" : "Medium" } ] },
{ options : [ { name: "Green" } , { "name" : "Large" } ] },
{ options : [ { name: "Blue" } , { "name" : "Small" } ] },
{ options : [ { name: "Blue" } , { "name" : "Medium" } ] },
{ options : [ { name: "Blue" } , { "name" : "Large" } ] },
{ options : [ { name: "Red" } , { "name" : "Small" } ] },
{ options : [ { name: "Red" } , { "name" : "Medium" } ] },
{ options : [ { name: "Red" } , { "name" : "Large" } ] },
]
I can imagine that some level of recursion is required but I'm really struggling to write the correct JavaScript code.
Any help is greatly appreciated. Thanks in advance
EDIT: Looks like I didn't explain myself correctly, apologies for that. I can see that some of the solutions provided loop through the colours first and then the sizes, however, as mentioned above the user can create any number of groups whereas those solutions are only limited to 2 groups e.g there might be more option groups like "shoe size", "storage capacity", "screen size" etc
The problem I'm trying to solve is to create stock variations on an e-commerce site, where each product can have different options available.
The final structure will contain more data that I've given in the example above, so more like:
[
{ sku: "PRODUCT1-GRSM", options : [ { name: "Green" } , { "name" : "Small" } ] },
{ sku: "PRODUCT1-GRMD", options : [ { name: "Green" } , { "name" : "Medium" } ] },
{ sku: "PRODUCT1-GRLG", options : [ { name: "Green" } , { "name" : "Large" } ] },
{ sku: "PRODUCT1-BLSM", options : [ { name: "Blue" } , { "name" : "Small" } ] },
{ sku: "PRODUCT1-BLMD", options : [ { name: "Blue" } , { "name" : "Medium" } ] },
{ sku: "PRODUCT1-BLLG", options : [ { name: "Blue" } , { "name" : "Large" } ] },
{ sku: "PRODUCT1-RESM", options : [ { name: "Red" } , { "name" : "Small" } ] },
{ sku: "PRODUCT1-REMD", options : [ { name: "Red" } , { "name" : "Medium" } ] },
{ sku: "PRODUCT1-RELG", options : [ { name: "Red" } , { "name" : "Large" } ] },
]
That data will be bound to a table which contains input fields for each generated variation, but it's generating those variations which is the problem. Thanks
May be this is what you are asking:
// Cross every colour with every size, producing one { options: [...] }
// entry per combination.
var color = ["red","blue"];
var size = ["Small","Medium","Large"];
var options = [];
for (const colourName of color) {
  for (const sizeName of size) {
    options.push({ options: [{ name: colourName }, { name: sizeName }] });
  }
}
console.log(options);//options will contain your desired result
You can dynamically add color, size in color and size array respectively and you will get the desired output.
This is very old but in case you want to support any number of attributes:
/**
 * Cross an existing list of variation strings with one more attribute list.
 * @param {string[]} base - variation strings built so far
 * @param {string[]} variations - values of the next attribute
 * @returns {string[]} every "base - variation" combination, base-major order
 */
function add_variations_to_array(base, variations) {
  const ret = [];
  for (const e of base) {
    for (const variation of variations) {
      ret.push(e + " - " + variation);
    }
  }
  return ret;
}

// Size, color, material — any number of attribute lists is supported.
let attributes = [["S","M","L","XL"],["Red","Green","Blue"],["Wool","Cotton","Steel"]];

// Start from the first attribute list and fold each further list in.
// (for...of over slice(1) replaces the original `for...in` index loop,
// which iterated string keys and relied on a coercing `i > 0` comparison.)
let variations = attributes[0];
for (const attrList of attributes.slice(1)) {
  variations = add_variations_to_array(variations, attrList);
}
console.log(variations);
https://jsfiddle.net/pswkzLga/

mongodb update on JSON array

I have this data in Mongo:
{'_id':1,
'name':'Root',
'taskId':1,
'parentId':"",
'path':[1],
'tasks':[ {"taskId":3,parentId:1,name:'A',type:'task'},
{"taskId":4,parentId:1,name:'D',type:'task'},
{"taskId":5,parentId:4,name:'B',type:'task'},
{'type':'project' , 'proRef':2},
{"taskId":6,parentId:3,name:'E',type:'task'},
{"taskId":7,parentId:6,name:'C',type:'task'}]
}
Now I want to update taskId 6 with new Json data .
var jsonData = {"taskId":6,"name":'Sumeet','newField1':'Val1','newField2':'Val2'}
query should update if field is available else add new key to existing .Output Like
{"taskId":6,parentId:3,name:'Sumeet',type:'task','newField1':'Val1','newField2':'Val2'}]
I have tried few query but it is completely replacing json .
db.projectPlan.update({_id:1,'tasks.taskId':6},{$set :{'tasks.$':jsonData }});
Thanks in advance for your helps!
Sumeet
You need to transform the jsonData variable into something that can be passed to update. Here's an example that does exactly what you want with your sample document:
// Copy every field of jsonData except the match key ("taskId") into a $set
// document keyed as "tasks.$.<field>", so the positional update modifies or
// adds individual fields on the matched array element instead of replacing
// the whole sub-document.
var updateData = {};
for (const f in jsonData) {       // `const` avoids the implicit global `f`
  if (f !== "taskId") updateData["tasks.$." + f] = jsonData[f];
}
db.projectPlan.update({_id: 1, 'tasks.taskId': 6}, {$set: updateData});
Result:
{ "_id" : 1,
"name" : "Root",
"taskId" : 1,
"parentId" : "",
"path" : [ 1 ],
"tasks" : [
{ "taskId" : 3, "parentId" : 1, "name" : "A", "type" : "task" },
{ "taskId" : 4, "parentId" : 1, "name" : "D", "type" : "task" },
{ "taskId" : 5, "parentId" : 4, "name" : "B", "type" : "task" },
{ "type" : "project", "proRef" : 2 },
{ "taskId" : 6, "parentId" : 3, "name" : "Sumeet", "type" : "task", "newField1" : "Val1", "newField2" : "Val2" },
{ "taskId" : 7, "parentId" : 6, "name" : "C", "type" : "task" }
] }
You will need to merge the document manually:
// Merge-update a single task inside the "tasks" array client-side: find
// the element whose taskId matches, copy every field from jsonData onto it
// (adding new keys, overwriting existing ones), then save the document.
var jsonData = {"taskId":5,"name":'Sumeet','newField1':'Val1','newField2':'Val2'};
db.projectPlan.find({ _id: 1 }).forEach(
function(entry) {
for (var taskKey in entry.tasks) {
if (entry.tasks[taskKey].taskId === jsonData.taskId) {
// The printjson calls before/after are only for verifying the merge.
printjson(entry.tasks[taskKey]);
for (var taskSubKey in jsonData) {
entry.tasks[taskKey][taskSubKey] = jsonData[taskSubKey];
}
printjson(entry.tasks[taskKey]);
}
}
// save() writes the whole modified document back; only one document is
// affected as long as _id is unique.
db.projectPlan.save(entry);
}
);
Obviously you can leave away the printjson statements. This is simply to see that the merging of the original tasks with the new tasks works. Note that this query will only update a single document as long as the _id field is unique.

Categories

Resources