Display One-to-Many result in a JSON with nodejs - javascript

Hello I have this model:
Gateways
-serial
-name
-ip
Devices
-uui
-vendor
-name
-gw_serial
Where a Gateway has 1 or many devices
I need to show a result like this in my rest/api in nodejs with SQLITE3:
{
serial: 123,
name: Huawei,
ip: 192.168.12.1,
devices:[
{
uuid: 888,
vendor: Intel,
name: mouse,
},
{
uuid: 999,
vendor: Intel,
name: keyboard,
}
],
serial: 345,
name: Apple,
ip: 192.168.12.3,
devices:[
{
uuid: 567,
vendor: Intel,
name: mouse,
},
{
uuid: 893,
vendor: Intel,
name: keyboard,
}
]
}

I presume you are using node-sqlite3. I haven't tested it with real database, but here I did some kind of mocking and demo of the function.
Perhaps this is what you need:
// Fetches all gateways joined with their devices and groups the flat join
// rows into one object per gateway: { serial, name, ip, devices: [...] }.
// Wrapping db.each in a Promise adapts node-sqlite3's callback API;
// resolves with an array of grouped gateways, rejects on any row/query error.
const get_grouped = async (db) => {
  return new Promise((resolve, reject) => {
    // BUG FIX: `ip` was missing from the SELECT list, so gw_dev.ip was
    // always undefined and every grouped gateway had ip: undefined.
    const sql = `
      SELECT serial,
             gateways.name AS gw_name,
             gateways.ip AS ip,
             uuid,
             vendor,
             devices.name
        FROM gateways
        JOIN devices
          ON gateways.serial = gw_serial
    `;
    const acc = {};
    db.each(sql, (error, gw_dev) => {
      if (error) return reject(error);
      // First row seen for a serial initializes that gateway's entry;
      // assumes serial is unique per gateway (it is the natural key here).
      acc[gw_dev.serial] = acc[gw_dev.serial] || {
        serial: gw_dev.serial,
        name: gw_dev.gw_name,
        ip: gw_dev.ip,
        devices: []
      };
      // devices.name is the unaliased `name` column, so gw_dev.name is the
      // device name (the gateway name travels as gw_name).
      acc[gw_dev.serial].devices.push({
        uuid: gw_dev.uuid,
        vendor: gw_dev.vendor,
        name: gw_dev.name,
      });
    }, (error, rows) => error ? reject(error) : resolve(Object.values(acc)));
  });
};
Basically you pass the database instance to this function.
On every row the result is stored in the acc variable. If there is no gateway with that serial yet, it is initialized. Then the device is just pushed onto its collection. Of course this assumes that the serial is unique per gateway, but it should be.
When the oncomplete callback is called if there are no errors the result is presented as the values of the acc.

Related

Javascript - discord.js - How to simplify repeated options in SlashCommandBuilder

I found that my command ended up needing many repeated options which were all similar. There are 4 players and 5 towers each and with my method it would take 20 separate inputs to address them all. This would be monotonous to input as the user and I was wondering if there was a way to simplify the inputting process for the user or just the code. I haven't found any direct solutions from the discord.js library.
Also, there is a built in limit of 25 options and I have gone over this amount.
Here is my initial attempt. Note that all the inputs use the same autocomplete options. Scroll to where it says //these lines
//party command
const { SlashCommandBuilder } = require('discord.js');
module.exports = {
data: new SlashCommandBuilder()
.setName('party')
.setDescription('Creates a party which others can join based on roles')
.addStringOption(option =>
option.setName('gamemode')
.setDescription('The name of the gamemode')
.setRequired(true)
.addChoices(
{ name: 'Hardcore', value: 'Hardcore' },
{ name: 'Fallen', value: 'Fallen' },
{ name: 'Molten', value: 'Molten' },
{ name: 'Normal', value: 'Normal' },
{ name: 'Badlands II', value: 'Badlands II' },
{ name: 'Polluted Wastelands II', value: 'Polluted Wastelands II' },
{ name: 'Pizza Party', value: 'Pizza Party' },
{ name: 'Other', value: 'Other' },
))
.addStringOption(option =>
option.setName('strategy')
.setDescription('The name of the strategy you will use - skip this option for no strategy ')
)
.addStringOption(option =>
option.setName('strategy-link')
.setDescription('The link to your strategy - skip this option for no strategy')
)
//these lines v
.addStringOption(option =>
option.setName('p1-1')
.setDescription('First tower for p1 - skip for no tower/player')
.setAutocomplete(true)
)
.addStringOption(option =>
option.setName('p1-2')
.setDescription('Second tower for p1 - skip for no tower/player')
.setAutocomplete(true)
),
// p1-3, p1-4, etc.
async autocomplete(interaction) {
const focusedOption = interaction.options.getFocused(true);
let choices;
if (focusedOption.name === 'p1-1', 'p1-2') {
//these lines ^
choices = [
'Scout',
'Sniper',
'Paintballer',
'Demoman',
'Soldier',
'Solder',
'Freezer',
'Hunter',
'Ace Pilot',
'Medic',
'Pyromancer',
'Farm',
'Militant',
'Shotgunner',
'Military Base',
'Rocketeer',
'Electroshocker',
'Commander',
'DJ Booth',
'Warden',
'Minigunner',
'Ranger',
'Accelerator',
'Engineer',
'Crook Boss',
'Turret',
'Mortar',
'Pursuit',
'Golden Minigunner',
'Golden Pyromancer',
'Golden Crook Boss',
'Golden Scout',
'Golden Cowboy',
'Golden Soldier',
'Frost Blaster',
'Swarmer',
'Toxic Gunner',
'Sledger',
'Executioner',
'Elf Camp',
'Archer',
];
}
const filtered = choices.filter(choice => choice.startsWith(focusedOption.value));
let options;
if (filtered.length > 25) {
options = filtered.slice(0, 25);
} else {
options = filtered;
}
await interaction.respond(
options.map(choice => ({ name: choice, value: choice })),
);
},
async execute(interaction) {
const gamemode = interaction.options.getString('gamemode');
const strategy = interaction.options.getString('strategy') ?? 'no';
await interaction.reply(`You created a party for ${gamemode} using ${strategy} strategy.`);
},
};
Node.js v18.12.1
npm list discord.js#12.5.3 dotenv#16.0.3
Thank you for your help.

GraphQL query return NULL in GraphiQL, but data appears in console log

I have a GraphQL API which is supposed to return data from MySQL and PostGres database. In the resolvers, I have console.log the results and can view the data in the terminal.
// GraphQL field resolver for a single address (question code — broken).
address: {
type: AddressType,
description: "An Address",
args: {
id: { type: GraphQLInt },
},
// BUG: `result` is never assigned anywhere, and connection.query is
// callback-based and asynchronous, so `return result` below runs before
// the callback fires — the resolver always returns undefined, which
// GraphiQL renders as null even though the console.log shows data.
resolve: (parent, args) => {
// Make a connection to MySQL
let result;
// SECURITY: interpolating args.id straight into the SQL string allows
// SQL injection — use a parameterized query (`WHERE id = ?`, [args.id]).
connection.query(
`SELECT * FROM addresses WHERE id = ${args.id}`,
(err, res, fields) => {
if (err) console.log(err);
console.log("========");
console.log(res);
console.log("+++++++++");
console.log(res[0]);
// console.log(result);
}
);
// Executes before the query callback above — `result` is still undefined.
return result;
},
},
In the terminal, I can see the results when I run the query on GraphiQL:
[nodemon] starting `node schema.js`
Server is running
Connected to PSQL database.
Connected to mySQL database.
========
[
RowDataPacket {
id: 1,
address_type: 'House',
status: 'Inactive',
entity: 'Building',
number_and_street: 'PO BOX 276',
suite_and_apartment: 'PO',
city: 'Ennis',
postal_code: '59729-0276',
country: 'USA',
notes: 'Dolorem quia repellendus et et nobis.',
created_at: 2020-12-18T05:00:00.000Z,
updated_at: 2021-05-21T04:00:00.000Z,
latitude: null,
longitude: null
}
]
+++++++++
RowDataPacket {
id: 1,
address_type: 'House',
status: 'Inactive',
entity: 'Building',
number_and_street: 'PO BOX 276',
suite_and_apartment: 'PO',
city: 'Ennis',
postal_code: '59729-0276',
country: 'USA',
notes: 'Dolorem quia repellendus et et nobis.',
created_at: 2020-12-18T05:00:00.000Z,
updated_at: 2021-05-21T04:00:00.000Z,
latitude: null,
longitude: null
}
However on GraphiQL, I get null for data.
Input:
{
address(id: 1) {
address_type
}
}
output:
{
"data": {
"address": null
}
}
I am very new to GraphQL. What could I be missing here? I am attempting to get this information from the terminal to show upon query in GraphiQL. Just trying to learn more.
The classic problem with inattention:
You use the res variable for the console. And nowhere are you assigning a value to result.
Also, `return result` executes before the query has completed, so it runs outside the context where the data is actually available.
See the documentation for how to use the async / await syntax. You are currently using callbacks - which is not a recommended syntax.
Not sure, but should be something like, you should use async/await, and await the return of query data. Also be sure to assign the value to the variable you have:
// Suggested fix: make the resolver async and await the query result so the
// data is returned after the query completes.
// NOTE(review): this only works if `connection.query` returns a Promise —
// the $1 placeholder style suggests node-postgres; the callback-based
// mysql driver shown in the question would need a promise wrapper
// (e.g. mysql2's promise API) instead. Confirm which driver `connection` is.
address: {
type: AddressType,
description: "An Address",
args: {
id: { type: GraphQLInt },
},
resolve: async (parent, args) => {
// Parameterized query — also removes the SQL-injection risk.
const result = await connection.query('SELECT * FROM addresses WHERE id = $1', [args.id]);
return result;
},
},
What ended up working for me was the following:
address: {
type: AddressType,
description: "An Address",
args: {
id: { type: GraphQLInt },
},
resolve: async (parent, args) => {
const [rows, fields] = await promisePool.query(
`SELECT * FROM addresses WHERE id = ${args.id}`
);
console.log(rows[0]);
return rows[0];
},
},

The .find() function is running before .create(), even though it's below the .create() function in the JS code

I'm trying to follow along with Colt's Web Dev bootcamp and in his version, everything works fine but when I use the same code, a new entry is made into the database however it is not reflected in the printed items unless I re-run the code.
So, the question is, why is the .find running before the .create and is there a way to make it not do that?
The JS:
// Question code: demonstrates why Cat.find can print before Cat.create
// finishes — both calls are asynchronous and start back-to-back, so their
// completion order is a race (the topic of the answers below).
const mongoose = require("mongoose");
mongoose.connect("mongodb://localhost:27017/catapp",
{
useNewUrlParser: true,
useUnifiedTopology: true
}
)
.then(() => console.log('Connected to DB!'))
.catch(error => console.log(error.message));
// Schema and model for the "cats" collection.
let catSchema = new mongoose.Schema({
name: String,
age: Number,
temperament: String
});
let Cat = mongoose.model("Cat", catSchema);
// Starts an async insert; its callback runs whenever the write completes.
Cat.create ({
name: "Snow White",
age: 15,
temperament: "Bland"
}, function(err, cat){
if(err){
console.log("error encountered");
} else {
console.log("new cat created")
}
});
// Starts an async query immediately — it does NOT wait for create() above,
// which is why the newly created cat is missing from the printed list.
Cat.find({}, function(err, cats){
if(err){
console.log("Error encountered");
console.log(err)
} else {
console.log("all cats found")
console.log(cats);
}
})
The terminal output is this (there might be many of the same entries because I've re-run the same program multiple times):
node cats.js
Connected to DB!
all cats found
[
{
_id: 5f4ea2d35e48bb06794bb96f,
name: 'George',
age: 11,
temperament: 'Grouchy',
__v: 0
},
{
_id: 5f4ea30e0fa8f806bb79d164,
name: 'George',
age: 11,
temperament: 'Grouchy',
__v: 0
},
{
_id: 5f4ea37dccb64207383a6d07,
name: 'George',
age: 11,
temperament: 'Grouchy',
__v: 0
},
{
_id: 5f4ea515a1847108f841d71a,
name: 'Snow White',
age: 15,
temperament: 'Bland',
__v: 0
},
{
_id: 5f4ea73d01e78f0ad300e5f7,
name: 'Snow White',
age: 15,
temperament: 'Bland',
__v: 0
},
{
_id: 5f4ea7803b65d20b1d4206f7,
name: 'Snow White',
age: 15,
temperament: 'Bland',
__v: 0
},
{
_id: 5f4ea9bab386320d1877eb0d,
name: 'Snow White',
age: 15,
temperament: 'Bland',
__v: 0
}
]
new cat created
Create and find are async.
Those functions will not always complete in the order they are called and the outcome will be determined by who completes first. This is known as a race condition.
To get around this, you will need to make sure the calls run in the correct order.
The simplest way is to put the find call into the callback of create. This gives you the added bonus of not running the find if for some reason creating the table fails.
There are 'better' ways of doing this depending on what you want to achieve, but that should be an exercise for you. You can read about async js, callbacks, promises, callback hell and the like to get started.
Your code could be roughly written like this, using async/await:
// Answer code: with async/await the create completes before find starts,
// so the new cat always appears in the printed list.
// NOTE(review): a bare `await` like this is only legal at the top level of
// an ES module or inside an async function — wrap accordingly in CommonJS.
try {
await Cat.create ({
name: "Snow White",
age: 15,
temperament: "Bland"
});
console.log("new cat created");
const cats = await Cat.find({});
console.log("all cats found")
console.log(cats);
} catch(e) {
// Any rejection from either call lands here.
console.log("Error encountered");
}
This would solve the order issue.
Cat.find runs before Cat.create() because of the asynchronous nature of NodeJS.
You can use Cat.find() in callback function of Cat.create or you need to do it Promise-based.
Cat.create is an async function as by default most Node functions are, so before Cat.create completes Cat.find has already run.
Just put the Cat.find inside the callback method of Cat.create under the success if block then you should be good.

How to tidy my returning data into a better model

I have API data that isn't optimized in its structure
I cannot request backend devs restructure it, so I'm looking to tidy the model locally before use in my Angular 2 application.
As an example I receive a flat object which contains tables and user data
{$id: "400", sqlTable: "Customer", admin: 1, test: 3}
It would be best to filter these and place Users into a sub object to speed up rendering without conditional testing etc.
Preferred structure:
"sqlTable":"value",
"Users":[
"user1name":permission,
"user2name":permission
]
So from original data:
$id is unimportant (so was deleted)
sqlTable is the table name
admin and test are Users
So if we delete the $id, the "rule set" is anything that isn't sqlTable is a User and should be moved to Sub Object called Users.
It would be appreciated if anyone can provide example JS/TS on how to move the data (key/value) into the sub structure based on this ruleset.
Full returning data as requested:
{
"$id":"399",
"IsFailure":false,
"IsSuccess":true,
"Value":[
{
"$id":"400",
"sqlTable":"Customer",
"admin":1,
"test":null
},
{
"$id":"401",
"sqlTable":"CustomerAddress",
"admin":null,
"test":null
},
{
"$id":"402",
"sqlTable":"NewTable",
"admin":null,
"test":null
}
]
}
I would do some transformations to the data before rendering, something like this:
// Sample API payload: a result envelope whose Value array holds one row per
// SQL table, with per-user permission columns mixed into each row.
const data = {
  $id: "399",
  IsFailure: false,
  IsSuccess: true,
  Value: [
    {
      $id: "400",
      sqlTable: "Customer",
      admin: 1,
      test: null
    },
    {
      $id: "401",
      sqlTable: "CustomerAddress",
      admin: null,
      test: null
    },
    {
      $id: "402",
      sqlTable: "NewTable",
      admin: null,
      test: null
    }
  ]
};
// Reshape each row: keep the table name, and gather every remaining column
// (everything except the $id metadata key) into a Users sub-object.
// Destructuring with a rest pattern does the "copy minus two keys" in one
// step, so no delete calls are needed.
let result = data.Value.map(({ $id, sqlTable, ...users }) => ({
  sqlTable,
  Users: users,
}));
console.log(result);
Your original question asks for an array of users and your comments mention a need for the user object to have a name and permissions key, combining the two together, this transformation can be done in one statement like so,
// Same transformation in a single expression: keep the table name and turn
// every remaining column (except the metadata keys) into a
// { name, permissions } pair.
const result = data.Value.map(row => ({
  table: row.sqlTable,
  users: Object.entries(row)
    .filter(([key]) => key !== "$id" && key !== "sqlTable")
    .map(([name, permissions]) => ({ name, permissions })),
}))
// Runnable version of the snippet above, with the sample payload inlined.
const data = {
  $id: "399",
  IsFailure: false,
  IsSuccess: true,
  Value: [{
      $id: "400",
      sqlTable: "Customer",
      admin: 1,
      test: null
    },
    {
      $id: "401",
      sqlTable: "CustomerAddress",
      admin: null,
      test: null
    },
    {
      $id: "402",
      sqlTable: "NewTable",
      admin: null,
      test: null
    }
  ]
};
// For each row, strip the $id/sqlTable keys via a rest pattern; whatever is
// left is the per-user permission columns, emitted as { name, permissions }
// objects (key order of the source row is preserved).
const result = data.Value.map(({ $id, sqlTable, ...perms }) => ({
  table: sqlTable,
  users: Object.entries(perms).map(([name, permissions]) => ({ name, permissions })),
}))
console.log(result);

RethinkDB - Updating nested array

I have a survey table that looks like so:
{
id: Id,
date: Date,
clients: [{
client_id: Id,
contacts: [{
contact_id: Id,
score: Number,
feedback: String,
email: String
}]
}]
}
I need to updated the score and feedback fields under a specific contact. Currently, I am running the update like this:
// Updates the score of one contact inside the nested clients[].contacts[]
// array of a survey document, by reading the whole document, patching it
// client-side, and writing it back.
// NOTE: read-modify-write like this can lose concurrent updates; the ReQL
// answers below do the update server-side instead.
function saveScore(obj){
// Deferred kept for interface compatibility with existing callers
// (returning the .then() chain directly would be the modern style).
var dfd = q.defer();
var survey = surveys.get(obj.survey_id);
survey
.pluck({ clients: 'contacts' })
.run()
.then(results => {
results.clients.forEach((item, outerIndex) => {
item.contacts.forEach((item, index, array) => {
// Numeric comparison so string vs number ids still match.
if(Number(item.contact_id) === Number(obj.contact_id)) {
array[index].score = obj.score;
console.log(outerIndex, index);
}
});
});
return survey.update(results).run()
})
.then(results => dfd.resolve(results))
// BUG FIX: the original `.catch(err => dfd.resolve(err))` RESOLVED the
// promise with the error, so callers could never detect a failure.
.catch(err => dfd.reject(err));
return dfd.promise;
};
When I look at the update method, it specifies how to update nested key:value pairs. However, I can't find any examples to update an individual item in an array.
Is there a better and hopefully cleaner way to update items in a nested array?
You might need to get the array, filter out the desired value in the array and then append it again to the array. Then you can pass the updated array to the update method.
Example
Let's say you have a document with two clients that both have a name and a score and you want to update the score in one of them:
{
"clients": [
{
"name": "jacob" ,
"score": 200
} ,
{
"name": "jorge" ,
"score": 57
}
] ,
"id": "70589f08-284c-495a-b089-005812ec589f"
}
You can get that specific document, run the update command with an annonymous function and then pass in the new, updated array into the clients property.
// Rebuild the clients array inside one document: drop the client being
// changed, then append a fresh copy carrying the updated values.
// NOTE(review): filter + append does NOT preserve the array's original
// order, and the appended object must restate every field of the client.
r.table('jacob').get("70589f08-284c-495a-b089-005812ec589f")
.update(function (row) {
return {
// Get all the clients, except the one we want to update
clients: row('clients').filter(function (client) {
return client('name').ne('jorge')
})
// Append a new client, with the update information
.append({ name: 'jorge', score: 57 })
};
});
I do think this is a bit cumbersome and there's probably a nicer, more elegant way of doing this, but this should solve your problem.
Database Schema
Maybe it's worth it to create a contacts table for all your contacts and then do a some sort of join on you data. Then your contacts property in your clients array would look something like:
{
id: Id,
date: Date,
clients: [{
client_id: Id,
contact_scores: {
Id: score(Number)
},
contact_feedbacks: {
Id: feedback(String)
}
}]
}
database schema
{
"clients": [
{
"name": "jacob" ,
"score": 200
} ,
{
"name": "jorge" ,
"score": 57
}
] ,
"id": "70589f08-284c-495a-b089-005812ec589f"
}
then you can do like this using map and branch query .
// Map over the clients array: for the element whose name matches, merge in
// the new score; every other element is returned unchanged. Unlike the
// filter+append approach above, this preserves array order.
r.db('users').table('participants').get('70589f08-284c-495a-b089-005812ec589f')
.update({"clients": r.row('clients').map(function(elem){
return r.branch(
elem('name').eq("jacob"),
elem.merge({ "score": 100 }),
elem)})
})
it works for me
// Replace the element at index 0 of the contacts array with a merged copy.
// NOTE(review): 'Contacts' is capitalized here but lowercase elsewhere in
// this thread — the field name must match the document exactly, and the
// array index (0) is hard-coded rather than looked up by contact_id.
r.table(...).get(...).update({
contacts: r.row('Contacts').changeAt(0,
r.row('Contacts').nth(0).merge({feedback: "NICE"}))
})
ReQL solution
Creating a query to update a JSON array of objects in-place, is a rather complicated process in ReThinkDB (and most query languages). The best (and only) solution in ReQL that I know about, is to use a combination of update,offsetsOf,do,changeAt, and merge functions. This solution will retain the order of objects in the array, and only modify values on objects which match in the offsetsOf methods.
The following code (or something similar) can be used to update an array of objects (i.e. clients) which contain an array of objects (i.e. contracts).
Where '%_databaseName_%', '%_tableName_%', '%_documentUUID_%', %_clientValue_%, and %_contractValue_% must be provided.
// Server-side in-place update of one contact of one client, preserving
// array order: locate each index with offsetsOf, then rewrite only that
// slot with changeAt + merge.
// FIXES vs the original: callback parameters were named `clients`/
// `contacts` while their bodies referenced `client`/`contact` (undeclared
// names); a stray extra `)` followed the contacts offsetsOf; `row(...)`
// was indexed without the ('clients') field; `contractIndex` was a typo
// for `contactIndex`; and the clients-level changeAt was missing, so the
// closing braces never balanced.
r.db('%_databaseName_%').table('%_tableName_%').get('%_documentUUID_%').update(row =>
  row('clients')
    // Index of the first client whose client_id matches.
    .offsetsOf(client => client('client_id').eq('%_clientValue_%'))(0)
    .do(clientIndex =>
      row('clients')(clientIndex)('contacts')
        // Index of the matching contact inside that client.
        .offsetsOf(contact => contact('contact_id').eq('%_contactValue_%'))(0)
        .do(contactIndex => ({
          clients: row('clients').changeAt(clientIndex,
            row('clients')(clientIndex).merge({
              contacts: row('clients')(clientIndex)('contacts').changeAt(contactIndex,
                row('clients')(clientIndex)('contacts')(contactIndex).merge({
                  'score': 0,
                  'feedback': 'xyz'
                })
              )
            })
          )
        }))
    )
)
Why go through the trouble of forming this into ReQL?
survey
.pluck({ clients: 'contacts' }).run()
.then(results => {
results.clients.forEach((item, outerIndex) => {
item.contacts.forEach((item, index, array) => {
if(Number(item.contact_id) === Number(obj.contact_id)) {
array[index].score = obj.score;
console.log(outerIndex, index);
}
});
});
return survey.update(results).run()
})
While the code provided by Jacob (the user who asked the question here on Stack Overflow - shown above) might look simpler to write, the performance is probably not as good as the ReQL solution.
1) The ReQL solution runs on the query-server (i.e. database side) and therefore the code is optimized during the database write (higher performance). Whereas the code above, does not make full use of the query-server, and makes a read and write request pluck().run() and update().run(), and data is processed on the client-request side (i.e. NodeJs side) after the pluck() query is run (lower performance).
2) The above code requires the query-server to send back all the data to the client-request side (i.e. NodeJs side) and therefore the response payload (internet bandwidth usage / download size) can be several megabytes. Whereas the ReQL solution is processed on the query-server, and therefore the response payload typically just confirms that the write was completed, in other words only a few bytes are sent back to the client-request side. Which is done in a single request.
ReQL is too complicated
However, ReQL (and especially SQL) seem overly complicated when working with JSON, and it seems to me that JSON should be used when working with JSON.
I've also proposed that the ReThinkDB community adopt an alternative to ReQL that uses JSON instead (https://github.com/rethinkdb/rethinkdb/issues/6736).
The solution to updating nested JSON arrays should be as simple as...
r('database.table').update({
clients: [{
client_id: 0,
contacts: [{
contact_id: 0,
score: 0,
feedback: 'xyz',
}]
}]
});
tfmontague is on the right path but I think his answer can be improved a lot. Because he uses ...(0) there's a possibility for his answer to throw errors.
zabusa also provides a ReQL solution using map and branch but doesn't show the complete nested update. I will expand on this technique.
ReQL expressions are composable so we can isolate complexity and avoid repetition. This keeps the code flat and clean.
First write a simple function mapIf
// mapIf: map over a ReQL sequence, applying `f` only to the elements that
// satisfy `test`; every other element passes through unchanged.
const mapIf = (rexpr, test, f) =>
  rexpr.map((item) => r.branch(test(item), f(item), item));
Now we can write the simplified updateClientContact function
// Build a patch document that updates one contact (matched by contactId)
// of one client (matched by clientId), leaving every other client and
// contact untouched and preserving array order.
// BUG FIX (suspected transcription error): the original returned the
// mapped contacts array itself as the new client value, which would
// discard client_id and the rest of the client object — contradicting the
// author's own example output below. The inner result must be merged back
// under the `contacts` key.
const updateClientContact = (doc, clientId, contactId, patch) =>
  doc.merge({
    clients: mapIf(
      doc('clients'),
      (client) => client('client_id').eq(clientId),
      (client) =>
        client.merge({
          contacts: mapIf(
            client('contacts'),
            (contact) => contact('contact_id').eq(contactId),
            (contact) => contact.merge(patch)
          )
        })
    )
  });
Use it like this
// fetch the document to update
const someDoc =
r.db(...).table(...).get(...);
// create patch for client id [1] and contact id [12]
const patch =
updateClientContact(someDoc, 1, 12, { name: 'x', feedback: 'z' });
// apply the patch
// NOTE(review): with a real driver, .run(conn) is still required to
// actually execute this update against the server.
someDoc.update(patch);
Here's a concrete example you can run in reql> ...
// In-memory sample document: three clients with three contacts each, used
// to demonstrate updateClientContact without touching a real table.
const testDoc =
{ clients:
[ { client_id: 1
, contacts:
[ { contact_id: 11, name: 'a' }
, { contact_id: 12, name: 'b' }
, { contact_id: 13, name: 'c' }
]
}
, { client_id: 2
, contacts:
[ { contact_id: 21, name: 'd' }
, { contact_id: 22, name: 'e' }
, { contact_id: 23, name: 'f' }
]
}
, { client_id: 3
, contacts:
[ { contact_id: 31, name: 'g' }
, { contact_id: 32, name: 'h' }
, { contact_id: 33, name: 'i' }
]
}
]
};
// Update contact 23 of client 2; r.expr lifts the plain JS object into a
// ReQL expression so the same helper works on literal data.
updateClientContact(r.expr(testDoc), 2, 23, { name: 'x', feedback: 'z' });
The result will be
{ clients:
[ { client_id: 1
, contacts:
[ { contact_id: 11, name: 'a' }
, { contact_id: 12, name: 'b' }
, { contact_id: 13, name: 'c' }
]
}
, { client_id: 2
, contacts:
[ { contact_id: 21, name: 'd' }
, { contact_id: 22, name: 'e' }
, { contact_id: 23, name: 'x', feedback: 'z' } // <--
]
}
, { client_id: 3
, contacts:
[ { contact_id: 31, name: 'g' }
, { contact_id: 32, name: 'h' }
, { contact_id: 33, name: 'i' }
]
}
]
}
Better late than never
I had your same problem and i could solve it with two ways:
With specific client_id
// Variant 1: target a specific client. Outer map finds the client by
// client_id; inner map finds the contact by contact_id and merges in the
// new score/feedback. Non-matching elements are returned unchanged by
// r.branch, so both arrays keep their original order.
r.db('nameDB').table('nameTable').get('idRegister')
.update({'clients': r.row('clients')
.map(elem=>{
return r.branch(
elem('client_id').eq('your_specific_client_id'),
elem.merge({
contacts: elem('contacts').map(elem2=>
r.branch(
elem2('contact_id').eq('idContact'),
elem2.merge({
score: 99999,
feedback: 'yourString'
}),
elem2
)
)
}),
elem
)
})
})
Without specific client_id
// Variant 2: no client filter — the contact merge is applied to the
// matching contact_id inside EVERY client's contacts array.
r.db('nameDB').table('nameTable').get('idRegister')
.update({'clients': r.row('clients')
.map(elem=>
elem.merge({
contacts: elem('contacts').map(elem2=>
r.branch(
elem2('contact_id').eq('idContact'),
elem2.merge({
score: 99999,
feedback: 'yourString'
}),
elem2
)
)
})
)
})
I hope this works for you, even though the question was asked a long time ago.

Categories

Resources