Clear database to update content with Notion API - javascript

I have a workspace in Notion, which I use to take notes for an app I have on Github.
I want to add a database which will show some download stats from different sources (including Github) using the beta Notion API.
Right now I can add entries at the end of a database just fine, but I don't understand how to remove the content that was posted before, or how to update it, if that's even possible.
This is what I have so far:
import { Client } from "@notionhq/client";
import dotenv from "dotenv";
import { Octokit } from "@octokit/rest";

dotenv.config();

const octokit = new Octokit();
const notion = new Client({ auth: process.env.NOTION_TOKEN });
const databaseId = process.env.NOTION_DATABASE_ID;
async function addEntry(release, name, download_count, tag) {
  try {
    await notion.request({
      path: "pages",
      method: "POST",
      body: {
        parent: { database_id: databaseId },
        properties: {
          Version: {
            title: [
              {
                text: {
                  content: release,
                },
              },
            ],
          },
          Name: {
            rich_text: [
              {
                text: {
                  content: name,
                },
              },
            ],
          },
          "Download Count": {
            type: "number",
            number: download_count,
          },
          Tags: {
            multi_select: [{ name: "Github" }, { name: tag }],
          },
        },
      },
    });
    console.log("Success! Entry added.");
  } catch (error) {
    console.error(error.body);
  }
}
(async () => {
  const latest_release = await octokit.repos.listReleases({
    owner: "ShadowMitia",
    repo: "steam_randomiser",
  });
  const releases = latest_release.data;

  let github_downloads = {};
  for (let release of releases) {
    for (let asset of release.assets) {
      console.log(release["tag_name"], asset["name"], asset["download_count"]);
      // github_downloads[asset["label"]];
      addEntry(
        `${release["tag_name"]}`,
        `${asset["name"]}`,
        asset["download_count"],
        asset["name"].includes("linux") ? "Linux" : "Windows"
      );
    }
  }
})();

To delete (archive) a page in a database, set the archived parameter to true:
curl --location --request PATCH 'https://api.notion.com/v1/pages/YOUR_PAGE_ID' \
--header 'Content-Type: application/json' \
--header 'Authorization: Bearer YOUR_BOT_TOKEN' \
--data '{
  "parent": {
    "database_id": "YOUR_DATABASE_ID"
  },
  "archived": true,
  "properties": {
    "Name": {
      "title": [
        {
          "text": {
            "content": "A Test Page"
          }
        }
      ]
    },
    "Email": {
      "email": "hello@test.com"
    },
    "multiselect_tags": {
      "type": "multi_select",
      "multi_select": [
        {
          "name": "Duc Loi Market"
        },
        {
          "name": "Rainbow Grocery"
        }
      ]
    }
  }
}'
To clear data in a page, you set the property to empty or null, depending on the property being updated. For example, for an array property you would set it to an empty array:
"multiselect_tags": {
  "type": "multi_select",
  "multi_select": [
    {
      "name": "Duc Loi Market"
    },
    {
      "name": "Rainbow Grocery"
    }
  ]
}

//Empty a multi_select property
"multiselect_tags": {
  "type": "multi_select",
  "multi_select": []
}
If the property is a string, like the email property, set it to null:
"Email": {
  "email": "hello@test.com"
}

//Empty the value of the email property on a page
"Email": {
  "email": null
}
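To clear the whole database from your script before re-adding entries, you can combine the two steps: query the database for its pages, then archive each one. A minimal sketch, assuming a version of @notionhq/client that exposes notion.databases.query and notion.pages.update, and reusing the notion client and databaseId from the question; the clearDatabase helper name is mine:

// Sketch: archive every page currently in the database.
async function clearDatabase() {
  let cursor = undefined;
  do {
    // The query endpoint returns at most 100 pages at a time, so paginate.
    const response = await notion.databases.query({
      database_id: databaseId,
      start_cursor: cursor,
    });
    for (const page of response.results) {
      // "archived: true" is the API's delete: the page leaves the database.
      await notion.pages.update({ page_id: page.id, archived: true });
    }
    cursor = response.has_more ? response.next_cursor : undefined;
  } while (cursor);
}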

Related

How to pass public key in javascript struct to anchor rpc method

I am trying to pass a public key to an Anchor rpc method, but I think I am not generating it correctly in JavaScript; I tried passing it with and without quotes, to no avail.
anchor test gives me a TypeError: key.toBuffer is not a function if I quote it.
If I do not quote it, I get the error Error: AnchorError caused by account: my_account. Error Code: AccountDidNotSerialize. Error Number: 3004. Error Message: Failed to serialize the account.
I suspect the issue is with my JavaScript code:
const pubkey1 = anchor.web3.Keypair.generate();
const signatory1 = {
  name: "matt",
  publicKey: pubkey1.publicKey, // Error Code: AccountDidNotSerialize. Error Number: 3004
  // publicKey: '"' + pubkey1.publicKey + '"', // TypeError: key.toBuffer is not a function
};

// Invoke the update rpc.
await program.rpc.addSignatory(signatory1, {
  accounts: {
    myAccount: myAccount.publicKey,
  },
});
Full code below.
lib.rs
use anchor_lang::prelude::*;

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

#[program]
mod basic_1 {
    use super::*;

    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
        let my_account = &mut ctx.accounts.my_account;
        my_account.data = data;
        Ok(())
    }

    pub fn update(ctx: Context<Update>, data: u64) -> Result<()> {
        let my_account = &mut ctx.accounts.my_account;
        my_account.data = data;
        Ok(())
    }

    pub fn add_signatory(ctx: Context<Update>, signatory: Signatory) -> Result<()> {
        let my_account = &mut ctx.accounts.my_account;
        //my_account.data = data;
        my_account.signatories.push(signatory);
        Ok(())
    }

    pub fn add_signatories(ctx: Context<Update>, signatories: Vec<Signatory>) -> Result<()> {
        let my_account = &mut ctx.accounts.my_account;
        //my_account.data = data;
        my_account.signatories = signatories;
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Initialize<'info> {
    #[account(init, payer = user, space = 100)]
    pub my_account: Account<'info, MyAccount>,
    #[account(mut)]
    pub user: Signer<'info>,
    pub system_program: Program<'info, System>,
}

#[derive(Accounts)]
pub struct Update<'info> {
    #[account(mut)]
    pub my_account: Account<'info, MyAccount>,
}

#[account]
pub struct MyAccount {
    pub data: u64,
    pub project_id: u64,
    pub project_name: String,
    pub signatories: Vec<Signatory>,
    //pub signatories: [Signatory; 3]
}

#[derive(Default, AnchorSerialize, AnchorDeserialize, Clone)]
pub struct Signatory {
    pub name: String,
    pub public_key: Pubkey,
}
basic-1.js
const assert = require("assert");
const anchor = require("@project-serum/anchor");
const { SystemProgram } = anchor.web3;

describe("basic-1", () => {
  // Use a local provider.
  const provider = anchor.AnchorProvider.local();

  // Configure the client to use the local cluster.
  anchor.setProvider(provider);

  // Shared between tests.
  let _myAccount;

  it("Creates and initializes an account in a single atomic transaction (simplified)", async () => {
    // #region code-simplified
    // The program to execute.
    const program = anchor.workspace.Basic1;

    // The Account to create.
    const myAccount = anchor.web3.Keypair.generate();

    // Create the new account and initialize it with the program.
    await program.rpc.initialize(new anchor.BN(1234), {
      accounts: {
        myAccount: myAccount.publicKey,
        user: provider.wallet.publicKey,
        systemProgram: SystemProgram.programId,
      },
      signers: [myAccount],
    });
    // #endregion code-simplified

    // Fetch the newly created account from the cluster.
    const account = await program.account.myAccount.fetch(myAccount.publicKey);

    // Check its state was initialized.
    assert.ok(account.data.eq(new anchor.BN(1234)));

    // Store the account for the next test.
    _myAccount = myAccount;
  });

  it("Updates a previously created account", async () => {
    const myAccount = _myAccount;

    // #region update-test
    // The program to execute.
    const program = anchor.workspace.Basic1;

    // Invoke the update rpc.
    await program.rpc.update(new anchor.BN(4321), {
      accounts: {
        myAccount: myAccount.publicKey,
      },
    });

    // Fetch the newly updated account.
    const account = await program.account.myAccount.fetch(myAccount.publicKey);

    // Check its state was mutated.
    assert.ok(account.data.eq(new anchor.BN(4321)));
    // #endregion update-test
  });

  it("add a single signatory", async () => {
    const myAccount = _myAccount;

    // #region update-test
    // The program to execute.
    const program = anchor.workspace.Basic1;

    const pubkey1 = anchor.web3.Keypair.generate();
    const pubkey2 = anchor.web3.Keypair.generate();
    // const pubkey1 = "abc";
    // const pubkey2 = "def";
    // console.log("deepak " + pubkey1.publicKey);
    console.log("deepak without prop" + pubkey1);

    const signatory1 = {
      name: "matt",
      publicKey: pubkey1.publicKey,
      // publicKey: '"' + pubkey1.publicKey + '"',
      // public_key: pubkey1.publicKey,
    };

    // Invoke the update rpc.
    await program.rpc.addSignatory(signatory1, {
      accounts: {
        myAccount: myAccount.publicKey,
      },
    });

    // Fetch the newly updated account.
    const account = await program.account.myAccount.fetch(myAccount.publicKey);
    //assert.ok(account.signatories.len().eq(new anchor.BN(1)));
    assert.equal(account.signatories.length, 1);

    const signatory2 = {
      name: "smith",
      publicKey: pubkey2.publicKey,
      // publicKey: '"' + pubkey2.publicKey + '"',
      // public_key: pubkey2.publicKey,
    };

    // Invoke the update rpc.
    await program.rpc.addSignatory(signatory2, {
      accounts: {
        myAccount: myAccount.publicKey,
      },
    });

    // Fetch the newly updated account.
    const account2 = await program.account.myAccount.fetch(myAccount.publicKey);
    //assert.ok(account.signatories.len().eq(new anchor.BN(1)));
    assert.equal(account2.signatories.length, 2);

    // Check its state was mutated.
    assert.ok(account.data.eq(new anchor.BN(4321)));
    // #endregion update-test
  });

  /*
  it("add multiple signatories", async () => {
    const myAccount = _myAccount;

    // #region update-test
    // The program to execute.
    const program = anchor.workspace.Basic1;

    const pubkey1 = anchor.web3.Keypair.generate();
    const pubkey2 = anchor.web3.Keypair.generate();

    const signatories1 = [
      {
        name: "matt",
        public_key: pubkey1,
      },
      {
        name: "smith",
        public_key: pubkey2,
      },
    ];

    // Invoke the update rpc.
    await program.rpc.addSignatories(signatories1, {
      accounts: {
        myAccount: myAccount.publicKey,
      },
    });

    // Fetch the newly updated account.
    const account = await program.account.myAccount.fetch(myAccount.publicKey);

    // Check its state was mutated.
    assert.ok(account.data.eq(new anchor.BN(4321)));
    // #endregion update-test
  });
  */
});
basic_1.json
{
  "version": "0.1.0",
  "name": "basic_1",
  "instructions": [
    {
      "name": "initialize",
      "accounts": [
        {
          "name": "myAccount",
          "isMut": true,
          "isSigner": true
        },
        {
          "name": "user",
          "isMut": true,
          "isSigner": true
        },
        {
          "name": "systemProgram",
          "isMut": false,
          "isSigner": false
        }
      ],
      "args": [
        {
          "name": "data",
          "type": "u64"
        }
      ]
    },
    {
      "name": "update",
      "accounts": [
        {
          "name": "myAccount",
          "isMut": true,
          "isSigner": false
        }
      ],
      "args": [
        {
          "name": "data",
          "type": "u64"
        }
      ]
    },
    {
      "name": "addSignatory",
      "accounts": [
        {
          "name": "myAccount",
          "isMut": true,
          "isSigner": false
        }
      ],
      "args": [
        {
          "name": "signatory",
          "type": {
            "defined": "Signatory"
          }
        }
      ]
    },
    {
      "name": "addSignatories",
      "accounts": [
        {
          "name": "myAccount",
          "isMut": true,
          "isSigner": false
        }
      ],
      "args": [
        {
          "name": "signatories",
          "type": {
            "vec": {
              "defined": "Signatory"
            }
          }
        }
      ]
    }
  ],
  "accounts": [
    {
      "name": "MyAccount",
      "type": {
        "kind": "struct",
        "fields": [
          {
            "name": "data",
            "type": "u64"
          },
          {
            "name": "projectId",
            "type": "u64"
          },
          {
            "name": "projectName",
            "type": "string"
          },
          {
            "name": "signatories",
            "type": {
              "vec": {
                "defined": "Signatory"
              }
            }
          }
        ]
      }
    }
  ],
  "types": [
    {
      "name": "Signatory",
      "type": {
        "kind": "struct",
        "fields": [
          {
            "name": "name",
            "type": "string"
          },
          {
            "name": "publicKey",
            "type": "publicKey"
          }
        ]
      }
    }
  ],
  "metadata": {
    "address": "Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"
  }
}
You misunderstood the problem. The issue here is not with passing the Pubkey; you are running out of space while adding a signatory.
What you need to do is precalculate the space required to store the maximum number of signatories the vec is allowed to hold. It must have a bound.
Please refer to https://borsh.io/#pills-specification to calculate how much space you need.
To validate this, you can change the space from 100 to 1000:
#[derive(Accounts)]
pub struct Initialize<'info> {
    #[account(init, payer = user, space = 1000)]
    pub my_account: Account<'info, MyAccount>,
    #[account(mut)]
    pub user: Signer<'info>,
    pub system_program: Program<'info, System>,
}
This worked fine for me:
✔ Creates and initializes an account in a single atomic transaction (simplified) (186ms)
✔ Updates a previously created account (407ms)
✔ add a single signatory (409ms)
✔ add multiple signatories (414ms)
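For reference, here is a rough way to estimate the space, assuming the Borsh layout rules (u64 = 8 bytes, String = 4-byte length prefix + bytes, Vec = 4-byte length prefix + elements, Pubkey = 32 bytes) plus Anchor's 8-byte account discriminator. The 32-character name cap and the helper names are illustrative assumptions, not part of the original code:

// Hypothetical estimate of the space MyAccount needs under Borsh.
const DISCRIMINATOR = 8; // Anchor prepends an 8-byte discriminator
const U64 = 8; // data and project_id
const PUBKEY = 32;
const STRING = (maxLen) => 4 + maxLen; // length prefix + UTF-8 bytes
const SIGNATORY = STRING(32) + PUBKEY; // name (<= 32 chars) + public_key

const space = (maxSignatories) =>
  DISCRIMINATOR + U64 + U64 + STRING(32) + 4 + maxSignatories * SIGNATORY;

console.log(space(3)); // => 268 bytes for up to 3 signatories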

Parameter in query graphql breaking JSON with NodeJS

My script connects to a GraphQL API via fetch and inserts the JSON response into a PostgreSQL database. However, when I add the primaryLabels field to the query, the response fails to parse as JSON with an "Unexpected token <" error.
If I remove this field, everything works perfectly. Any solution?
I tried turning the query into a string, but the code still fails.
Code
let queryAPI = {
  query: `{squads {name cards(includedOnKanban: true, closed: false, archived: false, cancelled: false, updatedSince: "2020-01-01T00:00:00-0300") { identifier title description status priority assignees { fullname email } secondaryLabel primaryLabels swimlane workstate}}}`
};

(async () => {
  try {
    const rawResponse = await fetch('https://www.bluesight.io/graphql', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Bluesight-API-Token': 'token-here'
      },
      body: JSON.stringify(queryAPI)
    });
    const content = await rawResponse.json();
    // (rest of the script, including the PostgreSQL insert, omitted in the post)
  } catch (error) {
    console.error(error);
  }
})();
OUTPUT
at async C:\Users\Documents\Autoportal\Bluesight\index.js:35:25 {
name: 'FetchError',
message: 'invalid json response body at https://www.bluesight.io/graphql reason: Unexpected token < in JSON at position 0',
type: 'invalid-json'
JSON result example:
{
  "data": {
    "squads": [
      {
        "name": "SUPPORT IT",
        "cards": [
          {
            "identifier": "06x38y",
            "title": "ALL - Validate data",
            "description": "review database.",
            "status": null,
            "priority": "medium",
            "assignees": [
              {
                "fullname": "Carlos",
                "email": "carlos@br.it.com"
              }
            ],
            "secondaryLabel": null,
            "primaryLabels": [
              "CLIENT"
            ]
          }
        ]
      }
    ]
  }
}
CONTENT
{
  squads: [ { name: 'SUPPORT IT', cards: [Array] } ]
}
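An "Unexpected token < in JSON at position 0" error means the body starts with "<", i.e. the server sent back HTML (usually an error page) rather than JSON. A quick way to see what it actually returned is to read the body as text before parsing it; a minimal sketch, assuming node-fetch and a trimmed-down query:

const fetch = require('node-fetch');

// Trimmed-down query used only for illustration.
const queryAPI = { query: `{ squads { name } }` };

(async () => {
  const rawResponse = await fetch('https://www.bluesight.io/graphql', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Bluesight-API-Token': 'token-here'
    },
    body: JSON.stringify(queryAPI)
  });
  // Read the raw body first; if it starts with '<' the server sent HTML,
  // and the page usually says what it disliked about the request.
  const text = await rawResponse.text();
  console.log(rawResponse.status, text.slice(0, 500));
})();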

Ideal way to store multiple texts in mongoose with Node JS?

I've been building a mongoose schema for texts that will be displayed across different pages, and it has an endpoint to POST data for updating the texts.
For example, I would like to store text messages that will be displayed and updated on the About page and the Contact page.
What would be the preferred way of designing the text model?
1) Model that has all messages stored in one data object
In the front end, the parent component fetches all text messages with Texts.findOne() and trickles them down to the pages that need them:
const textsSchema = new Schema(
  {
    aboutMessage1: {
      type: String,
      required: true
    },
    aboutMessage2: {
      type: String,
      required: true
    },
    contactMessage1: {
      type: String
    },
    contactMessage2: {
      type: String
    }
  },
  { timestamps: true }
);
2) Model that contains each message, so it will have multiple documents
In the front end, each page uses Text.findById(textId) to retrieve its message:
const textSchema = new Schema(
  {
    // Example: name = contactMessage
    name: {
      type: String
    },
    message: {
      type: String
    }
  },
  { timestamps: true }
);
3) Multiple models, each containing the texts for one page
Similar to approach 1), texts get fetched with Texts.findOne(), but per page:
const aboutTextsSchema = new Schema(
  {
    message1: {
      type: String,
      required: true
    },
    message2: {
      type: String,
      required: true
    }
  },
  { timestamps: true }
);

const contactTextsSchema = new Schema(
  {
    message1: {
      type: String
    },
    message2: {
      type: String
    }
  },
  { timestamps: true }
);
The most promising option is the second one, because the first and third options are static: if in the future you need to add a new page or a new message to an existing page, it will require changes to the mongoose model and a redeployment of the API.
But I think, instead of creating a text schema, it would be better to create a page schema for your scenario.
Here I embed the messages inside the page schema:
const mongoose = require("mongoose");
const Schema = mongoose.Schema;

const pageSchema = new Schema(
  {
    page: {
      type: String
    },
    messages: [
      new Schema({
        name: {
          type: String
        },
        message: {
          type: String
        }
      })
    ]
  },
  { timestamps: true }
);

module.exports = mongoose.model("Page", pageSchema);
Now we can use this post route to create a page:
router.post("/pages", async (req, res) => {
  const result = await Page.create(req.body);
  res.send(result);
});
We can create a page and its messages using the previous post route.
Request Body:
{
  "page": "About",
  "messages": [
    {
      "name": "About1",
      "message": "About1 message..."
    },
    {
      "name": "About2",
      "message": "About2 message..."
    }
  ]
}
Response:
{
  "_id": "5e4937e9e2454a2c0c162890",
  "page": "About",
  "messages": [
    {
      "_id": "5e4937e9e2454a2c0c162892",
      "name": "About1",
      "message": "About1 message..."
    },
    {
      "_id": "5e4937e9e2454a2c0c162891",
      "name": "About2",
      "message": "About2 message..."
    }
  ],
  "createdAt": "2020-02-16T12:39:05.154Z",
  "updatedAt": "2020-02-16T12:39:05.154Z",
  "__v": 0
}
If later we want to add a message to a page, we can use the following put route:
router.put("/pages/:id", async (req, res) => {
  const result = await Page.findByIdAndUpdate(
    req.params.id,
    {
      $push: { messages: req.body }
    },
    { new: true }
  );
  res.send(result);
});
Request Body:
{
  "name": "About3",
  "message": "About3 message..."
}
Response:
{
  "_id": "5e4937e9e2454a2c0c162890",
  "page": "About",
  "messages": [
    {
      "_id": "5e4937e9e2454a2c0c162892",
      "name": "About1",
      "message": "About1 message..."
    },
    {
      "_id": "5e4937e9e2454a2c0c162891",
      "name": "About2",
      "message": "About2 message..."
    },
    {
      "_id": "5e493926f905ab3300106f94",
      "name": "About3",
      "message": "About3 message..."
    }
  ],
  "createdAt": "2020-02-16T12:39:05.154Z",
  "updatedAt": "2020-02-16T12:44:22.763Z",
  "__v": 0
}
When the client needs a page's messages, all we need to do is retrieve the page by its id or page name:
router.get("/pages/id/:id", async (req, res) => {
  const result = await Page.findById(req.params.id);
  res.send(result);
});

// or
router.get("/pages/name/:name", async (req, res) => {
  const result = await Page.findOne({ page: req.params.name });
  res.send(result);
});
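On the client side, each page can then fetch its own messages. A sketch, assuming the routes above are mounted at the server root and run in an environment with fetch (the variable names are mine):

(async () => {
  // Fetch the About page's messages by page name.
  const res = await fetch("/pages/name/About");
  const { messages } = await res.json();
  console.log(messages); // [{ name: "About1", message: "About1 message..." }, ...]
})();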

Update subarray of objects in mongodb

I have this document in my database:
{
  "_id": "ObjectId(...)",
  "chapters": [
    {
      "_id": "ObjectId(...)",
      "link": "128371.html",
      "content": ""
    }
  ]
}
The chapters array can have up to 3k items, and I have to populate each content attribute with some info. I want to be able to save the info inside the right object. Until now I was able to change the content attribute in all items at once, but I am having trouble filtering down to a single one. This is what I managed to code using what I found in other questions:
let content = "Testing";
await models.ListNovel.updateOne(
  { link: novel_link },
  { $set: { "chapters.$[].content": content } }
);
I saw that { arrayFilters: [{ link: { $eq: chapter_link } }], multi: false } may work in some cases, but I don't use the link identifier in the update.
Thank you!
UPDATE
Similar to Suleyman's solution, I ended up with the following working code; I hope it may be useful for you.
await models.ListNovel.updateOne(
  { link: novel.link },
  { $set: { "chapters.$[elem].content": content } },
  {
    multi: true,
    arrayFilters: [{ "elem.link": { $eq: chapter.link } }]
  }
);
The filter in updateOne must match the parent document, but you are using { link: novel_link }, which is a field of the inner array objects, so the document is never found and the update doesn't happen.
To illustrate this, let's say your schema is like this:
const mongoose = require("mongoose");

const schema = new mongoose.Schema({
  name: String,
  chapters: [
    new mongoose.Schema({
      link: String,
      content: String
    })
  ]
});

module.exports = mongoose.model("ListNovel", schema);
Let's have this existing document in this collection:
{
  "_id": "5e498a1fe21eea0e10690e39",
  "name": "Novel1",
  "chapters": [
    {
      "_id": "5e498a1fe21eea0e10690e3b",
      "link": "128371.html",
      "content": ""
    },
    {
      "_id": "5e498a1fe21eea0e10690e3a",
      "link": "222222.html",
      "content": ""
    }
  ],
  "__v": 0
}
If we want to update this document's chapter with "link": "128371.html", we first need to find the document by its name or _id field, and then update the chapter using the filtered positional operator $[<identifier>]:
router.put("/novels/:name", async (req, res) => {
  const novel_link = "128371.html";
  const content = "Testing";

  const result = await ListNovel.findOneAndUpdate(
    { name: req.params.name },
    {
      $set: { "chapters.$[chapter].content": content }
    },
    {
      arrayFilters: [{ "chapter.link": novel_link }],
      new: true
    }
  );
  res.send(result);
});
Here I used findOneAndUpdate to immediately retrieve the updated document, but you can also use updateOne instead of findOneAndUpdate.
The result will be like this:
{
  "_id": "5e498a1fe21eea0e10690e39",
  "name": "Novel1",
  "chapters": [
    {
      "_id": "5e498a1fe21eea0e10690e3b",
      "link": "128371.html",
      "content": "Testing" // => UPDATED
    },
    {
      "_id": "5e498a1fe21eea0e10690e3a",
      "link": "222222.html",
      "content": ""
    }
  ],
  "__v": 0
}
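If you don't need the updated document back, the equivalent updateOne call would look like this (a sketch reusing the same hypothetical names as above):

await ListNovel.updateOne(
  { name: "Novel1" }, // match the parent document
  { $set: { "chapters.$[chapter].content": content } },
  { arrayFilters: [{ "chapter.link": novel_link }] } // select the chapter to set
);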

Remote method doesn't show up in loopback API explorer

I have a role-mapping model which maps a userId to a roleId. I need a remote method on the role-mapping model to retrieve the role-mapping id for a given userId.
This is the code for the remote method:
'use strict';

module.exports = function(Rolemapping) {
  Rolemapping.getRolesByUser = async function (id, cb) {
    const roleMappings = await Rolemapping.find({ where: { principalId: id } });
    cb(null, roleMappings);
  };

  Rolemapping.remoteMethod("getRolesByUser", {
    http: {
      path: "/getRolesByUser",
      verb: "get"
    },
    accepts: [
      { arg: "userId", type: "string", http: { source: "query" } }
    ],
    returns: {
      arg: "result",
      type: "string"
    },
    description: "Cvs "
  });
};
This is the role-mapping.json file:
{
  "name": "roleMapping",
  "base": "RoleMapping",
  "idInjection": true,
  "options": {
    "validateUpsert": true
  },
  "properties": {},
  "validations": [],
  "relations": {
    "role": {
      "type": "belongsTo",
      "model": "role",
      "foreignKey": "roleId"
    }
  },
  "acls": [],
  "methods": {}
}
The above remote method doesn't show up in the LoopBack API explorer.
RoleMapping is a built-in model; its role-mapping.js file is hidden in node_modules/loopback. I've tested it, and it doesn't look like it will load a js file for itself from common/models.
It looks like a boot script is your only option. It's the same code, but your function receives the server object.
server/boot/get-roles-by-user.js
module.exports = function(server) {
  const Rolemapping = server.models.RoleMapping;

  Rolemapping.getRolesByUser = async function (id) {
    return JSON.stringify(await Rolemapping.find({ where: { principalId: id } }));
  };

  Rolemapping.remoteMethod("getRolesByUser", {
    http: {
      path: "/getRolesByUser",
      verb: "get"
    },
    accepts: [
      { arg: "userId", type: "string", http: { source: "query" } }
    ],
    returns: {
      arg: "result",
      type: "string"
    },
    description: "Cvs "
  });
};
I've also removed the cb parameter from your remote method, because methods which return a Promise do not need it; just return the value like you would from any other function.
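Once the boot script has run, the method should appear in the explorer and be callable over plain REST. A sketch of calling it, assuming the default /api root and the RoleMapping plural "roleMappings" (adjust both to your setup; the userId value is a placeholder):

const fetch = require('node-fetch');

(async () => {
  const userId = 'USER_ID_HERE'; // placeholder
  const res = await fetch(
    `http://localhost:3000/api/roleMappings/getRolesByUser?userId=${userId}`
  );
  console.log(await res.json()); // { result: "[...stringified role mappings...]" }
})();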
