Add copy_to to elasticsearch on index template - javascript

I use Elasticsearch 6.2, and I want to optimize search by copying all field values into one field and then querying with a query string against that single field instead of multiple fields. How do I do that? How do I copy all fields, no matter which, into one field?
initialize index template
export const init = async types => {
  try {
    let client = createClient()
    const templateSettings = {
      index_patterns: ['*'],
      settings: indexTemplateSettings,
      mappings: types.reduce((p, type) => ({
        ...p,
        [type]: {
          numeric_detection: true,
          _source: { enabled: true },
          'properties': {
            'searchIndex': {
              'type': 'text',
            },
            '*': {
              'type': 'text',
              'copy_to': 'searchIndex',
            },
          },
        },
      }), {}),
    }
    await client.indices.putTemplate({
      name: 'default',
      body: templateSettings,
    }, (error, response) => {
      logger.silly('Pushing of index template completed', response)
    })
  } catch (e) {
    logger.error(e)
  }
}
put index
export const push = (message, type) => new Promise(async resolve => {
  try {
    let client = createClient()
    let indexCreationTime = new Date('2016-02-08').toISOString().substring(0, 10)
    // '2016-02-08'
    console.log(message, 'message')
    console.log(type, 'type')
    await client.index({
      index: type.toLowerCase(),
      type,
      body: {
        ...message,
        _timestampIndex: indexCreationTime,
      },
    },
    (error, response) => {
      logger.silly('Pushing of data completed', response)
      resolve(response)
    })
  } catch (e) {
    logger.error(e)
  }
})

The best way is to create an index template which leverages a dynamic template that will catch all fields and add the copy_to parameter to their definition.
PUT _template/my-template
{
  "index_patterns": ["*"],
  "settings": {},
  "mappings": {
    "_doc": {
      "dynamic_templates": [
        {
          "all": {
            "match": "*",
            "mapping": {
              "type": "text",
              "copy_to": "searchIndex"
            }
          }
        }
      ],
      "properties": {
        "searchIndex": {
          "type": "text"
        }
      }
    }
  }
}
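With that template in place, searches can target the single searchIndex field. As a minimal sketch, assuming the same createClient helper from the question (the search function and its text argument are illustrative, not part of the original code):

export const search = async text => {
  const client = createClient()
  // query_string against the one field every other field was copied into
  return client.search({
    body: {
      query: {
        query_string: {
          default_field: 'searchIndex',
          query: text,
        },
      },
    },
  })
}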

Related

Why is cache.readQuery returning null?

I have a project management application built with React and GraphQL for which the Github repo can be found here. One of the functionalities allows for deleting a project.
I am trying to update the cache when I delete an individual project.
const [deleteProject] = useMutation(DELETE_PROJECT, {
  variables: { id: projectId },
  update(cache, { data: { deleteProject } }) {
    const { projects } = cache.readQuery({ query: GET_PROJECTS });
    cache.writeQuery({
      query: GET_PROJECTS,
      data: {
        projects: projects.filter(
          (project) => project.id !== deleteProject.id
        ),
      },
    });
  },
  onCompleted: () => navigate("/"),
});
However, when I attempt to do so, I am getting the following error: Error: Cannot destructure property 'projects' of 'cache.readQuery(...)' as it is null
Can someone help me figure out what's going on? This is what the getProjects query looks like:
const GET_PROJECTS = gql`
  query getProjects {
    projects {
      id
      name
      description
      status
    }
  }
`;
Here is the root query:
const RootQuery = new GraphQLObjectType({
  name: "RootQueryType",
  fields: {
    projects: {
      type: new GraphQLList(ProjectType),
      resolve(parent, args) {
        return Project.find();
      },
    },
    project: {
      type: ProjectType,
      args: { id: { type: GraphQLID } },
      resolve(parent, args) {
        return Project.findById(args.id);
      },
    },
    clients: {
      type: new GraphQLList(ClientType),
      resolve(parent, args) {
        return Client.find();
      },
    },
    client: {
      type: ClientType,
      args: { id: { type: GraphQLID } },
      resolve(parent, args) {
        return Client.findById(args.id);
      },
    },
  },
});
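For what it's worth, cache.readQuery returns null whenever the requested data is not already in the cache, for example when GET_PROJECTS has not been run before the mutation fires. A defensive sketch of the update callback (the guard is an assumption about the cause, not a confirmed fix):

update(cache, { data: { deleteProject } }) {
  const cached = cache.readQuery({ query: GET_PROJECTS });
  // readQuery yields null if GET_PROJECTS was never fetched into the cache
  if (!cached) return;
  cache.writeQuery({
    query: GET_PROJECTS,
    data: {
      projects: cached.projects.filter(
        (project) => project.id !== deleteProject.id
      ),
    },
  });
},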

Update mongo collection with values from a javascript map

I have a collection that looks like this
[
  {
    "project": "example1",
    "stores": [
      {
        "id": "10",
        "name": "aa",
        "members": 2
      }
    ]
  },
  {
    "project": "example2",
    "stores": [
      {
        "id": "14",
        "name": "bb",
        "members": 13
      },
      {
        "id": "15",
        "name": "cc",
        "members": 9
      }
    ]
  }
]
I would like to update the members field of the stores array, taking the new values from a Map, for example this one:
0:{"10" => 201}
1:{"15" => 179}
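(That is the DevTools rendering of a Map; in code, the equivalent map, named storeUserCount as in the solution below, could be built like this:)

const storeUserCount = new Map([
  ["10", 201],
  ["15", 179],
]);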
The expected result is:
[
  {
    "_id": "61",
    "stores": [
      {
        "id": "10",
        "name": "aa",
        "members": 201
      }
    ]
  },
  {
    "_id": "62",
    "stores": [
      {
        "id": "14",
        "name": "bb",
        "members": 13
      },
      {
        "id": "15",
        "name": "cc",
        "members": 179
      }
    ]
  }
]
What are the options to achieve this using javascript/typescript?
In the end, I solved it by updating the original database entity objects with the values from the map, and then generating a bulk query.
for (let p of projects) {
  for (let s of p.stores) {
    if (storeUserCount.has(s.id)) {
      s.members = storeUserCount.get(s.id);
    }
  }
  bulkQueryList.push({
    updateOne: {
      "filter": { "_id": p._id },
      "update": { "$set": { "stores": p.stores } }
    }
  });
}
await myMongooseProjectEntity.bulkWrite(bulkQueryList);
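As an alternative sketch, the per-store counts could be applied directly with arrayFilters, avoiding the rewrite of each whole stores array (same collection and map as above; this is an assumption-based variant, not the approach I used):

for (const [storeId, members] of storeUserCount) {
  bulkQueryList.push({
    updateOne: {
      // match any project containing this store, update only its members field
      filter: { "stores.id": storeId },
      update: { $set: { "stores.$[s].members": members } },
      arrayFilters: [{ "s.id": storeId }],
    },
  });
}
await myMongooseProjectEntity.bulkWrite(bulkQueryList);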
You can use the update() function of the Mongoose model to update your document.
Try the following:
const keyValArray = [{ "10": 201 }, { "15": 179 }];
db.collectionName.update({ _id: givenId },
  { $push: { "stores": { $each: keyValArray } } },
  function (err, result) {
    if (err) {
      // return error
    }
    // return success
  }
);

Only one page generated in sitemap with gatsby-plugin-sitemap

I am not able to create a sitemap for my Gatsby site.
The default setting of the plugin creates only one page even though there are several pages:
<sitemap>
  <loc>https://www.thesite.nl/sitemap/sitemap-0.xml</loc>
</sitemap>
If I try to override the default setting with:
query: `{
  site {
    siteMetadata {
      siteUrl
    }
  }
  allSitePage {
    nodes {
      path
    }
  }
}`,
serialize: ({ site, allSitePage }) =>
  allSitePage.nodes
    .filter(node => {
      const path = node.path
      console.log({ path })
      // Filter out 404 pages
      if (path.includes("404")) {
        return false
      }
      // Filter out base pages that don't have a language directory
      return supportedLanguages.includes(path.split("/")[1])
    })
    .map(node => {
      return {
        url: `${site.siteMetadata.siteUrl}${node.path}`,
        changefreq: `weekly`,
        priority: 0.7,
      }
    }),
I get TypeError: Cannot read property 'nodes' of undefined. The problem is that with gatsby develop I can query the nodes like this and get the paths, even though it says undefined here.
I have Gatsby v3, and the only plugin I can think of that might affect this is "gatsby-plugin-intl": "^0.3.3":
{
  resolve: `gatsby-plugin-intl`,
  options: {
    // language JSON resource path
    path: `${__dirname}/src/intl`,
    // supported language
    languages: [`nl`, `en`],
    language: `nl`,
    // language file path
    defaultLanguage: `nl`,
    // option to redirect to `/nl` when connecting `/`
    redirect: false,
  },
},
Any ideas?
Got it to build via custom options with gatsby build && gatsby serve after @FerranBuireu's suggestion to change the query. It now looks like this, but the sitemap is still empty:
const siteUrl = process.env.URL || `https://www.thesite.nl`

{
  resolve: "gatsby-plugin-sitemap",
  options: {
    query: `
      {
        allSitePage {
          nodes {
            path
          }
        }
      }
    `,
    resolveSiteUrl: () => siteUrl,
    resolvePages: ({ allSitePage: { nodes: allPages } }) => {
      return allPages.map(page => {
        return { ...page }
      })
    },
    serialize: ({ path }) => {
      return {
        url: path,
      }
    },
  },
},
I think your issue comes from not setting resolveSiteUrl; in this scenario, the siteUrl needs to be present. According to the docs:
siteMetadata: {
  // If you didn't use the resolveSiteUrl option this needs to be set
  siteUrl: `https://www.example.com`,
},
An ideal full configuration should be:
const siteUrl = process.env.URL || `https://fallback.net`

// In your gatsby-config.js
module.exports = {
  plugins: [
    {
      resolve: "gatsby-plugin-sitemap",
      options: {
        query: `
          {
            allSitePage {
              nodes {
                path
              }
            }
            allWpContentNode(filter: {nodeType: {in: ["Post", "Page"]}}) {
              nodes {
                ... on WpPost {
                  uri
                  modifiedGmt
                }
                ... on WpPage {
                  uri
                  modifiedGmt
                }
              }
            }
          }
        `,
        resolveSiteUrl: () => siteUrl,
        resolvePages: ({
          allSitePage: { nodes: allPages },
          allWpContentNode: { nodes: allWpNodes },
        }) => {
          const wpNodeMap = allWpNodes.reduce((acc, node) => {
            const { uri } = node
            acc[uri] = node
            return acc
          }, {})
          return allPages.map(page => {
            return { ...page, ...wpNodeMap[page.path] }
          })
        },
        serialize: ({ path, modifiedGmt }) => {
          return {
            url: path,
            lastmod: modifiedGmt,
          }
        },
      },
    },
  ],
}
Tweak it and adapt it to fit your query.
I would do something like:
resolveSiteUrl: () => siteUrl,
resolvePages: ({
  allSitePage: { nodes: allPages },
}) => {
  const sitePageNodeMap = allPages.reduce((acc, node) => {
    const { path } = node
    acc[path] = node
    return acc
  }, {})
  return allPages.map(page => {
    return { ...page, ...sitePageNodeMap[page.path] }
  })
},
serialize: ({ path, modifiedGmt }) => {
  return {
    url: path,
    lastmod: modifiedGmt,
  }
},
After mental and technical support from Ferran Buireu I managed to dig deeper.
In the public folder the sitemap was found under sitemap/sitemap-0.xml, giving the right path to all pages at thesite.nl/sitemap/sitemap-0.xml.
Also notable: the <sitemapindex> is a valid element that points to sitemap-0 (https://www.sitemaps.org/protocol.html). Google Search Console still wants the /sitemap/sitemap-0.xml if submitted there.
So it looks like the output of the pages was there most of the time. #idiot

apollo client offsetLimitPagination not working

I have a hook:
export function useLazyProposalList() {
  const [getQueueData, { loading, data, error, fetchMore }] = useLazyQuery(PROPOSAL_LIST, {
    fetchPolicy: 'no-cache',
  });
  const proposalList = React.useMemo(() => {
    if (!data) {
      return null;
    }
    return transformProposals(data);
  }, [data]);
  return {
    getQueueData,
    fetchMore,
    loading,
    data: proposalList,
    error,
  };
}
In the component
const {
  getQueueData,
  data: queueData,
  fetchMore: fetchMoreProposals,
  // loadMore: loadMore,
} = useLazyProposalList();
If the user clicks on the fetch-more button, I call fetchMoreProposals:
await fetchMoreProposals({
  variables: {
    offset: visibleProposalList.length,
  },
});
but this doesn't update my data. I read that we should use offsetLimitPagination, but my data from the query is not itself an array. It looks like this: queue { id: '1', items: [] }, and because of that offsetLimitPagination doesn't work. So I tried a merge function:
cache: new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        queue: {
          keyArgs: false,
          merge(existing, incoming) {
            console.log(existing, incoming);
            if (!incoming) return existing;
            if (!existing) return incoming;
          },
        },
      },
    },
  },
})
but in the console, it just prints refs instead of real data.
What could be the issue?
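Two observations, hedged rather than definitive: a fetchPolicy of 'no-cache' never writes results to the cache at all, so no typePolicies merge function can affect what the hook sees; and the merge above returns undefined once both existing and incoming are set. Seeing refs in the console is also normal, since InMemoryCache stores normalized references. A sketch of a merge that combines the nested items arrays, assuming the queue { id, items } shape from the question:

cache: new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        queue: {
          keyArgs: false,
          merge(existing, incoming, { args }) {
            if (!existing) return incoming;
            // splice the incoming page of items in at the requested offset
            const offset = args && args.offset != null ? args.offset : existing.items.length;
            const items = existing.items.slice(0, offset).concat(incoming.items);
            return { ...incoming, items };
          },
        },
      },
    },
  },
})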

Fail while replicating data from PouchDb to CouchDb

My Code
This is a short version of my current code:
['tableA', 'tableB', 'tableC'].forEach(name => {
  let local = new PouchDB(name, { auto_compaction: true })
  let server = new PouchDB(serverUrl + name)
  var filtro = {
    include_docs: true,
    filter: 'replication/by_dispositivo',
    query_params: { 'dispositivo_id': obj.deviceId }
  }
  local.replicate.from(server, filtro).on('complete', report => {
    var sync = local.sync(server, {
      live: true,
      retry: true,
      ...filtro
    })
  })
})
I'm trying to do a live replication, but for some reason it doesn't replicate the local data to the server; strangely, PouchDB didn't throw any exception.
Inspecting the Network tab in Dev Tools, I can see the following request:
URL: ${serverUrl}/{name}/_revs_diff
Response: {
  "4b0ea507-cd88-4998-baf0-01629b50516b": {
    "missing": [
      "2-2133d30de8d44ebd958cee2b68726ffb"
    ],
    "possible_ancestors": [
      "1-39904a7e55b1cb266c840a2acf34fdc2"
    ]
  }
}
OK, PouchDB detected that something is missing on the server and must be replicated.
Auditing the Sync
Searching for a hint about what is happening, I modified my code to log the complete and error events:
['tableA', 'tableB', 'tableC'].forEach(name => {
  let local = new PouchDB(name, { auto_compaction: true })
  let server = new PouchDB(serverUrl + name)
  let filtro = {
    include_docs: true,
    filter: 'replication/by_dispositivo',
    query_params: { 'dispositivo_id': obj.deviceId }
  }
  local.replicate.from(server, filtro).on('complete', report => {
    let sync = local.sync(server, {
      live: true,
      retry: true,
      ...filtro
    })
    sync.on('error', (error) => {
      console.error(error)
      console.error(JSON.stringify(error, null, 2))
    }).on('complete', (result) => {
      console.log(result)
      console.log(JSON.stringify(result, null, 2))
    })
    window.setTimeout(function (evt) {
      state.syncProcess[database].cancel()
    }, 15000)
  })
})
I didn't catch anything in the error event, and the complete event didn't show any errors, as you can see below.
{
  "push": {
    "ok": true,
    "start_time": "2018-04-06T15:00:42.266Z",
    "docs_read": 0,
    "docs_written": 0,
    "doc_write_failures": 0,
    "errors": [],
    "status": "cancelled",
    "end_time": "2018-04-06T15:00:42.266Z",
    "last_seq": 0
  },
  "pull": {
    "ok": true,
    "start_time": "2018-04-06T15:00:26.422Z",
    "docs_read": 0,
    "docs_written": 0,
    "doc_write_failures": 0,
    "errors": [],
    "last_seq": "17-g1AAAAJDeJyd0EsOgjAQBuAqJj52nkCPILGldCU3UaYzBA3CQl3rTfQmehO9CRZKAiaGiJtpMs18mX8SxtgodpBNdXbSMUKQZDpM4uxwTMxXP2Qwy_N8Fzsh25vGMILIA62QjU8pUrRNCVvGYW4qrCphUgoCfMVd_W2mTQoKaV1JjpWWIUcuu0qbQjp_pBKeUESLH1OlA1PZxTwGudb7EC1dQt5xH6vdrHYvtF6pSZK-4Oov7WG1Z53QUy56UnRK-LJK406-TxIAm8ruDdzts44",
    "status": "cancelled",
    "end_time": "2018-04-06T15:00:41.427Z"
  }
}
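One more listener worth adding, as a hedged suggestion: when the remote validate_doc_update rejects a document (as the authorization design document below can), PouchDB reports it through the sync 'denied' event rather than 'error', so the audit above would never see it:

sync.on('denied', info => {
  // fires when a document fails to replicate, e.g. rejected by validate_doc_update
  console.error('denied', JSON.stringify(info, null, 2))
})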
Calling a one-time local-to-server replication manually
This is my second attempt to catch something useful: I'm auditing the local.replicate.to method.
['tableA', 'tableB', 'tableC'].forEach(name => {
  let local = new PouchDB(name, { auto_compaction: true })
  let server = new PouchDB(serverUrl + name)
  let filtro = {
    include_docs: true,
    filter: 'replication/by_dispositivo',
    query_params: { 'dispositivo_id': obj.deviceId }
  }
  local.replicate.from(server, filtro).on('complete', report => {
    local.replicate.to(server, filtro).on('complete', report => {
      console.log(report)
      console.log(JSON.stringify(report, null, 2))
      let sync = local.sync(server, {
        live: true,
        retry: true,
        ...filtro
      })
    }).on('error', (error) => {
      console.error(error)
      console.error(JSON.stringify(error, null, 2))
    })
  })
})
This time the complete event isn't fired and I catch an error, but it is too generic and doesn't give any clues about what is happening.
{
  "result": {
    "ok": false,
    "start_time": "2018-04-06T15:07:19.105Z",
    "docs_read": 1,
    "docs_written": 0,
    "doc_write_failures": 0,
    "errors": [],
    "status": "aborting",
    "end_time": "2018-04-06T15:07:19.768Z",
    "last_seq": 3
  }
}
Putting local data to the server
This is my last attempt:
1. Query the local and remote databases (in this particular case, I have only one document).
2. Copy the fields from the local doc to the remote doc.
3. Dispatch the updated remote doc to the remote database.
My Junk Code
var deviceId = ''
var listLocal = []
var listServer = []
getDeviceId().then(response => {
  deviceId = response
  return local.find({ selector: { dispositivo_id: deviceId } })
}).then(response => {
  listLocal = response.docs
  return server.find({ selector: { dispositivo_id: deviceId } })
}).then(response => {
  listServer = response.docs
  var tlocal = listLocal[0]
  var tServer = listServer[0]
  Object.keys(tServer).forEach(key => {
    if (key.indexOf("_") !== 0) {
      tServer[key] = undefined
    }
  })
  Object.keys(tlocal).forEach(key => {
    if (key.indexOf("_") !== 0) {
      tServer[key] = tlocal[key]
    }
  })
  return server.put(tServer).then(result => {
    console.log(result)
    console.log(JSON.stringify(result, null, 2))
  }).catch(error => {
    console.error(error)
    console.error(JSON.stringify(error, null, 2))
  })
})
The junk code worked as expected, and I received this response:
{
  "ok": true,
  "id": "4b0ea507-cd88-4998-baf0-01629b50516b",
  "rev": "2-d9363f28e53fdc145610f5ad3f75a043"
}
Additional Information
My design documents in the CouchDb
_design/replication
{
  "_id": "_design/replication",
  "_rev": "1-42df919aaee8ed3fb309bbda999ba03d",
  "language": "javascript",
  "filters": {
    "by_dispositivo": "function(doc, req) {\r\n  return doc._id === '_design/replication' || (doc.dispositivo_id === req.query.dispositivo_id && !doc._deleted)\r\n}",
    "by_situacao_remote": "function(doc, req) {\r\n  return [2, 3, 4, 5].indexOf(doc.situacao) !== -1 && !doc._deleted\r\n}"
  }
}
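For readability, the by_dispositivo filter above, unescaped (identical logic to the JSON string):

function (doc, req) {
  // let the design doc itself through, plus live docs belonging to this device
  return doc._id === '_design/replication' ||
    (doc.dispositivo_id === req.query.dispositivo_id && !doc._deleted)
}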
_design/authorization
{
  "_id": "_design/authorization",
  "_rev": "9-64c4a22645d783c9089c95d69e9424ad",
  "language": "javascript",
  "validate_doc_update": "..."
}
authorization/validate_doc_update
function(newDoc, oldDoc, userCtx) {
  var isAdmin = userCtx.roles.indexOf('_admin') !== -1 || userCtx.roles.indexOf('admin') !== -1;
  if (!isAdmin) {
    if (newDoc._deleted) {
      if (oldDoc.dispositivo_id !== userCtx.name) {
        throw({ forbidden: "..." });
      }
    }
    else {
      if (!newDoc.dispositivo_id || !newDoc.dispositivo_id.trim())
        throw({ forbidden: "..." });
      if (newDoc.dispositivo_id !== userCtx.name) {
        throw({ forbidden: "..." });
      }
      if (oldDoc && oldDoc.dispositivo_id !== userCtx.name) {
        throw({ forbidden: "..." });
      }
      var isRequired = function (prop, msg) {
        var value = newDoc[prop];
        if (!value)
          throw({ forbidden: '...' });
      }
      var isDate = function (prop, msg, allow_null) {
        if (!allow_null)
          isRequired(prop, msg)
        var value = newDoc[prop];
        if (value) {
          var date = new Date(value);
          var isDate = date !== "Invalid Date" && !isNaN(date);
          if (!isDate) {
            throw({ forbidden: msg });
          }
        }
      }
      var isFloat = function (prop, msg, allow_null) {
        if (!allow_null)
          isRequired(prop, msg)
        var value = newDoc[prop];
        if (value) {
          var numero = new Number(value);
          if (!numero || isNaN(numero) || !isFinite(numero)) {
            throw({ forbidden: msg });
          }
        }
      }
      var isInteger = function (prop, msg, allow_null) {
        isFloat(prop, msg, allow_null)
        var value = newDoc[prop];
        if (value) {
          var numero = new Number(value);
          var isInteger = Math.floor(numero) == numero;
          if (!isInteger) {
            throw({ forbidden: msg });
          }
        }
      }
      isRequired("talao_id", "...");
      isRequired("equipe_id", "...");
      isInteger("situacao", '...');
      isDate("data_envio", "...");
      isDate("data_recebimento", "...", true);
      isDate("data_decisao", "...", true);
      isRequired("tipo_ocorrencia_codigo", "...");
      isRequired("tipo_ocorrencia_descricao", "...");
      isInteger("talao_codigo", "...");
      isRequired("talao_descricao", "...");
      isRequired("talao_solicitante", "...");
      isRequired("talao_endereco", "...");
    }
  }
  else if (!newDoc._deleted) {
    if (!newDoc.dispositivo_id || !newDoc.dispositivo_id.trim())
      throw({ forbidden: "..." });
  }
}
While analyzing the stack trace of the exception thrown by local.replicate.to, I noticed the reason: promise.all is not a function.
So I googled for a while and found the topic Webpack: Promise is not a constructor. I just needed to copy the workaround below into my webpack.config and everything worked like a charm:
resolve: {
  alias: {
    'pouchdb-promise$': "pouchdb-promise/lib/index.js"
  }
}
