Updating data in an Object using Firebase arrayUnion - javascript

I am looking to update data in an object without changing that object's index within the array that contains it. As it currently stands, the code removes the current object from the array and then applies arrayUnion to add the updated object, but that pushes the object to the end of the array. I just want to update the data without the object losing its index. This is the code I am currently working with. I looked through the Firebase docs to see if there was a way to update the object in place but couldn't find anything; could anyone point me in the right direction, please?
await firestore.update(project, {
  pages: firestore.FieldValue.arrayRemove(page),
});
await firestore.update(project, {
  pages: firestore.FieldValue.arrayUnion(newPage),
});

Unfortunately there is no field transform to replace a value like this:
firestore.FieldValue.arrayReplace(page, newPage);
Storing arrays and making changes by index in remote databases is generally discouraged. This older Firebase blog post covers some of the reasons why, even though it was written with the Firebase Realtime Database in mind.
If the order of that array is important, you have two options:
fetch the array, mutate it, and then write it back (simple)
fetch the array, find the relevant index, and update only that index (difficult)
To achieve the first option, you would use a transaction to find the previous value and replace it:
const db = firebase.firestore();
const projectDocRef = db.doc("projects/projectId");

function replacePage(oldPage, newPage) {
  return db.runTransaction(async (t) => {
    const snapshot = await t.get(projectDocRef);
    if (!snapshot.exists) {
      // no previous data, abort.
      return "aborted";
    }
    const pagesArray = snapshot.get("pages");
    const index = pagesArray.findIndex((page) => page === oldPage);
    if (index === -1)
      return "not-found";
    pagesArray[index] = newPage;
    await t.set(projectDocRef, { pages: pagesArray }, { merge: true });
    return "replaced";
  });
}

replacePage("index", "shop")
  .then((result) => console.log("Page replacement was " + (result === "replaced" ? "" : "not ") + "successful"))
  .catch((err) => console.error("failed: ", err));
Note: Anything beyond this point is educational. There are many issues with this approach at scale.
Because Firestore doesn't support array entry replacement by index, you'll need a way to update an index using something Firestore does understand: maps. With some FirestoreDataConverter trickery, you can serialize your array as a map when you write it to Cloud Firestore and deserialize it back to an array when you read it. The major trade-off is in how you can query your data: you will be able to perform queries by index (such as where('pages.0', '==', 'shop')), but you'll lose the ability to use array-contains queries (such as where('pages', 'array-contains', 'shop')).
First, you need to define the converter:
// const obj = {};
// setNestedProperty(obj, ["a", "b", "c"], true)
// obj is now { "a": { "b": { "c": true } } }
function setNestedProperty(originalObj, pathPropsArray, val) {
  const props = pathPropsArray.slice(0, -1);
  const lastProp = pathPropsArray[pathPropsArray.length - 1];
  const parent = props.reduce((obj, p) => obj[p] ? obj[p] : (obj[p] = {}), originalObj);
  parent[lastProp] = val;
}
const pagesArrayConverter = {
  toFirestore(data) {
    if (data.pages !== undefined) {
      // step 1) convert array to map
      const pagesAsMap = {};
      data.pages.forEach((page, index) => {
        if (page !== undefined) {
          pagesAsMap[index] = page;
        }
      });
      data.pages = pagesAsMap;
      // step 2) if there are any mutations to "pages"
      // while you are changing it, make the
      // changes now before uploading to Firestore
      Object.keys(data)
        .filter(k => k.startsWith("pages."))
        .forEach(k => {
          const nestedValue = data[k];
          data[k] = undefined;
          delete data[k];
          setNestedProperty(pagesAsMap, k.slice(6).split("."), nestedValue);
        });
    }
    return data;
  },
  fromFirestore(snapshot, options) {
    const data = snapshot.data(options);
    if (data.pages !== undefined) {
      const pagesAsArray = [];
      Object.entries(data.pages)
        .forEach(([index, page]) => pagesAsArray[index] = page);
      // `pagesAsArray` may have empty elements, so we need
      // to fill in the gaps with `undefined`:
      data.pages = Array.from(pagesAsArray);
    }
    return data;
  }
};
Which you would then attach to a query/reference like this:
const db = firebase.firestore();
const projectDocRef = db.doc("projects/projectId")
  .withConverter(pagesArrayConverter);
If you already know that the previous value has an index of 2, you can just use:
await projectDocRef.set({ "pages.2": newPage }, { merge: true });
If you need to find it like before, you can use a transaction:
function replacePage(oldPage, newPage) {
  return db.runTransaction(async (t) => {
    const snapshot = await t.get(projectDocRef);
    if (!snapshot.exists) {
      // no previous data, abort.
      return "missing";
    }
    const data = snapshot.data();
    // data is a { pages: Page[] }
    const index = data.pages.findIndex((page) => page === oldPage);
    if (index === -1)
      return "not-found";
    await t.set(projectDocRef, { ["pages." + index]: newPage }, { merge: true });
    return "replaced";
  });
}
replacePage("index", "shop")
.then((result) => console.log("Page replacement was " + (result === "replaced" ? "" : " not") + " successful"))
.catch((err) => console.error('failed: ', err));
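As a side note on the querying trade-off mentioned above, here is a hedged sketch (not from the original answer) of an index-based query once the converter is attached; the "projects" collection path is assumed:
// Because the converter stores `pages` as a map, you can filter by position.
const shopFirstQuery = db.collection("projects")
  .withConverter(pagesArrayConverter)
  .where("pages.0", "==", "shop");
const matchingProjects = await shopFirstQuery.get();
matchingProjects.forEach((doc) => console.log(doc.id, doc.data().pages));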

arrayUnion adds new items to the array and arrayRemove removes items from it. There isn't any way to update an existing item in an array directly.
You would have to fetch the document, manually add or update the item at the relevant index, and then write the whole array back to the document.
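For example, here is a minimal, non-transactional sketch of that read-modify-write, assuming the plain web SDK, a "projects" collection, and the page/newPage values from the question:
// Sketch only: read the array, replace the element in place, write it back.
const projectRef = firestore.collection("projects").doc(projectId); // assumed reference
const snapshot = await projectRef.get();
const pages = snapshot.get("pages") || [];
const index = pages.indexOf(page); // use findIndex for object elements
if (index !== -1) {
  pages[index] = newPage;              // keeps the original position
  await projectRef.update({ pages });  // overwrites the whole array
}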

Related

Check for duplicates in database before pushing item logic in JS

Can someone please let me know what the issue is with this code?
I get the database ref and store the results in obj; if obj is empty, I push the new item. Otherwise I run a loop to see if the item already exists.
The code not only doesn't follow the intended logic, it also pushes 3 items on the third try, 4 on the 4th, and so on.
This is confusing; why is it not working? I have implemented the check for whether the strings are equal, but I'm not sure about the rest.
saveIng = (item) => {
  const reference = ref(
    database,
    "users/" + this.state.user + "/ingredients"
  );
  get(ref(database, "users/" + this.state.user + "/ingredients"))
    .then((snapshot) => {
      var obj = snapshot.val();
      if (obj === null) {
        push(reference, {
          name: item,
        });
      } else {
        for (let x in obj) {
          var found = obj[x].name == item;
          if (!found) {
            continue;
          } else {
            push(reference, {
              name: item,
            });
          }
        }
      }
    })
    .catch((error) => {
      console.log(error);
    });
};
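Based on the intent described above (only push when the name is not already present), a hedged sketch of one possible restructuring, using the same modular API calls as the question, is to decide after the loop has finished:
// Sketch only: check every existing ingredient first, then push at most once.
saveIng = (item) => {
  const reference = ref(database, "users/" + this.state.user + "/ingredients");
  get(reference)
    .then((snapshot) => {
      const obj = snapshot.val();
      let found = false;
      for (let x in obj) {              // a null obj simply skips the loop
        if (obj[x].name === item) {
          found = true;                 // duplicate exists, stop looking
          break;
        }
      }
      if (!found) {
        push(reference, { name: item }); // push only when no duplicate was found
      }
    })
    .catch((error) => {
      console.log(error);
    });
};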

Wait for all Firebase data query requests before executing code

I am trying to fetch data from different collections in my Cloud Firestore database before I process it and apply it to a batch. I created two async functions: one to capture the data and another to execute certain code only after all the data is collected. I didn't want the code executing and creating errors before the data is fetched. When I try to access matchesObject after the async data-collection function has finished, it keeps saying "cannot access a property matchStatus of undefined". I thought I took care of that with async and await? Could anyone shed some light as to why it is undefined?
axios.request(options).then(function(response) {
  console.log('Total matches count :' + response.data.matches.length);
  const data = response.data;

  var matchesSnapshot;
  var marketsSnapshot;
  var tradesSnapshot;
  var betsSnapshot;

  matchesObject = {};
  marketsObject = {};
  tradesObject = {};
  betsObject = {};

  start();

  async function checkDatabase() {
    matchesSnapshot = await db.collection('matches').get();
    matchesSnapshot.forEach(doc => {
      matchesObject[doc.id] = doc.data();
      console.log('matches object: ' + doc.id.toString())
    });

    marketsSnapshot = await db.collection('markets').get();
    marketsSnapshot.forEach(doc2 => {
      marketsObject[doc2.id] = doc2.data();
      console.log('markets object: ' + doc2.id.toString())
    });

    tradesSnapshot = await db.collection('trades').get();
    tradesSnapshot.forEach(doc3 => {
      tradesObject[doc3.id] = doc3.data();
      console.log('trades object: ' + doc3.id.toString())
    });

    betsSnapshot = await db.collection('bets').get();
    betsSnapshot.forEach(doc4 => {
      betsObject[doc4.id] = doc4.data();
      console.log('bets object: ' + doc4.id.toString())
    });
  }

  async function start() {
    await checkDatabase();
    // this is the part which is undefined, it keeps saying it cant access property matchStatus of undefined
    console.log('here is matches object ' + matchesObject['302283']['matchStatus']);
    if (Object.keys(matchesObject).length != 0) {
      for (let bets of Object.keys(betsObject)) {
        if (matchesObject[betsObject[bets]['tradeMatchId']]['matchStatus'] == 'IN_PLAY' && betsObject[bets]['matched'] == false) {
          var sfRef = db.collection('users').doc(betsObject[bets]['user']);
          batch11.set(sfRef, {
            accountBalance: admin.firestore.FieldValue + parseFloat(betsObject[bets]['stake']),
          }, {
            merge: true
          });
          var sfRef = db.collection('bets').doc(bets);
          batch12.set(sfRef, {
            tradeCancelled: true,
          }, {
            merge: true
          });
        }
      }
    }
  }
});
There are too many smaller issues in the current code to try to debug them one-by-one, so this refactor introduces various tests against your data. It currently won't make any changes to your database and is meant to be a replacement for your start() function.
One of the main differences against your current code is that it doesn't unnecessarily download 4 collections worth of documents (two of them aren't even used in the code you've included).
Steps
First, it will get all the bet documents where matched == false. From these documents, it will check for data errors and report them to the console. For each valid bet document, the ID of its linked match document is grabbed so we can then fetch only the match documents we actually need. Then we queue up the changes to the user's balance and to the bet document. Finally, we report how many changes are queued and commit them (once you uncomment the line).
Code
Note: fetchDocumentsById() is defined in this gist. It's a helper function that allows someCollectionRef.where(FieldPath.documentId(), 'in', arrayOfIds) to take more than 10 IDs at once.
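The gist itself is not reproduced here; a hypothetical sketch of such a helper, assuming the Admin SDK and chunking the IDs into groups of 10 for the 'in' operator, could look like this:
// Hypothetical reconstruction of the helper referenced above (not the gist's code).
// Fetches every document whose ID is in `ids`, 10 at a time, and invokes
// `forEachDoc` for each document snapshot found.
async function fetchDocumentsById(collectionRef, ids, forEachDoc) {
  const chunks = [];
  for (let i = 0; i < ids.length; i += 10) {
    chunks.push(ids.slice(i, i + 10));
  }
  await Promise.all(chunks.map(async (chunk) => {
    const snap = await collectionRef
      .where(admin.firestore.FieldPath.documentId(), 'in', chunk)
      .get();
    snap.forEach(forEachDoc);
  }));
}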
async function applyBalanceChanges() {
  const betsCollectionRef = db.collection('bets');
  const matchesCollectionRef = db.collection('matches');
  const usersCollectionRef = db.collection('users');

  const betDataMap = {}; // Record<string, BetData>
  await betsCollectionRef
    .where('matched', '==', false)
    .get()
    .then((betsSnapshot) => {
      betsSnapshot.forEach(betDoc => {
        betDataMap[betDoc.id] = betDoc.data();
      });
    });

  const matchDataMap = {}; // Record<string, MatchData | undefined>

  // betIdList contains all IDs that will be processed
  const betIdList = Object.keys(betDataMap).filter(betId => {
    const betData = betDataMap[betId];
    if (!betData) {
      console.log(`WARN: Skipped Bet #${betId} because it was falsy (actual value: ${betData})`);
      return false;
    }
    const matchId = betData.tradeMatchId;
    if (!matchId) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy match ID (actual value: ${matchId})`);
      return false;
    }
    if (!betData.user) {
      console.log(`WARN: Skipped Bet #${betId} because it had a falsy user ID (actual value: ${betData.user})`);
      return false;
    }
    const stakeAsNumber = Number(betData.stake); // not using parseFloat as it's too lax
    if (isNaN(stakeAsNumber)) {
      console.log(`WARN: Skipped Bet #${betId} because it had an invalid stake value (original NaN value: ${betData.stake})`);
      return false;
    }
    matchDataMap[matchId] = undefined; // using undefined because it's the result of `doc.data()` when the document doesn't exist
    return true;
  });

  await fetchDocumentsById(
    matchesCollectionRef,
    Object.keys(matchDataMap),
    (matchDoc) => matchDataMap[matchDoc.id] = matchDoc.data()
  );

  const batch = db.batch();
  let queuedUpdates = 0;

  betIdList.forEach(betId => {
    const betData = betDataMap[betId];
    const matchData = matchDataMap[betData.tradeMatchId];

    if (matchData === undefined) {
      console.log(`WARN: Skipped /bets/${betId}, because its linked match doesn't exist!`);
      return;
    }

    if (matchData.matchStatus !== 'IN_PLAY') {
      console.log(`INFO: Skipped /bets/${betId}, because its linked match status is not "IN_PLAY" (actual value: ${matchData.matchStatus})`);
      return;
    }

    const betRef = betsCollectionRef.doc(betId);
    const betUserRef = usersCollectionRef.doc(betData.user);

    batch.update(betUserRef, { accountBalance: admin.firestore.FieldValue.increment(Number(betData.stake)) });
    batch.update(betRef, { tradeCancelled: true });
    queuedUpdates += 2; // for logging
  });

  console.log(`INFO: Batch currently has ${queuedUpdates} queued`);

  // only uncomment when you are ready to make changes
  // batch.commit();
}
Usage:
axios.request(options)
  .then(function(response) {
    const data = response.data;
    console.log('INFO: Total matches count from API: ' + data.matches.length);
    return applyBalanceChanges();
  });

Return array value from produce function | immer.js

I am using immer.js to perform operations on arrays in the state.
Arrays: basicRecipe and recipeBasicRecipe.
I am modifying the draft.basicRecipe in the produce function. My objective is to return the updated "draft.basicRecipe" value and store the same in temparray1.
let temparray1 = produce(state, draft => {
  draft.basicRecipe = draft.basicRecipe.map(item => {
    let element = draft.recipeBasicRecipes.find(e => e._id === item._id);
    console.log(element);
    if (element) {
      item.details = item.details.map(e => {
        let detail = element.details.find(d => d._id === e._id);
        if (detail) {
          e.rate = detail.rate;
        }
        return e;
      });
    }
    return item;
  });
  return draft.basicRecipe;
});
console.log(temparray1);
console.log(temparray1);
When I return the draft, I am able to see the updated basicRecipe nested in the output.
I get the error below when I try to return the array, i.e. draft.basicRecipe:
[Immer] An immer producer returned a new value *and* modified its draft. Either return a new value *or* modify the draft
This code is a mess. You are using map, which returns a new array, but you're also trying to mutate the original draft object.
This is still unreadable and confusing, but at least by using forEach instead of map we are just mutating the draft and not trying to do two things at once.
let temparray1 = produce(state, (draft) => {
  draft.basicRecipe.forEach((item) => {
    let element = draft.recipeBasicRecipes.find((e) => e._id === item._id);
    if (element) {
      item.details.forEach((e) => {
        let detail = element.details.find((d) => d._id === e._id);
        if (detail) {
          e.rate = detail.rate;
        }
      });
    }
  });
});
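A possible follow-up (a sketch that is not part of the answer above): since produce returns the whole next state, the updated array can simply be read off the result, which avoids returning anything from inside the producer:
// `temparray1` above is the entire next state; pull the array out of it.
const updatedBasicRecipe = temparray1.basicRecipe;
console.log(updatedBasicRecipe); // same contents, with every index preserved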

Draft.js. How to get all entities data from the ContentState

From the official docs I know about two methods: getting an entity by its key and getting the last created entity. In my case, I also need a way to access all entities in the current ContentState.
Is there any method that can do this? If not, is there one that can provide all the entity keys?
const getEntities = (editorState, entityType = null) => {
  const content = editorState.getCurrentContent();
  const entities = [];
  content.getBlocksAsArray().forEach((block) => {
    let selectedEntity = null;
    block.findEntityRanges(
      (character) => {
        if (character.getEntity() !== null) {
          const entity = content.getEntity(character.getEntity());
          if (!entityType || (entityType && entity.getType() === entityType)) {
            selectedEntity = {
              entityKey: character.getEntity(),
              blockKey: block.getKey(),
              entity: content.getEntity(character.getEntity()),
            };
            return true;
          }
        }
        return false;
      },
      (start, end) => {
        entities.push({ ...selectedEntity, start, end });
      });
  });
  return entities;
};
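For example, assuming an editorState is in scope, the helper above could be used like this (the 'LINK' type is only an illustration):
// All entities regardless of type:
const allEntities = getEntities(editorState);
// Only entities of a given type, e.g. links:
const linkEntities = getEntities(editorState, 'LINK');
linkEntities.forEach(({ entityKey, blockKey, start, end, entity }) => {
  console.log(entityKey, blockKey, start, end, entity.getData());
});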
This is how I get all the entity keys:
const contentState = editorState.getCurrentContent()
const entityKeys = Object.keys(convertToRaw(contentState).entityMap)
result:
[0, 1]
Then you can call the getEntity(key) method to get the corresponding entity.
Bao, you will find it inside the key called 'blocks':
convertToRaw(contentState).blocks.map(el => el.text)
This will give you an array of the raw text.
Unfortunately, your suggested way using convertToRaw doesn't work, because it reindexes all keys to ["0", ..., "n"], while the real keys differ as you interact with the editor: new keys greater than n will be added and unused keys will be omitted.
const rawState = convertToRaw(contentState)
const { entityMap } = rawState;
This entityMap will have a list of all entities. But this is an expensive conversion, because it converts the whole thing to raw. A better way is to loop through the blocks and check for entities.
You'll have to look at every character:
const { editorState } = this.state; // assumes you store `editorState` on `state`
const contentState = editorState.getCurrentContent();

let entities = [];
contentState.getBlockMap().forEach(block => { // could also use .map() instead
  block.findEntityRanges(character => {
    const charEntity = character.getEntity();
    if (charEntity) { // could be `null`
      const contentEntity = contentState.getEntity(charEntity);
      entities.push(contentEntity);
    }
  });
});
Then you could access it via:
entities.forEach((entity, i) => {
  if (entity.get('type') === 'ANNOTATION') {
    const data = entity.get('data');
    // do something
  }
});

Limit number of records in firebase

Every minute I have a script that pushes a new record into my Firebase database.
What I want is to delete the oldest records once the length of the list reaches a fixed value.
I have been through the docs and other posts, and the closest thing I have found so far is something like this:
// Max number of Arduino records to keep.
const MAX_ARDUINO = 10;

exports.arduinoResponseLength = functions.database.ref('/arduinoResponse/{res}').onWrite(event => {
  const parentRef = event.data.ref.parent;
  return parentRef.once('value').then(snapshot => {
    if (snapshot.numChildren() >= MAX_ARDUINO) {
      let childCount = 0;
      let updates = {};
      snapshot.forEach(function(child) {
        if (++childCount <= snapshot.numChildren() - MAX_ARDUINO) {
          updates[child.key] = null;
        }
      });
      // Update the parent. This effectively removes the extra children.
      return parentRef.update(updates);
    }
  });
});
The problem is that onWrite seems to download all the related data every time it is triggered.
That is fine when the list is not very long, but I have around 4000 records, and every month I blow through my Firebase download quota because of it.
Would anyone know how to handle this kind of situation?
OK, so in the end I came up with 3 functions. One updates the count of Arduino records, one completely recounts them if the counter is missing, and the last one uses the counter to build a query with the limitToFirst filter so it retrieves only the relevant data to remove.
It is actually a combination of these two examples provided by Firebase:
https://github.com/firebase/functions-samples/tree/master/limit-children
https://github.com/firebase/functions-samples/tree/master/child-count
Here is my final result:
const MAX_ARDUINO = 1500;

exports.deleteOldArduino = functions.database.ref('/arduinoResponse/{resId}/timestamp').onWrite(event => {
  const collectionRef = event.data.ref.parent.parent;
  const countRef = collectionRef.parent.child('arduinoResCount');

  return countRef.once('value').then(snapCount => {
    return collectionRef.limitToFirst(snapCount.val() - MAX_ARDUINO).transaction(snapshot => {
      snapshot = null;
      return snapshot;
    })
  });
});

exports.trackArduinoLength = functions.database.ref('/arduinoResponse/{resId}/timestamp').onWrite(event => {
  const collectionRef = event.data.ref.parent.parent;
  const countRef = collectionRef.parent.child('arduinoResCount');

  // Return the promise from countRef.transaction() so our function
  // waits for this async event to complete before it exits.
  return countRef.transaction(current => {
    if (event.data.exists() && !event.data.previous.exists()) {
      return (current || 0) + 1;
    } else if (!event.data.exists() && event.data.previous.exists()) {
      return (current || 0) - 1;
    }
  }).then(() => {
    console.log('Counter updated.');
  });
});

exports.recountArduino = functions.database.ref('/arduinoResCount').onWrite(event => {
  if (!event.data.exists()) {
    const counterRef = event.data.ref;
    const collectionRef = counterRef.parent.child('arduinoResponse');

    // Return the promise from counterRef.set() so our function
    // waits for this async event to complete before it exits.
    return collectionRef.once('value')
      .then(arduinoRes => counterRef.set(arduinoRes.numChildren()));
  }
});
I have not tested it yet, but I will post my results soon!
I also heard that one day Firebase will add a "size" query, which is definitely missing in my opinion.
