How to append to an array in IndexedDB filtered by ID? - javascript

Init code:
let dbPromise = null;
const OBJECT_STORE_NAME = 'pages';
const DB_NAME = 'tracking-log';
To initialize the object store:
dbPromise = idb.open(DB_NAME, 3, upgradeDB => {
upgradeDB.createObjectStore(OBJECT_STORE_NAME, {
autoIncrement: true,
keypath: 'id'
});
});
This is how I generate a blank record in the IndexedDB:
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put(
{ id: newBucketID, data: [] });
Now, at a later point, I have some elements that I want to append to the data array for a particular id.
This is how I tried doing it:
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put(
{ id: localStorage.getItem("currentBucket"), data: item }
);
Schema
{
data: Array
}
Every item has a unique key generated and provided by me.
However, this doesn't work and returns an error: "Key already exists in the object store."
So, how can I append a value to a field inside an IDB object?

Not sure about the error, but regardless of that, the basic way of adding an item would be something like this:
function addItem(db, bucketId, item) {
return new Promise(addItemExecutor.bind(null, db, bucketId, item));
}
function addItemExecutor(db, bucketId, item, resolve, reject) {
// Start a single writable transaction that we will use for two requests. One to
// find the corresponding bucket, and one to update it.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
// If all requests completed without error, we are done
tx.oncomplete = resolve;
// If any request fails, the operation fails
tx.onerror = event => reject(event.target.error);
const store = tx.objectStore(OBJECT_STORE_NAME);
// Go find the corresponding bucket object to update
const findRequest = store.get(bucketId);
findRequest.onsuccess = findRequestOnsuccess.bind(findRequest, bucketId, item, reject);
}
// React to the resolution of the get request
function findRequestOnsuccess(bucketId, item, reject, event) {
const bucket = event.target.result;
// If no bucket exists for that id then fail
if(!bucket) {
const error = new Error('No bucket found for id ' + bucketId);
reject(error);
return;
}
// Lazily init the data array property
if(!bucket.data) {
bucket.data = [];
}
// Add our item to the data array
bucket.data.push(item);
// Save the bucket object back into the bucket object store, completely replacing
// the bucket that was there before.
const bucketStore = event.target.source;
bucketStore.put(bucket);
}
async function someCallingCodeExampleAvoidingTopLevelAwait() {
const bucketId = localStorage.currentBucket;
const item = { foo: 'bar' };
const db = evilUnreliableGlobalDbVariableFromSomewhereMagicalForeverOpenAssumeInitialized;
try {
await addItem(db, bucketId, item);
} catch(error) {
console.debug(error);
}
// Leave the database connection open for page lifetime
}

Without a reduced example it's difficult to figure out what's going on. The best way to get help is to create a reduced example of the problem, as in, the smallest amount of code needed to recreate the issue you're seeing, then put it on something like jsbin.com or glitch.com so folks only have to click a link to see the error you're seeing.
I wasn't able to recreate the error you're seeing. You have keypath when it should be keyPath, but I don't think that creates the error you're seeing.
Anyway, here's how to modify a record in IDB:
async function main() {
// Set up the database.
const OBJECT_STORE_NAME = 'pages';
const DB_NAME = 'tracking-log';
const db = await idb.open(DB_NAME, 1, upgradeDB => {
upgradeDB.createObjectStore(OBJECT_STORE_NAME, {
autoIncrement: true,
keyPath: 'id'
});
});
// The OP didn't make it clear what this value was, so I'll guess.
const newBucketID = 1;
{
// Create the record.
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
tx.objectStore(OBJECT_STORE_NAME).put({ id: newBucketID, data: ['first value'] });
}
{
const tx = db.transaction(OBJECT_STORE_NAME, 'readwrite');
// Get the record.
const record = await tx.objectStore(OBJECT_STORE_NAME).get(newBucketID);
// Modify it.
record.data.push('second value');
// Put the modified record back.
tx.objectStore(OBJECT_STORE_NAME).put(record);
}
{
// Read the value to confirm everything worked.
const tx = db.transaction(OBJECT_STORE_NAME);
const value = await tx.objectStore(OBJECT_STORE_NAME).get(newBucketID);
console.log(value);
}
}
main();
And here's that example running: https://jsbin.com/dineguq/edit?js,console

Related

I get all NFT token IDs when I should only get the ones that belong to the current account

I get all NFT token IDs when I should only get the ones that belong to the current account.
I'm getting the token ID for the current account, but also all the other token IDs.
How do I resolve this?
This is the code
const { enableWeb3, account, isWeb3Enabled, Moralis, deactivateWeb3 } = useMoralis();
useEffect(() => {
if(isWeb3Enabled) return
if(typeof window !== 'undefined'){
if(window.localStorage.getItem('connected')){
enableWeb3();
}
}
}, [isWeb3Enabled])
useEffect(() => {
Moralis.onAccountChanged((account) => {
if(account == null){
window.localStorage.removeItem('connected')
deactivateWeb3();
}
})
}, [])
const Web3 = require("web3");
const web3 = new Web3(new Web3.providers.HttpProvider("https://dxt.dexit.network"));
const contractAddress = "0x075D8f52dC5B1F35005dBa8Db40734CBcaBEbd8F"; // replace with the actual contract address
const abi = require('../abi.json'); // replace with the actual ABI of the contract
const contract = new web3.eth.Contract(abi.abi, contractAddress);
async function getNFTs(address) {
const events = await contract.getPastEvents("Transfer", {
filter: { to: address },
fromBlock: 0,
toBlock: "latest"
});
return events.map(event => event.returnValues.tokenId);
}
const test2 = async () => {
console.log(await getNFTs(account));
}
test2();
This is the result. As you can see, I get all the NFTs, but I only want token ID 45.
return events.map(event => event.returnValues.tokenId);
you are mapping every event to its returnValues.tokenId. That is how .map works in JavaScript: it transforms every element of the array and does not filter anything out, so you get the token ID of every Transfer event the query returned.
You are most likely emitting an event when you create an NFT.
event NftItemCreated(
    // you have returnValues
    // I don't know what ReturnValuesStruct is; maybe you have an address recorded here
    ReturnValuesStruct returnValues,
    // you should also add this field
    address creator
);
Now the creator address of the NFT will be logged with the event. Then you should write this logic:
async function getNFTs(address) {
  const events = await contract.getPastEvents("Transfer", {
    filter: { to: address },
    fromBlock: 0,
    toBlock: "latest"
  });
  const addressNfts = events.map((event) => {
    // decoded event parameters live under returnValues in web3.js
    if (event.returnValues.creator === address) {
      return event.returnValues.tokenId;
    }
  });
  return addressNfts;
}
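Note that .map() still produces an entry (undefined) for every event that fails the if check, so the returned array can contain holes. A small variant, still assuming the creator address is exposed under returnValues, is to filter first and then map; it can replace the addressNfts assignment above:
const addressNfts = events
  // keep only the events whose creator matches the current account
  .filter((event) => event.returnValues.creator === address)
  // then map the remaining events to their token IDs
  .map((event) => event.returnValues.tokenId);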

Handle IndexedDB versioning

I have a page that contains data based on which ID it receives; for some business logic I need to store those values into an IndexedDB.
This is my code from the loadData page:
await createIndexDB(querystringID);
And this is what I use to create the IndexedDB:
const createIndexDB= async (ID) => {
return new Promise(function (resolve, reject) {
let isTrueInit = false;
var dbReq = indexedDB.open('myindexDB', 1);
dbReq.onerror = (e) => {
myDB = dbReq.result
myDB.close()
reject({
status: 'error',
message: dbReq.error,
})
}
dbReq.onsuccess = (event) => {
const storeName = 'myindexDB' + ID;
myDB = dbReq.result
if (!myDB.objectStoreNames.contains(storeName)) {
myDB.close()
isTrueInit = true
reject({
status: 'upgradeneeded',
message: "Store doesn't exist",
upgradeNeeded: true,
version: dbReq.result.version,
isTrueInit,
})
} else {
myDB.onversionchange = (e) => {
// Close immediately to allow the upgrade requested by another
// instance to proceed.
myDB.close()
}
resolve({
db: myDB,
isTrueInit,
})
}
}
dbReq.onupgradeneeded = () => {
const storeName = 'myindexDB' + ID;
myDB = dbReq.result;
if (!myDB.objectStoreNames.contains(storeName)) isTrueInit = true
myDB.createObjectStore(storeName, { keyPath: 'itemid', autoIncrement: true })
}
});
}
Then, on the loadData page I execute some functions to retrieve, set, and delete items from the IndexedDB. To do that I use something like this:
await getAllIndexDBItem(querystringID);
const getAllIndexDBItem = async (ID) => {
return new Promise(function (resolve, reject) {
var myDB = null;
var dbReq = indexedDB.open('myindexDB', 1);
dbReq.onsuccess = function (e) {
myDB= dbReq.result;
const transaction = myDB.transaction(['myindexDB' + ID], 'readonly');
const store = transaction.objectStore('myindexDB' + ID);
let request = store.getAll();
request.onsuccess = event => {
const result = request ? request.result : null;
resolve(result);
};
transaction.onerror = event => {
console.log('error getting element from myindexDB' + event.target.error);
reject('error getting element from myindexDB ' + event.target.error);
};
transaction.oncomplete = event => {
};
}
});
}
Everything works with the first ID that I send to the loadData page: it creates the IndexedDB and the object store. But when I send another ID to the loadData page I get the upgradeneeded rejection. If I change this:
var dbReq = indexedDB.open('myindexDB', 1);
To:
var dbReq = indexedDB.open('myindexDB', 2);
Now it works okay with the two different IDs. So my question is: what can I do to handle the IndexedDB versioning properly instead of changing the version manually, taking into consideration that sometimes the IndexedDB will not exist yet?
The short but unhelpful answer is "you're doing it wrong". You should only change the schema (add/delete object stores) when your code and hence your logic requirements for the database are changing.
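For illustration, here is a minimal sketch of what "only change the schema when your code changes" looks like in practice; the store and index names are hypothetical. You bump the version once per shipped schema change and gate each migration on oldVersion:
const req = indexedDB.open('myindexDB', 2);
req.onupgradeneeded = (event) => {
  const db = req.result;
  // Version 1 shipped the original store.
  if (event.oldVersion < 1) {
    db.createObjectStore('items', { keyPath: 'itemid', autoIncrement: true });
  }
  // Version 2 shipped an index; use the upgrade transaction to reach the store.
  if (event.oldVersion < 2) {
    req.transaction.objectStore('items').createIndex('byPageId', 'pageId');
  }
};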
Rather than thinking about separate object stores per ID, a more typical approach is to include the ID in the store's keys. For example, use [ID, other_key] as the key structure in your stores - a compound key with the ID you are partitioning by, and whatever other key(s) you were planning to use.
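A minimal sketch of that compound-key layout, with a hypothetical 'items' store and field names (ID being the value you are partitioning by, as in the question):
const req = indexedDB.open('myindexDB', 1);
req.onupgradeneeded = () => {
  // Compound key [pageId, itemid]. Note that autoIncrement cannot be combined
  // with an array keyPath, so itemid is assigned by the application.
  req.result.createObjectStore('items', { keyPath: ['pageId', 'itemid'] });
};
req.onsuccess = () => {
  const store = req.result.transaction('items', 'readwrite').objectStore('items');
  store.put({ pageId: ID, itemid: 1, value: 'something' });
  // Fetch everything belonging to one ID with a key range over that ID.
  // [ID] sorts before every [ID, x], and [ID, []] sorts after them,
  // because arrays sort last in IndexedDB key order.
  const range = IDBKeyRange.bound([ID], [ID, []]);
  store.getAll(range).onsuccess = (e) => console.log(e.target.result);
};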
Another option would be to use an entirely different database per ID, with a unique name per database. Each such database would be an identical schema. e.g. make the open call open('myDB-' + ID), and use the same store name in each database.
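And a sketch of the database-per-ID option; because every database has the same fixed schema, the version can stay at 1:
const openDbForId = (ID) => new Promise((resolve, reject) => {
  // One database per ID, all sharing an identical schema.
  const req = indexedDB.open('myDB-' + ID, 1);
  req.onupgradeneeded = () => {
    req.result.createObjectStore('items', { keyPath: 'itemid', autoIncrement: true });
  };
  req.onsuccess = () => resolve(req.result);
  req.onerror = () => reject(req.error);
});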

How to remove an array item from Firestore using JavaScript?

I'm trying to add a delete button to my page. The event listener callback works properly except for the updateDoc function.
const deleteBook = document.getElementsByClassName('deleteBook');
for (let i = 0; i < deleteBook.length; i++) {
deleteBook[i].addEventListener('click', async () => {
//book to delete
const bookToDelete = deleteBook[i].parentElement.firstElementChild.textContent
// collection title to delete the book from
const bookCol = deleteBook[i].parentElement.parentElement.parentElement.firstElementChild.textContent
// get a snap of the database
const docRef = doc(dataBase, 'users', `${auth.currentUser.uid}`)
const docSnap = (await getDoc(docRef)).data();
// loop over the collections and get a match with the bookCol
for (const col in docSnap) {
if (docSnap[col].title === bookCol) {
console.log('col to delete from found')
console.log(`book to delete ${bookToDelete}`)
await updateDoc(doc(dataBase, 'users', `${auth.currentUser.uid}`), {
[`${col}.books`]: arrayRemove(`${bookToDelete}`)
}).then(()=>{
// fullfiled
console.log('book deleted')
}, ()=>{
// rejected
console.log('promis rejected')
})
}
}
})
}
col is the object that contains the books array. In the console it always prints "book deleted", but in the Firestore console nothing changes. (A screenshot of the database was attached.)
I would really appreciate any help and thank you.
I have replicated the behavior you're experiencing. I tried changing the content of ${bookToDelete} to an arbitrary word or even an ID; it always logs "book deleted" whether the book was deleted or not. The lines of code below should be changed in order to get the correct output.
.then(()=>{
// fullfiled
console.log('book deleted')
}, ()=>{
// rejected
console.log('promis rejected')
})
I have created a workaround for your use case. See the snippet below:
const db = getFirestore();
const colName = "users";
const arrayName = "books";
const usersCol = collection(db, colName);
const userRef = doc(db, colName, `${auth.currentUser.uid}`);
const arrayRef = `${col}.${arrayName}`; // col comes from the loop over the document's fields in the original code
const q = query(usersCol, where(arrayRef, "array-contains", `${bookToDelete}`));
const querySnapshot = await getDocs(q)
.then((querySnapshot) => {
// Removal of object will not proceed if the querySnapshot is empty.
if ((querySnapshot.empty)) {
console.log("No object found!");
}
else {
// Proceeds to removal of object.
updateDoc(userRef, {
[arrayRef]: arrayRemove(`${bookToDelete}`)
})
.then(() => {
// Check again if the object was deleted successfully.
const querySnapshot = getDocs(q)
.then((querySnapshot) => {
if ((querySnapshot.empty)) {
console.log("Book Deleted!");
}
else {
console.log("Failed!");
}
})
});
}
})
// Catch if there are any Firebase errors.
.catch(error => console.log('Failed!', error));
The workaround I created will query for the object in the array, then remove it if it exists. After removing it, it queries again to check that the object has been deleted and logs "Book Deleted!". Conversely, if the object doesn't exist on the first query, it will not proceed with the removal and logs "No object found!".
The workaround itself can still be improved; you can add any logic you want for your use case.
I'd also recommend creating a Feature Request if you want this kind of check built into the arrayRemove method.

What is going wrong with my Express call? I need an array of IDs but it's returning an empty array

I'm guessing this problem is because I don't know how to use async/await effectively. I still don't get it and I've been trying to understand it for ages. Sigh.
Anyway, here's my function:
app.post("/declineTrades", async (request, response) => {
//---------------------------------------------
const batch = db.batch();
const listingID = request.body.listingID;
const tradeOfferQuery = db
//---------------------------------------------
//Get trade offers that contain the item that just sold
//(therefore it cannot be traded anymore, I need to cancel all existing trade offers that contain the item because this item isn't available anymore)
//---------------------------------------------
.collection("tradeOffers")
.where("status", "==", "pending")
.where("itemIds", "array-contains", listingID);
//---------------------------------------------
//Function that gets all trade offers that contain the ID of the item.
async function getIdsToDecline() {
let tempArray = [];
tradeOfferQuery.get().then((querySnapshot) => {
querySnapshot.forEach((doc) => {
//For each trade offer found
let offerRef = db.collection("tradeOffers").doc(doc.id);
//Change the status to declined
batch.update(offerRef, { status: "declined" });
//Get the data from the trade offer because I want to send an email
//to the user who just got their trade offer declined.
const offerGet = offerRef.get().then((offer) => {
const offerData = offer.data();
//Check the items that the receiving person had in this trade offer
const receiverItemIds = Array.from(
offerData.receiversItems
.reduce((set, { itemID }) => set.add(itemID), new Set())
.values()
);
//if the receiver item id's array includes this item that just sold, I know that
//I can get the sender ID (users can be sender or receiver, so i need to check which person is which)
if (receiverItemIds.includes(listingID)) {
tempArray.push(offerData.senderID);
}
});
});
});
//With the ID's now pushed, return the tempArray
return tempArray;
}
//---------------------------------------------
//Call the above function to get the ID's of people that got declined
//due to the item no longer being available
const peopleToDeclineArray = await getIdsToDecline();
//Update the trade offer objects to declined
const result = await batch.commit();
//END
response.status(201).send({
success: true,
result: result,
idArray: peopleToDeclineArray,
});
});
I'm guessing that my return tempArray is in the wrong place? But I have tried putting it in other places and it still returns an empty array. Is my logic correct here? I need to run the forEach loop and add to the array before the batch.commit happens and before the response is sent.
TIA, guys!
As #jabaa pointed out in their comment, there are problems with an incorrectly chained Promise in your getIdsToDecline function.
Currently the function initializes an array called tempArray, starts executing the trade offer query and then returns the array (which is currently still empty) because the query hasn't finished yet.
While you could throw in await before tradeOfferQuery.get(), this won't solve your problem as it will only wait for the tradeOfferQuery to execute and the batch to be filled with entries, while still not waiting for any of the offerRef.get() calls to be completed to fill the tempArray.
To fix this, we need to make sure that all of the offerRef.get() calls finish first. To get all of these documents, you would use the following code to fetch each document, wait for all of them to complete and then pull out the snapshots:
const itemsToFetch = [ /* ... */ ];
const getAllItemsPromise = Promise.all(
itemsToFetch.map(item => item.get())
);
const fetchedItemSnapshots = await getAllItemsPromise;
For documents based on a query, you'd tweak this to be:
const querySnapshot = /* ... */;
const getSenderDocPromises = [];
querySnapshot.forEach((doc) => {
const senderID = doc.get("senderID");
const senderRef = db.collection("users").doc(senderID);
getSenderDocPromises.push(senderRef.get());
});
const getAllSenderDocPromise = Promise.all(getSenderDocPromises);
const fetchedSenderDataSnapshots = await getAllSenderDocPromise;
However, neither of these approaches is necessary, as the documents you are requesting with these offerRef.get() calls are already returned by your query, so we don't even need to use get() here!
(doc) => {
let offerRef = db.collection("tradeOffers").doc(doc.id);
//Change the status to declined
batch.update(offerRef, { status: "declined" });
//Get the data from the trade offer because I want to send an email
//to the user who just got their trade offer declined.
const offerGet = offerRef.get().then((offer) => {
const offerData = offer.data();
//Check the items that the receiving person had in this trade offer
const receiverItemIds = Array.from(
offerData.receiversItems
.reduce((set, { itemID }) => set.add(itemID), new Set())
.values()
);
//if the receiver item id's array includes this item that just sold, I know that
//I can get the sender ID (users can be sender or receiver, so i need to check which person is which)
if (receiverItemIds.includes(listingID)) {
tempArray.push(offerData.senderID);
}
});
}
could be replaced with just
(doc) => {
// Change the status to declined
batch.update(doc.ref, { status: "declined" });
// Fetch the IDs of items that the receiving person had in this trade offer
const receiverItemIds = Array.from(
doc.get("receiversItems") // <-- this is the efficient form of doc.data().receiversItems
.reduce((set, { itemID }) => set.add(itemID), new Set())
.values()
);
// If the received item IDs includes the listed item, add the
// sender's ID to the array
if (receiverItemIds.includes(listingID)) {
tempArray.push(doc.get("senderID"));
}
}
which could be simplified to just
(doc) => {
//Change the status to declined
batch.update(doc.ref, { status: "declined" });
// Check if any items that the receiving person had in this trade offer
// include the listing ID.
const receiversItemsHasListingID = doc.get("receiversItems")
.some(item => item.itemID === listingID);
// If the listing ID was found, add the sender's ID to the array
if (receiversItemsHasListingID) {
tempArray.push(doc.get("senderID"));
}
}
Based on this, getIdsToDecline actually queues declining the invalid trades and returns the IDs of the affected senders. Instead of using the batch and tradeOfferQuery objects that live outside of the function (which makes this even more unclear), you should roll them into the function and pull it out of the Express handler. I'll also rename it to declineInvalidTradesAndReturnAffectedSenders.
async function declineInvalidTradesAndReturnAffectedSenders(listingID) {
const tradeOfferQuery = db
.collection("tradeOffers")
.where("status", "==", "pending")
.where("itemIds", "array-contains", listingID);
const batch = db.batch();
const affectedSenderIDs = [];
const querySnapshot = await tradeOfferQuery.get();
querySnapshot.forEach((offerDoc) => {
batch.update(offerDoc.ref, { status: "declined" });
const receiversItemsHasListingID = offerDoc.get("receiversItems")
.some(item => item.itemID === listingID);
if (receiversItemsHasListingID) {
affectedSenderIDs.push(offerDoc.get("senderID"));
}
});
await batch.commit(); // generally, the return value of this isn't useful
return affectedSenderIDs;
}
This then would change your route handler to:
app.post("/declineTrades", async (request, response) => {
const listingID = request.body.listingID;
const peopleToDeclineArray = await declineInvalidTradesAndReturnAffectedSenders(listingID);
response.status(201).send({
success: true,
idArray: peopleToDeclineArray,
});
});
Then, adding the appropriate error handling, swapping out the incorrect use of HTTP 201 Created for HTTP 200 OK, and using json() instead of send(), you now get:
app.post("/declineTrades", async (request, response) => {
const listingID = request.body.listingID;
try {
const affectedSenderIDs = await declineInvalidTradesAndReturnAffectedSenders(listingID);
response.status(200).json({
success: true,
idArray: affectedSenderIDs, // consider renaming this key to affectedSenderIDs
});
} catch (error) {
console.error(`Failed to decline invalid trades for listing ${listingID}`, error);
if (!response.headersSent) {
response.status(500).json({
success: false,
errorCode: error.code || "unknown"
});
} else {
response.end(); // forcefully end corrupt response
}
}
});
Note: Even after all these changes, you are still missing any form of authentication. Consider swapping the HTTPS Event Function out for a Callable Function where this is handled for you but requires using a Firebase Client SDK.
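For reference, a rough sketch of what the Callable Function variant could look like; the exported name and payload shape are illustrative, not taken from the original code:
const functions = require("firebase-functions");

exports.declineTrades = functions.https.onCall(async (data, context) => {
  // Callable functions verify the Firebase Auth token for you.
  if (!context.auth) {
    throw new functions.https.HttpsError("unauthenticated", "Sign in first.");
  }
  const affectedSenderIDs =
    await declineInvalidTradesAndReturnAffectedSenders(data.listingID);
  return { success: true, affectedSenderIDs };
});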

Batch update in knex

I'd like to perform a batch update using Knex.js
For example:
'UPDATE foo SET [theValues] WHERE idFoo = 1'
'UPDATE foo SET [theValues] WHERE idFoo = 2'
with values:
{ name: "FooName1", checked: true } // to `idFoo = 1`
{ name: "FooName2", checked: false } // to `idFoo = 2`
I was using node-mysql previously, which allowed multiple statements. While using that, I simply built a multiple-statement query string and sent it through the wire in a single run.
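(For context, this is roughly what that node-mysql approach looks like, as a sketch; it assumes multipleStatements: true is set on the connection, and the connection settings are placeholders.)
const mysql = require('mysql');
const connection = mysql.createConnection({
  host: 'localhost',
  user: 'me',
  database: 'mydb',
  multipleStatements: true // required to send several statements in one query call
});

// Build one string containing every UPDATE, then send it in a single round trip.
const sql = [
  mysql.format('UPDATE foo SET ? WHERE idFoo = ?', [{ name: 'FooName1', checked: true }, 1]),
  mysql.format('UPDATE foo SET ? WHERE idFoo = ?', [{ name: 'FooName2', checked: false }, 2])
].join('; ');

connection.query(sql, (err, results) => { /* handle result */ });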
I'm not sure how to achieve the same with Knex. I can see batchInsert as an API method I can use, but nothing as far as batchUpdate is concerned.
Note:
I can do an async iteration and update each row separately. That's bad because it means lots of round trips from the server to the DB.
I can use Knex's raw() and probably do something similar to what I did with node-mysql. However, that defeats the whole purpose of Knex being a DB abstraction layer (it introduces strong DB coupling).
So I'd like to do this using something "knex-y".
Any ideas welcome.
I needed to perform a batch update inside a transaction (I didn't want to have partial updates in case something went wrong).
I've resolved it the following way:
// I wrap knex as 'connection'
return connection.transaction(trx => {
const queries = [];
users.forEach(user => {
const query = connection('users')
.where('id', user.id)
.update({
lastActivity: user.lastActivity,
points: user.points,
})
.transacting(trx); // This makes every update be in the same transaction
queries.push(query);
});
Promise.all(queries) // Once every query is written
.then(trx.commit) // We try to execute all of them
.catch(trx.rollback); // And rollback in case any of them goes wrong
});
Assuming you have a collection of valid keys/values for the given table:
// abstract transactional batch update
function batchUpdate(table, collection) {
return knex.transaction(trx => {
const queries = collection.map(tuple =>
knex(table)
.where('id', tuple.id)
.update(tuple)
.transacting(trx)
);
return Promise.all(queries)
.then(trx.commit)
.catch(trx.rollback);
});
}
To call it
batchUpdate('user', [...]);
Are you unfortunately subject to non-conventional column names? No worries, I got you fam:
function batchUpdate(options, collection) {
return knex.transaction(trx => {
const queries = collection.map(tuple =>
knex(options.table)
.where(options.column, tuple[options.column])
.update(tuple)
.transacting(trx)
);
return Promise.all(queries)
.then(trx.commit)
.catch(trx.rollback);
});
}
To call it
batchUpdate({ table: 'user', column: 'user_id' }, [...]);
Modern Syntax Version:
const batchUpdate = async (options, collection) => {
  const { table, column } = options;
  const trx = await knex.transaction();
  try {
    await Promise.all(collection.map(tuple =>
      knex(table)
        .where(column, tuple[column])
        .update(tuple)
        .transacting(trx)
    ));
    await trx.commit();
  } catch (error) {
    await trx.rollback();
    throw error; // surface the failure to the caller, like the promise-chain versions above
  }
};
You have a good idea of the pros and cons of each approach. I would recommend a raw query that bulk updates over several async updates. Yes, you can run them in parallel, but your bottleneck becomes the time it takes for the db to run each update. Details can be found here.
Below is an example of a batch upsert using knex.raw. Assume that records is an array of objects (one object for each row we want to update) whose property names line up with the columns in the database table you want to update:
var knex = require('knex'),
_ = require('underscore');
function bulkUpdate (records) {
var updateQuery = [
'INSERT INTO mytable (primaryKeyCol, col2, colN) VALUES',
_.map(records, () => '(?)').join(','),
'ON DUPLICATE KEY UPDATE',
'col2 = VALUES(col2),',
'colN = VALUES(colN)'
].join(' '),
vals = [];
_(records).map(record => {
vals.push(_(record).values());
});
return knex.raw(updateQuery, vals);
}
This answer does a great job explaining the runtime relationship between the two approaches.
Edit:
It was requested that I show what records would look like in this example.
var records = [
{ primaryKeyCol: 123, col2: 'foo', colN: 'bar' },
{ /* some other record, same props */ }
];
Please note that if your record has additional properties beyond the ones you specified in the query, you cannot do:
_(records).map(record => {
vals.push(_(record).values());
});
Because you will hand too many values to the query per record, and knex will fail to match the property values of each record with the ? characters in the query. You will instead need to explicitly push the values from each record that you want to insert into the array, like so:
// assume a record has additional property `type` that you dont want to
// insert into the database
// example: { primaryKeyCol: 123, col2: 'foo', colN: 'bar', type: 'baz' }
_(records).map(record => {
vals.push(record.primaryKeyCol);
vals.push(record.col2);
vals.push(record.colN);
});
There are less repetitive ways of doing the above explicit references, but this is just an example. Hope this helps!
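One less repetitive option, sketched here, is to drive the pushes from an explicit list of the columns that appear in the query; this keeps the array-per-record shape that the (?) placeholders expect:
// Only these columns appear in the INSERT ... ON DUPLICATE KEY UPDATE statement.
const columns = ['primaryKeyCol', 'col2', 'colN'];

_(records).map(record => {
  // One array of values per record, in the same order as the columns above.
  vals.push(columns.map(col => record[col]));
});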
The solution above works great for me! I just included an ID parameter to make it dynamic across tables with custom ID columns. Chenhai, here's my snippet, including a way to return a single array of ID values for the transaction:
function batchUpdate(table, id, collection) {
return knex.transaction((trx) => {
const queries = collection.map(async (tuple) => {
const [tupleId] = await knex(table)
.where(`${id}`, tuple[id])
.update(tuple)
.transacting(trx)
.returning(id);
return tupleId;
});
return Promise.all(queries).then(trx.commit).catch(trx.rollback);
});
}
You can use
response = await batchUpdate("table_name", "custom_table_id", [array of rows to update])
to get the returned array of IDs.
The update can be done in batches, e.g. 1000 rows per batch.
As long as it is done in batches, Bluebird's map can be used to control concurrency.
For more information on Bluebird's map: http://bluebirdjs.com/docs/api/promise.map.html
const Promise = require('bluebird'); // for Promise.map with a concurrency option
const limit = 1000;
const totalRows = 50000;
// one entry per page of `limit` rows: [0, 1, 2, ...]
const seq = count => [...Array(Math.ceil(count / limit)).keys()];

const updateTable = async (dbTable, page) => {
  const offset = limit * page;
  return knex(dbTable).pluck('id').limit(limit).offset(offset).then(ids => {
    return knex(dbTable)
      .whereIn('id', ids)
      .update({ date: new Date() })
      .then((rows) => {
        console.log(`${page} - Updated rows of the table ${dbTable} from ${offset} to ${offset + limit}: `, rows);
      })
      .catch((err) => {
        console.log({ err });
      });
  })
  .catch((err) => {
    console.log({ err });
  });
};

// dbTable is the name of the table being updated
Promise.map(seq(totalRows), page => updateTable(dbTable, page), { concurrency: 1 });
Here pluck() is used to get the IDs in array form.
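For reference, a quick sketch of what pluck() resolves to (table and column names here are just for illustration):
// Resolves to a flat array of column values, e.g. [1, 2, 3],
// rather than an array of row objects like [{ id: 1 }, { id: 2 }, { id: 3 }].
const ids = await knex('users').limit(3).pluck('id');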
