How do I await for subscribe to subscribe - javascript

I used the code below until I found out that the getConnectedUser() function takes longer than verifyUser(), so this.userUID is undefined:
// NOTE(review): subscribe() only registers the callback and returns
// immediately — the callback runs later, when the response arrives.
this.layoutService.getConnectedUser().subscribe(
(data) => {
// Copy the SAML user details onto the shared service object.
this.userSaml = data;
this.layoutService.connectedUser.matricule = this.userSaml.matricule;
this.layoutService.connectedUser.profil = this.userSaml.profil;
this.layoutService.connectedUser.uid = this.userSaml.uid;
this.layoutService.connectedUser.username = this.userSaml.username;
this.layoutService.connectedUser.city = this.userSaml.city;
console.log("dashboard this.layoutService.connectedUser", this.layoutService.connectedUser);
},
(err) => {
throw err;
}
);
// BUG (the point of the question): these statements run before the
// callback above has fired, so connectedUser.uid is still undefined here.
this.userUID = this.layoutService.connectedUser.uid;
console.log("this.userUID", this.userUID);
this.adminService.verifyUser(this.userUID).subscribe(
(data) => {
this.userStatus = data[0].status;
this.userProfile = data[0].profil;
console.log("userProfile" + JSON.stringify(data[0].profil));
this.userExists = true;
},
(err) => {
this.userExists = false;
}
);
So, I wanted to make sure that the getConnectedUser subscribe is completed to call the second one, I changed my code and added the .add method just like that:
// Same lookup, but the follow-up work is chained with .add(), which
// registers a finalizer that runs once the subscription completes/errors.
this.layoutService.getConnectedUser().subscribe(
(data) => {
this.userExistsRefog = true;
this.userSaml = data;
this.layoutService.connectedUser.matricule = this.userSaml.matricule;
this.layoutService.connectedUser.profil = this.userSaml.profil;
this.layoutService.connectedUser.uid = this.userSaml.uid;
this.layoutService.connectedUser.username = this.userSaml.username;
this.layoutService.connectedUser.city = this.userSaml.city;
console.log("home connectedUser", this.layoutService.connectedUser);
},
(err) => {
this.userExistsRefog = false;
throw err;
}
).add(() => {
// Runs after the source observable finalizes, so on success the uid has
// already been copied onto connectedUser by the callback above.
this.userUID = this.layoutService.connectedUser.uid;
console.log("this.userUID", this.userUID);
this.adminService.verifyUser(this.userUID).subscribe(
(data) => {
this.userStatus = data[0].status;
this.userProfile = data[0].profil;
console.log("userProfile" + JSON.stringify(data[0].profil));
this.userExists = true;
},
(err) => {
this.userExists = false;
}
);
});
I want to learn how to use the Async/await way for this example and what is the best approach to adopt for similar functionality ? Thanks

Mainly, you have two ways:
1. Write the verifyUser() call inside the subscription of the getConnectedUser() method; that way you will never get an undefined value.
2. Use promises instead of observable subscriptions, then use async/await to sequence the method calls.
// Await the connected user, then verify it — async/await sequences the two
// calls so verifyUser() can never run before the uid is known.
async userTasks() {
  const usersDetails = await this.layoutService.getConnectedUser().toPromise();
  this.layoutService.connectedUser.uid = usersDetails.uid;
  // Bug fix: the original passed this.userUID, which was never assigned in
  // this method (still undefined) — record the uid before using it.
  this.userUID = usersDetails.uid;
  this.adminService.verifyUser(this.userUID).subscribe(
    (data) => {
      this.userStatus = data[0].status;
      this.userProfile = data[0].profil;
      console.log("userProfile" + JSON.stringify(data[0].profil));
      this.userExists = true;
    },
    (err) => {
      this.userExists = false;
    }
  );
}

Related

cancel multiple promises inside a promise on unmount?

Hi, I want to cancel a promise on unmount, since I received this warning:
Warning: Can't perform a React state update on an unmounted component. This is a no-op, but it indicates a memory leak in your application. To fix, cancel all subscriptions and asynchronous tasks in the componentWillUnmount method.
My code:
// Wraps a promise so its settlement can be suppressed after cancel():
// once cancelled, the wrapper rejects with { isCanceled: true } instead of
// forwarding the original resolution or rejection.
const makeCancelable = (promise: Promise<void>) => {
  // Flipped by cancel(); checked when the underlying promise settles.
  let cancelFlag = false;
  const wrappedPromise = new Promise((resolve, reject) => {
    const settle =
      (deliver: (v: any) => void) =>
      (outcome: any) =>
        cancelFlag ? reject({ isCanceled: true }) : deliver(outcome);
    promise.then(settle(resolve), settle(reject));
  });
  return {
    promise: wrappedPromise,
    cancel() {
      cancelFlag = true;
    },
  };
};
useEffect(() => {
// Resolve the image: serve the cached file when present, otherwise
// download it into the cache directory; fall back to the remote uri.
const initialize = async () => {
// Derive the local cache path from the remote uri.
const getImageFilesystemKey = (remoteUri: string) => {
const [_, fileName] = remoteUri.split('toolbox-talks/');
return `${cacheDirectory}${fileName}`;
};
const filesystemUri = getImageFilesystemKey(uri);
try {
// Use the cached image if it exists
const metadata = await getInfoAsync(filesystemUri);
if (metadata.exists) {
console.log('resolve 1');
setFileUri(filesystemUri);
} else {
const imageObject = await downloadAsync(uri, filesystemUri);
console.log('resolve 2');
setFileUri(imageObject.uri);
}
// otherwise download to cache
} catch (err) {
console.log('error 3');
setFileUri(uri);
}
};
const cancelable = makeCancelable(initialize());
cancelable.promise
.then(() => {
console.log('reslved');
})
.catch((e) => {
console.log('e ', e);
});
// Cleanup: mark the wrapper cancelled on unmount.
// NOTE(review): this only suppresses the wrapper's settlement — initialize()
// keeps running and still calls setFileUri, which is why the warning persists.
return () => {
cancelable.cancel();
};
}, []);
But I still get the warning on a fast press — help me, please?
You're cancelling the promise, but you are not cancelling the axios call or any of the logic that happens after it inside initialize(). So while it is true that the console won't print resolved, setFileUri will be called regardless, which causes your problem.
A solution could look like this (untested):
// Guards a promise behind a cancellation flag: after cancel(), the wrapper
// rejects with { isCanceled: true } regardless of how the source settled.
const makeCancelable = (promise: Promise<void>) => {
  // Set to true by cancel(); consulted when `promise` settles.
  let isCancelled = false;
  const wrapped = new Promise((resolve, reject) => {
    promise.then(
      (value) => {
        if (isCancelled) {
          reject({ isCanceled: true });
        } else {
          resolve(value);
        }
      },
      (reason) => {
        if (isCancelled) {
          reject({ isCanceled: true });
        } else {
          reject(reason);
        }
      }
    );
  });
  return {
    promise: wrapped,
    cancel() {
      isCancelled = true;
    }
  };
};
// Resolves the file URI for `uri`: returns the cached copy when present,
// otherwise downloads into the cache; falls back to the remote uri on error.
// NOTE(review): relies on `uri`, `cacheDirectory`, getInfoAsync and
// downloadAsync from the enclosing component/module scope.
const initialize = async () => {
const getImageFilesystemKey = (remoteUri: string) => {
const [_, fileName] = remoteUri.split("toolbox-talks/");
return `${cacheDirectory}${fileName}`;
};
const filesystemUri = getImageFilesystemKey(uri);
try {
// Use the cached image if it exists
const metadata = await getInfoAsync(filesystemUri);
if (metadata.exists) {
console.log("resolve 1");
return filesystemUri;
} else {
// otherwise download to cache
const imageObject = await downloadAsync(uri, filesystemUri);
console.log("resolve 2");
return imageObject.uri;
}
} catch (err) {
console.error("error 3", err);
return uri;
}
};
useEffect(() => {
const cancelable = makeCancelable(initialize());
cancelable.promise.then(
fileURI => {
console.log("resolved");
// State is only touched when the promise was NOT cancelled.
setFileUri(fileURI);
},
() => {
// Your logic is such that it's only possible to get here if the promise is cancelled
console.log("cancelled");
}
);
// On unmount, cancel the wrapper so the state setter above never fires.
return () => {
cancelable.cancel();
};
}, []);
This ensures that you will only call setFileUri if the promise is not cancelled (I did not check the logic of makeCancelable).

MongoDB Find queries slow while updating/inserting schema

I'm doing a big loop once a day - which updating existing documents in the database (and also inserting new documents).
This loop is executed on a separate server (to keep the main server from slowing down), but the main problem is that all the find queries on the database are very slow while the loop runs (the loop slows them down significantly).
This is a very big issue on my website (the loop must be executed once a day), and I've been trying to find a solution online, but I couldn't manage to find anything.
Is there any way to prevent the find queries from being so slow while inserting/updating the database??
// Re-imports a company's product list: parses the uploaded rows, diffs them
// against the products already stored for the company, then updates existing
// products and inserts new ones in one async.each pass.
//
//   name      - company name used to look the company up
//   products  - raw rows to import
//   map       - column-name -> product-field mapping candidates
//   valuesMap - value-normalisation table forwarded to normalizeValue()
//
// Resolves with counters { created, updated, manual, errors }.
// NOTE(review): `new Promise(async function ...)` is an anti-pattern (a throw
// before resolve/reject is swallowed); kept to preserve the external contract
// while fixing the concrete bugs flagged below.
uploadProductsManually = async (name, products, map, valuesMap) => {
  return new Promise(async function (resolve, reject) {
    const company = await Company.findOne({ name }).exec();
    if (!company) return reject(new errors.NotFound("Company not found"));
    const rows = products;
    const parsedRows = [];
    // Bug fix: the original resolved with the outer `errors` *module*;
    // collect failed identifiers in a dedicated local array instead.
    const importErrors = [];
    // Maps a raw column name onto a canonical product field.
    const findCorrectKey = (key) => {
      const correctKey = key.trim();
      if (productFields[correctKey]) return productFields[correctKey];
      const category = map.find((item) => {
        return item.options.some((option) => {
          return option.trim().toLowerCase() === correctKey.toLowerCase();
        });
      });
      const categoryName = category && category.name;
      return productFields[categoryName];
    };
    // Stable identity for a row: prefer explicit ids, else hash attributes.
    const hashProductValues = (product) => {
      let valueToHash;
      if (product.productId) {
        valueToHash = product.productId;
      } else if (product.certificateId) {
        valueToHash = product.certificateId;
      } else {
        valueToHash = JSON.stringify(
          product.size + product.color
        );
      }
      return base64encode(valueToHash);
    };
    // Normalise every incoming row into the product schema.
    rows.forEach(function (row, i) {
      var newProduct = {};
      for (var key in row) {
        var val = row[key];
        if (val) {
          let normalizedKey = findCorrectKey(key);
          if (normalizedKey) {
            newProduct[normalizedKey] = val;
          }
          let normalizedValue = normalizeValue(normalizedKey, val, valuesMap);
          newProduct[normalizedKey] = normalizedValue;
        }
      }
      newProduct.channels = [];
      if (newProduct.productId) {
        parsedRows.push(newProduct);
      }
    });
    fetchProducts();
    // Loads the company's current products and indexes them by identity hash.
    function fetchProducts() {
      Product.find({ company: company._id }).exec(function (err, products) {
        if (err) console.log(err);
        var map = {};
        if (products) {
          products.forEach(function (product) {
            const productIdentifier = hashProductValues(product);
            map[productIdentifier] = product;
            if (product.productStatus == "manual") {
              // product.isAvailable = false;
              // product.save();
            } else {
              product.status = "deleted";
              product.save();
            }
          });
        }
        mergeData(map);
      });
    }
    // Upserts the parsed rows against the indexed existing products.
    async function mergeData(map) {
      let created = 0;
      let updated = 0;
      let manual = 0;
      async.each(
        parsedRows,
        function (row, callback) {
          const productIdentifier = hashProductValues(row);
          let product = map[productIdentifier];
          if (product) {
            map[productIdentifier] = undefined;
            // Bug fix: the original referenced undeclared `id` and
            // `updatedProduct`, which threw a ReferenceError at runtime.
            Product.findByIdAndUpdate(product._id, { $set: row }, function (
              err,
              updatedProd
            ) {
              if (err) {
                importErrors.push(productIdentifier);
                console.log("err is:", err);
              }
              updated++;
              callback();
            });
          } else {
            row = new Product(row);
            row.save(function (err) {
              if (err) {
                importErrors.push(productIdentifier);
                console.log(err);
              }
              created++;
              callback();
            });
          }
        },
        (err) => {
          if (err) return reject(err);
          Company.findByIdAndUpdate(
            company._id,
            { lastUpdate: new Date() },
            function (err, comp) {
              if (err) console.log(err);
            }
          );
          // Bug fix: `\manual` was a broken escape; use a real newline.
          console.log(
            `Created: ${created}\nUpdated: ${updated}\nmanual: ${manual}`
          );
          resolve({
            created,
            updated,
            manual,
            errors: importErrors,
          });
        }
      );
    }
  });
};

Synchronized in typescript angular 5

I have the following code.
// Wraps the getCommissionIncentives observable in a Promise so callers can
// await the order load plus the derived commission calculations.
public async getOrderInforAsync(customerOrderId) {
return new Promise((resolve, reject) => {
this.orderEntryService.getCommissionIncentives(customerOrderId)
.subscribe(
response => {
// Merge the payload into the order and revive the date fields.
Object.assign(this.order, response);
this.order.bookingDate = this.order.bookingDate ? new Date(this.order.bookingDate) : null;
this.order.estBillDate = this.order.estBillDate ? new Date(this.order.estBillDate) : null;
this.order.orderSplit.forEach(element => {
element.rcNumberFullName = `${this.order.customerOrderRCNumber}${element.rcNumberSuffix}`;
});
// Recompute derived totals and the highlight state.
this.initialQuantityAllocated();
this.orderSummary.allocatedEstCommissionPercent = this.calculateTotalOrderPercent();
this.orderSummary.allocatedEstCommissionAmount = this.calculateTotalOrderAmount();
this.highlight = this.checkOrderSummary(this.orderSummary.allocatedEstCommissionPercent, this.orderSummary.allocatedEstCommissionAmount);
// NOTE(review): if these two calls are asynchronous, resolve() below
// does not wait for them — the crux of the question.
this.calculateAllocatedActualPercent();
this.calculateAllocatedActualAmount();
this.onChangeEstSalesPrice();
resolve();
},
error => {
reject();
}
);
});
}
Sometimes resolve() is called before this.calculateAllocatedActualPercent() and this.calculateAllocatedActualAmount() have finished.
So how can I make this code run in order, meaning every function in this block has completed before resolve() is called?
Try this :
// Promise wrapper that only resolves once both API-backed calculations have
// completed.
public async getOrderInforAsync(customerOrderId) {
  return new Promise((resolve, reject) => {
    this.orderEntryService.getCommissionIncentives(customerOrderId)
      .subscribe(
        // Bug fix: the original used `await` inside a plain (non-async)
        // callback, which is a syntax error — the callback must be async.
        async response => {
          Object.assign(this.order, response);
          this.order.bookingDate = this.order.bookingDate ? new Date(this.order.bookingDate) : null;
          this.order.estBillDate = this.order.estBillDate ? new Date(this.order.estBillDate) : null;
          this.order.orderSplit.forEach(element => {
            element.rcNumberFullName = `${this.order.customerOrderRCNumber}${element.rcNumberSuffix}`;
          });
          this.initialQuantityAllocated();
          this.orderSummary.allocatedEstCommissionPercent = this.calculateTotalOrderPercent();
          this.orderSummary.allocatedEstCommissionAmount = this.calculateTotalOrderAmount();
          this.highlight = this.checkOrderSummary(this.orderSummary.allocatedEstCommissionPercent, this.orderSummary.allocatedEstCommissionAmount);
          // Awaiting guarantees both calculations finish before resolve().
          await this.calculateAllocatedActualPercent();
          await this.calculateAllocatedActualAmount();
          this.onChangeEstSalesPrice();
          resolve();
        },
        error => {
          reject();
        }
      );
  });
}
// Sketch: resolves with the API data, rejects when none is returned.
async calculateAllocatedActualPercent() {
  // Bug fix: `new Promise(resolve, reject) { ... }` is not valid syntax —
  // the Promise constructor takes a single executor function.
  return new Promise((resolve, reject) => {
    // call api
    if (data)
      resolve(data);
    else
      reject()
  });
}
// Sketch: resolves with the API data, rejects when none is returned.
async calculateAllocatedActualAmount() {
  // Bug fix: `new Promise(resolve, reject) { ... }` is not valid syntax —
  // the Promise constructor takes a single executor function.
  return new Promise((resolve, reject) => {
    // call api
    if (data)
      resolve(data);
    else
      reject()
  });
}
An async function always returns a Promise; declaring a function as async is what allows you to use await calls inside its body.
Change your observable returned from this.orderEntryService.getCommissionIncentives(customerOrderId) .toPromise() and then await it.
Try this:
// Promise-based variant: convert the observable with toPromise() and await
// it, so everything below runs strictly after the response arrives.
public async getOrderInforAsync(customerOrderId) {
  try {
    const response = await this.orderEntryService
      .getCommissionIncentives(customerOrderId).toPromise();
    this.order = { ...response };
    this.order.bookingDate = this.order.bookingDate ? new Date(this.order.bookingDate) : null;
    this.order.estBillDate = this.order.estBillDate ? new Date(this.order.estBillDate) : null;
    // Bug fix: the original compared the index against the array itself
    // (i < this.order.orderSplit), so the loop body never executed.
    for (let i = 0; i < this.order.orderSplit.length; i++) {
      const element = this.order.orderSplit[i];
      element.rcNumberFullName = `${this.order.customerOrderRCNumber}${element.rcNumberSuffix}`
    }
    this.initialQuantityAllocated();
    this.orderSummary.allocatedEstCommissionPercent = this.calculateTotalOrderPercent();
    this.orderSummary.allocatedEstCommissionAmount = this.calculateTotalOrderAmount();
    this.highlight = this.checkOrderSummary(this.orderSummary.allocatedEstCommissionPercent, this.orderSummary.allocatedEstCommissionAmount);
    this.calculateAllocatedActualPercent();
    this.calculateAllocatedActualAmount();
    this.onChangeEstSalesPrice();
    return response;
  } catch (error) {
    // NOTE(review): returning the error makes a failure look like success
    // to callers — consider rethrowing. Kept to preserve the contract.
    return error;
  }
}

How to calculate final price with forEach and Mongoose queries?

I have a difficult final-price calculating issues with Mongoose Queries. I have three functions. This one get's an ID and external quantity, finds the material price, and finally mulitply it.
// Computes the price of `quantity` units of piece `id`:
// meterage * material price * quantity.
// Returns { final_price } on success, undefined on a database error.
async function getPrivatePrice(id, quantity) {
  try {
    // Bug fix: the original mixed await with .then/.catch; a failed query
    // logged the error and then crashed reading `piece.material` from
    // undefined. Plain awaits plus one try/catch cover both queries.
    const piece = await Piece.findOne({'_id': id}).select({"meterage": 1, "material": 1, "_id" : 0});
    const price = await Material.findOne({'_id': piece.material}).select({"_id" : 0, "price" : 1});
    const final_price = piece.meterage * price.price * quantity;
    return { final_price : final_price }
  } catch(err) {
    console.log(err);
  }
}
I'm really having difficulties with the second part of the code, because I need to get many id's from another function, so, I decided to use a forEach to call this function, and get all these prices and put them in an array, but I don't know how to return it.
// Resolves the final price of every piece in parallel and returns the list
// of prices (same order as `pieces`).
async function getFinalCost(pieces) {
  try {
    // Bug fix: the original used forEach with an async callback, so the
    // function returned the still-empty array before any push happened.
    // Promise.all awaits every price before returning.
    const prices = await Promise.all(
      pieces.map((piece) =>
        PieceController.getPrivatePrice(piece["_id"], piece["quantity"])
      )
    );
    return prices.map((piecePrice) => piecePrice["final_price"]);
  } catch (err) {
    console.log(err);
  }
}
I like to know how to get that array here:
// Builds a Product from the request body and computes its pieces' cost.
// Bug fix: the original logged getFinalCost's unresolved Promise; the
// handler is now async and awaits the result (async handlers are a
// backward-compatible signature for Express-style routes).
async function createProduct(req, res) {
  let params = req.body;
  let product = new Product();
  product.name = params.name;
  product.reference = params.reference;
  product.pieces = params.pieces;
  product.color = params.color;
  let piece_price = await getFinalCost(product.pieces);
  console.log(piece_price);
}
You clearly need to refactor your code. What I would do is use something like bluebird to run a map over the pieces list like so.
const Promise = require('bluebird');
// Fetches a piece's meterage/material plus the matching material's price.
async function getMoreProductInfo(id) {
  // The original wrapped these awaits in try { ... } catch (e) { throw e; },
  // which is a no-op — rejections propagate to the caller either way.
  const productInfo = await Piece.findOne({'_id': id}).select({"meterage": 1, "material": 1, "_id" : 0});
  const materialInfo = await Material.findOne({'_id': productInfo.material}).select({"_id" : 0, "price" : 1});
  return { productInfo, materialInfo };
}
// Computes meterage * material price * quantity for every piece, in parallel.
async function calculatePrices(pieces) {
  // Native Promise.all suffices here — bluebird's Promise.map adds nothing
  // without a concurrency option — and the original
  // try { ... } catch (e) { throw e; } wrapper was a no-op.
  return Promise.all(
    pieces.map(async (piece) => {
      const { productInfo, materialInfo } = await getMoreProductInfo(piece._id);
      return materialInfo.price * piece.quantity * productInfo.meterage;
    })
  );
}
I cannot test this code as-is, but I think it should be pretty straightforward to tell what's going on.

Updating many(100k+) documents in the most efficient way MongoDB

I have a function that runs periodically, that updates the item.price of some Documents in my Prices Collection. The Price Collection has 100k+ items. The function looks like this:
//Just a helper function for multiple GET requests with request.
// Fires a GET request for every url and invokes cb(results) once all have
// answered; results is keyed by the final request url.
// Bug fix: `handler` was assigned without declaration — an implicit global,
// and a ReferenceError under strict mode. It is now a proper const.
let _request = (urls, cb) => {
  const results = {};
  let completed = 0;
  const handler = (err, response, body) => {
    const url = response.request.uri.href;
    results[url] = { err, response, body };
    if (++completed === urls.length) {
      cb(results);
    }
  };
  let i = urls.length;
  while (i--) {
    request(urls[i], handler);
  }
};
// function to update the prices in our Prices collection.
// Rebuilds the Prices collection: wipes it, re-fetches every url, and bulk
// inserts the fresh price documents.
const update = (cb) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      // Bug fix: the original assigned to an undeclared `id` (implicit
      // global) — declare it with the other locals.
      let url, response, id;
      for (url in responses) {
        id = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          // Bug fix: the original interpolated `err`, which is not in
          // scope here — the request error is response.err.
          logger.error(`Error in request to ${url}: ${response.err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`)
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
          // One price document per hash_name in the payload.
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              itemid: id,
              hash_name: key,
              price: jsonResult[key]
            }
          });
          Price.insertMany(allItemsArray).then(docs => {
            logger.info(`Saved docs for ${id}`)
          }, (e) => {
            logger.error(`Error saving docs.`);
          });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    })
  });
}
As you can see, to avoid iterating through 100k+ Documents, and updating each and every one of them separately, I delete them all at the beginning, and just call the API that gives me these Items with prices, and use InsertMany to Insert all of them into my Prices Collection.
This updating process will happen every 30 minutes.
But I just now realised, what if some user wants to check the Prices and my Prices Collection is currently empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to not delete it? (Remember, there are MANY documents to be updated every 30 mins.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
// Second attempt: instead of insertMany into an emptied collection, upsert
// each item through an unordered bulk op keyed on market_hash_name.
const update = (cb = null) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        gameid = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          // Bug fix: `err` is not in scope here — log response.err.
          logger.error(`Error in request to ${url}: ${response.err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`)
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              game_id: gameid,
              market_hash_name: key,
              price: jsonResult[key]
            }
          });
          let bulk = Price.collection.initializeUnorderedBulkOp();
          allItemsArray.forEach(item => {
            bulk.find({market_hash_name: item.market_hash_name})
              .upsert().updateOne(item);
          });
          bulk.execute((err, bulkers) => {
            if (err) {
              // Bug fix: the original interpolated `e`, which is undefined
              // in this callback — the error variable is `err`.
              return logger.error(`Error bulking: ${err}`);
            }
            logger.info(`Updated Items for ${gameid}`)
          });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    })
  });
}
Notice the bulk variable now (thanks #Rahul) — but now the collection takes ages to update. My processor is burning up, and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deleted everything and then reinserted it, was also about 10x faster.
Anyone?
From my experience (updating millions of mongo docs on a hourly basis), here's a realistic approach to very large bulk updates:
do all your API calls separately and write results in as bson into a file
invoke mongoimport and import that bson file into a new empty collection prices_new. Javascript, let alone high-level OO wrappers, are just too slow for that
rename prices_new -> prices dropTarget=true (this will be atomic hence no downtime)
Schematically, it would look like this in JS
let fname = '/tmp/data.bson';
let apiUrls = [...];
async function doRequest(url) {
// perform a request and return an array of records
}
let responses = await Promise.all(apiUrls.map(doRequest));
// if the data too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n'));
await fs.writeFile(fname, data);
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
// Streams all url responses into MongoDB bulkWrite upserts — no collection
// wipe, so readers never observe an empty Prices collection.
const update = (cb) => {
  _request(urls, responses => {
    let bulkUpdateOps = [], gameid;
    // Bug fix: `responses` is an object keyed by url (see _request), so
    // iterate Object.keys — responses.forEach does not exist on an object.
    Object.keys(responses).forEach(url => {
      let response = responses[url];
      gameid = url.split('/')[5].split('?')[0];
      if (response.err) {
        logger.error(`Error in request to ${url}: ${response.err}`);
        return;
      }
      if (response.body) {
        logger.info(`Request to ${url} successful.`)
        let jsonResult = {};
        try {
          jsonResult = JSON.parse(response.body);
        } catch (e) {
          logger.error(`Could not parse.`);
        }
        Object.keys(jsonResult).forEach(key => {
          bulkUpdateOps.push({
            "updateOne": {
              "filter": { market_hash_name: key },
              "update": { "$set": {
                game_id: gameid,
                price: jsonResult[key]
              } },
              "upsert": true
            }
          });
        });
      }
      // Bug fix: many ops are pushed per url, so the length can jump past
      // 1000 and `=== 1000` might never fire; flush on >= instead.
      if (bulkUpdateOps.length >= 1000) {
        Price.bulkWrite(bulkUpdateOps).then(result => {
          logger.info(`Updated Items`)
        }).catch(e => logger.error(`Error bulking: ${e}`));
        bulkUpdateOps = [];
      }
    });
    if (bulkUpdateOps.length > 0) {
      Price.bulkWrite(bulkUpdateOps).then(result => {
        logger.info(`Updated Items`)
      }).catch(e => logger.error(`Error bulking: ${e}`));
    }
    // Bug fix: the original invoked cb outside the _request callback, i.e.
    // synchronously, before any response had been processed.
    if (cb && typeof cb == 'function') {
      cb();
    }
  });
}
I have not tested anything but you can try this, might be helpful. I am using bluebird library for concurrency.
// Promise wrapper around request(): resolves with the body, rejects on error.
let _request = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) {
        // Bug fix: the original fell through to resolve(body) right after
        // reject(err); return so the promise settles exactly once.
        return reject(err);
      }
      resolve(body);
    });
  });
};
// Shapes a raw response into the { query, body } pair that bulkUpsert()
// consumes: `query` selects the document, `body` is the update payload.
const formatRespose = async (response) => {
  // do stuff
  const query = {}; // itemid: id,
  const body = {};
  return { query, body };
}
// Executes one unordered bulk upsert for the prepared items; each item
// carries { query, body } as produced by formatRespose().
// NOTE(review): presumably bulk.execute errors on an empty batch — callers
// should ensure allItemsArray is non-empty. TODO confirm.
const bulkUpsert = (allItemsArray) => {
let bulk = Price.collection.initializeUnorderedBulkOp();
return new Promise((resolve, reject) => {
allItemsArray.forEach(item => {
bulk.find(item.query).upsert().updateOne(item.body);
});
bulk.execute((err, bulkers) => {
if (err) {
return reject(err);
}
return resolve(bulkers);
});
});
}
// Fetches each url, formats the payload, and bulk-upserts the batch.
// Bug fixes: the original used `await` inside a non-async map callback (a
// syntax error) and handed bulkUpsert an array of pending promises;
// Promise.all now resolves the whole batch first.
const getAndUpdateData = async (urls) => {
  const allItemsArray = await Promise.all(
    urls.map(async (url) => {
      const requestData = await _request(url); // you can make this also parallel
      return formatRespose(requestData); // yields {query: {}, body: {}}
    })
  );
  return bulkUpsert(allItemsArray);
};
// Splits the url list into chunks of 100 and processes one chunk at a time.
// Bug fixes: `return await chunk` inside a non-async callback was a syntax
// error, and getAndUpdateData was invoked while building the chunk list, so
// every request started immediately and the concurrency limit did nothing.
// Chunks are now plain url slices and the work starts inside the mapper.
function update() {
  var i, j, chunkUrls = [],
    chunk = 100;
  for (i = 0, j = urls.length; i < j; i += chunk) {
    chunkUrls.push(urls.slice(i, i + chunk));
  }
  Bluebird.map(chunkUrls, function (urlsChunk) {
    return getAndUpdateData(urlsChunk);
  }, {
    concurrency: 1 // one chunk of 100 requests in flight at a time
  }).then(function () {
    console.log("done");
  }).catch(function () {
    console.log("error");
  });
}

Categories

Resources