Asynchronous Callback to Array.map() - javascript

I started learning Node.js a few weeks ago. I am not able to understand why the 'products' array contains null instead of the desired objects.
On Line 13, when I console log the object, I get the desired object, but I don't understand why the entries are null when I console log them on Line 40, after the map function has completed its execution.
If the array length is 2 (which should imply successful pushing), why are the objects stored inside still null instead of the objects I wanted to store?
[Screenshots of the console output and the Order schema omitted]
exports.getOrders = async (req, res, next) => {
  const userOrders = [];
  Order.find({ 'user.userId': req.user._id }).then((orders) => {
    console.log(orders); // Line 4
    async.eachSeries(
      orders,
      function (order, callback) {
        const products = order.products.map((p) => {
          const seriesId = p.product.seriesId;
          const volumeId = p.product.volumeId;
          Series.findById(seriesId).then((series) => {
            const volume = series.volumes.id(volumeId);
            console.log(volume, p.quantity); // Line 13
            return {
              seriesTitle: volume.parent().title,
              volume: volume,
              quantity: p.quantity
            };
          });
        });
        console.log('Product Array Length: ', products.length); // Line 21
        if (products.length !== 0) {
          const data = {
            productData: products,
            orderData: {
              date: order.date,
              totalPrice: order.totalPrice
            }
          };
          userOrders.push(data);
          callback(null);
        } else {
          callback('Failed');
        }
      },
      function (err) {
        if (err) {
          console.log('Could not retrieve orders');
        } else {
          console.log(userOrders); // Line 40
          res.render('shop/orders', {
            docTitle: 'My Orders',
            path: 'orders',
            orders: userOrders,
            user: req.user
          });
        }
      }
    );
  });
};

In line 8, order.products.map returns an array of empty (null/undefined) values because the mapping is asynchronous: for each product you call Series.findById, which returns a promise that only produces its value once it resolves. The map callback itself returns nothing, and you never wait for the promise, so every iteration yields an empty entry.
You have to map to an array of promises first, then call Promise.all to wait for them to resolve; after that, you will get the expected values.
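In general the pattern is just two steps; a minimal sketch (somethingAsync is a hypothetical placeholder for any promise-returning call):
// 1. map to an array of promises (return the promise from the callback)
const promises = items.map((item) => somethingAsync(item));
// 2. wait for all of them; results arrive in the same order as items
const results = await Promise.all(promises);
Applied to your code: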
exports.getOrders = async (req, res, next) => {
  const userOrders = [];
  Order.find({ 'user.userId': req.user._id }).then((orders) => {
    console.log(orders); // Line 4
    async.eachSeries(
      orders,
      function (order, callback) {
        const productPromise = order.products.map((p) => {
          const seriesId = p.product.seriesId;
          const volumeId = p.product.volumeId;
          // ANSWER: Return the following promise
          return Series.findById(seriesId).then((series) => {
            const volume = series.volumes.id(volumeId);
            console.log(volume, p.quantity); // Line 13
            return {
              seriesTitle: volume.parent().title,
              volume: volume,
              quantity: p.quantity
            };
          });
        });
        // ANSWER: wait for all the promises
        Promise.all(productPromise)
          .then(function (products) {
            console.log('Product Array Length: ', products.length);
            if (products.length !== 0) {
              const data = {
                productData: products,
                orderData: {
                  date: order.date,
                  totalPrice: order.totalPrice
                }
              };
              userOrders.push(data);
              callback(null);
            } else {
              callback('Failed');
            }
          });
      },
      function (err) {
        if (err) {
          console.log('Could not retrieve orders');
        } else {
          console.log(userOrders); // Line 40
          res.render('shop/orders', {
            docTitle: 'My Orders',
            path: 'orders',
            orders: userOrders,
            user: req.user
          });
        }
      }
    );
  });
};
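Since getOrders is already an async function, the same fix can also be written with await instead of .then chains and async.eachSeries. A sketch under that assumption (same models, untested):
exports.getOrders = async (req, res, next) => {
  try {
    const orders = await Order.find({ 'user.userId': req.user._id });
    const userOrders = [];
    for (const order of orders) {
      // wait for every product lookup of this order before moving on
      const products = await Promise.all(order.products.map(async (p) => {
        const series = await Series.findById(p.product.seriesId);
        const volume = series.volumes.id(p.product.volumeId);
        return {
          seriesTitle: volume.parent().title,
          volume: volume,
          quantity: p.quantity
        };
      }));
      if (products.length === 0) throw new Error('Failed');
      userOrders.push({
        productData: products,
        orderData: { date: order.date, totalPrice: order.totalPrice }
      });
    }
    res.render('shop/orders', {
      docTitle: 'My Orders',
      path: 'orders',
      orders: userOrders,
      user: req.user
    });
  } catch (err) {
    console.log('Could not retrieve orders');
  }
};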

Related

Counter not increasing in async map function

I am working with MongoDB and Node.js. I have an array of customers, and I have to create each one in the database.
const promises2 = customers.map(async (customer) => {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
  }
});
await Promise.all([...promises2]);
The issue is that the counter is not increasing every time; I am getting the same counter value in all the created customers. What is the issue here?
The issue is something like this, but that question doesn't have an answer.
The problem is that all the calls overlap. Since the first thing they each do is get the current counter, they all get the same counter, then try to use it. Fundamentally, you don't want to do this:
const counter = await Counter.findOne({ type: "Customer" });
// ...
await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
...because it creates a race condition: overlapping asynchronous operations can both get the same sequence value and then both issue an update to it.
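To make the interleaving concrete, here is one possible ordering (my illustration) with two overlapping calls:
// Task A: findOne()  -> reads sequence_value = 5
// Task B: findOne()  -> also reads sequence_value = 5 (A hasn't updated yet)
// Task A: Customer.create({ customerId: 5 })
// Task B: Customer.create({ customerId: 5 })  <- duplicate ID!
// Task A: findOneAndUpdate($inc) -> sequence_value = 6
// Task B: findOneAndUpdate($inc) -> sequence_value = 7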
You want an atomic operation for incrementing and retrieving a new ID. I don't use MongoDB, but I think the findOneAndUpdate operation can do that for you if you add the returnNewDocument option. If so, the minimal change would be to swap over to using that:
const promises2 = customers.map(async (customer) => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
});
await Promise.all([...promises2]);
...but there's no reason to create an array and then immediately copy it, just use it directly:
await Promise.all(customers.map(async (customer) => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
The overall operation will fail if anything fails, and only the first failure is reported back to your code (the other operations then continue and succeed or fail as the case may be). If you want to know everything that happened (which is probably useful in this case), you can use allSettled instead of all:
// Gets an array of {status, value/reason} objects
const results = await Promise.allSettled(customers.map(async (customer) => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
const errors = results.filter(({ status }) => status === "rejected").map(({ reason }) => reason);
if (errors.length) {
  // Handle/report errors here
}
Promise.allSettled was added in ES2020, but it is easily polyfilled if needed.
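A minimal polyfill sketch, assuming only Promise.all and Promise.resolve are available:
if (!Promise.allSettled) {
  Promise.allSettled = (promises) =>
    Promise.all(Array.from(promises, (p) =>
      Promise.resolve(p).then(
        (value) => ({ status: 'fulfilled', value }),
        (reason) => ({ status: 'rejected', reason })
      )
    ));
}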
If I'm mistaken about the above use of findOneAndUpdate in some way, I'm sure MongoDB gives you a way to get those IDs without a race condition. But in the worst case, you can pre-allocate the IDs instead, something like this:
// Allocate IDs (in series)
const ids = [];
for (const customer of customers) {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
    ids.push(counter.sequence_value);
  } else {
    ids.push(customer.customerId); // keep ids index-aligned with customers
  }
}
// Create customers (in parallel)
const results = await Promise.allSettled(customers.map(async (customer, index) => {
  const customerId = ids[index];
  try {
    await Customer.create({
      customerId
    });
  } catch (e) {
    // Failed, remove the counter, but without allowing any error doing so to
    // shadow the error we're already handling
    try {
      await Counter.someDeleteMethodHere(/*...customerId...*/);
    } catch (e2) {
      // ...perhaps report `e2` here, but don't shadow `e`
    }
    throw e;
  }
}));
// Get just the errors
const errors = results.filter(({ status }) => status === "rejected").map(({ reason }) => reason);
if (errors.length) {
  // Handle/report errors here
}
Make sure the array you pass to Promise.all actually contains the promises; here the result of map was never assigned, so promises2 stayed empty and Promise.all resolved immediately.
Try this:
const promises2 = customers.map((customer) => {
  return new Promise(async (resolve) => {
    if (!customer.customerId) {
      const counter = await Counter.findOne({ type: 'Customer' });
      console.log({ counter });
      const payload = {
        customerId: counter.sequence_value,
      };
      await Customer.create(payload);
      await Counter.findOneAndUpdate({ type: 'Customer' }, { $inc: { sequence_value: 1 } });
    }
    resolve();
  });
});
await Promise.all(promises2);

Search an array of objects to match objectId from mongodb

I'm trying to search an array of objects by a key that contains a nested ObjectId, for populating.
My object
{
  service: 'user',
  isModerator: true,
  isAdmin: true,
  carts: [
    {
      _id: 5e1344dcd4c94a1554ae0191,
      qty: 1,
      product: 5e09e4e0fcda6f268cefef3f,
      user: 5e0dda6d6853702038da60f0,
      expireAt: 2020-01-06T14:31:56.708Z,
      __v: 0
    },
    {
      _id: 5e13455306a54b31fc71b371,
      qty: 1,
      product: 5e09e507fcda6f268cefef40, // object ID
      user: 5e0dda6d6853702038da60f0,
      expireAt: 2020-01-06T14:33:55.573Z,
      __v: 0
    },
  ],
}
I want to check whether the carts array contains a cart with the product the user is adding, so that instead of adding it a second time I can increment the qty of the existing one.
My code
const itemId = req.body._id;
const userId = req.user._id;
const { qty } = req.body;
try {
  const producto = await Product.findById(itemId);
  const user = await User.findById(userId).populate({
    path: 'carts',
  });
  const result = user.carts.find((o) => {
    console.log(typeof o.product); // returns object
    console.log(typeof producto._id); // returns object
    return o.product === producto._id;
  });
  console.log(result); // returns undefined
  if (result !== undefined) {
    const foundCart = await Cart.findById(result._id);
    foundCart.qty += qty;
    await foundCart.save();
    return res.json({ message: 1 });
  }
  const newCart = new Cart({
    qty,
    product: producto,
    user,
  });
  const cart = await newCart.save();
  user.carts.push(cart);
  await user.save();
  return res.json({ message: 1 });
} catch (error) {
  return console.log(error);
}
I think the problem is this line
return o.product === producto._id
Can you change it like this and try?
return o.product.toString() === producto._id.toString()
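That works because two ObjectId values are objects, so === compares references rather than contents. Alternatively, assuming these are MongoDB/Mongoose ObjectIds, the built-in .equals() method compares them by value:
return o.product.equals(producto._id);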

map() in node.js, async vs sync?

I have a segment of code like below running in Node.js, and I find it always goes to the else condition, even though masterData is not null.
getOperationDetails(req, res) {
  let sql = 'select a.*, b.s*';
  sql += ` from ${paymentSheet} a left join ${paymentHisSheet} b on a.id= b.source_id `;
  sql += ' where a.id=? ';
  func.connPool(sql, id, (err, rows, field) => {
    if (err) {
      res.json({ code: 400, message: err });
    } else {
      let masterData = [];
      let details = rows.map((row, idx) => {
        if (idx === 0) {
          masterData.push({
            id: row.id,
            name: row.name
          });
        }
        return {
          operator: row.operator_info,
          comments: row.cmt,
          status: row.sta
        };
      });
      if (masterData.length > 0) {
        masterData[0].details = details;
      } else {
        console.log(sql);
        console.log(id);
        console.log('=======================');
        console.log(masterData);
      }
      res.json({ code: 200, message: 'ok', data: masterData });
    }
  });
}
For example, the console shows the output below. Obviously masterData has a value, which suggests the 'if' condition runs before map(). Do I have to use async to wait for map() to finish handling the data?
allConnections:2
select a.*, b.* from payment a left join history b on a.id= b.source_id where a.id=?
83e588cd-9b4b-4592-ac7f-529bfaa9b231
=======================
allConnections:2
allConnections:2
[
  {
    id: '83e588cd-9b4b-4592-ac7f-529bfaa9b231',
    name: 'Jeff'
  }
]
My analysis: the rows from the database should look like below
83e588cd-9b4b-4592-ac7f-529bfaa9b231', 'Jeff', 'Operator Peter', 'OK', 0
83e588cd-9b4b-4592-ac7f-529bfaa9b231', 'Jeff', 'Operator Mary', 'NO', 1
83e588cd-9b4b-4592-ac7f-529bfaa9b231', 'Jeff', 'Operator Jet', 'OK', 2
or like below, meaning there are no details
83e588cd-9b4b-4592-ac7f-529bfaa9b231', 'Jeff', null, null, null
That is why I use masterData to separate them. I think push() should not be taken out of the map(), because rows may return nothing. Could it be that map() has finished while push() is still running?
==== P.S. func.connPool ====
let mysql = require('mysql');
let db = require('../configs/db');
let pool = mysql.createPool(db);
module.exports = {
  connPool(sql, val, cb) {
    pool.getConnection((err, conn) => {
      if (err) {
        console.log('Connection Error:' + err);
        cb(err, null, null);
      } else {
        console.log('allConnections:' + pool._allConnections.length);
        let q = conn.query(sql, val, (err, rows, fields) => {
          pool.releaseConnection(conn);
          if (err) {
            console.log('Query:' + sql + ' error:' + err);
          }
          cb(err, rows, fields);
        });
      }
    });
  },
};
What I suspect is that the push operation is somehow delayed because of some code that is not shown here (I am not certain yet).
I ran the following code many times and still could not reproduce your problem.
var rows = [
  {
    id: "123",
    name: "test",
  },
  {
    id: "123",
    name: "test",
  },
  {
    id: "123",
    name: "test",
  },
];
let masterData = [];
let details = rows.map((row, idx) => {
  if (idx === 0) {
    masterData.push({
      id: row.id,
      name: row.name
    });
  }
  return {
    id: row.id,
    name: row.name,
  };
});
if (masterData.length > 0) {
  console.log("in");
} else {
  console.log(masterData);
  console.log('=======================');
}
Could you run this code and check whether it goes to the else branch?
From this piece of code you are pushing to masterData only the first row's id and name (that is what the if conditional for idx === 0 specifies).
If that's the case, you don't need the push inside the map at all.
You can take it out of the map and leave the iterator to create only the details array.
You can go with:
let details = rows.map(row => ({
  operator: row.operator_info,
  comments: row.cmt,
  status: row.sta
}));
let masterData = [{ id: rows[0].id, name: rows[0].name, details }];
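One caveat with this refactor (my addition, not part of the original answer): if rows can ever be an empty array, rows[0] will be undefined and this will throw, so guard for it:
let masterData = rows.length
  ? [{ id: rows[0].id, name: rows[0].name, details }]
  : [];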

express JS exit API before promise resolving

In the mapping I have two objects that go to the default case in the switch, and one record that goes to the ORDER_OPEN case (its object won't enter the if statements and is simply pushed to orderArray). But when the API executes I only receive the two objects from default, and when I log orderArray I can see that it is pushed to only after the API has already responded.
router.get('/orderByPhone/:id', async (req, res) => {
  const { ORDER_OPEN, ORDER_FILL, BITY_FILL, BITY_CANCEL, getOrderStatusValue } = require('../../lib/constants/orderStatus');
  const statusUtils = require('../../lib/constants/orderStatus');
  const apiUtils = require('../../lib/apiUtils');
  const neo4jUtils = require('../../lib/neo4jUtils');
  const orderArray = [];
  try {
    const id = req.params.id;
    const response = await neo4jUtils.getOrders(1, id);
    response.records.map(async (record) => {
      switch (record._fields[0].properties.orderStatus) {
        case ORDER_OPEN:
          const ret = await apiUtils.fetchOrderStatus(record._fields[0].properties.bityId, record._fields[0].properties.token);
          if (ret.legacy_status == BITY_FILL) {
            await neo4jUtils.updateOrderStatus(record._fields[0].properties.bityId, getOrderStatusValue(ret.legacy_status));
          } else if (ret.legacy_status == BITY_CANCEL) {
            await neo4jUtils.updateOrderStatus(record._fields[0].properties.bityId, getOrderStatusValue(ret.legacy_status));
          }
          orderArray.push({
            input: {
              amount: ret.input.amount,
              currency: ret.input.currency
            },
            output: {
              amount: ret.output.amount,
              currency: ret.output.currency
            },
            status: {
              status: statusUtils.getOrderStatusValue(ret.legacy_status)
            }
          });
          break;
        case ORDER_FILL:
          orderArray.push({
            input: {
              amount: record._fields[0].properties.fromAmount,
              currency: record._fields[0].properties.fromCurrency
            },
            output: {
              amount: record._fields[0].properties.toAmount,
              currency: record._fields[0].properties.toCurrency
            },
            status: {
              status: record._fields[0].properties.orderStatus
            }
          });
          break;
        default:
          orderArray.push({
            input: {
              amount: record._fields[0].properties.fromAmount,
              currency: record._fields[0].properties.fromCurrency
            },
            output: {
              amount: record._fields[0].properties.toAmount,
              currency: record._fields[0].properties.toCurrency
            },
            status: {
              status: record._fields[0].properties.orderStatus
            }
          });
          break;
      }
    });
  } catch (error) {
    res.status(500).send(errorHandleing.FiveZeroZero);
  }
  res.status(200).json(orderArray);
});
response.records.map(async (record) => {...}) runs synchronously and returns an array of promises; your code never waits for the work inside {...} to finish. That is the main reason your request responds so quickly.
The correct way is to wait until all the jobs are finished:
let promises = response.records.map(async (record) => { /* ... */ });
await Promise.all(promises); // waiting....
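Note that res.status(200).json(orderArray) also has to run after that await (and the await must stay inside the try), otherwise the response still fires before the pushes happen. A sketch of the reworked handler body, using only names from the question:
try {
  const id = req.params.id;
  const response = await neo4jUtils.getOrders(1, id);
  const promises = response.records.map(async (record) => {
    // ... the switch statement from the question, pushing into orderArray ...
  });
  await Promise.all(promises); // wait for every push to complete
  res.status(200).json(orderArray); // respond only once the array is filled
} catch (error) {
  res.status(500).send(errorHandleing.FiveZeroZero);
}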

rxjs subscribing late results to empty stream

I have the following piece of code. As is, with a couple of lines commented out, it works as expected. I subscribe to a stream, do some processing and stream the data to the client. However, if I uncomment the comments, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe late to the stream and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import { elasticClient } from '../helpers/elasticClient';

const { ObjectId } = imongo;

function searchElastic({ query, sort }, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;
  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({ _scroll_id, hits: { hits } }) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if (hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if(limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}
          const handleDoc = (err, res) => {
            if (err) {
              subject.onError(err);
              return;
            }
            const { _scroll_id, hits: { hits } } = res;
            if (!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if(limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({ scroll, scrollId: _scroll_id },
                  handleDoc));
            }
          };
          setImmediate(() =>
            elasticClient.scroll({ scroll, scrollId: _scroll_id },
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });
      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
    .map(entry => {
      entry.id = entry._id.toString();
      delete entry._id;
      return entry;
    })
    // we don't care about the results
    // but have to wait for stream to finish
    .defaultIfEmpty()
    .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const { parentSearchFilter, filter, format } = query;
  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if (!ancestors.length) {
        return Rx.Observable.empty();
      }
      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });
      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if (!count) {
            return Rx.Observable.empty();
          }
          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch (e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method like the pattern below.
convert elasticClient.scroll to an observable first
set up the initial data for elasticClient.search()
when the search resolves, you get your scroll id
the expand() operator lets you recursively execute the elasticClientScroll observable
use map to select the data you want to return
use takeWhile to decide when to complete the stream
The end result: once you call searchElastic().subscribe(), the stream emits continuously until there is no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  const elasticClientScroll = Observable.fromCallback(elasticClient.scroll);
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  };
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id }).map(() =>
        // .. select the res you want to return
      );
    }).takeWhile(res => res.hits.length);
}
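Once that's filled in, consuming the stream could look like this (my sketch, RxJS 4 style subscribe):
searchElastic({ query, sort }, 10)
  .subscribe(
    (hit) => console.log('hit', hit),
    (err) => console.error(err),
    () => console.log('done')
  );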

Categories

Resources