I'm having some performance issues with my Parse queries, using the Parse SDK.
On some requests, I have to fetch resources related to other ones. E.g.:
I get all elements from ClassA.
For each ClassA element, I have to find all ClassB and ClassC elements that contain a pointer to the relevant ClassA entry.
Right now, to sum up:
I query all my ClassA elements.
I loop over the results and, for each result, create a Promise (Parse query) to get the ClassB elements and another one (Parse query) to get the ClassC elements.
My issue is that this solution fires a lot of Parse queries, and therefore a lot of Promises I have to wait for. The odd part is that on my local environment everything works fine: the whole request takes 1s or less (it's quite heavy, but it runs 45 Parse queries in the current case, based on my class entries).
On my production server, with the same code, same data, and same Node version, the same request takes 30s or more (=> timeout).
This is the relevant part of the code (it's old and messy):
router.get('/:userId/company/customers', (req, res, next) => {
  if ('company' in req.jwtData.data) {
    const company = req.jwtData.data.company;
    const query = new Parse.Query('Cards');
    const Companies = Parse.Object.extend('Companies');
    const currentCompany = new Companies({ id: company.objectId });
    query.equalTo('company', currentCompany).find().then((cards) => {
      if (cards.length) {
        const customersArrayId = cards.map(card => card.toJSON().user.objectId);
        const usersQuery = new Parse.Query('_User')
          .containedIn('objectId', customersArrayId)
          .find({ useMasterKey: true });
        usersQuery.then(customersResponse => {
          const customers = [];
          const customersPromises = [];
          if (customersResponse.length) {
            for (let index = 0; index < customersResponse.length; index++) {
              let customer = {
                ...customersResponse[index].toJSON(),
                ...customersResponse[index].attributes
              };
              const customerPromises = [];
              const customerId = customer.objectId;
              const stamps = new Parse.Query('Stamps')
                .equalTo('user', new UserModel({ objectId: customerId }))
                .equalTo('company', currentCompany)
                .limit(CONSTANTS.QUERY_MAX_LIMIT)
                .find()
                .then((stamps) => {
                  return stamps;
                })
                .catch(error => {
                  res.json({
                    success: false,
                    error
                  });
                });
              const cards = new Parse.Query('Cards')
                .equalTo('user', new UserModel({ objectId: customerId }))
                .equalTo('company', currentCompany)
                .limit(CONSTANTS.QUERY_MAX_LIMIT)
                .find()
                .then((cards) => {
                  return cards;
                })
                .catch(error => {
                  res.json({
                    success: false,
                    error
                  });
                });
              customers.push(customer);
              customerPromises.push(stamps);
              customerPromises.push(cards);
              customersPromises.push(customerPromises);
            }
            if (customersPromises.length) {
              const allPromises = customersPromises.map(customerP => Promise.all(customerP));
              Promise.all(allPromises).then((customerPromiseResponses) => {
                console.log('allPromises done, mapping all users data...');
                for (let index = 0; index < customerPromiseResponses.length; index++) {
                  const customerResponseData = customerPromiseResponses[index];
                  const stamps = customerResponseData[0];
                  const cards = customerResponseData[1];
                  const companyEmailAllowed = () => {
                    let check = false;
                    if (customers[index].hasOwnProperty('companiesNewsletterAgreements')) {
                      check = customers[index].companiesNewsletterAgreements.indexOf(company.objectId) > -1;
                    }
                    return check;
                  };
                  customers[index] = {
                    ...customers[index],
                    email: companyEmailAllowed() ? customers[index].email : null,
                    stamps,
                    cards,
                  };
                }
                res.json({
                  success: true,
                  data: customers
                });
              }).catch(error => {
                res.json({
                  success: false,
                  error
                });
              });
            } else {
              res.json({
                success: true,
                data: customers
              });
            }
          } else {
            res.json({
              success: true,
              data: customers
            });
          }
        });
      } else {
        res.json({
          success: true,
          data: []
        });
      }
    });
  } else {
    res.json({
      success: false,
      error: "No company found."
    });
  }
});
My question is: is it possible to get the same data in a more performant way (fewer Parse queries, fewer Promises)?
I actually found a solution. Instead of starting a Parse query for each result I find, I request the whole set of elements I need up front, using containedIn.
Then, I filter the results to assign the elements to their customer.
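Roughly, a minimal sketch of that approach, reusing the variables from the snippet above (customersArrayId, currentCompany, customers, UserModel); the byCustomer helper is illustrative, not from the original code:
// Build pointers for every customer once, then fetch everything in two queries.
const customerPointers = customersArrayId.map(id => new UserModel({ objectId: id }));

const stampsQuery = new Parse.Query('Stamps')
  .equalTo('company', currentCompany)
  .containedIn('user', customerPointers)
  .limit(CONSTANTS.QUERY_MAX_LIMIT)
  .find();

const cardsQuery = new Parse.Query('Cards')
  .equalTo('company', currentCompany)
  .containedIn('user', customerPointers)
  .limit(CONSTANTS.QUERY_MAX_LIMIT)
  .find();

Promise.all([stampsQuery, cardsQuery]).then(([allStamps, allCards]) => {
  // Group the two flat result sets by the customer they point to.
  const byCustomer = (items, id) =>
    items.filter(item => item.get('user').id === id);

  const customersWithData = customers.map(customer => ({
    ...customer,
    stamps: byCustomer(allStamps, customer.objectId),
    cards: byCustomer(allCards, customer.objectId),
  }));

  res.json({ success: true, data: customersWithData });
});
This replaces the 2-queries-per-customer fan-out with a constant two queries plus an in-memory grouping pass.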
What I have set up for my Firestore database is one collection called 'funkoPops'. It has documents that are genres of Funko Pops, each with an array field funkoData that holds all the pops for that genre. It looks like this below.
I should also note that the funkoPops collection has hundreds of 'genre' documents (each genre is basically a Funko Pop series), each with the funkoData array that I web scraped. I now need to be able to search through the 'funkoData' array field and match its name field against a given search parameter.
collection: funkoPops => document: 2014 Funko Pop Marvel Thor Series => fields: funkoData: [
  {
    image: "string to hold image",
    name: "Loki - with helmet",
    number: "36"
  },
  {
    image: "string to hold image",
    name: "Black and White Loki with Helmet - hot topic exsclusive",
    number: "36"
  },
  {
    etc...
  }
]
So how could I run a Firestore query that searches the collection('funkoPops') documents by the name field inside that array?
I can already search by genre like so, which gives the genre back along with the document's array of data:
const getFunkoPopGenre = async (req, res, next) => {
  try {
    console.log(req.params);
    const genre = req.params.genre;
    const funkoPop = await firestore.collection("funkoPops").doc(genre);
    const data = await funkoPop.get();
    if (!data.exists) {
      res.status(404).send("No Funko Pop found with that search parameter");
    } else {
      res.send(data.data());
    }
  } catch (error) {
    res.status(400).send(error.message);
  }
};
What I am trying to use to search by the name field is below; it returns an empty object:
const getFunkoPopName = async (req, res, next) => {
  try {
    const name = req.params.name;
    console.log({ name });
    const funkoPop = await firestore
      .collection("funkoPops")
      .whereEqualTo("genre", name);
    const data = await funkoPop.get();
    console.log(data);
    res.send(data.data());
  } catch (error) {
    res.status(400).send(error);
  }
};
Any help would be great, thanks!
So the way I went about answering this, as the top comment suggested and as a little more research on Firebase confirmed: Firestore queries have to match a full string. Instead, I query all docs in the collection, add them to an array, and then go over each doc's funkoData with forEach(). From there I create a matchArr and loop through the funkoData arrays; inside that loop I filter the array of data, matching the name field with .includes(searchParam), push all the matches into matchArr, and res.send(matchArr). It works for partial strings as well, since .includes() matches both full strings and substrings. Not sure if that is the best and most efficient way, but I am able to query through probably over 20k records in 1-2 seconds and find all the matches. The code looks like this:
try {
  const query = req.params.name.trim().toLowerCase();
  console.log({ query });
  const funkoPops = await firestore.collection("test");
  const data = await funkoPops.get();
  const funkoArray = [];
  if (data.empty) {
    res.status(404).send("No Funko Pop records found");
  } else {
    data.forEach((doc) => {
      const funkoObj = new FunkoPop(doc.data().genre, doc.data().funkoData);
      funkoArray.push(funkoObj);
    });
    const matchArr = [];
    funkoArray.forEach((funko) => {
      const genre = funko.genre;
      const funkoData = funko.funkoData;
      const matches = funkoData.filter((data) =>
        data.name.toLowerCase().includes(query)
      );
      if (Object.keys(matches).length > 0) {
        matchArr.push({
          matches,
          genre,
        });
      }
    });
    if (matchArr.length === 0) {
      res.status(404).send(`No Funko Pops found for search: ${query}`);
    } else {
      res.send(matchArr);
    }
  }
} catch (error) {
  res.status(400).send(error.message);
}
With a little bit of tweaking, I am able to search any field in my database and match both full strings and substrings.
Update
I ended up combining the genre, name, and number searches into one function, so that whenever someone searches, the query param is used for all 3 searches at once and gives back data on all 3 searches as one object, and we can do whatever we like with it on the front end:
const getFunkoPopQuery = async (req, res) => {
  try {
    console.log(req.params);
    const query = req.params.query.trim().toLowerCase();
    const funkoPops = await firestore.collection("test");
    const data = await funkoPops.get();
    const funkoArr = [];
    if (data.empty) {
      res.status(404).send("No Funko Pop records exist");
    } else {
      data.forEach((doc) => {
        const funkoObj = new FunkoPop(doc.data().genre, doc.data().funkoData);
        funkoArr.push(funkoObj);
      });
      // genre matching if query is not a number
      let genreMatches = [];
      if (isNaN(query)) {
        genreMatches = funkoArr.filter((funko) =>
          funko.genre.toLowerCase().includes(query)
        );
      }
      if (genreMatches.length === 0) {
        genreMatches = `No funko pop genres with search: ${query}`;
      }
      // name & number matching
      const objToSearch = {
        notNullNameArr: [],
        notNullNumbArr: [],
        nameMatches: [],
        numbMatches: [],
      };
      funkoArr.forEach((funko) => {
        const genre = funko.genre;
        if (funko.funkoData) {
          const funkoDataArr = funko.funkoData;
          funkoDataArr.forEach((data) => {
            if (data.name) {
              objToSearch.notNullNameArr.push({
                funkoData: [data],
                genre: genre,
              });
            }
            if (data.number) {
              objToSearch.notNullNumbArr.push({
                funkoData: [data],
                genre: genre,
              });
            }
          });
        }
      });
      // find name that includes query
      objToSearch.notNullNameArr.forEach((funko) => {
        const genre = funko.genre;
        const name = funko.funkoData.filter((data) =>
          data.name.toLowerCase().includes(query)
        );
        if (Object.keys(name).length > 0) {
          objToSearch.nameMatches.push({
            genre,
            name,
          });
        }
      });
      // find number that matches query
      objToSearch.notNullNumbArr.forEach((funko) => {
        const genre = funko.genre;
        const number = funko.funkoData.filter((data) => data.number === query);
        if (Object.keys(number).length > 0) {
          objToSearch.numbMatches.push({
            genre,
            number,
          });
        }
      });
      if (objToSearch.nameMatches.length === 0) {
        objToSearch.nameMatches = `No funko pops found with search name: ${query}`;
      }
      if (objToSearch.numbMatches.length === 0) {
        objToSearch.numbMatches = `No funko pop numbers found with search: ${query}`;
      }
      const searchFinds = {
        genre: genreMatches,
        name: objToSearch.nameMatches,
        number: objToSearch.numbMatches,
      };
      res.send(searchFinds);
    }
  } catch (error) {
    res.status(400).send(error.message);
  }
};
If anyone is well versed in backend work and knows more about Firestore querying, please let me know!
I have the following piece of code. As is, with a couple of lines commented out, it works as expected: I subscribe to a stream, do some processing, and stream the data to the client. However, if I uncomment those lines, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe to the stream late and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import {elasticClient} from '../helpers/elasticClient';

const {ObjectId} = imongo;

function searchElastic({query, sort}, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;
  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({_scroll_id, hits: {hits}}) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if(hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if(limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}
          const handleDoc = (err, res) => {
            if(err) {
              subject.onError(err);
              return;
            }
            const {_scroll_id, hits: {hits}} = res;
            if(!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if(limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({scroll, scrollId: _scroll_id},
                  handleDoc));
            }
          };
          setImmediate(() =>
            elasticClient.scroll({scroll, scrollId: _scroll_id},
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });
      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
  .map(entry => {
    entry.id = entry._id.toString();
    delete entry._id;
    return entry;
  })
  // we don't care about the results
  // but have to wait for stream to finish
  .defaultIfEmpty()
  .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const {parentSearchFilter, filter, format} = query;
  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if(!ancestors.length) {
        return Rx.Observable.empty();
      }
      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });
      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if(!count) {
            return Rx.Observable.empty();
          }
          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch(e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method like the pattern below.
Convert elasticClient.scroll to an observable first.
Set up the initial data for elasticClient.search().
When the search is resolved, you should get your scroll id.
The expand() operator lets you recursively execute the elasticClientScroll observable.
Use map to select the data you want to return.
Use takeWhile to decide when to complete this stream.
The correct result is that once you call searchElastic().subscribe(), the stream emits continuously until there's no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  const elasticClientScroll = Observable.fromCallback(elasticClient.scroll)
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  }
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id })
        .map(res => res) // .. select the res you want to return
    }).takeWhile(res => res.hits.length)
}
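To illustrate the last point, a minimal usage sketch of the structure above; the match_all query and '_doc' sort are placeholders, not from the original code:
// assumes the searchElastic sketch above; emits one scroll page at a time
searchElastic({ query: { match_all: {} }, sort: ['_doc'] }, 10)
  .subscribe(
    res => console.log('got a scroll page', res),
    err => console.error(err),
    () => console.log('all data fetched')
  );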
I have a nodejs/express server and I'm trying to merge and sort sorted results from multiple mongodb collections in order to create a sorted CSV file. The way I achieve this requires that I keep the mongodb cursors alive (no timeout) until I read/exhaust all data, or until an error occurs, in which case I have to close them manually. It seems to work when there aren't many data points. However, when the mongo queries request data for one year for example, at some point after almost half an hour, I get the following mongo error: Cursor not found: cursor id: 59427962835.
Promises are bluebird promises. Written in Typescript.
import * as _ from 'lodash';
import * as moment from 'moment-timezone';

function findNative(db, collection, spec = {}) {
  const {query, fields, sort, limit, skip, hint, timeout = true} = spec;
  // internal function that gets a connection from the connection pool
  // returns promise with connection
  return ensureConnection(db)
    .then(connection => {
      const cursor = connection.collection(collection).find(
        query || {},
        {fields, sort, limit, skip, hint, timeout});
      // For sorted queries we have to limit batchSize
      // see https://jira.mongodb.org/browse/SERVER-14228
      if (connection.serverConfig.capabilities().maxWireVersion == 0 && sort && !limit) {
        cursor.batchSize(0);
      }
      return cursor;
    });
}

function getMongoStream(col, startdate, enddate) {
  return findNative('testDb', col, {
    query: { t: { $gte: startdate, $lte: enddate }},
    sort: { t: 1 },
    fields: { i: 0, _id: 0 },
    timeout: false
  });
}

async function fetchNextCursorData(cursor) {
  const hasMore = await cursor.hasNext();
  console.log(hasMore, cursor.cursorState.cursorId.toString());
  return hasMore ? cursor.next() : Promise.resolve(null);
}

function findEarliestDate(buffer: any[]): [string, number[]] {
  let earliestDateMS;
  const indices = _(buffer)
    .map(x => x && x.t.getTime())
    .forEach(t => {
      // make sure timestamp is defined
      // buffer also contains null values
      if(t && (!earliestDateMS || (earliestDateMS && t < earliestDateMS))) {
        earliestDateMS = t;
      }
    })
    .reduce((acc, t, i) => {
      if(t === earliestDateMS) {
        acc.push(i);
      }
      return acc;
    }, []);
  return [moment(earliestDateMS).utc().format('YYYY-MM-DD HH:mm:ss.SSS'), indices];
}

function closeAllCursors(cursors: any[]) {
  const openCursors = cursors
    .filter(c => !c.isClosed());
  openCursors.forEach(c => c.close());
}

async function csvData(req, res) {
  const collections: string[] = req.swagger.params.collections.value.split(',').sort(),
    sources: string[] = req.swagger.params.sources.value.split(',').sort(),
    startdate = new Date(Number(req.swagger.params.startdate.value)),
    enddate = new Date(Number(req.swagger.params.enddate.value));
  const filename = `${moment.utc().format('YYYY-MM-DD_HH:mm')}.csv`;
  res.set({
    'Content-Type': 'text/csv',
    'Content-Disposition': `attachment; filename="${filename}"`
  });
  res.write('Date UTC,' + sources.join(',') + '\n');
  const colPromises = collections.map(col => getMongoStream(col, startdate, enddate));
  let cursorsMap: { [rec: string]: any; };
  try {
    let buffer = [], dateCSVBuffer: any[] = _.fill(Array(sources.length), '');
    // fetch first doc from all cursors
    const cursors = await Promise.all(colPromises);
    cursorsMap = _.zipObject<any>(collections, cursors);
    let docs = await Promise.all(cursors.map(fetchNextCursorData));
    // initial request made for all collections
    let requestedIdx = _.range(0, collections.length);
    while(true) {
      docs.forEach((doc, i) => {
        buffer[requestedIdx[i]] = doc;
      });
      // null indicates that cursor won't return more data =>
      // all cursors are exhausted
      if(buffer.every(d => d === null)) {
        break;
      }
      const [date, indices] = findEarliestDate(buffer);
      requestedIdx = indices;
      indices.forEach(idx => {
        // update csv buffer
        const {data} = buffer[idx];
        Object.keys(data)
          .forEach(ch => {
            const sourceIndex = sources.indexOf(ch);
            if(sourceIndex > -1) {
              dateCSVBuffer[sourceIndex] = data[ch];
            }
          });
        // remove doc from buffer
        buffer[idx] = null;
      });
      // send csv string
      dateCSVBuffer.unshift(date);
      res.write(dateCSVBuffer.join(',') + '\n');
      // empty buffer
      dateCSVBuffer = dateCSVBuffer.map(() => '');
      // request new entry from cursors
      const nextDocPromises = indices
        .map(idx => cursorsMap[collections[idx]])
        .map(fetchNextCursorData);
      docs = await Promise.all(nextDocPromises);
    }
    // end data stream
    res.end();
  } catch(err) {
    // make sure to close all cursors
    // will catch all nested promise errors
    closeAllCursors(_.values(cursorsMap));
    console.error(err);
    res.status(500).json(err);
  }
}
MongoDB connection created with the following options:
{
  auto_reconnect: true,
  poolSize: 30,
  connectTimeoutMS: 90000
}
Could the problem be that I keep the cursor references in the map and thus they are not updated? And that when I call cursor.hasNext(), the cursor is already dead? I also tried checking cursor.isClosed(), but it always returns false.
The MongoDB driver is "mongodb": "2.2.15" and the queries are tested against a v3.0 database.
EDIT: I did a small count test to see how many docs had been processed by the time the program crashes.
The 3 cursors (the test case requested data from only 3 collections) have the following counts and ids:
3097531 '59427962835'
31190333 '53750510295'
32007475 '101213786015'
and the last document processed by the cursor with id '59427962835' was number 4101, so not even close to finishing.
It turns out that adding the timeout option to the find query doesn't work. I had to use the noCursorTimeout flag like so:
const cursor = connection.collection(collection)
  .find(query || {}, {fields, sort, limit, skip, hint})
  .addCursorFlag('noCursorTimeout', !timeout);
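Worth noting: with noCursorTimeout set, the server never reaps an idle cursor on its own, so manual cleanup like the closeAllCursors call in the question is what prevents leaking cursors when a request fails or is aborted.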
I am using cheerio and Node to do web scraping, but I have a problem with promises. I can scrape an article list from a page, but that list contains links to single pages that I need to scrape as well, one for each item on the list.
I will show you my code for better understanding.
import rp from 'request-promise'
import cheerio from 'cheerio'
import conn from './connection'

const flexJob = `https://www.flexjobs.com`
const flexJobCategory = ['account-management', 'bilingual']

class WebScraping {
  //list of articles, e.g. for page 2
  results = [] // [[title], [link for page],...]
  contentPage = [] //content for each page

  scrapeWeb(link) {
    let fullLink = `${link}/jobs/${flexJobCategory[1]}?page=2`
    const options = {
      uri: fullLink,
      transform(body) {
        return cheerio.load(body)
      }
    }
    rp(options)
      .then(($) => {
        console.log(fullLink)
        $('.featured-job').each((index, value) => {
          //html nodes
          let shortDescription = value.children[1].children[1].children[3].children[1].children[1].children[0].data
          let link = value.children[1].children[1].children[1].children[1].children[1].children[0].attribs.href
          let pageLink = flexJob + '' + link
          let title = value.children[1].children[1].children[1].children[1].children[1].children[0].children[0].data
          let place = value.children[1].children[1].children[1].children[1].children[3].children[1].data
          let jobType = value.children[1].children[1].children[1].children[1].children[3].children[0].children[0].data
          this.results.push([title, '', pageLink.replace(/\s/g, ''), '', shortDescription.replace(/\n/g, ''), place, jobType, 'PageContent::: '])
        })
      })
      .then(() => {
        this.results.forEach(element => {
          console.log('link: ', element[2])
          this.scrapePage(element[2])
        });
      })
      .then(() => {
        console.log('print content page', this.contentPage)
      })
      .then(() => {
        //this.insertIntoDB()
        console.log('insert into db')
      })
      .catch((err) => {
        console.log(err)
      })
  }

  /**
   * It's going to scrape all pages from the list of jobs
   * @param {Any} pageLink
   * @param {Number} count
   */
  scrapePage(pageLink) {
    let $this = this
    //console.log('We are in ScrapePage' + pageLink + ': number' + count)
    //this.results[count].push('Hello' + count)
    let content = ''
    const options = {
      uri: pageLink,
      transform(body) {
        return cheerio.load(body)
      }
    }
    rp(options)
      .then(($) => {
        //this.contentPage.push('Hello' + ' : ');
        console.log('Heloo')
      })
      .catch((err) => {
        console.log(err)
      })
  }

  /**
   * This method is going to insert data into the database
   */
  insertIntoDB() {
    conn.connect((err) => {
      var sql = "INSERT INTO contact (title, department, link, salary, short_description, location, job_type, page_detail) VALUES ?"
      var values = this.results
      conn.query(sql, [values], function (err) {
        if (err) throw err
        conn.end()
      })
    })
  }
}

let webScraping = new WebScraping()
let scrapeList = webScraping.scrapeWeb(flexJob)
So, in the 'scrapeWeb' method, in the second '.then', I am calling the 'scrapePage' method; however, the third '.then' executes before the promises inside 'scrapePage' have resolved.
You need a little more control flow at that stage. You do not want that .then()'s promise to resolve until all the calls have resolved.
You could use a Promise library like bluebird to do a Promise.each or a Promise.map over all the results you want to run (see the bluebird sketch after the loop below).
Or use async/await, set the handler up like .then(async () => {}), and do not use .forEach:
for (let element of this.results) {
  console.log('link: ', element[2])
  await this.scrapePage(element[2])
}
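And a minimal sketch of the bluebird option; scrapeAllPages and the concurrency value are illustrative, and it assumes scrapePage returns its promise (which the next answer covers):
const BPromise = require('bluebird')

// Scrapes every child page, at most 5 in flight at a time; the returned
// promise resolves only after all scrapePage() promises have settled.
function scrapeAllPages(scraper) {
  return BPromise.map(
    scraper.results,
    element => scraper.scrapePage(element[2]), // the page link sits at index 2
    { concurrency: 5 }
  )
}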
You have a race condition problem.
The first tweak you'll need is having scrapePage return a Promise.
scrapePage(pageLink) {
  let $this = this
  let content = ''
  const options = {
    uri: pageLink,
    transform(body) {
      return cheerio.load(body)
    }
  }
  return rp(options);
}
In the second then, you need to invoke the scraping of all the child pages, e.g.:
.then(() => {
  // each result stores the page link at index 2
  return Promise.all(this.results.map(element => this.scrapePage(element[2])));
})
This wraps all the child-page scrapes into promises, and only when all of them have resolved will the code flow continue.
I'm new to NodeJS and I have a problem I don't understand.
In this function, I call several APIs one after another to retrieve some data about a movie. The result isn't always the same. Most of the time the result is correct, but sometimes it isn't complete.
I tried using then to chain the API calls, but it doesn't seem to work.
Any idea why the result isn't always the same? Any help would be appreciated.
// test fetchData(456165)
function fetchData(filmid) {
  let average = array => array.reduce((a, b) => a + b) / array.length
  var notes = []
  mdb.movieInfo(
    {
      id: filmid,
      language: 'fr'
    },
    (err, resOmdb) => {
      notes.push(parseFloat(resOmdb.vote_average))
      imdb
        .getById(resOmdb.imdb_id, {
          apiKey: 'e9d59b68',
          timeout: 3000
        })
        .then(
          allocine.api(
            'search', {
              q: `${resOmdb.title}`,
              filter: 'movie'
            },
            function(error, resAllo) {
              if (error) {
                return
              }
              allocine.api(
                'movie', {
                  code: `${resAllo.feed.movie[0].code}`
                },
                function(error, result) {
                  if (error) {
                    return
                  }
                  notes.push(parseFloat(result.movie.statistics.userRating) * 2)
                }
              )
              // doesn't seem to execute all the time
              allocine.api(
                'showtimelist', {
                  zip: 44260,
                  movie: resAllo.feed.movie[0].code
                },
                function(error, resultCin) {
                  if (error) {
                    return
                  }
                  // sometimes doesn't appear in the result
                  resOmdb.cinemas = resultCin
                }
              )
            }
          )
        )
        .then(
          function(result) {
            notes.push(parseFloat(result.rating))
            resOmdb.vote_average = average(notes).toFixed(2)
            // check the result
            console.log(util.inspect(resOmdb, false, null))
          },
          function(error) {
            return
          }
        )
    }
  )
}
First of all, you should decide whether you want to use Promises or not.
If you do, promisify all functions. The next thing you need to do is return your promises when they are used inside a function.
In your case, the promise of your first imdb API call is probably not being returned.
Next, you should check whether your Node version supports async/await.
Then you can easily do your API calls without any distractions.
'use strict';

const Promise = require('bluebird');
const mdb = Promise.promisifyAll(require('mdb'));
const allocine = Promise.promisifyAll(require('allocine-api'));

// test fetchData(456165)
async function fetchData(filmId) {
  const notes = [];
  const resOmdb = await mdb.movieInfoAsync({ id: filmId });
  notes.push(parseFloat(resOmdb.vote_average));
  const imdbResult = await imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 });
  const resAllo = await allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' });
  // and so on ...
}
UPDATE:
To speed up your function, you can perform the requests concurrently.
To do so, use Promise.join:
const [imdbResult, allocineResult] = await Promise.join(
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
);
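A side note on that pattern: bluebird's Promise.join is meant for a fixed number of discrete promises; on current Node versions, a plain await Promise.all([...]) gives you the same concurrency without the extra dependency.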