NodeJS returned data sometimes changes - javascript

I'm new to NodeJS and I have a problem I don't understand.
In this function, I call several APIs one after another to retrieve data about a movie. The result isn't always the same: most of the time it is correct, but sometimes it is incomplete.
I tried using .then() to chain the API calls, but it doesn't seem to work.
Any idea why the result isn't always the same? Any help would be appreciated.
// test fetchData(456165)
function fetchData(filmid) {
  let average = array => array.reduce((a, b) => a + b) / array.length
  var notes = []

  mdb.movieInfo({
      id: filmid,
      language: 'fr'
    },
    (err, resOmdb) => {
      notes.push(parseFloat(resOmdb.vote_average))

      imdb
        .getById(resOmdb.imdb_id, {
          apiKey: 'e9d59b68',
          timeout: 3000
        })
        .then(
          allocine.api(
            'search', {
              q: `${resOmdb.title}`,
              filter: 'movie'
            },
            function(error, resAllo) {
              if (error) {
                return
              }

              allocine.api(
                'movie', {
                  code: `${resAllo.feed.movie[0].code}`
                },
                function(error, result) {
                  if (error) {
                    return
                  }
                  notes.push(parseFloat(result.movie.statistics.userRating) * 2)
                }
              )

              // doesn't seem to execute all the time
              allocine.api(
                'showtimelist', {
                  zip: 44260,
                  movie: resAllo.feed.movie[0].code
                },
                function(error, resultCin) {
                  if (error) {
                    return
                  }
                  // sometimes doesn't appear in the result
                  resOmdb.cinemas = resultCin
                }
              )
            }
          )
        )
        .then(
          function(result) {
            notes.push(parseFloat(result.rating))
            resOmdb.vote_average = average(notes).toFixed(2)

            // check the result
            console.log(util.inspect(resOmdb, false, null))
          },
          function(error) {
            return
          }
        )
    }
  )
}

First of all, you should decide whether you want to use Promises or not.
If you do, promisify all the functions. The next thing you need to do is return your promises when they are used inside a function.
In your case, your first imdb API call is probably not returned.
Next, check whether your Node version supports async/await.
Then you can do your API calls without any distractions.
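To see why the missing return matters, here is a minimal, self-contained sketch (the delay helper is hypothetical, standing in for any async API call):

const delay = (ms, value) => new Promise(resolve => setTimeout(() => resolve(value), ms));

delay(10, 'start')
  .then(() => {
    delay(100, 'forgotten'); // NOT returned: the chain moves on without waiting
  })
  .then(() => delay(100, 'awaited')) // returned: the next .then waits for this value
  .then(value => console.log(value)); // logs 'awaited'

With that in mind, a promisified rewrite could look like this: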
'use strict';

const Promise = require('bluebird');
const mdb = Promise.promisifyAll(require('mdb'));
const allocine = Promise.promisifyAll(require('allocine-api'));
const imdb = Promise.promisifyAll(require('imdb')); // assumption: promisify whichever imdb client the question uses

// test fetchData(456165)
async function fetchData(filmId) {
  const notes = [];

  const resOmdb = await mdb.movieInfoAsync({ id: filmId });
  notes.push(parseFloat(resOmdb.vote_average));

  const imdbResult = await imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 });
  const resAllo = await allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' });

  // and so on ...
}
UPDATE:
To speed up your function, you can run the requests concurrently.
To do so, use Promise.join:
const [imdbResult, allocineResult] = await Promise.join(
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
);
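For reference, native Promise.all gives the same concurrency without Bluebird; a sketch reusing the same calls (in the same async context as above):

const [imdbResult, allocineResult] = await Promise.all([
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' }),
]);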

Related

Fetch() running out of order

I am trying to have an API pull a word and set it to state. Then a function reads that state and completes its designed purpose. However, the way I originally coded it, things ran out of order. The third code snippet runs successfully, but I am not sure why. Could someone explain the difference, and why the original version did not work?
Below is the API function followed by the second function.
wordNikApi = () => {
  fetch("http://api.wordnik.com:80/v4/words.json/randomWords?hasDictionaryDef=true&minCorpusCount=0&minLength=5&maxLength=15&limit=1&api_key=/* Removed */")
    .then( res => res.json() )
    .then( ( result ) => {
      this.setState({
        apiWord: result[0].word,
      });
      console.log("wordNikApi: ", this.state.apiWord);
    })
    .catch( ( error ) => {
      console.log("API ERROR: ", error);
    })
};
resetGame = () => {
  console.log("resetGame");
  this.wordNikApi();
  this.setState({
    word: [],
    count: 0,
    allAttempts: [],
    letterIndex: [],
    numberOfBadAttempts: 0,
    remainingAttempts: 6,
    repeat: false,
    pageLock: false,
    invalidKey: false,
  }, () => {
    console.log("resetGame: function 1");
    console.log(this.state.apiWord);
    let fullWord = "word";
    let wordArray = fullWord.split("");
    let wordLength = wordArray.length;
    // Send wordObj to state with value and index
    let wordObj = wordArray.map((value, index) => {
      return {
        found: false,
        val: value,
        id: index,
      }
    })
    this.setState({
      word: wordObj,
      wordLength: wordLength,
      remainingAttempts: 6,
    });
  });
};
Functioning code:
resetGame = () => {
  console.log("resetGame");
  // this.wordNikApi();
  fetch("http://api.wordnik.com:80/v4/words.json/randomWords?hasDictionaryDef=true&minCorpusCount=0&minLength=5&maxLength=15&limit=1&api_key=/* Removed */")
    .then( res => res.json() )
    .then( ( result ) => {
      this.setState({
        apiWord: result[0].word,
      }, () => {
        let fullWord = this.state.apiWord;
        let wordArray = fullWord.split("");
        let wordLength = wordArray.length;
        // Send wordObj to state with value and index
        let wordObj = wordArray.map((value, index) => {
          return {
            found: false,
            val: value,
            id: index,
          }
        })
        this.setState({
          word: wordObj,
          wordLength: wordLength,
          remainingAttempts: 6,
          count: 0,
          allAttempts: [],
          letterIndex: [],
          numberOfBadAttempts: 0,
          repeat: false,
          pageLock: false,
          invalidKey: false,
        });
      });
    })
    .catch( ( error ) => {
      console.log("API ERROR: ", error);
    })
};
What is causing the problem is JavaScript's asynchrony: when you call the wordNikApi function within the resetGame function, you must use the await keyword, so that the changes made by wordNikApi are applied first and only then does the rest of the workflow continue. Try modifying the resetGame function like this:
const resetGame = async () => {
  ...
  await this.wordNikApi() // note: this only waits if wordNikApi returns its promise
  ...
}
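For that await to actually wait, wordNikApi has to return its fetch chain; a minimal sketch (url stands in for the Wordnik URL above, and wrapping setState in a Promise lets the state update be awaited too):

wordNikApi = () => {
  // returning the chain makes this method awaitable
  return fetch(url)
    .then(res => res.json())
    .then(result => new Promise(resolve =>
      this.setState({ apiWord: result[0].word }, resolve)
    ))
    .catch(error => console.log("API ERROR: ", error));
};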
fetch is asynchronous, which means it runs alongside your other code: calling this.wordNikApi() sets the fetch request going but doesn't stop your script from continuing.
In your new version, the code sits inside the .then() callback, which only runs once the fetch request has returned its data. That is why, in the third snippet, that code effectively waits for the request to finish before running.
Hope this helps clear up async vs. sync a bit more; there are better documents out there that explain it in depth.
I know this is an old post, but I thought others might be able to use this.
I implemented a queue that calls fetch on the first item in the queue, then uses fetch().then to pull the next item, post it, and recurse if the queue is not empty. Here's the code I used:
var clientDataQueue = [];

function queueClientData(theData) {
  clientDataQueue.push(theData);
  console.log("++clientDataQueue.length:", clientDataQueue.length)
  if (clientDataQueue.length == 1) {
    postFromQueue();
  }
}

function postFromQueue() {
  console.log("--clientDataQueue.length:", clientDataQueue.length)
  if (clientDataQueue.length > 0) {
    postClientdata(clientDataQueue[0]).then( () => {
      clientDataQueue.shift();
      postFromQueue();
    });
  }
}

function postClientdata(theData) {
  var htmlData = {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(theData)
  };
  return fetch('/api/clientData', htmlData)
}
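A hypothetical usage example: each payload is POSTed strictly in order, one request at a time:

queueClientData({ client: 'a', ts: Date.now() }); // starts posting immediately
queueClientData({ client: 'b', ts: Date.now() }); // queued; posted once the first completes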

Delay the foreach Loop in Node JS

I hope you are well. I am getting data from one API and sending it to the Shopify store API. It mostly works, but it skips some products: while the API is busy with, say, indexes 0, 1, 2, indexes 3, 4, ... 10 get bypassed. So it seems I should delay the forEach loop by 10-15 seconds. Please help me do this. I have tried many times with setTimeout etc., but the forEach loop structure is difficult for me as a newcomer. Please check the code below. Thanks.
const request = require('request');
const { json } = require('express');
const { Parser } = require('json2csv');
const fastcsv = require('fast-csv');
//const csv = require('csv-parser');
const fs = require('fs');
const { privateDecrypt } = require('crypto');
const { time } = require('console');

const fields = ['Vendor', 'Price', 'SKU', 'error'];
const opts = { fields };

const createCsvWriter = require('csv-writer').createObjectCsvWriter;
const csvWriter = createCsvWriter({
  path: 'C:/Users/IT City/Desktop/arslan.csv',
  header: [
    {id: 'Vendor', title: 'Vendor'},
    {id: 'Price', title: 'Price'},
    {id: 'SKU', title: 'SKU'},
    {id: 'error', title: 'error'},
  ]
});

let new_products = {
  product_final: {
    Vendor: String,
    Price: String,
    SKU: String,
    Error: String,
  }
};

////////// First API from which I am getting the data
const options = {
  method: 'GET',
  url: 'https://api.dexi.io/runs/f58f7795-c11a-478c-b670-c7ae5df8677b/latest/result',
  headers: {
    accept: 'application/json',
    json: true,
    'x-dexiio-access': '9d56e967dfXXXXXXX725e234b311655c96',
    'x-dexiio-account': '5e597feb-99axxxxxxxx-37f1315723ab'
  }
};

products = ['Vendor', 'Price', 'SKU', 'Error']
let product = {};

request(options, function (error, response, body) {
  if (error) throw new Error(error);
  pro = JSON.parse(body);
  ///// Looping through each item from the first API and sending data to Shopify. But it's entering only 10-12 products
  //// as the handle is busy entering the product.
  /// I have to delay the forEach loop 10, 15 seconds
  pro.rows.forEach(
    row => {
      for (let z = 0; z < row.length; z++) {
        product[pro.headers[z]] = row[z];
        product_final2[pro.headers[z]] = row[z];
      }
      productssdata.push(product_final2)
      products.push(product)

      var Price = product.Price;
      var SKU = product.SKU;
      var Vendor = product.Vendor;
      var body_html = "THISFSDFSDFSDFSDFSFSDF";

      let new_products = {
        product: {
          title: Vendor,
          body_html: Price,
          vendor: Vendor,
          product_type: SKU,
          tags: Price
        }
      };

      const options = {
        method: 'POST',
        url: 'https://0abcfsdfsdf4bb6532f3b#amjad.myshopify.com/admin/api/2020-07/products.json',
        headers: {
          accept: 'application/json',
          'apiKey': '07649cABSDCCSD8ffbae7af02',
          'password': 'sSDCSDFDF'
        },
        body: new_products,
        json: true,
      };

      request(options, function (error, response, body) {
        if (error) throw new Error(error);
        console.log(body)
      });
    }
  );
});
You don't need to use setTimeout() to delay the loop; that's what async and await are for. Let me share an example of how to make a forEach loop wait with await.
Step 1: return a promise from a function and await it until it is complete.
const wait = async props => {
  return new Promise((resolve, reject) => {
    return resolve(Math.random());
  })
}

const x = [1, 2, 3, 4]
x.forEach(async number => {
  const num = await wait();
  console.log('start')
  console.log(num);
  console.log('end');
})
Request is deprecated.
It can't be used with await anyway, which makes it inconvenient. There used to be another module, request-promise, that wrapped request and returned a Promise so one could await it, but it is deprecated as well.
For these reasons, use Axios or Fetch instead.
You can't use await to pause a .forEach() loop (it won't wait between iterations), but you can in a for loop.
You think you need to delay the calls because they are asynchronous, but in reality you should simply await each call. Delaying calls with an arbitrary timeout is a dirty workaround.
In the end, you can do something like:
(async () => {
  let options = {
    url: "http://......"
  };
  const response = await axios(options); // Add try/catch block around this to manage errors
  const pro = response.data;
  for (let row of pro.rows) {
    options = {
      url: "http://some.other.url"
    }
    const response = await axios(options); // Each call will be done one by one and awaited in order
  }
})()
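If you additionally want a pause between the Shopify calls (e.g. to respect rate limits), a small sleep helper inside the same for...of loop works; a sketch (postProduct is a hypothetical wrapper around the Shopify POST):

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

for (const row of pro.rows) {
  await postProduct(row); // hypothetical helper that POSTs one product and returns a promise
  await sleep(500);       // wait 500 ms before the next request
}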
setTimeout(() => pro.rows.forEach((row) => {...}), 10000)
This executes the forEach after 10 seconds. Note the wrapping arrow function: without it, the forEach would run immediately and setTimeout would receive its undefined return value. Also note this delays only the start of the loop, not the individual iterations.

Counter not increasing in async map function

I am working with MongoDB and Node.js. I have an array of customers, and I have to create each one in the database.
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
  }
});
await Promise.all([...promises2]);
The issue is that the counter is not increasing every time: I am getting the same counter in all the created customers. What is the issue here?
The issue is something like this, but that question doesn't have an answer.
The problem is that all the calls overlap. Since the first thing they each do is get the current counter, they all get the same counter, then try to use it. Fundamentally, you don't want to do this:
const counter = await Counter.findOne({ type: "Customer" });
// ...
await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
...because it creates a race condition: overlapping asynchronous operations can both get the same sequence value and then both issue an update to it.
You want an atomic operation for incrementing and retrieving a new ID. I don't use MongoDB, but I think the findOneAndUpdate operation can do that for you if you add the returnNewDocument option. If so, the minimal change would be to swap over to using that:
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
});
await Promise.all([...promises2]);
...but there's no reason to create an array and then immediately copy it, just use it directly:
await Promise.all(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
The overall operation will fail if anything fails, and only the first failure is reported back to your code (the other operations then continue and succeed or fail as the case may be). If you want to know everything that happened (which is probably useful in this case), you can use allSettled instead of all:
// Gets an array of {status, value/reason} objects
const results = await Promise.allSettled(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({counter});
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
const errors = results.filter(({status}) => status === "rejected").map(({reason}) => reason);
if (errors.length) {
  // Handle/report errors here
}
Promise.allSettled is new in ES2020, but easily polyfilled if needed.
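A minimal sketch of such a fallback for older runtimes; it maps each promise to a {status, value/reason} object so the combined promise never rejects:

if (!Promise.allSettled) {
  Promise.allSettled = promises =>
    Promise.all(promises.map(p =>
      Promise.resolve(p).then(
        value => ({ status: "fulfilled", value }),
        reason => ({ status: "rejected", reason })
      )
    ));
}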
If I'm mistaken about the above use of findOneAndUpdate in some way, I'm sure MongoDB gives you a way to get those IDs without a race condition. But in the worst case, you can pre-allocate the IDs instead, something like this:
// Allocate IDs (in series)
const ids = [];
for (const customer of customers) {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
    ids.push(counter.sequence_value);
  } else {
    ids.push(customer.customerId); // keep `ids` index-aligned with `customers`
  }
}
// Create customers (in parallel)
const results = await Promise.allSettled(customers.map(async (customer, index) => {
  const customerId = ids[index];
  try {
    await Customer.create({
      customerId
    });
  } catch (e) {
    // Failed, remove the counter, but without allowing any error doing so to
    // shadow the error we're already handling
    try {
      await Counter.someDeleteMethodHere(/*...customerId...*/);
    } catch (e2) {
      // ...perhaps report `e2` here, but don't shadow `e`
    }
    throw e;
  }
}));
// Get just the errors
const errors = results.filter(({status}) => status === "rejected").map(({reason}) => reason);
if (errors.length) {
  // Handle/report errors here
}
Your map function is not returning a promise.
Try this:
const promises2 = customers.map((customer) => {
  return new Promise(async (resolve) => {
    if (!customer.customerId) {
      const counter = await Counter.findOne({ type: 'Customer' });
      console.log({ counter });
      const payload = {
        customerId: counter.sequence_value,
      };
      await Customer.create(payload);
      await Counter.findOneAndUpdate({ type: 'Customer' }, { $inc: { sequence_value: 1 } });
    }
    resolve();
  });
});
await Promise.all(promises2);

Hyperledger query never returns results

I'm trying to query my business network using buildQuery, but it always returns an empty array.
My code is as follows.
This is the connection.js file:
module.exports = {
  BusinessNetworkConnection: require('composer-client').BusinessNetworkConnection,
  cardName: '',
  connection: {},
  connect: function() {
    var cardType = { type: 'composer-wallet-filesystem' }
    this.connection = new this.BusinessNetworkConnection(cardType);
    return this.connection.connect(this.cardName);
  },
  disconnect: function(callback) {
    this.connection.disconnect();
  }
};
This is my query.js file, which is invoked to get results:
const connection = require('./connection');

const getContacts = async (cardName, companyID) => {
  connection.cardName = cardName;
  try {
    await connection.connect();
    main();
  } catch (error) {
    main(error);
  }

  async function main(error) {
    if (error) { return new Error("Ops Error: ", error) };
    const statement = 'SELECT org.finance.einvoice.participant.Company WHERE (participantId == _$companyID)'
    const query = await connection.connection.buildQuery(statement);
    const company = await connection.connection.query(query, { companyID }).catch(err => { return new Error(err) });
    await connection.connection.disconnect().catch(err => new Error(err));
    console.log(company);
    return company;
  };
};

module.exports = {
  getContacts
};
The expected behavior of getContacts() is to return an asset from the business network, but it actually returns an empty array.
Current versions: composer-cli 0.20, composer-playground 0.20, composer-client 0.20, composer-common 0.20, and fabric-dev-server 1.2.
I found the solution to this issue.
I was using a card that was not allowed to perform queries. When I used the admin card instead, it returned results.
The other way is to allow participants to issue queries in the permissions.acl file.
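A sketch of such a rule, assuming the namespace from the question (adjust the resource and operation to whatever the query actually reads):

rule CompaniesCanQueryCompanies {
  description: "Allow Company participants to read, and therefore query, Company records"
  participant: "org.finance.einvoice.participant.Company"
  operation: READ
  resource: "org.finance.einvoice.participant.Company"
  action: ALLOW
}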

rxjs subscribing late results in empty stream

I have the following piece of code. As is, with a couple of lines commented out, it works as expected: I subscribe to a stream, do some processing, and stream the data to the client. However, if I uncomment those lines, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe to the stream late and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import {elasticClient} from '../helpers/elasticClient';

const {ObjectId} = imongo;

function searchElastic({query, sort}, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;

  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({_scroll_id, hits: {hits}}) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if(hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if(limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}

          const handleDoc = (err, res) => {
            if(err) {
              subject.onError(err);
              return;
            }

            const {_scroll_id, hits: {hits}} = res;
            if(!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if(limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({scroll, scrollId: _scroll_id},
                  handleDoc));
            }
          };

          setImmediate(() =>
            elasticClient.scroll({scroll, scrollId: _scroll_id},
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });

      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
  .map(entry => {
    entry.id = entry._id.toString();
    delete entry._id;
    return entry;
  })
  // we don't care about the results
  // but have to wait for stream to finish
  .defaultIfEmpty()
  .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const {parentSearchFilter, filter, format} = query;

  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if(!ancestors.length) {
        return Rx.Observable.empty();
      }

      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });

      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if(!count) {
            return Rx.Observable.empty();
          }

          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch(e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method with the pattern below.
convert elasticClient.scroll to an observable first
set up the initial data for elasticClient.search()
when the search is resolved, you should get your scroll id
the expand() operator lets you recursively execute the elasticClientScroll observable
use map to select the data you want to return
use takeWhile to decide when to complete this stream
The correct result is that once you do searchElastic().subscribe(), the stream will emit continuously until there is no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  // fromNodeCallback wraps the (err, result) callback API of elasticClient.scroll
  const elasticClientScroll = Observable.fromNodeCallback(elasticClient.scroll, elasticClient)
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  }
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id }).map(() =>
        //.. select the res you want to return
      )
    }).takeWhile(res => res.hits.length)
}
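A hypothetical usage of this structure, subscribing as described above (the query/sort shapes are placeholders, not from the original code):

searchElastic({ query: { match_all: {} }, sort: ['_doc'] }, 10)
  .subscribe(
    res => console.log(res),   // each page of hits
    err => console.error(err),
    () => console.log('scroll exhausted')
  );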
