I hope you are well. I am getting data from one API and sending it to the Shopify store API. It works, but it only enters some of the products: while the loop iterates, the API is still busy with indexes 0, 1, 2, and indexes 3, 4, ... 10 get bypassed. So I think I should delay the forEach loop by 10-15 seconds between iterations. Please help me do this. I have tried many times with setTimeout etc., but the forEach loop structure is difficult for me as a newcomer. Please check the code below. Thanks
const request = require('request');
const { json } = require('express');
const { Parser } = require('json2csv');
const fastcsv = require('fast-csv');
//const csv = require('csv-parser');
const fs = require('fs');
const { privateDecrypt } = require('crypto');
const { time } = require('console');
const fields = ['Vendor', 'Price', 'SKU','error'];
const opts = { fields };
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
const csvWriter = createCsvWriter({
  path: 'C:/Users/IT City/Desktop/arslan.csv',
  header: [
    { id: 'Vendor', title: 'Vendor' },
    { id: 'Price', title: 'Price' },
    { id: 'SKU', title: 'SKU' },
    { id: 'error', title: 'error' },
  ]
});
let new_products = {
  product_final: {
    Vendor: String,
    Price: String,
    SKU: String,
    Error: String,
  }
};
////////// First API from which I am getting the data
const options = {
  method: 'GET',
  url: 'https://api.dexi.io/runs/f58f7795-c11a-478c-b670-c7ae5df8677b/latest/result',
  headers: {
    accept: 'application/json',
    json: true,
    'x-dexiio-access': '9d56e967dfXXXXXXX725e234b311655c96',
    'x-dexiio-account': '5e597feb-99axxxxxxxx-37f1315723ab'
  }
};
let products = ['Vendor', 'Price', 'SKU', 'Error'];
let product = {};
let product_final2 = {};  // referenced but never declared in the original snippet
let productssdata = [];   // same
request(options, function (error, response, body) {
  if (error) throw new Error(error);
  const pro = JSON.parse(body);
  ///// Looping through each item from the first API and sending data to Shopify, but it only enters 10-12 products
  //// as the handle is busy entering the product.
  /// I have to delay the forEach loop by 10-15 seconds.
  pro.rows.forEach(row => {
    for (let z = 0; z < row.length; z++) {
      product[pro.headers[z]] = row[z];
      product_final2[pro.headers[z]] = row[z];
    }
    productssdata.push(product_final2);
    products.push(product);
    var Price = product.Price;
    var SKU = product.SKU;
    var Vendor = product.Vendor;
    var body_html = "THISFSDFSDFSDFSDFSFSDF";
    let new_products = {
      product: {
        title: Vendor,
        body_html: Price,
        vendor: Vendor,
        product_type: SKU,
        tags: Price
      }
    };
    const options = {
      method: 'POST',
      url: 'https://0abcfsdfsdf4bb6532f3b#amjad.myshopify.com/admin/api/2020-07/products.json',
      headers: {
        accept: 'application/json',
        'apiKey': '07649cABSDCCSD8ffbae7af02',
        'password': 'sSDCSDFDF'
      },              // the headers object was left unclosed in the original paste
      body: new_products,
      json: true
    };
    request(options, function (error, response, body) {
      if (error) throw new Error(error);
      console.log(body);
    });
  });
});
You don't need to use setTimeout() to delay the loop; that's what async and await are for. Let me share an example of how to make a forEach loop wait using await.
Step 1: return a Promise from a function and await it until it completes.
const wait = async () => {
  return new Promise((resolve, reject) => {
    return resolve(Math.random()); // resolves immediately with a random number
  });
};

const x = [1, 2, 3, 4];
x.forEach(async number => {
  const num = await wait();
  console.log('start');
  console.log(num);
  console.log('end');
});
Request is deprecated
It can't be used with await anyway, which makes it inconvenient. There used to be another module, request-promise, that wrapped request and returned a Promise so one could await it, but it is deprecated as well.
For these reasons, use Axios or Fetch instead
You can't use await or delay a .forEach() loop, but you can in a for loop.
You think you need to delay the calls, because they are asynchronous, but in reality you should simply await each call. Delaying calls with an arbitrary timeout is a dirty workaround.
In the end, you can do something like:
(async () => {
  let options = {
    url: "http://......"
  };
  let response = await axios(options); // Add a try/catch block around this to manage errors
  const pro = response.data;
  for (let row of pro.rows) {
    options = {
      url: "http://some.other.url"
    };
    response = await axios(options); // Each call is made one by one and awaited in order
  }
})();
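Applied to the question's Shopify flow, a minimal sketch might look like the following (axios assumed; the URLs and credentials are placeholders standing in for the ones in the question, and the Shopify body shape is taken from the question):
const axios = require('axios');

(async () => {
  // 1. Fetch the source rows from dexi.io (headers as in the question)
  const { data: pro } = await axios.get(
    'https://api.dexi.io/runs/<run-id>/latest/result',
    { headers: { accept: 'application/json', 'x-dexiio-access': '<token>', 'x-dexiio-account': '<account>' } }
  );

  // 2. POST the products one by one; each request finishes before the next starts
  for (const row of pro.rows) {
    const product = {};
    row.forEach((value, z) => { product[pro.headers[z]] = value; });

    try {
      const { data } = await axios.post(
        'https://<shop>.myshopify.com/admin/api/2020-07/products.json',
        { product: { title: product.Vendor, vendor: product.Vendor, product_type: product.SKU, tags: product.Price } },
        { auth: { username: '<apiKey>', password: '<password>' } } // basic auth for a private app
      );
      console.log('created product', data.product.id);
    } catch (err) {
      console.error('Shopify rejected this row:', err.message);
    }
  }
})();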
setTimeout(() => pro.rows.forEach((row) => {...}), 10000)
This executes the forEach after 10 seconds. Note the arrow wrapper: calling pro.rows.forEach(...) directly as the first argument would run it immediately and pass its return value (undefined) to setTimeout. Also, this delays the whole loop once; it does not pause between iterations.
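If you do want to stick with timers rather than await, the usual variant staggers each request by its index, so the rows fire 10 seconds apart instead of all at once. A sketch reusing the per-row request from the question:
pro.rows.forEach((row, i) => {
  setTimeout(() => {
    // build `product`, `new_products` and `options` for this row as in the question, then:
    request(options, (error, response, body) => {
      if (error) return console.error(error);
      console.log(body);
    });
  }, i * 10000); // row 0 fires immediately, row 1 after 10 s, row 2 after 20 s, ...
});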
Related
I have a use case where I'm looping through an array of objects and making some GraphQL requests that may involve pagination for a given object in the array. I'm trying to speed up performance by pushing the recursive function to an array of promises and then using Promise.all to resolve all of those.
I'm running into an issue, though, where I'm getting an undefined response from Promise.all. The end goal is to have the following response for each unique object in the array:
[{
account: test1,
id: 1,
high: 2039,
critical: 4059
},
{
account: test2,
id: 2,
high: 395,
critical: 203
}]
...where an anAccount object is only returned after recursion is done paginating/making all requests for a given account object.
Here is the sample code:
const fetch = require('isomorphic-fetch');
const API_KEY = '<key>';
async function main() {
  let promises = [];
  let accounts = [{ 'name': 'test1', 'id': 1 }, { 'name': 'test2', 'id': 2 }];
  for (const a of accounts) {
    let cursor = null;
    let anAccountsResults = [];
    promises.push(getCounts(a, anAccountsResults, cursor));
  }
  let allResults = await Promise.all(promises);
  console.log(allResults);
}
async function getCounts(acct, results, c) {
  var q = ``;
  if (c == null) {
    q = `{
      actor {
        account(id: ${acct.id}) {
          aiIssues {
            issues(filter: {states: ACTIVATED}) {
              issues {
                issueId
                priority
              }
              nextCursor
            }
          }
        }
      }
    }`;
  } else {
    q = `{
      actor {
        account(id: ${acct.id}) {
          aiIssues {
            issues(filter: {states: ACTIVATED}, cursor: "${c}") {
              issues {
                issueId
                priority
              }
              nextCursor
            }
          }
        }
      }
    }`;
  }
  const resp = await fetch('https://my.api.com/graphql', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'API-Key': API_KEY
    },
    body: JSON.stringify({
      query: q,
      variables: ''
    }),
  });
  let json_resp = await resp.json();
  let aSingleResult = json_resp.data.actor.account.aiIssues.issues.issues;
  let nextCursor = json_resp.data.actor.account.aiIssues.issues.nextCursor;
  console.log(nextCursor);
  if (nextCursor == null) {
    results = results.concat(aSingleResult);
  } else {
    results = results.concat(aSingleResult);
    await getCounts(acct, results, nextCursor);
  }
  let criticalCount = results.filter(i => i.priority == 'CRITICAL').length;
  let highCount = results.filter(i => i.priority == 'HIGH').length;
  let anAccount = {
    account: acct.name,
    id: acct.id,
    high: highCount,
    critical: criticalCount
  };
  return anAccount;
}
main();
Logging anAccount inside getCounts shows the correct detail, but the logged output of Promise.all(promises) yields undefined. Is there a better way to handle this so I can still run multiple recursive functions asynchronously in parallel within the loop with Promise.all?
Your main problem appears to be that results = results.concat(aSingleResult); does not mutate the array you passed in, but only reassigns the local variable results inside the function, so anAccount will only use the aSingleResult from the current call.
Instead of collecting things into a results array that you pass as a parameter, it is better to have every call return a new array. Then, in the recursive await getCounts(acct, results, nextCursor) call, do not ignore the return value.
async function main() {
  const accounts = [{ 'name': 'test1', 'id': 1 }, { 'name': 'test2', 'id': 2 }];
  const promises = accounts.map(async acct => {
    const results = await getIssues(acct);
    const criticalCount = results.filter(i => i.priority == 'CRITICAL').length;
    const highCount = results.filter(i => i.priority == 'HIGH').length;
    return {
      account: acct.name,
      id: acct.id,
      high: highCount,
      critical: criticalCount
    };
  });
  const allResults = await Promise.all(promises);
  console.log(allResults);
}
const query = `query ($accountId: ID!, $cursor: IssuesCursor) {
  actor {
    account(id: $accountId) {
      aiIssues {
        issues(filter: {states: ACTIVATED}, cursor: $cursor) {
          issues {
            issueId
            priority
          }
          nextCursor
        }
      }
    }
  }
}`;
async function getIssues(acct, cursor) {
  const resp = await fetch('https://my.api.com/graphql', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'API-Key': API_KEY
    },
    body: JSON.stringify({
      query, // the shared query above (the original snippet referenced a non-existent `q` here)
      variables: {
        accountId: acct.id,
        cursor,
      }
    }),
  });
  if (!resp.ok) throw new Error(resp.statusText);
  const { data, error } = await resp.json();
  if (error) throw new Error('GraphQL error', { cause: error });
  const { nextCursor, issues } = data.actor.account.aiIssues.issues;
  if (nextCursor == null) {
    return issues;
  } else {
    return issues.concat(await getIssues(acct, nextCursor));
  }
}
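If you'd rather avoid the recursion, the same pagination can be collected iteratively. Here is a sketch under the same assumptions (same endpoint, same response shape, same shared query):
async function getIssuesIteratively(acct) {
  const collected = [];
  let cursor = null;
  do {
    const resp = await fetch('https://my.api.com/graphql', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', 'API-Key': API_KEY },
      body: JSON.stringify({ query, variables: { accountId: acct.id, cursor } }),
    });
    if (!resp.ok) throw new Error(resp.statusText);
    const { data } = await resp.json();
    const page = data.actor.account.aiIssues.issues;
    collected.push(...page.issues); // accumulate this page
    cursor = page.nextCursor;       // null when there are no more pages
  } while (cursor != null);
  return collected;
}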
I try to get specific documents from MongoDB with Node.js and insert them into array.
const getStockComments = async (req) => {
  const stockname = req.params.stockName;
  var comments = [];
  var data = [];
  const stock = await stockModel.findOne({ name: stockname });
  comments = stock.comments;
  comments.forEach(async (commentId) => {
    const comm = await commentModel.findOne({ _id: commentId });
    data.push(comm);
    console.log(data); // This logs the data repeatedly, because it's inside the loop.
  });
  console.log(data); // This doesn't log the data, and I don't know why.
  return data;
};
The first console.log(data) logs the same data many times because it's inside a loop.
But the second console.log(data) doesn't log the data at all.
What am I doing wrong?
Instead of using a loop, you can use the $in operator to simplify things:
const getStockComments = async (req) => {
  const stockname = req.params.stockName;
  const stock = await stockModel.findOne({ name: stockname });
  const comments = stock.comments;
  // await the query; with a callback here, the function would return `data` before it was filled
  const data = await commentModel.find({ _id: { $in: comments } });
  console.log(data);
  return data;
};
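If you do want to keep one query per comment, the direct fix is a for...of loop, which lets each await finish before the next iteration starts (forEach fires all its callbacks without waiting). A sketch with the same models:
const getStockComments = async (req) => {
  const stock = await stockModel.findOne({ name: req.params.stockName });
  const data = [];
  for (const commentId of stock.comments) {
    // each lookup completes before the next begins
    data.push(await commentModel.findOne({ _id: commentId }));
  }
  return data;
};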
I'm trying to create a function with Firebase where, upon request, the function carries out some scraping activities and then logs the result to a collection each time. My function works and returns the array of items that I need, but I am having trouble adding this array to the Firestore database.
I am not sure if I need to subscribe to the response or if it is returning something else.
Cloud Function:
exports.scraper = functions.https.onRequest(async (request, response) => {
  cors(request, response, async () => {
    const body = (request.body);
    const data = await scrapeteamtags(body.text);
    response.send(data);
  });
  return admin.firestore().collection('games').add({
    teams: data
  });
});
Added the function used in the await for context:
const scrapeteamtags = (text) => {
  const urls = Array.from(getUrls(text));
  const requests = urls.map(async url => {
    const res = await fetch(url);
    const html = await res.text();
    const $ = cheerio.load(html);
    const getTeamlist = JSON.parse($('body').text());
    var gamelist = {
      games: []
    };
    getTeamlist.SSResponse.children.map(function (item) {
      // go into the returned json
      var event = new Object;
      var leagues = ["Premier League", "Spanish La Liga", "Italian Serie A", "French Ligue 1", "German Bundesliga"];
      // finds all child items that contain the event tag
      if (Object.keys(item).includes('event')) {
        // check that the league is on the list of interest
        if (leagues.includes(item.event.typeName)) {
          event.id = item.event.id;
          event.name = item.event.name;
          // add the event name and id to the object, then go into the next level to get market data
          item.event.children.map(function (item1) {
            if (Object.keys(item1).includes('market')) {
              event.marketid = item1.market.id;
              // add the market data id to the object
              var eventoutcome = [];
              item1.market.children.map(function (item2) {
                if (Object.keys(item2).includes('outcome')) {
                  eventoutcome.push({
                    "id": item2.outcome.id, // the original had this key duplicated
                    "name": item2.outcome.name,
                    "price": item2.outcome.children[0].price.priceDec
                  });
                  // adds the id, name and price to an array, then add it to the object
                  event.outcome = eventoutcome;
                }
              });
            }
          });
          // push each event as a new object to the array of games
          gamelist.games.push(event);
        }
      }
    });
    //console.log(gamelist.games)
    return {
      gamelist
    };
  });
  return Promise.all(requests);
};
HTTP functions don't let you return a promise with the data to send. (That's how callable functions work, but that doesn't apply here.) You will have to wait for the database write to finish, then send the response to terminate the function.
The function should be structured more like this:
exports.scraper = functions.https.onRequest(async (request, response) => {
  cors(request, response, async () => {
    const body = (request.body);
    const data = await scrapeteamtags(body.text);
    await admin.firestore().collection('games').add({
      teams: data
    });
    response.send(data);
  });
});
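One caveat worth adding: if the scrape or the Firestore write throws, no response is ever sent and the function hangs until timeout. A minimal sketch of the same handler with a try/catch (the 500 status and error shape are my choices, not from the original):
exports.scraper = functions.https.onRequest(async (request, response) => {
  cors(request, response, async () => {
    try {
      const data = await scrapeteamtags(request.body.text);
      await admin.firestore().collection('games').add({ teams: data });
      response.send(data);
    } catch (err) {
      console.error(err);
      response.status(500).send({ error: 'scrape or write failed' });
    }
  });
});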
I have a Node.js script; here is what it does in detail:
1) It requests an API to get a list of cities and receives a JSON array. Using this array, I loop with forEach.
2) At each iteration (2nd loop), I request the API again to get the details (about 100 rows) and insert them into a MySQL database.
My question: how do I make the function inside the first loop (where I get the list of cities) wait until it completes before going on to the next item (city)? I want to make the loop sequential, with a delay.
My source code:
const request = require('request');
var moment = require('moment');
var mysql = require('mysql');
var a = moment('2019-04-01');
var b = moment('2019-04-06');
const sleep = (waitTimeInMs) => new Promise(resolve => setTimeout(resolve, waitTimeInMs));
function timer(ms) {
  return new Promise(res => setTimeout(res, ms));
}
var connection = mysql.createConnection({
  host: 'localhost',
  user: 'user1',
  password: 'password',
  database: 'local'
});
async function getURL(id_city, dates) {
  var url = 'https://localhost/api/format/json/schedule/city/' + id_city + '/date/' + dates;
  request(url, { json: true }, (err, res, body) => {
    if (err) { return console.log(err); }
    // console.log(body.status);
    var item1 = body.schedule.data.item1;
    var item2 = body.schedule.data.item2;
    connection.connect();
    // the original interpolated undefined `task1`/`task2` here; `item1`/`item2` are what is defined above
    connection.query('INSERT INTO schedule (city,item1,item2) values ("' + id_city + '","' + item1 + '", "' + item2 + '")', function (error, results, fields) {
      if (error) throw error;
    });
    // connection.end();
  });
}
async function getDate(id_city) {
  var end;
  for (var m = moment(a); m.isBefore(b); m.add(1, 'days')) {
    getURL(id_city, m.format('YYYY-MM-DD'));
    await timer(1000); // making delay
  }
}
async function main() {
  var url = 'https://localhost/api/format/json/list_city';
  connection.connect();
  request(url, { json: true }, (err, res, body) => {
    if (err) { return console.log(err); }
    var list_city = body.city; // this is an array
    var counter = 0;
    list_city.forEach(function (city) {
      getDate(city.id, function () {
      }); // I need this to complete before going to the next city
    });
  }); // end request url
}
main();
My expectation (sequential):
city1
insert item a done...
insert item b done...
city2
insert item a done...
insert item b done...
insert item c done...
city3
...
For both request and mysql you can use Promise-based packages, namely request-promise and mysql2. To guarantee sequential execution, you can then do:
const rp = require('request-promise');
const mysql = require('mysql2/promise');

// then in your getURL function
async function getURL(id_city, dates) {
  var url = 'https://localhost/api/format/json/schedule/city/' + id_city + '/date/' + dates;
  const body = await rp(url, { json: true });
  const item1 = body.schedule.data.item1;
  const item2 = body.schedule.data.item2;
  const connection = await mysql.createConnection({ host: 'localhost', user: 'root', database: 'test' });
  const [rows, fields] = await connection.execute('INSERT INTO schedule (city,item1,item2) values ("' + id_city + '","' + item1 + '", "' + item2 + '")');
}
// One await in getDate should do
async function getDate(id_city) {
  var end;
  for (var m = moment(a); m.isBefore(b); m.add(1, 'days')) {
    await getURL(id_city, m.format('YYYY-MM-DD'));
  }
}
For handling errors with async/await:
try {
  const body = await rp(url, { json: true });
} catch (e) {
  // handle error
  console.error(e);
  // or rethrow: throw e
}
For efficiency you could use a MySQL connection pool, like:
// myPool.js
const mysql = require('mysql2');

// create the pool
const pool = mysql.createPool({
  host: 'localhost',
  user: 'root',
  database: 'test',
  connectionLimit: 10,
  queueLimit: 0
});

// now get a Promise-wrapped instance of that pool
const promisePool = pool.promise();
module.exports = () => promisePool;
// Then in your getURL
const getPool = require('./myPool');
async function getURL(id_city, dates) {
  ...
  const pool = await getPool();
  const [rows, fields] = await pool.execute('INSERT INTO schedule (city,item1,item2) values ("' + id_city + '","' + item1 + '", "' + item2 + '")');
  ...
Also consider using prepared statements, which also avoid the SQL injection risk of the concatenated strings above:
connection.execute('SELECT * FROM `table` WHERE `name` = ? AND `age` > ?', ['Morty', 14]);
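Applied to the insert above (column names taken from the question), that would look like:
await pool.execute(
  'INSERT INTO schedule (city, item1, item2) VALUES (?, ?, ?)',
  [id_city, item1, item2]
);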
Use a for loop instead of forEach, and on each iteration, await the call of getDate, so that one getDate call always finishes before it gets called again:
for (let i = 0; i < list_city.length; i++) {
  await getDate(list_city[i].id); // the original indexed `city[i]`, but the array is `list_city`
  await timer(100); // this puts a delay of at least 100ms between each call
}
Make sure to make the containing function async for this to work.
Note that since getDate returns a Promise, it probably shouldn't accept a callback - either chain awaits or thens on to the end instead.
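A sketch of what that containing function could look like, reusing the question's request and timer (marking the request callback async is what makes await legal inside it):
request(url, { json: true }, async (err, res, body) => {
  if (err) { return console.log(err); }
  for (let i = 0; i < body.city.length; i++) {
    await getDate(body.city[i].id); // all dates for this city finish before the next city starts
    await timer(100);               // optional pause between cities
  }
});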
I'm new to NodeJS and I have a problem I don't understand.
In this function, I call several API one after another to retrieve some data about a movie. The result isn't always the same. Most of the time, the result is correct, but sometimes the result isn't complete.
I tried using then to try and chain the API calls but it doesn't seem to work.
Any idea why the result isn't always the same? Any help would be appreciated.
// test fetchData(456165)
function fetchData(filmid) {
  let average = array => array.reduce((a, b) => a + b) / array.length;
  var notes = [];
  mdb.movieInfo({
      id: filmid,
      language: 'fr'
    },
    (err, resOmdb) => {
      notes.push(parseFloat(resOmdb.vote_average));
      imdb
        .getById(resOmdb.imdb_id, {
          apiKey: 'e9d59b68',
          timeout: 3000
        })
        .then(
          allocine.api(
            'search', {
              q: `${resOmdb.title}`,
              filter: 'movie'
            },
            function (error, resAllo) {
              if (error) {
                return;
              }
              allocine.api(
                'movie', {
                  code: `${resAllo.feed.movie[0].code}`
                },
                function (error, result) {
                  if (error) {
                    return;
                  }
                  notes.push(parseFloat(result.movie.statistics.userRating) * 2);
                }
              );
              // doesn't seem to execute all the time
              allocine.api(
                'showtimelist', {
                  zip: 44260,
                  movie: resAllo.feed.movie[0].code
                },
                function (error, resultCin) {
                  if (error) {
                    return;
                  }
                  // sometimes doesn't appear in the result
                  resOmdb.cinemas = resultCin;
                }
              );
            }
          )
        )
        .then(
          function (result) {
            notes.push(parseFloat(result.rating));
            resOmdb.vote_average = average(notes).toFixed(2);
            // check the result
            console.log(util.inspect(resOmdb, false, null));
          },
          function (error) {
            return;
          }
        );
    }
  );
}
First of all you should decide whether you want to use Promises or not.
If you do, promisify all functions. The next thing you need to do is return your promises when they are created inside a function.
In your case, your first imdb API call is probably not being returned.
Next, check whether your Node version supports async/await.
Then you can do your API calls without any distractions:
'use strict';
const Promise = require('bluebird');
const mdb = Promise.promisifyAll(require('mdb'));
const allocine = Promise.promisifyAll(require('allocine-api'));

// test fetchData(456165)
async function fetchData(filmId) {
  const notes = [];
  const resOmdb = await mdb.movieInfoAsync({ id: filmId });
  notes.push(parseFloat(resOmdb.vote_average));
  const imdbResult = await imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 });
  const resAllo = await allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' });
  // and so on ...
}
UPDATE:
To speed up your function, you can make the requests concurrently.
To do so, use Promise.join:
const [imdbResult, allocineResult] = await Promise.join(
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
);
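For reference, the same thing works with the native Promise.all, so bluebird is only needed here for the promisification itself (a sketch under the same assumptions as the answer above):
const [imdbResult, allocineResult] = await Promise.all([
  imdb.getByIdAsync(resOmdb.imdb_id, { apiKey: 'e9d59b68', timeout: 3000 }),
  allocine.apiAsync('search', { q: `${resOmdb.title}`, filter: 'movie' })
]);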