Set timer inside a loop for GraphQL query - JavaScript

What I'm doing here is fetching anime characters from the AniList GraphQL API. I've added a loop so I can fetch data for a range of character IDs, for example 1-100. But I want to respect the API's rate limits, so I'd like a way to limit my requests to 1 per second. I've used setTimeout, but I still got rate-limited by the API, and using setInterval just re-runs the loop every 5 seconds, so the same data gets fetched every 5 seconds.
Is there any way I can make it as I've mentioned?
My code:
const fs = require("fs");
const axios = require("axios");

const number = 3;

async function fetchData() {
  for (let i = 1; i <= number; i++) {
    const query = axios
      .post(
        "https://graphql.anilist.co",
        {
          query: `query character(
            $id: Int
            $page: Int
            $sort: [MediaSort]
            $onList: Boolean
            $withRoles: Boolean = false
          ) {
            Character(id: $id) {
              id
              name {
                first
                middle
                last
                full
                native
                userPreferred
                alternative
                alternativeSpoiler
              }
              image {
                large
              }
              favourites
              isFavourite
              isFavouriteBlocked
              description
              age
              gender
              bloodType
              dateOfBirth {
                year
                month
                day
              }
              media(page: $page, sort: $sort, onList: $onList) #include(if: $withRoles) {
                pageInfo {
                  total
                  perPage
                  currentPage
                  lastPage
                  hasNextPage
                }
                edges {
                  id
                  characterRole
                  voiceActorRoles(sort: [RELEVANCE, ID]) {
                    roleNotes
                    voiceActor {
                      id
                      name {
                        userPreferred
                      }
                      image {
                        large
                      }
                      language: languageV2
                    }
                  }
                  node {
                    id
                    type
                    isAdult
                    bannerImage
                    title {
                      userPreferred
                    }
                    coverImage {
                      large
                    }
                    startDate {
                      year
                    }
                    mediaListEntry {
                      id
                      status
                    }
                  }
                }
              }
            }
          }`,
          variables: {
            id: i,
            withRoles: false,
          },
        },
        {
          headers: {
            "Content-Type": "application/json",
          },
        }
      )
      .then((response) => {
        // console.log(response.data.data.Character)
        const jsonContent =
          JSON.stringify(response.data.data.Character, null, 4) + ", ";
        fs.appendFile("./chars.json", jsonContent, function (err) {
          if (err) {
            return console.log(err);
          }
          console.log("The file was saved!");
        });
      })
      .catch((error) => console.log(`Code: ${error}`, error));
  }
}

fetchData();

Something like this should work for you (assuming all the rest was OK):
const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function fetchData() {
  for (let i = 1; i <= number; i++) {
    // The request is awaited inside the loop, so each iteration is delayed
    await delay(5000); // TODO: change to whatever you need
    const query = axios.post(
      "https://graphql.anilist.co",
      {
        query: someQuery, // TODO: set your query here
        variables: { id: i, withRoles: false }
      },
      {
        headers: { "Content-Type": "application/json" }
      }
    );
    try {
      const response = await query;
      const jsonContent = JSON.stringify(response.data.data.Character, null, 4) + ", ";
      fs.appendFile("./chars.json", jsonContent, function (err) {
        if (err) {
          return console.log(err);
        }
        console.log("The file was saved!");
      });
    } catch (e) {
      console.log(`Code: ${e}`, e);
    }
  }
}
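For the 1 request per second mentioned in the question, delay(1000) would match. The reason the original code got rate-limited is that the loop kicked off every axios.post without awaiting it, so all the requests fired at once; awaiting the delay and the response inside the loop serializes them. One more note: appending objects followed by ", " to chars.json does not produce valid JSON. A minimal sketch that collects the results and writes one valid array at the end (fetchCharacter is a hypothetical wrapper standing in for the axios.post call above):

const fs = require("fs");

const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

async function fetchAll(from, to) {
  const characters = [];
  for (let i = from; i <= to; i++) {
    await delay(1000); // 1 request per second
    // fetchCharacter is a hypothetical wrapper around the axios.post call above
    characters.push(await fetchCharacter(i));
  }
  // one write producing a valid JSON array, instead of appended fragments
  fs.writeFileSync("./chars.json", JSON.stringify(characters, null, 4));
}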

Related

Investigating an issue with my Uniswap tokens scraper that errors out after 7 requests to the Graph API

I'm making a scraper that will grab every Uniswap pair and save it to an array using the Graph API.
My problem occurs when I make my 7th request to the API.
Initially, I thought I was being rate limited because I was fetching 1000 tokens at a time, but after adding a 10 second wait between calls and decreasing the fetched tokens from 1000 to 10, it still stops on the 7th loop.
The script works perfectly until this point.
const axios = require('axios');
const fs = require('fs');

async function getTokens(skip) {
  try {
    const query = `
      query tokens($skip: Int!) {
        tokens(first: 10, skip: $skip) {
          id
          name
          symbol
        }
      }
    `;
    const variables = {
      skip: skip
    };
    const headers = {
      "Content-Type": "application/json"
    };
    const { data } = await axios.post(
      "https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3",
      { query, variables },
      { headers }
    );
    return data.data.tokens;
  } catch (err) {
    console.error(err);
    return [];
  }
}

async function saveTokens(tokens) {
  try {
    await fs.promises.writeFile("uniTokens.json", JSON.stringify(tokens), { flag: "w" });
  } catch (err) {
    console.error(err);
  }
}

async function main() {
  let skip = 0;
  let tokens = [];
  const retrievedIds = new Set();
  while (true) {
    const newTokens = await getTokens(skip);
    if (newTokens.length === 0) {
      console.log("Reached end of tokens, finishing up...");
      break;
    }
    // Only save tokens that haven't been retrieved before
    const newIds = new Set(newTokens.map(token => token.id));
    newIds.forEach(id => {
      if (!retrievedIds.has(id)) {
        tokens.push(newTokens.find(token => token.id === id));
        retrievedIds.add(id);
      }
    });
    console.log(`Retrieved ${tokens.length} tokens`);
    await saveTokens(tokens);
    skip += 1000;
    // delay the next request by 10 seconds
    //await new Promise(resolve => setTimeout(resolve, 10000));
  }
}

main();
This is the error that it produces:
TypeError: Cannot read properties of undefined (reading 'tokens')
at getTokens (/root/unipairs/uni:31:26)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async main (/root/unipairs/uni:52:27)
Reached end of tokens, finishing up...
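One way to debug this: when a GraphQL server rejects a request, it typically responds with an errors array and no data field, so data.data is undefined and reading tokens throws exactly this TypeError. Logging errors before dereferencing makes the real failure visible (also note the mismatch in the code above: it fetches first: 10 but advances skip by 1000, so most tokens are never retrieved). A minimal sketch of the defensive check, as the tail of the getTokens function above:

const { data } = await axios.post(
  "https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3",
  { query, variables },
  { headers }
);
// GraphQL transports errors in the response body, not as HTTP failures
if (data.errors) {
  console.error("GraphQL errors:", JSON.stringify(data.errors, null, 2));
  return [];
}
return data.data.tokens;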

React NodeJS problem with API call

I am new to NodeJS. I created several APIs for my application with different methods and they all work fine. The issue is that I created a new API in the same file as the others, but when calling it from my FE it is called twice: one call is a preflight with the OPTIONS method, and the other stays pending without a method... Screenshot attached: API Call
This is my API code:
router.put("/carrousel-item/update/:id", fileUpload, (req, res) => {
  req.getConnection((err, conn) => {
    try {
      const image = fs.readFileSync(
        path.join(__dirname, "../images/" + req.file.filename)
      );
      const title_en = req.body.title_en;
      const title_es = req.body.title_es;
      const sub_title_color_1_en = req.body.sub_title_color_1_en;
      const sub_title_color_1_es = req.body.sub_title_color_1_es;
      const sub_title_color_2_en = req.body.sub_title_color_2_en;
      const sub_title_color_2_es = req.body.sub_title_color_2_es;
      try {
        conn.query(
          `UPDATE home_team_section SET ? WHERE id = ?`,
          [
            {
              title_en: title_en,
              title_es: title_es,
              sub_title_color_1_en: sub_title_color_1_en,
              sub_title_color_1_es: sub_title_color_1_es,
              sub_title_color_2_en: sub_title_color_2_en,
              sub_title_color_2_es: sub_title_color_2_es,
              image: image,
            },
          ],
          (err, rows) => {
            res.send(rows);
          }
        );
      } catch (error) {
        console.log(error);
      }
    } catch (error) {} // NOTE: errors are silently swallowed here
  });
});
This problem is not letting me advance with the project, since it does not let me execute any new API.
Note: I have already added cors().
I hope you can help me.
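As background, the OPTIONS request is the browser's CORS preflight; it is expected for a cross-origin PUT and must be answered before the route handler runs. A minimal sketch of wiring this up with the standard cors package (the app setup is an assumption, not the question's actual file):

const express = require("express");
const cors = require("cors");

const app = express();

// Enable CORS for all routes and explicitly answer preflight
// (OPTIONS) requests so the browser proceeds with the real PUT.
app.use(cors());
app.options("*", cors());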
const ModelLabel = require("../models/modelLabel");
const express = require("express");
const router = express.Router();
// NOTE: `pool` is used below but was not imported in the original snippet;
// presumably it is the pg connection pool from the project's config.

class ControllerLable {
  static label(request, response) {
    ModelLabel.renderLabel((err, data) => {
      if (err) {
        response.send(err);
      } else {
        // console.log(data);
        response.render('labelView', { newResult: data });
      }
    });
  }

  static addSongGet(cb) {
    let query = `
      SELECT l."name", l.id FROM "Labels" l;`;
    pool.query(query, (err, res) => {
      if (err) {
        cb(err);
      } else {
        let newResult = res.rows;
        cb(null, newResult);
      }
    });
  }

  static addSongPost(request, cb) {
    const title1 = request.body.title;
    const bandName1 = request.body.bandName;
    const duration1 = request.body.duration;
    const genre1 = request.body.genre;
    const lyric1 = request.body.lyric;
    const imageUrl1 = request.body.imageUrl;
    const label1 = request.body.label;
    const createdAt1 = request.body.createdAt;
    let error = [];
    if (!title1) {
      error.push("Title is required");
    }
    if (title1.length >= 100) {
      error.push("Title maximum character is 100");
    }
    if (!bandName1) {
      error.push("BandName is required");
    }
    if (!duration1) {
      error.push("Duration is required");
    }
    if (duration1 < 60) {
      error.push("Minimum Duration is 60 seconds");
    }
    if (!genre1) {
      error.push("Genre is required");
    }
    if (!lyric1) {
      error.push("Lyric is required");
    }
    if (lyric1) {
      let countSpace = 0;
      for (let i = 0; i < lyric1.length; i++) {
        if (lyric1[i] === " ") {
          countSpace++;
        }
      }
      // console.log(countSpace);
      if (countSpace < 10) {
        error.push("Minimum word count in lyric is 10");
      }
    }
    if (!imageUrl1) {
      error.push("Image Url is required");
    }
    if (imageUrl1.length >= 50) {
      error.push("ImageUrl name maximum character is 50");
    }
    if (!label1) {
      error.push("Label Company is required");
    }
    if (!createdAt1) {
      error.push("Date is required");
    }
    if (createdAt1) {
      let currentDate = new Date().toJSON().slice(0, 10);
      if (createdAt1 > currentDate) {
        error.push("Maximum created date is today");
      }
    }
    // console.log(error);
    if (error.length !== 0) {
      // console.log(error);
      cb(error);
    } else {
      let vote;
      const { title, bandName, duration, genre, lyric, imageUrl, label, createdAt } = request.body;
      const values = [title, bandName, duration, genre, createdAt, lyric, imageUrl, vote = 0, label];
      let query = `
        INSERT INTO "Songs" ("title", "bandName", "duration", "genre", "createdDate", lyric, "imageUrl", "totalVote", "LabelId")
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9);`;
      pool.query(query, values, (err, res) => {
        if (err) {
          cb(err);
          console.log('QUERY ERROR');
        } else {
          console.log("Data added successfully");
          cb(null, 1);
        }
      });
    }
  }
}

module.exports = ControllerLable;

GraphQL mutation wrong result

I am having some trouble mutating data with GraphQL. I am saving the original image into my database and then mutating it with GraphQL (external).
At the following points in the code I get the correct data:
####################1111111111111111111####################
####################222222222222222####################
####################333333333333333####################
But at point
####################444444444444444444####################
after I mutate the data, I am getting the wrong image src. It is the edited image src and not the original src I retrieved from the database in my revertImages() function.
Although I pass the variable "newVariable" with the correct data, the mutation applies the edited data I had previously passed instead of the newVariable data. Do I maybe need to clear the cache?
The newVariable data is:
{
  productId: 'gid://shopify/Product/6166892019882',
  image: {
    altText: '',
    id: 'gid://shopify/ProductImage/23268973543594',
    src: 'https://cdn.shopify.com/s/files/1/0508/3516/1258/products/180622-05-2.jpg?v=1611416719'
  }
}
After mutation the result is:
{
  productImageUpdate: {
    image: {
      altText: null,
      id: 'gid://shopify/ProductImage/23268973543594',
      src: 'https://cdn.shopify.com/s/files/1/0508/3516/1258/products/180622-05-2.jpg?v=1611416762'
    },
    userErrors: []
  }
}
Here are my functions:
const revertImages = async (ctx) => {
  let dataToRevert = ctx.request.body.data;
  const { accessToken } = ctx.session;
  let productsDoc = await Product.find({ productId: { $in: dataToRevert.productId } });
  if (!productsDoc) {
    ctx.throw('Could not find products');
  }
  console.log('####################1111111111111111111####################');
  console.log(productsDoc);
  console.log('####################1111111111111111111####################');
  const res = await revertProductImages(productsDoc, accessToken);
  if (res) {
    console.log('Products reverted');
    ctx.response.status = 200;
  }
}

async function revertProductImages(products, accessToken) {
  console.log('Revert Product Images');
  return new Promise((resolve, reject) => {
    const map = {};
    let updateProduct = null;
    let variables = null;
    products.forEach(async (item) => {
      map[item.productId] = map[item.productId] + 1 || 1;
      variables = {
        "productId": `gid://shopify/Product/${item.productId}`,
        "image": {
          "altText": "",
          "id": `gid://shopify/ProductImage/${item.imageId}`,
          "src": item.originalSrc
        }
      };
      console.log('####################222222222222222####################');
      console.log(variables);
      console.log('####################222222222222222####################');
      updateProduct = await updateProductImage(UPDATE_PRODUCT_BY_ID, variables, accessToken);
      if (updateProduct) {
        const res = await removeProductFromDb(map);
        if (res) {
          resolve(res);
        }
      }
    });
  });
}

async function updateProductImage(queryName, variables, token) {
  console.log('updateProductImage..');
  const newVariable = variables;
  console.log('####################333333333333333####################');
  console.log(newVariable);
  console.log('####################333333333333333####################');
  return new Promise(async (resolve, reject) => {
    let res = null;
    try {
      res = await axios({
        headers: {
          'X-Shopify-Access-Token': token,
        },
        method: 'post',
        data: {
          query: queryName,
          variables: newVariable,
        },
        url: url,
      });
    } catch (err) {
      console.log(err.message);
    }
    if (res) {
      console.log('Image updated ✔️');
      console.log('####################444444444444444444####################');
      console.log(res.data.data);
      console.log('####################444444444444444444####################');
      resolve(res);
    } else {
      reject('Can not update image');
    }
  }).catch((err) => {
    console.log(err);
  });
}
Maybe I didn't understand correctly, but here's an answer...
I cannot find the src property among the available fields for this mutation (I'm not talking about the ImageInput but the image object).
Can you try the following request:
mutation productImageUpdate($productId: ID!, $image: ImageInput!) {
  productImageUpdate(productId: $productId, image: $image) {
    image {
      id
      originalSrc
      transformedSrc
    }
    userErrors {
      field
      message
    }
  }
}
The docs say:
originalSrc: the location of the original image as a URL.
transformedSrc: the location of the transformed image as a URL.
Maybe what you are expecting is in originalSrc?
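If it helps, a minimal sketch of sending that mutation with axios, shaped like the updateProductImage function in the question (UPDATE_PRODUCT_IMAGE is a hypothetical constant holding the mutation above; url, token, and newVariable are the question's own values):

const res = await axios({
  url: url, // the shop's GraphQL endpoint, as in the question
  method: 'post',
  headers: { 'X-Shopify-Access-Token': token },
  data: {
    query: UPDATE_PRODUCT_IMAGE, // the productImageUpdate mutation above
    variables: newVariable,
  },
});
// per the docs quoted above, originalSrc should hold the unedited URL
console.log(res.data.data.productImageUpdate.image.originalSrc);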

Counter not increasing in async map function

I am working with MongoDB and NodeJS. I have an array of customers, and I have to create each one in the database.
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
  }
});
await Promise.all([...promises2]);
The issue is that the counter is not increasing every time; I am getting the same counter in all the created customers. What is the issue here?
There is a similar issue here, but it doesn't have an answer.
The problem is that all the calls overlap. Since the first thing they each do is get the current counter, they all get the same counter, then try to use it. Fundamentally, you don't want to do this:
const counter = await Counter.findOne({ type: "Customer" });
// ...
await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
...because it creates a race condition: overlapping asynchronous operations can both get the same sequence value and then both issue an update to it.
You want an atomic operation for incrementing and retrieving a new ID. I don't use MongoDB, but I think the findOneAndUpdate operation can do that for you if you add the returnNewDocument option. If so, the minimal change would be to swap over to using that:
const promises2 = customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
});
await Promise.all([...promises2]);
...but there's no reason to create an array and then immediately copy it, just use it directly:
await Promise.all(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
The overall operation will fail if anything fails, and only the first failure is reported back to your code (the other operations then continue and succeed or fail as the case may be). If you want to know everything that happened (which is probably useful in this case), you can use allSettled instead of all:
// Gets an array of {status, value/reason} objects
const results = await Promise.allSettled(customers.map(async customer => {
  if (!customer.customerId) {
    const counter = await Counter.findOneAndUpdate(
      { type: "Customer" },
      { $inc: { sequence_value: 1 } },
      { returnNewDocument: true }
    );
    console.log({ counter });
    const payload = {
      customerId: counter.sequence_value,
    };
    await Customer.create(payload);
  }
}));
const errors = results.filter(({ status }) => status === "rejected").map(({ reason }) => reason);
if (errors.length) {
  // Handle/report errors here
}
Promise.allSettled is new in ES2020, but easily polyfilled if needed.
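A side note, since the models in the question look like Mongoose: there, the equivalent of the native driver's returnNewDocument option is { new: true }, which makes findOneAndUpdate resolve with the post-update document. A minimal sketch under that assumption:

// Assuming Counter is a Mongoose model: { new: true } returns the
// document as it is after the $inc, so each caller sees a distinct value.
const counter = await Counter.findOneAndUpdate(
  { type: "Customer" },
  { $inc: { sequence_value: 1 } },
  { new: true }
);
console.log(counter.sequence_value); // atomically allocated ID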
If I'm mistaken about the above use of findOneAndUpdate in some way, I'm sure MongoDB gives you a way to get those IDs without a race condition. But in the worst case, you can pre-allocate the IDs instead, something like this:
// Allocate IDs (in series)
const ids = [];
for (const customer of customers) {
  if (!customer.customerId) {
    const counter = await Counter.findOne({ type: "Customer" });
    await Counter.findOneAndUpdate({ type: "Customer" }, { $inc: { sequence_value: 1 } });
    ids.push(counter.sequence_value);
  }
}
// Create customers (in parallel)
const results = await Promise.allSettled(customers.map(async (customer, index) => {
  const customerId = ids[index];
  try {
    await Customer.create({
      customerId
    });
  } catch (e) {
    // Failed, remove the counter, but without allowing any error doing so to
    // shadow the error we're already handling
    try {
      await Counter.someDeleteMethodHere(/*...customerId...*/);
    } catch (e2) {
      // ...perhaps report `e2` here, but don't shadow `e`
    }
    throw e;
  }
}));
// Get just the errors
const errors = results.filter(({ status }) => status === "rejected").map(({ reason }) => reason);
if (errors.length) {
  // Handle/report errors here
}
Your map function is not returning a promise.
Try this:
const promises2 = customers.map((customer) => {
  return new Promise(async (resolve) => {
    if (!customer.customerId) {
      const counter = await Counter.findOne({ type: 'Customer' });
      console.log({ counter });
      const payload = {
        customerId: counter.sequence_value,
      };
      await Customer.create(payload);
      await Counter.findOneAndUpdate({ type: 'Customer' }, { $inc: { sequence_value: 1 } });
    }
    resolve();
  });
});
await Promise.all(promises2);

rxjs: subscribing late results in empty stream

I have the following piece of code. As is, with a couple of lines commented out, it works as expected: I subscribe to a stream, do some processing and stream the data to the client. However, if I uncomment those lines, my stream is always empty, i.e. count in getEntryQueryStream is always 0. I suspect it has to do with the fact that I subscribe to the stream late and thus miss all the values.
// a wrapper of the mongodb driver => returns rxjs streams
import * as imongo from 'imongo';
import * as Rx from 'rx';
import * as _ from 'lodash';
import { elasticClient } from '../helpers/elasticClient';

const { ObjectId } = imongo;

function searchElastic({ query, sort }, limit) {
  const body = {
    size: 1,
    query,
    _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
    sort
  };
  // keep the search results "scrollable" for 30 secs
  const scroll = '30s';
  let count = 0;
  return Rx.Observable
    .fromPromise(elasticClient.search({ index: 'data', body, scroll }))
    .concatMap(({ _scroll_id, hits: { hits } }) => {
      const subject = new Rx.Subject();
      // subject needs to be subscribed to before adding new values
      // and therefore completing the stream => execute in next tick
      setImmediate(() => {
        if (hits.length) {
          // initial data
          subject.onNext(hits[0]._source);
          // code that breaks
          //if (limit && ++count === limit) {
          //  subject.onCompleted();
          //  return;
          //}
          const handleDoc = (err, res) => {
            if (err) {
              subject.onError(err);
              return;
            }
            const { _scroll_id, hits: { hits } } = res;
            if (!hits.length) {
              subject.onCompleted();
            } else {
              subject.onNext(hits[0]._source);
              // code that breaks
              //if (limit && ++count === limit) {
              //  subject.onCompleted();
              //  return;
              //}
              setImmediate(() =>
                elasticClient.scroll({ scroll, scrollId: _scroll_id },
                  handleDoc));
            }
          };
          setImmediate(() =>
            elasticClient.scroll({ scroll, scrollId: _scroll_id },
              handleDoc));
        } else {
          subject.onCompleted();
        }
      });
      return subject.asObservable();
    });
}

function getElasticQuery(searchString, filter) {
  const query = _.cloneDeep(filter);
  query.query.filtered.filter.bool.must.push({
    query: {
      query_string: {
        query: searchString
      }
    }
  });
  return _.extend({}, query);
}

function fetchAncestors(ancestorIds, ancestors, format) {
  return imongo.find('session', 'sparse_data', {
    query: { _id: { $in: ancestorIds.map(x => ObjectId(x)) } },
    fields: { name: 1, type: 1 }
  })
  .map(entry => {
    entry.id = entry._id.toString();
    delete entry._id;
    return entry;
  })
  // we don't care about the results
  // but have to wait for stream to finish
  .defaultIfEmpty()
  .last();
}

function getEntryQueryStream(entriesQuery, query, limit) {
  const { parentSearchFilter, filter, format } = query;
  return searchElastic(entriesQuery, limit)
    .concatMap(entry => {
      const ancestors = entry.ancestors || [];
      // if no parents => doesn't match
      if (!ancestors.length) {
        return Rx.Observable.empty();
      }
      const parentsQuery = getElasticQuery(parentSearchFilter, filter);
      parentsQuery.query.filtered.filter.bool.must.push({
        terms: {
          id: ancestors
        }
      });
      // fetch parent entries
      return searchElastic(parentsQuery)
        .count()
        .concatMap(count => {
          // no parents match query
          if (!count) {
            return Rx.Observable.empty();
          }
          // fetch all other ancestors that weren't part of the query results
          // and are still a string (id)
          const restAncestorsToFetch = ancestors.filter(x => _.isString(x));
          return fetchAncestors(restAncestorsToFetch, ancestors, format)
            .concatMap(() => Rx.Observable.just(entry));
        });
    });
}

function executeQuery(query, res) {
  try {
    const stream = getEntryQueryStream(query);
    // stream is passed on to another function here where we subscribe to it like:
    // stream
    //   .map(x => whatever(x))
    //   .subscribe(
    //     x => res.write(x),
    //     err => console.error(err),
    //     () => res.end());
  } catch (e) {
    logger.error(e);
    res.status(500).json(e);
  }
}
I don't understand why those few lines of code break everything or how I could fix it.
Your use case is quite complex; you can start off by building up the searchElastic method like the pattern below.
Convert elasticClient.scroll to an observable first.
Set up the initial data for elasticClient.search().
When the search resolves, you get your scroll id.
The expand() operator lets you recursively execute the elasticClientScroll observable.
Use map to select the data you want to return.
Use takeWhile to decide when to complete the stream.
The end result: once you call searchElastic().subscribe(), the stream emits continuously until there is no more data to fetch.
Hope this structure is correct and can get you started.
function searchElastic({ query, sort }, limit) {
  // elasticClient.scroll takes a node-style (err, result) callback
  const elasticClientScroll = Observable.fromNodeCallback(elasticClient.scroll, elasticClient);
  let obj = {
    body: {
      size: 1,
      query,
      _source: { excludes: ['logbookType', 'editable', 'availabilityTag'] },
      sort
    },
    scroll: '30s'
  };
  return Observable.fromPromise(elasticClient.search({ index: 'data', body: obj.body, scroll: obj.scroll }))
    .expand(({ _scroll_id, hits: { hits } }) => {
      // guess there is more logic here .....
      // to update the scroll id or something
      return elasticClientScroll({ scroll: obj.scroll, scrollId: _scroll_id })
        .map(res => res); //.. select the res you want to return
    })
    .takeWhile(res => res.hits.hits.length); // hits.hits is the array of documents
}
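A usage sketch under the same assumptions (the question's elasticClient and an arbitrary query): rather than the shared count variable that broke the original, the limit can live on the stream itself.

// Hypothetical usage: emit at most 10 documents, then complete.
searchElastic({ query: { match_all: {} }, sort: ['_doc'] }, 10)
  .take(10) // enforce the limit as an operator instead of a mutable counter
  .subscribe(
    doc => console.log('doc', doc),
    err => console.error(err),
    () => console.log('done'));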
