Async/Await in for loop NodeJS not blocking the loop execution - javascript

I know that old school for loop works in the traditional way - that it waits for the await to finish getting results.
But in my use case, I need to read a file from local/s3 and process it line by line, and for each line I need to call an External API.
Generally I use await inside the loop because everything runs inside a Lambda and I don't want to exhaust its memory by running the calls in parallel.
Here I am reading the file with stream.on(), and in order to use await inside the handler I need to mark it async, like so:
stream.on('data', async () => {
  while ((data = stream.read()) !== null) {
    console.log('line');
    const requests = getRequests(); // sync code, no problems
    for (let i = 0; i < requests.length; i++) {
      const result = await apiCall(requests[i]);
      console.log('result from api');
      const finalResult = await anotherapiCall(result.data);
    }
  }
});
This is working, but the order in which the lines are processed is not guaranteed. I need them all processed in order. Any help?
Complete Code
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', async () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
console.log('Read line');
console.log(id);
try {
for (let i = 0;i < XMLRequests.length;i++) {
totalRequestsCount++;
console.log('Sending request');
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log("got response");
const responseObj = await xml2js.parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
failedRequestsCount++;
} else {
successRequestsCount++;
console.log('Response from the Lost Server');
console.log(response.data);
}
}
} catch (err) {
console.log(err);
}
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
});
}
async function sendLoSTRequest (options, params) {
const { axios } = options;
const { url, data } = params;
if (url) {
return axios.post(url, data);
// eslint-disable-next-line no-else-return
} else {
console.log('URL is not found');
return null;
}
}
Code needs to flow like so:
read a line in a sync way
process the line and transform the line into an array of two members
for every member call API and do stuff
once a line is complete, look for the next line, all done in order (see the sketch below)
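Roughly, the flow I'm after looks like this (sketch only; transformLine is a placeholder for the parsing step that turns one line into the two members, and apiCall/anotherapiCall are the calls from the snippet above):
async function processSequentially (readStream) {
  // readable streams are async iterables in modern Node, so this reads one line at a time
  for await (const line of readStream) {
    const members = transformLine(line); // hypothetical parse: one line -> array of two members
    for (const member of members) {
      const result = await apiCall(member);
      await anotherapiCall(result.data); // the second call depends on the first
    }
    // only after both calls finish does the loop pull the next line
  }
}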
UPDATE: I got a workaround, but the stream's 'end' handler fires without waiting for the queued work to finish.
async function processSOIFileLocal (options, params) {
console.log('Process SOI file');
const { ERR_DIR, fs, xml2js, LOST_URL, byline, event } = options;
const { key } = params;
const responseObject = {};
let totalRecordsCount = 0;
let totalRequestsCount = 0;
let failedRequestsCount = 0;
let successRequestsCount = 0;
let dataObject = {};
const queue = (() => {
let q = Promise.resolve();
return fn => (q = q.then(fn));
})();
const readStream = byline.createStream(fs.createReadStream(key));
readStream.setEncoding('utf8');
const pattern = /^UHL\s|^UTL\s/;
const regExp = new RegExp(pattern);
readStream.on('readable', () => {
let line;
while (null !== (line = readStream.read())) {
if (!regExp.test(line.toString())) {
totalRecordsCount++;
dataObject = soiParser(line);
const { id } = dataObject;
const XMLRequests = createLoSTRequestXML(
options,
{ mapping: event.mapping, row: dataObject }
);
// eslint-disable-next-line no-loop-func
queue(async () => {
try {
for (let i = 0;i < XMLRequests.length;i++) {
console.log('Sending request');
console.log(id);
totalRequestsCount++;
const response = await sendLoSTRequest(
options,
{ data: XMLRequests[i],
url: LOST_URL }
);
console.log('got response');
const responseObj = await xml2js.parseStringPromise(response.data);
if (Object.keys(responseObj).indexOf('errors') !== -1) {
// console.log('Response have the error:');
// await handleError(options, { err: responseObj, id });
failedRequestsCount++;
fs.writeFileSync(`${ERR_DIR}/${generateKey()}-${id}.xml`, response.data);
} else {
console.log('Response from the Lost Server');
console.log(response.data);
successRequestsCount++;
}
}
} catch (err) {
console.log(err);
}
});
}
}
})
.on('end', () => {
console.log('file processed');
console.log(`
************************************************
Total Records Processed:${totalRecordsCount}
Total Requests Sent: ${totalRequestsCount}
Success Requests: ${successRequestsCount}
Failed Requests: ${failedRequestsCount}
************************************************
`);
Object.assign(responseObject, {
failedRequestsCount,
successRequestsCount,
totalRecordsCount,
totalRequestsCount
});
});
}
Thank You

The sample code at the top of your question could be rewritten like this:
const queue = (() => {
let q = Promise.resolve();
return (fn) => (q = q.then(fn));
})();
stream.on('data', async() => {
while ((data = stream.read()) !== null) {
console.log('line');
const requests = getRequests(); // sync code,no pblms
queue(async () => {
for (let i = 0; i < requests.length; i++) {
const result = await apiCall(requests[i]);
console.log('result from api');
const finalResult = await anotherapiCall(result.data);
}
});
}
});
Hopefully that will be useful for the complete code
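If you also need the 'end' summary to run only after every queued line has finished (the issue mentioned in your update), you can push that final step onto the same queue, e.g.:
stream.on('end', () => {
  queue(() => {
    // runs only after all previously queued line handlers have completed
    console.log('file processed');
    // print the totals here instead of directly in the 'end' handler
  });
});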

If anyone wants a solution for processing the file synchronously, i.e. reading it line by line and executing some async call per line, it's recommended to use Node's built-in stream Transform. There you can create a transform function and invoke its callback only when the async work finishes, which holds back the next line until the current one is done.
That will help anyone who faces this issue.
Through2 is a small npm library that can also be used for the same purpose.
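For illustration, a rough sketch of that approach (untested; processLine stands in for whatever async work needs to happen per line):
const { Transform } = require('stream');

const lineProcessor = new Transform({
  async transform (line, encoding, callback) {
    try {
      await processLine(line.toString()); // hypothetical per-line async work
      callback(); // only now will the next line be passed in
    } catch (err) {
      callback(err);
    }
  }
});

byline.createStream(fs.createReadStream(key))
  .pipe(lineProcessor)
  .on('finish', () => console.log('file processed'));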

Related

Getting 400 error code when I run axios get request?

I wrote some code to get some info:
const stock = await Stock.find({
exchange: exchange
});
// Here stock array length is 5300
stock.forEach(async (stockEl) => {
const EOD_API = process.env.EOD_HISTORICAL_API
const {data} = await axios.get(`https://eodhistoricaldata.com/api/fundamentals/${stockEl.code}?api_token=${EOD_API}&filter=General::Industry`);
console.log(data);
});
Here I place a GET request for every element of the stock array using the forEach function. Then it gives me an error like in the image:
Click to see images
But when I place it outside of the forEach function, like this:
const EOD_API = process.env.EOD_HISTORICAL_API
const {data} = await axios.get(`https://eodhistoricaldata.com/api/fundamentals/${stockEl.code}?api_token=${EOD_API}&filter=General::Industry`);
console.log(data);
Then it gives no error. Remember, the stock array has 5300 elements, which means axios runs 5300 times.
Any solution or idea?
You need to make a few changes:
Replace forEach with for, because forEach is not promise-aware
Use try/catch to catch any errors
Use Promise.allSettled: it allows you to run all the promises together without waiting for each other, which will enhance your app's performance. It returns an array with a status for each promise ("fulfilled" or "rejected")
const fetchSingleStockElement = async (stockEl) => {
try {
const EOD_API = process.env.EOD_HISTORICAL_API,
{ data } = await axios(
`https://eodhistoricaldata.com/api/fundamentals/${stockEl.code}?api_token=${EOD_API}&filter=General::Industry`
);
return data;
} catch (err) {
throw new Error(err);
}
};
const fetchAllStockData = async () => {
let promisesArray = [];
try {
//fetch stock array
const data = await Stock.find({
exchange: exchange
});
//fetch single stock
for (let i = 0; i < data.length; i++) {
promisesArray.push(fetchSingleStockElement(data[i]));
}
const results = await Promise.allSettled(promisesArray);
console.log('results', results);
} catch (err) {
console.log('results error', err);
}
};
Here is a working example with a fake API of 4466 entries:
const fetchSingleAirline = async (airlineId) => {
try {
const { data } = await axios(`https://api.instantwebtools.net/v1/airlines/${airlineId}`);
return data;
} catch (err) {
throw new Error(err);
}
};
const fetchAllAirlineData = async () => {
let promisesArray = [];
try {
const { data } = await axios('https://api.instantwebtools.net/v1/airlines');
for (let i = 0; i < data.length; i++) {
promisesArray.push(fetchSingleAirline(data[i].id));
}
const results = await Promise.allSettled(promisesArray);
console.log('results', results);
} catch (err) {
console.log('results error', err);
}
};
Doing await in forEach doesn't hold the process since forEach is not promise-aware. Try this instead:
(async () => {
for (let index = 0; index < stock.length; index++) {
const EOD_API = process.env.EOD_HISTORICAL_API
const {data} = await axios.get(`https://eodhistoricaldata.com/api/fundamentals/${stock[index].code}?api_token=${EOD_API}&filter=General::Industry`);
console.log(data);
}
})();
More information.
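If the errors come from firing all 5300 requests at once (rate limiting), a middle ground between fully sequential and fully parallel is to send them in fixed-size batches. A rough sketch, where the batch size of 50 is just an example value:
const axios = require('axios');

// Process the stock list in batches so only `batchSize` requests are in flight at a time.
async function fetchInBatches (stock, batchSize = 50) {
  const EOD_API = process.env.EOD_HISTORICAL_API;
  const results = [];
  for (let i = 0; i < stock.length; i += batchSize) {
    const batch = stock.slice(i, i + batchSize).map((stockEl) =>
      axios.get(`https://eodhistoricaldata.com/api/fundamentals/${stockEl.code}?api_token=${EOD_API}&filter=General::Industry`)
        .then(({ data }) => data)
        .catch((err) => ({ code: stockEl.code, error: err.message }))
    );
    // wait for the current batch to settle before starting the next one
    results.push(...await Promise.all(batch));
  }
  return results;
}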

Wait for one function to finish before continuing?

When running the following code, whether tidied into functions or not, it still writes to my file incorrectly. One thing that did work was wrapping those functions inside a setTimeout, with the delay somewhere around 10 seconds. I just didn't like the idea of hardcoding those values and taking any more time to complete than it should. What's a better way of going about this? I need help understanding async/await a little more, as you can tell, but what better way than to fail and ask for help!
genPriceChangeScripts: async () => {
const priceScript = `...`;
const changeData = await get24hrChange();
const globalCmds = [];
const globalPol = [];
const filtered = changeData.filter(function (item) {
return (
!item.symbol.includes("BTCUSDT_") && !item.symbol.includes("ETHUSDT_")
);
});
async function scripts() {
filtered.forEach((e) => {
const data = e.symbol;
const change = priceScript.replace("CHANGE", data);
fs.writeFile(
`../scripts/price_change/${data.toLowerCase()}_price_change.sh`,
change,
function (err) {
if (err) return console.log(err);
}
);
});
console.log("scripts finished");
}
scripts();
async function commands() {
for (let i = 0; i < filtered.length; i++) {
var pushCmds = `"#($CURRENT_DIR/scripts/price_change/${filtered[
i
].symbol.toLowerCase()}_price_change.sh)"`;
globalCmds.push(pushCmds);
}
const commands = globalCmds.join("\n");
const cmdsWithComment = commands.concat("\n#CHANGE3");
fs.readFile("../binance.tmux", "utf-8", (err, data) => {
if (err) {
throw err;
}
const addCmds = data.replace("#CHANGE1", cmdsWithComment);
fs.writeFile("../binance.tmux", addCmds, (err) => {
if (err) {
throw err;
}
});
});
console.log("cmds finished");
}
commands();
async function pols() {
for (let i = 0; i < filtered.length; i++) {
const pushPol = `"\\#{${filtered[
i
].symbol.toLowerCase()}_price_change}"`;
globalPol.push(pushPol);
}
const pol = globalPol.join("\n");
const polWithComment = pol.concat("\n#CHANGE4");
fs.readFile("../binance.tmux", "utf-8", (err, data) => {
if (err) {
throw err;
}
const addPol = data.replace("#CHANGE2", polWithComment);
fs.writeFile("../binance.tmux", addPol, (err) => {
if (err) {
throw err;
}
});
});
console.log("pols finished");
}
pols();
return prompt.end();
},
The issue is that making a function async doesn't make it automatically wait for anything asynchronous going on inside it.
async/await is syntax "sugar" for working with Promises, and Promises only.
So, if you use the promise version of writeFile/readFile, like so:
import * as fs from 'fs/promises';
you can write your code as follows:
genPriceChangeScripts: async() => {
const priceScript = `...`;
const changeData = await get24hrChange();
const globalCmds = [];
const globalPol = [];
const filtered = changeData.filter(function (item) {
return (!item.symbol.includes("BTCUSDT_") && !item.symbol.includes("ETHUSDT_"));
});
async function scripts() {
const promises = filtered.map((e) => {
const data = e.symbol;
const change = priceScript.replace("CHANGE", data);
return fs.writeFile(`../scripts/price_change/${data.toLowerCase()}_price_change.sh`, change);
});
await Promise.all(promises);
console.log("scripts finished");
}
await scripts();
async function commands() {
for (let i = 0; i < filtered.length; i++) {
var pushCmds = `"#($CURRENT_DIR/scripts/price_change/${filtered[i].symbol.toLowerCase()}_price_change.sh)"`;
globalCmds.push(pushCmds);
}
const commands = globalCmds.join("\n");
const cmdsWithComment = commands.concat("\n#CHANGE3");
const data = await fs.readFile("../binance.tmux", "utf-8");
const addCmds = data.replace("#CHANGE1", cmdsWithComment);
await fs.writeFile("../binance.tmux", addCmds);
console.log("cmds finished");
}
await commands();
async function pols() {
for (let i = 0; i < filtered.length; i++) {
const pushPol = `"\\#{${filtered[i].symbol.toLowerCase()}_price_change}"`;
globalPol.push(pushPol);
}
const pol = globalPol.join("\n");
const polWithComment = pol.concat("\n#CHANGE4");
const data = await fs.readFile("../binance.tmux", "utf-8");
const addPol = data.replace("#CHANGE2", polWithComment);
await fs.writeFile("../binance.tmux", addPol);
console.log("pols finished");
}
await pols();
return prompt.end();
},
You need to await the File System operations in order to wait for the asynchronous functions to return a response before proceeding.
await fs.readFile and await fs.writeFile
See this question for further explanation and examples.
This is in addition to adding await to your other async functions to propagate the promise chain correctly.
Not that I'm saying all your code works, but here are the kind of changes I would make to point you in the right direction:
// code above
function scripts(){
const a = [];
filtered.forEach(e=>{
const data = e.symbol, change = priceScript.replace('CHANGE', data);
a.push(new Promise((resolve, reject)=>{
fs.writeFile(`../scripts/price_change/${data.toLowerCase()}_price_change.sh`,
change,
err=>{
if(err){
reject(err);
}
else{
resolve();
}
}
);
}));
});
return Promise.all(a);
}
await scripts();
// code below
Well... actually I would define the function somewhere else or not use it at all, but I think this is what you need to see.

Optimizing load of big data with Javascript

I'm working with a really big array in JS and I can see most of the time is used for loading and parsing the json data.
// is a Chrome Extension (but maybe I can move it to a nodejs app)
This is basically how I'm loading my data:
async function loadData(jsonFiles){
const fullData = [];
for(const jsonFile of jsonFiles){
const localUrl = 'http://localhost/'+jsonFile;
const response = await fetch(jsonFile);
if ( response.ok ){
try{
const data = await response.json();
const L = data.length;
for (let k = 0; k < L; k++) {
fullData.push(data[k]);
}
}
catch(e){
}
}
}
return fullData;
}
Is there any faster way to do that, even if it means saving the data in another way/format?
You can do the fetch calls in parallel, but other than that there's not a lot more you can do:
function loadData(jsonFiles){
return Promise.all(
jsonFiles.map(async file => {
const localUrl = 'http://localhost/' + file;
const response = await fetch(file);
if (response.ok) {
try{
return await response.json();
} catch (e) {
return null;
}
} else {
return null;
}
})
).then(results => {
return results.filter(result => result); // Filter out the `null`s
}).then(results => {
return results.flat(); // Flatten the results into one array
});
}
Make the calls and the JSON transformation parallel:
async function loadData(jsonFiles){
const calls = [];
for(const jsonFile of jsonFiles){
calls.push(fetch(jsonFile).then(response => response.json()));
}
return await Promise.allSettled(calls)
.then(parts => parts.filter(({status}) => status === "fulfilled"))
.then(parts => parts.map(({value}) => value))
.then(parts => parts.flat());
}

Using async and axios to make a weather app

I'm fairly certain it's a detail that I can't remember how to fix. I've gotten the code to pull the data from the URL, but I can't call the setResults() method. I'm sure there is a way around it, but I'm unsure how to do it.
class Test {
constructor() {
this.testResults = document.getElementsByClassName('test-results');
}
async run() {
console.log(new Date().toISOString(), '[Test]', 'Running the test');
// TODO: Make the API call and handle the results
const url = `http://api.openweathermap.org/data/2.5/weather?q=${query}&appid=25e989bd41e3e24ce13173d8126e0fd6&units=imperial`;
//Using the axios libary to call the data and log it.
const getData = async url => {
try {
const response = await axios.get(url);
const data = response.data;
console.log(data);
var results = data;
} catch (error) {
console.log(error);
}
};
getData(url);
}
setError(message) {
// TODO: Format the error
this.testResults.innerHTML = (message || '').toString();
}
setResults(results) {
results = responses()
this.testResults.innerHTML = (results || '').toString();
}
}
The bug that you did not see is probably related to testResults being an HTMLCollection rather than an HTMLElement.
So in order to make the setResults method work properly, you need to adjust it.
Here I'm providing a possible solution.
class Test {
testResults;
constructor() {
this.testResults = document.getElementsByClassName('test-results');
}
async run() {
console.log(new Date().toISOString(), '[Test]', 'Running the test');
// TODO: Make the API call and handle the results
const url = `http://api.openweathermap.org/data/2.5/weather?q=London,uk&appid=25e989bd41e3e24ce13173d8126e0fd6&units=imperial`;
//Using the axios libary to call the data and log it.
const getData = async url => {
try {
const response = await axios.get(url);
const data = response.data;
this.setResults(data);
} catch (error) {
console.log(error);
}
};
getData(url);
}
setError(message) {
// TODO: Format the error
this.testResults[0].innerHTML = (message || '').toString();
}
setResults(results) {
results = JSON.stringify(results);
for(let resultEl of this.testResults) {
resultEl.innerHTML = (results || '').toString();
}
// this.testResults[0].innerHTML = (results || '').toString();
}
}
let testObj = new Test();
testObj.run();

Updating many(100k+) documents in the most efficient way MongoDB

I have a function that runs periodically, that updates the item.price of some Documents in my Prices Collection. The Price Collection has 100k+ items. The function looks like this:
//Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
let results = {}, i = urls.length, c = 0;
handler = (err, response, body) => {
let url = response.request.uri.href;
results[url] = { err, response, body };
if (++c === urls.length) {
cb(results);
}
};
while (i--) {
request(urls[i], handler);
}
};
// function to update the prices in our Prices collection.
const update = (cb) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
id = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
itemid: id,
hash_name: key,
price: jsonResult[key]
}
});
Price.insertMany(allItemsArray).then(docs => {
logger.info(`Saved docs for ${id}`)
}, (e) => {
logger.error(`Error saving docs.`);
});
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
As you can see, to avoid iterating through 100k+ documents and updating each and every one of them separately, I delete them all at the beginning, call the API that gives me these items with prices, and use insertMany to insert all of them into my Prices Collection.
This updating process will happen every 30 minutes.
But I just now realised, what if some user wants to check the Prices and my Prices Collection is currently empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to not delete it? (Remember, there are MANY documents to be updated every 30 mins.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
Price.remove({}, (err, remove) => {
if (err) {
return logger.error(`Error removing items...`);
}
logger.info(`Removed all items... Beginning to update.`);
_request(urls, (responses) => {
let url, response, gameid;
for (url in responses) {
gameid = url.split('/')[5].split('?')[0];
response = responses[url];
if (response.err) {
logger.error(`Error in request to ${url}: ${err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
let allItemsArray = Object.keys(jsonResult).map((key, index) => {
return {
game_id: gameid,
market_hash_name: key,
price: jsonResult[key]
}
});
let bulk = Price.collection.initializeUnorderedBulkOp();
allItemsArray.forEach(item => {
bulk.find({market_hash_name: item.market_hash_name})
.upsert().updateOne(item);
});
bulk.execute((err, bulkers) => {
if (err) {
return logger.error(`Error bulking: ${err}`);
}
logger.info(`Updated Items for ${gameid}`)
});
// Price.insertMany(allItemsArray).then(docs => {
// logger.info(`Saved docs for ${gameid}`)
// }, (e) => {
// logger.error(`Error saving docs.`);
// });
}
}
if (cb && typeof cb == 'function') {
cb();
}
})
});
}
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes everything and then reinserts it, is also about 10x faster.
Anyone?
From my experience (updating millions of mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
do all your API calls separately and write the results as BSON into a file
invoke mongoimport and import that BSON file into a new empty collection prices_new. JavaScript, let alone high-level OO wrappers, is just too slow for that
rename prices_new -> prices with dropTarget=true (this will be atomic, hence no downtime)
Schematically, it would look like this in JS:
let fname = '/tmp/data.bson';
let apiUrls = [...];
async function doRequest(url) {
// perform a request and return an array of records
}
let responses = await Promise.all(apiUrls.map(doRequest));
// if the data too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);
await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
const update = (cb) => {
_request(urls, responses => {
let bulkUpdateOps = [], gameid;
Object.keys(responses).forEach(url => {
let response = responses[url];
gameid = url.split('/')[5].split('?')[0];
if (response.err) {
logger.error(`Error in request to ${url}: ${response.err}`);
return;
}
if (response.body) {
logger.info(`Request to ${url} successful.`)
let jsonResult = {};
try {
jsonResult = JSON.parse(response.body);
} catch (e) {
logger.error(`Could not parse.`);
}
Object.keys(jsonResult).forEach(key => {
bulkUpdateOps.push({
"updateOne": {
"filter": { market_hash_name: key },
"update": { "$set": {
game_id: gameid,
price: jsonResult[key]
} },
"upsert": true
}
});
});
}
if (bulkUpdateOps.length === 1000) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
bulkUpdateOps = [];
}
});
if (bulkUpdateOps.length > 0) {
Price.bulkWrite(bulkUpdateOps).then(result => {
logger.info(`Updated Items`)
}).catch(e => logger.error(`Error bulking: ${e}`));
}
});
if (cb && typeof cb == 'function') {
cb();
}
}
I have not tested anything, but you can try this; it might be helpful. I am using the bluebird library for concurrency.
const Bluebird = require('bluebird'); // bluebird's map gives a concurrency option

let _request = (url) => {
return new Promise((resolve, reject) => {
request(url, (err, response, body) => {
if (err) {
reject(err);
}
resolve(body);
});
});
};
const formatRespose = async (response) => {
// do stuff
return {
query: {}, // itemid: id,
body: {}
};
}
const bulkUpsert = (allItemsArray) => {
let bulk = Price.collection.initializeUnorderedBulkOp();
return new Promise((resolve, reject) => {
allItemsArray.forEach(item => {
bulk.find(item.query).upsert().updateOne(item.body);
});
bulk.execute((err, bulkers) => {
if (err) {
return reject(err);
}
return resolve(bulkers);
});
});
}
const getAndUpdateData = async (urls) => {
const allItemsArray = await Promise.all(urls.map(async (url) => {
const requestData = await _request(url); // you can make this also parallel
const formattedData = await formatRespose(requestData); // return {query: {}, body: {} };
return formattedData;
}));
return await (bulkUpsert(allItemsArray));
};
function update() {
// split urls into as per your need 100/1000
var i, j, chunkUrls = [],
chunk = 100;
for (i = 0, j = urls.length; i < j; i += chunk) {
chunkUrls.push(getAndUpdateData(urls.slice(i, i + chunk)));
}
Bluebird.map(chunkUrls, async function (chunk) {
return await chunk;
}, {
concurrency: 1 // depends on concurrent request change 1 = 100 request get and insert in db at time
}).then(function () {
console.log("done");
}).catch(function () {
console.log("error");
});
}
