Handling HTTP responses one by one in Angular 6 - JavaScript

Actually I have 15 HTTP requests which are sent to the API.
All I want to do is handle the responses one by one, without waiting for all of the requests to finish (one of the requests can take several minutes to return its result).
Service side:
findOneByOne(): Observable<any> {
  const calls = this.getCardsPath().map(el => this.getPromises(el));
  return Observable.forkJoin(calls)
    .map(res => {
      const tab = [];
      for (let i = 0; i < res.length; i++) {
        tab.push(this.checkInfoService(res[i].json()));
      }
      return tab;
    });
}

getPromises(str: String): Promise<any> {
  return this.requester.obtain({
    restUrl: "/administration/" + str,
    method: RequestMethod.Get
  })
    .toPromise()
    .then(res => res)
    .catch(err => err);
}
Component side:
displayDashboardInfoService() {
  if (this.featuresFlag.getCurrentVersion() !== "1.08" && this.featuresFlag.getCurrentVersion() !== "-1") {
    this.busy = this.dashboardInfoService.findAll()
      .then((res: DashboardInfo[]) => this.findPaths(res))
      .then((res: DashboardInfo[]) => this.loadItems(res))
      .catch((err: any) => {
        if (environment.debugLevel >= 3) console.error(err);
      });
  } else {
    this.dashboardInfoService.findOneByOne()
      .subscribe((res) => {
        const tab = [];
        for (let i = 0; i < res.length; i++) {
          tab.push(res[i][0]);
        }
        this.findPaths(tab);
        this.loadItems(tab);
      });
  }
}
Thanks :)

A solution would be to change the forkJoin to merge, so that instead of getting a single event when all the requests are done, you get an event each time one of them finishes.
If you had, for example, this:
waitForAll() {
  this.values = [];
  this.loadAllAtOnce([100, 200, 300, 400, 3000])
    .subscribe(values => {
      this.values = values;
    });
}

loadAllAtOnce(values: number[]) {
  return forkJoin(
    values.map(x => of(x).pipe(delay(x)))
  ).pipe(
    tap(values => {
      console.log(values);
    })
  );
}
It could be rewritten to this:
asTheyCome() {
  this.values = [];
  this.loadAsSoonAsAvailable([100, 200, 300, 400, 3000])
    .subscribe(value => {
      this.values.push(value);
    });
}

loadAsSoonAsAvailable(values: number[]) {
  return merge(
    ...values.map(x => of(x).pipe(delay(x)))
  ).pipe(
    tap(value => console.log(value))
  );
}
You can find a working example here.
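Applied to the service from the question, a rough sketch of the merge approach might look like this (untested; it assumes the same getCardsPath(), requester.obtain() and checkInfoService() helpers as above, and RxJS 6 pipeable operators, i.e. import { merge } from 'rxjs' and import { map } from 'rxjs/operators'):
findOneByOne(): Observable<any> {
  // Build one observable per card; each maps its raw response through checkInfoService.
  const calls = this.getCardsPath().map(el =>
    this.requester.obtain({
      restUrl: "/administration/" + el,
      method: RequestMethod.Get
    }).pipe(map(res => this.checkInfoService(res.json())))
  );
  // merge emits each processed result as soon as its request finishes.
  return merge(...calls);
}
The component's subscribe callback then receives one processed card at a time, so it should handle each emission individually (push it into tab or pass it to loadItems) instead of looping over a completed array.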

Related

GitHub API requests to get language usage

I'm drowning in Promises trying to get what I thought would be a simple piece of data back from my GitHub repos.
All I want to do is find the percentage of language usage across each of my repositories on GitHub. I've got it working, but I'm pretty confident I'm miles away from doing it correctly or as succinctly as I could if I knew what I was doing. I would really appreciate any feedback on how to go about this the right way, and possibly a refactor that uses more up-to-date methods.
Here's my code so far (which works). Ignore how I'm outputting it for now; that's just for the proof of concept.
const username = '<GH_USERNAME>'; // your username here
const apiUrl = `https://api.github.com/users/${username}/repos`;

fetch(apiUrl)
  .then((response) => {
    if (response.ok) {
      return response.json();
    }
  })
  .then((data) => {
    languagePromises = [];
    for (let i in data) {
      let repo = data[i];
      if (!repo['fork'] && repo['language']) {
        // console.log(repo['name'], repo['language']);
        let langs = repo['languages_url'];
        languagePromises.push(fetch(langs));
      }
    }
    Promise.all(languagePromises)
      .then((responses) => {
        let languages = [];
        for (let r of responses) {
          if (r.ok) languages.push(r.json());
        }
        Promise.all(languages)
          .then((resolved) => {
            let languageTotals = {};
            for (langData of resolved) {
              for (lang in langData) {
                if (!languageTotals[lang]) {
                  languageTotals[lang] = 0;
                }
                languageTotals[lang] += (langData[lang]);
              }
            }
            // console.log(languageTotals);
            let sum = 0;
            for (let l in languageTotals) { sum += languageTotals[l] }
            for (let l in languageTotals) {
              let p = (languageTotals[l] / sum * 100).toFixed(0) + '%';
              document.write(l, ' ', p, '<br>');
            }
          });
      });
  })
  .catch((e) => {
    console.error('Error:', e);
  });
Like I say, it's working, but I'm basically looking for advice on how to do this properly.
Here's an example that cleans/reduces a lot of your code:
const username = '<GH_USERNAME>'; // your username, as in the question
const apiUrl = `https://api.github.com/users/${username}/repos`;

ghFetchJson(apiUrl)
  .then((data) =>
    Promise.all(data
      .filter(repo => !repo.fork && repo.language)
      .map(repo => ghFetchJson(repo.languages_url))
    )
  )
  .then(languageResults => {
    const languageTotals = new Map();
    for (const langData of languageResults) {
      for (const [lang, count] of Object.entries(langData)) {
        languageTotals.set(lang, (languageTotals.get(lang) ?? 0) + count);
      }
    }
    const sum = Array.from(languageTotals).reduce((acc, cur) => acc + cur[1], 0);
    for (const [language, count] of languageTotals) {
      const p = (count / sum * 100).toFixed(0) + '%';
      console.log('%s: %s', language, p);
    }
  })
  .catch((e) => {
    console.error('Error:', e);
  });

async function ghFetchJson(url) {
  const res = await fetch(url);
  return res.json();
}
const username = 'octocat';
const apiUrl = `https://api.github.com/users/${username}/repos`;

fetch(apiUrl)
  .then((response) => {
    if (!response.ok) {
      throw new Error('Request failed');
    }
    return response.json();
  })
  .then((repos) => repos.filter(repo => !repo['fork'] && repo['language']).map(repo => fetch(repo['languages_url'])))
  .then(e => Promise.all(e))
  .then((resps) => resps.filter(r => r.ok).map(r => r.json()))
  .then(e => Promise.all(e))
  .then((resolved) => {
    console.log(resolved);
    const languageTotals = {};
    for (const langData of resolved) {
      for (const lang in langData) {
        if (!languageTotals[lang]) {
          languageTotals[lang] = 0;
        }
        languageTotals[lang] += (langData[lang]);
      }
    }
    let sum = 0;
    for (const lang in languageTotals) {
      sum += languageTotals[lang];
    }
    for (const lang in languageTotals) {
      const p = (languageTotals[lang] / sum * 100).toFixed(0) + '%';
      document.write(lang, ' ', p, '<br>');
    }
  })
  .catch((e) => {
    console.error('Error:', e);
  });
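Since the question also asks for a refactor using more up-to-date methods, the same flow can be expressed with async/await; a sketch (untested, reusing the ghFetchJson helper from above):
// Sketch: same logic as the .then() chains above, written with async/await.
async function languageUsage(username) {
  const repos = await ghFetchJson(`https://api.github.com/users/${username}/repos`);
  const languageResults = await Promise.all(
    repos
      .filter(repo => !repo.fork && repo.language)
      .map(repo => ghFetchJson(repo.languages_url))
  );
  const totals = {};
  for (const langData of languageResults) {
    for (const [lang, count] of Object.entries(langData)) {
      totals[lang] = (totals[lang] ?? 0) + count;
    }
  }
  const sum = Object.values(totals).reduce((a, b) => a + b, 0);
  return Object.fromEntries(
    Object.entries(totals).map(([lang, count]) => [lang, (count / sum * 100).toFixed(0) + '%'])
  );
}

languageUsage('octocat').then(console.log).catch(console.error);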

Promise should return data after two for loops end

I have a function which gets data from an API:
const countries = [
  'Spain',
  'England',
];

const getLeagueID = () => {
  const newData = [];
  return new Promise(resolve => {
    for (let i = 0; i < countries.length; i++) {
      getApi(`https://www.thesportsdb.com/api/v1/json/1/search_all_leagues.php?c=${countries[i]}`)
        .then(({ countrys }) => {
          countrys.forEach((league, index) => {
            if (league.strSport === 'Soccer') {
              const getData = {
                strSport: league.strSport,
                strLeague: league.strLeague,
              };
              newData.push(getData);
            }
            if (index === countrys.length - 1 && i === countries.length - 1) {
              resolve(newData);
            }
          });
        })
        .catch(err => {
          console.log(err);
        });
    }
  });
};
In the first for loop I iterate over the countries from the list.
When the API returns data I run a second forEach. Inside it I take the data and push it into the newData array. The problem is with the resolve:
if (index === countrys.length - 1 && i === countries.length - 1) {
  resolve(newData);
}
I don't know how to write an if condition that waits for both the forEach and the for loop to finish. My if condition is wrong because it doesn't return all the data: the first time it returns 3 records, the next time 7 records.
This works, but I'm sure it can be improved:
const getLeagueID = () => {
  return new Promise((resolve, reject) => {
    const promises = [];
    for (let i = 0; i < countries.length; i++) {
      promises.push(
        getApi(`https://www.thesportsdb.com/api/v1/json/1/search_all_leagues.php?c=${countries[i]}`)
      );
    }
    Promise.all(promises)
      .then(res => {
        const newData = [];
        res.map(row => {
          const data = JSON.parse(row);
          const { countrys } = data;
          countrys.forEach((league, index) => {
            if (league.strSport === 'Soccer') {
              const getData = {
                strSport: league.strSport,
                strLeague: league.strLeague,
              };
              newData.push(getData);
            }
          });
        });
        resolve(newData);
      })
      .catch(err => {
        console.log(err);
        reject(err);
      });
  });
};
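Since Promise.all already returns a promise, the extra new Promise wrapper above isn't strictly needed. A shorter sketch of the same idea (untested; it assumes getApi resolves with the raw JSON string, as in the code above):
// Sketch: one request per country, then flatten and filter the results.
const getLeagueID = () =>
  Promise.all(
    countries.map(country =>
      getApi(`https://www.thesportsdb.com/api/v1/json/1/search_all_leagues.php?c=${country}`)
    )
  ).then(responses =>
    responses
      .map(row => JSON.parse(row).countrys)
      .reduce((all, countrys) => all.concat(countrys), []) // flatten into one array of leagues
      .filter(league => league.strSport === 'Soccer')
      .map(({ strSport, strLeague }) => ({ strSport, strLeague }))
  );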

Can't access content of an array in react-native

I'm trying to get data from a SQLite Database in React-Native (android). The function returns an Array with data.
If I try logging the array to the console, it works fine, but when I log the length of the Array to the console, it suddenly prints "0".
Functions:
getActByButton(category) {
  return new Promise((resolve) => {
    const activities = [];
    const request = 'SELECT activity FROM activity_button WHERE category = ? AND activity IS NOT NULL';
    this.initDB().then((db) => {
      db.transaction((tx) => {
        tx.executeSql(request, [category]).then(([tx, results]) => {
          //console.log('Query COMPLETED');
          var len = results.rows.length;
          for (let i = len; i > 0; i--) {
            activities.push(results.rows.item(i - 1));
          }
          resolve(activities);
        });
      }).then((result) => {
        this.closeDatabase(db);
      });
    });
  });
}

getIconByAct(activity_ids) {
  return new Promise((resolve) => {
    const icons = [];
    const request = 'SELECT name, icon FROM activity WHERE id = ?';
    this.initDB().then((db) => {
      db.transaction((tx) => {
        for (let id in activity_ids) {
          tx.executeSql(request, [id]).then(([tx, results]) => {
            console.log('Query COMPLETED');
            var len = results.rows.length;
            let icon = results.rows.item(0).icon;
            let description = results.rows.item(0).name;
            icons.push({ icon, description });
          });
        }
        resolve(icons);
      }).then((result) => {
        this.closeDatabase(db);
      });
    });
  });
}
Function-call:
db.getActByButton(this.props.category).then((data) => {
  result_ids = data;
}).then(() => {
  db.getIconByAct(result_ids).then((data) => {
    console.log('DATA:');
    console.log(data);
    console.log(data.length);
    console.log(data[1]);
  });
});
Console:
How is this possible?
And how can I fix it?
Try this:
db.getActByButton(this.props.category).then((data) => {
  result_ids = data;
}).then(() => {
  db.getIconByAct(result_ids).then((data) => {
    console.log(JSON.stringify(data));
    console.log(JSON.stringify(data.length));
  });
});
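If the length still logs as 0, a likely reason is that getIconByAct calls resolve(icons) before the per-id executeSql promises have settled, so the array is empty at the moment it is handed back. A sketch that resolves only after every query has finished might look like this (untested; it assumes activity_ids is an array of ids and the same promise-based SQLite API as in the question):
getIconByAct(activity_ids) {
  const request = 'SELECT name, icon FROM activity WHERE id = ?';
  return this.initDB().then((db) =>
    new Promise((resolve, reject) => {
      db.transaction((tx) => {
        // One query per id; each resolves with the icon/description of its row.
        const queries = activity_ids.map((id) =>
          tx.executeSql(request, [id]).then(([tx, results]) => ({
            icon: results.rows.item(0).icon,
            description: results.rows.item(0).name,
          }))
        );
        // Resolve the outer promise only once every query has produced its row.
        Promise.all(queries).then(resolve, reject);
      }).then(() => this.closeDatabase(db));
    })
  );
}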

javascript optimize .map to spread operator

I am using a recursive function to make async calls if there is an OData nextLink. It works fine as it is, using map to push the items into teamsArray. The problem, however, is that I am looping through each item instead of merging the objects together. I tried to use the following, but to no avail:
teamsArray = {}
teamsArray = { ...teamsArray, ...latestResults }
Current code that does work but is not optimized:
export const fetchAllTeams = () => {
  return dispatch => {
    dispatch(fetchAllTeamsRequest());
  };
};

export const fetchAllTeamsRequest = () => {
  return dispatch => {
    dispatch(getAllTeamStarted());
    let teamsArray = [];
    getAllTeams("", teamsArray, dispatch);
  };
};

const getAllTeams = (url, teamsArray, dispatch) => {
  if (url === "") {
    url = "https://graph.microsoft.com/v1.0/me/memberOf?$top=10";
  }
  const getTeams = adalGraphFetch(fetch, url, {})
    .then(response => {
      if (response.status != 200 && response.status != 204) {
        dispatch(fetchAllTeamsFailure("fout"));
        return;
      }
      response.json().then(result => {
        if (result["#odata.nextLink"]) {
          const teams = objectToArray(result.value);
          teams.map(team => {
            teamsArray.push(team);
          });
          getAllTeams(result["#odata.nextLink"], teamsArray, dispatch);
        } else {
          const latestResult = objectToArray(result.value);
          latestResult.map(team => {
            teamsArray.push(team);
          });
          console.log("the teams", teamsArray);
          dispatch(fetchAllTeamsSucces(result));
        }
      });
    })
    .catch(error => {
      dispatch(fetchAllTeamsFailure(error));
    });
};
Something like this might work for you.
I refactored the paged fetching into an async function that calls itself if there are more items to fetch, then eventually resolves with the full array of results.
Dry-coded, so there may be bugs and YMMV, but hope it helps.
export const fetchAllTeams = () => {
  return dispatch => {
    dispatch(fetchAllTeamsRequest());
  };
};

export const fetchAllTeamsRequest = () => {
  return async dispatch => {
    dispatch(getAllTeamStarted());
    try {
      const teamsArray = await getPaged(
        "https://graph.microsoft.com/v1.0/me/memberOf?$top=10",
      );
      dispatch(fetchAllTeamsSucces(teamsArray));
    } catch (err) {
      dispatch(fetchAllTeamsFailure(err));
    }
  };
};

const getPaged = async (url, resultArray = []) => {
  const response = await adalGraphFetch(fetch, url, {});
  if (response.status != 200 && response.status != 204) {
    throw new Error("failed to fetch teams");
  }
  const result = await response.json();
  objectToArray(result.value).forEach(team => resultArray.push(team));
  if (result["#odata.nextLink"]) {
    // Get more items, passing the accumulated array along.
    return getPaged(result["#odata.nextLink"], resultArray);
  }
  return resultArray; // All done, return the teams array.
};
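If you prefer the spread syntax from the question over push, the accumulation can also be written without mutating the array; a sketch under the same assumptions (objectToArray returns a plain array, untested):
// Sketch: accumulate pages by spreading into a new array instead of pushing.
const getPaged = async (url, resultArray = []) => {
  const response = await adalGraphFetch(fetch, url, {});
  if (response.status != 200 && response.status != 204) {
    throw new Error("failed to fetch teams");
  }
  const result = await response.json();
  const merged = [...resultArray, ...objectToArray(result.value)];
  return result["#odata.nextLink"]
    ? getPaged(result["#odata.nextLink"], merged) // more pages to fetch
    : merged; // all pages collected
};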

Updating many (100k+) documents in the most efficient way in MongoDB

I have a function that runs periodically and updates the item.price of some documents in my Prices collection. The Prices collection has 100k+ items. The function looks like this:
// Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
  let results = {}, i = urls.length, c = 0;
  handler = (err, response, body) => {
    let url = response.request.uri.href;
    results[url] = { err, response, body };
    if (++c === urls.length) {
      cb(results);
    }
  };
  while (i--) {
    request(urls[i], handler);
  }
};
// Function to update the prices in our Prices collection.
const update = (cb) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        id = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`);
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              itemid: id,
              hash_name: key,
              price: jsonResult[key]
            };
          });
          Price.insertMany(allItemsArray).then(docs => {
            logger.info(`Saved docs for ${id}`);
          }, (e) => {
            logger.error(`Error saving docs.`);
          });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
};
As you can see, to avoid iterating through 100k+ documents and updating each and every one of them separately, I delete them all at the beginning, then call the API that gives me these items with prices and use insertMany to insert all of them into my Prices collection.
This updating process will happen every 30 minutes.
But I just now realised: what if some user wants to check the prices while my Prices collection is empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to not delete it? (Remember, there are MANY documents to be updated every 30 mins.) Or is there another solution?
Here's a picture of how my Prices Collection looks (there are 100k docs like these, I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        gameid = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`);
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              game_id: gameid,
              market_hash_name: key,
              price: jsonResult[key]
            };
          });
          let bulk = Price.collection.initializeUnorderedBulkOp();
          allItemsArray.forEach(item => {
            bulk.find({ market_hash_name: item.market_hash_name })
              .upsert().updateOne(item);
          });
          bulk.execute((err, bulkers) => {
            if (err) {
              return logger.error(`Error bulking: ${e}`);
            }
            logger.info(`Updated Items for ${gameid}`);
          });
          // Price.insertMany(allItemsArray).then(docs => {
          //   logger.info(`Saved docs for ${gameid}`)
          // }, (e) => {
          //   logger.error(`Error saving docs.`);
          // });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
};
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes everything and then reinserts it, was also about 10x faster.
Anyone?
From my experience (updating millions of mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
- do all your API calls separately and write the results as BSON into a file
- invoke mongoimport and import that BSON file into a new, empty collection prices_new (JavaScript, let alone high-level OO wrappers, is just too slow for that)
- rename prices_new -> prices with dropTarget=true (this is atomic, hence no downtime)
Schematically, it would look like this in JS:
let fname = '/tmp/data.bson';
let apiUrls = [...];

async function doRequest(url) {
  // perform a request and return an array of records
}

let responses = await Promise.all(apiUrls.map(doRequest));

// if the data is too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);

await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
const update = (cb) => {
  _request(urls, responses => {
    let bulkUpdateOps = [], gameid;
    Object.keys(responses).forEach(url => {
      let response = responses[url];
      gameid = url.split('/')[5].split('?')[0];
      if (response.err) {
        logger.error(`Error in request to ${url}: ${response.err}`);
        return;
      }
      if (response.body) {
        logger.info(`Request to ${url} successful.`);
        let jsonResult = {};
        try {
          jsonResult = JSON.parse(response.body);
        } catch (e) {
          logger.error(`Could not parse.`);
        }
        Object.keys(jsonResult).forEach(key => {
          bulkUpdateOps.push({
            "updateOne": {
              "filter": { market_hash_name: key },
              "update": { "$set": {
                game_id: gameid,
                price: jsonResult[key]
              } },
              "upsert": true
            }
          });
        });
      }
      if (bulkUpdateOps.length === 1000) {
        Price.bulkWrite(bulkUpdateOps).then(result => {
          logger.info(`Updated Items`);
        }).catch(e => logger.error(`Error bulking: ${e}`));
        bulkUpdateOps = [];
      }
    });
    if (bulkUpdateOps.length > 0) {
      Price.bulkWrite(bulkUpdateOps).then(result => {
        logger.info(`Updated Items`);
      }).catch(e => logger.error(`Error bulking: ${e}`));
    }
    if (cb && typeof cb == 'function') {
      cb();
    }
  });
};
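For completeness, the updateMany() route mentioned above only fits when many documents should receive the same new value, since it applies a single $set to every document matching the filter. A minimal sketch (the filter and price here are hypothetical placeholders):
// Sketch: set one price on every document for a given game (hypothetical values).
Price.updateMany(
  { game_id: gameid },            // hypothetical filter
  { $set: { price: newPrice } }   // hypothetical new value
).then(() => logger.info(`updateMany complete`))
 .catch(e => logger.error(`Error updating: ${e}`));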
I have not tested anything, but you can try this; it might be helpful. I am using the bluebird library for concurrency.
const Bluebird = require('bluebird'); // bluebird for controlled concurrency
const request = require('request');

let _request = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) {
        return reject(err);
      }
      resolve(body);
    });
  });
};

const formatRespose = async (response) => {
  // do stuff
  return {
    query: {}, // itemid: id,
    body: {}
  };
};

const bulkUpsert = (allItemsArray) => {
  let bulk = Price.collection.initializeUnorderedBulkOp();
  return new Promise((resolve, reject) => {
    allItemsArray.forEach(item => {
      bulk.find(item.query).upsert().updateOne(item.body);
    });
    bulk.execute((err, bulkers) => {
      if (err) {
        return reject(err);
      }
      return resolve(bulkers);
    });
  });
};

const getAndUpdateData = async (urls) => {
  const allItemsArray = await Promise.all(urls.map(async (url) => {
    const requestData = await _request(url); // requests within a chunk run in parallel
    const formattedData = await formatRespose(requestData); // returns { query: {}, body: {} }
    return formattedData;
  }));
  return bulkUpsert(allItemsArray);
};

function update() {
  // split urls into chunks as per your need, 100/1000
  var i, j, chunkUrls = [],
    chunk = 100;
  for (i = 0, j = urls.length; i < j; i += chunk) {
    chunkUrls.push(urls.slice(i, i + chunk));
  }
  Bluebird.map(chunkUrls, function (chunk) {
    return getAndUpdateData(chunk);
  }, {
    concurrency: 1 // 1 means one chunk of 100 requests is fetched and written to the db at a time
  }).then(function () {
    console.log("done");
  }).catch(function () {
    console.log("error");
  });
}
