Via the API, I would like to list all users. Each page can contain at most 100 items, so I need to get the next_page URL and rerun the function runPages to collect the full list. The stop condition is when next_page == null.
In my code, I can get the next_page URL, but the function doesn't run any further. Can someone figure out what the problem is?
const runPages = async (values) => {
  if (values.next_page != null) {
    for (const field of values.results) {
      row.appendChild(addCell(field.name));
      row.appendChild(addCell(field.email));
      tblbody.appendChild(row);
    }
    values = await checkPages(values.next_page); // get new values.data by url
    runPages(values);
  }
};
runPages(values);
const checkPages = async (value) => {
  return new Promise((resolve, reject) => {
    const getNewPageFromApi = async () => {
      const GetUrl = `${value}`;
      const Doorkey = { username: "XXX", password: "*****" };
      try {
        const response = await Axios.get(GetUrl, { auth: Doorkey });
        if (response.data.next_page != null) {
          resolve(response.data);
        }
      } catch (err) {
        reject("no more data");
      }
    };
    getNewPageFromApi();
  });
};
I don't know if this is the answer you're looking for, but since you're already checking values.next_page != null in runPages, you can call resolve(response.data) directly inside the Promise in checkPages, without checking response.data.next_page != null there. As written, the promise never settles for the last page (its next_page is null, so neither resolve nor reject is ever called), which is why your code stops running.
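For illustration, a minimal sketch of checkPages simplified that way, assuming the same Axios client and credentials as in the question:

const checkPages = async (value) => {
  const Doorkey = { username: "XXX", password: "*****" };
  // Let Axios errors propagate to the caller instead of rejecting manually.
  const response = await Axios.get(`${value}`, { auth: Doorkey });
  // Always return the page data; runPages already checks next_page itself.
  return response.data;
};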
I want to show the current user his information, which is stored in my DB. It is working fine; the only problem is waiting for the data to load. In my code below I am using the await keyword to wait for my data to load, but the onReady function is triggered twice, and my JSON object appears not to include the key, even though it definitely does.
import wixData from "wix-data";
import wixUsers from "wix-users";
async function getTeamData() {
  let user = wixUsers.currentUser;
  let user_id = user.id;
  let options = {
    suppressAuth: true,
    suppressHooks: true,
  };
  wixData
    .get("Members/PrivateMembersData", user_id, options)
    .then((results) => {
      let item = results;
      let email = item.loginEmail;
      let data;
      console.log("Compare email: ", email);
      wixData
        .query("Users")
        .eq("Owner", email)
        .find()
        .then((found) => {
          if (found.items.length > 0) {
            data = found.items;
            console.log(data);
            return data;
          } else {
            console.log("No matching data found!");
            return null;
          }
        });
    })
    .catch((error) => {
      let error_msg = error.message;
      let code = error.code;
      console.log(code, error_msg);
    });
  console.log("Function finished!");
}
function waitForLoading() {
  setTimeout(() => {
    $w("#preloader").hide("FadeOut");
  }, 1500);
}
$w.onReady(async function () {
  let data = null;
  do {
    data = await getTeamData();
  } while (data != null);
  console.log(data);
  let group_name = data[0]["groupName"];
  console.log(group_name);
  $w("#groupNameTxT").text = group_name;
  waitForLoading();
});
Has anyone had a similar problem and knows how to solve it?
Thanks in advance!
Every promise chained with then must return a value; otherwise the result of the chain will be undefined.
In your case, you missed the return in all the promises you created: you must put return before both wixData calls.
This might help you understand how it works
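For illustration, a minimal sketch of getTeamData with those returns added, assuming the same Wix collections and fields as in the question:

async function getTeamData() {
  const user_id = wixUsers.currentUser.id;
  const options = { suppressAuth: true, suppressHooks: true };
  // Returning the whole chain lets `await getTeamData()` receive the items (or null).
  return wixData
    .get("Members/PrivateMembersData", user_id, options)
    .then((item) =>
      // returning the inner query chains it into the outer promise
      wixData.query("Users").eq("Owner", item.loginEmail).find()
    )
    .then((found) => (found.items.length > 0 ? found.items : null))
    .catch((error) => {
      console.log(error.code, error.message);
      return null;
    });
}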
I have an async function that checks the status of an order (checkOrderStatus()). I would like to repeat this function until it returns either "FILLED" or "CANCELED", then use that return value in another function to decide whether to continue or stop. Every order goes through several statuses before becoming "FILLED" or "CANCELED", hence the need to repeat the checkOrderStatus() call (it is an API call).
What I have now is this, to repeat the checkOrderStatus() function:
const watch = filter => {
  return new Promise(callback => {
    const interval = setInterval(async () => {
      if (!(await filter())) return;
      clearInterval(interval);
      callback();
    }, 1000);
  });
};
const watchFill = (asset, orderId) => {
  return watch(async () => {
    const { status } = await checkOrderStatus(asset, orderId);
    console.log(`Order status: ${status}`);
    if (status === 'CANCELED') return false;
    return status === 'FILLED';
  });
};
I then call watchFill() from another function, where I would like to check its return value (true or false) and continue the code if true or stop it if false:
const sellOrder = async (asset, orderId) => {
  try {
    const orderIsFilled = await watchFill(asset, orderId);
    if (orderIsFilled) {
      //… Continue the code (status === 'FILLED'), calling other async functions …
    } else {
      //… Stop the code
      return false;
    }
  } catch (err) {
    console.error('Err sellIfFilled() :', err);
  }
};
However, this does not work. I can see the status being updated in the terminal via the console.log in watchFill(), but it never stops, and most importantly the orderIsFilled variable in sellOrder() never receives a value, whatever watchFill() returns.
How can I achieve the desired behavior?
watch never calls resolve (in the original code, this is misleadingly named callback()) with any value, so there's no way const orderIsFilled = await watchFill(asset, orderId); will populate orderIsFilled with anything but undefined.
If you save the result of await filter() in a variable and pass it to callback as callback(result), your code seems like it should work.
That said, the code can be simplified by using a loop and writing a simple wait function. This way, you can return a value (more natural than figuring out how/when to call resolve), keep the new Promise pattern away from the logic and avoid dealing with setInterval and the bookkeeping that goes with that.
const wait = ms =>
  new Promise(resolve => setTimeout(resolve, ms));

const watch = async (predicate, ms) => {
  for (;; await wait(ms)) {
    const result = await predicate();
    if (result) {
      return result;
    }
  }
};
/* mock the API for demonstration purposes */
const checkOrderStatus = (() => {
  let calls = 0;
  return async () => ({
    status: ++calls === 3 ? "FILLED" : false
  });
})();

const watchFill = (asset, orderId) =>
  watch(async () => {
    const { status } = await checkOrderStatus();
    console.log(`Order status: ${status}`);
    return status === "CANCELED" ? false : status === "FILLED";
  }, 1000);
const sellOrder = async () => {
  try {
    const orderIsFilled = await watchFill();
    console.log("orderIsFilled:", orderIsFilled);
  } catch (err) {
    console.error('Err sellIfFilled() :', err);
  }
};

sellOrder();
You can use recursion like this:
const checkOrderStatus = async () => {
  // ... function does some work ...
  await someOtherFunction(); // you can use other async functions here as well
  // ... function does some more work after returning from await ...
  if (/* status is FILLED or CANCELED */) {
    // return true or false, or some info about the response, for your needs
  } else {
    // return the recursive call so its result propagates up the promise chain
    return checkOrderStatus();
  }
};

// this will resolve once the status is FILLED or CANCELED
await checkOrderStatus();
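This recursion re-queries as fast as the API responds, though. Below is a sketch of the same idea with a pause between attempts, treating checkOrderStatus as the plain API call from the question (the helper and function names and the 1000 ms interval are assumptions, not from the question):

// Hypothetical helper: pause between polls.
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));

const pollOrderStatus = async () => {
  const { status } = await checkOrderStatus();
  if (status === 'FILLED' || status === 'CANCELED') {
    return status === 'FILLED'; // terminal status reached
  }
  await wait(1000); // back off before the next recursive check
  return pollOrderStatus();
};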
The watch function only clears the interval timer once filter resolves with a truthy value; while filter resolves with false, the interval keeps firing. setInterval doesn't wait for an async function to finish executing either, so calls can overlap; you'll have to create the loop yourself. Try this:
const delay = milliseconds =>
  new Promise(resolve => setTimeout(resolve, milliseconds));

const watch = async check => {
  while (true) {
    if (await check()) {
      return;
    }
    await delay(1000);
  }
};
Because watch only resolves when check succeeds, it cannot fail, so you don't need to check the result (though this might itself be a bug in your code, since a canceled order will never succeed):
const sellOrder = async (asset, orderId) => {
  try {
    await watchFill(asset, orderId);
    //… Continue the code (status === 'FILLED'), calling other async functions …
  } catch (err) {
    console.error('Err sellIfFilled() :', err);
  }
};
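One caveat from the question remains: watchFill returns false for a canceled order, so a check-until-truthy loop would spin forever on cancellation. A sketch of one way around that, resolving with the terminal status instead of a boolean (the function name and the 1000 ms interval are assumptions):

// Sketch: poll until the order reaches a terminal status, then return it.
const watchUntilDone = async (asset, orderId) => {
  while (true) {
    const { status } = await checkOrderStatus(asset, orderId);
    if (status === 'FILLED' || status === 'CANCELED') {
      return status; // stop polling on either terminal status
    }
    await delay(1000); // `delay` as defined above
  }
};

// Usage: const orderIsFilled = (await watchUntilDone(asset, orderId)) === 'FILLED';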
p-wait-for contains an excellent implementation of this. You can use it like so:
import pWaitFor from 'p-wait-for';
const watchFill = (asset, orderId) => pWaitFor(async () => {
  const { status } = await checkOrderStatus(asset, orderId);
  console.log(`Order status: ${status}`);
  if (status === 'CANCELED') return false;
  return status === 'FILLED';
}, {
  interval: 1000,
  leadingCheck: false
});
Sorry if my title is not very explicit; I don't know how to explain this properly.
I am trying to use the distinct function in my app, which uses LoopBack 3 and MongoDB. It seems to work, but my endpoint never hits the return inside my function.
This is my code
const distinctUsers = await sellerCollection.distinct('userId', {
  hostId: host.id,
  eventId: {
    "$ne": eventId
  }
}, async function (err, userIds) {
  if (!userIds || userIds.length == 0)
    return [];
  const filter = {
    where: {
      id: {
        inq: userIds
      }
    }
  };
  console.log("should be last");
  return await BPUser.find(filter);
});
console.log(distinctUsers);
console.log("wtf??");
//return [];
If I uncomment the return [], it sends the response and only afterwards shows the should be last log, so even without the return it seems to finish; it is not waiting for the callback. I don't like the way my code looks, so I'll take any pointers on how to make it look better.
It looks like sellerCollection.distinct takes a callback as one of its parameters; you cannot use async/await with a callback-style function, since it doesn't return a promise.
I would suggest turning this call into a promise if you'd like to use async/await:
function findDistinct(hostId, eventId) {
  return new Promise((resolve, reject) => {
    sellerCollection.distinct(
      'userId',
      { hostId, eventId: { "$ne": eventId } },
      function (error, userIds) {
        if (error) {
          reject(error);
          return;
        }
        if (!userIds || userIds.length === 0) {
          resolve([]);
          return;
        }
        resolve(userIds);
      }
    );
  });
}
Then you can use this new function with async/await like so:
async function getDistinctUsers() {
  try {
    const hostId = ...
    const eventId = ...
    const distinctUsers = await findDistinct(hostId, eventId)
    if (distinctUsers.length === 0) {
      return
    }
    const filter = {
      where: {
        id: { inq: distinctUsers }
      }
    }
    const bpUsers = await BPUser.find(filter) // assuming it's a promise
    console.log(bpUsers)
  } catch (error) {
    // handle error
  }
}
I have been wondering why adding a callback to the Mongoose findOneAndUpdate function results in the data being saved twice in the DB.
public async addPersonAsFavorite(userId: string, friendId: string) {
  if (!await this.isPersonAlreadyFriend(userId, friendId)) {
    const friendList = FriendsList.findOneAndUpdate(
      { _id: userId },
      { $push: { friendsList: friendId } },
      { upsert: true, new: true },
      (err, data) => {
        if (err) console.error(err);
        return data;
      }
    );
    return friendList;
  }
}
public async isPersonAlreadyFriend(userId: string, friendId: string) {
  let isFriendFound = false;
  await FriendsList.findById(userId, (err, data) => {
    if (data) {
      console.log(data.friendsList);
    }
    if (err) console.error(err);
    if (data && data.friendsList.indexOf(friendId) > -1) {
      isFriendFound = true;
      console.log('already friend');
    } else {
      console.log('not friend');
      isFriendFound = false;
    }
  });
  return isFriendFound;
}
If I remove the callback, the data only gets saved once.
EDIT: added the second piece of code and a new question.
If someone spams the add-friend button, the friend gets added multiple times: before the first write finishes and the check can prevent it, the person has already been added several times.
How can I make sure the write to the DB completes before allowing the function to be called again?
Maybe the problem is in the isPersonAlreadyFriend method: you are trying to call it with async/await, but you are also passing a callback, which means the method does not return a promise. The right way to use promises with Mongoose looks something like this:
public async isPersonAlreadyFriend(userId: string, friendId: string) {
  let isFriendFound = false;
  const data = await FriendsList.findById(userId);
  if (data) {
    console.log(data.friendsList);
  }
  if (data && data.friendsList.indexOf(friendId) > -1) {
    isFriendFound = true;
    console.log('already friend');
  } else {
    console.log('not friend');
    isFriendFound = false;
  }
  return isFriendFound;
}
Try this and let me know if it helps.
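For the edit about button spamming, one option (a sketch, not part of the answer above) is to drop the separate pre-check and let MongoDB enforce uniqueness atomically with $addToSet, which only pushes a value that is not already in the array. Note too that passing a callback to findOneAndUpdate while also awaiting or returning the query can make Mongoose execute it twice, which may explain the double save.

public async addPersonAsFavorite(userId: string, friendId: string) {
  // $addToSet is atomic on the server: the membership check and the push
  // happen in one operation, so rapid repeated clicks cannot add duplicates.
  return FriendsList.findOneAndUpdate(
    { _id: userId },
    { $addToSet: { friendsList: friendId } }, // no-op if friendId is already present
    { upsert: true, new: true }
  ); // no callback: awaiting the returned query executes it exactly once
}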
I have a function that runs periodically and updates the item.price of some documents in my Prices collection. The Prices collection has 100k+ items. The function looks like this:
// Just a helper function for multiple GET requests with request.
let _request = (urls, cb) => {
  let results = {}, i = urls.length, c = 0;
  const handler = (err, response, body) => {
    let url = response.request.uri.href;
    results[url] = { err, response, body };
    if (++c === urls.length) {
      cb(results);
    }
  };
  while (i--) {
    request(urls[i], handler);
  }
};
// function to update the prices in our Prices collection.
// function to update the prices in our Prices collection.
const update = (cb) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        id = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`);
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${id} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              itemid: id,
              hash_name: key,
              price: jsonResult[key]
            };
          });
          Price.insertMany(allItemsArray).then(docs => {
            logger.info(`Saved docs for ${id}`);
          }, (e) => {
            logger.error(`Error saving docs.`);
          });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
}
As you can see, to avoid iterating through 100k+ documents and updating each one separately, I delete them all at the beginning, call the API that gives me the items with their prices, and use insertMany to insert all of them into my Prices collection.
This updating process will happen every 30 minutes.
But I just realised: what if a user wants to check the prices while my Prices collection is empty because it's in the middle of updating itself?
The Question
So do I have to iterate through all of them in order to avoid deleting them? (Remember, there are MANY documents to update every 30 minutes.) Or is there another solution?
Here's a picture of how my Prices collection looks (there are 100k docs like these; I just want to update the price property):
Update:
I have re-written my update function a bit and now it looks like this:
const update = (cb = null) => {
  Price.remove({}, (err, remove) => {
    if (err) {
      return logger.error(`Error removing items...`);
    }
    logger.info(`Removed all items... Beginning to update.`);
    _request(urls, (responses) => {
      let url, response, gameid;
      for (url in responses) {
        gameid = url.split('/')[5].split('?')[0];
        response = responses[url];
        if (response.err) {
          logger.error(`Error in request to ${url}: ${err}`);
          return;
        }
        if (response.body) {
          logger.info(`Request to ${url} successful.`);
          let jsonResult = {};
          try {
            jsonResult = JSON.parse(response.body);
          } catch (e) {
            logger.error(`Could not parse.`);
          }
          logger.info(`Response body for ${gameid} is ${Object.keys(jsonResult).length}.`);
          let allItemsArray = Object.keys(jsonResult).map((key, index) => {
            return {
              game_id: gameid,
              market_hash_name: key,
              price: jsonResult[key]
            };
          });
          let bulk = Price.collection.initializeUnorderedBulkOp();
          allItemsArray.forEach(item => {
            bulk.find({ market_hash_name: item.market_hash_name })
              .upsert().updateOne(item);
          });
          bulk.execute((err, bulkers) => {
            if (err) {
              return logger.error(`Error bulking: ${e}`);
            }
            logger.info(`Updated Items for ${gameid}`);
          });
          // Price.insertMany(allItemsArray).then(docs => {
          //   logger.info(`Saved docs for ${gameid}`)
          // }, (e) => {
          //   logger.error(`Error saving docs.`);
          // });
        }
      }
      if (cb && typeof cb == 'function') {
        cb();
      }
    });
  });
}
Notice the bulk variable now (thanks #Rahul), but now the collection takes ages to update. My processor is burning up, and it literally takes 3+ minutes to update 60k+ documents. I honestly feel like the previous method, even though it deletes everything and reinserts it, is 10x faster.
Anyone?
From my experience (updating millions of mongo docs on an hourly basis), here's a realistic approach to very large bulk updates:
- do all your API calls separately and write the results as BSON into a file
- invoke mongoimport and import that BSON file into a new, empty collection prices_new. JavaScript, let alone high-level OO wrappers, is just too slow for that
- rename prices_new -> prices with dropTarget=true (this is atomic, hence no downtime)
Schematically, it would look like this in JS
let fname = '/tmp/data.bson';
let apiUrls = [...];

async function doRequest(url) {
  // perform a request and return an array of records
}

let responses = await Promise.all(apiUrls.map(doRequest));

// if the data is too big to fit in memory, use streams instead of this:
let data = flatMap(responses, BSON.serialize).join('\n');
await fs.writeFile(fname, data);

await child_process.exec(`mongoimport --collection prices_new --drop ${fname}`);
await db.prices_new.renameCollection('prices', true);
There's no need to clear the database and do a fresh insert. You can use the bulkWrite() method for this or use the updateMany() method to do the updates.
You can refactor the existing code to
const update = (cb) => {
  _request(urls, responses => {
    let bulkUpdateOps = [], gameid;
    Object.keys(responses).forEach(url => { // responses is keyed by url
      let response = responses[url];
      gameid = url.split('/')[5].split('?')[0];
      if (response.err) {
        logger.error(`Error in request to ${url}: ${response.err}`);
        return;
      }
      if (response.body) {
        logger.info(`Request to ${url} successful.`);
        let jsonResult = {};
        try {
          jsonResult = JSON.parse(response.body);
        } catch (e) {
          logger.error(`Could not parse.`);
        }
        Object.keys(jsonResult).forEach(key => {
          bulkUpdateOps.push({
            "updateOne": {
              "filter": { market_hash_name: key },
              "update": { "$set": {
                game_id: gameid,
                price: jsonResult[key]
              } },
              "upsert": true
            }
          });
          if (bulkUpdateOps.length === 1000) { // flush a full batch of 1000 ops
            Price.bulkWrite(bulkUpdateOps).then(result => {
              logger.info(`Updated Items`);
            }).catch(e => logger.error(`Error bulking: ${e}`));
            bulkUpdateOps = [];
          }
        });
      }
    });
    if (bulkUpdateOps.length > 0) { // flush the remaining ops
      Price.bulkWrite(bulkUpdateOps).then(result => {
        logger.info(`Updated Items`);
      }).catch(e => logger.error(`Error bulking: ${e}`));
    }
  });
  if (cb && typeof cb == 'function') {
    cb();
  }
}
I have not tested anything, but you can try this; it might be helpful. I am using the Bluebird library for concurrency.
const Bluebird = require('bluebird');

let _request = (url) => {
  return new Promise((resolve, reject) => {
    request(url, (err, response, body) => {
      if (err) {
        return reject(err);
      }
      resolve(body);
    });
  });
};

const formatResponse = async (response) => {
  // do stuff
  return {
    query: {}, // e.g. { itemid: id }
    body: {}
  };
};

const bulkUpsert = (allItemsArray) => {
  let bulk = Price.collection.initializeUnorderedBulkOp();
  return new Promise((resolve, reject) => {
    allItemsArray.forEach(item => {
      bulk.find(item.query).upsert().updateOne(item.body);
    });
    bulk.execute((err, bulkers) => {
      if (err) {
        return reject(err);
      }
      return resolve(bulkers);
    });
  });
};

const getAndUpdateData = async (urls) => {
  // requests within a chunk run in parallel via Promise.all
  const allItemsArray = await Promise.all(urls.map(async (url) => {
    const requestData = await _request(url);
    const formattedData = await formatResponse(requestData); // { query: {}, body: {} }
    return formattedData;
  }));
  return bulkUpsert(allItemsArray);
};
function update() {
  // split urls into chunks as per your need (100/1000)
  var i, j, chunkUrls = [],
    chunk = 100;
  for (i = 0, j = urls.length; i < j; i += chunk) {
    chunkUrls.push(urls.slice(i, i + chunk));
  }
  Bluebird.map(chunkUrls, function (chunk) {
    // each chunk is fetched and upserted before the next one starts
    return getAndUpdateData(chunk);
  }, {
    concurrency: 1 // 1 = process one chunk of 100 requests at a time
  }).then(function () {
    console.log("done");
  }).catch(function () {
    console.log("error");
  });
}