I am using Parse Server to live query a class containing rows with pointers.
When I use include() in a normal query, I get all the data for the pointer, but in the live query I only get the objectId.
Code:
var currentUser = Parse.User.current();
const Conversation = Parse.Object.extend("conversations");

var fromQuery = new Parse.Query(Conversation);
fromQuery.equalTo("from", currentUser);

var toQuery = new Parse.Query(Conversation);
toQuery.equalTo("to", currentUser);

var mainQuery = Parse.Query.or(fromQuery, toQuery);
mainQuery.include("to");
mainQuery.include("from");
mainQuery.include("lastMessage");

// FIXME: DEBUG:
this.convsubscription = mainQuery.subscribe();

mainQuery.find().then((conversations) => {
  for (var i = 0; i < conversations.length; i++) {
    var object = conversations[i];
    this.conversations.unshift(object);
  }
});

this.convsubscription.on('update', (object) => {
  // we will get the index of the updated object
  var index = this.conversations.findIndex(x => x.id == object.id);
  console.log(index);
  // then we will remove the old object and insert the updated one
  this.conversations.splice(index, 1, object);
  console.log(JSON.stringify(this.conversations[index].get('lastMessage')));
});
When I do JSON.stringify(this.conversations[index].get('lastMessage')) it only gives me the objectId. I need a way to access the content of the lastMessage pointer.
Regards
includeKey()/include() isn't supported in Live Queries:
This is a server-side issue: the includeKey is ignored when subscribing to the query. The decision tree is processed synchronously after an object is saved on parse-server, so there is no opportunity to inject inclusions. The whole server-side logic would need to be refactored in order to support them.
See related issues to keep track:
https://github.com/parse-community/ParseLiveQuery-iOS-OSX/issues/30
https://github.com/parse-community/parse-server/issues/1686
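Until that is supported, a common workaround, as a rough sketch: when the 'update' event fires, fetch the pointed-to object yourself before using it (or re-run mainQuery.get(object.id), which does honor include()). Parse.Object's fetch() returns a promise, so the handler from the question could be adapted roughly like this:

this.convsubscription.on('update', (object) => {
  var lastMessage = object.get('lastMessage');
  // The live query event only carries a bare pointer, so fetch the full object first.
  var loaded = lastMessage ? lastMessage.fetch() : Promise.resolve(null);
  loaded.then(() => {
    var index = this.conversations.findIndex(x => x.id == object.id);
    this.conversations.splice(index, 1, object);
    console.log(JSON.stringify(this.conversations[index].get('lastMessage')));
  });
});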
I'm working on a restaurant website (a client-side project) and facing a problem. I want to make an admin page that shows me all the orders placed by customers. The approach I chose is to save the order details in local storage, then save them in IndexedDB, and then display the orders on the admin page. I wrote the code below, and it seems to work fine for saving the order and all the customer details:
document.getElementById('submittheorder').onclick = function() {
  let i = 0;
  const versionDB = 1;
  let indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB || window.shimIndexedDB;
  var open = indexedDB.open("CustomersOrders", versionDB);

  open.onupgradeneeded = function() {
    let db = open.result;
    // keyPath "id" requires every record to carry a value for "id";
    // autoIncrement generates one so the put() below does not fail with a DataError.
    let store = db.createObjectStore("OrdersTable", {
      keyPath: "id",
      autoIncrement: true
    });
    let index = store.createIndex("CIndex", ["FullName", "Order", "House", "Road", "Block"]);
  };

  open.onsuccess = function() {
    let db = open.result;
    let tx = db.transaction("OrdersTable", "readwrite");
    let store = tx.objectStore("OrdersTable");
    let index = store.index("CIndex");

    store.put({
      FullName: sessionStorage.getItem("Cfullname"),
      Order: sessionStorage.getItem("order"),
      House: sessionStorage.getItem("CHouse"),
      Road: sessionStorage.getItem("CRoad"),
      Block: sessionStorage.getItem("CBlock")
    });

    tx.oncomplete = function() {
      db.close();
      location.href = "Thanks.html";
    };
  };
};
Now the first problem is that I want to retrieve all the orders, with the details of each object, on the admin page.
The second problem is that I want to check whether the database already exists and, if so, insert a new object instead of creating a new database that holds only one object. In a nutshell, I want the database to be created once and the orders to be saved into that same database on later visits.
Thank you :)
You can place this logic in the function that handles the upgrade event. There are essentially two ways: you can check whether object stores and indices exist, using for example db.objectStoreNames.contains(), or you can compare versions by accessing the version properties on the database object or the event object.
For example, you would only create an object store if it did not already exist. If it does not already exist, then you know this is the moment your database is being created.
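As a hedged sketch covering both parts (the database, store, and field names come from your code; the loadOrders name is made up): opening "CustomersOrders" again with the same name reuses the existing database, onupgradeneeded only runs when the version number increases, and a db.objectStoreNames.contains() guard keeps the store from being recreated. The admin page can then read every saved order with getAll():

function loadOrders() {
  var open = indexedDB.open("CustomersOrders", 1);
  open.onupgradeneeded = function() {
    var db = open.result;
    // Only create the store the first time the database is set up.
    if (!db.objectStoreNames.contains("OrdersTable")) {
      db.createObjectStore("OrdersTable", { keyPath: "id", autoIncrement: true });
    }
  };
  open.onsuccess = function() {
    var db = open.result;
    var tx = db.transaction("OrdersTable", "readonly");
    var request = tx.objectStore("OrdersTable").getAll();
    request.onsuccess = function() {
      // request.result is an array of every stored order object.
      request.result.forEach(function(order) {
        console.log(order.FullName, order.Order, order.House, order.Road, order.Block);
      });
    };
    tx.oncomplete = function() {
      db.close();
    };
  };
}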
I'm trying to work with pull requests, issues, and commits for repos, and I have the following code:
const axios = require('axios');
var gitPullApiLink = "https://api.github.com/repos/elixir-lang/elixir/pulls";
var listOfCommits = [];
var listOfSHAs = [];
var mapOfInfoObjects = new Map();
var mapPullRequestNumberToCommits = new Map();
var mapPRNumbersToCommitObjects = new Map();
var listOfPrObjects = [];
var setOfFileObjects = new Set();
var listOfNumbersOfTargetedIssues = [];
var mapPRnumberToCloseOpenDateObjects = new Map();
class PullRequestParser {
async getListOfPullRequests(pullrequestLink) {
const message = await axios.get(pullrequestLink);
//console.log(message);
listOfPrObjects = message['data'];
}
async getCommitsForEachPullRequestAndPRinformation() {
var listOfPrNumbers = [];
var k;
// this loop will just make a list of Pull Request Numbers
for (k = 0; k < listOfPrObjects.length; k++){
var currPrNumber = listOfPrObjects[k]['number'];
listOfPrNumbers.push(currPrNumber);
}
// I created a separate list just because... I did it this way because on the github API website it seems
// like the pull request has the same number as the issue it affects. I explain how you can see this down below
listOfNumbersOfTargetedIssues = listOfPrNumbers;
// next loop will make objects that contain information about each pull request.
var n;
for (n = 0; n < listOfPrNumbers.length; n++){
var ApiLinkForEachPullRequest = gitPullApiLink + "/" + listOfPrNumbers[n];
const mes = await axios.get(ApiLinkForEachPullRequest);
var temp = {OpeningDate: mes['data']['created_at'],
ClosingDate: mes['data']['closed_at'],
IssueLink: mes['data']['_links']['issue']['href']};
//mapPRnumberToCloseOpenDateObjects will be a map where the key is the pull request number and the value
// is the object that stores the open date, close date, and issue link for that pull request. The reason
// why I said I think the pull request number is the same as the number of the issue it affects is because
// if you take any object from the map, say you do mapPRnumberToCloseOpenDateObjects.get(10). You'll
// get an object with a pull request number 10. Now if you take this object and look at it's "IssueLink"
// field, the very last part of the link will have the number 10, and if you look at the github API
// it says for a single issue, you do: /repos/:owner/:repo/issues/:issue_number <---- As you can see,
// the IssueLink field will have this structure and in place of the issue_number, the field will be 10
// for our example object.
mapPRnumberToCloseOpenDateObjects.set(listOfPrNumbers[n], temp);
}
//up to this point, we have the pull request numbers. we will now start getting the commits associated with
//each pull request
var j;
for (j = 0; j < listOfPrNumbers.length; j++){
var currentApiLink = gitPullApiLink + "/" + listOfPrNumbers[j] + "/commits";
const res = await axios.get(currentApiLink);
//here we map a single pull request to the information containing the commits. I'll just warn you in
// advance: there's another object called mapPRNumbersToCommitObjects. THIS MAP IS DIFFERENT! I know it's
// subtle, but I hope the language can make the distinction: mapPullRequestNumberToCommits will just
// map a pull request number to some data about the commits it's linked to. In contrast,
// mapPRNumbersToCommitObjects will be the map that actually maps pull request numbers to objects
// containing information about the commits a pull request is associated with!
mapPullRequestNumberToCommits.set(listOfPrNumbers[j], res['data']);
}
// console.log("hewoihoiewa");
}
async createCommitObjects(){
var x;
// the initial loop using x will loop over all pull requests and get the associated commits
for (x = 0; x < listOfPrObjects.length; x++){
//here we will get the commits
var currCommitObjects = mapPullRequestNumberToCommits.get(listOfPrObjects[x]['number']);
//console.log('dhsiu');
// the loop using y will iterate over all commits that we get from a single pull request
var y;
for (y = 0; y < currCommitObjects.length; y++){
var currentSHA = currCommitObjects[y]['sha'];
listOfSHAs.push(currentSHA);
var currApiLink = "https://api.github.com/repos/elixir-lang/elixir/commits/" + currentSHA;
const response = await axios.get(currApiLink,);
//console.log("up to here");
// here we start extracting some information from a single commit
var currentAuthorName = response['data']['commit']['committer']['name'];
var currentDate = response['data']['commit']['committer']['date'];
var currentFiles = response['data']['files'];
// this loop will iterate over all changed files for a single commit. Remember, every commit has a list
// of changed files, so this loop will iterate over all those files, get the necessary information
// from those files.
var z;
// we create this temporary list of file objects because for every file, we want to make an object
// that will store the necessary information for that one file. after we store all the objects for
// each file, we will add this list of file objects as a field for our bigger commit object (see down below)
var tempListOfFileObjects = [];
for (z = 0; z < currentFiles.length; z++){
var fileInConsideration = currentFiles[z];
var nameOfFile = fileInConsideration['filename'];
var numberOfAdditions = fileInConsideration['additions'];
var numberOfDeletions = fileInConsideration['deletions'];
var totalNumberOfChangesToFile = fileInConsideration['changes'];
//console.log("with file");
var tempFileObject = {fileName: nameOfFile, totalAdditions: numberOfAdditions,
totalDeletions: numberOfDeletions, numberOfChanges: totalNumberOfChangesToFile};
// we add the same file objects to both a temporary, local list and a global set. Don't be tripped
// up by this; they're doing the same thing!
setOfFileObjects.add(tempFileObject);
tempListOfFileObjects.push(tempFileObject);
}
// here we make an object that stores information for a single commit. sha, authorName, date are single
// values, but files will be a list of file objects and these file objects will store further information
// for each file.
var tempObj = {sha: currentSHA, authorName: currentAuthorName, date: currentDate, files: tempListOfFileObjects};
var currPrNumber = listOfPrObjects[x]['number'];
console.log(currPrNumber);
// here we will make a single pull request number to an object that will contain all the information for
// every single commit associated with that pull request. So for every pull request, it will map to a list
// of objects where each object stores information about a commit associated with the pull request.
mapPRNumbersToCommitObjects.set(currPrNumber, tempObj);
}
}
return mapPRNumbersToCommitObjects;
}
async startParsingPullRequests() {
// return the promises so callers of startParsingPullRequests() can chain on the result
return this.getListOfPullRequests(gitPullApiLink + "?state=all").then(() => {
return this.getCommitsForEachPullRequestAndPRinformation().then(() => {
return this.createCommitObjects().then((response) => {
console.log("functions were successful");
return mapPRNumbersToCommitObjects;
}).catch((error) => {
console.log("printing first error");
console.log(error);
})
}).catch((error2) => {
console.log("printing the second error");
console.log(error2);
})
}).catch((error3) => {
console.log("printing the third error");
console.log(error3);
});
}
//adding some getter methods so they can be used to work with whatever information people may need.
//I start all of them with the this.startParsingPullRequests() method because calling that method gets
//all the information for the global variables.
async getSetOfFileObjects(){
var dummyMap = await this.startParsingPullRequests();
return {files: setOfFileObjects, prMap: mapPRnumberToCloseOpenDateObjects};
}
async OpenCloseDateObjects(){
var dummyMap = await this.startParsingPullRequests();
return mapPRnumberToCloseOpenDateObjects;
}
async getNumbersOfTargetedIssues(){
var dummyMap = await this.startParsingPullRequests();
return listOfNumbersOfTargetedIssues;
}
}
var dummy = new PullRequestParser();
var dummyMap = dummy.startParsingPullRequests().then((message) => {
console.log("dummyMap is defined! :)");
console.log(dummyMap);
});
module.exports = PullRequestParser;
Whenever I run the code on the webstorm terminal though, with:
node PullRequestParser.js
I get a 403 error, followed by a bunch of error output, with the following statement:
data: {
message: "API rate limit exceeded for 138.186.17.173. (But here's the good news: Authenticated
requests get a higher rate limit. Check out the documentation for more details.)"
I looked up the documentation for this and found out that without authentication I can make 60 requests per hour to a repo. The only authentication example the documentation provides, however, uses the command line, and I don't think that would be enough because I want to do some further analysis with the results I get. Does anybody know how I can increase the number of requests I can make? Where in the code would I need to make changes, and what kind of changes would I need to make? Thanks!
The first line of the documentation says everything you need to know.
For API requests using Basic Authentication or OAuth, you can make up
to 5000 requests per hour.
Using Basic Authentication is pretty simple, so that may be the easiest thing to get you up and running. OAuth is more complicated, but more desirable in production.
The axios library supports basic auth requests out of the box.
async getListOfPullRequests(pullrequestLink) {
  const message = await axios.get(pullrequestLink, {
    auth: {
      username: 'username',
      password: 'password'
    }
  });
  //console.log(message);
  listOfPrObjects = message['data'];
}
You just need to supply the correct username and password information.
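If you would rather not put a password in the source, GitHub also accepts a personal access token sent in the Authorization header. A small sketch (GITHUB_TOKEN is an assumed environment variable, not something from your code):

const message = await axios.get(pullrequestLink, {
  headers: {
    // Read the token from the environment so it never lives in the repo.
    Authorization: `token ${process.env.GITHUB_TOKEN}`
  }
});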
As an example, in a basic setup one index is created:
db.onupgradeneeded = function(event) {
  var db = event.target.result;
  var store = db.createObjectStore('name', { keyPath: 'id' });
  store.createIndex('by name', 'name', { unique: false });
};
Question:
Is it possible to create/append more indexes to the same objectStore in a future version upgrade? Because if I try:
db.onupgradeneeded = function(event) {
  var db = event.target.result;
  var store = db.createObjectStore('name', { keyPath: 'id' });
  store.createIndex('by newName', 'newName', { unique: false });
};
It throws an error that the objectStore already exists. And if I try to create the store reference using a transaction:
db.onupgradeneeded = function(event) {
  var db = event.target.result;
  var store = db.transaction('name', 'readwrite').objectStore('name');
  store.createIndex('by newName', 'newName', { unique: false });
};
It throws an error that a version change transaction is currently running.
Yes it is possible. It can be a bit confusing at first. You want to get the existing object store via the implicit transaction created for you within onupgradeneeded. This is a transaction of type versionchange which is basically like a readwrite transaction but specific to the onupgradeneeded handler function.
Something like this:
var request = indexedDB.open(name, oldVersionPlusOne);
request.onupgradeneeded = myOnUpgradeNeeded;

function myOnUpgradeNeeded(event) {
  // Get a reference to the request related to this event
  // @type IDBOpenDBRequest (a specialized type of IDBRequest)
  var request = event.target;

  // Get a reference to the IDBDatabase object for this request
  // @type IDBDatabase
  var db = request.result;

  // Get a reference to the implicit transaction for this request
  // @type IDBTransaction
  var txn = request.transaction;

  // Now, get a reference to the existing object store
  // @type IDBObjectStore
  var store = txn.objectStore('myStore');

  // Now, optionally inspect index names, or create a new index
  console.log('existing index names in store', store.indexNames);

  // Add a new index to the existing object store
  store.createIndex(...);
}
You also will want to take care to increment the version so as to guarantee the onupgradeneeded handler function is called, and to represent that your schema (basically the set of tables and indices and properties of things) has changed in the new version.
You will also need to rewrite the function so that you only create or make changes based on the version. You can use event.oldVersion to help with this, or things like db.objectStoreNames.contains.
Something like this:
function myOnUpgradeNeeded(event) {
  var is_new_db = isNaN(event.oldVersion) || event.oldVersion === 0;
  if (is_new_db) {
    var db = event.target.result;
    var store = db.createObjectStore(...);
    store.createIndex('my-initial-index');
    // Now that you decided you want a second index, you also need
    // to do this for brand new databases
    store.createIndex('my-second-new-index');
  }

  // But if the database already exists, we are not creating things,
  // instead we are modifying the existing things to get into the
  // new state of things we want
  var is_old_db_not_yet_current_version = !isNaN(event.oldVersion) && event.oldVersion < 2;
  if (is_old_db_not_yet_current_version) {
    var txn = event.target.transaction;
    var store = txn.objectStore('store');
    store.createIndex('my-second-new-index');
  }
}
Pay close attention to the fact that I used event.target.transaction instead of db.transaction(...). These are not at all the same thing. One references an existing transaction, and one creates a new one.
Finally, as a personal rule of mine rather than a formal coding requirement: never use db.transaction() from within onupgradeneeded. Stick to modifying the schema when doing upgrades, and do all data changes outside of it.
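As a small illustration of that split (names here are placeholders, not from the question): schema work stays in onupgradeneeded, and ordinary data reads and writes happen later in onsuccess:

var request = indexedDB.open('myDatabase', 2);
request.onupgradeneeded = function(event) {
  // Schema only: create stores and indexes here.
  var db = event.target.result;
  if (!db.objectStoreNames.contains('myStore')) {
    db.createObjectStore('myStore', { keyPath: 'id' });
  }
};
request.onsuccess = function(event) {
  // Data only: use normal readwrite transactions outside the upgrade handler.
  var db = event.target.result;
  var store = db.transaction('myStore', 'readwrite').objectStore('myStore');
  store.put({ id: 1, name: 'example' });
};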
My question is basically what to do in your cloud function, if you want to reference keys that have been generated when the client called push().
/providerApps/{UID}/ is my path to a list of appointment nodes, so each appointment node is at /providerApps/{UID}/someKey.
I need the "new item in the list", the one that was added with push(), so I thought I could order the keys and simply get the last one, but that does not work:
// (Try to) Listen for new appointments at /providerApps/{pUID}
// and store the appointment at /clientApps/{cUID}
// cUID is in the new appointment node
exports.storeNewAppForClient = functions.database.ref("/providerApps/{UID}").onWrite(event => {
  // Exit when the data is deleted.
  if (!event.data.exists()) {
    console.log("deletion -> exiting");
    return;
  }
  const pUID = event.params.UID;
  const params = event.params;
  console.log("params: ", params);

  const firstAppVal = event.data.ref.orderByKey().limitToLast(1).val();
  // TypeError: event.data.ref.orderByKey(...).limitToLast(...).val is not a function
  const date = firstAppVal["dateStr"];
  const cUID = firstAppVal["clientUID"];

  return event.data.ref.root.child("clientApps").child(cUID).child(date).set(pUID);
});
I guess I could do it on the client side with push().getKey() and allow providers to write into the clientApps node, but that seems less elegant.
Any ideas how to do this with Cloud Functions?
As an illustration of my data structure: there are providers and their clients who make appointments.
Cheers
Change your trigger location to be the newly created appointment instead of the list of appointments. Then you can access the appointment data directly:
exports.storeNewAppForClient = functions.database.ref("/providerApps/{UID}/{pushId}").onWrite(event => {
// Exit when the data is deleted.
if (!event.data.exists()) {
console.log("deletion -> exiting");
return;
}
const pUID = event.params.UID;
const params = event.params;
console.log("params: ", params);
const date = event.data.child('dateStr').val();
const cUID = event.data.child('clientUID').val();
return admin.database().ref('clientApps').child(cUID).child(date).set(pUID);
});
(updated for Frank's comment)
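Note that admin.database() in the snippet assumes the Admin SDK is imported and initialized at the top of your functions file. For the event-based SDK generation used here that would look roughly like this (newer versions accept admin.initializeApp() with no arguments):

const functions = require('firebase-functions');
const admin = require('firebase-admin');
// Use the default credentials of the Cloud Functions environment.
admin.initializeApp(functions.config().firebase);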
Is it possible to get the count of listed products on an Amazon page?
I need to get this number. I know I can use JavaScript to get it by ID or class, but Amazon changes the IDs and class names from time to time, so later on I wouldn't be able to get this number unless I checked the ID or class myself and changed it in the code. So is there an API call or something to get this number reliably, without changing the code every time?
You need a combination of ItemSearch and the ResponseGroup BrowseNodes. It would be something like this if you were to use C# and pass the results back to your JavaScript app:
ItemSearchRequest request = new ItemSearchRequest();
request.ResponseGroup = new string[] { "BrowseNodes", "ItemAttributes" };
request.SearchIndex = "Movies";
request.Keywords = "game of thrones";

ItemSearch search = new ItemSearch();
search.AWSAccessKeyId = access_key_id;
search.AssociateTag = associate_tag;
search.Request = new ItemSearchRequest[] { request };

AWSECommerceServicePortTypeClient port = new AWSECommerceServicePortTypeClient("AWSECommerceServicePort");
port.ChannelFactory.Endpoint.EndpointBehaviors.Add(new AmazonSigningEndpointBehavior(access_key_id, secret_access_key));

ItemSearchResponse response = port.ItemSearch(search);

foreach (var items in response.Items)
{
    foreach (var item in items.Item)
    {
        Console.WriteLine("{0}\t{1}\t{2}", item.ItemAttributes.Title, item.ASIN, item.ItemAttributes.Author[0]);
        if (item.BrowseNodes != null)
        {
            Console.WriteLine(" - BrowseNodes");
            foreach (var node in item.BrowseNodes)
            {
                // Only one value is printed here, so the format string takes a single placeholder.
                Console.WriteLine(" -- \t{0}", node.TotalResults);
            }
        }
    }
}
https://flyingpies.wordpress.com/2009/08/01/17/
https://docs.aws.amazon.com/AWSECommerceService/latest/DG/LocaleUS.html
https://docs.aws.amazon.com/AWSECommerceService/latest/DG/ItemSearch.html
https://docs.aws.amazon.com/AWSECommerceService/latest/DG/RG_BrowseNodes.html