Infinite scroll with AngularJS and Firebase - javascript

How do you implement infinite scroll on data that you get from Firebase? So far I have found an AngularJS directive that works really well, but I'm having difficulty using it with Firebase, since Firebase returns all the data in a single request, and that's not what I want.

A few weeks ago, I wrote a JS function that enables infinite scrolling in my app.
First, an initial set of data is displayed when the user visits the website:
// Add a callback that is triggered for each message.
var n = 25; // Step size: number of messages to display per batch.
$(window).load(function() {
  lastMessagesQuery = messagesRef.limit(n);
  lastMessagesQuery.on('child_added', function(snapshot) {
    var message = snapshot.val();
    $('<div/>').text(message.text).prependTo($('#messagesDiv'));
    $('#messagesDiv')[0].scrollTop = $('#messagesDiv')[0].scrollHeight;
  });
  $('#messagesDiv').fadeTo(1000, 1);
});
Then, the function that makes infinite scrolling possible:
// Pagination.
var i = 0; // Record variable.
function moreMessages() {
  i += n; // Advance the pagination offset.
  moreMessagesQuery = messagesRef; // Firebase reference.
  moreMessagesQuery.on('value', function(snapshot) {
    var data = snapshot.exportVal(); // Fetch all data from Firebase as an object.
    var keys = Object.keys(data).reverse(); // Keys are ordered from oldest to newest, so reverse them to display the Firebase snapshots properly.
    var total_keys = keys.length;
    var k = keys[i]; // Key from which to start counting. Be careful which key you pick.
    if (i < total_keys) { // Stop displaying messages once the last one is reached.
      lastMessagesQuery = messagesRef.endAt(null, k).limit(n); // Messages from a key back towards the oldest.
      lastMessagesQuery.on('child_added', function(snapshot) {
        var message = snapshot.val();
        $('<div/>').text(message.text).appendTo($('#messagesDiv')).hide().fadeIn(1000); // Append this batch (oldest to newest) to the end of #messagesDiv.
      });
    }
  });
}
Finally, the infinite scrolling:
// Load more messages when the scroll reaches the bottom.
$(window).scroll(function() {
  if (window.scrollY == document.body.scrollHeight - window.innerHeight) {
    moreMessages();
  }
});
It works great with small data sets. I hope this helps you to solve your problem (or gives you more ideas).
UPDATE October 2015
Firebase has grown since my original answer, and it's now pretty easy to achieve infinite scrolling using just its JavaScript API:
First, I recommend creating an index in your Firebase security rules. For this answer, I created this one:
{
  "rules": {
    ".read": true,
    ".write": false,
    "messages": {
      ".indexOn": "id"
    }
  }
}
Then, let's make some magic with Firebase:
// #fb: your Firebase reference.
// #data: messages, users, products... the dataset you want to work with.
// #_start: min ID from which to start fetching your data.
// #_end: max ID at which to stop fetching your data.
// #_n: step size; in other words, how much data to fetch from Firebase per call.
var fb = new Firebase('https://<YOUR-FIREBASE-APP>.firebaseio.com/');
var data = [];
var _start = 0;
var _end = 9;
var _n = 10;
var getDataset = function() {
  fb.orderByChild('id').startAt(_start).endAt(_end).limitToLast(_n).on("child_added", function(dataSnapshot) {
    data.push(dataSnapshot.val());
  });
  _start = _start + _n;
  _end = _end + _n;
};
Finally, a better infinite scroll (without jQuery):
window.addEventListener('scroll', function() {
  if (window.scrollY === document.body.scrollHeight - window.innerHeight) {
    getDataset();
  }
});
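One caveat: scrollY can be fractional on high-DPI displays, so the strict equality above may never match, and nothing fetches the first batch until the user scrolls. A sketch of both adjustments, using a small threshold and an initial call:
getDataset(); // fetch the first batch on page load

window.addEventListener('scroll', function() {
  // Use a threshold instead of strict equality; fractional scroll
  // positions would otherwise keep the handler from ever firing.
  var distanceFromBottom = document.body.scrollHeight - window.innerHeight - window.scrollY;
  if (distanceFromBottom <= 1) {
    getDataset();
  }
});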
I'm using this approach with React and it's blazing fast no matter how big your data is.

Related

Allowing only one like per item. JS + Firebase Realtime Database

I am new to programming and I am trying to implement a picture gallery in which you can like/vote for your favorite pictures. Each picture displays a counter of the number of likes/votes.
The caveat is that while you can vote for different pictures, you can only vote once for each one.
Any ideas on how to achieve this?
const dCounters = document.querySelectorAll('.CountLike');
[].forEach.call(dCounters, function(dCounter) {
  const el = dCounter.querySelector('button');
  const cId = dCounter.id;
  const dDatabase = firebase.database().ref('Like Number Counter').child(cId);
  // Get Firebase data.
  dDatabase.on('value', function(snap) {
    let data = snap.val() || 0;
    dCounter.querySelector('span').innerHTML = data;
  });
  // Set Firebase data.
  el.addEventListener('click', function() {
    dDatabase.transaction(function(dCount) {
      return (dCount || 0) + 1;
    });
  });
});
I have tried using local storage to check whether the user has already voted:
localStorage.getItem('iHaveVoted', 'yes')
but this isn't taken into account until the browser is refreshed, so the user is still allowed to increase the votes multiple times.
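One way to fix this with the localStorage approach is to check and set the flag inside the click handler itself, so the vote is blocked immediately rather than only after a refresh. A rough sketch (the voted- key name is just an example, one flag per picture id):
// Set Firebase data, at most once per picture for this browser.
el.addEventListener('click', function() {
  var voteKey = 'voted-' + cId; // example key name: one flag per picture id
  if (localStorage.getItem(voteKey)) {
    return; // this browser already voted for this picture
  }
  localStorage.setItem(voteKey, 'yes'); // record the vote right away, no refresh needed
  dDatabase.transaction(function(dCount) {
    return (dCount || 0) + 1;
  });
});
Note that localStorage only restricts the browser, not the person; a server-enforced one-vote rule would need Firebase Authentication plus a per-user vote record guarded by security rules.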

How to increase your limit of Github API uses per hour in Javascript

I'm trying to work with pull requests, issues, and commits for repos, and I have the following code:
const axios = require('axios');

var gitPullApiLink = "https://api.github.com/repos/elixir-lang/elixir/pulls";
var listOfCommits = [];
var listOfSHAs = [];
var mapOfInfoObjects = new Map();
var mapPullRequestNumberToCommits = new Map();
var mapPRNumbersToCommitObjects = new Map();
var listOfPrObjects = [];
var setOfFileObjects = new Set();
var listOfNumbersOfTargetedIssues = [];
var mapPRnumberToCloseOpenDateObjects = new Map();

class PullRequestParser {
  async getListOfPullRequests(pullrequestLink) {
    const message = await axios.get(pullrequestLink);
    //console.log(message);
    listOfPrObjects = message['data'];
  }

  async getCommitsForEachPullRequestAndPRinformation() {
    var listOfPrNumbers = [];
    var k;
    // This loop just makes a list of pull request numbers.
    for (k = 0; k < listOfPrObjects.length; k++) {
      var currPrNumber = listOfPrObjects[k]['number'];
      listOfPrNumbers.push(currPrNumber);
    }
    // I created a separate list because, on the GitHub API website, it seems
    // like a pull request has the same number as the issue it affects. I explain how you can see this below.
    listOfNumbersOfTargetedIssues = listOfPrNumbers;
    // The next loop makes objects that contain information about each pull request.
    var n;
    for (n = 0; n < listOfPrNumbers.length; n++) {
      var ApiLinkForEachPullRequest = gitPullApiLink + "/" + listOfPrNumbers[n];
      const mes = await axios.get(ApiLinkForEachPullRequest);
      var temp = {
        OpeningDate: mes['data']['created_at'],
        ClosingDate: mes['data']['closed_at'],
        IssueLink: mes['data']['_links']['issue']['href']
      };
      // mapPRnumberToCloseOpenDateObjects is a map where the key is the pull request number and the value
      // is the object that stores the open date, close date, and issue link for that pull request. The reason
      // I think the pull request number is the same as the number of the issue it affects is that
      // if you take any object from the map, say mapPRnumberToCloseOpenDateObjects.get(10), you'll
      // get an object for pull request number 10. If you then look at its "IssueLink"
      // field, the very last part of the link will have the number 10, and the GitHub API
      // says that for a single issue you use: /repos/:owner/:repo/issues/:issue_number <---- As you can see,
      // the IssueLink field has this structure, and in place of issue_number the field will be 10
      // for our example object.
      mapPRnumberToCloseOpenDateObjects.set(listOfPrNumbers[n], temp);
    }
    // Up to this point we have the pull request numbers. We now start getting the commits associated with
    // each pull request.
    var j;
    for (j = 0; j < listOfPrNumbers.length; j++) {
      var currentApiLink = gitPullApiLink + "/" + listOfPrNumbers[j] + "/commits";
      const res = await axios.get(currentApiLink);
      // Here we map a single pull request to the information containing its commits. A warning in
      // advance: there's another object called mapPRNumbersToCommitObjects. THIS MAP IS DIFFERENT! It's
      // subtle, but the distinction matters: mapPullRequestNumberToCommits just
      // maps a pull request number to some data about the commits it's linked to. In contrast,
      // mapPRNumbersToCommitObjects is the map that actually maps pull request numbers to objects
      // containing information about the commits a pull request is associated with!
      mapPullRequestNumberToCommits.set(listOfPrNumbers[j], res['data']);
    }
  }

  async createCommitObjects() {
    var x;
    // The outer loop over x iterates over all pull requests and gets the associated commits.
    for (x = 0; x < listOfPrObjects.length; x++) {
      // Here we get the commits.
      var currCommitObjects = mapPullRequestNumberToCommits.get(listOfPrObjects[x]['number']);
      // The loop over y iterates over all commits that we get from a single pull request.
      var y;
      for (y = 0; y < currCommitObjects.length; y++) {
        var currentSHA = currCommitObjects[y]['sha'];
        listOfSHAs.push(currentSHA);
        var currApiLink = "https://api.github.com/repos/elixir-lang/elixir/commits/" + currentSHA;
        const response = await axios.get(currApiLink);
        // Here we start extracting some information from a single commit.
        var currentAuthorName = response['data']['commit']['committer']['name'];
        var currentDate = response['data']['commit']['committer']['date'];
        var currentFiles = response['data']['files'];
        // This loop iterates over all changed files for a single commit. Every commit has a list
        // of changed files, so this loop walks over those files and gets the necessary information
        // from each one.
        var z;
        // We create this temporary list of file objects because, for every file, we want an object
        // that stores the necessary information for that one file. After we store all the objects for
        // each file, we add this list of file objects as a field of our bigger commit object (see below).
        var tempListOfFileObjects = [];
        for (z = 0; z < currentFiles.length; z++) {
          var fileInConsideration = currentFiles[z];
          var nameOfFile = fileInConsideration['filename'];
          var numberOfAdditions = fileInConsideration['additions'];
          var numberOfDeletions = fileInConsideration['deletions'];
          var totalNumberOfChangesToFile = fileInConsideration['changes'];
          var tempFileObject = {
            fileName: nameOfFile,
            totalAdditions: numberOfAdditions,
            totalDeletions: numberOfDeletions,
            numberOfChanges: totalNumberOfChangesToFile
          };
          // We add the same file object to both a temporary local list and a global set. Don't be tripped
          // up by this; they're doing the same thing!
          setOfFileObjects.add(tempFileObject);
          tempListOfFileObjects.push(tempFileObject);
        }
        // Here we make an object that stores information for a single commit. sha, authorName, and date are
        // single values, but files is a list of file objects, and those file objects store further
        // information for each file.
        var tempObj = {sha: currentSHA, authorName: currentAuthorName, date: currentDate, files: tempListOfFileObjects};
        var currPrNumber = listOfPrObjects[x]['number'];
        console.log(currPrNumber);
        // Here we map a single pull request number to an object containing all the information for
        // every commit associated with that pull request. So every pull request maps to a list
        // of objects, where each object stores information about one commit associated with the pull request.
        mapPRNumbersToCommitObjects.set(currPrNumber, tempObj);
      }
    }
    return mapPRNumbersToCommitObjects;
  }

  async startParsingPullRequests() {
    // Return the promise chain so callers can await it.
    return this.getListOfPullRequests(gitPullApiLink + "?state=all").then(() => {
      this.getCommitsForEachPullRequestAndPRinformation().then(() => {
        this.createCommitObjects().then((response) => {
          console.log("functions were successful");
          return mapPRNumbersToCommitObjects;
        }).catch((error) => {
          console.log("printing first error");
          console.log(error);
        })
      }).catch((error2) => {
        console.log("printing the second error");
        console.log(error2);
      })
    }).catch((error3) => {
      console.log("printing the third error");
      console.log(error3);
    });
  }

  // Adding some getter methods so they can be used to work with whatever information people may need.
  // I start all of them with the this.startParsingPullRequests() method because calling that method fills
  // in all the information for the global variables.
  async getSetOfFileObjects() {
    var dummyMap = await this.startParsingPullRequests();
    return {files: setOfFileObjects, prMap: mapPRnumberToCloseOpenDateObjects};
  }

  async OpenCloseDateObjects() {
    var dummyMap = await this.startParsingPullRequests();
    return mapPRnumberToCloseOpenDateObjects;
  }

  async getNumbersOfTargetedIssues() {
    var dummyMap = await this.startParsingPullRequests();
    return listOfNumbersOfTargetedIssues;
  }
}

var dummy = new PullRequestParser();
var dummyMap = dummy.startParsingPullRequests().then((message) => {
  console.log("dummyMap is defined! :)");
  console.log(dummyMap);
});

module.exports = PullRequestParser;
Whenever I run the code in the WebStorm terminal, though, with:
node PullRequestParser.js
I get a 403 error, followed by a bunch of error output, with the following statement:
data: {
  message: "API rate limit exceeded for 138.186.17.173. (But here's the good news: Authenticated requests get a higher rate limit. Check out the documentation for more details.)"
I looked up the documentation and found that, without authentication, I can make 60 requests per hour to a repo. To get authenticated, however, the only example provided uses the command line. I don't think that would be enough, because I want to do further analysis with the results I get. Does anybody know how I can increase the number of requests I can make? Where in the code would I need to make changes, and what kind of changes would I need to make? Thanks!
The first line of the documentation says everything you need to know.
For API requests using Basic Authentication or OAuth, you can make up to 5000 requests per hour.
Using Basic Authentication is pretty simple, so that may be the easiest thing to get you up and running. OAuth is more complicated, but more desirable in production.
The axios library supports basic auth requests out of the box.
async getListOfPullRequests(pullrequestLink) {
  const message = await axios.get(pullrequestLink, {
    auth: {
      username: 'username',
      password: 'password'
    }
  });
  //console.log(message);
  listOfPrObjects = message['data'];
}
You just need to supply the correct username and password information.
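Note that GitHub has since deprecated password-based Basic Authentication for the API; a personal access token sent in an Authorization header achieves the same thing. A sketch of that variant (YOUR_TOKEN is a placeholder for a token generated in your GitHub settings):
async getListOfPullRequests(pullrequestLink) {
  const message = await axios.get(pullrequestLink, {
    // YOUR_TOKEN is a placeholder; generate a personal access token in
    // your GitHub settings and keep it out of source control.
    headers: {
      Authorization: 'token YOUR_TOKEN'
    }
  });
  listOfPrObjects = message['data'];
}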

Handling firebase real time changes in progressive web app

I'm fetching the top stories from the Hacker News API, which uses Firebase. I'm planning to build a progressive web app, so I plan to cache the results in localStorage.
The current code, which fetches and renders the top stories, is here:
var ref = new Firebase("http://hacker-news.firebaseio.com/v0/");
var itemRef = ref.child('item');
var topStories = [];

var storyCallback = function(snapshot) {
  var story = snapshot.val();
  var html = '';
  if (story.score) {
    html = '<i>' + story.score + '</i> ' + story.title + '';
  }
  document.getElementById(topStories.indexOf(story.id)).innerHTML = html;
};

ref.child('topstories').once('value', function(snapshot) {
  topStories = snapshot.val();
  for (var i = 0; i < topStories.length; i++) {
    var element = document.createElement("P");
    element.id = i;
    document.getElementById('items').appendChild(element);
    itemRef.child(topStories[i]).on('value', storyCallback);
  }
});

ref.child('topstories').on('child_changed', function(snapshot, prevChildName) {
  var ref = snapshot.ref();
  var index = ref.name();
  var oldItemId = topStories[index];
  itemRef.child(oldItemId).off();
  var newItemId = snapshot.val();
  topStories[index] = newItemId;
  itemRef.child(newItemId).on('value', storyCallback);
});
If I add each story to localStorage (by tweaking the above code), how can I skip fetching it from Firebase the next time (if it is already present and hasn't changed)?
Note that the "hasn't changed" part is important: I can easily read a story from local storage using its key, but it should stay in sync with Firebase as well. So I'm wondering whether Firebase has some way to handle this.
If I'm not missing anything, you can simply check whether the item already exists in local storage:
if (localStorage.getItem('key')) {
  // display here
} else {
  // fetch
  // save for next time
  localStorage.setItem('key', JSON.stringify('yourdata'));
  // and display here
}
You can also generalize your functions for fetching, displaying, and rendering so you can call them in multiple places.
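Building on that, here is a sketch of how the storyCallback from the question could write through to localStorage, so cached stories render instantly while the Firebase listener keeps them in sync (the story- key prefix and renderStory helper are made up for illustration):
var storyCallback = function(snapshot) {
  var story = snapshot.val();
  var id = snapshot.ref().name(); // same old-SDK call style the question already uses
  // Write through to the cache every time Firebase reports a value,
  // so localStorage never goes stale.
  localStorage.setItem('story-' + id, JSON.stringify(story));
  renderStory(story); // renderStory: your generalized display function
};

// When wiring up each story, paint the cached copy first for an instant
// first render, then keep the 'value' listener attached so changes sync.
var cached = localStorage.getItem('story-' + topStories[i]);
if (cached) {
  renderStory(JSON.parse(cached));
}
itemRef.child(topStories[i]).on('value', storyCallback);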

JavaScript/Firebase Script Executing Multiple Times

I am having an interesting issue. The general idea of what I am doing is pulling data from a Firebase database, and populating a table based on that data. Everything runs perfectly during initial population--cells and rows are populated as they should be, but the weird issue is that the scripts seem to execute again randomly. I've logged the incoming data to the console, and can see it print twice after some amount of time.
This second execution does not happen if I navigate between pages or reload the page; in either of those cases everything works as it should. The problem SEEMS to happen when I log back into my computer after locking it??? Does anybody have ANY idea what could be going on here? Relevant portion of the script below:
const table = document.getElementById('myTable');

firebase.auth().onAuthStateChanged(firebaseUser => {
  if (firebaseUser) {
    let user = firebase.auth().currentUser;
    let uid = user.uid;
    const dbRef = firebase.database().ref().child("data/" + uid);
    dbRef.once('value', snap => {
      var dataCount = snap.child("secondData").numChildren();
      var datalist = snap.child("secondData").val();
      var dataArray = Object.keys(datalist).map(function(k) {
        return datalist[k];
      });
      pullAllInfo(dataCount, dataArray);
    });
  }
});
function pullAllInfo(count, array) {
  let k = 0;
  let dataArray = [];
  for (let i = 0; i < count; i++) {
    let specificRef = firebase.database().ref().child("secondData/" + array[i]);
    specificRef.once('value', snap => {
      var optionsTag = array[k];
      k++;
      var dataId = snap.child("id").val();
      var dataName = snap.child("name").val();
      var dataCount = snap.child("data").numChildren();
      dataArray.push(dataId, dataName, dataCount, optionsTag);
      if (k == count) {
        buildTable(dataArray);
        console.log(dataArray);
      }
    });
  }
}
As you can see from the code above, I AM calling .once() for each reference, which should prevent the data duplication you'd get from the typical .on() call. I just can't seem to figure this one out. ALSO, I have an iMac, for anyone curious about my potential computer-unlock diagnosis.
Thanks all!
Most likely, the auth state is changing and setting off your function. Try throwing a log under firebase.auth().onAuthStateChanged like this:
firebase.auth().onAuthStateChanged(firebaseUser => {
  console.log('auth state changed', firebaseUser);
  if (firebaseUser) {
My guess is that you'll see that the AuthState is changing when you log out/log in from your computer.
I solved this issue by creating another global boolean called preLoaded. At the beginning, it is set to false and, once the data is loaded and passed off to build the table, it is set to true. It now looks like this:
if (k == count && preLoaded == false) {
  preLoaded = true;
  console.log(dataArray);
  buildTable(dataArray);
}
All set!
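An alternative sketch: onAuthStateChanged returns an unsubscribe function, so you can detach the listener after the first signed-in user arrives instead of tracking a boolean (loadAndBuildTable is a hypothetical wrapper around the existing dbRef.once(...) logic):
const unsubscribe = firebase.auth().onAuthStateChanged(firebaseUser => {
  if (firebaseUser) {
    // Detach immediately so later auth-state events (e.g. a token refresh
    // after unlocking the machine) cannot re-run the table build.
    unsubscribe();
    loadAndBuildTable(firebaseUser.uid); // hypothetical wrapper around dbRef.once(...)
  }
});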

Assemble paginated ajax data in a Bacon FRP stream

I'm learning FRP using Bacon.js, and would like to assemble data from a paginated API in a stream.
The module that uses the data has a consumption API like this:
// UI module, displays unicorns as they arrive
beautifulUnicorns.property.onValue(function(allUnicorns) {
  console.log("Got " + allUnicorns.length + " Unicorns");
  // ... some real display work
});
The module that assembles the data requests sequential pages from an API and pushes onto the stream every time it gets a new data set:
// beautifulUnicorns module
var curPage = 1
var stream = new Bacon.Bus()
var property = stream.toProperty()
property.onValue(function(){}) // You have to add an empty subscriber, otherwise future onValues will not receive the initial value. https://github.com/baconjs/bacon.js/wiki/FAQ#why-isnt-my-property-updated
var allUnicorns = [] // !!! stateful list of all unicorns ever received. Is this idiomatic for FRP?
var getNextPage = function() {
  /* get data for subsequent pages.
     Skipping for clarity */
}
var gotNextPage = function(resp) {
  Array.prototype.push.apply(allUnicorns, resp) // just appends the responses to the existing array reference
  stream.push(allUnicorns)
  curPage++
  if (curPage <= pageLimit) { getNextPage() }
}
How do I subscribe to the stream in a way that gives me a full list of all unicorns ever received? Is this flatMap or something similar? I don't think I need a new stream out of it, but I don't know; I'm new to the FRP way of thinking. To be clear, assembling the array works; it just feels like I'm not doing the idiomatic thing.
I'm not using jQuery or another ajax library for this, which is why I'm not using Bacon.fromPromise.
You may also wonder why my consuming module wants the whole set instead of just the incremental update. If it were just appending rows that would be OK, but in my case it's an infinite scroll, and it should draw data only if both: 1. the data is available, and 2. the area is on screen.
This can be done with the .scan() method. You will also need a stream that emits the items of one page, which you can create with Bacon.repeat().
Here is a draft (sorry, not tested):
var itemsPerPage = Bacon.repeat(function(index) {
  var pageNumber = index + 1;
  if (pageNumber < PAGE_LIMIT) {
    return Bacon.fromCallback(function(callback) {
      // your method that talks to the server
      getDataForAPage(pageNumber, callback);
    });
  } else {
    return false;
  }
});

var allItems = itemsPerPage.scan([], function(allItems, itemsFromAPage) {
  return allItems.concat(itemsFromAPage);
});

// Here you go
allItems.onValue(function(allUnicorns) {
  console.log("Got " + allUnicorns.length + " Unicorns");
  // ... some real display work
});
As you noticed, you also won't need the .onValue(function(){}) hack or the external curPage state.
Here is a solution using flatMap and fold. When dealing with the network, you have to remember that responses can come back in a different order than you sent the requests; that's why fold and map are combined here.
var pages = Bacon.fromArray([1, 2, 3, 4, 5])

var requests = pages.flatMap(function(page) {
  return doAjax(page)
    .map(function(value) {
      return {
        page: page,
        value: value
      }
    })
}).log("Data received")

var allData = requests.fold([], function(arr, data) {
  return arr.concat([data])
}).map(function(arr) {
  // I would normally write this as a one-liner
  var sorted = _.sortBy(arr, "page")
  var onlyValues = _.pluck(sorted, "value")
  var inOneArray = _.flatten(onlyValues)
  return inOneArray
})

allData.log("All data")

function doAjax(page) {
  // This would actually be Bacon.fromPromise($.ajax...)
  // Math.random simulates the fact that requests can return out of order
  return Bacon.later(Math.random() * 3000, [
    "Page" + page + "Item1",
    "Page" + page + "Item2"
  ])
}
http://jsbin.com/damevu/4/edit
