Combining an observable with an await call to the back end - javascript

I'm trying to make a progress bar in Angular to show the progress of a back-end method that processes a big Excel file.
I use an SSE observable that emits data from the back-end method I call to process the Excel file.
The method works fine, and the communication between the front end and the SSE observable works too.
The problem is that the responses from the observable aren't processed by the front end until the back-end method finishes, because that method is called with an await.
Let's go with the code, because I'm not sure I'm explaining it properly.
console.time('import en back');

const subscription = this.sseService
  .getServerSentEvent(environment.apiUrl + routesSse._pre + routesSse.getHippoImportProgress)
  .subscribe(async (a) => {
    const data = JSON.parse(a.data);
    console.log(data);
    this.currentRow = data.i;
    this.currentOperation = data.operacion;
    this.totalRows = data.total;
    this.progress = this.currentRow == 0 ? 0 : (this.currentRow / this.totalRows) * 100;
    this.cdr.detectChanges();
  });

const importData = await this.worksSectionService.getImportExcelHippoWorksData(rowsToSend);
console.timeEnd('import en back');
The back end generates the values for this observable correctly; I have verified it with console.logs.
But the data isn't reflected in the subscription until the back-end method has finished.
The values for this observable are generated inside the method that is called on the back end.
I've tried converting the observable subscription to a promise and running it together with the method call in a Promise.all, but the result is the same.
EDIT: if I call the method this.worksSectionService.getImportExcelHippoWorksData(rowsToSend) without the await (only for testing), like this:
const subscription = this.sseService
  .getServerSentEvent(environment.apiUrl + routesSse._pre + routesSse.getHippoImportProgress)
  .subscribe(async (a) => {
    const data = JSON.parse(a.data);
    console.log(data);
    this.currentRow = data.i;
    this.currentOperation = data.operacion;
    this.totalRows = data.total;
    this.progress = this.currentRow == 0 ? 0 : (this.currentRow / this.totalRows) * 100;
    this.cdr.detectChanges();
  });

this.worksSectionService.getImportExcelHippoWorksData(rowsToSend);
the behaviour is the same.

Related

Returning data from firebase functions returns null

I have this code
exports.battle = functions.https.onCall((data, context) => {
  if (context.auth) {
    var difficulty = randInt(2, 10) / 10
    var user = context.auth["uid"]
    var userHP
    var userLevel
    var userDmg
    var resultUserDmg = []
    var resultUserHp = []
    var monsterHP
    var monsterLevel
    var monsterDmg
    var resultMonsterDmg = []
    var resultMonsterHp = []
    var ref = db.ref(`players/${user}`);
    ref.child("hp").once("value", (snapshot) => {
      userHP = snapshot.val()
      monsterHP = userHP * difficulty
      ref.child("level").once("value", (snapshot) => {
        userLevel = snapshot.val()
        monsterLevel = userLevel * difficulty
        console.log("type" + typeof(userHP))
        console.log("value" + userHP)
        console.log("type" + typeof(monsterHP))
        console.log("value" + monsterHP)
        console.log("diff " + difficulty)
        while (userHP > 0 && monsterHP > 0) {
          userDmg = randInt(7, 14) * userLevel
          monsterDmg = randInt(7, 14) * userLevel * difficulty
          userHP = Math.round((userHP - monsterDmg) * 10) / 10;
          console.log("userHP" + (userHP))
          console.log("monsterDmg" + monsterDmg)
          resultMonsterDmg.push(Math.round(monsterDmg * 10) / 10)
          resultUserHp.push(Math.round(userHP * 10) / 10)
          monsterHP = Math.round((monsterHP - userDmg) * 10) / 10;
          console.log("userDmg" + userDmg)
          console.log("monsterHP" + monsterHP)
          resultUserDmg.push(Math.round(userDmg * 10) / 10)
          resultMonsterHp.push(Math.round(monsterHP * 10) / 10)
          console.log("----------")
        }
        var ref = db.ref(`players/${user}/coins`);
        ref.once("value", function(snapshot) {
          var coinsNow = parseInt(snapshot.val());
          const userRef = db.ref(`players/${user}`);
          userRef.update({
            'coins': coinsNow + 1
          })
        });
        var result = {
          userDmg: resultUserDmg,
          userHp: resultUserHp,
          monsterDmg: resultMonsterDmg,
          monsterHp: resultMonsterHp,
          monsterLvl: monsterLevel,
        };
        console.log("result is " + resultUserDmg)
        console.log("result is " + resultUserHp)
        console.log("result is " + resultMonsterDmg)
        console.log("result is " + resultMonsterHp)
        console.log("result is " + monsterLevel)
        return result
      })
    })
  }
  else {
    console.log("User is not logged")
  }
})
Simply put, this code calculates the player's life and damage against the monster using a while loop. That part works correctly. The problem is when I want to send the data in the result variable to the user for further work: it returns null every time.
Do you know where the error is? The only thing I can think of is that the error is in the return; maybe it gets called before the requested data has any value. I don't know, I can't figure it out.
But I am sure that the values I want to send to the user are not null; they have the values they should have. Here is the log from the Firebase console:
Firebase console log
Here is the code I am using on the client to receive the returned value. I always get null here:
var battle = firebase.functions().httpsCallable('battle')
battle().then(result => {
  console.log(result)
})
UPDATE:
Frank's response pointed me in the right direction, but I don't think it's entirely clear. After a longer search, I found a solution that was more understandable to me here: https://medium.com/@stephane.giron/firebase-cloud-functions-oncall-returned-before-end-of-the-function-2d898d8ff259
I simply changed this at the beginning of the code and added this at the end:
exports.battle = functions.https.onCall((data, context) => {
  if (context.auth) {
    return new Promise(function(resolve, reject) {
      /*
      . . . .
      code
      . . . .
      */
      resolve(result)
    })
I hope this helps someone else. I recommend adding a .catch at the end of the code for possible errors, but I'm not entirely sure about it, so I don't want to confuse anyone.
Results from Promises bubble up to the caller, but only if you actually return them from within your then or catch callbacks. So inside every once callback you'll need to ensure that you return the value to the caller.
So first up, you're missing a top-level return statement in your top-level Cloud Function, which means that your return result never makes it to the caller.
So:
return ref.child("hp").once("value", (snapshot) => {
...
})
Next up, you'll need to do the same for ref.child("level").once("value", (snapshot) => { and other individual once calls.
Finally, if you have a loop where you perform an asynchronous operation (such as once or update) for each item in the loop, you can use Promise.all to wait for all of those operations to complete before returning a value to the caller.
I recommend reading up on Using Promises on MDN, the Firebase documentation on terminating functions (sync, async and promises), and watching Doug's excellent video series linked there.
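To make the shape of that advice concrete, here is a minimal sketch (my own, not Frank's code) of the same callable written with the promise form of once() and async/await. db is the database reference from the question, and runBattle is a hypothetical helper standing in for the while loop above:

exports.battle = functions.https.onCall(async (data, context) => {
  if (!context.auth) {
    console.log("User is not logged");
    return null;
  }
  const user = context.auth.uid;
  const ref = db.ref(`players/${user}`);

  // The promise form of once() resolves with the snapshot,
  // so the values can simply be awaited.
  const [hpSnap, levelSnap] = await Promise.all([
    ref.child("hp").once("value"),
    ref.child("level").once("value"),
  ]);
  const userHP = hpSnap.val();
  const userLevel = levelSnap.val();

  // runBattle is hypothetical: run the while loop above and build `result`.
  const result = runBattle(userHP, userLevel);

  // Await the coins update so the function doesn't terminate before it finishes.
  const coinsSnap = await ref.child("coins").once("value");
  await ref.child("coins").set(parseInt(coinsSnap.val()) + 1);

  // Returning from the top level is what actually sends `result` to the caller.
  return result;
});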
Unrelated, but this code is much more complex than needed:
var ref = db.ref(`players/${user}/coins`);
ref.once("value", function(snapshot) {
  var coinsNow = parseInt(snapshot.val());
  const userRef = db.ref(`players/${user}`);
  userRef.update({
    'coins': coinsNow + 1
  })
});
The Firebase Realtime Database nowadays has a built-in increment operation, which means that you can simplify this to:
db.ref(`players/${user}/coins`).set(firebase.database.ServerValue.increment(1));
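One small caveat (my addition, not part of the original answer): inside a Cloud Function the database is usually accessed through the Admin SDK, where the same increment is presumably written as:

db.ref(`players/${user}/coins`).set(admin.database.ServerValue.increment(1));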

Add streams dynamically to combined stream (eg forkJoin)

My function (let's call it myFunction) gets an array of streams (myFunction(streams: Observable<number>[])). Each of those streams produces values from 1 to 100, which act as a progress indicator. When a stream hits 100 it is done and completes. Now, when all of those observables are done I want to emit a value. I could do it this way:
public myFunction(streams: Observable<number>[]) {
  forkJoin(streams).subscribe(_values => this.done$.emit());
}
This works fine, but imagine the following case:
myFunction gets called with 2 streams
one of those streams is done, the second one is still progressing
myFunction gets called (again) with 3 more streams (the 2nd one from the previous call is still progressing)
I'd like to somehow add those new streams from the 3rd bullet to the "queue", which would result in having 5 streams in forkJoin (1 completed, 4 progressing).
I've tried multiple approaches but can't get it working... My latest approach was this:
private currentProgressObs: Observable<any> | null = null;
private currentProgressSub: Subscription | null = null;

public myFunction(progressStreams: Observable<number>[]) {
  const isUploading = this.cumulativeUploadProgressSub && !this.cumulativeUploadProgressSub.closed;
  const currentConcatObs = this.currentProgressObs?.pipe(concatAll());
  const currentStream = isUploading && this.currentProgressObs ? this.currentProgressObs : of([100]);
  if (this.currentProgressSub) {
    this.currentProgressSub.unsubscribe();
    this.currentProgressSub = null;
  }
  this.currentProgressObs = forkJoin([currentStream, ...progressStreams]);
  this.currentProgressSub = this.currentProgressObs.subscribe(
    _lastProgresses => {
      this._isUploading$.next(false); // <----- this is the event I want to emit when all progress is completed
      this.currentProgressSub?.unsubscribe();
      this.currentProgressSub = null;
      this.currentProgressObs = null;
    },
  );
}
The above code only works the first time. A second call to myFunction will never emit the event.
I also tried other ways. I've tried recursion with one global stream array, to which I can add streams while the subscription is still active, but... I failed. How can I achieve this? Which operators, and in what order, should I use? Why will or won't it work?
Here is my suggestion for your issue.
We will have two subjects: one to count the number of requests being processed (requestsInProgress) and one to manage the requests that are being processed (requestMerger).
So whenever we want to add a new request, we pass it to the requestMerger Subject.
Whenever we receive a new request for processing in the requestMerger stream, we first increment the requestsInProgress counter and after that we merge the request itself into the source observable. While merging the new request/observable into the source we also add the finalize operator in order to track when the request has completed (reached 100), and when we hit the completion criterion we decrement the request counter with the decrementCounter function.
In order to emit a result, e.g. to notify someone else in the app about the state of the pending requests, we can subscribe to the requestsInProgress Subject.
You can test it out either here or in this StackBlitz:
let {
  interval,
  Subject,
  BehaviorSubject
} = rxjs

let {
  mergeMap,
  map,
  takeWhile,
  finalize,
  first,
  distinctUntilChanged
} = rxjs.operators

// Imagine the next lines as a service

// Subject responsible for managing streams
let requestMerger = new Subject();
// Subject responsible for tracking streams in progress
let requestsInProgress = new BehaviorSubject(0);

function incrementCounter() {
  requestsInProgress.pipe(first()).subscribe(x => {
    requestsInProgress.next(x + 1);
  });
}

function decrementCounter() {
  requestsInProgress.pipe(first()).subscribe(x => {
    requestsInProgress.next(x - 1);
  });
}

// Adds a request to the requests being processed
function addRequest(req) {
  // takeWhile is used to complete the request when we have `value === 100`;
  // if you are dealing with http requests, `takeWhile` might be redundant,
  // because http requests complete by themselves (i.e. the finalize operator
  // will be called even without the `takeWhile`, which will decrement the
  // requestsInProgress counter)
  requestMerger.next(req.pipe(takeWhile(x => x < 100)));
}

// By subscribing to this stream you can determine whether all requests are processed or some are still pending
requestsInProgress
  .pipe(
    map(x => (x === 0 ? "Loaded" : "Loading")),
    distinctUntilChanged()
  )
  .subscribe(x => {
    console.log(x);
    document.getElementById("loadingState").innerHTML = x;
  });

// This Subject takes care of the requests that are in progress
requestMerger
  .pipe(
    mergeMap(x => {
      // when a new request is added (received from the requestMerger Subject), increment the requests-in-progress counter
      incrementCounter();
      return x.pipe(
        finalize(() => {
          // when a request has completed, decrement the requests-in-progress counter
          decrementCounter();
        })
      );
    })
  )
  .subscribe(x => {
    console.log(x);
  });

// End of fictional service

// Button that adds a request to be processed
document.getElementById("add-stream").addEventListener("click", () => {
  addRequest(interval(1000).pipe(map(x => x * 25)));
});
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/6.6.6/rxjs.umd.min.js"></script>
<div style="display:flex">
  <button id="add-stream">Add stream</button>
  <h5>Loading State: <span id="loadingState">false</span> </h5>
</div>
Your problem is that each time you call your function, you are creating a new observable. Your life would be much easier if all calls of your function pushed all upload jobs through the same stream.
You can achieve this using a Subject.
I would suggest you push individual "upload jobs" through a simple subject and design an observable that emits the state of all upload jobs whenever anything changes: a simple class that offers a createJob() method to submit jobs, and a jobs$ observable to reference the state:
class UploadService {
  private jobs = new Subject<UploadJob>();

  public jobs$ = this.jobs.pipe(
    mergeMap(job => this.processJob(job)),
    scan((collection, job) => collection.set(job.id, job), new Map<string, UploadJob>()),
    map(jobsMap => Array.from(jobsMap.values()))
  );

  constructor() {
    this.jobs$.subscribe();
  }

  public createJob(id: string) {
    this.jobs.next({ id, progress: 0 });
  }

  private processJob(job: UploadJob) {
    // do work and return observable that
    // emits updated status of UploadJob
  }
}
Let's break it down:
jobs is a simple subject that we can push "jobs" through
createJob simply calls jobs.next() to push the new job through the stream
jobs$ is where all the magic happens. It receives each UploadJob and uses:
mergeMap to execute whatever function actually does the work (I called it processJob() for this example; a hypothetical version is sketched after this list) and emits its values into the stream
scan is used to accumulate these UploadJob emissions into a Map (for ease of inserting or updating)
map is used to convert the map into an array (Map<string, UploadJob> => UploadJob[])
this.jobs$.subscribe() is called in the constructor of the class so that jobs will be processed
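For illustration only, here is one hypothetical shape for the UploadJob interface and the processJob() stub from the class above (interval, map and take are assumed to come from rxjs; a real implementation would map actual upload progress events, e.g. from HttpClient, into UploadJob updates):

interface UploadJob {
  id: string;
  progress: number; // 0 to 100
}

// Hypothetical stand-in: emits the job with increasing progress until it reaches 100.
private processJob(job: UploadJob): Observable<UploadJob> {
  return interval(500).pipe(
    take(5),
    map(i => ({ ...job, progress: (i + 1) * 20 }))
  );
}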
Now, we can easily derive your isUploading and cumulativeProgress from this jobs$ observable like so:
public isUploading$ = this.jobs$.pipe(
  map(jobs => jobs.some(j => j.progress !== 100)),
  distinctUntilChanged()
);

public progress$ = this.jobs$.pipe(
  map(jobs => {
    const current = jobs.reduce((sum, j) => sum + j.progress, 0) / 100;
    const total = jobs.length ?? current;
    return current / total;
  })
);
Here's a working StackBlitz demo.

How to implement While loop inside RXJS subscription

I have Angular code where I am trying to subscribe to my first API and implement a while loop inside this subscription. Further, I need to subscribe to another API inside the while loop. Reason: I need to subscribe to the inner API multiple times, and the while loop should end based on a flag returned by the inner API. I tried implementing the code below but it's not working. I need some help.
CallBanyanToFetchQuotes() {
  const url1 = 'http://ws.integration.banyantechnology.com/services/api/rest/ImportForQuote';
  this.http.post(url1, payload)
    .subscribe(importForQuoteResponse => {
      this.importForQuoteResponse = importForQuoteResponse;
      console.log('LoadID = ' + this.importForQuoteResponse.Load.Loadinfo.LoadID);
      this.loadId = this.importForQuoteResponse.Load.Loadinfo.LoadID;
      while (!this.ratingCompleted) {
        const url2 = 'http://ws.integration.banyantechnology.com/services/api/rest/GetQuotes';
        this.http.post(url2, payload)
          .subscribe(getQuoteResponse => {
            this.getQuoteResponse = getQuoteResponse;
            if (this.getQuoteResponse.RatingCompleted === true) {
              this.ratingCompleted = true;
            }
          });
      }
    });
}
this.http.post(url1, payload).pipe(
  switchMap(importForQuoteResponse => {
    this.importForQuoteResponse = importForQuoteResponse;
    this.loadId = this.importForQuoteResponse.Load.Loadinfo.LoadID;
    return timer(0, 1000).pipe(
      switchMap(() => this.http.post(url2, payload)),
      tap(res => this.getQuoteResponse = res),
      takeWhile(res => !res.RatingCompleted, true),
      filter(res => res.RatingCompleted === true)
    )
  })
).subscribe(() => {
  this.ratingCompleted = true;
})
a "fool example" in stackblitz
the before code can be explained like: we make the first post, but, we don't want this subscribtion, so we change this subscription to a timer (switchMap). But we don't want the timer, else a second post (another switchMap). Each time timer is executed, is executed the second post and we get the response using tap. We make the call while the response was false (takeWhile) -it's important make the takewhile(...,true), the "true" makes return the last value- and filter the response (filter) so only get the "subscribe" when the response is true.
NOTE: I use timer(0,1000) to make a call each 1000 miliseconds, feel free to change the interval
You can use expand to simulate a while loop. expand passes the input through to the destination immediately, maps to an Observable and receives its output as the next input. Map to EMPTY to end this recursion.
// move the urls out of the function if they are static
const url1 = 'http://ws.integration.banyantechnology.com/services/api/rest/ImportForQuote';
const url2 = 'http://ws.integration.banyantechnology.com/services/api/rest/GetQuotes';

callBanyanToFetchQuotes() {
  this.http.post(url1, payload).pipe(
    // process response from url1 http request
    tap(importForQuoteResponse => {
      this.importForQuoteResponse = importForQuoteResponse;
      console.log('LoadID = ' + this.importForQuoteResponse.Load.Loadinfo.LoadID);
      this.loadId = this.importForQuoteResponse.Load.Loadinfo.LoadID;
    }),
    // switch to url2 http request
    switchMap(_ => this.http.post(url2, payload)),
    // execute url2 request again if the rating is incomplete or end execution with EMPTY
    expand(quoteResponse => quoteResponse.RatingCompleted ? EMPTY : this.http.post(url2, payload))
    // process responses from url2 requests
  ).subscribe(quoteResponse => {
    this.getQuoteResponse = quoteResponse;
    if (quoteResponse.RatingCompleted === true) {
      this.ratingCompleted = true;
    }
  });
}
The expand approach guarantees that the next http call will be made directly and only after you received a response from the previous one.
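If you would rather space the polls out, like the timer-based answer above, instead of firing immediately after each response, one possible variation (a sketch, not part of the original answer; timer and EMPTY come from rxjs) is to delay the recursive request inside expand:

expand(quoteResponse =>
  quoteResponse.RatingCompleted
    ? EMPTY
    : timer(1000).pipe(switchMap(() => this.http.post(url2, payload)))
)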

Function returned undefined, expected Promise or value and unable to delete old data from firebase database using cloud functions

I'm trying to delete multiple nodes in my database that are older than 12 hours. I'm using a pub/sub function to trigger this event. I don't know if my code is actually looping through all the nodes, as I'm not using the onWrite or onCreate database triggers on anything specific. Here is an image sample of the database.
This is the pub/sub code:
exports.deletejob = functions.pubsub.topic('Oldtask').onPublish(() => {
  deleteOldItem();
})
and the deleteOldItem function
function deleteOldItem() {
  const CUT_OFF_TIME = 12 * 60 * 1000; // 12 Hours in milliseconds.
  //var ref = admin.database().ref(`/articles/${id}`);
  const ref = admin.database().ref(`/articles`);
  const updates = {};
  ref.orderByChild('id').limitToLast(100).on('value', function (response) {
    var index = 0;
    response.forEach(function (child) {
      var element = child.val();
      const datetime = element.timestamp;
      const now = Date.now();
      const cutoff = now - datetime;
      if (CUT_OFF_TIME < cutoff) {
        updates[element.key] = null;
      }
    });
    //This is supposed to be the returned promise
    return ref.child(response.key).update(updates);
  });
}
If there's something I'm doing wrong, I'd like to know. The pub/sub topic is triggered by a job already set up in Google Cloud Scheduler.
You had several problems in your code that were giving you trouble.
The handling of promises wasn't correct. In particular, your top level function never actually returned a promise, it just called deleteOldItems().
You should use the promise form of once() instead of calling on() with a callback since you don't want to install a listener in this case, you just need the result a single time, and you want to handle it as part of a promise chain.
To delete nodes, you should call remove() on a reference to that node. It also generates a promise for you to use here.
You didn't calculate 12 hours in milliseconds properly, you calculated 12 minutes in milliseconds :) (12 * 60 * 1000 = 720,000 ms, i.e. 12 minutes; 12 hours is 12 * 60 * 60 * 1000 = 43,200,000 ms.)
Here's what I came up with. It uses an http function instead of a pubsub function as well as adding a log statement for my testing, but the modification you need should be trivial/obvious (just change the prototype and remove the response after deleteOldItems, but do make sure you keep returning the result of deleteOldItems()):
const functions = require('firebase-functions');
const admin = require('firebase-admin');

function deleteOldItems() {
  const CUT_OFF_TIME = 12 * 60 * 60 * 1000; // 12 Hours in milliseconds.
  const ref = admin.database().ref('/articles');
  return ref.orderByChild('id').limitToLast(100).once('value')
    .then((response) => {
      const updatePromises = [];
      const now = Date.now();
      response.forEach((child) => {
        const datetime = child.val().timestamp;
        const cutoff = now - datetime;
        console.log(`processing ${datetime} my cutoff is ${CUT_OFF_TIME} and ${cutoff}`);
        if (CUT_OFF_TIME < cutoff) {
          updatePromises.push(child.ref.remove())
        }
      });
      return Promise.all(updatePromises);
    });
}

exports.doIt = functions.https.onRequest((request, response) => {
  return deleteOldItems().then(() => { return response.send('ok') });
});
While I have not tested it, I'm pretty sure this will work to include inside your original function call for cloud scheduler:
exports.deletejob = functions.pubsub.topic('Oldtask').onPublish(() => {
  return deleteOldItems();
})
Of course, this is still more complicated than you need, since ordering by id doesn't really gain you anything here. Instead, why not just use the query to return the earliest items before the cut off time (e.g. exactly the ones you want to remove)? I've also switched to limitToFirst to ensure the earliest entries get thrown out, which seems more natural and ensures fairness:
function deleteOldItems() {
  const cutOffTime = Date.now() - (12 * 60 * 60 * 1000); // 12 hours earlier, in milliseconds.
  const ref = admin.database().ref('/articles');
  return ref.orderByChild('timestamp').endAt(cutOffTime).limitToFirst(100).once('value')
    .then((response) => {
      const updatePromises = [];
      response.forEach((child) => {
        updatePromises.push(child.ref.remove())
      });
      return Promise.all(updatePromises);
    });
}
If you do this on more than a few items, of course, you probably want to add an index on the timestamp field so the range query is more efficient.
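For reference, such an index is declared in the Realtime Database security rules; assuming the /articles structure shown above, it would look roughly like this:

{
  "rules": {
    "articles": {
      ".indexOn": ["timestamp"]
    }
  }
}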

firebase - handle no data from snapshot

I have a firebase reference, where I pull data down for a specific custom index I created.
requestsRef
  .orderByChild('systemgameindex')
  .startAt(lastrequest.systemgameindex.toString())
  .endAt(lastrequest.systemgameindex.toString() + '~')
  .limitToFirst(customElem.dataops.limit + 1)
  .on('child_added', function (snapshot) {
    var request = snapshot.val() || {};
    request.key = snapshot.key();
    request.systemColor = customElem.getSystemColor(request.system);
    request.description = customElem.truncateText(request.description, 65);
    customElem.getUserProfile(request);
    customElem.getCommentCount(request.key);
    if (request.systemgameindex !== lastrequest.systemgameindex) { customElem.push('requests', request); };
    customElem.removeSpinnerRoo();
  });
Right before I make the call to Firebase, I display a custom spinner with a function called addSpinnerRoo(), and when data is returned, I call removeSpinnerRoo() to hide the spinner on the DOM.
It works beautifully when there's data to return from Firebase, but if the query brings back no results, the child_added callback never fires, so the spinner keeps spinning on the DOM.
Is there a way to handle the case where Firebase returns no data?
Any insight would be appreciated a lot. Thanks
After reading this in the documentation:
The callback function receives a DataSnapshot, which is a snapshot of the data. A snapshot is a picture of the data at a particular database reference at a single point in time. Calling val() on a snapshot returns the JavaScript object representation of the data. If no data exists at the reference's location, the snapshot's value will be null.
I was able to use the "value" event instead of "child_added" so that Firebase still fires the callback for the .on() method. So my code now looks like this:
// Inside the .on('value', ...) callback of the same query as above:
var data = snapshot.val();
if (data !== null && data !== undefined) {
  var requests = _.map(data, function (val, key) {
    val.key = key;
    return val;
  });
  _.each(requests, function (request) {
    request.systemColor = customElem.getSystemColor(request.system);
    request.description = customElem.truncateText(request.description, 65);
    customElem.getUserProfile(request);
    customElem.getCommentCount(request.key);
    customElem.push('requests', request);
  });
}
customElem.removeSpinnerRoo();
And with that, I was able to get what I needed. If this helps anyone, great...
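As a side note, DataSnapshot also has an exists() method, so the null/undefined check above could presumably be shortened to something like:

requestsRef
  .orderByChild('systemgameindex')
  // ... same query as above ...
  .on('value', function (snapshot) {
    if (snapshot.exists()) {
      // map and process snapshot.val() as above
    }
    customElem.removeSpinnerRoo();
  });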
