Possible race condition with cursor when using Promise.all - javascript

In the project that I am working on, built using Node.js & Mongo, there is a function that takes in a query and returns a set of data based on the limit & offset provided to it. Along with this data, the function returns a total count of all the matched objects present in the database. Below is the function:
// options carry the limit & offset values
// mongoQuery carries a mongo matching query
function findMany(query, options, collectionId) {
  const cursor = getCursorForCollection(collectionId).find(query, options);
  return Promise.all([findManyQuery(cursor), countMany(cursor)]);
}
Now the problem with this is that sometimes, when I give a large limit size, I get an error saying:
Uncaught exception: TypeError: Cannot read property '_killCursor' of undefined
At first I thought I might have to increase the pool size in order to fix this issue but after digging around a little bit more I was able to find out that the above code is resulting in a race condition. When I changed the code to:
function findMany(query, options, collectionId) {
  const cursor = getCursorForCollection(collectionId).find(query, options);
  return findManyQuery(cursor).then((dataSet) => {
    return countMany(cursor).then((count) => {
      return Promise.resolve([dataSet, count]);
    });
  });
}
Everything started working perfectly fine. Now, from what I understood about Promise.all, it takes an array of promises and resolves them one after the other. If the promises are executed one after the other, how can the Promise.all code result in a race condition while chaining the promises doesn't?
I am not able to wrap my head around it. Why is this happening?

Since I have very little information to work with, I made an assumption about what you want to achieve and came up with the following using Promise.all(), just to demonstrate how you should use it. Promise.all() resolves the array of promises passed to it in no particular order, so there must be no dependency in any promise on the order of execution of the other promises. Read more about it here.
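Before the demo, a quick illustration of that point (a minimal sketch, independent of the question's code): Promise.all does not run the promises one after the other. Each promise is already running by the time it is passed in; Promise.all merely waits for all of them to settle.
// Both timers start immediately; Promise.all only waits for both to settle.
const p1 = new Promise(resolve => setTimeout(() => resolve('slow'), 200));
const p2 = new Promise(resolve => setTimeout(() => resolve('fast'), 100));
Promise.all([p1, p2]).then(result => console.log(result)); // [ 'slow', 'fast' ] after ~200ms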
// A simple function to simulate findManyQuery for demo purposes
function findManyQuery(cursors) {
  return new Promise((resolve, reject) => {
    // Do your checks and run your code (for example)
    if (cursors) {
      resolve({ dataset: cursors });
    } else {
      reject({ error: 'No cursor in findManyQuery function' });
    }
  });
}
// A simple function to simulate countMany for demo purposes
function countMany(cursors) {
  return new Promise((resolve, reject) => {
    // Do your checks and run your code (for example)
    if (cursors) {
      resolve({ count: cursors.length });
    } else {
      reject({ error: 'No cursor in countMany' });
    }
  });
}
// A simple function to simulate getCursorForCollection for demo purposes
function getCursorForCollection(collectionId) {
  /*
    Simulating the returned cursor using an array of objects
    and the Array filter function
  */
  return [
    { id: 1, language: 'Javascript', collectionId: 99 },
    { id: 2, language: 'Dart', collectionId: 100 },
    { id: 3, language: 'Go', collectionId: 100 },
    { id: 4, language: 'Swift', collectionId: 99 },
    { id: 5, language: 'Kotlin', collectionId: 101 },
    { id: 6, language: 'Python', collectionId: 100 }
  ].filter((row) => row.collectionId === collectionId);
}
function findMany(query = { id: 1 }, options = [], collectionId = 0) {
  /*
    First I create a function to simulate the assumed use of
    the query and options parameters just for demo purposes
  */
  const filterFunction = function (collectionDocument) {
    return collectionDocument.collectionId === query.id && options.indexOf(collectionDocument.language) !== -1;
  };
  /*
    Since I am working with arrays, I replaced the find function
    with the filter function just for demo purposes
  */
  const cursors = getCursorForCollection(collectionId).filter(filterFunction);
  /*
    Using Promise.all([]). NOTE: You should pass the result of
    findManyQuery() to countMany() if you want to get the total
    count of the resulting dataset
  */
  return Promise.all([findManyQuery(cursors), countMany(cursors)]);
}
// Consuming the findMany function with test parameters
const query = { id: 100 };
const collectionId = 100;
const options = ['Javascript', 'Python', 'Go'];
findMany(query, options, collectionId).then(result => {
  console.log(result); // Result would be [ { dataset: [ [Object], [Object] ] }, { count: 2 } ]
}).catch((error) => {
  console.log(error);
});

There are ways to write this function in a "pure" way for scalability and testing.
So here's your concern:
In the project that I am working on, built using Node.js & Mongo, there is a function that takes in a query and returns a set of data based on the limit & offset provided to it. Along with this data, the function returns a total count of all the matched objects present in the database.
Note: You'll need to take care of edge cases.
const Model = require('path/to/model');

function findManyUsingPromise(model, query = {}, offset = 0, limit = 10) {
  return new Promise((resolve, reject) => {
    model.find(query, (error, data) => {
      if (error) {
        return reject(error);
      }
      resolve({
        data,
        total: data.length || 0
      });
    }).skip(offset).limit(limit);
  });
}
// Call the function
findManyUsingPromise(Model, {}, 0, 40).then((result) => {
  // Do something with result { data: [object array], total: value }
}).catch((err) => {
  // Do something with the error
});
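As for the race in the original findMany itself, here is a minimal sketch of one way to avoid it, reusing the question's names and assuming getCursorForCollection returns a collection handle (as its use with .find() suggests): give each consumer its own cursor, so neither operation can kill a cursor the other is still using.
function findMany(query, options, collectionId) {
  const collection = getCursorForCollection(collectionId); // assumed to be a collection handle
  // Two independent cursors: findManyQuery and countMany no longer share state
  const dataCursor = collection.find(query, options);
  const countCursor = collection.find(query, options);
  return Promise.all([findManyQuery(dataCursor), countMany(countCursor)]);
}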

Related

Node: How keep old objects and push new data into file Json

I want to fetch the old values from the JSON file if my script stops running, and then insert the new values into the same JSON file. I read the file and parsed the JSON; the code runs safely, but my problem is that it only prints once:
const dataFile = fs.readFileSync('data.json', 'utf8')
let decodeJson = await JSON.parse(dataFile);
decodeJson.table.push({name: 'test'})
This line is inside a forEach loop, so it is supposed to push 25 entries, one 'test' for each word, because afterwards I want to write it back into the same data.json file. This is the forEach:
getData.data.forEach(async details => {
  let currentPage = details.title;
  const dataFile = fs.readFileSync('data.json', 'utf8')
  if (dataFile === '') { // check if file json empty
    try {
      obj.table.push({
        name: details.title
      })
      let encodeJson = await JSON.stringify(obj);
      fs.writeFileSync("data.json", encodeJson)
      console.log('file is empty')
    } catch (err) {
      console.log(err);
    }
  } else {
    // read data with JSON from file
    try {
      let decodeJson = await JSON.parse(dataFile);
      // i want here keep old values + push new values (details.title)
      decodeJson.table.push({
        name: 'test'
      })
      //fs.writeFileSync('data.json', JSON.stringify(decodeJson))
    } catch (err) {
      console.log(err);
    }
  }
})
I tried a few approaches but failed; it still prints only one object. Why?
let data = []
data.push('test')
decodeJson.table.push(data)
This is the result I got in the console.log:
table: [
{ name: 'Kimi ga Nozomu Eien' },
{ name: 'Kita e.: Diamond Dust Drops' },
{ name: 'Loveless' },
{ name: 'Blood+' },
{ name: 'Re: Cutey Honey' },
{ name: 'Solty Rei' },
{ name: 'Juuni Kokuki' },
{ name: 'Shaman King' },
{ name: 'X/1999' },
{ name: 'X' },
{ name: 'Mahou Sensei Negima!' },
{ name: 'Maria-sama ga Miteru' },
{ name: 'Boukyaku no Senritsu' },
{ name: 'Ima, Soko ni Iru Boku' },
{ name: 'Peace Maker Kurogane' },
{ name: 'Pita Ten' },
{ name: 'Power Stone' },
{ name: 'Mononoke Hime' },
{ name: 'RahXephon' },
{ name: 'Samurai 7' },
{ name: 'Scrapped Princess' },
{ name: 's.CRY.ed' },
{ name: 'Shingetsutan Tsukihime' },
{ name: 'Slam Dunk' },
{ name: 'Strange Dawn' },
[ 'test' ]
]
}
Also, my goal is that if the script stops and starts back up, it restores the old objects that were stored in the JSON file and keeps adding new objects from the API. I'm still a newbie learning JSON, so please treat me kindly.
forEach loops are synchronous
As already commented, neither JSON.parse nor JSON.stringify is asynchronous, so you don't need to use await for them.
Using await is causing the problem: Array.prototype.forEach treats its argument as a synchronous function and discards any value returned from it. Effectively this means any promises returned from the async function provided are discarded, without waiting for the promises to be settled.
Also, an async function returns a promise synchronously as soon as the first await operator is executed within it.
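A tiny demonstration of that discarding (plain Node, nothing assumed from the question):
// forEach ignores the promises returned by the async callback,
// so 'done' is logged before any of the awaited work finishes.
[1, 2, 3].forEach(async n => {
  await new Promise(resolve => setTimeout(resolve, 10));
  console.log(n);
});
console.log('done'); // prints first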
Hence if data.json is empty, the await in
let encodeJson = await JSON.stringify(obj);
returns a promise before creating data.json content, which is sufficient for the forEach loop to continue. All remaining iterations will do the same and when resumed will overwrite the data.json file created in the previous iteration. However, since each iteration updated an outer variable obj, the last version of data.json should be correct.
Similar conditions apply to an existing data file however: each loop iteration reads the existing file and returns a promise when
await JSON.parse(dataFile);
is executed, allowing the next iteration to proceed and read the same input data and overwrite the file written by the previous iteration.
Major lessons are to not use forEach for asynchronous work that needs to complete within a single iteration, and not to use await in code that must execute synchronously.
In this particular case, leaving out the await operators, staying with synchronous writes, and removing async before the loop function declaration should be sufficient to solve the issue. A better solution may be to read data.json once before starting the loop, and write it once after finishing the loop.
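A rough sketch of that last suggestion, reusing the names from the question (getData and details.title) and assuming the file may start out empty:
const fs = require('fs');

// Read data.json once, before the loop
const dataFile = fs.readFileSync('data.json', 'utf8');
const decodeJson = dataFile === '' ? { table: [] } : JSON.parse(dataFile);

// Plain synchronous loop: just accumulate the new entries in memory
getData.data.forEach(details => {
  decodeJson.table.push({ name: details.title });
});

// Write the merged result once, after the loop
fs.writeFileSync('data.json', JSON.stringify(decodeJson));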

Knexjs how to handle multiple updates

I'm quite unsure how to handle multiple updates/inserts in Knex and return whether or not everything was successful at the end.
I'm passing an array through req.body, looping through it, and triggering actions based on the information inside the array.
Example:
const data = [...req.body]
for (let i = 0; i < data.length; i++) {
  for (let y = 0; y < data[i].totals.length; y++) {
    if (data[i].totals[y].info === "Holiday") {
      calcHoliday(data[i].totals[y].total, data[i].id)
    } else if (data[i].totals[y].info === "ZA") {
      calcZA(data[i].totals[y].total, data[i].id)
    }
  }
  calcOvertime(data[i].totalSum, data[i].id)
  if (i === data.length - 1) {
    res.json("Success")
  }
}
The array I'm passing in looks like this:
[
  {
    "id": 1,
    "totals": [
      {
        "info": "Holiday",
        "total": 4
      }
    ]
  },
  {
    "id": 1,
    "totals": [
      {
        "info": "Holiday",
        "total": 4
      }
    ]
  }
]
Example function which gets called in the for loop:
const calcHoliday = (hours, userid) => {
  knex.transaction(trx => {
    trx.insert({
      created_at: convertedTime,
      info: "Booking Holiday - Hours: " + hours,
      statuscode: 200
    }).into("logs")
      .then(() => {
        return trx("hours")
          .decrement("holiday_hours", hours)
      }).then(trx.commit)
      .catch(trx.rollback)
  }).then(() => console.log("WORKED"))
    .catch(err => console.log(err))
}
This is working perfectly fine, but I can't figure out how to gather the results from each table update in order to respond whether everything worked or an error appeared. If I call e.g. .then(resp => res.json(resp)) after one calcHoliday call, I receive only the response from the first operation.
In short, I need a way to res.json whether everything succeeded or an error appeared somewhere.
Thanks in advance!
TLDR;
Turning your insert calls into an array of promises and then using await and a Promise.all() / Promise.allSettled() structure might solve this problem, but there are some UX decisions to make on what to roll back and how to return errors.
Error Handling Choices:
Option 1: Any error --> all insertions in all loop iterations should be rolled back.
Do you want partial success? The way the code is written now, rollback only applies to items in one function call. If one of the hour-decrement calls fails, it will roll back one log insert, but not any that succeeded for previous data in the loop. If you want the whole dataset to roll back, you'd need to pass the trx through each function call or do a bulk insert of all of your rows in one function call, which might be nice for performance reasons anyway depending on the use case.
Option 2: Partial success --> commits successes, rolls back single loop iterations that fail, sends a detailed list of errors and successes.
You'd want to use Promise.allSettled(), which aggregates the successes and errors as an array from all promises in the loop.
Option 3: Partial success --> commits the successes, rolls back single loop iterations that fail, sends just one error.
Opinion: This can be a misleading UX unless the error is "some of the insertions were unsuccessful" and the endpoint is idempotent.
This looks closest to what you're describing you want. If this is the case, you'd want to use Promise.all(), which throws an error as soon as one promise in the array errors.
Example Implementation:
Since the original code is incomplete, this is a loose, incomplete example of what option 2/3 might look like. This could easily be transformed into option 1.
First, it might help to modify all of your functions with asynchronous calls to be fulfillable as promises. Async/await helps avoid .then() trees that are hard to reason about.
const calcHoliday = async (hours, userid) => {
  try {
    const result = await knex.transaction(async (trx) => {
      await trx.insert({
        created_at: convertedTime,
        info: "Booking Holiday - Hours: " + hours,
        statuscode: 200
      }).into("logs")
      return trx("hours").decrement("holiday_hours", hours)
    })
    return result
  } catch (err) {
    console.log("It didn't work.")
    throw new Error(`Error: Failure to insert for user ${userid}: ${err}`)
  }
}
Here are some utilities to get the data transformed, and to get the appropriate unfulfilled promise to supply to the map in Promise.all/allSettled.
/*
  Here's an example of how you might transform the original data
  with maps in order to avoid nested for-loops:
  [
    { id: 1, info: 'Holiday', total: 4 },
    { id: 1, info: 'Holiday', total: 4 }
  ]
*/
const flatData = data.map(item => {
  return item.totals.map(total => ({
    id: item.id,
    ...total
  }))
}).flat()
// Returns the appropriate promise based on the data
const getTotalsPromises = (row) => {
  const { info, id, total } = row
  if (info === "Holiday") {
    return calcHoliday(total, id)
  } else if (info === "ZA") {
    return calcZA(total, id)
  }
}

const getCalcOvertimePromises = (rowInData) => {
  // work left to reader
  return calcOvertime(rowInData.correctData, rowInData.otherData)
}
If you want option 2:
// Replaces the loop
// Fulfills *all* the promises, creating an array of errors and successes
const responses = await Promise.allSettled([
  ...flatData.map(getTotalsPromises),
  ...data.map(getCalcOvertimePromises)
])
// insert a loop here to do something with the errors if you want
res.send(responses)
OR Option 3
Create an array of all of the promises you want to run, run them, and process up to one error.
// Replaces the loop
// Runs the promises and waits for them all to finish or for the first error.
try {
  const responses = await Promise.all([
    ...flatData.map(getTotalsPromises),
    ...data.map(getCalcOvertimePromises)
  ])
  res.send(responses)
} catch (err) {
  // Reached if one of the rows errors
  res.send(err)
}
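And if you decide you want option 1 instead (all-or-nothing), here is a loose sketch of the idea mentioned earlier: pass one shared transaction into every helper. calcHolidayTrx below is a hypothetical trx-accepting variant of calcHoliday, not code from the question.
// Hypothetical variant of calcHoliday that receives the shared transaction
const calcHolidayTrx = (trx, hours, userid) =>
  trx.insert({
    created_at: convertedTime,
    info: "Booking Holiday - Hours: " + hours,
    statuscode: 200
  }).into("logs")
    .then(() => trx("hours").decrement("holiday_hours", hours))

try {
  await knex.transaction(async (trx) => {
    // One transaction for the whole request: any failure rolls everything back
    await Promise.all(flatData.map(row => calcHolidayTrx(trx, row.total, row.id)))
  })
  res.json("Success")
} catch (err) {
  res.status(500).send(err.message)
}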
Docs:
Promise.allSettled
Promise.all

Is a promise within a promise the best solution? asynchronous node file read within for loop

The Node.js function below takes:
an object, shop which contains a regular expression
an array of filenames
The function will read each csv file listed in the array and test a cell in the first row with a regular expression, returning a new array of matching filenames.
function matchReport(shop, arr) {
  return promise = new Promise(resolve => {
    var newArray = [];
    for (var j = 0; j < arr.length; ++j) {
      let filename = arr[j];
      csv()
        .fromFile(filename)
        .then(reportData => {
          if (reportData[0]['Work'].match(shop.productRegex)) {
            newArray.push(filename);
          }
          if (j === arr.length) {
            resolve(newArray);
          }
        });
    }
  }).then(matches => {
    return {
      'shop': shop.name,
      'reports': matches
    }
  }).catch(e => {
    console.log(e);
  });
}
Very rarely the function will return with the correct behavior which is this:
{ shop: 'shop1',
reports: [ '../artist-sales-report-2020-11-12(1).csv' ] }
{ shop: 'shop2',
reports:
[ '../artist-sales-report-2020-12-03.csv',
'../artist-sales-report-2020-09-01.csv' ] }
More often it returns with reports missing, like below:
{ shop: 'shop1',
reports: [ '../artist-sales-report-2020-11-12(1).csv' ] }
{ shop: 'shop2',
reports: [ '../artist-sales-report-2020-12-03.csv' ] }
I understand where the problem is taking place, within the csv reportData block. I understand that it is an asynchronous issue and I have tried to write more elaborate if..then or switch statements as a hack solution with no luck. It seems a little sloppy and cluttered to me to create more promises inside of this promise but I have been unsuccessful at that as well.
Using async/await and the nested promises you dislike, you could simplify your code to something like this, which should always await all results. I made the assumption that your problem is the fromFile method, which appears to be asynchronous itself, since it uses a then that you are not awaiting.
async function matchReport(shop, arr) {
  const matches = await Promise.all(arr.map(async filename => {
    const reportData = await csv().fromFile(filename);
    if (reportData[0]['Work'].match(shop.productRegex)) {
      return filename;
    }
  }));
  return {
    'shop': shop.name,
    'reports': matches.filter(Boolean)
  };
}
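A hypothetical call, assuming a shop object shaped like the ones in the question (the regex here is made up):
const shop = { name: 'shop1', productRegex: /mug/i }; // hypothetical shop
matchReport(shop, ['../artist-sales-report-2020-11-12(1).csv', '../artist-sales-report-2020-12-03.csv'])
  .then(result => console.log(result)); // { shop: 'shop1', reports: [ ...matching filenames ] }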

In rxjs, how do I chain mapping through arrays of data received from different API's?

I'm calling an API and receiving an array of results, I'm checking for pagination and if more pages exist I call the next page, repeat until no more pages.
For each array of results, I call another endpoint and do the exact same thing: I receive an array of results, check for another page, and call the endpoint again. Wash, rinse, repeat.
For instance:
I want to grab a list of countries that might be a paginated response, then for each country I want to grab a list of cities, which might also be paginated. And for each city I execute a set of transformations and then store in a database.
I already tried this, but got stuck:
const grabCountries = Observable.create(async (observer) => {
  const url = 'http://api.com/countries'
  let cursor = url
  do {
    const results = fetch(cursor)
    // results = {
    //   data: [ 'Canada', 'France', 'Spain' ],
    //   next: '47asd8f76358df8f4058898fd8fab'
    // }
    results.data.forEach(country => { observer.next(country) })
    cursor = results.next ? `${url}/${results.next}` : undefined
  } while (cursor)
})
const getCities = {
  next: (country) => {
    const url = 'http://api.com/cities'
    let cursor = url
    do {
      const results = fetch(cursor)
      // results = {
      //   data: [
      //     'Montreal', 'Toronto',
      //     'Paris', 'Marseilles',
      //     'Barcelona', 'Madrid'
      //   ],
      //   next: '89ghjg98nd8g8sdfg98gs9h868hfoig'
      // }
      results.data.forEach(city => {
        `**** What do I do here?? ****`
      })
      cursor = results.next ? `${url}/${results.next}` : undefined
    } while (cursor)
  }
}
I tried a few approaches:
Making a subject (sometimes I'll need to do parallel processing based on the results of 'grabCountries'. For example, I may want to store the countries in a DB in parallel with grabbing the cities.)
const intermediateSubject = new Subject()
intermediateSubject.subscribe(storeCountriesInDatabase)
intermediateSubject.subscribe(getCities)
I also tried piping and mapping, but it seems like it's basically the same thing.
As I was writing this I thought of this solution, and it seems to be working fine; I would just like to know if I'm making this too complicated. There might be cases where I need to make more than just a few API calls in a row. (Imagine: Countries => States => Cities => Bakeries => Reviews => Comments => Replies.) So this weird mapping over another observer callback pattern might get nasty.
So this is what I have now basically:
// grabCountries stays the same as above, but the rest is as follows:
const grabCities = (country) =>
  Observable.create(async (observer) => {
    const url = `http://api.com/${country}/cities`
    let cursor = url
    do {
      const results = fetch(cursor)
      // results = {
      //   data: [
      //     'Montreal', 'Toronto',
      //     'Paris', 'Marseilles',
      //     'Barcelona', 'Madrid'
      //   ],
      //   next: '89ghjg98nd8g8sdfg98gs9h868hfoig'
      // }
      results.data.forEach(city => {
        observer.next(city)
      })
      cursor = results.next ? `${url}/${results.next}` : undefined
    } while (cursor)
  })

const multiCaster = new Subject()
grabCountries.subscribe(multiCaster)

multiCaster.pipe(map((country) => {
  grabCities(country).pipe(map(saveCityToDB)).subscribe()
})).subscribe()

multiCaster.pipe(map(saveCountryToDB)).subscribe()
tl;dr - I call an API that returns a paginated set of results in an array, and I need to map through each item and call another API that returns another paginated set of results, each set also in an array.
Is nesting one observable inside another and mapping through the results via 'callApiForCountries.pipe(map(forEachCountryCallApiForCities))' the best method or do you have any other recommendations?
Here's code that should work with sequential crawling of the next url. You start with a { next: url } and keep expanding until res.next is no longer available.
// Note: expand must return an observable; fetchPage is an assumed helper
// that performs the HTTP call and emits { data, next }
of({ next: 'http://api.com/cities' }).pipe(
  expand(res => res.next ? fetchPage(res.next) : EMPTY),
  takeWhile(res => res.next !== undefined, true) // inclusive, so the last page is kept
).subscribe()
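To process each city rather than each page, one more hedged step (still assuming the hypothetical fetchPage helper above): flatten each page's data array with concatMap, since an array is a valid ObservableInput.
const { of, EMPTY } = require('rxjs');
const { expand, takeWhile, concatMap } = require('rxjs/operators');

of({ next: 'http://api.com/cities' }).pipe(
  expand(res => res.next ? fetchPage(res.next) : EMPTY),
  takeWhile(res => res.next !== undefined, true),
  concatMap(res => res.data || []) // the seed object has no data, hence the fallback
).subscribe(city => console.log(city));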
OK, so I have spent a lot of brain power on this and have come up with two solutions that seem to be working.
const nestedFlow = () => {
  fetchAccountIDs.pipe(map(accountIds => {
    getAccountPostIDs(accountIds) // Has the do loop for paging inside
      .pipe(
        map(fetchPostDetails),
        map(mapToDBFormat),
        map(storeInDB)
      ).subscribe()
  })).subscribe()
}
const expandedFlow = () => {
  fetchAccountIDs.subscribe((accountId) => {
    // accountId: { accountId: '345367geg55sy' }
    getAccountPostIDs(accountId).pipe(
      expand((results) => {
        /*
          results: {
            postIDs: [
              131424234,
              247345345,
            ],
            cursor: '374fg8v0ggfgt94',
          }
        */
        const { postIDs, cursor } = results
        if (cursor) return getAccountPostIDs({ ...accountId, cursor })
        return { postIDs, cursor }
      }),
      takeWhile(hasCursor, true), // recurs until cursor is undefined
      concatMap(data => data.postIDs),
      map(data => ({ post_id: data })),
      map(fetchPostDetails),
      map(mapToDBFormat),
      map(storeInDB)
    ).subscribe()
  })
}
Both seem to be working with similar performance. I read somewhere that leaving the data flow is bad practice and you should pipe everything, but I don't know how to eliminate the first exit in the 'expandedFlow', because the 'expand' needs to call back an observable, but maybe it can be done.
Now I just have to solve the race condition between the time 'complete' is called in getAccountPostIDs and the time the last record is stored in the DB. Currently in my test, the observer.complete is finishing before 3 of the upsert actions.
Any comments are appreciated and I hope this helps someone out in the future.
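A note on that remaining race, as a sketch only: if storeInDB returns a promise, passing it through map() emits the still-pending promise and lets the stream complete before the upserts finish. Swapping the last step for concatMap (same names as above) might make completion wait for each write:
// Inside expandedFlow's pipe, replace the final map(storeInDB) with:
concatMap(data => storeInDB(data)) // each upsert settles before the stream can complete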
What you need is the expand operator. It behaves recursively so it fits the idea of having paginated results.

RxJS Observable fire onCompleted after a number of async actions

I'm trying to create an observable that produces values from a number of asynchronous actions (http requests from a Jenkins server), that will let a subscriber know once all the actions are completed. I feel like I must be misunderstanding something because this fails to do what I expect.
'use strict';
let Rx = require('rx');
let _ = require('lodash');

let values = [
  { 'id': 1, 'status': true },
  { 'id': 2, 'status': true },
  { 'id': 3, 'status': true }
];

function valuesObservable() {
  return Rx.Observable.create(function(observer) {
    _.map(values, function(value) {
      var millisecondsToWait = 1000;
      setTimeout(function() { // just using setTimeout here to construct the example
        console.log("Sending value: ", value);
        observer.onNext(value)
      }, millisecondsToWait);
    });
    console.log("valuesObservable Sending onCompleted");
    observer.onCompleted()
  });
}

let observer = Rx.Observer.create((data) => {
  console.log("Received Data: ", data);
  // do something with the info
}, (error) => {
  console.log("Error: ", error);
}, () => {
  console.log("DONE!");
  // do something else once done
});

valuesObservable().subscribe(observer);
Running this, I get output:
valuesObservable Sending onCompleted
DONE!
Sending value: { id: 1, status: true }
Sending value: { id: 2, status: true }
Sending value: { id: 3, status: true }
While what I would like to see is something more like:
Sending value: { id: 1, status: true }
Received Data: { id: 1, status: true }
Sending value: { id: 2, status: true }
Received Data: { id: 2, status: true }
Sending value: { id: 3, status: true }
Received Data: { id: 3, status: true }
valuesObservable Sending onCompleted
DONE!
I don't actually care about the order of the items in the list, I would just like the observer to receive them.
I believe what is happening is that Javascript asynchronously fires the timeout function, and proceeds immediately to the observer.onCompleted() line. Once the subscribing observer receives the onCompleted event (is that the right word?), it decides that it's done and disposes of itself. Then when the async actions complete and the observable fires onNext, the observer no longer exists to take any actions with them.
If I'm right about this, I'm still stumped about how to make it behave in the way I would like. Have I stumbled into an antipattern without realising it? Is there a better way of approaching this whole thing?
Edit:
Since I used setTimeout to construct my example, I realised I can use it to partially solve my problem by giving the observable a timeout.
function valuesObservable() {
  return Rx.Observable.create(function(observer) {
    let observableTimeout = 10000;
    setTimeout(function() {
      console.log("valuesObservable Sending onCompleted");
      observer.onCompleted();
    }, observableTimeout);
    _.map(values, function(value) {
      let millisecondsToWait = 1000;
      setTimeout(function() {
        console.log("Sending value: ", value);
        observer.onNext(value)
      }, millisecondsToWait);
    });
  });
}
This gets me all of the information from the observable in the order I want (data, then completion), but depending on the choice of timeout I may either miss some data or have to wait a long time for the completion event. Is this just an inherent problem of asynchronous programming that I have to live with?
Yes there is a better way. The problem right now is that you are relying on time delays for your synchronization when in fact you can use the Observable operators to do so instead.
The first step is to move away from directly using setTimeout. Instead use timer
Rx.Observable.timer(waitTime);
Next you can lift the values array into an Observable such that each value is emitted as an event by doing:
Rx.Observable.from(values);
And finally you would use flatMap to convert those values into Observables and flatten them into the final sequence. The result being an Observable that emits each time one of the source timers emits, and completes when all the source Observables complete.
Rx.Observable.from(values)
  .flatMap(
    // Map the value into a stream
    value => Rx.Observable.timer(waitTime),
    // This function maps the value returned from the timer Observable
    // back into the original value you wanted to emit
    value => value
  )
Thus the complete valuesObservable function would look like:
function valuesObservable(values) {
  return Rx.Observable.from(values)
    .flatMap(
      value => Rx.Observable.timer(waitTime),
      value => value
    )
    .do(
      x => console.log(`Sending value: ${x}`),
      null,
      () => console.log('Sending values completed')
    );
}
Note the above would work as well if you weren't using a demo stream, i.e. if you had real http streams. You could even simplify by using merge (or concat to preserve order):
Rx.Observable.from(streams)
  .flatMap(stream => stream);
// OR
Rx.Observable.from(streams).merge();
// Or simply
Rx.Observable.mergeAll(streams);
The best way to construct an observable is to use the existing primitives and then a combination of the existing operators. This avoids a few headaches (unsubscription, error management, etc.). Rx.Observable.create is certainly useful when nothing else fits your use case, though I wonder if generateWithAbsoluteTime would fit here.
Anyways, the issue you run into here is that you complete your observer before you send it data. So basically you need to come up with a better completion signal. Maybe:
complete x seconds after last value emitted if no new value is emitted
complete when a value is equal to some 'end' value
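For example, the second idea might look like this in the same RxJS 4 style as the question (END_ID is a hypothetical sentinel value, not part of the original code):
// Complete as soon as a sentinel value shows up, instead of guessing a timeout
valuesObservable()
  .takeWhile(value => value.id !== END_ID)
  .subscribe(observer);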
With thanks to @paulpdaniels, this is the final code that did what I wanted, including the calls to Jenkins:
'use strict';
let Rx = require('rx');
let jenkinsapi = require('jenkins'); // https://github.com/silas/node-jenkins/issues

let jenkinsOpts = {
  "baseUrl": "http://localhost:8080",
  "options": { "strictSSL": false },
  "job": "my-jenkins-job",
  "username": "jenkins",
  "apiToken": "f4abcdef012345678917a"
};
let jenkins = jenkinsapi(JSON.parse(JSON.stringify(jenkinsOpts)));

function jobInfoObservable(jenkins, jobName) {
  // returns an observable containing a single list of builds for a given job
  let selector = { tree: 'builds[number,url]' };
  return Rx.Observable.fromNodeCallback(function(callback) {
    jenkins.job.get(jobName, selector, callback);
  })();
}

function buildIDObservable(jenkins, jobName) {
  // returns an observable containing a stream of individual build IDs for a given job
  return jobInfoObservable(jenkins, jobName).flatMap(function(jobInfo) {
    return Rx.Observable.from(jobInfo.builds)
  });
}

function buildInfoObservable(jenkins, jobName) {
  // returns an observable containing a stream of http responses for each build in the history of this job
  let buildIDStream = buildIDObservable(jenkins, jobName);
  let selector = { 'tree': 'actions[parameters[name,value]],building,description,displayName,duration,estimatedDuration,executor,id,number,result,timestamp,url' };
  return buildIDStream.flatMap(function(buildID) {
    return Rx.Observable.fromNodeCallback(function(callback) {
      jenkins.build.get(jobName, buildID.number, selector, callback);
    })();
  });
}

let observer = Rx.Observer.create((data) => {
  console.log("Received Data: ", data);
  // do something with the info
}, (error) => {
  console.log("Error: ", error);
}, () => {
  console.log("DONE!");
  // do something else once done
});

buildInfoObservable(jenkins, jenkinsOpts.job).subscribe(observer);
By relying on the Rx built-in operators I managed to avoid messing about with timing logic altogether. This is also much cleaner than nesting multiple Rx.Observable.create statements.
