I am trying to learn how to use generators and yield, so I tried the following but it doesn't seem to be working.
I am using the following function, which contains 2 async calls:
var client = require('mongodb').MongoClient;
$db = function*(collection, obj){
var documents;
yield client.connect('mongodb://localhost/test', function*(err, db){
var c = db.collection(collection);
yield c.find(obj).toArray(function(err, docs){
documents = docs;
db.close();
});
});
return documents.length;
};
Then to make the original call, I am doing this:
var qs = require("querystring");
var query = qs.parse("keywords[]=abc&keywords[]=123");
var total = $db("ads", {"details.keywords": {$in: query["keywords[]"]}});
console.log(total);
When I get my output back in the console, I get this:
{}
I was expecting a number such as 200. What is it that I am doing wrong?
TL;DR
For the short answer, you're looking for a helper like co.
var co = require("co");
co(myGen( )).then(function (result) { });
But Why?
There is nothing inherently asynchronous about ES6 iterators, or the generators which define them.
function * allIntegers ( ) {
var i = 1;
while (true) {
yield i;
i += 1;
}
}
var ints = allIntegers();
ints.next().value; // 1
ints.next().value; // 2
ints.next().value; // 3
The .next( ) method, though, actually lets you send data back into the iterator.
function * exampleGen ( ) {
var a = yield undefined;
var b = yield a + 1;
return b;
}
var exampleIter = exampleGen();
exampleIter.next().value; // undefined
exampleIter.next(12).value; // 13 (I passed 12 back in, which is assigned to a)
exampleIter.next("Hi").value; // "Hi" is assigned to b, and then returned
It might be confusing to think about, but when you yield, it works like a return statement: the left-hand side hasn't been assigned the value yet. More importantly, if you had written var y = (yield x) + 1;, the parenthesized part resolves before the rest of the expression, so you return, and the + 1 is put on hold until a value comes back.
Then when it arrives (passed in, via the .next( )), the rest of the expression is evaluated (and then assigned to the left hand side).
The object that's returned from each call has two properties: { value: ..., done: false }.
value is what you've returned/yielded and done is whether or not it's hit the actual return statement at the end of the function (including implicit returns).
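A tiny sketch that shows both points at once (the names here are made up, nothing more):
function * plusOne () {
  var y = (yield "first") + 1; // yields "first" and pauses; the + 1 waits for whatever comes back
  return y;
}

var it = plusOne();
it.next();  // { value: "first", done: false } -- paused in the middle of the expression
it.next(9); // { value: 10, done: true }       -- 9 is sent back in, + 1 runs, y is assigned and returned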
This is the part that can then be used to make this async magic happen.
function * asyncGen ( id ) {
var key = yield getKeyPromise( id );
var values = yield getValuesPromise( key );
return values;
}
var asyncProcess = asyncGen( 123 );
var getKey = asyncProcess.next( ).value;
getKey.then(function (key) {
return asyncProcess.next( key ).value;
}).then(function (values) {
doStuff(values);
});
There's no magic.
Instead of returning a value, I'm returning a promise.
When the promise completes, I'm pushing the result back in, using .next( result ), which gets me another promise.
When that promise resolves, I push that back in, using .next( newResult ), et cetera, until I'm done.
Can we do better?
We know now that we're just waiting for promises to resolve, then calling .next on the iterator with the result.
Do we have to know, ahead of time what the iterator looks like, to know when we're done?
Not really.
function coroutine (iterator) {
return new Promise(function (resolve, reject) {
function turnIterator (value) {
var result = iterator.next( value );
if (result.done) {
resolve(result.value);
} else {
result.value.then(turnIterator);
}
}
turnIterator();
});
}
coroutine( myGen() ).then(function (result) { });
This isn't complete and perfect. co covers extra bases: it makes sure everything you yield gets treated like a promise, so you don't blow up by yielding a non-promise value; it lets you yield arrays of promises, which become one promise that resolves to the array of results for that yield; and it wraps try/catch around the promise handling, to throw the error back into the iterator (yes, try/catch works perfectly with yield statements, done this way, thanks to a .throw(err) method on the iterator).
These things aren't hard to implement, but they make the example muddier than it needs to be.
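If you're curious what the error-handling piece looks like, here's a rough sketch of the idea (not co's actual source; it extends the coroutine above to wrap every yielded value in a promise and to throw rejections back into the generator):
function coroutine (makeGenerator) {
  return function () {
    var iterator = makeGenerator.apply(this, arguments);
    return new Promise(function (resolve, reject) {
      function step (method, arg) {
        var result;
        try {
          result = iterator[method](arg); // .next(value) or .throw(err)
        } catch (err) {
          reject(err); // the generator let the error escape
          return;
        }
        if (result.done) {
          resolve(result.value);
          return;
        }
        Promise.resolve(result.value).then(
          function (value) { step("next", value); }, // feed resolved values back in
          function (err) { step("throw", err); }     // throw rejections back at the yield
        );
      }
      step("next");
    });
  };
}
With that shape, a try/catch inside the generator around a yield will catch a rejected promise, just as described above.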
This is exactly why co or some other "coroutine" or "spawn" method is perfect for this stuff.
The guys behind the Express server built KoaJS, using Co as a library, and Koa's middleware system just takes generators in its .use method and does the right thing.
But Wait, there's more!
As of ES7, it's very likely that the spec will add language for this exact use-case.
async function doAsyncProcess (id) {
var key = await getKeyPromise(id);
var values = await getValuesPromise(key);
return values;
}
doAsyncProcess(123).then(values => doStuff(values));
The async and await keywords are used together, to achieve the same functionality as the coroutine-wrapped promise-yielding generator, without all of the external boilerplate (and with engine-level optimizations, eventually).
You can try this today, if you're using a transpiler like BabelJS.
I hope this helps.
Yield and generators have nothing to do with asynchrony; their primary purpose is to produce iterable sequences of values, just like this:
function * gen() {
var i = 0;
while (i < 10) {
yield i++;
}
}
for (var i of gen()) {
console.log(i);
}
Just calling a function with a star (a generator function) merely creates a generator object (that is why you see {} in the console), which can be interacted with using the next function.
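A minimal illustration (a made-up generator; the plain {} is how older versions of Node display a generator object, which is what you're seeing):
function* gen() { yield 1; }

var it = gen();         // nothing inside gen has run yet
console.log(it);        // {} -- just the generator object
console.log(it.next()); // { value: 1, done: false } -- execution starts only now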
That said, you can use generator functions as an analogue of asynchronous functions, but you need a special runner, like co.
var client = require('mongodb').MongoClient;
$db = function*(collection, obj){
var documents;
yield client.connect('mongodb://localhost/test', function*(err, db){
var c = db.collection(collection);
yield c.find(obj).toArray(function(err, docs){
documents = docs;
db.close();
});
});
return documents.length;
};
var qs = require("querystring");
var query = qs.parse("keywords[]=abc&keywords[]=123");
var total = $db("ads", {"details.keywords": {$in: query["keywords[]"]}});
console.log(total);
As is, total is the iterator for the $db generator function. You would retrieve its yielded values via total.next().value. However, the mongodb library is callback-based and, as such, its functions do not return values, so the yield will produce nothing useful.
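Concretely, with the code above you would observe something like this (a sketch; the exact yielded value depends on what the callback-style mongodb calls return):
var total = $db("ads", {"details.keywords": {$in: query["keywords[]"]}});
console.log(total);        // {} -- a generator object, not a number
console.log(total.next()); // { value: undefined, done: false } -- paused at the first yield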
You mentioned you were using Promises elsewhere; I would suggest taking a look at bluebird, in particular its promisify functionality. Promisification inverts the callback model so that the arguments to the callback are now used to resolve the promisified function. Even better, promisifyAll will convert an entire callback-based API.
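To make that inversion concrete, here is a hand-rolled sketch of what promisify does (not bluebird's actual implementation, just the shape of it; fs.readFile is used only as a familiar callback-based example):
// wraps fn(args..., callback(err, result)) so it returns a promise instead
function promisify(fn) {
  return function () {
    var args = Array.prototype.slice.call(arguments);
    var self = this;
    return new Promise(function (resolve, reject) {
      fn.apply(self, args.concat(function (err, result) {
        if (err) reject(err);  // callback error -> rejected promise
        else resolve(result);  // callback result -> resolved promise
      }));
    });
  };
}

var fs = require('fs');
var readFileAsync = promisify(fs.readFile);
readFileAsync('/etc/hosts', 'utf8').then(function (text) {
  console.log(text.length);
});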
Finally, bluebird provides coroutine functionality as well; however its coroutines must return promises. So, your code may be rewritten as follows:
var mongo = require('mongodb');
var Promise = require('bluebird');
//here we convert the mongodb callback-based API to a promise-based API
Promise.promisifyAll(mongo);
$db = Promise.coroutine(function*(collection, obj){
    //existing functions are converted to promise-based versions which have
    //the same name with 'Async' appended to them
    return yield mongo.MongoClient.connectAsync('mongodb://localhost/test')
        .then(function(db) {
            return db.collectionAsync(collection);
        })
        .then(function(collection) {
            return collection.countAsync(obj); //count only the documents matching the query
        });
});
var qs = require("querystring");
var query = qs.parse("keywords[]=abc&keywords[]=123");
$db('ads',{"details.keywords": {$in: query["keywords[]"]}})
.then(console.log)
Related
I am thinking about a scenario of building up a promise queue:
//Let's assume that promises is an array of promises
var promiseQueue = [];
for (var promise of promises) {
if (promiseQueue.length) promiseQueue[promiseQueue.length - 1].then(promise);
promiseQueue.push(promise);
}
I am thinking about implementing a function called resolver:
function *resolve() {
var promise;
while (promise = yield) Promise.resolve(promise);
}
and then iterating it:
var promiseGenerator = resolve();
The problem is the for..of here which would be responsible for the actual iteration:
for (var r of promiseGenerator) {
}
In the code above the generator will be successfully iterated, but unfortunately I am not aware of a way to pass a parameter to this generator during the for..of iteration.
I would like to clarify that I do not need an alternative, I am perfectly aware that we can do something like this:
for (var p in promiseQueue) promiseGenerator.next(promiseQueue[p]);
I am specifically interested to know whether I can pass parameters to the generator when I execute a for..of cycle.
EDIT
The problem raised by amn is that the yield in the example he/she was focusing on would always get undefined. That's true if we pass undefined to next(), but not true if we pass something else. The problem I was raising is that a for..of loop does not allow us to pass anything to yield, which is what this specific question is all about; the example is a mere illustration of the problem, showing that the promises we would create will never be created in a for..of loop. However, there is life for iterable objects outside the realm of for..of loops, and we can pass defined values into the yield. An example with the criticized code chunk can look like:
function *resolve() {
var promise;
while (promise = yield) Promise.resolve(promise);
}
var responses = [];
var f = resolve();
var temp;
for (var i = 10; !(temp = f.next(i)).done; i--) responses.push(temp);
As we can see, the yield above cannot be assumed ab ovo to be undefined. And of course we can pass some custom thenables, like
Promise.resolve({
then: function(onFulfill, onReject) { onFulfill('fulfilled!'); }
});
or even promises which were not resolved yet. The point of the example was to show that we cannot pass values to the yield using the for..of loop, which is quite a feature gap in my opinion.
No, it is not possible to pass arguments to next when iterating with for..of.
function* generateItems() { /* ... */ }
for (var item of generateItems()) {
console.log(item);
}
is mostly short for
function* generateItems() { /* ... */ }
var iterator = generateItems()[Symbol.iterator]();
do {
const result = iterator.next();
if (result.done) break;
const item = result.value;
console.log(item);
} while (true);
barring a few missing try/catch wrappers. You can see in the spec here that it calls .next with no arguments:
Let nextResult be ? Call(iteratorRecord.[[NextMethod]], iteratorRecord.[[Iterator]], « »).
e.g.
iterator.next.apply(iterator, []);
that is, calling next() with no arguments at all.
I'm currently looking at async-await in C#, and noticed similarities to JavaScript promises. Looking into this I see that JavaScript is also going to support async-await statements, and that there are similarities between this and promises (look at this blog post for example).
On a whim, I wondered what JavaScript's implementation of async-await was and found this question (Java Equivalent of C# async/await?).
The accepted answer suggests that async-await (and by extension, I guess, promises) are implementations of a 'state machine'.
Question: What is meant by a 'state machine' in terms of promises, and are JavaScript promises comparable to C#'s async-await?
JavaScript promises are comparable to C# Task objects which have a ContinueWith function that behaves like .then in JavaScript.
By "state machines" it is means that they are typically implemented by a state and a switch statement. The states are places the function can be at when it runs synchronously. I think it's better to see how such a transformation works in practice. For example let's say that your runtime only understands regular functions. An async function looks something like:
async function foo(x) {
let y = x + 5;
let a = await somethingAsync(y);
let b = await somethingAsync2(a);
return b;
}
Now, let's look at all the places the function can be when it executes a step synchronously:
async function foo(x) {
// 1. first stage, initial
let y = x + 5;
let a = await somethingAsync(y);
// 2. after first await
let b = await somethingAsync2(a);
// 3. after second await
return b;
// 4. done, with result `b`.
}
Now, since our runtime only understands synchronous functions, our compiler needs to do something to turn that code into a regular function. We can make it a regular function and keep state, perhaps?
let state = 1;
let waitedFor = null; // nothing waited for
let waitedForValue = null; // nothing to get from await yet.
function foo(x) {
switch(state) {
case 1: {
var y = x + 5;
var a;
waitedFor = somethingAsync(y); // set what we're waiting for
return;
}
case 2: {
var a = waitedForValue;
var b;
waitedFor = somethingAsync2(a);
return;
}
case 3: {
b = waitedForValue;
returnValue = b; // where do we put this?
return;
}
default: throw new Error("Shouldn't get here");
}
}
Now, it's somewhat useful, but doesn't do anything too interesting - we need to actually run this as a function. Let's put the state in a wrapper and automatically run the promises when they're resolved:
function foo(x) { // note, not async
    // we keep our state
    let state = 1, numStates = 3;
    let waitedFor = null; // nothing waited for
    let waitedForValue = null, returnValue = null; // nothing to get from await yet.
    // and our modified function
    function stateMachine() {
        switch(state) {
            case 1: {
                var y = x + 5;
                var a;
                waitedFor = somethingAsync(y); // set what we're waiting for
                return;
            }
            case 2: {
                var a = waitedForValue;
                var b;
                waitedFor = somethingAsync2(a);
                return;
            }
            case 3: {
                b = waitedForValue;
                returnValue = b; // this time we have somewhere to put it
                return;
            }
            default: throw new Error("Shouldn't get here");
        }
    }
    // let's keep a promise for the return value;
    let resolve, p = new Promise(r => resolve = r); // keep a reference to the resolve
    // now let's kickStart it
    Promise.resolve().then(function pump(value) {
        waitedForValue = value; // feed the awaited result back into the machine
        stateMachine();
        state++; // the next state has progressed
        if(state > numStates) return resolve(returnValue); // past the last state: return the value
        return Promise.resolve(waitedFor).then(pump);
    });
    return p; // return the promise
}
Effectively, the Promise.resolve().then(... part calls the stateMachine and waits for the value that's being awaited every time until it is at the final state at which point it resolves the (returned beforehand) promise.
This is effectively what Babel or TypeScript do with your code too. What the C# compiler does is very close - with the biggest difference is that it's put in a class.
Note we are ignoring conditionals, exceptions and loops here - it makes things a little bit more complicated but not much harder (you just need to handle each case separately).
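To give a rough idea of the loop case (a sketch only, with a made-up getItemAsync used purely for illustration), a for loop containing an await compiles to a state that keeps transitioning back to itself until the loop condition fails:
// hypothetical async lookup, only for illustration
function getItemAsync(id) {
  return new Promise(function (resolve) {
    setTimeout(function () { resolve(id * 10); }, 10);
  });
}

// compiled form of:
//   async function sum(ids) {
//     let total = 0;
//     for (const id of ids) { total += await getItemAsync(id); }
//     return total;
//   }
function sum(ids) {
  let state = 1, i = 0, total = 0;

  // returns { waitFor: promise } to keep pumping, or { result: value } when finished
  function step(resumedValue) {
    switch (state) {
      case 1: // entry: start the loop (or skip it entirely)
        if (i < ids.length) { state = 2; return { waitFor: getItemAsync(ids[i]) }; }
        state = 3;
        return step();
      case 2: // resumed after the await inside the loop body
        total += resumedValue;
        i++;
        if (i < ids.length) return { waitFor: getItemAsync(ids[i]) }; // loop back into state 2
        state = 3;
        return step();
      case 3: // after the loop: the return value
        return { result: total };
      default: throw new Error("Shouldn't get here");
    }
  }

  return new Promise(function (resolve) {
    (function pump(value) {
      var next = step(value);
      if ('result' in next) resolve(next.result);
      else next.waitFor.then(pump);
    })();
  });
}

sum([1, 2, 3]).then(function (total) { console.log(total); }); // logs 60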
I have a REST-call method, restRequest(), built on the request module, which returns its response as a promise (it is an asynchronous method). I have to call this method recursively with different parameters, passing each result on to the next call.
Example code:
restRequest(url, "POST").then(function(response) {
restRequest(secondUrl, 'GET', response).then(function(response2) {
});
});
Will this work, or are there other ways to solve this?
I would use the async library for this
Specifically the waterfall
Which would work like
async.waterfall([
function firstRequest(callback) {
restRequest(url, "POST").then(function(response) {
callback(null, response);
});
},
function secondRequest (data, callback) {
restRequest(secondUrl, 'GET', data).then(function(response2) {
callback();
});
}
], function (err, result) {
// Handle err or result
});
Sorry for formatting I'm on mobile.
You can read about how async.waterfall works from the link above.
Your method works, but depending on how many requests you have, you can end up with quite deep callback hell.
But since you are using promises, you can just return your promise chain, like:
restRequest(url, "POST")
.then(function(resp) {
return restRequest(secondUrl, "GET", resp);
})
.then(function(resp) {
return restRequest(thirdUrl, "GET", resp);
})
.then(function(resp) {
// do whatever keep the chain going or whatever
})
.catch(function(error) {
// if any of the promises error it will immediately call here.
});
With promises you can return a new promise from within a .then and just keep the chain going infinitely.
I'm just biased for async as I think it really improves readability when used right.
You could do something like:
let requestParams = [
[url, 'POST'],
[secondUrl, 'GET'],
...
];
function callRecursive(response){
if(!requestParams.length) return Promise.resolve(response);
let params = requestParams.shift();
if(response) params.push(response);
return restRequest(...params).then(callRecursive);
}
callRecursive().then(successCallbk).catch(errCallBk);
You can supply one or more arguments to bind to your partially applied function.
restRequest(url,"POST").then(restRequest.bind(this,secondUrl, "GET"))
.then(restRequest.bind(this,thirdUrl, "GET"));
Since these are fired off in serial, what you really have is a simple chain of functions (some return promises, some might not) that can compose (or sequence, here) together, which I find to be a neat way to isolate out everything you want to happen and then combine behaviors as needed. It's still a Promise chain under the hood, but expressed as a series. First, a few utility methods to help:
var curry = (f, ...args) =>
(f.length <= args.length) ? f(...args) : (...more) => curry(f, ...args, ...more);
var pipeP = (...fnlist) =>
acc => fnlist.reduce( (acc,fn) => acc.then(fn), Promise.resolve(acc));
then
//make restRequest only return a Promise once it's given its 3rd argument
var restRequest = curry(restRequest);
//define what our requests look like
var request1 = restRequest('firstUrl', "POST");//-> curried function, not yet called
var request2 = restRequest('secondUrl', 'GET');//-> curried function, not yet called
//define some simple methods to process responses
var extractURL = x => x.url;//-> simple function
var extractData = x=> x.data;//-> simple function
//final behaviors, i.e. do something with data or handle errors
//var handleData = ... //-> do something with "data"
//var handleError = ... //-> handle errors
//now, create a sort of lazy program chain waiting for a starting value
//that value is passed to request1 as its 3rd arg, starting things off
var handleARequest = pipeP(request1, extractURL, request2, extractData);
//and execute it as needed by passing it a starting request
handleARequest({postdata:5}).then(handleData).catch(handleError);
Recursion is the most obvious approach but it's not necessary. An alternative is to build a .then() chain by reducing an array of known parameters (urls and methods).
The process is presented here under "The Collection Kerfuffle".
function asyncSequence(params) {
return params.reduce(function(promise, paramObj) {
return promise.then(function(response) {
return restRequest(paramObj.url, paramObj.method, response);
});
}, Promise.resolve(null)); // a promise resolved with the value to appear as `response` in the first iteration of the reduction.
}
This will cater for any number of requests, as determined by the length of the params array.
Call as follows :
var params = [
{url:'path/1', method:'POST'},
{url:'path/2', method:'GET'},
{url:'path/3', method:'POST'}
];
asyncSequence(params).then(function(lastResponse) {
//all successfully completed
}).catch(function(e) {
// something went wrong
});
How does app.use work in Koa?
When I set some generator inside app.use, everything works perfect.
How can I do the same elsewhere?
When I just execute the generator manually:
var getRelationsList = function *() {
var res = yield db.relations.find({});
console.log({'inside: ': res});
}
console.log({'outside: ': getRelationsList().next()});
getRelationsList().next();
I'm getting just { 'outside: ': { value: [Function], done: false } }
This is what I expect:
{ 'outside: ': { value: {object_with_results}, done: false } }
{ 'inside: ': {object_with_results}
EDIT
I changed my code like this:
var getRelationsList = function *() {
var res = yield db.relations.find({});
console.log({'inside: ': res});
}
console.log({'outside ': co(getRelationsList)});
Now the inside console.log shows me good results, but the outside console.log shows me just an empty object.
Generators are a powerful tool for organizing asynchronous code, but they don't magically wait for asynchronous code to run.
What's Happening
Let's walk through your code so you can see what is happening:
getRelationsList is a generator function that, when called, returns a new generator. At this point, no code in your generator function has been run (although if you were passing params they would be set). You then call .next on your generator to start execution of the generator function. It will execute up until it hits the first yield statement and return an object with the yielded value and the completion status of the generator.
It seems you understand most of that so far, but generators do not magically transform the yielded out values. When you yield out db.relations.find({}), you'll get the return value of the find function which I'm assuming is a Promise or some type of thenable:
so your 'outside' value is { value:Promise, done:false }
The reason your inside console.log never ran is that you're actually creating a new generator each time you call getRelationsList(), so when you call getRelationsList().next() again after the outside console.log you're creating a new generator and calling next, so it only executes up to the first yield, just like the call on the previous line.
In order to finish execution you must call next twice on the same instance of your generator: once to execute up to the yield and once to continue execution to the end of the function.
var gen = getRelationsList()
gen.next() // { value:Promise, done:false }
gen.next() // { value:undefined, done:true } (will also console.log inside)
You'll notice, however, if you run this, the inside console.log will be undefined. That's because the value of a yield statement is equal to the value passed to the following .next() call.
For example:
var gen2 = getRelationsList()
gen2.next() // { value:Promise, done:false }
gen2.next(100) // { value:undefined, done:true }
Outputs
{ inside:100 }
Because we passed 100 to the second .next() call and that became the value of the yield db.relations.find({}) statement which was then assigned to res.
Here's a link demoing all of this: http://jsfiddle.net/qj1aszub/2/
The Solution
The creators of koa use a little library called co which basically takes yielded out promises and waits for them to complete before passing the resolved value back into the generator function (using the .next() function) so that you can write your asynchronous code in a synchronous style.
co will return a promise, which will require you to call the .then method on it to get the value returned from the generator function.
var co = require('co');
var getRelationsList = function *() {
var res = yield db.relations.find({});
console.log({'inside: ': res});
return res
}
co(getRelationsList).then(function(res) {
console.log({'outside: ': res })
}).catch(function(err){
console.log('something went wrong')
});
co also allows you to yield out other generator functions and wait for their completion, so you don't have to wrap things with co at every level and deal with promises until you're at some sort of 'top level':
co(function *() {
var list = yield getRelationsList()
, processed = yield processRelations(list)
, response = yield request.post('/some/api', { data:processed })
return response.data
}).then(function(data) {
console.log('got:', data)
})
Your problem is that you call the getRelationsList() function multiple times, which is incorrect.
Change your code to following
var g = getRelationsList();
console.log('outside: ', g.next());
g.next(); //You get your console.log('inside: .... here
Generators must be acted upon by outside code.
Under the hood, Koa uses the co library to run the generator.
Here is how you might achieve what you're wanting outside of Koa:
var co = require('co');
var getRelationsList = function *() {
var res = yield db.relations.find({});
console.log({'inside: ': res});
}
co(getRelationsList).catch(function(err){});
I did a short screencast on JavaScript generators that should help you understand what's going on:
http://knowthen.com/episode-2-understanding-javascript-generators/
++ EDIT
If you're using generators to program in more of a synchronous style (eliminating callbacks), then all your work needs to be done in the generator, and you should use a library like co to execute the generator.
Here is a more detailed example of how you would interact with a generator manually. This should help you understand the results you're getting.
function * myGenerator () {
var a = yield 'some value';
return a;
}
var iterator = myGenerator();
// above line just instantiates the generator
console.log(iterator);
// empty object returned
// {}
var res1 = iterator.next();
// calling next() starts the generator, running to either the
// first yield statement or to a return.
console.log(res1);
// res1 is an object with 2 attributes
// { value: 'some value', done: false }
// value is whatever value was to the right of the first
// yield statement
// done is an indication that the generator hasn't run
// to completion... ie there is more to do
var toReturn = 'Yield returned: ' + res1.value;
var res2 = iterator.next(toReturn);
// calling next(toReturn) passes the value of
// the variable toReturn as the result of the yield,
// so it's assigned to the variable a in the generator
console.log(res2);
// res2 is an object with 2 attributes
// { value: 'Yield returned: some value', done: true }
// no further yield statements so the 'value' is whatever
// is returned by the generator.
// since the generator was run to completion
// done is returned as true
I'm experimenting with ES6 generators with the help of Babel, and I have trouble understanding how (or if!) I can effectively use callback-based async functions to output an iterator.
Let's say I want to be able to write a function that takes a number of URLs, asynchronously downloads them, and returns them as soon as they are downloaded.
I would like to be able to write something like the following:
let urls = ['http://www.google.com', 'http://www.stackoverflow.com' ];
for ( {url, data} of downloadUrls(urls) ) {
console.log("Content of url", url, "is");
console.log(data);
}
How can I implement downloadUrls ?
Ideally I would like to be able to write the following:
var downloadUrls = function*(urls) {
for( let url of urls ) {
$.ajax(url).done( function(data) {
yield data;
});
}
};
This of course doesn't work, since yield is being invoked inside a callback and not directly inside the generator.
I can find many examples online of people trying the same; they are either not very transparent, require enabling browser/node flags, or use node-specific features/libraries.
The library closest to what I need seems to be task.js, but I'm unable to get even the simplest example to run on current Chrome.
Is there a way to get the intended behaviour using standard and current features (with "current" I mean usable with transpilers like Babel, but without the need to enable extra flags in the browser), or do I have to wait for async/await?
2019 update
Yielding via callbacks is actually pretty simple. Since you can only call yield directly from the generator function* where it appears (and not from callbacks), you need to yield a Promise instead, which will be resolved from the callback:
async function* fetchUrls(urls) {
for (const url of urls)
yield new Promise((resolve, reject) => {
fetch(url, { mode: 'no-cors' }).then(response => resolve(response.status));
});
}
(async function main() {
const urls = ['https://www.ietf.org/rfc/rfc2616.txt', 'https://www.w3.org/TR/PNG/iso_8859-1.txt'];
// for-await-of syntax
for await (const status of fetchUrls(urls))
console.log(status);
}());
If the example doesn't work in the browser (it may return 0 instead of 200 due to Cross-Origin Read Blocking), try it live on repl.it.
Is there a way to get the intended behaviour using standard and current features
Yes, use promises and generators. Many promise libraries, and some standalone ones, feature the use of generator "coroutines".
But notice that you cannot mix iteration with asynchrony; you can use generators for one or the other only. Your example seems to confuse them a bit: it looks like you expect the for ( {url, data} of downloadUrls(urls) ) loop to work synchronously, which cannot work.
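As a sketch of what the promises-plus-generator shape looks like for your example (using co as the runner and a hypothetical download helper that wraps the jQuery call in a promise), the loop lives inside the generator and the runner feeds each resolved value back in, so the results are handled one after another rather than through a synchronous for...of:
var co = require('co');

// hypothetical helper: wraps the jQuery-style ajax call in a promise
function download(url) {
  return new Promise(function (resolve, reject) {
    $.ajax(url)
      .done(function (data) { resolve({ url: url, data: data }); })
      .fail(function (err) { reject(err); });
  });
}

co(function* () {
  var urls = ['http://www.google.com', 'http://www.stackoverflow.com'];
  for (var url of urls) {
    var result = yield download(url); // pauses here until this download finishes
    console.log("Content of url", result.url, "is");
    console.log(result.data);
  }
}).catch(function (err) { console.error(err); });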
do I have to wait for async/await?
No, you don't have to wait, Babel already supports them!
Here is a clean way to use a generator / iterator to flatten asynchronous code which works for me in node.js:
var asyncProcedureGenerator1 = function*() {
var it = yield(0); //get a reference to the iterator
try {
var a = yield (asyncPart1.bind(it))(0); //call the function, set this = it
var b = yield (asyncPart2.bind(it))(a);
var c = yield (asyncPart3.bind(it))(b);
console.log("c = ", c);
}
catch(err)
{
console.log("Something went wrong: ", err);
}
};
var runAsyncGenerator = function(generator) {
var asyncProcedureIterator = generator(); //create an iterator
asyncProcedureIterator.next(); //start the iterator
asyncProcedureIterator.next(asyncProcedureIterator); //pass a reference of the iterator to itself
}
var asyncPart1 = function(param1) {
var it = this; //the iterator will be equal to this.
console.log("Starting asyncPart1 with param1 = ", param1);
setTimeout(function() {
console.log("Done with asyncPart1");
var returnValue = 42 + param1;
console.log("asyncPart1 returned ", returnValue);
it.next(returnValue); //when we are done, resume the iterator which has yielded to us.
},2000);
};
var asyncPart2 = function(param1) {
var it = this; //the iterator will be equal to this.
console.log("Starting asyncPart2 with param1 = ", param1);
setTimeout(function() {
console.log("Done with asyncPart2");
var returnValue = param1 / 2;
console.log("asyncPart2 returned ", returnValue);
//it.throw("Uh oh.");
it.next(returnValue);
},2000);
};
var asyncPart3 = function(param1) {
var it = this; //the iterator will be equal to this.
console.log("Starting asyncPart3 with param1 = ", param1);
setTimeout(function() {
console.log("Done with asyncPart3");
var returnValue = param1 / 3;
console.log("asyncPart3 returned ", returnValue);
it.next(returnValue);
},2000);
};
runAsyncGenerator(asyncProcedureGenerator1);
The idea is to run the generator, create an iterator, and then pass a reference to that iterator to itself.
Then the generator can call asynchronous functions (with yield) and pass them a reference to the iterator, which allows those functions either to signal success and resume execution by calling iterator.next(result), or to signal failure by calling iterator.throw(error).
I just came up with this pattern, so there may be some gotchas I haven't found yet, but it seems to work and allows very flat code with minimal additions.
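For the failure path specifically, here is a minimal standalone sketch (made-up names, with the iterator captured in a closure rather than passed via bind, just to keep it short) showing how iterator.throw lands in the generator's try/catch:
function * task () {
  try {
    var result = yield startWork(); // resumed by it.next(value) or it.throw(err)
    console.log("got", result);
  } catch (err) {
    console.log("Something went wrong:", err.message);
  }
}

function startWork() {
  setTimeout(function () {
    it.throw(new Error("Uh oh.")); // resume the generator by throwing at the yield
  }, 100);
}

var it = task();
it.next(); // runs up to the yield; ~100ms later the catch block logs "Something went wrong: Uh oh."
Uncommenting the it.throw("Uh oh.") line in asyncPart2 above would have a similar effect: the catch block in asyncProcedureGenerator1 handles the error and asyncPart3 never runs.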