I'm working on a library and I'd like to prevent users from calling a specific function, in order to avoid infinite loops.
Usually I'd go about doing it like this:
let preventFooCalls = false;
function fireUserCallbacks() {
  preventFooCalls = true;
  // Fire callbacks of the user here...
  preventFooCalls = false;
}
function foo() {
  if (preventFooCalls) throw Error();
  // Run the content of foo() ...
  // It will probably call fireUserCallbacks() at some point
}
However, if fireUserCallbacks is async, this approach no longer works. It might be called multiple times concurrently, and with async user callbacks, preventFooCalls is not guaranteed to have the correct value. For instance:
let preventFooCalls = false;
async function fireUserCallbacks() {
  preventFooCalls = true;
  // Fire callbacks of the user here, one of which being:
  await new Promise(r => setTimeout(r, 1000));
  preventFooCalls = false;
}
// Then when doing:
fireUserCallbacks();
foo(); // This will throw even though it's being called from outside fireUserCallbacks()
How can I detect if code is running from within a specific promise?
The only thing I can think of is new Error().stack, but oof that sounds like a terrible way to do it.
Some context
The reason why I want this is because I'm working on a part of a library that takes care of loading assets. Some of these assets might contain other assets with the possibility of infinite recursion. In order to handle recursion I have another function that I want users to call instead. Therefore I want to warn users when they call foo() from within one of the fireUserCallbacks() callbacks. While this will only be an issue when assets actually contain infinite loops, I'd rather block the usage of foo() completely to prevent unexpected hangs due to infinite loops.
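For Node.js specifically, the built-in AsyncLocalStorage from the async_hooks module tracks exactly this kind of async context, so it could replace the boolean flag; a minimal sketch (Node-only, not available in browsers):
const { AsyncLocalStorage } = require('async_hooks');
const insideCallbacks = new AsyncLocalStorage();
async function fireUserCallbacks() {
  // Everything awaited inside run() sees the store, even across async gaps.
  await insideCallbacks.run(true, async () => {
    // Fire callbacks of the user here, for example:
    await new Promise(r => setTimeout(r, 1000));
  });
}
function foo() {
  // getStore() returns the store only when called from within the run() context.
  if (insideCallbacks.getStore()) throw Error("foo() may not be called from a user callback");
  // Run the content of foo() ...
}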
edit: Here's a somewhat more sophisticated example of my actual code. I would share my actual code but that is really way too long for this format, tbh this example is already getting a bit too complex.
class AssetManager {
  constructor() {
    this.registeredAssetTypes = new Map();
    this.availableAssets = new Map();
  }
  registerAssetType(typeId, assetTypeConstructor) {
    this.registeredAssetTypes.set(typeId, assetTypeConstructor);
  }
  fillAvailableAssets(assetDatas) {
    for (const assetData of assetDatas) {
      const constructor = this.registeredAssetTypes.get(assetData.type);
      const asset = new constructor(assetData.id, assetData.data);
      this.availableAssets.set(assetData.id, asset);
    }
  }
  async loadAsset(assetId, recursionTracker = null) {
    // I have some extra code here that makes sure this function will only have
    // a single running instance, but this example is getting way too long already
    const asset = this.availableAssets.get(assetId);
    let isRootRecursionTracker = false;
    if (!recursionTracker) {
      isRootRecursionTracker = true;
      recursionTracker = new RecursionTracker(assetId);
    }
    const assetData = await asset.generateAsset(recursionTracker);
    if (isRootRecursionTracker) {
      // If this call was made from outside any `generateAsset` implementation,
      // we will wait for all assets to be loaded and put in the right place.
      await recursionTracker.waitForAll();
      // Finally we will give the recursionTracker the created root asset,
      // in case any of the sub assets reference the root asset.
      // Note that circular references in any of the sub assets (i.e. not
      // containing the root asset anywhere in the chain) are already taken care of.
      if (recursionTracker.rootLoadingAsset) {
        recursionTracker.rootLoadingAsset.setLoadedAssetData(assetData);
      }
    }
    return assetData;
  }
}
const assetManager = new AssetManager();
class RecursionTracker {
  constructor(rootAssetId) {
    this.rootAssetId = rootAssetId;
    this.rootLoadingAsset = null;
    this.loadingAssets = new Map();
  }
  loadAsset(assetId, cb) {
    let loadingAsset = this.loadingAssets.get(assetId);
    if (!loadingAsset) {
      loadingAsset = new LoadingAsset(assetId);
      this.loadingAssets.set(assetId, loadingAsset);
      if (assetId != this.rootAssetId) {
        loadingAsset.startLoading(this);
      } else {
        this.rootLoadingAsset = loadingAsset;
      }
    }
    loadingAsset.onLoad(cb);
  }
  async waitForAll() {
    const promises = [];
    for (const loadingAsset of this.loadingAssets.values()) {
      promises.push(loadingAsset.waitForLoad());
    }
    await Promise.all(promises);
  }
}
class LoadingAsset {
  constructor(assetId) {
    this.assetId = assetId;
    this.onLoadCbs = new Set();
    this.loadedAssetData = null;
  }
  async startLoading(recursionTracker) {
    const loadedAssetData = await assetManager.loadAsset(this.assetId, recursionTracker);
    this.setLoadedAssetData(loadedAssetData);
  }
  onLoad(cb) {
    if (this.loadedAssetData) {
      cb(this.loadedAssetData);
    } else {
      this.onLoadCbs.add(cb);
    }
  }
  setLoadedAssetData(assetData) {
    this.loadedAssetData = assetData;
    this.onLoadCbs.forEach(cb => cb(assetData));
  }
  async waitForLoad() {
    await new Promise(r => this.onLoad(r));
  }
}
class AssetTypeInterface {
  constructor(id, rawAssetData) {
    this.id = id;
    this.rawAssetData = rawAssetData;
  }
  async generateAsset(recursionTracker) {}
}
class AssetTypeFoo extends AssetTypeInterface {
  async generateAsset(recursionTracker) {
    // This is here just to simulate network traffic, an indexeddb lookup, or any other async operation:
    await new Promise(r => setTimeout(r, 200));
    const subAssets = [];
    for (const subAssetId of this.rawAssetData.subAssets) {
      // This won't work, as it will start waiting for itself to finish:
      // const subAsset = await assetManager.loadAsset(subAssetId);
      // subAssets.push(subAsset);
      // So instead we will create a dummy asset:
      const dummyAsset = {};
      const insertionIndex = subAssets.length;
      subAssets[insertionIndex] = dummyAsset;
      // and load the asset with a callback rather than waiting for a promise
      recursionTracker.loadAsset(subAssetId, (loadedAsset) => {
        // since this will be called outside the `generateAsset` function, this won't hang
        subAssets[insertionIndex] = loadedAsset;
      });
    }
    return {
      foo: this.id,
      subAssets,
    };
  }
}
assetManager.registerAssetType("foo", AssetTypeFoo);
class AssetTypeBar extends AssetTypeInterface {
  async generateAsset(recursionTracker) {
    // This is here just to simulate network traffic, an indexeddb lookup, or any other async operation:
    await new Promise(r => setTimeout(r, 200));
    // We'll just return a simple object for this one.
    // No recursion here...
    return {
      bar: this.id,
    };
  }
}
assetManager.registerAssetType("bar", AssetTypeBar);
// This is all the raw asset data as stored on the user's disk.
// These are not instances of the assets yet, so there are no circular references yet.
// The assets only reference other assets by their "id".
assetManager.fillAvailableAssets([
  {
    id: "mainAsset",
    type: "foo",
    data: {
      subAssets: ["subAsset1", "subAsset2"]
    }
  },
  {
    id: "subAsset1",
    type: "bar",
    data: {},
  },
  {
    id: "subAsset2",
    type: "foo",
    data: {
      subAssets: ["mainAsset"]
    }
  }
]);
// This sets the loading of the "mainAsset" in motion. It recursively loads
// all referenced assets and finally puts the loaded assets in the right place,
// completing the circle.
(async () => {
  const asset = await assetManager.loadAsset("mainAsset");
  console.log(asset);
})();
Maintain a queue and a set. The queue contains pending requests. The set contains pending requests, requests in progress, and successfully completed requests. (Each item would include the request itself; the request's status: pending, processing, or complete; and possibly a retry counter.)
When a request is made, check whether it is in the set. If it is, it was already requested and will be processed, is being processed, or was processed successfully and is already available. If not, add it to both the set and the queue, then trigger queue processing. If queue processing is already running, the trigger is ignored; if not, queue processing starts.
Queue processing pulls requests off the queue, one by one, and processes them. If a request fails, it can either be put back onto the queue for repeat attempts (a counter can be included in the item to limit retries) or removed from the set so it can be requested again later. Queue processing ends when the queue is empty.
This avoids recursion and unnecessary repeat requests. A sketch of this approach follows.
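A minimal sketch of the queue-plus-set idea, assuming a hypothetical loadSingleAsset(id) that loads one asset without recursing; all names here are illustrative, not from the library above:
const statuses = new Map(); // id -> "pending" | "processing" | "complete"
const queue = [];
let processing = false;
function requestAsset(id) {
  if (statuses.has(id)) return; // already queued, in progress, or done
  statuses.set(id, "pending");
  queue.push(id);
  if (!processing) processQueue(); // the trigger is ignored while processing runs
}
async function processQueue() {
  processing = true;
  while (queue.length > 0) {
    const id = queue.shift();
    statuses.set(id, "processing");
    try {
      await loadSingleAsset(id); // hypothetical: loads one asset, queues its sub-assets via requestAsset()
      statuses.set(id, "complete");
    } catch (err) {
      statuses.delete(id); // drop it from the set so it can be requested again later
    }
  }
  processing = false;
}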
Arriving with a theory question :)
I have a front end that sends N axios requests in a Promise.all() with a map function. This works fine. Each time one of the promises resolves, a little table gets updated with that request's answer, until I get the full table and the array of answers at the end. ✅
The problem comes when I want to read the logs of the server at the same time.
So my objective is to run another axios request to my express.js server every 2 seconds to retrieve the logs of the last 2 seconds; this way I could show what is happening with each answer in real time.
Any ideas on how to run these two tasks in parallel?
In the front end I'm using React, and the Promise.all has this structure:
setIsLoading(true); // setting a flag to know this is running
const doAllTheTable = await Promise.all(
  tableData.map(async (lineOfMyTable) => {
    const answer = await doMyRequest(lineOfMyTable) // my axios.get request
    return updateTableLine(answer) // the function that updates the right line
  })
);
// all promises are good now
setIsLoading(false)
So, basically I want another loop that runs every 2 seconds while "isLoading" is true, updating another part of my front end to show the logs in the meantime. But I need both things to happen at the same time!
Thank you for your ideas :)
Rather than awaiting your Promise.all immediately, store a reference to the promise so you can start checking the logs:
const doAllTheTablePromise = Promise.all(
  tableData.map(async lineOfMyTable => {
    const answer = await doMyRequest(lineOfMyTable); // my axios.get request
    return updateTableLine(answer); // the function that updates the right line
  })
);
let cancelled = false;
(async () => {
  while (!cancelled) {
    // Check your logs..
    await new Promise(r => setTimeout(r, 2000)); // 2 second delay
  }
})();
await doAllTheTablePromise;
cancelled = true;
Once your doAllTheTablePromise has resolved, you can stop checking the logs.
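If any of the requests can reject, it may also be worth lowering the flag in a finally block so the polling loop still stops; a small variation on the snippet above:
let cancelled = false;
(async () => {
  while (!cancelled) {
    // Check your logs..
    await new Promise(r => setTimeout(r, 2000));
  }
})();
try {
  await doAllTheTablePromise;
} finally {
  cancelled = true; // stop polling even if one of the requests rejected
}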
There must be many ways to write this. Here's one involving a token, provided by the caller of two async processes, foo() and bar(), as a means of communication between them.
async function foo(tableData, token) {
  try {
    await Promise.all(tableData.map(async (lineOfMyTable) => {
      const answer = await doMyRequest(lineOfMyTable);
      return updateTableLine(answer);
    }));
    token.setIsLoading = false; // lower flag when all requests are complete
  } catch (error) {
    token.setIsLoading = false; // lower flag if a synchronous or asynchronous error occurs
  }
}
async function bar(token) {
  function delay(ms) { // this can be written as an inner or outer function, whichever suits.
    return new Promise(resolve => {
      setTimeout(resolve, ms);
    });
  }
  if (token.setIsLoading) {
    let logs = await retrieveLogs();
    // process/display logs here
    await delay(2000);
    return bar(token); // recursive call
  } else {
    return "complete"; // optional
  }
}
async function myCaller() {
  // ... preamble
  let loadingToken = { // passed to foo() and bar() as a means of communication between them.
    'setIsLoading': true // raise flag before calling foo() and bar().
  };
  // Promise.all takes an array of promises
  return Promise.all([foo(tableData, loadingToken), bar(loadingToken)]);
}
EDIT:
Maybe better written like this, with the caller looking after lowering the flag:
async function foo(tableData) {
  return Promise.all(tableData.map(async (lineOfMyTable) => {
    return updateTableLine(await doMyRequest(lineOfMyTable));
  }));
}
async function bar(token) {
  function delay(ms) { // this can be written as an inner or outer function, whichever suits.
    return new Promise(resolve => {
      setTimeout(resolve, ms);
    });
  }
  if (token.setIsLoading) {
    let logs = await retrieveLogs();
    // process/display logs here
    await delay(2000);
    return bar(token); // recursive call
  } else {
    return "complete"; // optional
  }
}
async function myCaller() {
  // ... preamble
  let loadingToken = { // passed to bar().
    'setIsLoading': true // raise flag before calling foo() and bar().
  };
  // Promise.all takes an array of promises
  return Promise.all([
    foo(tableData).finally(() => { loadingToken.setIsLoading = false; }),
    bar(loadingToken)
  ]);
}
I have a Node.js program in which I need to run two functions at the beginning of the program and then access their results later on. Currently I await each function in turn, and that works. However, to save time, I don't want to wait for GetService and GetProcess, since I only need their data later in the project. It takes about 4 seconds to get this data, and I want to run it in the background, as I don't need the results immediately. How can I do this in Node.js? If I await Promise.all, it waits until getService and getProcess are done before going on to the rest of the program.
An example:
async function main() {
  // I want to run both of these functions in the background to save time
  let service = await GetServices();
  this.process = await GetProcess();
  // ... additional code runs here
  // let's say that after 30 seconds this code is called
  let users = GetUser(service);
  let addresses = GetAdress(this.process);
}
I'm actually running a Yeoman generator:
https://yeoman.io/authoring/
https://yeoman.io/authoring/user-interactions.html
export default class myGenerator extends Generator {
  // Here I want to run those functions in the background to save time, as prompting
  // the user takes some time (let's say the user has many questions...)
  async initializing() {
    let service = await GetServices();
    this.process = await GetProcess();
  }
  async prompting() {
    const answers = await this.prompt([
      {
        type: "input",
        name: "name",
        message: "Your project name",
        default: this.appname // Default to current folder name
      },
      {
        type: "confirm",
        name: "list",
        choices: this.process // here I need the data from the functions running in the background
      }
    ]);
  }
}
Let's assume that getServices() may take 3 seconds and getProcess() may take 4 seconds; if you run both functions at the same time, you will get the return values of both promises after 4 seconds in total.
You can execute other code while this runs in the background; there will be a callback when the promises resolve, and your late functions will be called at that stage.
Check the simple example below:
let service;
let process;
function main() {
  // Both functions will execute in the background
  Promise.all([getServices(), getProcess()]).then((val) => {
    service = val[0];
    process = val[1];
    console.log(service, process);
    // After all promises have completed, this code will be called
    // let users = GetUser(service);
    // let users = GetAdress(process);
    console.log('I am called after all promises completed.')
  });
  // Current example.
  // let service = await GetServices();
  // this.process = await GetProcess();
  /* Code blocks.. */
  console.log('Code will execute without delay...')
}
function getServices() {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve("service is returned")
    }, 3000);
  });
}
function getProcess() {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      resolve("process is returned")
    }, 4000);
  });
}
main();
You can start the asynchronous operation but not await it yet:
function suppressUnhandledRejections(p) {
  p.catch(() => {});
  return p;
}
async function main() {
  // We have to suppress unhandled rejections on these promises. If they become
  // rejected before we await them later, we'd get a warning otherwise.
  const servicePromise = suppressUnhandledRejections(GetServices());
  this.processPromise = suppressUnhandledRejections(GetProcess());
  // Do other stuff
  const service = await servicePromise;
  const process = await this.processPromise;
}
Also consider using Promise.all() which returns a promise for the completion of all promises passed to it.
async function main() {
  const [services, process, somethingElse] = await Promise.all([
    GetServices(),
    GetProcess(),
    SomeOtherAsyncOperation(),
  ]);
  // Use the results.
}
To do what you need, you have to understand the event loop.
Node.js is designed to run your JavaScript on a single thread, unlike languages like Go, though it handles I/O on separate threads internally. You can use process.nextTick() to queue a callback on the main thread; it will be executed once the currently executing code has run to completion.
async function main() {
  // I want to run both of these functions in the background to save time
  let service = await GetServices();
  this.process = await GetProcess();
  // ... additional code runs here
  // let's say that after 30 seconds this code is called
  let users = GetUser(service);
  let addresses = GetAdr(this.process);
}
function someFunction() {
  // do something...
}
main();
process.nextTick(someFunction); // runs once the synchronous part of main() has finished
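For reference, a minimal demonstration (illustrative, not from the original answer) of when a nextTick callback runs relative to the surrounding synchronous code and timers:
console.log('start');
process.nextTick(() => console.log('nextTick callback'));
setTimeout(() => console.log('timeout callback'), 0);
console.log('end');
// Prints: start, end, nextTick callback, timeout callback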
await is not blocking as expected when a block of code updates the db (using postgres / node)
https://node-postgres.com
I have a list of async function calls, each call updates a database, and each subsequent call works on data updated by the previous call.
There are about eight calls in a row, and each call must update the complete set of data it is working with, 100% to completion, before going to the next.
I tried to make everything not async, but it appears I am forced to make everything async/await because of the library I am using (postgres / node).
Each function call must complete 100% before going on to the next function call, because the next step does a select on rows where a field is not null (where the previous step fills in a value).
I have an await in front of each call, that does something (see code below):
loads the db from a csv,
next step selects all rows just inserted, calls an API and updates the database,
and so on,
but at one point, when the next function executes, NONE of the rows have been updated (as I trace through and verify, a SQL statement returns nothing back);
the code seems to pass right through to the second function call, not blocking or honoring the await, and completes its code block.
If I comment out some of the latter rows (dependent on the previous), and let the program run to completion, the database gets updated.
There is nothing functionally wrong with the code, everything works, just not from beginning to completion.
After running two function calls at the beginning, letting that run, I can then comment out those rows, uncomment the later rows in the flow, and run again, and everything works as expected, but I cannot run to completion with both uncommented.
What can I do to make sure each function call completes 100%, has all updates completed in the database, before going to the next step?
async/await is not working for me.
This is not pseudo-code; it's the actual executing code that I am working with, with only the function names changed. It is real working code, cut-and-pasted directly from my IDE.
// these are functions I call below (each in their own .js)
const insert_rows_to_db_from_csv = require('./insert_rows_to_db_from_csv')
const call_api_using_rows_from_function_above = require('./call_api_using_rows_from_function_above')
const and_so_on = require('./and_so_on')
const and_so_on_and_on = require('./and_so_on_and_on')
const and_so_on_and_on_and_on = require('./and_so_on_and_on_and_on')
// each of the above exports a main() function where I can call func.main()
// just like the one defined below (this is my main() entry point)
module.exports = {
  main: async function (csvFilePath) {
    console.log('service: upload.main()')
    try {
      const csvList = []
      let rstream = fs.createReadStream(csvFilePath)
        .pipe(csv())
        .on('data', (data) => csvList.push(data))
        .on('end', async () => {
          let num_rows = csvList.length
          // step one (if I run these two, with the step two calls below commented out, this works)
          await insert_rows_to_db_from_csv.main(csvList);
          await call_api_using_rows_from_function_above.main();
          // step two
          // blows up here, on the next function call:
          // no rows are selected in the sql statements; I must comment these out, let the above
          // run to completion, then comment out the rows above, and let these run separately
          await work_with_rows_updated_in_previous_call_above.main(); // sets
          await and_so_on.main();
          await and_so_on_and_on.main();
          await and_so_on_and_on_and_on.main();
        })
    } catch (err) {
      console.log(err.stack)
    } finally {
    }
  }
};
Here is the one-liner I am using to call the insert/update to the DB:
return await pool.query(sql, values);
that's it, nothing more. This is from using:
https://node-postgres.com/
npm install pg
PART 2 - continuing on.
I think the problem might be here. This is where I am doing each API call, then the insert (that the next function call is dependent upon); there's some code smell here that I can't sort out.
processBatch(batch) is called; it calls the API, gets a response back, and then within there it calls handleResponseDetail(response), where the insert is happening. I think the problem is here, if anyone has ideas.
This is a code block inside:
await call_api_using_rows_from_function_above.main();
It completes with no errors, inserts rows, and commits; then the next function is called, and that function finds no rows (inserted here). But the await on the entire main() .js blocks and waits, so I don't understand.
/**
 * API call, and within it a call to handleResponse which does the DB insert.
 * @param batch
 * @returns {Promise<*>}
 */
async function processBatch(batch) {
  console.log('Processing batch');
  return await client.send(batch).then(res => {
    return handleResponseDetail(res);
  }).catch(err => handleError(err));
}
// should this be async?
function handleResponseDetail(response) {
  response.lookups.forEach(async function (lookup) {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup);
    }
  });
}
Given the code block from your Part 2 edit, the problem is now clear: all of your insert()s are being scheduled outside of the blocking context of the rest of your async/await code! This is because of that .forEach, see this question for more details.
I've annotated your existing code to show the issue:
function handleResponseDetail(response) { // synchronous function
  response.lookups.forEach(async function (lookup) { // asynchronous function
    // these async functions all get scheduled simultaneously,
    // without waiting for the previous one to complete - that's why you can't use forEach like this
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup); // this ONLY blocks the inner async function, not the outer `handleResponseDetail`
    }
  });
}
Here is a fixed version of that function which should work as you expect:
async function handleResponseDetail(response) {
  for (const lookup of response.lookups) {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup); // blocks handleResponseDetail until done
    }
  }
}
Alternatively, if the order of insertion doesn't matter, you can use Promise.all for efficiency:
async function handleResponseDetail(response) {
  await Promise.all(response.lookups.map(async lookup => {
    if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
      ++lookup_fail;
      console.log('No response from API for this address.')
    } else {
      ++lookup_success;
      const id = await insert(lookup);
    }
  })); // waits until all insertions have completed before returning
}
To reiterate, you cannot easily use .forEach() with async/await because .forEach() simply calls the given function for each element of the array synchronously, with no regard for awaiting each promise before calling the next. If you need the loop to block between each element, or to wait for all elements to complete processing before returning from the function (this is your use case), you need to use a different for loop or alternatively a Promise.all() as above.
What your main function currently does is merely create the stream, assign listeners, and return immediately. It does not wait for all the listeners to resolve like you are trying to have it do.
You need to extract your file-reading logic into another function which returns a Promise that resolves only when the entire file has been read, then await that Promise inside main:
function getCsvList(csvFilePath) {
  return new Promise((resolve, reject) => {
    const csvList = []
    fs.createReadStream(csvFilePath)
      .pipe(csv())
      .on('data', (data) => csvList.push(data))
      .on('end', () => {
        resolve(csvList)
      })
      .on('error', (e) => reject(e))
  })
}
module.exports = {
  main: async function (csvFilePath) {
    try {
      const csvList = await getCsvList(csvFilePath)
      await insert_rows_to_db_from_csv.main(csvList);
      await call_api_using_rows_from_function_above.main();
      await work_with_rows_updated_in_previous_call_above.main();
      await and_so_on.main();
      await and_so_on_and_on.main();
      await and_so_on_and_on_and_on.main();
    } catch (err) {
      console.log(err.stack)
    } finally {
    }
  }
};
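As a side note, on Node 10+ readable streams are async-iterable, so the same wrapper can be written without an explicit Promise; a sketch, assuming the same csv() transform as above:
async function getCsvList(csvFilePath) {
  const csvList = []
  // Transform streams are readable, so they can be iterated with for await.
  // Stream errors are thrown here and can be caught by the caller.
  for await (const data of fs.createReadStream(csvFilePath).pipe(csv())) {
    csvList.push(data)
  }
  return csvList
}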
In Node.js I'm using the fs.createWriteStream method to append data to a local file. In the Node documentation they mention the drain event when using fs.createWriteStream, but I don't understand it.
var stream = fs.createWriteStream('fileName.txt');
var result = stream.write(data);
In the code above, how can I use the drain event? Is the event used properly below?
var data = 'this is my data';
if (!streamExists) {
  var stream = fs.createWriteStream('fileName.txt');
}
var result = stream.write(data);
if (!result) {
  stream.once('drain', function () {
    stream.write(data);
  });
}
The drain event is for when a writable stream's internal buffer has been emptied.
This can only happen after the size of the internal buffer has exceeded its highWaterMark property, which is the maximum number of bytes of data that can be stored in a writable stream's internal buffer before it signals the data source to stop writing.
Something like this can happen with setups that read from one stream faster than the data can be written to another resource. For example, take two streams:
var fs = require('fs');
var read = fs.createReadStream('./read');
var write = fs.createWriteStream('./write');
Now imagine that the file read is on an SSD and can be read at 500MB/s, while write is on an HDD that can only write at 150MB/s. The write stream will not be able to keep up, and will start storing data in the internal buffer. Once the buffer has reached the highWaterMark, which is 16KB by default, the writes will start returning false, and the stream will internally queue a drain. Once the internal buffer's length is 0, the drain event is fired.
This is how a drain works:
if (state.length === 0 && state.needDrain) {
  state.needDrain = false;
  stream.emit('drain');
}
And these are the prerequisites for a drain which are part of the writeOrBuffer function:
var ret = state.length < state.highWaterMark;
state.needDrain = !ret;
To see how the drain event is used, take the example from the Node.js documentation.
function writeOneMillionTimes(writer, data, encoding, callback) {
  var i = 1000000;
  write();
  function write() {
    var ok = true;
    do {
      i -= 1;
      if (i === 0) {
        // last time!
        writer.write(data, encoding, callback);
      } else {
        // see if we should continue, or wait
        // don't pass the callback, because we're not done yet.
        ok = writer.write(data, encoding);
      }
    } while (i > 0 && ok);
    if (i > 0) {
      // had to stop early!
      // write some more once it drains
      writer.once('drain', write);
    }
  }
}
The function's objective is to write 1,000,000 times to a writable stream. A variable ok is set to true, and the loop only executes while ok is true. For each loop iteration, the value of ok is set to the value of stream.write(), which will return false if a drain is required. If ok becomes false, the event handler for drain waits, and when it fires, resumes the writing.
Regarding your code specifically, you don't need to use the drain event because you are writing only once right after opening your stream. Since you have not yet written anything to the stream, the internal buffer is empty, and you would have to be writing at least 16KB in chunks in order for the drain event to fire. The drain event is for writing many times with more data than the highWaterMark setting of your writable stream.
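To see the threshold in action, here is a small experiment (the file name and highWaterMark value are arbitrary):
const fs = require('fs');
// Lower the highWaterMark to 16 bytes so the buffer fills up quickly.
const stream = fs.createWriteStream('demo.txt', { highWaterMark: 16 });
console.log(stream.write('a'.repeat(8)));  // true: buffer still below the highWaterMark
console.log(stream.write('b'.repeat(64))); // false: buffer now exceeds the highWaterMark
stream.once('drain', () => {
  console.log('buffer flushed, safe to write again');
  stream.end();
});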
Imagine you're connecting 2 streams with very different bandwidths, say, uploading a local file to a slow server. The (fast) file stream will emit data faster than the (slow) socket stream can consume it.
In this situation, node.js will keep data in memory until the slow stream gets a chance to process it. This can get problematic if the file is very large.
To avoid this, Stream.write returns false when the underlying system buffer is full. If you stop writing, the stream will later emit a drain event to indicate that the system buffer has emptied and it is appropriate to write again.
You can pause/resume the readable stream to control its bandwidth.
Better: you can use readable.pipe(writable) which will do this for you.
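A minimal sketch of the pipe approach (the file names are placeholders):
const fs = require('fs');
const read = fs.createReadStream('./source-file');
const write = fs.createWriteStream('./destination-file');
// pipe() pauses the readable whenever write() returns false
// and resumes it on 'drain', handling backpressure for you.
read.pipe(write);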
EDIT: There's a bug in your code: regardless of what write returns, your data has been written. You don't need to retry it. In your case, you're writing data twice.
Something like this would work:
var packets = […],
    current = -1;
function niceWrite() {
  current += 1;
  if (current === packets.length)
    return stream.end();
  var nextPacket = packets[current],
      canContinue = stream.write(nextPacket);
  // wait until the stream drains to continue
  if (!canContinue)
    stream.once('drain', niceWrite);
  else
    niceWrite();
}
Here is a version with async/await:
const write = (writer, data) => {
  return new Promise((resolve) => {
    if (!writer.write(data)) {
      writer.once('drain', resolve)
    } else {
      resolve()
    }
  })
}
// usage
const run = async () => {
  const write_stream = fs.createWriteStream('...')
  const max = 1000000
  let current = 0
  while (current <= max) {
    // write() expects a string or Buffer, so convert the number
    await write(write_stream, String(current++))
  }
}
https://gist.github.com/stevenkaspar/509f792cbf1194f9fb05e7d60a1fbc73
This is a speed-optimized version using Promises (async/await). The caller has to check whether it gets a promise back, and only in that case does await need to be called. Awaiting each call can slow down the program by a factor of 3...
const write = (writer, data) => {
  // return a promise only when we get a drain
  if (!writer.write(data)) {
    return new Promise((resolve) => {
      writer.once('drain', resolve)
    })
  }
}
// usage
const run = async () => {
  const write_stream = fs.createWriteStream('...')
  const max = 1000000
  let current = 0
  while (current <= max) {
    // write() expects a string or Buffer, so convert the number
    const promise = write(write_stream, String(current++))
    // since drain happens rarely, awaiting each write call is really slow.
    if (promise) {
      // we got a drain event, therefore we wait
      await promise
    }
  }
}