Promisify writing file to filesystem - javascript

I am interested in understanding how to promisify this block of code:
const http = require('http');
const fs = require('fs');
// Downloads `url` to the file at `dest`, then invokes the node-style
// callback `cb`. On request error the partial file is removed and `cb`
// is called with err.message (a string, not the Error object).
const download = function(url, dest, cb) {
let file = fs.createWriteStream(dest);
const request = http.get(url, function(response) {
response.pipe(file);
file.on('finish', function() {
file.close(cb); // close() is async, call cb after close completes.
});
}).on('error', function(err) { // Handle errors
fs.unlink(dest); // Delete the file async. (But we don't check the result)
if (cb) cb(err.message);
});
};
My first take on this was something to the extent of:
const http = require('http');
const fs = require('fs');
// NOTE(question): this attempt is intentionally broken — `await` appears
// inside plain (non-async) callbacks, which is a syntax error, and the
// `{ ... }` placeholder is not valid code.
const download = async (url, dest, cb) => {
let file = fs.createWriteStream(dest);
const request = http.get(url, function(response) {
response.pipe(file);
file.on('finish', function() {
const closed = await file.close(cb); // close() is async, await here?
if (closed) {
// handle cleanup and retval
}
});
}).on('error', function(err) { // Handle errors
const deleted = await fs.unlink(dest); // Delete the file async.
if (!deleted) { ... }
});
};
The implementation above is clearly wrong. What is the right way to approach this to remove callbacks and just use async/await?

Here's a way to manually wrap the pipe operation in a promise. Unfortunately, most of this is just error handling to cover all the possible places an error can occur:
const http = require('http');
const fs = require('fs');

/**
 * Download `url` to the file at `dest`.
 * @param {string} url  - HTTP URL to fetch.
 * @param {string} dest - Filesystem path to write to.
 * @returns {Promise<void>} resolves once the file stream emits 'finish';
 *   rejects on request, response, status-code, or file-stream errors.
 */
const download = function (url, dest) {
  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream(dest);

    // Centralized error path: reject first, then remove the partial file
    // once the stream has released its handle.
    function cleanup(err) {
      reject(err);
      file.on('close', () => {
        // Best-effort removal of partial results; fs.unlink requires a
        // callback in modern Node — errors here are deliberately ignored.
        fs.unlink(dest, () => {});
      });
      file.end();
    }

    file.on('error', cleanup).on('finish', resolve);

    const request = http.get(url, function (response) {
      // Node's http.IncomingMessage exposes `statusCode`, not `status`.
      if (response.statusCode < 200 || response.statusCode >= 300) {
        cleanup(new Error(`Unexpected Request Status Code: ${response.statusCode}`));
        return;
      }
      response.pipe(file);
      response.on('error', cleanup);
    }).on('error', cleanup);
  });
};
// Example usage — `someURL` and `someDest` are placeholders for real values.
download(someURL, someDest).then(() => {
console.log("operation complete");
}).catch(err => {
console.log(err);
});
This does not wait for files to be closed or removed in the error conditions before rejecting (figuring that there's typically nothing constructive to do if those cleanup operations have errors anyway). If that is desired, it could be added easily by just calling reject(err) from the asynchronous callbacks for those cleanup operations or by using the fs.promises version of those functions and awaiting them.
A few things to note. This is mostly error handling because there are three possible places you can have errors and some cleanup work needed for some errors.
Added required error handling.
In the OP's original code, they called file.close(), but file is a stream and there is no .close() method on a writeStream. You call .end() to close the write stream.
You also probably need to check for an appropriate response.status because http.get() still returns a response object and stream even if the status is something like 4xx or 5xx.

Here's how I'd re-write your node-style callback API as an asynchronous function:
const http = require('http');
const fs = require('fs');

/**
 * Download `url` and write the response body to the file at `dest`.
 * @param {string} url  - HTTP URL to fetch.
 * @param {string} dest - Filesystem path to write to.
 * @returns {Promise<void>}
 * @throws {Error} on non-2xx responses, request errors, or write errors.
 */
async function download(url, dest) {
  // Wrap the callback-based http.get() so the response can be awaited.
  const response = await new Promise((resolve, reject) => {
    http.get(url, resolve).once('error', reject);
  });

  // Node's http.IncomingMessage exposes `statusCode`, not `status`
  // (the original also had a `responses` typo on this line).
  if (response.statusCode < 200 || response.statusCode >= 300) {
    throw new Error(`${response.statusCode} ${http.STATUS_CODES[response.statusCode]}`);
  }

  const file = await fs.promises.open(dest, 'w');
  try {
    // Readable streams are async-iterable: each chunk is awaited in turn,
    // and an 'error' event rejects the iterator, landing in the catch.
    for await (const data of response) {
      await file.write(data);
    }
  } catch (error) {
    // Remove the partial file before propagating the failure.
    await file.close();
    await fs.promises.unlink(dest);
    throw error;
  }
  await file.close();
}
Note that this approach uses the FileHandle class in the fs.promises namespace, as well as the Symbol.asyncIterator interface defined on the Readable stream class, which allows you to consume the data events of the response with a for await...of loop and propagate error handling from the error event of the response to the catch block by implicitly rejecting the promise returned by the underlying asynchronous iterator.

Related

Wait for response from request before returning

I am trying to create a function with a GET request that returns a portion of the data from the GET request. However, it keeps returning before the data is retrieved, so I keep getting "undefined". How can I set this up so it actually waits for the data to be set before returning?
// Question code (Electron `net` module). Wraps a GET request in a Promise.
// NOTE(review): resolve(info) fires on the *first* 'data' chunk, so large
// bodies could be truncated, and the following reject() runs even after a
// successful resolve (harmless — a settled promise ignores it — but
// confusing to read).
let getInfo = async () => {
const request = net.request({
url: URL
})
return new Promise((resolve, reject) => { // Promise being here DOES work
request.on('response', (response) => {
response.on('data', (chunk) => {
//return new Promise((resolve, reject) => { //Promise being here does NOT work
let body = JSON.parse(chunk)
let info = body.data
if (info){
resolve(info);
}
reject();
//})
});
});
request.write('')
request.end()
}).then(data => {
console.log("From then: "+data)
return data
})
}
getInfo().then(data => {
console.log("From outside: "+data)
})
Edit: This is the updated version that still does not work. I am trying to use the native electron method and I don't see why this doesn't work. The "From then:" part displays the info correctly. But when run "From outside:" it prints undefined. Does the issue have anything to do with the response.on being nested inside the request.on?
Solution: As #NidhinDavid showed in his answer, the issue was that the promise was inside the 'response' listener. Moving the 'GET' request from start to finish inside the Promise fixed it to giving the correct output. I have updated my code to reflect that for future individuals.
// Wraps an Electron `net` GET request in a Promise that resolves with the
// parsed `data` field of the JSON body, or rejects with an error message.
let getInfo = () => {
  let info;
  const request = net.request({
    url: URL
  })
  return new Promise((resolve, reject) => {
    request.on('response', (response) => {
      response.on('data', (chunk) => {
        let body = JSON.parse(chunk)
        info = body.data
        if (info) {
          resolve(info)
        } else {
          reject('Something went wrong');
        }
      });
    });
    // Finalize the request *after* the listeners are attached. In the
    // original, write()/end() sat inside the 'data' handler — but that
    // handler only runs once a response is already arriving, so the
    // request would never actually be sent.
    request.write('')
    request.end()
  })
}
// Consume the promise returned by getInfo().
getInfo()
.then(data => {
// this will be your info object
console.log(data)
})
.catch(err => {
// this will log 'Something went wrong' in case of any error
console.log(err)
})
You need to return inside your `on` event handler. Read more about asynchronous code and synchronous code here
I couldn't find the net module, and the one included with Node.js does not have a request method. So, to get a similar concept of event emitters and promises, I am using the https module and making an HTTP request to fetch JSON and parse it.
'use strict'
var https = require('https');

// Fetch a sample JSON document over HTTPS and resolve with the raw
// response body (a string); callers parse it themselves.
const getInfo = async () => {
  // create a new promise chain
  // remember it is a chain, if one return is omitted
  // then the chain is broken
  return new Promise((resolve, reject) => {
    var options = {
      host: 'support.oneskyapp.com',
      path: '/hc/en-us/article_attachments/202761727/example_2.json'
    };
    // start the request
    const req = https.request(options, function (response) {
      var str = '';
      // data arrives in chunks
      // chunks need to be stitched together before parsing
      response.on('data', function (chunk) {
        str += chunk;
      });
      // response body obtained — resolve (aka return) the result
      response.on('end', function () {
        resolve(str)
      });
      // errors while reading the response body
      response.on('error', function (err) {
        reject(err)
      })
    });
    // Connection-level failures (DNS lookup, refused connection, timeout)
    // are emitted on the *request* object, not the response — without this
    // handler the promise would never settle on such errors and the
    // process would crash on an unhandled 'error' event.
    req.on('error', reject);
    req.end()
  })
}
//*********************************************
// using async await
//*********************************************
// if this is the entry point into app
// then top-level async approach required
(async ()=>{
try{
let data = await getInfo()
console.log("From ASYNC AWAIT ")
console.log(JSON.stringify(JSON.parse(data)))
}
catch (err) {
console.log("operation failed, error: ", err)
}
})();
//************************************************
// using promise chains
//************************************************
// identical behaviour to the IIFE above, expressed with .then/.catch
getInfo()
.then((data)=>{
console.log("FROM PROMISE CHAIN ")
console.log(JSON.stringify(JSON.parse(data)))
})
.catch((err)=>{
console.log("operation failed, error: ", err)
})
Try this; it might work for you:
let info;
// Fetches JSON from `_url`, caches it in the module-level `info`
// variable, and resolves with the parsed body.
const getInfo = async (_url) => {
  const response = await fetch(_url);
  const data = await response.json();
  info = data;
  return data;
};
const url = "some url";
// getInfo is async: `info` is only populated once its promise settles, so
// the result must be consumed via .then/await — logging synchronously
// right after the call (as the original did) prints undefined.
getInfo(url)
  .then(() => console.log(info))
  .catch(err => console.log('request failed:', err.message));
Async function always returns a promise, so either consume that promise or internally await the data and assign it to some variable.
Check for the valid data required in info by logging it to the console.

How to pause between two asynchronous actions?

I need to make pause between "/system/backup/save" and "/file/print". Because otherwise, the backup will not be completed before the contents of the "/file" directory are displayed.
Now the code is performing a backup, but it gives me a list of files in which there is no backup yet.
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');
// Question code. NOTE(review): the single wrapping Promise is settled by
// whichever host finishes first, and the sleep(5000) below is never
// awaited, so it does not actually pause between the two writes.
async function backup() {
return new Promise(function (resolve, reject) {
for (let elem of hosts) {
const conn = new RouterOSAPI({
host: elem.host,
user: elem.user,
password: elem.password
})
conn.connect()
.then((client) => {
return conn.write(["/system/backup/save",]).then((data) => {
resolve('COMPLETE - OK');
}).catch((err) => {
reject('ERROR!');
});
// NOTE(review): unreachable — the callback returned on the line above.
sleep(5000);
}).then(() => {
return conn.write("/file/print").then((data2) => {
console.log(data2)
resolve('CHECK - OK');
conn.close();
}).catch((err) => {
reject('ERROR!');
});
}).catch((err) => {
reject('ERROR CONNECT TO ' + elem.name);
});
}
});
}
backup();
Generally, using a delay to wait for completion of an asynchronous process is an anti-pattern — you'll always end up either not waiting long enough or waiting an unnecessarily long time. The former is of course a bigger problem than the latter, but both are problems. If you have any means of having the other end report completion of the backup, that would be your best bet. Looking at the documentation, it seems like conn.write's promise shouldn't be fulfilled until the operation is complete, but I only skimmed the docs so maybe that's not the case.
Other than that:
Don't create the promise explicitly, your async function automatically creates a promise (but you may not want an async function here anyway)
Don't mix using .then/.catch handlers with async functions; use await.
For instance, here's a version that runs the backups and such in parallel and returns an array giving success/failure via allSettled:
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');

/**
 * Back up every configured host in parallel.
 * @returns {Promise<Array>} allSettled-style results, one entry per host;
 *   rejected entries carry the error for that host.
 */
async function backup() {
    // Run the hosts in parallel
    return await Promise.allSettled(hosts.map(async (host) => {
        let conn;
        try {
            // `host` is the current element of `hosts` — the original
            // mistakenly referenced an undefined `elem` here, which would
            // throw a ReferenceError for every host.
            const c = new RouterOSAPI({
                host: host.host,
                user: host.user,
                password: host.password
            })
            await c.connect();
            conn = c;
            await conn.write(["/system/backup/save",]);
            await sleep(5000); // Only if really unavoidable because the
                               // backup continues even after the promise
                               // from `write` is fulfilled
            await conn.write("/file/print");
            conn = null;
            c.close();
        } catch (e) {
            if (conn) {
                try {
                    conn.close();
                } catch {} // Don't let errors in close shadow previous errors
            }
            throw e;
        }
    }));
}
// Each entry in `results` is { status, value | reason } — one per host.
backup()
.then(results => {
// Check for status = "rejected" entries in results and report the errors
});
But note that since that function just returns the promise from allSettled, you might not want an async function at all:
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');

/**
 * Back up every configured host in parallel. Same behaviour as the async
 * version above, but returns the allSettled promise directly.
 * @returns {Promise<Array>} allSettled-style results, one entry per host.
 */
function backup() {
    // Run the hosts in parallel
    return Promise.allSettled(hosts.map(async (host) => {
        let conn;
        try {
            // `host` is the current element of `hosts` — the original
            // mistakenly referenced an undefined `elem` here, which would
            // throw a ReferenceError for every host.
            const c = new RouterOSAPI({
                host: host.host,
                user: host.user,
                password: host.password
            })
            await c.connect();
            conn = c;
            await conn.write(["/system/backup/save",]);
            await sleep(5000); // Only if really unavoidable because the
                               // backup continues even after the promise
                               // from `write` is fulfilled
            await conn.write("/file/print");
            conn = null;
            c.close();
        } catch (e) {
            if (conn) {
                try {
                    conn.close();
                } catch {} // Don't let errors in close shadow previous errors
            }
            throw e;
        }
    }));
}
// Each entry in `results` is { status, value | reason } — one per host.
backup()
.then(results => {
// Check for status = "rejected" entries in results and report the errors
});
(There's a subtle difference between those two around what happens if hosts.map throws an error — perhaps because hosts isn't an array — but it's probably not important. The former returns a rejected promise, the latter throws a synchronous error.)

async/await with Limiter for sending requests

I'm trying to limit the number of requests I send to an API.
I'm using Limiter and it's working just like I need, the only issue is that I can't find a way to use it with await (I need all the responses before rendering my page)
Can someone give me a hand with it?
Btw the Log returns a boolean.
const RateLimiter = require('limiter').RateLimiter;
const limiter = new RateLimiter(50, 5000)
// Question code. NOTE(review): removeTokens takes a callback and returns
// immediately — `response` below holds removeTokens' own (boolean) return
// value, not the async callback's result, which is why the final log line
// runs before any request has completed.
for (let i = 0; i < arrayOfOrders.length; i++) {
const response = limiter.removeTokens(1, async (err, remainingRequests) => {
console.log('request')
return await CoreServices.load('updateOrder', {
"OrderNumber": arrayOfOrders[i],
"WorkFlowID": status
})
})
console.log('response', response)
}
console.log('needs to log after all the request');
this is loggin:
response true
response true
response false
needs to log after all the request
request
request
request
...
Promisifying .removeTokens will help, see if this code works
const RateLimiter = require('limiter').RateLimiter;
const limiter = new RateLimiter(50, 5000);
// Promisified wrapper: resolves with remainingRequests once the limiter
// grants `n` tokens, rejects if the limiter reports an error.
const tokenPromise = n => new Promise((resolve, reject) => {
limiter.removeTokens(n, (err, remainingRequests) => {
if (err) {
reject(err);
} else {
resolve(remainingRequests);
}
});
});
(async() => { // this line required only if this code is top level, otherwise use in an `async function`
// Each order first awaits a token (rate limiting), then issues its
// request; Promise.all gathers every result before the final log.
const results = await Promise.all(arrayOfOrders.map(async (order) => {
await tokenPromise(1);
console.log('request');
return CoreServices.load('updateOrder', {
"OrderNumber": order,
"WorkFlowID": status
});
}));
console.log('needs to log after all the request');
})(); // this line required only if this code is top level, otherwise use in an `async function`
explanation
Firstly:
// (Repeated from the answer above for reference.)
const tokenPromise = n => new Promise((resolve, reject) => {
limiter.removeTokens(n, (err, remainingRequests) => {
if (err) {
reject(err);
} else {
resolve(remainingRequests);
}
});
});
promisifies the limiter.removeTokens to use in async/await - in nodejs you could use the built in promisifier, however lately I've had too many instances where that fails - so a manual promisification (I'm making up a lot of words here!) works just as well
Now the code is easy - you can use arrayOfOrders.map rather than a for loop to create an array of promises that all run parallel as much as the rate limiting allows, (the rate limiting is done inside the callback)
await Promise.all(... will wait until all the CoreServices.load have completed (or one has failed - you could use await Promise.allSettled(... instead if you want)
The code in the map callback is tagged async so:
await tokenPromise(1);
will wait until the removeTokens callback is called - and then the request
return CoreServices.load
is made
Note, this was originally return await CoreServices.load but the await is redundant, as return await somepromise in an async function is just the same as return somepromise - so, adjust your code too

can await/async makes blocking to non-blocking process in javascript

I read this article from node
https://nodejs.org/en/docs/guides/blocking-vs-non-blocking/
It says the code below is a process blocker:
const fs = require('fs');
// Synchronous read: the event loop is stalled until the whole file is in
// memory — timers, I/O callbacks, and promises cannot run meanwhile.
const data = fs.readFileSync('/file.md'); // blocks here until file is read
console.log(data);
// moreWork(); will run after console.log
what if I add await?
will the code above becomes non-blocking or it will stay in its true nature?
Example code:
const fs = require('fs');
// NOTE(question): `await` does not help here — readFileSync does not
// return a promise, and `await` outside an async function is invalid.
const data = await fs.readFileSync('/file.md'); // no more blocking
console.log(data);
Thank you
No, the code can't run, since `await` must be used inside an `async` function.
Also, `await` should be used with a function that returns a promise.
the code means:
// await new Promise(...)
// console.log(...)
// (illustrative pseudo-code: awaiting a promise is equivalent to
// chaining the continuation with .then)
new Promise().then((...) => console.log(...))
If you need a non-blocking function, you should use fs.readFile instead.
Blocking means that the whole application is blocked.
So, all SetInterval, Promise, Events or whatever async callback are paused until that sync function ends its execution.
It's the same of what you get when you use a for..loop.
NodeJS provides you some non-blocking file system methods with callbacks, just change your code like this.
const fs = require('fs');
// Non-blocking read: the callback runs once the file is available,
// leaving the event loop free in the meantime.
fs.readFile('/file.md', function (err, data) {
  if (err) {
    throw err;
  }
  console.log(data);
});
That is the only way.
The `await` operator waits for a promise and must be wrapped in an `async` function;
you should code like this
const fs = require("fs");

/**
 * Promise wrapper around fs.readFile.
 * @param {string} fileName - path of the file to read.
 * @returns {Promise<Buffer>} resolves with the file contents,
 *   rejects with the fs error (e.g. ENOENT).
 */
function readFile(fileName) {
  return new Promise((resolve, reject) => {
    fs.readFile(fileName, (err, data) => {
      // `return` stops execution falling through to resolve(undefined)
      // after a rejection — the original called both; the second call is
      // ignored by the promise, but only by luck.
      if (err) return reject(err);
      resolve(data);
    });
  });
}

// Demo: read a file and log its contents, or the error on failure.
async function f1() {
  try {
    var x = await readFile("foo.json");
    console.log(x);
  } catch (e) {
    console.log(e); // e.g. ENOENT if foo.json does not exist
  }
}
f1();

Using Promises with Await/Async Correctly

I'm having some issues understanding how the Promise functionality works, I have previously used Bluebird but I wanted to try to learn the new await/async standard in order to improve as a programmer. I have used async/await and created promises where I feel appropriate however the functions are still executing out of order.
I'm running this on the latest version of Node with Webpack, I'm not getting any meaningful errors. It runs fine just not as expected. My output when running it is:
Searching the Web for: Test String
Web search Completed!
Promise { <pending> }
Response Handler Completed!
Ideally I'd like it to respond with:
Searching the Web for: Test String
Response Handler Completed
Web search Completed
And then return the output of my response handler.
Can anyone spot my mistake?
const https = require('https');
// Replace the subscriptionKey string value with your valid subscription key.
const subscriptionKey = '<samplekey>';
const host = 'api.cognitive.microsoft.com';
const path = '/bing/v7.0/search';
// Question code. NOTE(review): the promise created inside bing_web_search
// never calls its resolve/reject — response_handler builds its own,
// separate promise — so `await bing_web_search(search)` never settles.
const response_handler = async (response) => {
return new Promise((resolve, reject) => {
let body = '';
response.on('data', (d) => {
body += d;
resolve(body);
});
response.on('end', () => {
console.log('\nRelevant Headers:\n');
for (const header in response.headers)
// header keys are lower-cased by Node.js
{
if (header.startsWith('bingapis-') || header.startsWith('x-msedge-')) { console.log(`${header}: ${response.headers[header]}`); }
}
body = JSON.stringify(JSON.parse(body), null, ' ');
//console.log('\nJSON Test Response:\n');
//console.log(body);
});
response.on('error', (e) => {
console.log(`Error: ${e.message}`);
});
console.log('Response Handler Completed!');
});
};
const bing_web_search = async (search) => {
return new Promise((resolve, reject) => {
console.log(`Searching the Web for: ${search}`);
const request_params = {
method: 'GET',
hostname: host,
path: `${path}?q=${encodeURIComponent(search)}&$responseFilter=${encodeURIComponent('Webpages')}&count=${50}`,
headers: {
'Ocp-Apim-Subscription-Key': subscriptionKey,
},
};
const req = https.request(request_params, response_handler);
console.log('Web search Completed!');
console.log(req.body);
req.end();
});
};
module.exports = {
search: async (search) => {
if (subscriptionKey.length === 32) {
const result = await bing_web_search(search);
console.log('Search Completed');
} else {
console.log('Invalid Bing Search API subscription key!');
console.log('Please paste yours into the source code.');
}
},
};
A bit late but the following should set you on the way, I made changes to the code. If you have any questions please let me know.
const https = require('https');
// Replace the subscriptionKey string value with your valid subscription key.
const subscriptionKey = '<samplekey>';
const host = 'api.cognitive.microsoft.com';
const path = '/bing/v7.0/search';
// Answer code: response_handler is curried — it receives the outer
// promise's resolve/reject and returns the actual response callback, so
// the promise created in bing_web_search is the one that settles.
const response_handler = (resolve,reject) => (response) => { // no need for async, you return a promise
//this one does not return anything, it's the handler for the response and will resolve
// or reject accordingly
let body = '';
response.on('data', (d) => {
body += d;
//cannot resolve yet, we're not done
// you can resolve on end maybe? I don't know nodejs http
// if end event is called when request fails then end would not
// be the correct way either, better use fetch api
//resolve(body);
});
response.on('end', () => {
console.log('\nRelevant Headers:\n');
for (const header in response.headers)
// header keys are lower-cased by Node.js
{
if (header.startsWith('bingapis-') || header.startsWith('x-msedge-')) { console.log(`${header}: ${response.headers[header]}`); }
}
body = JSON.stringify(JSON.parse(body), null, ' ');
resolve(body);//resolving the promise returned by bing_web_search
//console.log('\nJSON Test Response:\n');
//console.log(body);
});
response.on('error', (e) => {
console.log(`Error: ${e.message}`);
//need to reject with the error
reject(e);
});
console.log('Response Handler Completed!');
};
//no need to specify async, you are not awaiting anything
// you are creating a promise, when using non promise asynchronous
// functions that work with callbacks or event emitting objects
// you need resolve and reject functions so you have to return
// new Promise(
// (resolve,reject)=>somecallbackNeedingFunction((err,result)=>
// err ? reject(err) : resolve(result)
// )
// )
const bing_web_search = (search) => {
return new Promise((resolve, reject) => {
console.log(`Searching the Web for: ${search}`);
const request_params = {
method: 'GET',
hostname: host,
path: `${path}?q=${encodeURIComponent(search)}&$responseFilter=${encodeURIComponent('Webpages')}&count=${50}`,
headers: {
'Ocp-Apim-Subscription-Key': subscriptionKey,
},
};
const req = https.request(
request_params,
response_handler(resolve,reject)//passing this resolve and reject
);
//no, request not completed, we just started
console.log('Web search Completed!');
// console.log(req.body); // nothing to log here
req.end();
});
};
module.exports = {
search: async (search) => {
if (subscriptionKey.length === 32) {
//did not change anything bing_web_search returns a promise
// so you can just await it
const result = await bing_web_search(search);
console.log('Search Completed');
//this will resolve with the results
return result
} else {
console.log('Invalid Bing Search API subscription key!');
console.log('Please paste yours into the source code.');
//the caller of this function can handle the rejection
return Promise.reject('Invalid Bing Search API subscription key!');
}
},
};
[update]
Your comment suggest that you do not call search correctly or handle the promise it returns correctly. You have no control over how long a response takes so in a set of responses the first request may return last. This is why you have Promise.all
const searchObjects = [s1, s2];
// Marker wrapper distinguishing failed searches from successful results.
const Fail = function (reason) { this.reason = reason; };
Promise.all(
  searchObjects.map(
    searchObject => obj.search(searchObject)
      .then(
        x => [x, searchObject], // on success, pair the result with its input
        err => new Fail([err, searchObject]) // on failure, wrap — keeps Promise.all from rejecting
      )
  )
)
.then(
  results => {
    // Successful entries are [result, searchObject] pairs; failures are
    // Fail instances. Test the element itself before any destructuring —
    // the original destructured `([r,_])`, which throws a TypeError on a
    // Fail instance because it is not iterable.
    console.log(
      "resolved results:",
      results.filter(r => !(r instanceof Fail))
    );
    console.log(
      "failed results:",
      results.filter(r => r instanceof Fail)
    );
  }
)
If you have a lot of searches then maybe you want to throttle the amount of responses withing a certain time period or active connections. Let me know if you need help with that.

Categories

Resources