How to close an https stream in Node.js

I'm loading a .ndjson file via https. I want to close it after reading 100 lines of the file.
const amount = 100;

https.get(url, (res) => {
  var { statusCode } = res;
  if (statusCode !== 200) {
    throw new Error(`Request Failed.\n Status Code: ${statusCode}`);
  }
  res.setEncoding('utf8');
  let rows = [];
  res
    .pipe(ndjson.parse())
    .on('data', function (obj) {
      rows.push(obj);
      if (rows.length === amount) {
        this.end();
      }
    })
    .on('end', () => {
      resolve(rows); // resolve comes from an enclosing Promise (not shown)
    });
}).on('error', (e) => {
  throw new Error(e.message);
});
But every way I have tried to close the stream, the same error message appears:
Error: Could not parse row {"username"...
at DestroyableTransform.parseRow [as mapper] (C:\Users\thoma\Documents\GitHub\test\node_modules\ndjson\index.js:19:28)
at DestroyableTransform.flush [as _flush] (C:\Users\thoma\Documents\GitHub\test\node_modules\split2\index.js:44:21)
at DestroyableTransform.<anonymous> (C:\Users\thoma\Documents\GitHub\test\node_modules\readable-stream\lib\_stream_transform.js:138:49)
at Object.onceWrapper (events.js:314:30)
at emitNone (events.js:105:13)
at DestroyableTransform.emit (events.js:207:7)
at prefinish (C:\Users\thoma\Documents\GitHub\test\node_modules\readable-stream\lib\_stream_writable.js:596:14)
at finishMaybe (C:\Users\thoma\Documents\GitHub\test\node_modules\readable-stream\lib\_stream_writable.js:604:5)
at afterWrite (C:\Users\thoma\Documents\GitHub\test\node_modules\readable-stream\lib\_stream_writable.js:470:3)
at _combinedTickCallback (internal/process/next_tick.js:144:20)
And the stream works fine when not forcefully closed, so it isn't related to the ndjson file. Is it possible to close the stream in the middle of the request?

There are several solutions:
Send a header to close the connection: this.set("Connection", "close")
End the stream with this.end()
I do not know which one is best.
So, in your example, you have an issue due to this (I recommend you take a look at this link). Try the following code:
res
  .pipe(ndjson.parse())
  .on('data', obj => {
    rows.push(obj);
    if (rows.length === amount) {
      this.end();
    }
  })
  .on('end', () => {
    resolve(rows);
  });
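For what it's worth, here is a hedged sketch of another way to stop mid-request. The parse error in the stack trace comes from split2 flushing a final partial line when the parser is ended, so instead of ending the parser you can unpipe it and destroy the underlying response (this assumes, as in the original snippet, that resolve comes from an enclosing Promise):

let rows = [];
const parser = res.pipe(ndjson.parse());
parser.on('data', (obj) => {
  rows.push(obj);
  if (rows.length === amount) {
    res.unpipe(parser); // stop feeding the parser; it is never ended, so nothing gets flushed
    res.destroy();      // abort the underlying HTTPS response
    resolve(rows);      // resolve here, since 'end' may not fire after destroy()
  }
});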

Related

Can't catch code-breaking error thrown inside required class

I'm trying to scrape a website using Puppeteer. The website has a captcha, and I use a 3rd-party library/API to bypass it.
Here is a simplified version of my code:
const puppeteer = require('puppeteer');
const dbc = require('./dpc/deathbycaptcha');

async function init() {
  let page = launchPuppeteer();
  try {
    await page.goto(`https://www.example.com`, { timeout: 60000 });
    await captcha_element.screenshot({ path: setting.captcha, omitBackground: true });
    let captchaResult = await solve_captcha().catch((error) => { throw new Error(error) });
    log('ALL DONE');
  }
  catch (e) {
    log('=====================ERROR============================');
    log(e);
  }
}
Here is the solve_captcha function:
function solve_captcha() {
  return new Promise((resolve, reject) => {
    try {
      const client = new dbc.HttpClient('myUsername', 'myPassword');
      client.decode({ captcha: setting.captcha }, (captcha) => {
        if (captcha) {
          resolve(captcha['text']);
        }
        else {
          reject('error');
        }
      });
    }
    catch (e) {
      reject(e.toString());
    }
  });
}
Sometimes I can't connect to the captcha-solving API due to a server outage or network problems, and it throws this error:
C:\pathToProject\dpc\deathbycaptcha.js:218
throw new Error(err.message);
^
Error: read ECONNRESET
at ClientRequest.<anonymous> (C:\pathToProject\dpc\deathbycaptcha.js:218:15)
←[90m at ClientRequest.emit (events.js:310:20)←[39m
←[90m at Socket.socketErrorListener (_http_client.js:426:9)←[39m
←[90m at Socket.emit (events.js:310:20)←[39m
←[90m at emitErrorNT (internal/streams/destroy.js:92:8)←[39m
←[90m at emitErrorAndCloseNT (internal/streams/destroy.js:60:3)←[39m
←[90m at processTicksAndRejections (internal/process/task_queues.js:84:21)←[39m
This error happens in deathbycaptcha.js, which I've required in my code with
const dbc = require('./dpc/deathbycaptcha');
Here is a simplified version of the code in deathbycaptcha.js that is causing the error:
class HttpClient extends Client {
  _call(/* arguments here */) {
    var options = { /* some options */ };
    var form = new FormData();
    const request = form.submit(options, (err, response) => {
      if (err) {
        console.log(err.toString());
        throw new Error(err.message);
      }
      // more code here
    });
  }
}
So here is the problem: this code runs inside two try/catch blocks, first in the caller function (init()) and second in the solve_captcha function, but neither of them is able to catch this error. It breaks the whole program, the process exits, and I have to run it again.
Why can't I catch and handle this error in my code?
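For context, here is a minimal illustration (not the deathbycaptcha code) of why the surrounding try/catch never fires: the throw happens inside a callback that runs on a later tick of the event loop, after the try block has already returned, so it surfaces as an uncaught exception instead of being caught:

function doWork() {
  setTimeout(() => {
    // runs later, outside any try/catch that wrapped the call below
    throw new Error('boom');
  }, 0);
}

try {
  doWork(); // returns immediately; the throw happens later and crashes the process
} catch (e) {
  console.log('never reached', e);
}

Under that assumption, the error can only be handled where it is thrown (for example by changing deathbycaptcha.js to pass the error to its callback or reject a promise instead of throwing), or as a last resort with a process-level 'uncaughtException' handler.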

How to fix the "Socket Hangup Error" when a large number of requests are made really quickly

I have a Node.js application that aggregates content from various websites. Requests are made to fetch the feeds from different sources asynchronously using request streams. I get the socket hang up error pretty often when the requests are made.
err in accessing the link { Error: socket hang up
at createHangUpError (_http_client.js:331:15)
at TLSSocket.socketOnEnd (_http_client.js:423:23)
at emitNone (events.js:111:20)
at TLSSocket.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickDomainCallback (internal/process/next_tick.js:219:9) code: 'ECONNRESET' } https://arstechnica.com/?p=1488489
Environment details:
node version - v8.12.0
I tried out a few suggestions given in related SO posts, but I still get the same error:
NodeJS - What does "socket hang up" actually mean?
import request from 'request';
import FeedParser from 'feedparser';
const extractor = require('unfluff');
export const getFeedsFromSource = function (urlfeed, etag, LastModified, callback) {
console.log(urlfeed, etag, LastModified);
const req = request({
method: 'GET',
url: urlfeed,
headers: {
'If-None-Match': etag,
'If-Modified-Since': LastModified,
Connection: 'keep-alive',
ciphers: 'DES-CBC3-SHA',
},
});
const feedparser = new FeedParser();
const metaData = {};
const htmlData = {};
const feedData = {};
// const pList = null;
req.on('response', function (response) {
const stream = this;
if (response.statusCode === 304) {
console.log('Source not modified: ', urlfeed);
}
if (response.statusCode === 200) {
metaData.etagin = response.headers.etag;
metaData.LastModifiedin = response.headers['last-modified'];
metaData.LastModifiedLocal = response.headers['last-modified'];
stream.pipe(feedparser).end();
}
});
req.on('error', (err) => {
console.log(`getFeed: err.message == ${err.message}`);
callback(err);
});
// req.end();
feedparser.on('readable', function () {
try {
const item = this.read();
if (item !== null) {
request({
method: 'GET',
url: item.link,
}, (err, info) => {
if (!err) {
htmlData.body = info.body;
const parsedData = extractor(htmlData.body, 'en');
feedData.author = [];
feedData.videos = [];
feedData.feedtitle = parsedData.title;
feedData.feedmainpicture = parsedData.image;
feedData.feedsummary = parsedData.description;
feedData.feedmaincontent = parsedData.text;
feedData.author.push(item.author);
if (item.author === null) {
feedData.author = parsedData.author;
}
feedData.feedurl = item.link;
feedData.copyright = item.meta.copyright;
// feedData.videos = parsedData.videos;
feedData.publishedDate = item.pubdate;
if (item.categories.length > 0) {
feedData.categories = item.categories;
feedData.feedtags = item.categories;
} else if (parsedData.keywords !== undefined) {
feedData.categories = parsedData.keywords.split(' ').join('').split(',');
feedData.feedtags = parsedData.keywords.split(' ').join('').split(',');
} else {
feedData.categories = [];
feedData.feedtags = [];
}
metaData.sourcename = item.meta.title;
callback(undefined, feedData, metaData);
} else {
console.log('err in accessing the link', err, item.link);
}
});
}
} catch (err) {
console.log(`getFeed: err.message == ${err.message}`);
}
});
feedparser.on('error', (err) => {
console.log(`getFeed: err.message == ${err.message}`);
});
feedparser.on('end', () => {
console.log('onend');
});
};
Kindly help me out with this issue.
There are many reasons for socket hangups/resets in production apps. From your description I believe the cause isn't the app being overloaded with requests (unless you're running a very slow machine).
IMO, the most likely candidate is throttling by the remote server due to too many connections from the same IP (Chrome opens up to 8 connections to any single server; try not to exceed that limit, though every server has its own limit). To solve this you should do one of the following (see the sketch after this list):
add host request pooling (basically set Agent.maxSockets)
use a proxy service (e.g. Luminati) to distribute requests over many source IPs (more relevant for high-concurrency requirements)
One more thing to remember: requests can fail for 'natural' networking reasons (e.g. a bad/unstable internet connection, server load spikes), so you should always retry a request at least once before giving up.
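A rough sketch of both suggestions (my illustration, not the poster's code; the URL, retry count, and socket limit are placeholders): an https Agent caps concurrent sockets per host, and a small wrapper retries a failed request once before giving up.

const https = require('https');
const request = require('request');

// Reuse one keep-alive agent so at most 8 sockets are opened per host.
const agent = new https.Agent({ keepAlive: true, maxSockets: 8 });

function getWithRetry(url, retries, callback) {
  request({ method: 'GET', url, agent }, (err, res, body) => {
    if (err && retries > 0) return getWithRetry(url, retries - 1, callback);
    callback(err, res, body);
  });
}

getWithRetry('https://example.com/feed', 1, (err, res, body) => {
  if (err) return console.log('still failing:', err.message);
  console.log('got status', res.statusCode);
});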

Bot Framework V4 - TypeError: Cannot perform 'get' on a proxy that has been revoked

I am trying to make a REST query against a database that stores knowledge articles for users and returns an array of results based on what the user has searched for. Whenever I try to search I get:
"TypeError: Cannot perform 'get' on a proxy that has been revoked"
I have tried adding it to async as shown but I still keep getting the same error. Any idea what I am doing wrong?
const Response = async (turnContext) => {
if (turnContext.activity.value.choice === 'feedbackProvider') {
try {
const feedbackBody = turnContext.activity.value.feedbackBody;
const feedbackEmail = turnContext.activity.value.feedbackEmail;
storage.write(feedbackBody, feedbackEmail);
await turnContext.sendActivity(`Feedback Sent`);
} catch (err) {
console.log('fetch failed', err);
}
} else if (turnContext.activity.value.choice === 'issueRaiser') {
try {
const bugTitle = turnContext.activity.value.issueTitle;
const bugDesc = turnContext.activity.value.issueDescription;
const bugEmail = turnContext.activity.value.issueEmail;
const request = require('request');
request({
method: 'Post',
uri: `<uri>issues?title=${ bugTitle }&description=${ bugDesc } ${ bugEmail }&labels=bug`,
json: true,
headers: {
'Private-Token': '<token>'
}
});
turnContext.sendActivity(`Issue Raised`);
} catch (err) {
console.log('fetch failed', err);
}
} else if (turnContext.activity.value.choice === 'knowledgeBaseSearch') {
try {
const knowledgeBaseTopic = turnContext.activity.value.knowledgeBaseTopic;
request({
url: process.env.SN_KB_URL + knowledgeBaseTopic,
json: true,
auth: {
'username': process.env.Ticket_User,
'password': process.env.Ticket_Key
}
}, async (error, response, body) => {
try {
var stuff = [];
for (var i = 0, len = body.result.length; i < len; i++) {
stuff.push(
CardFactory.heroCard(body.result[i].short_description, ['imageUrl1'], [`${ process.env.SN_KB_Resp_URl }${ body.result[i].number }`])
);
}
let messageWithCarouselOfCards = MessageFactory.carousel(stuff);
await turnContext.sendActivity(messageWithCarouselOfCards);
} catch (err) {
console.log(error);
}
});
} catch (err) {
console.log('fetch failed', err);
}
}
};
Full Error Message:
TypeError: Cannot perform 'get' on a proxy that has been revoked
cardMiddleware.js:35
at Request.request [as _callback] (c:\Bots\sdk4-2\skills\cardMiddleware.js:35:45)
at Request.self.callback (c:\Bots\sdk4-2\node_modules\request\request.js:186:22)
at emitTwo (events.js:126:13)
at Request.emit (events.js:214:7)
at Request.<anonymous> (c:\Bots\sdk4-2\node_modules\request\request.js:1163:10)
at emitOne (events.js:116:13)
at Request.emit (events.js:211:7)
at IncomingMessage.<anonymous> (c:\Bots\sdk4-2\node_modules\request\request.js:1085:12)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
From my post on the forum I was informed that I was using a request module that did not support Promises, which I believe was causing my error. I've now begun using Axios for my request, as shown below:
try {
  return await axios.get(process.env.SN_KB_URL + knowledgeBaseTopic, {
    headers: {
      auth: {
        username: process.env.Ticket_User,
        password: process.env.Ticket_Key
      }
    }
  })
}
However, now when I run the request I get a 401 'Unauthorized' error and I'm not sure what is wrong with my request.
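As an aside, here is a guess at the 401 (my sketch, not from the original thread): axios takes basic-auth credentials as a top-level auth option rather than nested under headers, so the intended request may be something like:

return await axios.get(process.env.SN_KB_URL + knowledgeBaseTopic, {
  auth: {
    username: process.env.Ticket_User,
    password: process.env.Ticket_Key
  }
});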
This issue happened because I was using a request module that did not support promises. Changing my request module to one that did support promises (which I found out about from this article) resolved the issue.
The answer for me was to double-check that I didn't miss any await usage that might be necessary. It turns out I called this.dialog.run(context, this.dialogState); without the await and that threw the same error. I found the answer on this GitHub issue.
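For reference, the fix in my case was simply the missing await (this.dialog and this.dialogState come from my own bot setup):

await this.dialog.run(context, this.dialogState);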
I spent a lot of time struggling with this issue. As other commenters have noted, the issue lies in the fact that the Lex Runtime is not promise-based, so you cannot await requests, which causes the proxy to be revoked.
Here is my solution:
async callLex(context) {
  const params = {
    botAlias: 'prod',
    botName: 'botName',
    userId: context.activity.from.id,
    contentType: 'text/plain; charset=utf-8',
    accept: 'text/plain; charset=utf-8',
    inputStream: context.activity.text.trim()
  };
  let request = lexruntime.postContent(params);
  await request.promise().then(
    async response => {
      console.log(response);
      console.log('Success!');
      await context.sendActivity(response.message);
    },
    err => {
      console.log(err);
      console.log('Error!');
    });
}
Rather than directly invoking the request like lexruntime.postContent(params, callback func), I exclude the callback function and use the "promise" property of AWS.Request to send the request as a promise, which enables me to use "await" and keeps the proxy open. See the documentation here.
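Equivalently, a small sketch under the same assumptions (lexruntime and params as above): the AWS.Request promise can be awaited directly instead of chaining .then():

const response = await lexruntime.postContent(params).promise();
await context.sendActivity(response.message);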
I'm going to put this here only because it's the first result that pops up when searching, although it doesn't directly relate to this issue.
There's a very easy way to use setTimeout() and avoid this error:
await new Promise(resolve => setTimeout(() => resolve(
turnContext.sendActivity('I was sent 5 seconds later')
), 5000));
In my scenario, we were trying to upload files from a Task Module (a modal popup in Teams) to the bot, and in response the bot would give an initial confirmation that the attachments were uploading. This activity would close the task module (as the bot must reply within 10 seconds or Teams resends the request). Then, when the attachments were uploaded, we wanted to update the previously sent adaptive card with the list of uploaded attachments. We achieved this using the proactive messaging feature of the Bot Framework.
const conversationReference = TurnContext.getConversationReference(activity);
Promise.all(listOfPromises).then(async () => {
  await botAdapter.continueConversation(conversationReference, async turnContext => {
    await turnContext.sendActivity('All attachments uploaded!');
  });
});
Docs: https://learn.microsoft.com/en-us/azure/bot-service/bot-builder-howto-proactive-message?view=azure-bot-service-4.0&tabs=javascript
Check for lines that need await inside any async function. I wish Azure would point us to the specific file or line, but I could not figure it out until I looked at all my functions.
Okay, so this is indeed a very cryptic error message, as the GitHub thread here suggested.
But I found that I was not awaiting in this block:
this.onMessage(async (context, next) => {
  const didBotWelcomedUser = await this.welcomedUserProperty.get(
    context,
    "false"
  );
  if (didBotWelcomedUser === false) {
    // first time user is in chat
    await this.sendWelcomeMessage(context); // <-------- await here was missing
  } else {
    await this.sendSuggestedAction(context); // <-------- await here was missing
  }
  await next();
});

this.onMembersAdded(async (context, next) => {
  await this.sendWelcomeMessage(context);
  await next();
});
I thought await next() was enough. We all gotta learn this somehow... Hope you resolve yours.

Nodejs fs.createReadStream event error

I am trying to read data from a file very quickly (data from an ADC). The conversion is started on opening the file and finished at close. I need to wait before opening the file again, and wait for the current conversion to complete.
My problem is that when I open the file very quickly, Node.js won't catch the expected events. Any clue how to fix this?
node.on('input', function () {
  readStream = fs.createReadStream(path.location, { encoding: 'utf8' });
  if (readyFlag == 1) {
    readStream.on('data', (data) => {
      data = { payload: data / 1000 };
      node.send(data);
      console.log(`data: ${data}`);
    })
  }
  readStream.on('open', () => {
    console.log("file opened");
    readyFlag = 0;
  })
  readStream.on('close', () => {
    console.log("file closed");
    readyFlag = 1;
  })
  readStream.on('error', (err) => {
    console.log(err);
  })
})
I end up with the file being opened all the time.
So it looks to me like the problem here is the scope of readStream: it's not local to the input callback, meaning each message will overwrite the reference to the previous stream.
The fact that this starts to fail when you increase the frequency of the messages seems logical to me, because the stream hasn't had a chance to finish, so readyFlag is never reset.
The fix for this should literally be a one-liner; make the stream a local var:
const readStream = ...
I think I have solved it. The readyFlag has to be global to the input scope, otherwise it would not work. I moved the 'open' event inside the if statement that checks the readyFlag. That seemed to do the trick. Now the system doesn't hang, and the file is only opened once reading is done and the previously opened stream has closed.
Here is the code:
var readyFlag = 1;

node.on('input', function (msg) {
  const readStream = fs.createReadStream(path.location, { encoding: 'utf8' });
  if (readyFlag == 1) {
    readStream.on('data', (data) => {
      data = { payload: data / 1000 };
      node.send(data);
      console.log('data read');
      readyFlag = 0;
    })
    readStream.on('open', () => {
      console.log("file opened");
    })
  }
  readStream.on('close', () => {
    console.log("file closed");
    readyFlag = 1;
  })
  readStream.on('error', (err) => {
    console.log(err);
  })
})

Can't set headers after they are sent. NodeJS

Following is my code, where I first verify whether the users are present in the group and, if present, push the accounts into the array and save. This is done via a POST request on /addaccount.
groupRouter.post('/addaccount', Verify.verifyOrdinaryUser, function(req, res, next) {
  Groups.findById(req.body.group, function(err, group) {
    if (err) next(err);
    var checkUser = function(user) {
      for (var i = 0; i < group.users.length; i++) {
        if (group.users[i] == user)
          return true;
      }
      return false;
    }
    if (checkUser(req.decoded._doc._id)) {
      User.find({mobile: {$in: req.body.split}}, function(err, users) {
        if (err) next(err);
        var flag = true;
        if (users.length == req.body.split.length) {
          for (var i = 0; i < users.length; i++) {
            if (!checkUser('' + users[i]._id)) {
              flag = false;
              break;
            }
          }
          if (flag) {
            var myObject = {};
            myObject.amount = req.body.amount;
            myObject.by = req.decoded._doc._id;
            myObject.split = req.body.split;
            group.accounts.push(myObject);
            group.save(function (err, groups) {
              if (err) next(err);
              console.log('Added entries');
              res.json(groups);
            })
          }
          else
            res.end('All users not in the group');
        }
        else
          res.end('Split users do not exist');
      })
    }
    else
      res.end('No Permission');
  })
})
Now when I input an array of users and try to push an object and save via the code below:
if (flag) {
  var myObject = {};
  myObject.amount = req.body.amount;
  myObject.by = req.decoded._doc._id;
  myObject.split = req.body.split;
  group.accounts.push(myObject);
  group.save(function (err, groups) {
    if (err) next(err);
    console.log('Added entries');
    res.json(groups);
  })
}
I get an error saying "Error: Can't set headers after they are sent." as shown below.
Error: Can't set headers after they are sent.
at ServerResponse.OutgoingMessage.setHeader (_http_outgoing.js:357:11)
at ServerResponse.header (D:\Ionic\PocketBuddies\node_modules\express\lib\response.js:718:10)
at ServerResponse.send (D:\Ionic\PocketBuddies\node_modules\express\lib\response.js:163:12)
at done (D:\Ionic\PocketBuddies\node_modules\express\lib\response.js:957:10)
at Object.exports.renderFile (D:\Ionic\PocketBuddies\node_modules\jade\lib\index.js:374:12)
at View.exports.__express [as engine] (D:\Ionic\PocketBuddies\node_modules\jade\lib\index.js:417:11)
at View.render (D:\Ionic\PocketBuddies\node_modules\express\lib\view.js:126:8)
at tryRender (D:\Ionic\PocketBuddies\node_modules\express\lib\application.js:639:10)
at EventEmitter.render (D:\Ionic\PocketBuddies\node_modules\express\lib\application.js:591:3)
at ServerResponse.render (D:\Ionic\PocketBuddies\node_modules\express\lib\response.js:961:7)
at D:\Ionic\PocketBuddies\app.js:78:7
at Layer.handle_error (D:\Ionic\PocketBuddies\node_modules\express\lib\router\layer.js:71:5)
at trim_prefix (D:\Ionic\PocketBuddies\node_modules\express\lib\router\index.js:310:13)
at D:\Ionic\PocketBuddies\node_modules\express\lib\router\index.js:280:7
at Function.process_params (D:\Ionic\PocketBuddies\node_modules\express\lib\router\index.js:330:12)
at IncomingMessage.next (D:\Ionic\PocketBuddies\node_modules\express\lib\router\index.js:271:10)
I searched for the issue and it says I cannot update the response after res.end(), but I don't see any res.end() being called here before res.json is called.
Please help me find the issue here.
I am working on a project and this error is driving me crazy.
Thanks in advance.
As correctly pointed out by robertklep, the problem here was that the error was not returned: in the group.save() callback, after the error occurred it rendered the error page and then still hit res.json(), which caused the error.
It works fine if there are no errors.
The code needs to be updated as follows, to return when the error occurs and not move on to the res.json() part.
group.save(function (err, groups) {
  if (err) return next(err);
  console.log('Added entries');
  res.json(groups);
})
This did the trick for me: it surfaced the actual error, which I then corrected, after which things worked properly.
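As a follow-up sketch (my addition, not part of the original answer), the same early return presumably belongs in the other error checks of this route, such as the Groups.findById and User.find callbacks, otherwise the handler keeps running after calling next(err):

Groups.findById(req.body.group, function (err, group) {
  if (err) return next(err); // return, so the rest of the handler is skipped
  // ...
});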
