Using node-soap: Getting ECONNREFUSED when passing arguments to a method - javascript

I'm using node-soap to communicate with a web service, but I can't get it to work.
The code below throws the error ECONNREFUSED, but if I don't pass the args variable to the method I get a response. Does anyone know what the problem could be?
var soap = require('soap');
var wsdl, url;
var args = { cargoSn: 'MSWU0031179' };

soap.createClient(url, function (err, client) {
  //console.log('client');
  console.log(client.describe().TransportWebService.TransportWebServiceSoap.GetContainerPosition);
  client.GetContainerPosition(args, function (err, result) {
    console.log('err');
    console.log(err);
    console.log('result');
    console.log(result);
  }, {
    proxy: "http://127.0.0.1:8888",
    strictSSL: false
  });
  console.log(client.lastRequest);
}, url);
This is what I get when running the code above:
err
{ [Error: connect ECONNREFUSED]
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect' }
result
undefined
And this is what I get when calling it without arguments:
client.GetContainerPosition(function(err, result) { ...
err
null
result
{ GetContainerPositionResult: '{"Status":"ERROR","Description":"Nothing found with serial number: ","Data":null}' }
So, it works when no arguments are passed (and obviously finds nothing), but gives an error when they are.
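One likely difference, assuming nothing is actually listening on 127.0.0.1:8888 (for example, Fiddler not running): the failing call passes { proxy: "http://127.0.0.1:8888", strictSSL: false } as the request options for the method, so node-soap routes the POST through that local proxy, while the argument-less call passes no options at all and connects directly to the service. A minimal sketch of the same call with the proxy option removed, to isolate that:
var soap = require('soap');
var args = { cargoSn: 'MSWU0031179' };
// 'url' is assumed to point at the same WSDL as above.
soap.createClient(url, function (err, client) {
  if (err) throw err;
  // No third "options" argument: the request goes straight to the
  // endpoint from the WSDL instead of through a local proxy.
  client.GetContainerPosition(args, function (err, result) {
    console.log(err, result);
  });
});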

Related

auth0 Management Api NextJs not able to update metadata

I'm really struggling to get my head around Auth0 management clients so I can update user information. I've created a Next.js API route for the backend, but I'm not sure how to actually amend the user's data. I've followed the docs and got to the point below.
API route
I manually entered the ID param to test, and my response is an empty JSON object.
The docs don't make it clear what I'm supposed to do in the update-user part.
var ManagementClient = require("auth0").ManagementClient;

export default async (req, res) => {
  var auth0 = new ManagementClient({
    domain: process.env.AUTH0_BASE_URL,
    clientId: process.env.AUTH0_CLIENT_ID,
    clientSecret: process.env.AUTH0_CLIENT_SECRET,
    scope: "read:users update:users",
  });
  var params = { id: "**************************" };
  var metadata = {
    foo: "bar",
  };
  auth0.users.updateAppMetadata(params, metadata, function (err, user) {
    if (err) {
      // Handle error.
    }
    // Updated user.
    console.log(user);
    res.status(200).json({ user });
  });
};
Error:
API resolved without sending a response for /api/authapi, this may result in stalled requests.
SanitizedError [APIError]: getaddrinfo ENOTFOUND http
(node:internal/process/task_queues:81:21) {
statusCode: 'ENOTFOUND',
requestInfo: { method: 'post', url: '***********' },
originalError: Error: getaddrinfo ENOTFOUND http
at GetAddrInfoReqWrap.onlookup [as oncomplete] (node:dns:69:26) {
errno: -3008,
code: 'ENOTFOUND',
syscall: 'getaddrinfo',
hostname: 'http',
response: undefined
}
}
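A hedged guess based on hostname: 'http' in that trace: the ManagementClient domain option appears to contain a full URL (AUTH0_BASE_URL usually includes the https:// scheme), so the SDK tries to resolve the literal string "http" as a hostname. The domain option expects a bare tenant host. A minimal sketch, where AUTH0_DOMAIN is an assumed environment variable such as your-tenant.eu.auth0.com:
var ManagementClient = require("auth0").ManagementClient;

export default async (req, res) => {
  var auth0 = new ManagementClient({
    // AUTH0_DOMAIN is an assumed variable holding the bare tenant host,
    // e.g. "your-tenant.eu.auth0.com" -- no "https://" prefix.
    domain: process.env.AUTH0_DOMAIN,
    clientId: process.env.AUTH0_CLIENT_ID,
    clientSecret: process.env.AUTH0_CLIENT_SECRET,
    scope: "read:users update:users",
  });
  // ...rest of the handler unchanged.
};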

SSL Error in nodejs request package

I'm trying to call the freeCodeCamp API at the URL https://forum.freecodecamp.org/c/getting-a-developer-job.json?
I am doing some analysis on the data, but when I call this service using the 'request' npm package I get the following SSL error:
error { Error: write EPROTO 140735782093632:error:14094410:SSL routines:ssl3_read_bytes:sslv3 alert handshake failure:../deps/openssl/openssl/ssl/s3_pkt.c:1500:SSL alert number 40
140735782093632:error:1409E0E5:SSL routines:ssl3_write_bytes:ssl handshake failure:../deps/openssl/openssl/ssl/s3_pkt.c:659:
at _errnoException (util.js:1022:11)
at WriteWrap.afterWrite [as oncomplete] (net.js:867:14) code: 'EPROTO', errno: 'EPROTO', syscall: 'write' }
Can somebody let me know what the issue is? A wget call pulls the data when provided with the --ca-certificate option, and the web service also returns data to Postman without any certificate.
Here is my code:
import request from 'request';
import fs from 'fs';

const BASE_URL = 'forum.freecodecamp.org/c/getting-a-developer-job.json?no_subcategories=false&page=1';

request.get(BASE_URL, {
  port: 443,
  agentOptions: {
    ciphers: 'ALL',
    ca: fs.readFileSync('./mycertfile.pem'),
    secureProtocol: 'TLSv1_2_method'
  },
  strictSSL: false
})
.on('response', (response) => {
  console.log('Response is ', response);
}).on('error', (err) => {
  console.log('error', err);
});
I see no problems there except that the URI scheme (https://) is absent. In my environment I have Node v10.5.0, and this slightly modified version of your script works well:
const request = require('request')
const BASE_URL = 'https://forum.freecodecamp.org/c/getting-a-developer-job.json?no_subcategories=false&page=1'

request.get(BASE_URL, { json: true }, (err, res, body) => {
  if (err) throw('error', err);
  console.log('JSON object is => ', body);
})
outputs to console:
JSON object is => { users:
[ { id: 117390,
username: 'anthony2025',
....

nodejs events.js:141 unhandled error event ETIMEDOUT

I have a Heroku server with node.js and express that pings a website's API every second. This works fine for hours at a time, but every once in a while I'll get this error:
2018-01-10T02:19:28.579566+00:00 app[web.1]: events.js:141
2018-01-10T02:19:28.579578+00:00 app[web.1]: throw er; // Unhandled 'error' event
2018-01-10T02:19:28.579579+00:00 app[web.1]: ^
2018-01-10T02:19:28.579581+00:00 app[web.1]:
2018-01-10T02:19:28.579582+00:00 app[web.1]: Error: connect ETIMEDOUT 45.60.11.241:443
2018-01-10T02:19:28.579583+00:00 app[web.1]: at Object.exports._errnoException (util.js:907:11)
2018-01-10T02:19:28.579584+00:00 app[web.1]: at exports._exceptionWithHostPort (util.js:930:20)
2018-01-10T02:19:28.579585+00:00 app[web.1]: at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1078:14)
2018-01-10T02:19:28.684990+00:00 heroku[web.1]: Process exited with status 1
Sometimes the error is ETIMEDOUT but sometimes it's other things (can't remember right now).
Another post I read made me think maybe this is the problem:
app.listen(app.get('port'), function() {
  console.log("Node app is running at localhost:" + app.get('port'))
})
Or maybe it's the part inside the API call loop?
try
{
  async.series([
    function(callback) {
      apiQuery( callback, method, params);
    },
  ], function(error, results) {
    console.log(results)
  });
}
catch(e)
{
  console.log("something went wrong!")
}
I'm not sure why the try/catch isn't catching the error, if this is the cause.
Maybe it's how I'm starting the loop?
runLoop()

//start looping the api data pulls
function runLoop() {
  setInterval(apiLoop, 1000)
}
Would it be better to have apiLoop call itself in the callback function? Or would that create nested functions that keep using increasingly larger memory?
Here is the API call code:
function apiQuery( callback2, method, params )
{
  if ( ! params ) params = [];
  var host_name = 'www.host.com';
  var url = '/Api/' + method;
  if ( params ) url += "/" + params.join('/');
  var options = {
    host: host_name,
    path: url,
  };
  callback = function(response) {
    var str = '';
    response.on('data', function (chunk) {
      str += chunk;
    });
    response.on('end', function () {
      return callback2(null, str);
    });
  }
  https.request(options, callback).end();
}
Maybe the ETIMEDOUT is caused by the website's server; in any case, you can catch the error:
const req = https.request(options, callback)

req.on('error', (e) => {
  console.error(e);
});
req.end();
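On the earlier question of whether apiLoop should call itself instead of running on setInterval: a minimal sketch, assuming apiLoop is changed to accept a completion callback, reschedules with setTimeout once the previous poll finishes. Each iteration's closure is released when it completes, so memory does not keep growing, and slow responses can no longer stack up overlapping requests:
// Assumes apiLoop(done) calls done() once its API request has finished.
function runLoop() {
  apiLoop(function () {
    // Schedule the next poll only after the current one completes.
    setTimeout(runLoop, 1000);
  });
}

runLoop();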

Cannot parse response SOAP Node.js

Firstly, sorry for my English; secondly, I have a question about using SOAP in Node.js. I am a beginner with Node.js and I need help. This is my function:
var soap = require('soap');
var url = 'http://SOMETHING?wsdl';
var args = {
  accountId: 'xxxxx',
  userName: 'xxxxx',
  password: 'xxxxxx',
  targetNSAlias: 'tns',
  targetNamespace: 'http://api.ilient.com/'
};

soap.createClient(url, function(err, client) {
  if(err) throw err;
  client.login(args, function(err, result, raw, soapHeader){
    if(err) throw err;
    console.log(result);
  });
});
When I run it, I get this error:
Error: Cannot parse response
at /root/node_modules/soap/lib/client.js:321:21
at Request._callback (/root/node_modules/soap/lib/http.js:117:5)
at Request.self.callback (/root/node_modules/request/request.js:186:22)
at Request.emit (events.js:98:17)
at Request.<anonymous> (/root/node_modules/request/request.js:1081:10)
at Request.emit (events.js:95:17)
at IncomingMessage.<anonymous> (/root/node_modules/request/request.js:1001:12)
at IncomingMessage.g (events.js:180:16)
at IncomingMessage.emit (events.js:117:20)
at _stream_readable.js:944:16
Can someone help me solve it?
Thanks, and sorry for my English again.
I faced a similar problem. Maybe the SOAP web service you are trying to consume follows the 1.2 specification and expects the content type application/soap+xml instead of text/xml. To force node-soap to use SOAP 1.2, you can add forceSoap12Headers: true to the createClient() options.
On a side note, I had to add WS-Addressing headers to the SOAP header because of the error "The message with To '' cannot be processed at the receiver, due to an AddressFilter mismatch at the EndpointDispatcher".
I edited your code as follows:
var soap = require('soap');
var url = 'http://SOMETHING?wsdl';
var args = {
  accountId: 'xxxxx',
  userName: 'xxxxx',
  password: 'xxxxxx',
  targetNSAlias: 'tns',
  targetNamespace: 'http://api.ilient.com/'
};
var soapOptions = {
  forceSoap12Headers: true
};
var soapHeaders = {
  'wsa:Action': 'http://tempuri.org/MyPortName/MyAction',
  'wsa:To': 'http://SOMETHING.svc'
};

soap.createClient(url, soapOptions, function(err, client) {
  if(err) throw err;
  client.addSoapHeader(soapHeaders, '', 'wsa', 'http://www.w3.org/2005/08/addressing');
  client.login(args, function(err, result, raw){
    if(err) throw err;
    console.log(result);
  });
});
Add client.setSecurity(new soap.BasicAuthSecurity('username', 'password')); right after your createClient code. It worked for me:
var soap = require('soap');
var url = 'http://SOMETHING?wsdl';

soap.createClient(url, function(err, client) {
  if(err) throw err;
  client.setSecurity(new soap.BasicAuthSecurity('username', 'password'));
  client.login(args, function(err, result) {
    if(err) throw err;
    console.log(result);
  });
});

Node.js sporadic 'ECONNREFUSED' error on request.get

I'm having a sporadic issue with my Node/loopback server. My setup is as follows:
EDIT: My Node version is v4.2.4, and I'm running it on Windows 10 Professional.
Client side:
nw.js client that traverses the local file-system and computes the MD5 value of files.
Using a request.post, the client sends the hash of the file to the server in the format 'fileHash: d41d8cd98f00b204e9800998ecf8427e' (that's just an example hash, I know it's an empty file)
function checkHash (fileHash){
  request.post({
    headers: {'content-type' : 'application/x-www-form-urlencoded'},
    url: 'http://localhost:3000/api/checkBoths/hashcheck',
    method: 'POST',
    form: {
      fileHash: fileHash
    }
  }, function(error, response, body){
    if(error) {
      console.log(error);
    } else {
      console.log(response.statusCode, body);
    }
  });
}
Server side:
Node/loopback server running at localhost:3000.
The hashCheck function is meant to read the data posted from the client and query a MySQL database for a matching hash.
The response from the server will be in the format goodResult : true if the hash exists in the database or goodResult : false if not.
var request = require('request');

module.exports = function (CheckBoth) {
  var goodResult;

  CheckBoth.hashCheck = function (fileHash, cb) {
    requestGood(fileHash);

    function requestGood (fileHash) {
      request.get('http://127.0.0.1:3000/api/Goodhashes/' + fileHash + '/exists', function (error, response, body) {
        if (!error && response.statusCode == 200) {
          goodResult = JSON.parse(body).exists;
        }
        if (error) {
          console.error(error);
        }
      });
      console.log(goodResult);
    }

    cb(goodResult);
  };

  CheckBoth.remoteMethod(
    'hashCheck',
    {
      accepts: {arg: 'fileHash', type: 'string'},
      returns: [{arg: 'goodResult', type: 'string'}]
    }
  );
};
Issue:
The server can respond to ~1000 queries before the following appears throughout the responses:
{ [Error: connect ECONNREFUSED 127.0.0.1:3000]
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 3000 }
I've tried putting in different callbacks within the server code but that has not made any difference. I think I should probably throttle the requests to the server but I don't know how to achieve this.
Any help would be greatly appreciated.
The ulimit default for most systems is 1024; see the manual for limits.conf (http://linux.die.net/man/5/limits.conf).
While #migg is correct about ensuring there are no memory leaks in your application and its process handling, tuning the system for high loads is also normal procedure for quite a few applications.
Try this and see if it helps:
$ ulimit -n 65535
Edit: I have not tested this, but here is a document from IBM regarding Windows: http://www-01.ibm.com/support/docview.wss?uid=swg21392080
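For the throttling part of the question, a minimal sketch (assuming the client keeps using the request module) is to send every post through a shared keep-alive agent with a bounded socket pool, so only a handful of connections to the server are open at once instead of a fresh one per file:
var http = require('http');
var request = require('request');

// Reuse at most 10 sockets to the hash-check server; additional posts
// queue on the agent instead of opening new connections.
var keepAliveAgent = new http.Agent({ keepAlive: true, maxSockets: 10 });

function checkHash (fileHash) {
  request.post({
    url: 'http://localhost:3000/api/checkBoths/hashcheck',
    agent: keepAliveAgent,
    form: { fileHash: fileHash }
  }, function (error, response, body) {
    if (error) {
      console.log(error);
    } else {
      console.log(response.statusCode, body);
    }
  });
}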
