ssh2 Node.js SFTP client: "Handshake failed" error - javascript

Hello, I have a little problem. I developed an SFTP client script with Node.js that connects to an SFTP server and grabs some files. I tested it against my local server and it works, but when I tried to use it with the production server I received this error:
Error: Handshake failed: no matching key exchange algorithm
I already generated the RSA key using ssh-keygen.
Here is the relevant part of the script:
var Client = require('ssh2').Client;
var fs = require('fs');
var path = require('path');
var args = process.argv.slice(2);

var connSettings = {
    host: args[0] || '127.0.0.1',
    port: args[1] || 22,
    username: args[2] || 'karim',
    password: args[3] || 'karimos',
    algorithms: {
        hmac: ['hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1', 'hmac-sha1-96']
    }
};

I also had the same problem and solved it by adding the following:
algorithms: {
    kex: [
        "diffie-hellman-group1-sha1",
        "ecdh-sha2-nistp256",
        "ecdh-sha2-nistp384",
        "ecdh-sha2-nistp521",
        "diffie-hellman-group-exchange-sha256",
        "diffie-hellman-group14-sha1"
    ],
    cipher: [
        "3des-cbc",
        "aes128-ctr",
        "aes192-ctr",
        "aes256-ctr",
        "aes128-gcm",
        "aes128-gcm@openssh.com",
        "aes256-gcm",
        "aes256-gcm@openssh.com"
    ],
    serverHostKey: [
        "ssh-rsa",
        "ecdsa-sha2-nistp256",
        "ecdsa-sha2-nistp384",
        "ecdsa-sha2-nistp521"
    ],
    hmac: [
        "hmac-sha2-256",
        "hmac-sha2-512",
        "hmac-sha1"
    ]
}
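For context, here is a minimal sketch (untested) of how such an algorithms block plugs into a full connect/SFTP flow with the ssh2 Client API; the host, credentials, and file paths below are placeholders, not values from the question:

var Client = require('ssh2').Client;

var conn = new Client();
conn.on('ready', function() {
    conn.sftp(function(err, sftp) {
        if (err) throw err;
        // download one remote file; the paths are examples only
        sftp.fastGet('/remote/data.csv', 'data.csv', function(err) {
            if (err) throw err;
            conn.end();
        });
    });
}).on('error', function(err) {
    console.error('Connection error: ' + err.message);
}).connect({
    host: '127.0.0.1',
    port: 22,
    username: 'karim',
    password: 'karimos',
    algorithms: {
        kex: ['diffie-hellman-group14-sha1', 'diffie-hellman-group-exchange-sha256'],
        hmac: ['hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1']
    }
});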

For myself, I added debug: console.log to my config object. This output more information about the connection attempt.
{
    "port": 22,
    "host": "test.test.com",
    "user": "test",
    "password": "******",
    "debug": console.log
}
Handshake: (remote) KEX method: diffie-hellman-group14-sha1,diffie-hellman-group-exchange-sha1
Handshake: No matching key exchange algorithm
Based on this error I updated my config's algorithm:
{
    "port": 22,
    "host": "test.test.com",
    "user": "test",
    "password": "******",
    "algorithms": {
        "kex": [
            "diffie-hellman-group14-sha1",
            "diffie-hellman-group-exchange-sha1"
        ]
    }
}
After adding these algorithms, the connection succeeded on my machine.
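If it helps, here is a small sketch of the same debug-driven approach wired into an actual connection attempt with the ssh2 Client API; the host and credentials are placeholders:

var Client = require('ssh2').Client;

var conn = new Client();
conn.on('ready', function() {
    console.log('Connected');
    conn.end();
}).connect({
    host: 'test.test.com',
    port: 22,
    username: 'test',
    password: '******',
    debug: console.log, // prints handshake details, including the remote KEX list
    algorithms: {
        kex: ['diffie-hellman-group14-sha1', 'diffie-hellman-group-exchange-sha1']
    }
});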

You may edit your /etc/ssh/sshd_config file on your server in order to allow the key authentication method :)

My first suggestion would be to upgrade the ssh server on the server you're connecting to so that a more secure configuration can be had. This is the best/most secure solution.
If you cannot make changes on this server and you absolutely need to connect, then you can explicitly set the kex to a list of key exchange methods you want to support (valid algorithm names can be found in the ssh2-streams documentation). For example:
algorithms: {
kex: [ ... ]
}

Have you tried changing your algorithms declaration to...?
algorithms: {
serverHostKey: [ 'hmac-sha2-256', 'hmac-sha2-512', 'hmac-sha1', 'hmac-sha1-96' ],
}


URL parse vs constructor: path missing

I am a total beginner in Node.js, but I am trying to fix what I thought was a simple issue. I am using the following code example for an AWS Lambda function using the Node.js 12 runtime:
function respond(event, context, responseStatus, responseData, physicalResourceId, noEcho) {
    var responseBody = JSON.stringify({
        Status: responseStatus,
        Reason: "See the details in CloudWatch Log Stream: " + context.logStreamName,
        PhysicalResourceId: physicalResourceId || context.logStreamName,
        StackId: event.StackId,
        RequestId: event.RequestId,
        LogicalResourceId: event.LogicalResourceId,
        NoEcho: noEcho || false,
        Data: responseData
    });

    var https = require("https");
    var url = require("url");

    var parsedUrl = url.parse(event.ResponseURL);
    var options = {
        hostname: parsedUrl.hostname,
        port: 443,
        path: parsedUrl.path,
        method: "PUT",
        headers: {
            "content-type": "",
            "content-length": responseBody.length
        }
    };

    var request = https.request(options, function(response) {
        context.done();
    });
    request.on("error", function(error) {
        console.log("send(..) failed executing https.request(..): " + error);
        context.done();
    });
    request.write(responseBody);
    request.end();
}
Full source code can be found here: https://github.com/aws-samples/amazon-cloudfront-secure-static-site/blob/7f96cdbcfbd7f94c3ab5a4c028b6bafd10744c83/source/witch/witch.js#L70
My IDE gives me a warning that the url.parse() method is deprecated and that I should use the URL constructor instead. So the only change I made is replacing:
var parsedUrl = url.parse(event.ResponseURL);
with
var parsedUrl = new url.URL(event.ResponseURL);
But when I do that, the options.path field ends up missing. What is even more confusing to me is that if I log the parsedUrl variable (passing it through JSON.stringify()), I can see that when I use url.parse(), I get a simple string in parsedUrl:
"https://cloudformation-custom-resource-response-useast1.s3.amazonaws.com/arn%3Aaws%3Acloudformation%3Aus-east-1%3A123456789012%3Astack/AcmCertificateStack-ABCDEFGHIJKL/00112233-4455-6677-8899-aabbccddeeff%7CCopyCustomResource%7C00112233-4455-6677-8899-aabbccddeeff?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20220101T000000Z&X-Amz-SignedHeaders=host&X-Amz-Expires=7199&X-Amz-Credential=ABCDEFGHIJKLMNOPQRST%2F20220101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff"
But when using the constructor, I can see in the log an object structure with all the expected fields (protocol, hostname, port, even path):
{
"protocol": "https:",
"slashes": true,
"auth": null,
"host": "cloudformation-custom-resource-response-useast1.s3.amazonaws.com",
"port": null,
"hostname": "cloudformation-custom-resource-response-useast1.s3.amazonaws.com",
"hash": null,
"search": "?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20220101T000000Z&X-Amz-SignedHeaders=host&X-Amz-Expires=7199&X-Amz-Credential=ABCDEFGHIJKLMNOPQRST%2F20220101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff",
"query": "XX-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20220101T000000Z&X-Amz-SignedHeaders=host&X-Amz-Expires=7199&X-Amz-Credential=ABCDEFGHIJKLMNOPQRST%2F20220101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff",
"pathname": "/arn%3Aaws%3Acloudformation%3Aus-east-1%3A123456789012%3Astack/AcmCertificateStack-ABCDEFGHIJKL/00112233-4455-6677-8899-aabbccddeeff%7CCopyCustomResource%7C00112233-4455-6677-8899-aabbccddeeff",
"path": "/arn%3Aaws%3Acloudformation%3Aus-east-1%3A123456789012%3Astack/AcmCertificateStack-ABCDEFGHIJKL/00112233-4455-6677-8899-aabbccddeeff%7CCopyCustomResource%7C00112233-4455-6677-8899-aabbccddeeff?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20220101T000000Z&X-Amz-SignedHeaders=host&X-Amz-Expires=7199&X-Amz-Credential=ABCDEFGHIJKLMNOPQRST%2F20220101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff",
"href": "https://cloudformation-custom-resource-response-useast1.s3.amazonaws.com/arn%3Aaws%3Acloudformation%3Aus-east-1%3A123456789012%3Astack/AcmCertificateStack-ABCDEFGHIJKL/00112233-4455-6677-8899-aabbccddeeff%7CCopyCustomResource%7C00112233-4455-6677-8899-aabbccddeeff?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=20220101T000000Z&X-Amz-SignedHeaders=host&X-Amz-Expires=7199&X-Amz-Credential=ABCDEFGHIJKLMNOPQRST%2F20220101%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Signature=00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff"
}
So if anything the constructor seems to provide a better breakdown of the URL. I don't know why, when I try to copy the parsedUrl.path field to options.path, it works when parsedUrl comes from the parse() method but not when it comes from the constructor. The hostname field, on the other hand, works in both cases.
Any idea what the issue is here?
As indicated by the OP, the use of the url.parse method is discouraged in favor of the WHATWG URL API. The legacy .path attribute returns the combined pathname and search components. Although the preferred WHATWG URL API does not have a path attribute, the value required by options.path can be constructed by combining the .pathname and .search attributes.
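A minimal sketch of that change, assuming event.ResponseURL is a full URL and the rest of the respond() function stays as in the question:

var parsedUrl = new URL(event.ResponseURL);

var options = {
    hostname: parsedUrl.hostname,
    port: 443,
    // .pathname + .search together reproduce what the legacy .path property contained
    path: parsedUrl.pathname + parsedUrl.search,
    method: "PUT",
    headers: {
        "content-type": "",
        "content-length": responseBody.length
    }
};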

SQL Server connection using tedious and sequelize in NodeJs

Getting the below error when trying to connect to a local SQL Server database in Node.js with Sequelize:
name: 'SequelizeHostNotFoundError',
message: 'Failed to connect to abcdd\SQL2K12:1433 - getaddrinfo ENOTFOUND abcdd\SQL2K12',
code: 'ESOCKET'
Below are my configuration settings, and the credentials are correct:
"use_env_variable": false,
"username": "username",
"password": "password#123",
"database": "db_test",
"host": "abcdd\\SQL2K12", //actual db hotsname is abcdd\SQL2K12
"dialect": "mssql",
"operatorsAliases": false,
"dialectOptions": {
"encrypt": true,
"trustedConnection": true
}
In SSMS it works fine. Is there an issue with the port number? I am not using a port in SSMS.
For me, it works to add instanceName: instead of putting server: 'SERVER\SQLSERVER2K06', just put 'SERVER'.
var config = {
    server: 'SERVER',
    authentication: {
        type: 'default',
        options: {
            userName: 'user',
            password: '*****'
        }
    },
    options: {
        database: 'NAME',
        instanceName: 'SQLSERVER2K06' /* It works! */
    }
}
For tedious, options.instanceName is documented as:
The instance name to connect to. The SQL Server Browser service must be running on the database server, and UDP port 1434 on the database server must be reachable.
(no default)
Mutually exclusive with options.port.
check the comments:
https://github.com/tediousjs/node-mssql/issues/130
var config = {
    userName: 'test01',
    password: 'test01',
    server: 'localhost',
    options: {
        database: 'TestDB',
        instanceName: 'SQL2012'
    }
};
In your case:
var config = { server: 'abcdd', options: { instanceName: 'SQL2K12' } };
Also, make sure TCP/IP is enabled on your SQL Server: SQL Server Configuration Manager > SQL Server Network Configuration > Protocols for SQLSERVER > TCP/IP.
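Since the question uses Sequelize rather than raw tedious, here is a rough sketch of the equivalent Sequelize configuration; it assumes a Sequelize version where tedious options are nested under dialectOptions.options, so double-check against your installed versions:

const { Sequelize } = require('sequelize');

const sequelize = new Sequelize('db_test', 'username', 'password#123', {
    host: 'abcdd',               // host name only, without \SQL2K12
    dialect: 'mssql',
    dialectOptions: {
        options: {
            instanceName: 'SQL2K12', // resolved via the SQL Server Browser service
            encrypt: true
        }
    }
});

sequelize.authenticate()
    .then(() => console.log('Connected'))
    .catch(err => console.error('Connection failed:', err));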

Disable/Deny all protocols except ws:// in Node Media Server (Nodejs)

I would like to deny access to anyone trying to read my stream over the http or rtmp protocols.
I am learning on the fly, so I'm no dev, kind of a rookie actually... so if you could explain to me how to do this in simple terms, I'd really appreciate it!
Here is the code I'm using right now, which allows every protocol to be played:
const { NodeMediaServer } = require('node-media-server');

const config = {
    logType: 3,
    rtmp: {
        port: 1935,
        chunk_size: 60000,
        gop_cache: true,
        ping: 60,
        ping_timeout: 30
    },
    http: {
        port: 8080,
        allow_origin: '*',
    },
};

var nms = new NodeMediaServer(config);
nms.run();
An if statement would be ideal, but I've been searching and testing without success.
Thank you for your help and time!
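One rough sketch of a possible approach, based on node-media-server's session events (prePlay, getSession, reject); how each session identifies its transport is an assumption here (RTMP play sessions being tagged 'rtmp'), so treat this as a starting point rather than a verified solution:

const { NodeMediaServer } = require('node-media-server');

const config = { /* same rtmp/http config as above */ };
const nms = new NodeMediaServer(config);

nms.on('prePlay', (id, StreamPath, args) => {
    const session = nms.getSession(id);
    // Assumption: RTMP play sessions carry TAG === 'rtmp'; reject those,
    // leaving WebSocket/HTTP-FLV playback to further checks if needed.
    if (session && session.TAG === 'rtmp') {
        session.reject();
    }
});

nms.run();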

Strongloop app does not load local datasource

I want to use different environment-specific datasource configurations in a Strongloop app. I saw at https://docs.strongloop.com/display/public/LB/Environment-specific+configuration that the priority of configurations is:
1. Environment-specific configuration, based on the value of NODE_ENV; for example, server/config.staging.json.
2. Local configuration file; for example, server/config.local.json.
3. Default configuration file; for example, server/config.json.
I have declared three datasource conf files:
datasources.json:
{}
datasources.local.json:
{
    "db": {
        "name": "db",
        "connector": "loopback-connector-mongodb",
        "host": "127.0.0.1",
        "port": "27017",
        "database": "woowDev"
    }
}
and datasources.staging.js:
module.exports = {
    db: {
        connector: 'mongodb',
        hostname: process.env.OPENSHIFT_MONGODB_DB_HOST,
        port: process.env.OPENSHIFT_MONGODB_DB_PORT,
        user: process.env.OPENSHIFT_MONGODB_DB_USERNAME,
        password: process.env.OPENSHIFT_MONGODB_DB_PASSWORD,
        database: 'woow'
    }
};
Now, unless I put the configuration of datasources.local.json into datasources.json, it does not work. I keep getting the error: AssertionError: User is referencing a dataSource that does not exist: "db"
I also tried adding the local conf to the staging conf and defining the NODE_ENV variable, but it would not load datasources.staging.js either. I set NODE_ENV with:
export NODE_ENV=staging
I used node-debug to track down the issue, and it led to this particular Strongloop source file:
node_modules/loopback-boot/lib/config-loader.js
the function:
function mergeDataSourceConfig(target, config, fileName) {
    for (var ds in target) {
        var err = applyCustomConfig(target[ds], config[ds]);
        if (err) {
            throw new Error('Cannot apply ' + fileName + ' to `' + ds + '`: ' + err);
        }
    }
}
This function will not merge configs if the "db" key is not defined in the master file, i.e. datasources.json.
So, I just modified the datasources.json to:
{
    "db": {}
}
and it worked!
Maybe it is my fault, but the documentation is not clear enough.
The trick is to add all the datasources (memory/redis/mongo/postgres) in datasources.json and then override parameters in datasources.local.js, datasources.staging.js, or datasources.production.js.
Sample file configuration:
datasources.json
{
    "db": {
        "name": "db",
        "connector": "memory"
    },
    "redisDS": {
        "name": "redisDS",
        "connector": "redis"
    },
    "testPostgress": {
        "port": 5432,
        "name": "localPostgress",
        "user": "akumar",
        "connector": "postgresql"
    }
}
datasources.staging.js
module.exports = {
    db: {
        connector: 'memory'
    },
    redisDS: {
        connector: 'redis'
    },
    testPostgress: {
        database: 'stagingPostgress'
    }
};
Loopback will override the database name in this case; similarly, you can override other datasource parameters like port and user.
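As a quick way to verify the merge, one can inspect the booted app's datasource settings; this is a sketch assuming a standard LoopBack server/server.js layout:

// run with: NODE_ENV=staging node check-datasources.js
var app = require('./server/server');

// the settings object reflects datasources.json merged with the
// environment-specific override file
console.log(app.datasources.db.settings);
console.log(app.datasources.testPostgress.settings);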

log4js-node in nodejs not logging to file

Not a node expert, and this is the first time I'm using log4js-node.
I am trying to get my ERROR logs and any of my console logs written to a log_file.log file with log4js on a Node.js server running Express. Here is my config file:
{
    "replaceConsole": true,
    "appenders": [
        {
            "type": "file",
            "filename": "log_file.log",
            "maxLogSize": 20480,
            "backups": 3,
            "category": "relative-logger"
        },
        {
            "type": "logLevelFilter",
            "level": "ERROR",
            "appender": {
                "type": "file",
                "filename": "log_file.log"
            }
        },
        {
            "appender": {
                "type": "smtp",
                "recipients": "myemail@gmail.com",
                "sender": "myemailadd@gmail.com",
                "sendInterval": 60,
                "transport": "SMTP",
                "SMTP": {
                    "host": "localhost",
                    "port": 25
                }
            }
        }
    ]
}
And here is how I'm requiring it in my app.js file:
var log4js = require("log4js");
log4js.configure("log_config.json");
var logger = log4js.getLogger();
I'm sending manual errors to log4js with this (I can get this to log to the console fine, I just can't get the log file written):
logger.error('A mandrill error occurred: ' + e.name + ' - ' + e.message);
And I'm hoping log4js catches the application's normal ERROR messages.
How do I get log4js to log to log_file.log and then send me an email of that log? I have installed nodemailer 0.7, FYI, to handle SMTP.
Maybe you could remove "category": "relative-logger" in your file appender.
Yes, remove "category": "relative-logger"; it somehow blocks the data transfer into your log file. Or try something like this:
// Setup Logging
log4js.configure({
    appenders: [
        { type: 'console' },
        { type: 'file', filename: '.\\logs\\PesaFastaArchiveData.log' }
    ]
});
The path is of course a Windows path.
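As a sketch of the suggestion above, in the same pre-2.x log4js configuration style as the question (the second filename is just an example), a file appender without the "category" field plus an ERROR-level filter could look like this:

var log4js = require('log4js');

log4js.configure({
    replaceConsole: true,
    appenders: [
        { type: 'console' },
        // default category: everything logged via log4js.getLogger() goes here
        { type: 'file', filename: 'log_file.log', maxLogSize: 20480, backups: 3 },
        // ERROR and above also go to a separate file (example name)
        {
            type: 'logLevelFilter',
            level: 'ERROR',
            appender: { type: 'file', filename: 'errors.log' }
        }
    ]
});

var logger = log4js.getLogger();
logger.error('A mandrill error occurred: test'); // should appear in both files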
