ftps module connects but hangs up - javascript

I'm using the ftps module and I've got lftp installed on Cygwin. I'm having trouble because my Node.js app looks like it's connecting fine, but none of my commands are running. The documentation for the module isn't very detailed, so I've just been trying whatever I can to get this running. I'm trying to get a file from the FTP site.
Here is my code:
var ftps = require('ftps');
// ftps connection
var ftp = new ftps ({
host: 'test.ftpsite.com',
username: 'test',
password: 'test',
protocol: 'sftp'
});
// look at remote directory
console.log(ftp);
ftp.cd('TestDir/').get('/UploadTest.txt', '/cygdrive/c/Users/Administrator/UploadTest.txt').exec(console.log);
Output:
CMO-Application-Server>node app.js
{ options:
{ host: 'test.ftpsite.com',
username: 'test',
password: 'test' },
cmds: [] }
At this point in the output, the app just hangs up like it's attempting to run the commands. I've been letting it run for about 10 minutes now and still nothing.

For sftp, here's how you could do it with the ssh2 module:
var Connection = require('ssh2');
var ssh = new Connection();
ssh.on('ready', function() {
ssh.sftp(function(err, sftp) {
if (err) throw err;
sftp.fastGet('TestDir/UploadTest.txt',
'/cygdrive/c/Users/Administrator/UploadTest.txt',
function(err) {
if (err) throw err;
ssh.end();
});
});
}).connect({
host: 'test.ftpsite.com',
port: 22,
username: 'test',
password: 'test'
});

Related

Why does couchDB gives me not authorized error when trying to create views

I am using CouchDB, and whenever I try to create views it gives me this error:
at processTicksAndRejections (internal/process/task_queues.js:97:5) {
code: 'EUNAUTHORIZED',
body: {
error: 'unauthorized',
reason: 'You are not a db or server admin.'
}
}
I am using node-couchdb, and I am passing the credentials like this:
const NodeCouchDb = require('node-couchdb')
require("dotenv-flow").config();
const couch = new NodeCouchDb({
host: process.env.DB_HOST,
protocol: process.env.DB_PROTOCOL,
port: process.env.DB_PORT
})
const couchAuth = new NodeCouchDb({
auth: {
user: process.env.DB_USER_NAME,
pass: process.env.PASSWORD
}
})
module.exports = {
couch
}
Your code is creating two instances of NodeCouchDB, couch and couchAuth where
couch points to server specified by envars without credentials
couchAuth points to the default server (127.0.0.1:5984) with credentials specified by envars
You need to combine parameters, for example
const NodeCouchDb = require("node-couchdb");
const couch = new NodeCouchDb({
host: process.env.DB_HOST,
protocol: process.env.DB_PROTOCOL,
port: process.env.DB_PORT,
auth: {
user: process.env.DB_USER_NAME,
pass: process.env.PASSWORD,
},
});
module.exports = {
couch
};

Make gulp-scp2 ask for password

I'm using gulp to move some folders to a remote directory via scp. If I declare the password in the gulpfile it works fine. Instead, I would rather be prompted for the password every time I run this task, so it is not left visible in the file.
How can I do that?
My gulp task:
var scp = require('gulp-scp2');
gulp.task('deploy', function(done) {
return gulp.src("./dist/**")
.pipe(scp({
host: 'localhost',
username: 'saifer',
dest: '/home/saifer/'
}))
.on('error', function(err) {
console.log(err);
});
});
Starting the task this way obviously throws an error like:
Error: Authentication failure. Available authentication methods: publickey,gssapi-keyex,gssapi-with-mic,password
I'm looking for something to add to the object passed to scp so that the gulp task asks for a password before trying to connect.
I believe prompting is a separate concern. You should solve it with a dedicated module (or just with the readline module from the standard library). The solution below uses the prompt module:
const util = require('util')
const prompt = require('prompt')
var schema = {
properties: {
password: {
hidden: true
}
}
};
prompt.start();
util.promisify(prompt.get)(schema).then(({ password }) => {
var scp = require('gulp-scp2');
gulp.task('deploy', function(done) {
return gulp.src("./dist/**")
.pipe(scp({
host: 'localhost',
username: 'saifer',
dest: '/home/saifer/',
password
}))
.on('error', function(err) {
console.log(err);
});
});
});

How to make gun accept a self-signed certificate?

gun 0.8.8, Node.js-to-Node.js, Node.js-to-browser
I see the following error in browser console:
VM103:161 WebSocket connection to 'wss://127.0.0.1:8080/gun' failed: Error in connection establishment: net::ERR_INSECURE_RESPONSE
VM103:161 WebSocket connection to 'wss://10.42.0.56:8080/gun' failed: Error in connection establishment: net::ERR_CONNECTION_REFUSED
And there are no messages on Node.js side.
Source code of my server:
const Hapi = require('hapi');
const Gun = require('gun');
const pem = require('pem');
pem.createCertificate({ days: 1, selfSigned: true }, function (err, keys) {
if (err) {
throw err
}
const server = new Hapi.Server;
var tls = {
key: keys.serviceKey,
cert: keys.certificate
};
server.connection({
port: 8080,
tls
});
server.connections.forEach(c => Gun({ web: c.listener, file: 'data.json' }));
server.route({
method: 'GET',
path: '/',
handler: function (request, reply) {
reply('Server works!');
}
});
server.start();
})
In order to make gun work with a self-signed certificate you need two things:
Launch the browser ignoring certificate errors. For example, Chrome:
google-chrome --ignore-certificate-errors
Put the following process option in Node.js code
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
or add the environment variable
export NODE_TLS_REJECT_UNAUTHORIZED=0

handle multiple request to node js api

I was asked to create a simple REST API with Node.js and write a script that creates 10000 elements in the database through API calls. I created the server with the Hapi framework. If I send a single or 100 'PUT' requests to the API, it creates a new element without problems, but if I try to make 1000 requests or more it won't be able to create all of them, or anything at all. I would like to know what the problem may be — whether I'm not writing the script correctly or the server is the problem. So far I've received 2 errors:
{ [Error: connect ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'connect' }
and (libuv) kqueue(): Too many open files in system
The first one trying to call the api 1000 times and the second one trying with 10000 times
The code of the server is the following
var Hapi = require('hapi');
var server = new Hapi.Server();
var joi = require("joi");
var dbOpts = {
"url" : "mongodb://localhost:27017/songsDB",
"settings" : {
"db" : {
"native_parser" : false
}
}
};
server.register({
register: require('hapi-mongodb'),
options: dbOpts
},function (err) {
if (err) {
console.error(err);
throw err;
}
});
server.connection({
host: 'localhost',
port: 8080
});
server.route({
method: 'POST',
path: '/songs',
config: {
handler: function (request, reply) {
var newSong = {
name: request.payload.name,
album: request.payload.album,
artist: request.payload.artist
};
var db = request.server.plugins['hapi-mongodb'].db;
db.collection('songs').insert(newSong, {w:1}, function (err, doc){
if (err){
return reply(Hapi.error.internal('Internal MongoDB error', err));
}else{
reply(doc);
}
});
},
validate:{
payload: {
name: joi.string().required(),
album: joi.string().required(),
artist: joi.string().required()
}
}
}
});
server.start(function () {
console.log('Server running at:', server.info.uri);
});
The code for the request is the following
var unirest = require('unirest');
for(var i = 1; i<=10000; i++){
unirest.post('http://localhost:8080/songs')
.header('Accept', 'application/json')
.send({ "name": "song"+i, "artist": "artist"+i, "album":"album"+i})
.end(function (response) {
console.log(response.body);
});
}
If running under OSX, open terminal and then try using:
sudo launchctl limit maxfiles 1000000 1000000
and then try again.
For "too many open files in system", looks like you reach the limit of your system. If you are using Linux, you can do a ulimit -a to display all settings.
There is one which may limit your number of open files.
open files (-n) 1024
Assuming you are on a Mac or Linux, you need to increase the maximum number of open files allowed by the system.
If you insert this into the terminal it will show you what your settings are:
lsof | wc -l
You will see that your 'open files' setting is likely smaller than the number you are trying to work with.
To change this setting, use the following command:
ulimit -n #####
where ##### is some arbitrary number (but higher than what you have).
If you are on a Windows machine, the answer is slightly more complicated. It seems Windows has a per-process limit, which can be modified (though it doesn't sound easy). Look here for some more details:
Windows equivalent of ulimit -n
When I ran the code, the first 11 POSTs would throw errors. Apparently it is because the script begins sending them before the mongodb connection is active. All I did was added a brief timeout to the POSTing to give mongodb a chance to start breathing. When I did that it worked fine. 10000 records upon completion.
All I changed was this:
setTimeout(function () {
for(var i = 1; i<=10000; i++){
unirest.post('http://localhost:8080/songs')
.header('Accept', 'application/json')
.send({ "name": "song"+i, "artist": "artist"+i, "album":"album"+i})
.end(function (response) {
//console.log(response.body);
});
}
}, 5000);

Check what files are present in remote directory with grunt

I'm looking for a way to check which files are present in a remote directory i want to access via ssh or similar and write the filenames into an array.
So far I've had no luck. Unix rsync has an -n flag which can print every file present at the destination location, but I don't understand how to use the rsync output in grunt.
Here's how you might do it via sftp with ssh2:
var SSH2 = require('ssh2');
var conn = new SSH2();
conn.on('ready', function() {
conn.sftp(function(err, sftp) {
if (err) throw err;
sftp.readdir('/tmp', function(err, list) {
if (err) throw err;
console.dir(list);
conn.end();
});
});
}).connect({
host: '192.168.100.100',
port: 22,
username: 'frylock',
// password: 'foobarbaz',
privateKey: require('fs').readFileSync('/here/is/my/key')
});

Categories

Resources