Check what files are present in a remote directory with grunt - javascript

I'm looking for a way to check which files are present in a remote directory that I want to access via SSH or similar, and to write the filenames into an array.
So far I've had no luck. Unix rsync has an -n flag that can print every file present at the destination, but I don't see how to use the rsync output in grunt.

Here's how you might do it via SFTP with the ssh2 module:
var SSH2 = require('ssh2');
var conn = new SSH2();

conn.on('ready', function() {
  conn.sftp(function(err, sftp) {
    if (err) throw err;
    sftp.readdir('/tmp', function(err, list) {
      if (err) throw err;
      console.dir(list);
      conn.end();
    });
  });
}).connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  // password: 'foobarbaz',
  privateKey: require('fs').readFileSync('/here/is/my/key')
});
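Since the goal is to write the filenames into an array: each entry that readdir hands back is an object with filename, longname, and attrs properties, so you can map over the list inside the callback above. A minimal addition:

var filenames = list.map(function(entry) {
  return entry.filename;
});
console.log(filenames);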

Related

jsftp uploading crappy Files

Today I've got a problem with jsftp.
When I run my script and upload.txt is empty, jsftp correctly uploads an empty file to the server.
When upload.txt is a UTF-8 JavaScript file, for example const x = "Hello World!";, jsftp uploads a file named .pureftpd-upload.5e35d750.15.79c5.1b3bbd87 with 0 bytes (i.e. no data).
Here's my script:
var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

ftp.auth(user.name, user.pass, function(err) {
  if (err) {
    openDialog();
    console.error(err);
    throw err;
  } else {
    ftp.put(path_datafile, '/directory/hello.js', function(err) {
      console.error(err);
      throw err;
    });
  }
});
(There are no errors in the output)
For the people saying you need to read the file first:
var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

fs.readFile(path_datafile, 'utf8', function(err, buff) {
  if (err) {
    console.log(err);
  } else {
    ftp.put(buff, '/directory/hello.js', function(err) {
      console.error(err);
      throw err;
    });
  }
});
That didn't work at all.
It then shows the error:
Local file doesn't exist
The problem here is that the fs module is converting the file to UTF-8: fs.readFile(file, 'utf8', callback).
To solve it, write fs.readFile(file, callback) instead, so that the file is returned as raw bytes (a Buffer).
The script will output false but work fine.
So Ragnar is right. His answer is correct when you aren't using the utf8 encoding with fs.
Don't use it and it will work.
Ragnar: https://stackoverflow.com/users/1052928/ragnar
His answer: https://stackoverflow.com/a/26939160/12470434
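Putting the fix together, here is a minimal sketch of the working upload (it assumes the same user, path_datafile, and remote path as above). The key point is the missing encoding argument, so fs.readFile yields a raw Buffer:

var fs = require('fs');
var jsftp = require('jsftp');

var ftp = new jsftp({
  host: "ftp.example.url",
  port: 21,
  user: user.name,
  pass: user.pass
});

// No 'utf8' argument: readFile returns a Buffer, which jsftp
// uploads byte-for-byte instead of as a re-encoded string.
fs.readFile(path_datafile, function(err, buff) {
  if (err) throw err;
  ftp.put(buff, '/directory/hello.js', function(err) {
    if (err) console.error(err);
  });
});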

Make gulp-scp2 ask for password

I'm using gulp to move some folders to a remote directory via scp. If I declare the password in the gulpfile, it works fine. Instead, I want to be prompted for the password every time I run this task, rather than having it visible in the file.
How can I do that?
My gulp task:
var scp = require('gulp-scp2');

gulp.task('deploy', function(done) {
  return gulp.src("./dist/**")
    .pipe(scp({
      host: 'localhost',
      username: 'saifer',
      dest: '/home/saifer/'
    }))
    .on('error', function(err) {
      console.log(err);
    });
});
Starting the task this way obviously throws an error like:
Error: Authentication failure. Available authentication methods: publickey,gssapi-keyex,gssapi-with-mic,password
I'm looking for something to add to the options object passed to scp, so that the gulp task asks for a password before trying to connect.
I believe prompting is an unrelated problem. You should solve it with a separate module (or just with the readline module from the standard library). The solution below uses the prompt module:
const util = require('util');
const prompt = require('prompt');

var schema = {
  properties: {
    password: {
      hidden: true
    }
  }
};

prompt.start();
util.promisify(prompt.get)(schema).then(({ password }) => {
  var scp = require('gulp-scp2');
  gulp.task('deploy', function(done) {
    return gulp.src("./dist/**")
      .pipe(scp({
        host: 'localhost',
        username: 'saifer',
        dest: '/home/saifer/',
        password
      }))
      .on('error', function(err) {
        console.log(err);
      });
  });
});
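One caveat, and this is an assumption about how gulp loads tasks rather than part of the original answer: gulp generally expects tasks to be registered synchronously while the gulpfile is loaded, so registering 'deploy' inside the promise callback may leave gulp complaining that the task does not exist. If that happens, a minimal sketch that prompts inside the task body instead:

const util = require('util');
const prompt = require('prompt');
const gulp = require('gulp');
const scp = require('gulp-scp2');

var schema = {
  properties: {
    password: { hidden: true }
  }
};

gulp.task('deploy', function(done) {
  prompt.start();
  // ask for the password only when the task actually runs
  util.promisify(prompt.get)(schema).then(function(result) {
    gulp.src('./dist/**')
      .pipe(scp({
        host: 'localhost',
        username: 'saifer',
        dest: '/home/saifer/',
        password: result.password
      }))
      .on('error', done)
      .on('end', done); // assumes the scp2 stream emits 'end' when finished
  });
});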

Node.js - catch upload to FTP error (ftp module)

How can I catch an upload error in Node.js via FTP, for example when my computer loses its connection? I am using the ftp module, and none of these events (error, close, or end) reports any errors. Or maybe there are other solutions, i.e. another npm module?
var Client = require('ftp');
var c = new Client();

c.on('ready', callback);
c.on('error', function(err) {
  console.log(err);
});
c.on('close', function(err) {
  console.log(err);
});
c.on('end', function() {
  console.log("END");
});

c.connect({
  host: host,
  port: port,
  user: username,
  password: password
});
...
c.put(uploadfile, uploadpath, onupload);
I know that editing the library is not the best idea, but I solved my problem by adding this code:
if (err.indexOf("ECONNRESET") > -1) {
  this._reset();
}
after
sock.once('error', function(err) {
  self.emit('error', err);
at line 965 of connection.js in the ftp library.
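If you'd rather not patch the library, a non-invasive alternative is to treat an 'error' or 'close' event that fires while an upload is in flight as a failed upload. A minimal sketch; putWithErrorHandling is a hypothetical helper, not part of the ftp module's API:

// Runs c.put(), but also fails fast if the connection errors
// out or closes before the upload's own callback has fired.
function putWithErrorHandling(c, localPath, remotePath, callback) {
  var finished = false;
  function done(err) {
    if (finished) return;
    finished = true;
    c.removeListener('error', done);
    c.removeListener('close', onClose);
    callback(err);
  }
  function onClose(hadErr) {
    done(new Error(hadErr ? 'connection closed by a transmission error'
                          : 'connection closed during upload'));
  }
  c.on('error', done);
  c.on('close', onClose);
  c.put(localPath, remotePath, done);
}

// usage, with the connected client from the question:
// putWithErrorHandling(c, uploadfile, uploadpath, onupload);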

Handle multiple requests to a Node.js API

I was asked to create a simple REST API with Node.js, and a script that creates 10000 elements in the database through API calls. I created the server with the Hapi framework. If I send a single request, or even 100 'POST' requests, the API creates the new elements without problems, but if I try to make 1000 requests or more it won't be able to create all of them, or anything at all. I would like to know what the problem may be: whether my script is incorrect or whether the server is the problem. So far I've received two errors:
{ [Error: connect ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'connect' }
and (libuv) kqueue(): Too many open files in system
The first occurred when calling the API 1000 times, and the second when trying 10000 times.
The server code is the following:
var Hapi = require('hapi');
var server = new Hapi.Server();
var joi = require("joi");

var dbOpts = {
  "url": "mongodb://localhost:27017/songsDB",
  "settings": {
    "db": {
      "native_parser": false
    }
  }
};

server.register({
  register: require('hapi-mongodb'),
  options: dbOpts
}, function(err) {
  if (err) {
    console.error(err);
    throw err;
  }
});

server.connection({
  host: 'localhost',
  port: 8080
});

server.route({
  method: 'POST',
  path: '/songs',
  config: {
    handler: function(request, reply) {
      var newSong = {
        name: request.payload.name,
        album: request.payload.album,
        artist: request.payload.artist
      };
      var db = request.server.plugins['hapi-mongodb'].db;
      db.collection('songs').insert(newSong, { w: 1 }, function(err, doc) {
        if (err) {
          return reply(Hapi.error.internal('Internal MongoDB error', err));
        } else {
          reply(doc);
        }
      });
    },
    validate: {
      payload: {
        name: joi.string().required(),
        album: joi.string().required(),
        artist: joi.string().required()
      }
    }
  }
});

server.start(function() {
  console.log('Server running at:', server.info.uri);
});
The code for the requests is the following:
var unirest = require('unirest');

for (var i = 1; i <= 10000; i++) {
  unirest.post('http://localhost:8080/songs')
    .header('Accept', 'application/json')
    .send({ "name": "song" + i, "artist": "artist" + i, "album": "album" + i })
    .end(function(response) {
      console.log(response.body);
    });
}
If running under OS X, open a terminal and then try:
sudo launchctl limit maxfiles 1000000 1000000
and then try again.
For "too many open files in system", looks like you reach the limit of your system. If you are using Linux, you can do a ulimit -a to display all settings.
There is one which may limit your number of open files.
open files (-n) 1024
Assuming you are on a Mac or Linux, you need to increase the maximum number of open files allowed by the system.
This command shows how many file handles are currently open:
lsof | wc -l
You will likely find that your 'open files' limit is smaller than the number you are trying to work with.
To change the setting, use the following command:
ulimit -n #####
where ##### is some arbitrary number (but higher than what you currently have).
If you are on a Windows machine, the answer is slightly more complicated. Windows has a per-process limit, which can be modified (though it doesn't sound easy). See here for more details:
Windows equivalent of ulimit -n
When I ran the code, the first 11 POSTs would throw errors, apparently because the script begins sending them before the MongoDB connection is active. All I did was add a brief timeout to the POSTing, to give MongoDB a chance to start breathing. Once I did that it worked fine: 10000 records upon completion.
All I changed was this:
setTimeout(function() {
  for (var i = 1; i <= 10000; i++) {
    unirest.post('http://localhost:8080/songs')
      .header('Accept', 'application/json')
      .send({ "name": "song" + i, "artist": "artist" + i, "album": "album" + i })
      .end(function(response) {
        //console.log(response.body);
      });
  }
}, 5000);
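A more robust fix than a fixed timeout is to cap how many requests are in flight at once, so the process never exhausts the open-file limit in the first place. A minimal sketch of a worker pool around the same unirest call; the concurrency value of 100 is an arbitrary assumption, not something from the answers above:

var unirest = require('unirest');

var total = 10000;
var concurrency = 100; // keep well below the OS open-file limit
var next = 1;

function sendNext() {
  if (next > total) return;
  var i = next++;
  unirest.post('http://localhost:8080/songs')
    .header('Accept', 'application/json')
    .send({ "name": "song" + i, "artist": "artist" + i, "album": "album" + i })
    .end(function(response) {
      // only start another request once this one has finished
      sendNext();
    });
}

for (var w = 0; w < concurrency; w++) {
  sendNext();
}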

ftps module connects but hangs up

I'm using the ftps module and I have lftp installed on Cygwin. I'm having trouble because my Node.js app looks like it's connecting fine, but none of my commands are running. The documentation for the module isn't very detailed, so I've just been trying whatever I can to get this running. I'm trying to get a file from the FTP site.
Here is my code:
var ftps = require('ftps');

// ftps connection
var ftp = new ftps({
  host: 'test.ftpsite.com',
  username: 'test',
  password: 'test',
  protocol: 'sftp'
});

// look at the remote directory
console.log(ftp);
ftp.cd('TestDir/').get('/UploadTest.txt', '/cygdrive/c/Users/Administrator/UploadTest.txt').exec(console.log);
Output:
CMO-Application-Server>node app.js
{ options:
   { host: 'test.ftpsite.com',
     username: 'test',
     password: 'test' },
  cmds: [] }
At this point the app just hangs, as if it's attempting to run the commands. I've let it run for about 10 minutes now and still nothing.
For sftp, here's how you could do it with the ssh2 module:
var Connection = require('ssh2');
var ssh = new Connection();

ssh.on('ready', function() {
  ssh.sftp(function(err, sftp) {
    if (err) throw err;
    sftp.fastGet('TestDir/UploadTest.txt',
                 '/cygdrive/c/Users/Administrator/UploadTest.txt',
                 function(err) {
      if (err) throw err;
      ssh.end();
    });
  });
}).connect({
  host: 'test.ftpsite.com',
  port: 22,
  username: 'test',
  password: 'test'
});
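A side benefit worth noting: ssh2 implements the protocol in JavaScript itself, whereas the ftps module shells out to lftp, so this approach removes the dependency on a working lftp install under Cygwin.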
