Make gulp-scp2 ask for password - javascript

I'm using gulp to move some folders into a remote directory via scp. It works fine if I declare the password in the gulpfile, but I would rather be prompted for the password every time I run the task than keep it visible in the file.
How can I do that?
My gulp task:
// Load the gulp-scp2 plugin, which copies files to a remote host over SCP.
var scp = require('gulp-scp2');
// Deploy task: pushes everything under ./dist to the remote directory.
gulp.task('deploy', function(done) {
// NOTE(review): no password or privateKey is supplied in the options below,
// which is why the server rejects the connection (see the error message).
return gulp.src("./dist/**")
.pipe(scp({
host: 'localhost',
username: 'saifer',
dest: '/home/saifer/'
}))
// Log transfer errors instead of letting the stream error crash the task.
.on('error', function(err) {
console.log(err);
});
});
Starting the task this way obviously throws an error like:
Error: Authentication failure. Available authentication methods: publickey,gssapi-keyex,gssapi-with-mic,password
I'm looking for something to add to scp's options object so that the gulp task asks for a password before trying to connect.

I believe prompting is an unrelated problem. You should solve it with a separate module (or just with the readline module from the standard library). The solution below uses the prompt module:
// Ask for the SCP password interactively (hidden input), then register
// the gulp "deploy" task once the credential is available.
const util = require('util')
const prompt = require('prompt')

// Describe the single value we want from the user; `hidden: true`
// keeps the typed password off the terminal.
var promptSchema = {
  properties: {
    password: {
      hidden: true
    }
  }
};

prompt.start();

// prompt.get is callback-based, so adapt it to a Promise before chaining.
util.promisify(prompt.get)(promptSchema).then(function (answers) {
  var password = answers.password;
  var scp = require('gulp-scp2');

  // The task is only registered after the password has been collected.
  gulp.task('deploy', function (done) {
    var upload = gulp.src("./dist/**").pipe(scp({
      host: 'localhost',
      username: 'saifer',
      dest: '/home/saifer/',
      password: password
    }));

    // Report transfer failures without crashing the task.
    return upload.on('error', function (err) {
      console.log(err);
    });
  });
});

Related

Can Tedious connection and request event listeners be put in a separate module for Node.js?

This is my first ever question on here so please excuse any abnormalities in etiquette.
I am new to Node.js and backend programming in general.
Right now I am using Node and Tedious to connect to a local SQL server. I'd like to keep my main.js file clean and so am trying to put everything related to my SQL connection in a separate js file. Below would be the simplest possible form I have for my main.js.
// Minimal HTTP server that opens a SQL connection per request using the
// helper module defined in SQLconnection.js.
var http = require('http');
var sqlmodule = require('./SQLconnection');
http.createServer(function (req, res) {
// NOTE(review): the response is never ended here; this example only
// demonstrates calling the connection helper on each request.
sqlmodule.makeConnection();
}).listen(8080);
I then have my SQLconnection.js file.
var Connection = require('tedious').Connection;
// Creates a Tedious connection. NOTE(review): `connection` is local to this
// function, so nothing outside it (including the listener below) can see it,
// and the connection object is discarded as soon as the function returns.
exports.makeConnection = function () {
var config = {
userName: 'XXXXXX',
password: 'XXXXXX',
server: 'XXXXXX'
};
var connection = new Connection(config);
};
//The below code is my event listener but I don't know how
//to incorporate it as part of the module.
// NOTE(review): this statement runs at module scope, where `connection` is
// not defined — requiring this file throws a ReferenceError here.
connection.on('connect', function(err) {
if (err) {
console.error('Connection error', err);
} else {
console.log('Connected');
}
});
I have no problems when the listener isn't present in the file, but I can't find a way to make it part of the SQLconnection.js module. I've tried adding exports and module.exports before it in a few ways, but with no success. The fact that it listens for an event rather than being a normal function is stumping me.
How would I go about getting the event listeners in the separate file?
I'm also trying to go about this as vanilla as possible, so I'm just using Node.js and Tedious at this point.
change
exports.makeConnection = function () {
to
function makeConnection() {
...
module.exports = {makeConnection}
As an additional change, you need to put your connection listener in the same scope as the connection variable. Personally, I would also have makeConnection return a Promise with the connection, so you are not operating on a connection that has failed or has not yet connected. Something like
var Connection = require('tedious').Connection;

/**
 * Open a Tedious connection and resolve once the server accepts it.
 *
 * Returning a Promise means callers never operate on a connection that
 * has failed or has not finished connecting yet.
 *
 * @returns {Promise} resolves with the live connection object, rejects
 *   with the error passed to the 'connect' event on failure.
 */
function makeConnection() {
  var config = {
    userName: 'XXXXXX',
    password: 'XXXXXX',
    server: 'XXXXXX'
  };
  return new Promise((resolve, reject) => {
    var connection = new Connection(config);
    // The listener lives in the same scope as `connection`, so it can
    // settle the promise with the exact connection it belongs to.
    connection.on('connect', function(err) {
      if (err) return reject(err);
      resolve(connection);
    });
  }); // <- the closing `)` for new Promise( was missing in the original
}

module.exports = {makeConnection}

how to get pretty urls in hapi js

I have my server.js file working. At localhost:8080 it will serve the file I give it from the corresponding URL name, like so: http://localhost:8080/about.html, as long as the file exists in public/pages. I'm wondering if I can somehow set a wildcard to leave off extensions for all HTML files in the URL, so that I don't have to individually specify each file as an alias in the routes like - ['about','about.html'].
Here is my working code -
'use strict';
// Static-file server: hapi + inert serving everything under public/pages.
const Path = require('path');
const Hapi = require('hapi');
const server = new Hapi.Server();
// Port comes from the first CLI argument, falling back to 8080.
server.connection({
port: Number(process.argv[2] || 8080),
host: 'localhost'
});
// inert provides the `directory` handler used by the route below.
server.register(require('inert'), (err) => {
if (err) {
throw err;
}
// Catch-all route: {param*} matches any path depth and maps it straight
// onto the public/pages directory (directory listing enabled).
server.route({
method: 'GET',
path: '/{param*}',
handler: {
directory: {
path: 'public/pages',
listing: true
}
},
config: {
state: {
parse: false, // do NOT parse cookies into request.state
failAction: 'ignore' // may also be 'error' or 'log'
}
}
});
server.start((err) => {
if (err) {
throw err;
}
console.log('Server running at:', server.info.uri);
});
});
Any help is greatly appreciated, thank you.

Running script from Heroku CLI

I have a script that I want to run through the Heroku CLI. It's just a simple script to create a user in a postgressql database with Sequelize.
This is the script:
// CLI script: create a user row via Sequelize.
// NOTE(review): there is no "#!/usr/bin/env node" shebang line — presumably
// why `heroku run bin/createUser.js` executes this file as a shell script.
const argv = require('yargs').argv;
const Sequelize = require('sequelize');
const sqlizr = require('sqlizr');
require('dotenv').load();
// Check params
if (!argv.username) { throw new Error('username is required!'); }
if (!argv.password) { throw new Error('password is required!'); }
if (!argv.clientId) { throw new Error('client id is required!'); }
// Init db connection
const sequelize = new Sequelize(
process.env.DB_DATABASE,
process.env.DB_USER,
process.env.DB_PASS,
{
host: process.env.DB_HOST,
port: 5432,
dialect: 'postgres',
logging: false
}
)
// A clientId of -1 is treated as "no client" and stored as 0.
var client_id = argv.clientId;
if(argv.clientId === -1){
client_id = 0;
}
console.log(sequelize)
// Load all model definitions so sequelize.models.USERS exists below.
sqlizr(sequelize, 'api/models/**/*.js');
// Check if user exists
console.log('Check is user exists...');
sequelize.models.USERS.count({
where: {
USERNAME: argv.username
}
})
.then(result => {
if (result > 0) {
console.error('user already exists!');
// Exits the process immediately, so the creation step never runs.
process.exit(1);
}
})
.then(() => {
console.log('Creating user...');
sequelize.models.USERS.create({
USERNAME: argv.username,
PASSWORD: argv.password,
CLNT_ID: client_id,
EMAIL: 'email#email.com',
PHONE: '123456789'
})
.then(result => {
console.log('User created successfully!');
})
.catch(error => {
console.error('Could not create user!', error);
})
// NOTE(review): exits with code 1 even on success — presumably
// unintentional; verify whether exit code 0 was meant here.
.finally(result => {
process.exit(1);
});
});
Everything goes well if I execute this command locally:
node bin/createUser.js --username admin --password admin --clientId -1
But If i try to run this through the Heroku CLI like this:
heroku run bin/createUser.js --username admin --password admin --clientId -1
I get this in the terminal:
bin/createUser.js: line 4: syntax error near unexpected token `('
bin/createUser.js: line 4: `const yargs = require('yargs');'
I can't figure out what I'm doing wrong here. Hopefully someone can help me and explain why this is happening
You forgot to specify node in the command, so I suspect that Heroku is trying to run createUser.js as if it were a shell script.
You may need to install a node.js buildpack to be able to run the program on Heroku, but try:
heroku run node bin/createUser.js --username admin --password admin --clientId -1

Check what files are present in remote directory with grunt

I'm looking for a way to check which files are present in a remote directory I want to access via ssh or similar, and to write the filenames into an array.
So far I've had no luck. Unix rsync has an -n flag which can print every file present at the destination location, but I don't understand how to use the rsync output in grunt.
Here's how you might do it via sftp with ssh2:
// List the contents of a remote directory over SFTP using the ssh2 module.
var SSH2 = require('ssh2');

var client = new SSH2();

// Once the connection is authenticated, open an SFTP session and read
// the target directory.
client.on('ready', function () {
  client.sftp(function (err, sftp) {
    if (err) throw err;
    sftp.readdir('/tmp', function (err, list) {
      if (err) throw err;
      // `list` is an array of directory entries; dump it, then disconnect.
      console.dir(list);
      client.end();
    });
  });
});

client.connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  // password: 'foobarbaz',
  privateKey: require('fs').readFileSync('/here/is/my/key')
});

ftps module connects but hangs up

I'm using the ftps module and I've got lftp installed on Cygwin. I'm having trouble because my node js app looks like it's connecting fine but none of my commands are running. The documentation for the module isn't very detailed so I've just been trying what ever I can to get this running. I'm tying to get a file from the ftp site.
Here is my code:
// Connect to an SFTP server with the ftps module (a wrapper around lftp).
var ftps = require('ftps');
// ftps connection
var ftp = new ftps ({
host: 'test.ftpsite.com',
username: 'test',
password: 'test',
protocol: 'sftp'
});
// look at remote directory
console.log(ftp);
// Queue cd + get, then exec() runs the queued lftp commands; exec's
// callback receives the command output.
ftp.cd('TestDir/').get('/UploadTest.txt', '/cygdrive/c/Users/Administrator/UploadTest.txt').exec(console.log);
Output:
CMO-Application-Server>node app.js
{ options:
{ host: 'test.ftpsite.com',
username: 'test',
password: 'test' },
cmds: [] }
At this point in the output, the app just hangs up like it's attempting to run the commands. I've been letting it run for about 10 minutes now and still nothing.
For sftp, here's how you could do it with the ssh2 module:
// Download a single file over SFTP with the ssh2 module.
var Connection = require('ssh2');

var sshClient = new Connection();

sshClient.on('ready', function () {
  // Connection is authenticated; start an SFTP session.
  sshClient.sftp(function (err, sftp) {
    if (err) throw err;
    var remotePath = 'TestDir/UploadTest.txt';
    var localPath = '/cygdrive/c/Users/Administrator/UploadTest.txt';
    sftp.fastGet(remotePath, localPath, function (err) {
      if (err) throw err;
      // Transfer finished; close the SSH connection.
      sshClient.end();
    });
  });
});

sshClient.connect({
  host: 'test.ftpsite.com',
  port: 22,
  username: 'test',
  password: 'test'
});

Categories

Resources