Failing on SFTP deploy, CircleCI, Nuxt - JavaScript

For several months I was able to deploy to my server with no problems using CircleCI, but ever since the outage two days ago (not sure if the two are related), my deployment script has been failing.
Here is the deployment script:
require('dotenv').config();
console.log("Starting upload")
var SftpUpload = require('sftp-upload'),
    fs = require('fs');

var options = {
    host: process.env.FTP_HOST,
    username: process.env.FTP_UN,
    password: process.env.FTP_PW,
    path: './dist',
    remoteDir: process.env.FTP_PATH,
    // excludedFolders: ['**/.git', 'node_modules'],
    // exclude: ['.gitignore', '.vscode/tasks.json'],
    // privateKey: fs.readFileSync('privateKey_rsa'),
    // passphrase: fs.readFileSync('privateKey_rsa.passphrase'),
    dryRun: false,
}
console.log(options);
sftp = new SftpUpload(options);
console.log("sftp working ahead")
sftp.on('error', function(err) {
    console.log(options)
    console.log("ignoring error for now")
    //throw err;
})
.on('uploading', function(progress) {
    console.log(options);
    console.log('Uploading', progress.file);
    console.log(progress.percent+'% completed');
})
.on('completed', function() {
    console.log('Upload Completed');
})
.upload();
Here is the error:
Starting upload
{
  host: '************************',
  username: '*********',
  password: '************',
  path: './dist',
  remoteDir: '*****************************************',
  dryRun: false
}
sftp working ahead
buffer.js:330
    throw new ERR_INVALID_ARG_TYPE(
    ^

TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object. Received undefined
    at Function.from (buffer.js:330:9)
    at new Buffer (buffer.js:286:17)
    at onNEWKEYS (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:2282:29)
    at Parser.<anonymous> (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:123:5)
    at Parser.emit (events.js:314:20)
    at Parser.parsePacket (/home/circleci/repo/node_modules/ssh2/lib/Parser.js:468:12)
    at Parser.execute (/home/circleci/repo/node_modules/ssh2/lib/Parser.js:249:14)
    at Socket.<anonymous> (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:523:18)
    at Socket.emit (events.js:314:20)
    at addChunk (_stream_readable.js:297:12) {
  code: 'ERR_INVALID_ARG_TYPE'
}
npm ERR! code ELIFECYCLE
npm ERR! errno 1
npm ERR! nyb.nyb.nyb@1.0.0 deploy: `node deploy-sftp.js`
npm ERR! Exit status 1
npm ERR!
npm ERR! Failed at the nyb.nyb.nyb@1.0.0 deploy script.
npm ERR! This is probably not a problem with npm. There is likely additional logging output above.
npm ERR! A complete log of this run can be found in:
npm ERR! /home/circleci/.npm/_logs/2022-09-16T08_55_31_507Z-debug.log
Exited with code exit status 1
CircleCI received exit code 1
All of this happens after I have installed the packages and built the static files with nuxt generate, which I then SFTP to the server. I'm confused about what is happening and how I might salvage my pipeline. Any suggestions are greatly appreciated, thank you!

While I did not find the cause of the above error, I found a workaround using ssh2-sftp-client; documentation and source code are here.
The new deployment script is as follows:
let SftpClient = require('ssh2-sftp-client');
const path = require('path');
require('dotenv').config()

const config = {
    host: process.env.FTP_HOST,
    username: process.env.FTP_UN,
    password: process.env.FTP_PW,
    port: process.env.FTP_PORT || 22
};

async function main() {
    const client = new SftpClient('upload-test');
    const src = path.join(__dirname, './dist/');
    const dst = process.env.FTP_PATH;
    try {
        await client.connect(config);
        client.on('upload', info => {
            console.log(`Listener: Uploaded ${info.source}`);
        });
        let rslt = await client.uploadDir(src, dst);
        return rslt;
    } catch (err) {
        console.error(err);
    } finally {
        client.end();
    }
}

main()
    .then(msg => {
        console.log(msg);
    })
    .catch(err => {
        console.log(`main error: ${err.message}`);
    });
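One more thing worth ruling out in CI: if an environment variable is missing from the project settings, the connection options end up undefined and the failure can be cryptic. A minimal pre-flight guard (a sketch, assuming the same variable names as above):

require('dotenv').config();

// Hypothetical sanity check: fail fast with a clear message if any
// required variable is unset, instead of a confusing library error.
for (const name of ['FTP_HOST', 'FTP_UN', 'FTP_PW', 'FTP_PATH']) {
    if (!process.env[name]) {
        console.error(`Missing required environment variable: ${name}`);
        process.exit(1);
    }
}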
Hope this helps anyone encountering similar problems.

Related

I want to first connect to AWS and run a Python command from Node.js via an API. Not sure how to do this?

I want to execute the 'python3 sample.py --path.data=/root/tej4' command from Node.js. Below is my code:
const runningPython = async (req, res, next) => {
    ssh.connect({
        host: '******',
        port: 22,
        username: 'ubuntu',
        privateKey: '*******'
    }).then(function () {
        exec('python3 sample.py --path.data=/root/tej4', (err, stdout, stderr) => {
            if (err) {
                console.log(err)
            }
            // the *entire* stdout and stderr (buffered)
            console.log(`stdout: ${stdout}`);
            console.log(`stderr: ${stderr}`);
        });
    })
}
But the response I am getting shows that the command is trying to run locally rather than on the AWS machine. Not sure how to achieve this?
Error: Command failed: python3 sample.py --path.data=/root/tej4
Python was not found; run without arguments to install from the Microsoft Store, or disable this shortcut from Settings > Manage App Execution Aliases.
    at ChildProcess.exithandler (node:child_process:397:12)
    at ChildProcess.emit (node:events:390:28)
    at maybeClose (node:internal/child_process:1064:16)
    at Process.ChildProcess._handle.onexit (node:internal/child_process:301:5) {
  killed: false,
  code: 9009,
  signal: null,
  cmd: 'python3 sample.py --path.data=/root/tej4'
}
stdout:
stderr: Python was not found; run without arguments to install from the Microsoft Store, or disable this shortcut from Settings > Manage App Execution Aliases.
You have to create an instance of node_ssh and use that instance to call the connect and execCommand methods:
const node_ssh = require('node-ssh');
const ssh = new node_ssh();
ssh
    .connect({
        host: '******',
        port: 22,
        username: 'ubuntu',
        privateKey: '*******'
    })
    .then(() => {
        ssh.execCommand('python3 sample.py --path.data=/root/tej4', { cwd: '/your-python-script-path-here' })
            .then((result) => {
                console.log('STDOUT: ' + result.stdout)
                console.log('STDERR: ' + result.stderr)
            });
    });
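Note: depending on your node-ssh version, the import may differ; newer releases (v6 and up) expose the class as a named export. A small sketch of that variant:

// In node-ssh v6+, the class is a named export rather than the module itself.
const { NodeSSH } = require('node-ssh');
const ssh = new NodeSSH();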
You can use the ssh lib, but for sudo su and running multiple commands, switch to ssh2shell; the code for that is below:
const SSH2Shell = require('ssh2shell');
const fs = require('fs');

const runningPython = async (req, res, next) => {
    var host = {
        server: {
            host: '*****',
            username: '*****',
            privateKey: fs.readFileSync('ppk file path'),
        },
        commands: ["sudo su -", "python3 sample.py --path.data=root/tej4"],
        onCommandComplete: function (command, response, sshObj) {
            if (sshObj.debug) {
                this.emit("msg", this.sshObj.server.host + ": host.onCommandComplete event, command: " + command);
            }
            if (command === "sudo su -" && response.indexOf("/root") != -1) {
                sshObj.commands.unshift("msg:The command and response check worked. Added another cd command.");
                sshObj.commands.unshift("cd .ssh");
            } else if (command === "python3 sample.py --path.data=root/tej4") {
                this.emit("msg", response);
            }
        },
        onEnd: function (sessionText, sshObj) {
            if (sshObj.debug) {
                this.emit("msg", this.sshObj.server.host + ": host.onEnd event");
            }
        }
    };
    let SSH = new SSH2Shell(host),
        //Use a callback function to process the full session text
        callback = function (sessionText) {
            console.log(sessionText)
        }
    //Start the process
    SSH.connect(callback);
}

Why does my RESTful API with Express and Node.js crash every day?

I have a RESTful API with Express and Node.js, but the API keeps crashing.
I have one function for the datetime, which replaces the date in the URL with the current datetime each time. I'm not sure, but maybe the API crashes when the current date rolls over to a new one.
I see this error message on the console:
events.js:187
    throw er; // Unhandled 'error' event
    ^

Error: read ECONNRESET
    at TCP.onStreamRead (internal/stream_base_commons.js:201:27)
Emitted 'error' event on Connection instance at:
    at Connection._handleProtocolError (C:\Users\Admin\Desktop\node-express\node_modules\mysql\lib\Connection.js:426:8)
    at Protocol.emit (events.js:210:5)
    at Protocol._delegateError (C:\Users\Admin\Desktop\node-express\node_modules\mysql\lib\protocol\Protocol.js:398:10)
    at Protocol.handleNetworkError (C:\Users\Admin\Desktop\node-express\node_modules\mysql\lib\protocol\Protocol.js:371:10)
    at Connection._handleNetworkError (C:\Users\Admin\Desktop\node-express\node_modules\mysql\lib\Connection.js:421:18)
    at Socket.emit (events.js:210:5)
    at emitErrorNT (internal/streams/destroy.js:92:8)
    at emitErrorAndCloseNT (internal/streams/destroy.js:60:3)
    at processTicksAndRejections (internal/process/task_queues.js:80:21) {
  errno: 'ECONNRESET',
  code: 'ECONNRESET',
  syscall: 'read',
  fatal: true
}
npm ERR! code ELIFECYCLE
npm ERR! errno 1
npm ERR! express-api@1.0.0 start: `node server.js`
npm ERR!
npm ERR! Failed at the express-api@1.0.0 start script.
npm ERR! This is probably not a problem with npm. There is likely additional logging output above.
npm ERR! A complete log of this run can be found in:
npm ERR! C:\Users\Admin\AppData\Roaming\npm-cache\_logs\2019-12-29T04_48_17_190Z-debug.log
So this is the code for the API. It's very simple, but I don't know how to fix this error.
Every time I wake up and try to get data from the API, it has crashed.
This is the code of the API:
// Create express app
var express = require("express")
var app = express()
var mysql = require('mysql')
var express = require("express")
var cors = require('cors')
app.use(cors())

// Server port
var HTTP_PORT = 8000

// Start server
app.listen(HTTP_PORT, () => {
    console.log("Server running on port %PORT%".replace("%PORT%", HTTP_PORT))
});

var con = mysql.createConnection({
    host: "192.168.0.1",
    port: "1234",
    user: "username",
    password: "password"
});

let aladinModel = '';
let aladinModelStations = '';

function formatDate(date) {
    var d = new Date(date),
        month = '' + (d.getMonth() + 1),
        day = '' + d.getDate(),
        year = d.getFullYear();
    if (month.length < 2)
        month = '0' + month;
    if (day.length < 2)
        day = '0' + day;
    return [year, month, day].join('-');
}

var dateNow = formatDate(Date());

app.route('/')
    .get(function (req, res) {
        // omitted
        res.setHeader('Access-Control-Allow-Origin', '*', 'Cache-Control', 'private, no-cache, no-store, must-revalidate');
        const date = req.query.date;
        const id = req.query.id;
        const daysForward = req.query.daysForward;
        try {
            const query = `CALL aladin_surfex.Get_mod_cell_values_meteogram_cell('${dateNow}', ${id}, ${daysForward})`;
            con.query(query, function (err, result, fields) {
                if (err) throw err;
                aladinModel = result;
            });
            res.json({ aladinModel })
        } catch (error) {
            console.log("Error query database!!!");
        }
    });

app.route('/stations')
    .get(function (req, res) {
        // omitted
        res.setHeader('Access-Control-Allow-Origin', '*');
        try {
            const query2 = `SELECT Station,Ime FROM aladin_surfex.stations_cells;`;
            con.query(query2, function (err, result2, fields) {
                if (err) throw err;
                aladinModelStations = result2;
            });
            res.json({ aladinModelStations })
        } catch (error) {
            console.log("Error query database!!!");
        }
    });

app.use(function (req, res) {
    res.status(404);
});
I tried removing the cache, updating npm, and restarting the computer, but without result.
With nodemon it crashes the same way.
What can I do? How can I fix this?
The error seems to be related to the connection to MySQL.
As the mysqljs documentation says (https://github.com/mysqljs/mysql#error-handling):
Note: 'error' events are special in node. If they occur without an attached listener, a stack trace is printed and your process is killed.
You should intercept the connection error like this:
connection.on('error', function(err) {
    console.log(err.code); // 'ER_BAD_DB_ERROR'
});
This way you can investigate when and why the error occurs, and you can recreate the connection when a problem occurs; a sketch of that reconnect pattern follows.
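For example, a minimal sketch of the usual reconnect pattern with the mysql package (reusing the connection config from the question; the retry delay is arbitrary) could look like this:

var mysql = require('mysql');
var con;

function handleDisconnect() {
    con = mysql.createConnection({
        host: "192.168.0.1",
        port: "1234",
        user: "username",
        password: "password"
    });
    con.connect(function (err) {
        if (err) {
            // Retry after a short delay instead of crashing the process.
            setTimeout(handleDisconnect, 2000);
        }
    });
    con.on('error', function (err) {
        console.log('db error', err.code);
        if (err.code === 'PROTOCOL_CONNECTION_LOST' || err.fatal) {
            // The server closed the connection (e.g. an overnight timeout): reconnect.
            handleDisconnect();
        } else {
            throw err;
        }
    });
}

handleDisconnect();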
Move app.listen to the bottom of the file, after you declare all the routes and connect to the database.
Use app.get('route', function...) (more on that in the Express docs).
Move res.json() inside the callback function for each database query. The result comes back asynchronously, so it will not be accessible outside the function; a sketch of this change is shown below. If you're new to async and callbacks in JavaScript, I'd recommend googling them; you'll find a ton of reading material.
Initialize your variables, e.g. const aladinModel = ...
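For instance, a minimal sketch of the /stations route with res.json moved inside the query callback (same names as in the question's code; the error response shape is illustrative):

app.get('/stations', function (req, res) {
    res.setHeader('Access-Control-Allow-Origin', '*');
    const query2 = `SELECT Station,Ime FROM aladin_surfex.stations_cells;`;
    con.query(query2, function (err, result2, fields) {
        if (err) {
            console.log("Error query database!!!");
            return res.status(500).json({ error: 'database error' });
        }
        // Respond only once the asynchronous query has completed.
        res.json({ aladinModelStations: result2 });
    });
});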

What is the easiest way to get the duration of a video stored in Firebase Storage in a Firebase Cloud Function?

I have a trigger-based cloud function that should find the duration of a video uploaded to Firebase Storage.
I tried using the following npm module: get-video-duration, which takes a URL, the file itself, or a stream.
Using the public URL of my file doesn't work; my catch logs:
{ Error: spawn ffprobe ENOENT
    at exports._errnoException (util.js:1020:11)
    at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
    at onErrorNT (internal/child_process.js:376:16)
    at _combinedTickCallback (internal/process/next_tick.js:80:11)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
  code: 'ENOENT',
  errno: 'ENOENT',
  syscall: 'spawn ffprobe',
  path: 'ffprobe',
  spawnargs:
   [ '-v',
     'error',
     '-show_format',
     '-show_streams',
     'https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu@adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D' ],
  stdout: '',
  stderr: '',
  failed: true,
  signal: null,
  cmd: 'ffprobe -v error -show_format -show_streams https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu@adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D',
  timedOut: false,
  killed: false }
Downloading the file and then passing it directly doesn't work either:
{ Error: spawn ffprobe ENOENT
    at exports._errnoException (util.js:1020:11)
    at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
    at onErrorNT (internal/child_process.js:376:16)
    at _combinedTickCallback (internal/process/next_tick.js:80:11)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
  code: 'ENOENT',
  errno: 'ENOENT',
  syscall: 'spawn ffprobe',
  path: 'ffprobe',
  spawnargs:
   [ '-v',
     'error',
     '-show_format',
     '-show_streams',
     '/tmp/SampleVideo_1280x720_1mb.mp4' ],
  stdout: '',
  stderr: '',
  failed: true,
  signal: null,
  cmd: 'ffprobe -v error -show_format -show_streams /tmp/SampleVideo_1280x720_1mb.mp4',
  timedOut: false,
  killed: false }
Finally, I created a stream using fs and passed that, and it gave me a "No duration found!" error:
{ AssertionError: No duration found!
    at ffprobe.then (/user_code/node_modules/get-video-duration/index.js:34:3)
    at process._tickDomainCallback (internal/process/next_tick.js:135:7)
  name: 'AssertionError',
  actual: null,
  expected: true,
  operator: '==',
  message: 'No duration found!',
  generatedMessage: false }
My cloud function code:
exports.recordUploadedFile = functions.storage.object().onFinalize(object => {
    let fileType = object.contentType;
    if (fileType.startsWith("image/") || fileType.startsWith("video/")) {
        let dir = object.name.split("/");
        let name = dir.pop();
        let fileID = dir.pop();
        let uid = dir.pop();
        return admin
            .storage()
            .bucket()
            .file(object.name)
            .getSignedUrl({
                action: "read",
                expires: "03-09-2491"
            })
            .then(urls => {
                let file = {
                    name: name,
                    link: urls[0],
                    type: fileType,
                    duration: 0
                }
                if (fileType.startsWith("video/")) {
                    const tempFilePath = path.join(os.tmpdir(), name);
                    return admin.storage().bucket().file(object.name).download({
                        destination: tempFilePath
                    }).then(() => {
                        const stream = fs.createReadStream(tempFilePath);
                        return getDuration(stream).then(duration => {
                            console.log(duration);
                            file.duration = duration;
                            return setFile(file, uid, fileID);
                        }).catch(error => {
                            console.log(error);
                        });
                    });
                } else {
                    return setFile(file, uid, fileID);
                }
            });
    } else {
        return admin.storage().bucket().file(object.name).delete();
    }
});
I tried multiple video files of multiple sizes, and none of them work.
If there is a better way to get the video duration, I would love to know it too.
Thank you.
Try using the fluent-ffmpeg library: https://github.com/fluent-ffmpeg/node-fluent-ffmpeg
var ffmpeg = require('fluent-ffmpeg');
ffmpeg.ffprobe(tempFilePath, function(err, metadata) {
    //console.dir(metadata); // all metadata
    console.log(metadata.format.duration);
});
I ended up using faruk's suggested library, fluent-ffmpeg, but to get it to work on Firebase you need to do the following:
You need to use bluebird to "promisify" fluent-ffmpeg like this: const ffprobe = Promise.promisify(require("fluent-ffmpeg").ffprobe);
You need to install the static binaries of both ffmpeg and ffprobe, so include them in your package: npm i --save @ffmpeg-installer/ffmpeg @ffprobe-installer/ffprobe
Lastly, set the paths:

const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffprobePath = require("@ffprobe-installer/ffprobe").path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);
Here's some tested and working code:
const os = require('os');
const path = require('path');
const gcs = require('@google-cloud/storage')();
var Promise = require("bluebird"); // bluebird Promise, as described above
var ffmpeg = require("fluent-ffmpeg");
var ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
var ffprobePath = require("@ffprobe-installer/ffprobe").path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);

// Inside the storage trigger, where `object` is available:
const filePath = object.name;
const fileBucket = object.bucket;
const fileName = filePath.split('/').pop();
const tempFilePath = path.join(os.tmpdir(), fileName);
const bucket = gcs.bucket(fileBucket);

// Wrap the download and probe in a Promise so resolve/reject are defined.
return new Promise(function (resolve, reject) {
    bucket.file(filePath).download({
        destination: tempFilePath,
        validation: false
    }).then(function () {
        ffmpeg.ffprobe(tempFilePath, function (err, metadata) {
            if (err) {
                reject(err);
            } else if (metadata) {
                console.log(metadata.format.duration);
                console.log(metadata.streams[0].width);
                console.log(metadata.streams[0].height);
                console.log(metadata);
                resolve(metadata);
            } else {
                reject();
            }
        });
    }).catch(function (error) {
        console.error(error);
        reject(error);
    });
});
From package.json:
"#ffmpeg-installer/ffmpeg": "^1.0.17",
"#ffprobe-installer/ffprobe": "^1.0.9",
"#google-cloud/storage": "^1.1.1",
"bluebird": "^3.5.3",
"fluent-ffmpeg": "^2.1.2"

cwd of node child process can't access subfolder

I have problems declaring the path for the current working directory of my child process. I want to execute some R scripts and save the results in a folder, but it does not work.
This is the working code in my server.js:
app.post('/execScript', function (req, res) {
    var childProcess = require('child_process');
    var project = req.body.project;
    var script = req.body.script;
    childProcess.exec('Rscript ../app/projects/' + project + '/Scripts/' + script, { cwd: '../app/' }, (err) => {
        if (err) {
            console.error(err);
        }
    })
});
Note that this cwd saves all results in the /app folder.
When I try the following, it stops working and gives me an error:
app.post('/execScript', function (req, res) {
    var childProcess = require('child_process');
    var project = req.body.project;
    var script = req.body.script;
    childProcess.exec('Rscript ../app/projects/' + project + '/Scripts/' + script, { cwd: '../app/test/' }, (err) => {
        if (err) {
            console.error(err);
        }
    })
});
I basically want to choose a subfolder of /app and it doesn't work.
I get the following error:
{ Error: Command failed: Rscript ../app/projects/das_ist_ein_Test/Scripts/test.R
    at ChildProcess.exithandler (child_process.js:211:12)
    at emitTwo (events.js:106:13)
    at ChildProcess.emit (events.js:191:7)
    at maybeClose (internal/child_process.js:885:16)
    at Socket.<anonymous> (internal/child_process.js:334:11)
    at emitOne (events.js:96:13)
    at Socket.emit (events.js:188:7)
    at Pipe._handle.close [as _onclose] (net.js:501:12)
  killed: false,
  code: 2,
  signal: null,
  cmd: 'Rscript ../app/projects/das_ist_ein_Test/Scripts/test.R' }
Why is that?
Any help would be appreciated!
EDIT:
I made sure that the subfolder exists and that I wrote it correctly.
I got it.
The error occurs because of the way child processes work in Node: when given a cwd, the process starts in that directory, and any relative path in the command is then resolved from that cwd, so the path to the file being executed has to be adjusted accordingly.
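As a sketch of what that means here (the directory names are from the question; the adjusted relative path is an assumption about the layout): with cwd set to '../app/test/', '..' now points at app/, so the script path must be rewritten relative to test/:

var childProcess = require('child_process');

// Hypothetical fix: with cwd '../app/test/', relative paths resolve from test/,
// so a single '../' reaches app/ and the path climbs into projects/ from there.
childProcess.exec('Rscript ../projects/' + project + '/Scripts/' + script,
    { cwd: '../app/test/' }, (err) => {
        if (err) {
            console.error(err);
        }
    });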

NodeJs Error: spawn C:\Windows\system32\cmd.exe; ENOENT

This is my script:
var exec = require('child_process').exec;
exec('dir', function(error, stdout, stderr) { // 'dir' is just an example
    if (error) {
        console.error(`exec error: ${error}`);
        return;
    }
    console.log(`stdout: ${stdout}`);
    console.log(`stderr: ${stderr}`);
});
And in the console I get:

exec error: Error: spawn C:\Windows\system32\cmd.exe; ENOENT

Can someone help me?
This can also be caused if you are passing in the options parameter (ExecOptions), specifically 'cwd', and the path you provide is invalid, e.g.:

cp.exec(<path_to_executable>, {
    cwd: <path_to_desired_working_dir>
}, (err, stdout, stderr) => {
    //......
})

If the cwd is not valid, the callback will be called with err equal to:

Error: spawn C:\Windows\system32\cmd.exe ENOENT
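A small guard (a sketch, with a hypothetical working directory) makes this failure mode obvious before spawning:

const fs = require('fs');
const cp = require('child_process');

const cwd = 'C:\\some\\working\\dir'; // hypothetical path to validate

// Fail with a clear message instead of a cryptic ENOENT from cmd.exe.
if (!fs.existsSync(cwd)) {
    throw new Error(`cwd does not exist: ${cwd}`);
}

cp.exec('dir', { cwd: cwd }, (err, stdout, stderr) => {
    if (err) {
        console.error(err);
        return;
    }
    console.log(stdout);
});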
I managed to resolve the issue; the problem was a semicolon (;) at the end of the ComSpec path C:\Windows\System32\cmd.exe, which has to be removed.
My Computer > Properties > Advanced System Settings > Environment Variables > System Variables
Set the value to:

ComSpec C:\Windows\System32\cmd.exe
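To confirm which shell Node will actually spawn, child_process on Windows reads it from the ComSpec environment variable, so you can inspect it from Node directly:

// Quick diagnostic: the shell path child_process uses on Windows.
console.log(process.env.ComSpec);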
The problem for me was that my solution directory was on a different drive than Windows. Creating my solution on my C drive solved the issue.
Increasing the maxBuffer solved the problem for me:
const child_process = require('child_process');

var command = 'git ls-files . --ignored --exclude-standard --others';
child_process.execSync(command, {
    maxBuffer: 1024 ** 6,
});
