I'm trying to execute a terminal command in a pod to get the content of a file.
It works fine locally:
const Client = require('kubernetes-client').Client;
const Config = require('kubernetes-client/backends/request').config;
const Request = require('kubernetes-client/backends/request');
const config = Config.fromKubeconfig();
const client = new Client({ config: config, version: '1.13' });
const podResponse = await client.api.v1.namespaces(config.namespace).pods(<pod name>).exec.get({
  qs: {
    command: ['cat', 'README.md'],
    container: <container name>,
    stdout: true,
    stderr: true,
  },
});
console.log(podResponse.body);
When I run my Node.js app on the cluster with the following changes:
const config = Request.config.getInCluster();
const backend = new Request(config);
const client = new Client({ backend });
it still works fine for most calls: I can get pods/services info (the Node.js app runs in the same cluster/namespace).
But .exec.get doesn't work. It fails with:
{ Error: Unexpected server response: 401
at ClientRequest.req.on (/usr/local/app/node_modules/ws/lib/websocket.js:579:7)
at ClientRequest.emit (events.js:182:13)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:556:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:109:17)
at TLSSocket.socketOnData (_http_client.js:442:20)
at TLSSocket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
at TLSSocket.Readable.push (_stream_readable.js:219:10)
at TLSWrap.onStreamRead (internal/stream_base_commons.js:94:17) messages: [] }
Again, I don't pass any auth info explicitly, and plain GET calls for pods/services details work fine; only the exec call fails. Judging by the stack trace, exec goes over a WebSocket (ws), so presumably the in-cluster credentials aren't being attached to the WebSocket upgrade request.
It seems the issue will be fixed in a new release:
kubernetes-client repo
So I'm wondering if there is another way to copy a file from a pod with JavaScript, i.e. an analog of:
kubectl cp <file-spec-src> <file-spec-dest>
This can be done with the official @kubernetes/client-node library, which ships a Cp class that mirrors kubectl cp:
const k8s = require('@kubernetes/client-node');

const kc = new k8s.KubeConfig();
kc.loadFromDefault();

// Copy a file from a local folder to the pod
const namespaceName = "default";
const podName = "logging-poc-standalone-app-694db6684c-6dsw4";
const containerName = "logging-poc-standalone-app";
const cp = new k8s.Cp(kc);
cp.cpToPod(namespaceName, podName, containerName, "log.log", "/var/log").catch(
  (err) => {
    console.log(err);
  }
);
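Since the question asks about copying a file out of a pod, note that the same class also exposes cpFromPod; a minimal sketch (pod/container names reused from above, and the local destination path /tmp is just a placeholder):
// Copy /var/log/log.log from the pod down to a local directory
cp.cpFromPod(namespaceName, podName, containerName, "/var/log/log.log", "/tmp")
  .catch((err) => {
    console.log(err);
  });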
For several months I had been able to deploy to my server with no problems using CircleCI. Ever since the outage two days ago (not sure if the two are related), my deployment script has been failing.
Here is the deployment script:
require('dotenv').config();
console.log("Starting upload")
var SftpUpload = require('sftp-upload'),
    fs = require('fs');

var options = {
    host: process.env.FTP_HOST,
    username: process.env.FTP_UN,
    password: process.env.FTP_PW,
    path: './dist',
    remoteDir: process.env.FTP_PATH,
    // excludedFolders: ['**/.git', 'node_modules'],
    // exclude: ['.gitignore', '.vscode/tasks.json'],
    // privateKey: fs.readFileSync('privateKey_rsa'),
    // passphrase: fs.readFileSync('privateKey_rsa.passphrase'),
    dryRun: false,
}
console.log(options);
var sftp = new SftpUpload(options);
console.log("sftp working ahead")
sftp.on('error', function(err) {
    console.log(options)
    console.log("ignoring error for now")
    //throw err;
})
.on('uploading', function(progress) {
    console.log(options);
    console.log('Uploading', progress.file);
    console.log(progress.percent+'% completed');
})
.on('completed', function() {
    console.log('Upload Completed');
})
.upload();
Here is the error:
Starting upload
{
host: '************************',
username: '*********',
password: '************',
path: './dist',
remoteDir: '*****************************************',
dryRun: false
}
sftp working ahead
buffer.js:330
throw new ERR_INVALID_ARG_TYPE(
^
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object. Received undefined
at Function.from (buffer.js:330:9)
at new Buffer (buffer.js:286:17)
at onNEWKEYS (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:2282:29)
at Parser.<anonymous> (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:123:5)
at Parser.emit (events.js:314:20)
at Parser.parsePacket (/home/circleci/repo/node_modules/ssh2/lib/Parser.js:468:12)
at Parser.execute (/home/circleci/repo/node_modules/ssh2/lib/Parser.js:249:14)
at Socket.<anonymous> (/home/circleci/repo/node_modules/ssh2/lib/Connection.js:523:18)
at Socket.emit (events.js:314:20)
at addChunk (_stream_readable.js:297:12) {
code: 'ERR_INVALID_ARG_TYPE'
}
npm ERR! code ELIFECYCLE
npm ERR! errno 1
npm ERR! nyb.nyb.nyb#1.0.0 deploy: `node deploy-sftp.js`
npm ERR! Exit status 1
npm ERR!
npm ERR! Failed at the nyb.nyb.nyb#1.0.0 deploy script.
npm ERR! This is probably not a problem with npm. There is likely additional logging output above.
npm ERR! A complete log of this run can be found in:
npm ERR! /home/circleci/.npm/_logs/2022-09-16T08_55_31_507Z-debug.log
Exited with code exit status 1
CircleCI received exit code 1
All of this happens after I install the packages and build the static files (with nuxt generate) that I'm SFTP'ing to the server. I'm confused about what is happening and how I might salvage my pipeline. Any suggestions are greatly appreciated, thank you!
While I did not find the cause of the above error (judging by the stack trace, the crash happens during the SSH key exchange inside the old ssh2 version that sftp-upload depends on), I found a workaround by switching to ssh2-sftp-client; documentation and source code are here.
The new deployment script is as follows:
let SftpClient = require('ssh2-sftp-client');
const path = require('path');
require('dotenv').config();

const config = {
  host: process.env.FTP_HOST,
  username: process.env.FTP_UN,
  password: process.env.FTP_PW,
  port: process.env.FTP_PORT || 22
};

async function main() {
  const client = new SftpClient('upload-test');
  const src = path.join(__dirname, './dist/');
  const dst = process.env.FTP_PATH;
  try {
    await client.connect(config);
    client.on('upload', info => {
      console.log(`Listener: Uploaded ${info.source}`);
    });
    let rslt = await client.uploadDir(src, dst);
    return rslt;
  } catch (err) {
    console.error(err);
  } finally {
    client.end();
  }
}

main()
  .then(msg => {
    console.log(msg);
  })
  .catch(err => {
    console.log(`main error: ${err.message}`);
  });
Hope this helps anyone encountering similar problems.
I'm calling a createFile() task from my Cypress test to create a file on a hosted SFTP server.
Currently, when I call it, I get the following error message:
The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (C:\Dev\SFTP_POC\cypress\plugins\index.js)
Error: put: Internal server error.
sftp://myHost.com#sftp.next.rec-test.com:2022/reports/
at fmtError (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\utils.js:55:18)
at WriteStream.<anonymous> (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\index.js:728:18)
at Object.onceWrapper (events.js:418:26)
at WriteStream.emit (events.js:323:22)
at Object.cb (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:3629:12)
at 101 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:2622:11)
at SFTP.push (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:278:11)
at CHANNEL_DATA (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:525:23)
at 94 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\handlers.misc.js:859:16)
at Protocol.onPayload (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:2025:10)
at AESGCMDecipherNative.decrypt (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\crypto.js:987:26)
at Protocol.parsePacket [as _parse] (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:1994:25)
at Protocol.parse (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:293:16)
at Socket.<anonymous> (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:713:21)
at Socket.emit (events.js:311:20)
at addChunk (_stream_readable.js:294:12)
at readableAddChunk (_stream_readable.js:275:11)
at Socket.Readable.push (_stream_readable.js:209:10)
at TCP.onStreamRead (internal/stream_base_commons.js:186:23)
Below is my current index.js code:
module.exports = (on, config) => {
  on('task', {
    createFile() {
      const fs = require('fs');
      let data = fs.createReadStream('C:/Dev/SFTP_POC/cypress/fixtures/example.json');
      let remote = 'sftp://myHost.com:2022/reports/';
      let Client = require('ssh2-sftp-client');
      let sftp = new Client();
      const config = {
        host: 'myHost.com',
        port: '2022',
        username: 'myUsername',
        password: 'myPassword'
      };
      sftp.connect(config).then(() => {
        sftp.put(data, remote);
      })
      .then(data => {
        console.log('Success');
      })
      .then(() => {
        sftp.end();
      })
      .catch(err => {
        console.log(err);
      })
      return null;
    }
  })
}
My remote variable points to a folder that exists on the server.
However, after the test runs, no new file has been added.
Can someone please tell me what I'm doing wrong here, and how to resolve it?
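Two things stand out in the task above, assuming ssh2-sftp-client's documented API: put() expects a remote file path (e.g. /reports/example.json) rather than an sftp:// URL, and the task returns null immediately instead of returning the promise chain, so Cypress does not wait for the upload to finish. A minimal sketch of a corrected task body (host and credentials are the placeholders from the question):
createFile() {
  const fs = require('fs');
  const Client = require('ssh2-sftp-client');
  const sftp = new Client();
  const config = {
    host: 'myHost.com',
    port: '2022',
    username: 'myUsername',
    password: 'myPassword'
  };
  // Returning the promise makes Cypress wait for the upload to complete.
  return sftp.connect(config)
    .then(() => sftp.put(
      fs.createReadStream('C:/Dev/SFTP_POC/cypress/fixtures/example.json'),
      '/reports/example.json' // remote file path, not a URL
    ))
    .then(() => sftp.end())
    .then(() => null); // a Cypress task must resolve to a serializable value
}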
I'm trying to run a TypeScript + Jest + mongodb-memory-server test environment, but I'm getting the following error at teardown:
events.js:189
throw err; // Unhandled 'error' event
^
Error [ERR_UNHANDLED_ERROR]: Unhandled error. ({ MongoNetworkError: read ECONNRESET
at Socket.<anonymous> (C:\Plataforma\Estrutura\Final\node_modules\mongodb\lib\core\connection\connection.js:321:24)
at Object.onceWrapper (events.js:286:20)
at Socket.emit (events.js:198:13)
at emitErrorNT (internal/streams/destroy.js:91:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:59:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
errno: 'ECONNRESET',
code: 'ECONNRESET',
syscall: 'read',
name: 'MongoNetworkError',
errorLabels: [ 'TransientTransactionError' ],
[Symbol(mongoErrorContextSymbol)]: {} })
at Connection.emit (events.js:187:17)
at Socket.<anonymous> (C:\Plataforma\Estrutura\Final\node_modules\mongodb\lib\core\connection\connection.js:321:10)
at Object.onceWrapper (events.js:286:20)
at Socket.emit (events.js:198:13)
at emitErrorNT (internal/streams/destroy.js:91:8)
at emitErrorAndCloseNT (internal/streams/destroy.js:59:3)
at process._tickCallback (internal/process/next_tick.js:63:19)
error Command failed with exit code 1.
Here are my config files:
Jest config:
//jest.config.js
module.exports = {
  globalSetup: './setup',
  globalTeardown: './teardown.js',
  testEnvironment: './mongo-environment.js',
  transform: {
    '^.+\\.jsx?$': 'babel-jest',
    '^.+\\.ts?$': 'ts-jest',
  },
  roots: ['<rootDir>/test'],
  moduleFileExtensions: ['ts', 'js', 'json', 'node'],
  preset: 'ts-jest',
};
Here's the setup config:
//setup.js
const path = require('path');
const fs = require('fs');
const MongodbMemoryServer = require('mongodb-memory-server');

const globalConfigPath = path.join(__dirname, 'globalConfig.json');

const mongod = new MongodbMemoryServer.default({
  instance: {
    dbName: 'StructureGenerator',
    debug: true,
  },
  binary: {
    version: '3.2.18',
  }
});

module.exports = async function () {
  const mongoConfig = {
    mongoDBName: 'StructureGenerator',
    mongoUri: await mongod.getConnectionString(),
  };
  // Write global config to disk because all tests run in different contexts.
  fs.writeFileSync(globalConfigPath, JSON.stringify(mongoConfig));
  console.log('Config is written');
  // Set reference to mongod in order to close the server during teardown.
  global.MONGOD = mongod;
  process.env.MONGO_URL = mongoConfig.mongoUri;
};
Here's the teardown config:
//teardown.js
module.exports = async function () {
  await global.MONGOD.stop();
};
Here's the mongo environment:
//mongo-environment.js
const NodeEnvironment = require('jest-environment-node');
const path = require('path');
const fs = require('fs');

const globalConfigPath = path.join(__dirname, 'globalConfig.json');

module.exports = class MongoEnvironment extends NodeEnvironment {
  constructor(config) {
    super(config);
  }

  async setup() {
    console.log('Setup MongoDB Test Environment');
    await super.setup();
    const globalConfig = JSON.parse(fs.readFileSync(globalConfigPath, 'utf-8'));
    this.global.MONGO_URI = globalConfig.mongoUri;
    this.global.MONGO_DB_NAME = globalConfig.mongoDBName;
  }

  async teardown() {
    console.log('Teardown MongoDB Test Environment');
    await super.teardown();
  }

  runScript(script) {
    return super.runScript(script);
  }
};
I tried to find help on Google but didn't manage to find any similar issues. I'm hoping someone can shed some light on this issue.
Thanks
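One plausible cause, given the TransientTransactionError on a socket read: a database connection opened by the tests is still alive when teardown.js calls MONGOD.stop(), so the driver sees the socket reset and emits an unhandled error. A sketch of a suite-level hook that closes connections first; this assumes the tests connect through mongoose, so adapt it to whichever client you actually use:
// e.g. test/close-connections.js, registered via Jest's setupFilesAfterEnv
// (setupTestFrameworkScriptFile on older Jest versions)
const mongoose = require('mongoose');

afterAll(async () => {
  // Disconnect before globalTeardown stops the in-memory server,
  // otherwise the driver can emit an unhandled ECONNRESET.
  await mongoose.disconnect();
});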
Everything is working fine here, but when I try to use an array to follow multiple IDs I get an error.
follow: ['1046961121792614402', '1000086816572112896']
It only returns an error when I add more than one ID. Any feedback? Thanks
Error Status 401.
at ClientRequest.emit (events.js:182:13)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:555:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:109:17)
at TLSSocket.socketOnData (_http_client.js:441:20)
at TLSSocket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
Whole code:
var Twitter = require('twitter');

var client = new Twitter({
  consumer_key: process.env.TWITTER_CONSUMER_KEY,
  consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
  access_token_key: process.env.TWITTER_ACCESS_TOKEN_KEY,
  access_token_secret: process.env.TWITTER_ACCESS_TOKEN_SECRET
});

/**
 * Stream statuses filtered by keyword
 * number of tweets per second depends on topic popularity
 **/
const stream = (bot) => {
  client.stream('statuses/filter', { follow: ['104696112179261440', '1231231'] },
    function (stream) {
      stream.on('data', function (tweet) {
        bot.guilds.get('432635411726346').channels.get('432635264580097').send('Hi');
      });
      stream.on('error', function (error) {
        console.log(error);
      });
    });
};

module.exports = {
  client, stream
};
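For what it's worth, Twitter's statuses/filter endpoint documents follow as a comma-separated string of user IDs rather than an array; passing an array through the twitter package can change how the request is serialized and signed, which would explain a 401 appearing only with multiple IDs. A sketch of the join:
// Join the IDs into the comma-separated string the endpoint documents.
const ids = ['1046961121792614402', '1000086816572112896'];
client.stream('statuses/filter', { follow: ids.join(',') }, function (stream) {
  stream.on('data', function (tweet) {
    console.log(tweet.id_str);
  });
  stream.on('error', function (error) {
    console.log(error);
  });
});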
I have a trigger-based Cloud Function that should find the duration of a video uploaded to Firebase Storage.
I tried using the following npm module: get-video-duration, which takes a URL, the file itself, or a stream.
Using the public URL of my file doesn't work; my catch logs:
{ Error: spawn ffprobe ENOENT
at exports._errnoException (util.js:1020:11)
at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
at onErrorNT (internal/child_process.js:376:16)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
code: 'ENOENT',
errno: 'ENOENT',
syscall: 'spawn ffprobe',
path: 'ffprobe',
spawnargs:
[ '-v',
'error',
'-show_format',
'-show_streams',
'https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu#adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D' ],
stdout: '',
stderr: '',
failed: true,
signal: null,
cmd: 'ffprobe -v error -show_format -show_streams https://storage.googleapis.com/adboard-dev.appspot.com/5HRuyysoMxe9Tb5vPLDbhEaHtkH2%2F-LAve5VogdAr4ZohU-DE%2FSampleVideo_1280x720_1mb.mp4?GoogleAccessId=firebase-adminsdk-3lthu#adboard-dev.iam.gserviceaccount.com&Expires=16447017600&Signature=cbhn%2BtY2%2FtvcRkvsFp1ywhHKiz%2FLfabfMk6HbD4TEGd%2Brf4njcMz1mQVf6H8nyulTBoRHIgC2uENFEPoEjtON6Um0Jb9P9jgikj6PdhS98m1sPDpTjMiFCTWk6ICjTI%2B%2BWuSVGgDX0tRuq3fADZABKaEcl3CEAI17DCVH98a40XttIDZqeqxIDu1iLi%2F8apQy44pAPJsmVR2dkYHk8Am8e7jIT1OnXG3adO34U3TNhsziPryIIpzo68QANENeieulvleic2BEi7KUhN1K8IxzJXxAfkt9RAFbdrwh%2FOpQ7zTGPRzTC3Vz2FnmKSXVtdKtmftg7BlEXrRr3D7ELJ53g%3D%3D',
timedOut: false,
killed: false }
Downloading the file and then passing it directly doesn't work either:
{ Error: spawn ffprobe ENOENT
at exports._errnoException (util.js:1020:11)
at Process.ChildProcess._handle.onexit (internal/child_process.js:197:32)
at onErrorNT (internal/child_process.js:376:16)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
code: 'ENOENT',
errno: 'ENOENT',
syscall: 'spawn ffprobe',
path: 'ffprobe',
spawnargs:
[ '-v',
'error',
'-show_format',
'-show_streams',
'/tmp/SampleVideo_1280x720_1mb.mp4' ],
stdout: '',
stderr: '',
failed: true,
signal: null,
cmd: 'ffprobe -v error -show_format -show_streams /tmp/SampleVideo_1280x720_1mb.mp4',
timedOut: false,
killed: false }
Finally, I created a stream using fs and passed that, and it gave me a No duration found! error:
{ AssertionError: No duration found!
at ffprobe.then (/user_code/node_modules/get-video-duration/index.js:34:3)
at process._tickDomainCallback (internal/process/next_tick.js:135:7)
name: 'AssertionError',
actual: null,
expected: true,
operator: '==',
message: 'No duration found!',
generatedMessage: false }
My cloud function code:
exports.recordUploadedFile = functions.storage.object().onFinalize(object => {
  let fileType = object.contentType;
  if (fileType.startsWith("image/") || fileType.startsWith("video/")) {
    let dir = object.name.split("/");
    let name = dir.pop();
    let fileID = dir.pop();
    let uid = dir.pop();
    return admin
      .storage()
      .bucket()
      .file(object.name)
      .getSignedUrl({
        action: "read",
        expires: "03-09-2491"
      })
      .then(urls => {
        let file = {
          name: name,
          link: urls[0],
          type: fileType,
          duration: 0
        }
        if (fileType.startsWith("video/")) {
          const tempFilePath = path.join(os.tmpdir(), name);
          return admin.storage().bucket().file(object.name).download({
            destination: tempFilePath
          }).then(() => {
            const stream = fs.createReadStream(tempFilePath);
            return getDuration(stream).then(duration => {
              console.log(duration);
              file.duration = duration;
              return setFile(file, uid, fileID);
            }).catch(error => {
              console.log(error);
            });
          });
        } else {
          return setFile(file, uid, fileID);
        }
      });
  } else {
    return admin.storage().bucket().file(object.name).delete();
  }
});
I tried multiple video files of various sizes, and none of them work.
If there is a better way to get the video duration, I would love to know it too.
Thank you.
Try using a library called fluent-ffmpeg: https://github.com/fluent-ffmpeg/node-fluent-ffmpeg
var ffmpeg = require('fluent-ffmpeg');

// tempFilePath points at the video file on local disk (e.g. downloaded to os.tmpdir())
ffmpeg.ffprobe(tempFilePath, function(err, metadata) {
  //console.dir(metadata); // all metadata
  console.log(metadata.format.duration);
});
I ended up using the library faruk suggested, fluent-ffmpeg, but to get it to work on Firebase you need to do the following:
You need to use bluebird to "promisify" fluent-ffmpeg's ffprobe like this: const ffprobe = Promise.promisify(require("fluent-ffmpeg").ffprobe);
You need to install the static binaries of both ffmpeg and ffprobe, so include them in your package: npm i --save @ffmpeg-installer/ffmpeg @ffprobe-installer/ffprobe
Lastly, set the paths:
const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffprobePath = require("@ffprobe-installer/ffprobe").path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);
Here's some tested and working code:
const os = require('os');
const path = require('path');
const gcs = require('@google-cloud/storage')();
const Promise = require('bluebird');
const ffmpeg = require('fluent-ffmpeg');
const ffprobe = Promise.promisify(ffmpeg.ffprobe);
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const ffprobePath = require('@ffprobe-installer/ffprobe').path;
ffmpeg.setFfmpegPath(ffmpegPath);
ffmpeg.setFfprobePath(ffprobePath);

// `object` comes from the storage trigger (see the question's onFinalize handler).
const filePath = object.name;
const fileBucket = object.bucket;
const fileName = filePath.split('/').pop();
const tempFilePath = path.join(os.tmpdir(), fileName);
const bucket = gcs.bucket(fileBucket);

// Download to the function's writable temp dir, then probe the local file.
return bucket.file(filePath).download({
  destination: tempFilePath,
  validation: false
}).then(function () {
  return ffprobe(tempFilePath);
}).then(function (metadata) {
  console.log(metadata.format.duration);
  console.log(metadata.streams[0].width);
  console.log(metadata.streams[0].height);
  console.log(metadata);
}).catch(function (error) {
  console.error(error);
});
From package.json:
"#ffmpeg-installer/ffmpeg": "^1.0.17",
"#ffprobe-installer/ffprobe": "^1.0.9",
"#google-cloud/storage": "^1.1.1",
"bluebird": "^3.5.3",
"fluent-ffmpeg": "^2.1.2"