Everything is working fine here, but when I try to pass an array with multiple IDs I get an error.
follow: ['1046961121792614402', '1000086816572112896']
This only returns an error when adding more than one ID. Any feedback? Thanks
Error Status 401.
at ClientRequest.emit (events.js:182:13)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:555:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:109:17)
at TLSSocket.socketOnData (_http_client.js:441:20)
at TLSSocket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
Whole code:
var Twitter = require('twitter');

var client = new Twitter({
  consumer_key: process.env.TWITTER_CONSUMER_KEY,
  consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
  access_token_key: process.env.TWITTER_ACCESS_TOKEN_KEY,
  access_token_secret: process.env.TWITTER_ACCESS_TOKEN_SECRET
});

/**
 * Stream statuses filtered by keyword
 * number of tweets per second depends on topic popularity
 **/
const stream = (bot) => {
  client.stream('statuses/filter', { follow: ['104696112179261440', '1231231'] },
    function (stream) {
      stream.on('data', function (tweet) {
        bot.guilds.get('432635411726346').channels.get('432635264580097').send('Hi');
      });
      stream.on('error', function (error) {
        console.log(error);
      });
    });
};

module.exports = {
  client, stream
};
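Not part of the original post, but for reference: Twitter's statuses/filter endpoint documents follow as a comma-separated list of user IDs rather than an array, so one thing to try is joining the IDs before handing them to client.stream. A minimal sketch reusing the client from the code above:

// Sketch: join the IDs into the comma-separated form statuses/filter expects
const ids = ['1046961121792614402', '1000086816572112896'];
client.stream('statuses/filter', { follow: ids.join(',') }, function (stream) {
  stream.on('data', function (tweet) {
    console.log(tweet.id_str);
  });
  stream.on('error', function (error) {
    console.error(error);
  });
});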
I am using a JSON mock server in my React.js application and I want to send a PATCH request multiple times to update my data. Is there any way I can batch those requests and make a single request?
I have tried it with axios.all, but when there is more than one request it generates the following error.
Some error occurred Error: listen EADDRINUSE: address already in use 127.0.0.1:5000
at Server.setupListenHandle [as _listen2] (node:net:1334:16)
at listenInCluster (node:net:1382:12)
at GetAddrInfoReqWrap.doListen [as callback] (node:net:1520:7)
at GetAddrInfoReqWrap.onlookup [as oncomplete] (node:dns:73:8) {
code: 'EADDRINUSE',
errno: -4091,
syscall: 'listen',
address: '127.0.0.1',
port: 5000
}
The following is the code:
const applyForJobs = () => {
  setApplyingForJobs(true);
  const jobs = [];
  jobIds.map((jobId) => {
    jobs.push(axios.patch(apiUrl + '/' + jobId, { applied: true }));
  })
  axios.all(jobs)
    .then((res) => {
      getJobData(true);
      setApplyingForJobs(false);
    })
    .catch((err) => {
      console.log(err);
    })
  setJobIds([]);
  setAllowMultipleSelection(false);
}
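For reference, a minimal sketch of the same handler using Promise.all (axios.all is essentially a thin wrapper around it), reusing jobIds, apiUrl, and the state setters from the snippet above; it only reorders the existing logic so the UI state is reset after the requests settle:

const applyForJobs = async () => {
  setApplyingForJobs(true);
  try {
    // One PATCH per selected job, sent in parallel and awaited together
    await Promise.all(
      jobIds.map((jobId) => axios.patch(apiUrl + '/' + jobId, { applied: true }))
    );
    getJobData(true);
  } catch (err) {
    console.log(err);
  } finally {
    setApplyingForJobs(false);
    setJobIds([]);
    setAllowMultipleSelection(false);
  }
};

(Also worth noting: the EADDRINUSE in the trace is raised by a listen call on 127.0.0.1:5000, i.e. by whatever starts the mock server, not by the outgoing axios requests.)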
Bit.io randomly terminates the connection with Node.js.
When I try to run the following code, my Node.js app works fine for a few minutes but then randomly crashes and gives the error listed at the bottom of this page. I have tried to fix this but I am stuck. I don't know if it's a problem with bit.io or with me. Thanks!!
Code:
const { Client } = require('pg');

const client = new Client({
  user: process.env.USER,
  host: 'db.bit.io',
  database: process.env.DATABASE,
  password: process.env.PASSWORD,
  port: 5432,
  ssl: true,
});

client.connect();

client.query('SELECT * FROM "HPI_AT_state" limit 10;', (err, res) => {
  console.table(res.rows);
})
Error:
node:events:491
throw er; // Unhandled 'error' event
^
Error: Connection terminated unexpectedly
at Connection.<anonymous> (node_modules/pg/lib/client.js:132:73)
at Object.onceWrapper (node:events:627:28)
at Connection.emit (node:events:513:28)
at TLSSocket.<anonymous> (node_modules/pg/lib/connection.js:107:12)
at TLSSocket.emit (node:events:525:35)
at endReadableNT (node:internal/streams/readable:1359:12)
at process.processTicksAndRejections (node:internal/process/task_queues:82:21)
Emitted 'error' event on Client instance at:
at Client._handleErrorEvent (node_modules/pg/lib/client.js:319:10)
at Connection.<anonymous> (node_modules/pg/lib/client.js:149:16)
at Object.onceWrapper (node:events:627:28)
[... lines matching original stack trace ...]
at process.processTicksAndRejections (node:internal/process/task_queues:82:21)
I have figured out the problem. It seems that the issue was that bit.io terminates the connection if it is unused for 60 seconds. It was fixed by using Pool instead of Client:
const { Pool } = require('pg');

const pool = new Pool({
  user: process.env.USER,
  host: 'db.bit.io',
  database: process.env.DATABASE,
  password: process.env.PASSWORD,
  port: 5432,
  ssl: true,
});

pool.query('SELECT * FROM "SaveData" limit 10;', (err, res) => {
  console.table(res.rows);
});
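A small optional addition on top of the Pool code above (not part of the original fix): node-postgres pools emit an 'error' event when an idle client is dropped by the server, and attaching a handler keeps a dropped connection from turning into an unhandled 'error' crash like the one in the trace above.

// Log errors from idle clients instead of letting them crash the process
pool.on('error', (err) => {
  console.error('Unexpected error on idle client', err);
});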
I am trying to create a file on a hosted SFTP server by calling the createFile() function from my Cypress test.
Currently, when I call it, I get the following error message:
The following error was thrown by a plugin. We stopped running your tests because a plugin crashed. Please check your plugins file (C:\Dev\SFTP_POC\cypress\plugins\index.js)
Error: put: Internal server error.
sftp://myHost.com#sftp.next.rec-test.com:2022/reports/
at fmtError (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\utils.js:55:18)
at WriteStream. (C:\Dev\SFTP_POC\node_modules\ssh2-sftp-client\src\index.js:728:18)
at Object.onceWrapper (events.js:418:26)
at WriteStream.emit (events.js:323:22)
at Object.cb (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:3629:12)
at 101 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:2622:11)
at SFTP.push (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\SFTP.js:278:11)
at CHANNEL_DATA (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:525:23)
at 94 (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\handlers.misc.js:859:16)
at Protocol.onPayload (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:2025:10)
at AESGCMDecipherNative.decrypt (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\crypto.js:987:26)
at Protocol.parsePacket [as _parse] (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:1994:25)
at Protocol.parse (C:\Dev\SFTP_POC\node_modules\ssh2\lib\protocol\Protocol.js:293:16)
at Socket. (C:\Dev\SFTP_POC\node_modules\ssh2\lib\client.js:713:21)
at Socket.emit (events.js:311:20)
at addChunk (_stream_readable.js:294:12)
at readableAddChunk (_stream_readable.js:275:11)
at Socket.Readable.push (_stream_readable.js:209:10)
at TCP.onStreamRead (internal/stream_base_commons.js:186:23)
Below is my current index.js code:
module.exports = (on, config) => {
  on('task', {
    createFile() {
      const fs = require('fs');
      let data = fs.createReadStream('C:/Dev/SFTP_POC/cypress/fixtures/example.json');
      let remote = 'sftp://myHost.com:2022/reports/';
      let Client = require('ssh2-sftp-client');
      let sftp = new Client();
      const config = {
        host: 'myHost.com',
        port: '2022',
        username: 'myUsername',
        password: 'myPassword'
      };
      sftp.connect(config).then(() => {
        sftp.put(data, remote);
      })
        .then(data => {
          console.log('Success');
        })
        .then(() => {
          sftp.end();
        })
        .catch(err => {
          console.log(err);
        })
      return null;
    }
  })
}
My remote variable is a folder that exists on the server.
However, after the test runs, no new file has been added.
Can someone please tell me what I'm doing wrong here, and how to resolve it?
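A sketch of one way to restructure the task, assuming the same host, credentials, and fixture path as above: give put a server-side path that includes the target file name (rather than the full sftp:// URL), return the put promise so the chain actually waits for the upload, and return the whole chain from the task so Cypress waits for it as well.

module.exports = (on, config) => {
  on('task', {
    createFile() {
      const fs = require('fs');
      const Client = require('ssh2-sftp-client');
      const sftp = new Client();
      const data = fs.createReadStream('C:/Dev/SFTP_POC/cypress/fixtures/example.json');
      // Path on the SFTP server, including the name of the file to create
      const remote = '/reports/example.json';
      return sftp
        .connect({ host: 'myHost.com', port: 2022, username: 'myUsername', password: 'myPassword' })
        .then(() => sftp.put(data, remote))
        .then(() => console.log('Success'))
        .catch((err) => console.log(err))
        .finally(() => sftp.end())
        // Cypress tasks must resolve to a value; null signals "done"
        .then(() => null);
    }
  });
};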
I'm trying to execute a terminal command in a pod to get a file's content.
It works fine locally:
const Client = require('kubernetes-client').Client;
const Config = require('kubernetes-client/backends/request').config;
const Request = require('kubernetes-client/backends/request');

const config = Config.fromKubeconfig();
const client = new Client({ config: config, version: '1.13' });

const podResponse = await client.api.v1.namespaces(config.namespace).pods(<pod name>).exec.get({
  qs: {
    command: ['cat', 'README.md'],
    container: <container name>,
    stdout: true,
    stderr: true,
  },
});

console.log(podResponse.body);
When I run my Node.js app on the cluster with the following changes:
const config = Request.config.getInCluster();
const backend = new Request(config);
const client = new Client({ backend });
it still works fine. I can get pods/services info (the Node.js app runs in the same cluster/namespace).
But .exec.get doesn't work. It fails with:
{ Error: Unexpected server response: 401
at ClientRequest.req.on (/usr/local/app/node_modules/ws/lib/websocket.js:579:7)
at ClientRequest.emit (events.js:182:13)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:556:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:109:17)
at TLSSocket.socketOnData (_http_client.js:442:20)
at TLSSocket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
at readableAddChunk (_stream_readable.js:264:11)
at TLSSocket.Readable.push (_stream_readable.js:219:10)
at TLSWrap.onStreamRead (internal/stream_base_commons.js:94:17) messages: [] }
Again, I don't need to pass any auth info; getting pods/services details works fine.
It seems the issue will be fixed in a new release (see the kubernetes-client repo).
So I'm wondering if there is another way to copy a file from a pod with JavaScript, i.e. an analog of:
kubectl cp <file-spec-src> <file-spec-dest>
Using the @kubernetes/client-node library, the Cp helper can copy a local file into the pod:
const k8s = require('@kubernetes/client-node');

const kc = new k8s.KubeConfig();
kc.loadFromDefault();

// Copy a file from the local folder into the pod
const namespaceName = "default";
const podName = "logging-poc-standalone-app-694db6684c-6dsw4";
const containerName = "logging-poc-standalone-app";

const cp = new k8s.Cp(kc);
cp.cpToPod(namespaceName, podName, containerName, "log.log", "/var/log").catch(
  (err) => {
    console.log(err);
  }
);
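For the original direction (pulling a file out of the pod), the same Cp helper also exposes cpFromPod. A minimal sketch reusing the names above; the /var/log/log.log source path and the local target directory are only illustrative:

// Copy /var/log/log.log from the container into the current local directory
cp.cpFromPod(namespaceName, podName, containerName, "/var/log/log.log", ".").catch(
  (err) => {
    console.log(err);
  }
);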
settings.js
exports.dbConfig = {
  user: "gpolat",
  password: "alphalpha",
  server: "DESKTOP-LK9JDJJ",
  database: "mydemo",
  port: 1433
};
exports.webPort = 9000;
db.js
var sqlDb = require("mssql");
var settings = require("../settings");

exports.executeSql = function (sql, callback) {
  var conn = new sqlDb.Connection(settings.dbConfig);
  conn.connect()
    .then(function () {
      var req = new sqlDb.Request(conn);
      req.query(sql)
        .then(function (recordset) {
          callback(recordset);
        })
        .catch(function (err) {
          console.log(err);
          callback(null, err);
        });
    })
    .catch(function (err) {
      console.log(err);
      callback(null, err);
    });
};
Console Error:
Debugger listening on 127.0.0.1:5858
Started Listening at: 9000
{ ConnectionError: Failed to connect to DESKTOP-LK9JDJJ:1433 - connect ECONNREFUSED 192.168.99.1:1433
at Connection.<anonymous> (C:\Users\Gokhan\documents\visual studio 2015\Projects\SampleREST\SampleREST\node_modules\mssql\lib\tedious.js:378:25)
at Object.onceWrapper (events.js:290:19)
at emitOne (events.js:96:13)
at Connection.emit (events.js:188:7)
at Connection.socketError (C:\Users\Gokhan\documents\visual studio 2015\Projects\SampleREST\SampleREST\node_modules\tedious\lib\connection.js:531:14)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at emitErrorNT (net.js:1281:8)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickCallback (internal/process/next_tick.js:98:9)
name: 'ConnectionError',
message: 'Failed to connect to DESKTOP-LK9JDJJ:1433 - connect ECONNREFUSED 192.168.99.1:1433',
code: 'ESOCKET' }
I get this error while creating a REST API with SQL Server.
"500: Internal Error. Details: ConnectionError: Failed to connect to DESKTOP-LK9JDJJ:1433 - connect ECONNREFUSED 192.168.99.1:1433"