chrome.tabs not giving output in browser console - javascript

I am working in the browser's console and I want to fetch the details of all the tabs that are active at the moment. What I am doing is opening the browser's console and typing this:
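(The snippet itself did not survive the post; judging by the error below, it was presumably a chrome.tabs.query call along these lines:)

// reconstructed from the error message, not from the original post
chrome.tabs.query({ active: true }, function (tabs) {
  console.log(tabs);
});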
but this is giving me the following error:
VM713:1 Uncaught TypeError: Cannot read property 'query' of undefined
at <anonymous>:1:13
Thanks
Edit:
var http = require("http"),
    io = require("socket.io"),
    fs = require("fs"),
    util = require("util");

var backlog_size = 2000;
var filename = process.argv[2];
if (!filename) return util.puts("Usage: node <server.js> <filename>");

var linesCount = 0;

// -- Node.js HTTP Server ----------------------------------------------------------
server = http.createServer(function (req, res) {
  console.log(req.url);
  filePath = req.url;
  if (filePath == "/") {
    filePath = './index.html';
    fs.readFile(filePath, function (err, data) {
      if (err) {
        res.writeHead(500);
        return res.end('Error loading ' + filePath);
      }
      res.writeHead(200);
      res.end(data);
    });
  } else {
    if (filePath == "/client") {
      filePath = './client.html';
      fs.readFile(filePath, function (err, data) {
        if (err) {
          res.writeHead(500);
          return res.end('Error loading ' + filePath);
        }
        res.writeHead(200);
        res.end(data);
      });
    }
  }
});
server.listen(8000, "0.0.0.0");

var fsTimeout;
var textToSend = "";

// -- Setup Socket.IO ---------------------------------------------------------
var socket = io(server, {
  cors: {
    origin: "*",
  },
});

socket.on("connection", function (client) {
  fs.watchFile(filename, function (curr, prev) {
    console.log("file changed");
    if (prev.size > curr.size) return { clear: true };
    if (!fsTimeout) {
      if (prev.ctime.getTime() != curr.ctime.getTime()) {
        console.log("file changed");
        var stream = fs.createReadStream(filename, {
          start: prev.size,
          end: curr.size,
        });
        stream.on("data", function (lines) {
          console.log(lines.toString());
          textToSend += lines.toString();
          textlen = textToSend.split("\n").length;
          console.log(textlen);
          if (textlen < 10) {
            console.log("me");
            socket.emit("tail", { lines: lines.toString("utf-8").split("\n") });
          } else {
            console.log("client");
            socket.emit("room", { lines: textToSend.toString("utf-8").split("\n") });
          }
        });
      }
      fsTimeout = setTimeout(function () { fsTimeout = null; }, 5000);
    }
  });
});
This is the edited code that I used; I got it working using Socket.IO.
I am working on a Node.js app where I am using Socket.IO to send data to multiple clients, but the socket is only able to send data to one client: if I open my webpage in two tabs it does not work in both tabs, yet when I open just one tab it transmits the data fine. I don't know why. Can someone help? Here's my code:
server.js

The reason for this is that chrome.tabs is a Chrome extension API object; you cannot access it from Chrome's DevTools console on an ordinary page.
Tabs documentation
Chrome Extension Quick Start Guide
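For reference, a minimal sketch of a context where chrome.tabs.query does work: an extension background script, with the "tabs" permission declared in the manifest. The file names and the empty query filter here are illustrative, not from the original question.

// manifest.json (Manifest V3) would declare:
//   "permissions": ["tabs"],
//   "background": { "service_worker": "background.js" }

// background.js
chrome.tabs.query({}, function (tabs) {
  // logs details of every open tab to the extension's console,
  // not to the page's DevTools console
  tabs.forEach(function (tab) {
    console.log(tab.id, tab.title, tab.url);
  });
});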

Download zip file being sent by server on client side?

I have an API that downloads multiple files from AWS S3, creates a zip which is saved to disk, and sends that zip back to the client. The API works, but I have no idea how to handle the response / download the zip to disk on the client side.
This is my API:
reports.get('/downloadMultipleReports/:fileKeys', async (req, res) => {
  var s3 = new AWS.S3();
  var archiver = require('archiver');
  const { promisify } = require('util');
  var str_array = req.params.fileKeys.split(',');
  console.log('str_array: ', str_array);
  for (var i = 0; i < str_array.length; i++) {
    var filename = str_array[i].trim();
    var localFileName = './temp/' + filename.substring(filename.indexOf("/") + 1);
    console.log('FILE KEY >>>>>> : ', filename);
    const params = { Bucket: config.reportBucket, Key: filename };
    const data = await s3.getObject(params).promise();
    const writeFile = promisify(fs.writeFile);
    await writeFile(localFileName, data.Body);
  }
  // create a file to stream archive data to
  var output = fs.createWriteStream('reportFiles.zip');
  var archive = archiver('zip', {
    zlib: { level: 9 } // sets the compression level
  });
  // listen for all archive data to be written;
  // 'close' is fired only when a file descriptor is involved
  output.on('close', function () {
    console.log(archive.pointer() + ' total bytes');
    console.log('archiver has been finalized and the output file descriptor has closed.');
  });
  // 'end' fires when the data source is drained, no matter what the data source was.
  // It is not part of this library but comes from the Node.js Stream API.
  // See: https://nodejs.org/api/stream.html#stream_event_end
  output.on('end', function () {
    console.log('Data has been drained');
  });
  // good practice to catch warnings (i.e. stat failures and other non-blocking errors)
  archive.on('warning', function (err) {
    if (err.code === 'ENOENT') {
      // log warning
    } else {
      throw err;
    }
  });
  // good practice to catch this error explicitly
  archive.on('error', function (err) {
    throw err;
  });
  // stream the finished zip back to the client
  // ('close', 'end' or 'finish' may fire right after finalize(), so register beforehand)
  output.on('finish', () => {
    console.log('Ding! - Zip is done!');
    const zipFilePath = './reportFiles.zip';
    fs.exists(zipFilePath, function (exists) {
      if (exists) {
        res.writeHead(200, {
          "Content-Type": "application/octet-stream",
          "Content-Disposition": "attachment; filename=reportFiles.zip"
        });
        fs.createReadStream(zipFilePath).pipe(res);
      } else {
        res.writeHead(400, { "Content-Type": "text/plain" });
        res.end("ERROR File does not exist");
      }
    });
  });
  // pipe archive data to the file
  archive.pipe(output);
  // append files from a sub-directory, putting its contents at the root of the archive
  archive.directory('./temp', false);
  // finalize the archive (we are done appending files, but the streams still have to finish)
  archive.finalize();
  return;
});
And this is how I am calling the API / expecting to download the response:
downloadMultipleReports(){
  var fileKeysString = this.state.awsFileKeys.toString();
  var newFileKeys = fileKeysString.replace(/ /g, '%20').replace(/\//g, '%2F');
  fetch(config.api.urlFor('downloadMultipleReports', { fileKeys: newFileKeys }))
    .then((response) => response.body())
  this.closeModal();
}
How can I handle the response / download the zip to disk?
This is what ended up working for me:
Server side:
const zipFilePath = "./reportFiles.zip";
fs.exists(zipFilePath, function (exists) {
  if (exists) {
    res.writeHead(200, {
      "Content-Type": "application/zip",
      "Content-Disposition": "attachment; filename=reportFiles.zip"
    });
    fs.createReadStream(zipFilePath).pipe(res);
  } else {
    res.writeHead(400, { "Content-Type": "text/plain" });
    res.end("ERROR File does not exist");
  }
});
Client side:
downloadMultipleReports(){
  var fileKeysString = this.state.awsFileKeys.toString();
  var newFileKeys = fileKeysString.replace(/ /g, '%20').replace(/\//g, '%2F');
  fetch(config.api.urlFor('downloadMultipleReports', { fileKeys: newFileKeys }))
    .then((res) => { return res.blob(); })
    .then((blob) => {
      download(blob, 'reportFiles.zip', 'application/zip');
      this.setState({ isOpen: false });
    });
}
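Note that download() is not a browser built-in; it presumably comes from the downloadjs npm package, imported somewhere above along these lines:

// assumption: the download() helper is the downloadjs package
import download from 'downloadjs';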

ssh2 connect to multiple servers and get the output - nodejs

I am using the ssh2 Node.js module to connect to a UNIX application and run a script, and that works.
Now I want to connect to multiple servers one by one, get the output, and store it.
When I try using a for loop to pass the servers one by one from a JSON file as input to ssh2, the loop completes much faster than the block that is supposed to collect the output from the server.
This is also causing a handshake error.
Kindly help. Here is the code:
Here is the code
inc_cron.schedule("*/20 * * * * *", function (id) {
});
//inc_cron.js
var cronFunction = function (inputStr) {
  if (appNames['applications'].length > 0) {
    for (i = 0; i < appNames["applications"].length; i++) {
      getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path, function (err, data) {
        if (err) {
          logger.info("Error is in cronFunction = ", err);
        } else if (data) {
          output_data += data;
        }
        ssh.close_second();
      });
    }
  }
};
var getDataFromServer = function (Name, hostname, username, password, log_path, cb) {
  ssh.close_second();
  ssh.connect_second({
    host: hostname,
    username: username,
    password: password
  }, function (err) {
    if (err) {
      logger.error('Err: ', err);
    } else {
      ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function (err, data) {
        cb(err, data);
      });
    }
  });
};
//connect.js
SSHConnection.prototype.sftp = function (type, path, cb) {
  var self = this;
  var log_data = '';
  self.connection2.exec(path + ' ' + type, { pty: true }, function (err, stream) {
    if (err) {
      logger.log('SECOND :: exec error: ' + err);
    }
    stream.on('end', function () {
      self.connection2.end(); // close parent (and this) connection
    }).on('data', function (data) {
      logger.info(data.toString());
    });
  });
};
Without looking at your code in depth: be sure to handle the async issues with ssh2 correctly; use a promise factory.
One way to do this is to use async/await. For this you have to rewrite your getDataFromServer function to return a promise:
var getDataFromServer = function (Name, hostname, username, password, log_path) {
  return new Promise(function (resolve, reject) {
    ssh.close_second();
    ssh.connect_second({
      host: hostname,
      username: username,
      password: password
    }, function (err) {
      if (err) {
        reject(err);
      } else {
        ssh.sftp("", 'grep -o "ERROR" ' + log_path + '.log.' + yr + '-' + mnth + '-* | wc -l', function (err, data) {
          if (err) {
            reject(err);
          } else {
            resolve(data);
          }
        });
      }
    });
  });
};
Now you can rewrite your cron function to be an async function:
var cronFunction = async function (inputStr) {
  if (appNames['applications'].length > 0) {
    for (i = 0; i < appNames["applications"].length; i++) {
      try {
        output_data += await getDataFromServer(appNames["applications"][i].Name, appNames["applications"][i].hostname, appNames["applications"][i].username, appNames["applications"][i].password, appNames["applications"][i].log_path);
      } catch (err) {
        logger.info("Error is in cronFunction = ", err);
      }
      ssh.close_second();
    }
  }
};
async/await enables you to write asynchronous code in a synchronous coding style.
However, async/await is currently (Node 7.x) hidden behind a flag (--harmony-async-await). This feature will be enabled by default in the upcoming Node release (8.0.0) in April 2017.
So, for now, you have to start your app with:
node --harmony-async-await yourapp.js
P.S.: This code is currently untested and most probably contains bugs, but you get the idea.

Uploading files using Skipper with Sails.js v0.10 - how to retrieve new file name

I am upgrading to Sails.js version 0.10 and now need to use Skipper to manage my file uploads.
When I upload a file I generate a new name for it using a UUID and save it in the public/files/ folder (this will change once I've got this all working, but it's fine for testing right now).
I save the original name, and the uploaded name + path, into a Mongo database.
This was all quite straightforward under Sails v0.9.x, but using Skipper I can't figure out how to read the new file name and path. (Obviously if I could read the name I could construct the path, so it's really only the name I need.)
My controller looks like this:
var uuid = require('node-uuid'),
    path = require('path'),
    blobAdapter = require('skipper-disk');

module.exports = {
  upload: function (req, res) {
    var receiver = blobAdapter().receive({
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function (file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          }
        }),
        results = [];
    req.file('docs').upload(receiver, function (err, files) {
      if (err) return res.serverError(err);
      async.forEach(files, function (file, next) {
        Document.create({
          name: file.filename,
          size: file.size,
          localName: // ***** how do I get the `saveAs()` value from the uploaded file *****,
          path: // *** and likewise how do I get the path ******
        }).exec(function (err, savedFile) {
          if (err) {
            next(err);
          } else {
            results.push({
              id: savedFile.id,
              url: '/files/' + savedFile.localName
            });
            next();
          }
        });
      }, function (err) {
        if (err) {
          sails.log.error('caught error', err);
          return res.serverError({ error: err });
        } else {
          return res.json({ files: results });
        }
      });
    });
  },
  _config: {}
};
How do I do this?
I've worked this out now and thought I'd share my solution for the benefit of others struggling with similar issues.
The solution was to not use skipper-disk at all but to write my own custom receiver. I've created this as a Sails Service object.
So, in the file api/services/Uploader.js:
// Uploader utilities and helper methods,
// designed to be relatively generic.
var fs = require('fs'),
    Writable = require('stream').Writable;

exports.documentReceiverStream = function (options) {
  var defaults = {
    dirname: '/dev/null',
    saveAs: function (file) {
      return file.filename;
    },
    completed: function (file, done) {
      done();
    }
  };
  // I don't have access to jQuery here, so this is the simplest way I
  // could think of to merge the options.
  opts = defaults;
  if (options.dirname) opts.dirname = options.dirname;
  if (options.saveAs) opts.saveAs = options.saveAs;
  if (options.completed) opts.completed = options.completed;

  var documentReceiver = Writable({ objectMode: true });

  // This `_write` method is invoked each time a new file is received
  // from the Readable stream (Upstream) which is pumping filestreams
  // into this receiver. (filename === `file.filename`).
  documentReceiver._write = function onFile(file, encoding, done) {
    var newFilename = opts.saveAs(file),
        fileSavePath = opts.dirname + newFilename,
        outputs = fs.createWriteStream(fileSavePath, encoding);
    file.pipe(outputs);

    // Garbage-collect the bytes that were already written for this file
    // (called when a read or write error occurs).
    function gc(err) {
      sails.log.debug("Garbage collecting file '" + file.filename + "' located at '" + fileSavePath + "'");
      fs.unlink(fileSavePath, function (gcErr) {
        if (gcErr) {
          return done([err].concat([gcErr]));
        } else {
          return done(err);
        }
      });
    }

    file.on('error', function (err) {
      sails.log.error('READ error on file ' + file.filename, '::', err);
    });
    outputs.on('error', function failedToWriteFile(err) {
      sails.log.error('failed to write file', file.filename, 'with encoding', encoding, ': done =', done);
      gc(err);
    });
    outputs.on('finish', function successfullyWroteFile() {
      sails.log.debug("file uploaded");
      opts.completed({
        name: file.filename,
        size: file.size,
        localName: newFilename,
        path: fileSavePath
      }, done);
    });
  };

  return documentReceiver;
};
and then my controller just became (in api/controllers/DocumentController.js)
var uuid = require('node-uuid'),
    path = require('path');

module.exports = {
  upload: function (req, res) {
    var results = [],
        streamOptions = {
          dirname: sails.config.appPath + "/public/files/",
          saveAs: function (file) {
            var filename = file.filename,
                newName = uuid.v4() + path.extname(filename);
            return newName;
          },
          completed: function (fileData, next) {
            Document.create(fileData).exec(function (err, savedFile) {
              if (err) {
                next(err);
              } else {
                results.push({
                  id: savedFile.id,
                  url: '/files/' + savedFile.localName
                });
                next();
              }
            });
          }
        };
    req.file('docs').upload(Uploader.documentReceiverStream(streamOptions),
      function (err, files) {
        if (err) return res.serverError(err);
        res.json({
          message: files.length + ' file(s) uploaded successfully!',
          files: results
        });
      }
    );
  },
  _config: {}
};
I'm sure it can be improved further but this works perfectly for me.
The uploaded file object contains all the data you need:
req.file('fileTest').upload({
  // You can apply a file upload limit (in bytes)
  maxBytes: maxUpload,
  adapter: require('skipper-disk')
}, function whenDone(err, uploadedFiles) {
  if (err) {
    var error = { "status": 500, "error": err };
    res.status(500);
    return res.json(error);
  } else {
    for (var u in uploadedFiles) {
      // "fd" contains the actual file path (and name) of your file on disk
      fileOnDisk = uploadedFiles[u].fd;
      // I suggest you stringify the object to see what it contains; it might be useful to you
      console.log(JSON.stringify(uploadedFiles[u]));
    }
  }
});
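If, as in the original question, you only need the generated file name rather than the full path, it can be derived from fd with Node's path module (a small addition, not part of the answer above):

var path = require('path');

// inside the loop above: fd is the absolute path on disk,
// so the generated name is just its final segment
var localName = path.basename(uploadedFiles[u].fd);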

Upload images to twitter API from node.js

I'm attempting to post an image to the Twitter API, v1.1.
I've tried just about all the examples out there, and nothing seems to be able to post it,
including Posting images to twitter in Node.js using Oauth.
I'm using the oauth library mentioned there, and I also had jsOauth, which I thought I'd give a shot per https://gist.github.com/lukaszkorecki/1038408
Nothing has worked, and at this point I'm starting to lose hope on whether I can even do this.
function postStatusWithMedia(status, file) {
  var err = new Object();
  if (fs.existsSync(file) === false) {
    err.message = "File not found :(";
    parseTwitterError(err);
  } else {
    var oauth = OAuth(options = {
      "consumerKey": consumer_key,
      "consumerSecret": consumer_secret,
      "accessTokenKey": access_token,
      "accessTokenSecret": access_token_secret
    });
    callbacks = {
      onSuccess: function () {
        console.log('upload worked!');
      },
      onFailure: function () {
        console.log('upload failed!');
        console.dir(arguments);
      }
    },
    uploadData = {
      'status': status,
      'media': Base64.encode(fs.readFileSync(file))
    };
    oauth.post('https://api.twitter.com/1.1/statuses/update_with_media.json', uploadData, callbacks.onSuccess, callbacks.onFailure);
    return false;
  }
}
If it can't be done, can you please explain why?
Otherwise, anything that could point me in the right direction would be great.
// Assumes `status`, `file`, and the OAuth credentials
// (consumer_key, consumer_secret, access_token, access_token_secret)
// are defined in the surrounding scope.
var fs = require('fs');
var request = require('request');
var FormData = require('form-data');
var utf8 = require('utf8');

// Encode in UTF-8
status = utf8.encode(status);

var form = new FormData();
form.append('status', status);
form.append('media[]', fs.createReadStream(file));

// Twitter OAuth
form.getLength(function (err, length) {
  if (err) {
    return requestCallback(err);
  }
  var oauth = {
    consumer_key: consumer_key,
    consumer_secret: consumer_secret,
    token: access_token,
    token_secret: access_token_secret
  };
  var r = request.post({
    url: "https://api.twitter.com/1.1/statuses/update_with_media.json",
    oauth: oauth,
    host: "api.twitter.com",
    protocol: "https:"
  }, requestCallback);
  r._form = form;
  r.setHeader('content-length', length);
});

function requestCallback(err, res, body) {
  if (err) {
    throw err;
  } else {
    console.log("Tweet and Image uploaded successfully!");
  }
}
I ended up using request and form-data to manually construct a multipart/form-data request and send it with the status update; utf8 was for encoding the status into UTF-8, since not doing so caused issues with '<3' and other characters.
I have not tested the following code myself; it's from my colleague, but I'm sure it works.
Perhaps this will help.
//twitter_update_with_media.js
(function () {
  var fs, path, request, twitter_update_with_media;
  fs = require('fs');
  path = require('path');
  request = require('request');

  twitter_update_with_media = (function () {
    function twitter_update_with_media(auth_settings) {
      this.auth_settings = auth_settings;
      this.api_url = 'https://api.twitter.com/1.1/statuses/update_with_media.json';
    }

    twitter_update_with_media.prototype.post = function (status, imageUrl, callback) {
      var form, r;
      r = request.post(this.api_url, {
        oauth: this.auth_settings
      }, callback);
      form = r.form();
      form.append('status', status);
      return form.append('media[]', request(imageUrl));
    };

    return twitter_update_with_media;
  })();

  module.exports = twitter_update_with_media;
}).call(this);
Next file:
//upload_to_twitter.js
var tuwm = new twitter_update_with_media({
  consumer_key: TWITTER_OAUTH_KEY,
  consumer_secret: TWITTER_OAUTH_SECRET,
  token: access[0],
  token_secret: access[1]
});

media_picture.picture = imageURL;
if (media_picture.picture) {
  console.log('with media upload');
  request.head(media_picture.picture,
    function (error, response, body) {
      if (!error && response.statusCode == 200) {
        var image_size = response.headers['content-length'];
        if (image_size > 2000000) { // 2mb max upload limit
          console.log('greater than 2mb');
          sendMessageWithoutImage(err, req, res, next, twit, wallpost, access);
        } else {
          console.log('less than 2mb');
          console.log('twitter text', content);
          tuwm.post(content, media_picture.picture, function (err, response) {
            if (err) {
              console.log('error', err);
              return next(err);
            }
            error_parse = JSON.parse(response.body);
            console.log('with media response', response.body);
            if (error_parse.errors) {
              console.log('have errors', error_parse);
              res.json({
                status: 500,
                info: error_parse.errors[0].code + ' ' + error_parse.errors[0].message
              });
            } else {
              res.json({
                status: 200,
                info: "OK",
                id: response.id
              });
            }
          });
        }
      }
    });
}

Using Node.js to connect to a REST API

Is it sensible to use Node.js to write a standalone app that will connect two REST APIs?
One end will be a POS (point of sale) system.
The other will be a hosted eCommerce platform.
There will be a minimal interface for configuration of the service, nothing more.
Yes, Node.js is perfectly suited to making calls to external APIs. Just like everything in Node, however, the functions for making these calls are based around events, which means doing things like buffering response data as opposed to receiving a single completed response.
For example:
// get walking directions from central park to the empire state building
var http = require("http");
url = "http://maps.googleapis.com/maps/api/directions/json?origin=Central Park&destination=Empire State Building&sensor=false&mode=walking";

// get() is a simple wrapper for request()
// which sets the HTTP method to GET
var request = http.get(url, function (response) {
  // data is streamed in chunks from the server,
  // so we have to handle the "data" event
  var buffer = "",
      data,
      route;

  response.on("data", function (chunk) {
    buffer += chunk;
  });

  response.on("end", function (err) {
    // finished transferring data;
    // dump the raw data
    console.log(buffer);
    console.log("\n");
    data = JSON.parse(buffer);
    route = data.routes[0];

    // extract the distance and time
    console.log("Walking Distance: " + route.legs[0].distance.text);
    console.log("Time: " + route.legs[0].duration.text);
  });
});
It may make sense to find a simple wrapper library (or write your own) if you are going to be making a lot of these calls.
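For instance, a minimal sketch of such a wrapper (the helper name getJSON is made up for illustration) that buffers a response and parses it as JSON:

var http = require("http");

// hypothetical helper: buffer an HTTP GET response and parse it as JSON
function getJSON(url, callback) {
  http.get(url, function (response) {
    var buffer = "";
    response.on("data", function (chunk) {
      buffer += chunk;
    });
    response.on("end", function () {
      try {
        callback(null, JSON.parse(buffer));
      } catch (err) {
        callback(err);
      }
    });
  }).on("error", callback);
}

// usage:
// getJSON("http://example.com/api/data", function (err, data) { ... });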
Sure. The node.js API contains methods to make HTTP requests:
http.request
http.get
I assume the app you're writing is a web app. You might want to use a framework like Express to remove some of the grunt work (see also this question on node.js web frameworks).
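As a rough sketch of what that might look like (the route, port, and upstream URL here are arbitrary), an Express app that relays one REST call could be:

var express = require("express");
var http = require("http");

var app = express();

// hypothetical route: fetch something from an upstream REST API
// and relay the JSON body to the caller
app.get("/proxy", function (req, res) {
  http.get("http://example.com/api/items", function (upstream) {
    var buffer = "";
    upstream.on("data", function (chunk) { buffer += chunk; });
    upstream.on("end", function () {
      res.type("application/json").send(buffer);
    });
  }).on("error", function (err) {
    res.status(502).send(err.message);
  });
});

app.listen(3000);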
/* The sample GET API below covers:
   - a DB connection created in a class
   - a common function to execute the query
   - logging through the bunyan library */
const { APIResponse } = require('./../commonFun/utils');
const createlog = require('./../lib/createlog');
var obj = new DB();

// Test API
routes.get('/testapi', (req, res) => {
  res.status(201).json({ message: 'API microservices test' });
});

dbObj = new DB();
routes.get('/getStore', (req, res) => {
  try {
    // create DB instance
    const store_id = req.body.storeID;
    const promiseReturnwithResult = selectQueryData('tablename', whereField, dbObj.conn);
    promiseReturnwithResult.then((result) => {
      APIResponse(200, 'Data fetched successfully', result).then((result) => {
        res.send(result);
      });
    }).catch((err) => { console.log(err); throw err; });
  } catch (err) {
    console.log('Exception caught in getuser API', err);
    const e = new Error();
    if (err.errors && err.errors.length > 0) {
      e.Error = 'Exception caught in getuser API';
      e.message = err.errors[0].message;
      e.code = 500;
      res.status(404).send(APIResponse(e.code, e.message, e.Error));
      createlog.writeErrorInLog(err);
    }
  }
});
// create connection
"use strict"
const mysql = require("mysql");

class DB {
  constructor() {
    this.conn = mysql.createConnection({
      host: 'localhost',
      user: 'root',
      password: 'pass',
      database: 'db_name'
    });
  }

  connect() {
    this.conn.connect(function (err) {
      if (err) {
        console.error("error connecting: " + err.stack);
        return;
      }
      console.log("connected to DBB");
    });
  }
  // End class
}
module.exports = DB
// queryTransaction.js file
selectQueryData = (table, where, db_conn) => {
  return new Promise(function (resolve, reject) {
    try {
      db_conn.query(`SELECT * FROM ${table} WHERE id = ${where}`, function (err, result) {
        if (err) {
          reject(err);
        } else {
          resolve(result);
        }
      });
    } catch (err) {
      console.log(err);
    }
  });
}
module.exports = { selectQueryData };
// utils.js file
APIResponse = async (status, msg, data = '', error = null) => {
  try {
    if (status) {
      return { statusCode: status, message: msg, PayLoad: data, error: error }
    }
  } catch (err) {
    console.log('Exception caught in getuser API', err);
  }
}
module.exports = {
  logsSetting: {
    name: "USER-API",
    streams: [
      {
        level: 'error',
        path: '' // log ERROR and above to a file
      }
    ],
  },
  APIResponse
}
// createlogs.js file
var bunyan = require('bunyan');
const dateFormat = require('dateformat');
const { logsSetting } = require('./../commonFun/utils');

module.exports.writeErrorInLog = (customError) => {
  let logConfig = { ...logsSetting };
  console.log('reached in writeErrorInLog', customError);
  const currentDate = dateFormat(new Date(), 'yyyy-mm-dd');
  const path = logConfig.streams[0].path = `${__dirname}/../log/${currentDate}error.log`;
  const log = bunyan.createLogger(logConfig);
  log.error(customError);
}
An easier and more convenient option is a library like Unirest; unirest is an npm package that is very easy to use, just like this:
app.get('/any-route', function (req, res) {
  unirest.get("https://rest.url.to.consume/param1/paramN")
    .header("Any-Key", "XXXXXXXXXXXXXXXXXX")
    .header("Accept", "text/plain")
    .end(function (result) {
      res.render('name-of-the-page-according-to-your-engine', {
        layout: 'some-layout-if-you-want',
        markup: result.body['any-property'], // pick whichever property you need
      });
    });
});
