Ember includedCommands: HTTPS request does not fetch - javascript

I am writing an Ember includedCommand for fetching and updating the app/index.html file. It uses the Node.js https and fs modules to replace the index file via a function called BuildIndexFile, and I am facing a weird issue.
When I run ember server --update-index, I can see that BuildIndexFile is called: the https request is made to the remote server, the file is downloaded, and fs.writeFileSync writes it to app/index.html.
But when I run ember update-index, which is an included command, I can see that BuildIndexFile is called and reaches console.log('Fetching index.html'), and I believe it calls https.request, but it exits from there. I have no idea why the call didn't go through. When I debugged with node --inspect-brk ./node_modules/.bin/ember update-index, I can see that https is available in the file, but the request never executes.
I am attaching my sample code, available as an in-repo addon at lib/hello/index.js:
/* eslint-env node */
'use strict';

const parseArgs = require('minimist');
const watchman = require('fb-watchman');

let client = new watchman.Client();
client.capabilityCheck({ optional: [], required: ['relative_root'] }, function (error, response) {
  if (error) {
    console.log(error);
  }
  console.log('Watchman', response);
});

const ServeCommand = require('ember-cli/lib/commands/serve');
const ARGS = parseArgs(process.argv.slice(2));
const fs = require('fs');
const https = require('https');

module.exports = {
  name: 'hello',

  isDevelopingAddon() {
    return true;
  },

  includedCommands: function() {
    var self = this;
    return {
      hello: ServeCommand.extend({
        name: 'hello',
        description: 'A test command that says hello',
        availableOptions: ServeCommand.prototype.availableOptions.concat([{
          name: 'updateindex',
          type: String
        }]),
        run: function(commandOptions, rawArgs) {
          console.log(commandOptions, rawArgs);
          if (commandOptions['updateindex']) {
            console.log('Update Index');
          }
          const sampleHelloPromise = sampleHello();
          const servePromise = this._super.run.apply(this, arguments);
          return Promise.all([sampleHelloPromise, servePromise]);
        }
      }),
      updateIndex: {
        name: 'update-index',
        description: 'Update Index File',
        availableOptions: [{
          name: 'index-file',
          type: String
        }],
        run: function(commandOptions, rawArgs) {
          BuildIndexFile(self.project.root, 'https://yahoo.com', {});
        }
      }
    };
  },

  preBuild: function(result) {
    let self = this;
    if (ARGS['update-index']) {
      BuildIndexFile(self.project.root, 'https://google.com', {})
        .then(function() {
          delete ARGS['update-index'];
        })
        .catch(function(e) {
          console.log(e);
        });
    }
  }
};

async function sampleHello() {
  return await new Promise(resolve => {
    setTimeout(() => resolve('hello'), 2000);
  });
}

const BuildIndexFile = (rootPath, target, headers) => {
  try {
    debugger;
    const indexFile = `${rootPath}/app/index.html`;
    let noIndexFile = !fs.existsSync(indexFile);
    return new Promise(function (resolve, reject) {
      let options = {
        hostname: target.replace(/^http(?:s):\/\//i, ''),
        port: 443,
        method: 'GET'
      };
      let dataContent = '';
      console.log('Fetching index.html');
      var request = https.request(options, function(response) {
        response.on('data', function(d) {
          dataContent += d;
        });
        response.on('end', function() {
          fs.writeFileSync(indexFile, dataContent);
          return resolve();
        });
      });
      request.on('error', function(e) {
        console.log(e);
        return reject(`Error: Creating Index File`);
      });
      request.end();
    });
  } catch (e) {
    throw e;
  }
};
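For comparison, a stripped-down standalone script that exercises the same request outside of ember-cli would look roughly like this (the hostname is hard-coded purely for illustration):
// standalone-check.js - illustration only: the same https.request flow as BuildIndexFile,
// without ember-cli or the Promise wrapper.
const https = require('https');

const options = {
  hostname: 'yahoo.com',
  port: 443,
  method: 'GET'
};

const request = https.request(options, function (response) {
  let dataContent = '';
  response.on('data', function (d) {
    dataContent += d;
  });
  response.on('end', function () {
    console.log(`Fetched ${dataContent.length} bytes`);
  });
});

request.on('error', function (e) {
  console.log(e);
});
request.end();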

Related

Not able to get Access Token from Google Api

I am new to GTM + GA. I am trying to display Google Analytics (GA4) reports on my webpage. I created an OAuth client ID in the Google Cloud console and also did the other settings there. Through JavaScript code I am trying to get an access token from the Google API, and I am getting the exception below.
After successful authentication I will integrate the GA reports with my web page. Below is my JavaScript code for getting the access token.
function main(propertyId = 'YOUR-GA4-PROPERTY-ID') {
  propertyId = '347415282';

  const { OAuth2Client } = require('google-auth-library');
  const { grpc } = require('google-gax');
  const http = require('http');
  const url = require('url');
  const open = require('open');
  const destroyer = require('server-destroy');

  const keys = require('./oauth2.keys.json');
  const SCOPES = ['https://www.googleapis.com/auth/analytics.readonly'];

  function getAnalyticsDataClient(authClient) {
    const sslCreds = grpc.credentials.createSsl();
    const credentials = grpc.credentials.combineChannelCredentials(
      sslCreds,
      grpc.credentials.createFromGoogleCredential(authClient));
    return new BetaAnalyticsDataClient({
      sslCreds: credentials,
    });
  }

  function getOAuth2Client() {
    return new Promise((resolve, reject) => {
      const oAuth2Client = new OAuth2Client(
        keys.web.client_id,
        keys.web.client_secret,
        'http://localhost:3000/oauth2callback');
      const authorizeUrl = oAuth2Client.generateAuthUrl({
        access_type: 'offline',
        scope: SCOPES.join(' '),
      });
      const server = http
        .createServer(async (req, res) => {
          try {
            if (req.url.indexOf('/oauth2callback') > -1) {
              const qs = new url.URL(req.url, 'http://localhost:3000').searchParams;
              const code = qs.get('code');
              console.log(`Code is ${code}`);
              res.end('Authentication successful! Please return to the console.');
              server.destroy();
              const r = await oAuth2Client.getToken(code);
              oAuth2Client.setCredentials(r.tokens);
              console.info('Tokens acquired.');
              resolve(oAuth2Client);
            }
          } catch (e) {
            reject(e);
          }
        })
        .listen(3000, () => {
          console.info(`Opening the browser with URL: ${authorizeUrl}`);
          open(authorizeUrl, { wait: false }).then(cp => cp.unref());
        });
      destroyer(server);
    });
  }

  async function runReport() {
    const oAuth2Client = await getOAuth2Client();
    const analyticsDataClient = getAnalyticsDataClient(oAuth2Client);
    const [response] = await analyticsDataClient.runReport({
      property: `properties/${propertyId}`,
      dateRanges: [{
        startDate: '2020-03-31',
        endDate: 'today',
      }],
      dimensions: [{
        name: 'city',
      }],
      metrics: [{
        name: 'activeUsers',
      }],
    });
    console.log('Report result:');
    response.rows.forEach(row => {
      console.log(row.dimensionValues[0], row.metricValues[0]);
    });
  }

  runReport();
}

process.on('unhandledRejection', err => {
  console.error(err.message);
  process.exitCode = 1;
});

main(...process.argv.slice(2));
Please let me know how to get rid of this issue.
Regards,
Prabhash
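One detail worth checking in the snippet above: getAnalyticsDataClient returns a new BetaAnalyticsDataClient, but that class is never required anywhere. In Google's GA4 Data API samples it comes from the @google-analytics/data package, so presumably a line like this is missing:
// Presumably missing from the snippet above; BetaAnalyticsDataClient is exported
// by the @google-analytics/data package in Google's GA4 Data API samples.
const { BetaAnalyticsDataClient } = require('@google-analytics/data');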

mock dynamodb partiql using jest

I didn't find a way to mock the new DynamoDB "PartiQL" feature used in a Lambda function.
With PartiQL we can use executeStatement to run SQL-like queries, which is one of the DynamoDB customizations.
But the configuration below returns an error:
UnrecognizedClientException: The security token included in the
request is invalid
which means that the jest mock configuration is not correct.
This is the code to be tested
'use strict';

const AWS = require('aws-sdk');

var ddb = new AWS.DynamoDB({ apiVersion: '2012-08-10' });
const TABLE_NAME = process.env.TABLE_NAME;

async function queryTest() {
  let params = {
    Statement: `select * from ${TABLE_NAME}`
  };
  let statementResult = await ddb.executeStatement(params).promise();
  return statementResult.Items;
}

module.exports.get = async (event) => {
  var result = queryTest();
  if (!result) {
    return {
      statusCode: 200,
      body: JSON.stringify(result)
    };
  }
  else {
    return {
      statusCode: 400,
      body: JSON.stringify(result)
    };
  }
};
and the testing script:
'use strict';

const AWS = require('aws-sdk');
const jestPlugin = require('serverless-jest-plugin');
const lambdaWrapper = jestPlugin.lambdaWrapper;
const mod = require('./handler');

function mock_aws() {
  let mockDynamoDBClient = {
    executeStatement: {
      promise: jest.fn()
    },
    promise: jest.fn(),
  };
  jest.mock("aws-sdk", () => {
    const config = {
      update: jest.fn(),
    };
    return {
      DynamoDB: jest.fn().mockImplementation(() => {
        return {
          executeStatement: () => mockDynamoDBClient.executeStatement,
        };
      }),
      config: config
    };
  });
  return mockDynamoDBClient;
}

describe('test', () => {
  const OLD_ENV = process.env;
  var mDynamoDb;
  var wrapped;

  beforeAll(async () => {
    jest.resetModules();
    wrapped = await lambdaWrapper.wrap(mod, { handler: 'get' });
    process.env = { ...OLD_ENV };
    process.env.TABLE_NAME = "TEST";
    mDynamoDb = mock_aws();
  });

  test('implement tests here', async (done) => {
    const mResult = [{ 'TEST': 1 }];
    const response = await wrapped.run({});
    expect(response).toEqual({
      "statusCode": 200,
      "body": JSON.stringify(mResult)
    });
    done();
  });
});
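For context, a pattern that is often used to stub an aws-sdk v2 method consumed through .promise() looks roughly like the sketch below; the resolved value is a placeholder and it has not been verified against this exact handler:
// Sketch only: jest.mock() factories declared at the top level of the test file are
// hoisted, so 'aws-sdk' is already mocked by the time ./handler requires it.
jest.mock('aws-sdk', () => {
  const mPromise = jest.fn().mockResolvedValue({ Items: [{ TEST: 1 }] });
  const mExecuteStatement = jest.fn(() => ({ promise: mPromise }));
  return {
    DynamoDB: jest.fn(() => ({ executeStatement: mExecuteStatement })),
    config: { update: jest.fn() },
  };
});

const mod = require('./handler'); // the handler now receives the mocked client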

How to read multiple JSON files using fs and bulk request

I'm using the Elasticsearch search engine with my React app. I was reading one file at the backend, as you see in the code, and it works perfectly, but now I want to read three different JSON files into three different indexes using the "fs" package and bulk requests. Can you please help me?
The code:
// Start reading the json file
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }

  // Build up a giant bulk request for elasticsearch.
  bulk_request = data.split("\n").reduce(function (bulk_request, line) {
    var obj, ncar;
    try {
      obj = JSON.parse(line);
    } catch (e) {
      console.log("Done reading 1");
      return bulk_request;
    }

    // Rework the data slightly
    ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };

    bulk_request.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    bulk_request.push(ncar);
    return bulk_request;
  }, []);

  // A little voodoo to simulate synchronous insert
  var busy = false;
  var callback = function (err, resp) {
    if (err) {
      console.log(err);
    }
    busy = false;
  };

  // Recursively whittle away at bulk_request, 1000 at a time.
  var perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }
    if (bulk_request.length > 0) {
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };

  perhaps_insert();
});
You can create a promise for each file read and feed the results to the Elasticsearch bulk request.
const fsPromises = require('fs').promises;
const path = require('path');

const files = ['filename1', 'filename2'];

// Read one file; make sure the path is correct.
const fetchFile = (filename) => fsPromises.readFile(path.join(__dirname, filename));

const results = files.map((fileName) => fetchFile(fileName));

Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get the data from all the promises, pass it to Elasticsearch.
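For the three-files-to-three-indexes part, one possible shape is to build one bulk body per file and index them in parallel; the file names, index names and document mapping below are placeholders, and client is assumed to be the same Elasticsearch client used above:
// Sketch only: file and index names are made up, and documents are indexed as-is.
const fsPromises = require('fs').promises;
const path = require('path');

const fileToIndex = {
  'DocRes.json': 'ncar_index',
  'SecondFile.json': 'second_index',
  'ThirdFile.json': 'third_index',
};

const indexFile = async (filename, indexName) => {
  const raw = await fsPromises.readFile(path.join(__dirname, filename), 'utf-8');
  // One action line plus one document line per record, as the bulk API expects.
  const body = raw
    .split('\n')
    .filter((line) => line.trim().length > 0)
    .flatMap((line) => {
      const obj = JSON.parse(line);
      return [{ index: { _index: indexName, _id: obj.id } }, obj];
    });
  return client.bulk({ body });
};

Promise.all(
  Object.entries(fileToIndex).map(([file, index]) => indexFile(file, index))
)
  .then(() => console.log('Inserted all records.'))
  .catch((e) => console.log(e));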

NodeJS unable to modify a class obj

This is my first post in this forum, so please forgive my mistakes.
I want to write a NodeJS server which runs a WebSocket server (npm ws module).
The NodeJS server also contains a class object whose functions I want to modify afterwards over the WebSocket connection.
My problem is that the modified function can't access global variables.
Can someone help if there is a solution for this problem, or explain why it happens? If you do this without the WebSocket, it works.
Here is the code:
Server code:
const WebSocket = require('ws');

// WebSocket Server
const wss = new WebSocket.Server({ port: 8080 });

wss.on('connection', function connection(ws) {
  ws.on('message', function incoming(message) {
    try {
      message = JSON.parse(message);
      if (message.type == "handler") {
        handler.modify(message.data);
        console.log("modifyed");
      }
      if (message.type == "func") {
        handler.modify_func(message.data);
        console.log("modifyed");
      }
      if (message.type == "run") {
        eval(message.data);
      }
    }
    catch (error) {
    }
  });
});

// Modifying class
class Handler {
  constructor() {
    this.functions = [];
  }

  modify(data) {
    let temp_class = new Function('return ' + data)();
    temp_class.functions.forEach(element => {
      if (this.functions.indexOf(element) == -1) {
        this.functions.push(element)
      }
      this[element] = temp_class[element];
    });
  }

  modify_func(data) {
    let temp_func = new Function('return ' + data)();
    this[temp_func.name] = temp_func;
  }

  test_func_from_orginal() {
    console.log("test_func_from_orginal says:");
    console.log(test_val);
  }
}

var test_val = "this is the global variable";
var handler = new Handler();
Client code:
const WebSocket = require('ws');

//WebSocket Client
var ws = new WebSocket('ws://localhost:8080');

ws.on('open', function open(event) {
  // ws.send(JSON.stringify({ type: "handler", data: Handler.toString() }))
  ws.send(JSON.stringify({ type: "func", data: test_func_from_func.toString() }))
  console.log("open")
});

//Class Module
class Handler {
  static get functions() {
    return ["test"];
  }

  static test_func_from_class() {
    console.log("test_func_from_class sayes:")
    console.log(test_val);
  }
}

function test_func_from_func() {
  console.log("test_func_from_func sayes:")
  console.log(test_val);
}

setTimeout(function () { ws.send(JSON.stringify({ type: "run", data: 'handler.test_func_from_orginal()' })) }, 1000);
// setTimeout(function () { ws.send(JSON.stringify({ type: "run", data: 'handler.test_func_from_class()' })) }, 1000);
setTimeout(function () { ws.send(JSON.stringify({ type: "run", data: 'handler.test_func_from_func()' })) }, 1000);
OK, so this is what it's all about - a simple mistake. If you cut out all the WebSocket stuff (which is not really relevant here, as strings, not contexts, get passed back and forth anyway), you'll get this:
class ServerHandler {
  constructor() {
    this.functions = [];
  }

  modify(data) {
    let temp_class = new Function('return ' + data)();
    // not sure why not just `eval(data)` btw
    temp_class.functions.forEach(funcName => {
      if (this.functions.indexOf(funcName) == -1) {
        this.functions.push(funcName)
      }
      this[funcName] = temp_class[funcName];
    });
  }
}

class ClientHandler {
  static get functions() {
    return ["test_func_from_class"];
    // not "test" as in your example
    // you actually don't even need this registry:
    // all the static methods can be collected in runtime
  }

  static test_func_from_class() {
    console.log("test_func_from_class sayes:")
    console.log(test_val);
  }
}

var test_val = 42;

var handler = new ServerHandler();
handler.modify(ClientHandler.toString());
eval(`handler.test_func_from_class()`); // 42
This all works fine, as there is no longer a mismatch between the name of the method stored in static get functions ("test") and the actual name of that method ("test_func_from_class"). The trick is that all the static functions created along with that temporary class are scoped the same way as any other entity created in ServerHandler; that's how they 'see' that test_val.
But 'works' here is only about the technical possibility of this approach, not about its feasibility. Both new Function and eval with arbitrary input are very dangerous security holes - and they're left wide open here.
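If the 'run' messages are the part you want to keep, a safer direction (just a sketch, reusing the method names from the example above) is to dispatch on a whitelist instead of evaluating arbitrary strings:
// Sketch: only allow known method names instead of eval(message.data).
const allowedMethods = new Set(['test_func_from_orginal', 'test_func_from_class']);

function runHandlerMethod(handler, name) {
  if (!allowedMethods.has(name) || typeof handler[name] !== 'function') {
    throw new Error('Method not allowed: ' + name);
  }
  return handler[name]();
}

// The client would then send { type: "run", data: "test_func_from_class" }
// and the server would call runHandlerMethod(handler, message.data).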
I have now found a solution to my problem.
Server Code
const WebSocket = require('ws');

// WebSocket Server
const wss = new WebSocket.Server({ port: 8080 });

wss.on('connection', function connection(ws) {
  ws.on('message', function incoming(message) {
    message = JSON.parse(message);
    try {
      if (message.type == "run") {
        eval(message.data);
      }
      if (message.type == "obj_handler") {
        handler.modify(JSON.parse(message.data));
      }
    }
    catch (error) {
      console.log(error);
    }
    // console.log('received: %s', message);
  });

  ws.send('something');
});

class ServerHandler {
  constructor() {
    this.data = "hi";
  }

  modify(data) {
    for (const func in data) {
      this[func] = eval(data[func]);
    }
  }
}

var test_val = 42;
var handler = new ServerHandler();
Client Code:
const WebSocket = require('ws');

//WebSocket Client
try {
  var ws = new WebSocket('ws://localhost:8080');
  ws.on('open', function open(event) {
    ws.send(JSON.stringify({ type: "obj_handler", data: update.convert() }))
  });
}
catch (error) {
}

// Needed Update with 2 new Functions
update = {
  func_test_global: () => {
    console.log(test_val);
  },
  func_test_this: _ => {
    console.log(this.data);
  },
  convert: function () {
    let new_update = {};
    for (const func in this) {
      if (func != "convert")
        new_update[func] = "" + this[func];
    }
    return JSON.stringify(new_update)
  }
}

// setTimeout(function () { ws.send(JSON.stringify({ type: "run", data: 'handler.func_test_global()' })) }, 1000);
// setTimeout(function () { ws.send(JSON.stringify({ type: "run", data: 'handler.func_test_this()' })) }, 1000);

CRON Job not working on meteor

In main.js in the server folder, I have this code:
var DDP = require('ddp');
var DDPlogin = require('ddp-login');
var Job = require('meteor-job');

var ddp = new DDP({
  host: "127.0.0.1",
  port: 3000,
  use_ejson: true
});

Meteor.startup(() => {
  var myJobs = JobCollection('myJobQueue');

  Job.setDDP(ddp);
  ddp.connect(function (err) {
    if (err) throw err;
    DDPlogin(ddp, function (err, token) {
      if (err) throw err;
    });
  });

  myJobs.allow({
    admin: function (userId, method, params) {
      return true;
    }
  });

  Meteor.publish('allJobs', function () {
    return myJobs.find({});
  });

  myJobs.startJobServer();

  var workers = Job.processJobs('myJobQueue', 'sendEmail',
    function (job, cb) {
      console.log(job.data.text);
      job.done();
      cb(null);
    }
  );
});
And in my main.js in the client folder, I have this code:
var myJobs = JobCollection('myJobQueue');
var jobSub = null;

class App extends Component {
  componentDidMount() {
    if (jobSub !== null)
      jobSub.stop();
    jobSub = Meteor.subscribe('allJobs');

    var job = new Job(myJobs, 'sendEmail',
      {
        text: 'bozo#clowns.com'
      }
    );
    job.priority('normal')
      .retry({ retries: 5,
        wait: 60*1000 }) // 1 minute between attempts
      .delay(0) // start immediately
      .save();
  }
  ...
  render() {
    console.log(myJobs.find().fetch());
    ...
  }
}
I am using the vsivsi:meteor-job-collection package.
The problem is that console.log() is not executed.
What is wrong in my step-by-step installation and usage?
I need the console.log() to run every minute.
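If the goal is a log line every minute, job-collection jobs can also be made repeating; as far as I remember the package exposes a .repeat() option on the job, so the client-side job setup would look roughly like this (not verified against this exact setup):
// Sketch: same job as above, but repeating roughly once per minute.
var job = new Job(myJobs, 'sendEmail', { text: 'bozo#clowns.com' });
job.priority('normal')
  .retry({ retries: 5, wait: 60 * 1000 })
  .repeat({ wait: 60 * 1000 }) // re-queue about every minute after completion
  .save();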
