NodeJs: can't write a file - javascript

I'm new in node, for practice i thought to develop a weather commandline application, but i found a problem with ajax request, i'm usually to use $.ajax of jquery but it doesn't works, ( I've tried to require jquery ). I've solved this problem with another module.
Now the problem is: when i try to print json information on the coords.json and next read it with read-json module there are some "\" & "\n" everywhere in the string, i've tried to replace it with regex and fs module but it doesn't re-write the file... why?
Here the full code:
// index.js
// modules
const program = require('commander');
const clear = require('clear');
const chalk = require('chalk');
const request = require('ajax-request');
const fs = require('fs');
const json = require('read-data').json;
const writeJson = require('write-json');
// Forecast.io Key
const key = "*************";
const freegeoip = "http://freegeoip.net/json/";
let latitude = 0,
longitude = 0 ;
// forecast.io api url
const url = `https://api.darksky.net/forecast/${key}/${latitude},${longitude}`;
// initialize myData with the freegeoip datas
let myData = request({
url: 'http://freegeoip.net/json/',
method: 'GET',
data: {
format: 'json'
},
}, function(err, res, body) {
writeJson('test.json', body, function(err) {
if (err) console.log(err);
});
});
fs.readFile('test.json', 'utf8', function (err,data) {
let result = data.replace(/[\\~#%&*<>?|\-]/g, '');
fs.writeFile('test.json', result, 'utf8', function (err) {
if (err) return console.log(err);
// if i do this is normal json
// console.log(result)
});
});
and the output in the file is:
// coords.json
"{\"ip\":\"**.**.**.**\",\"country_code\":\"IT\",\"country_name\":\"Italy\",\"region_code\":\"62\",\"region_name\":\"Latium\",\"city\":\"Rome\",\"zip_code\":\"00119\",\"time_zone\":\"Europe/Rome\",\"latitude\":**.*,\"longitude\":**.**\"metro_code\":0}\n"
but if i print it in console it's normal...

I really recommend that you use JSON.parse. It will parse your json and put it into a variable you can use:
fs.readFile('test.json', 'utf8', function (err,data) {
data = JSON.parse(data); // Yay you can use anything from the JSON
}

The \ are there to escape the quotes so that they don't end the string. They shouldn't affect anything, and are actually necessary. Have you tried it without the regex? That could be breaking things if it actually removes the .

Related

How you manually parse a raw request withtout using express.raw()

i'm implementing Stripe Webhooks and everything works fine but I'm having problems validating the payload when there are special characters(example, áéíóú). The code:
const endpointSecret = "whsec_xxx";
// stripe signature
const sig = headers['Stripe-Signature'][0]
const stripe = require('stripe')(
'sk_test_yyy'
);
//const buf = new Buffer(body, 'base64');
// let text = buff.toString('ascii');
try {
event = stripe.webhooks.constructEvent(body.text(), sig, endpointSecret);
} catch (err) {
response.setStatusCode(400);
throw err;
}
The thing is that i'm using Realm MongoDB HTTP EndPoints that don't support adding a body parser in the function. So What I have is just this:
exports = async function({ query, headers, body }, response) {
I can read the raw body using body.text() and this works fine in most cases, but not all.
What I Need is to emulate the JSON parsing when I declare the function like this:
app.post('/webhook', express.raw({type: 'application/json'}), (request, response) => {
This way always works. But I don't know how to manually parse the body in the same way that bodyparse does it.
Is there any way to achieve this?

How can I convert this NewsApi return into a JSON FIle?

I am just beginning to dabble into this API and JavaScript in general. I took a coding class at school last year and learned what I believe to be the basics. The API I am using returns a list of news articles and their info as a JSON Array in the console. The current code I have is:
const fs = require('fs');
let apiKey = "myKey"
console.log(apiKey);
const NewsAPI = require('newsapi');
const { json } = require('stream/consumers');
const newsapi = new NewsAPI(apiKey);
var globalResponse
newsapi.v2.everything({
q: 'trump',
}).then(response => {
console.log(response);
globalResponse = response;
})
// parse json
var jsonObj = JSON.parse(globalResponse);
// stringify JSON Object
var jsonContent = JSON.stringify(jsonObj);
console.log(jsonContent);
fs.writeFile("output.json", jsonContent, 'utf8', function (err) {
if (err) {
console.log("An error occured while writing JSON Object to File.");
return console.log(err);
}
console.log("JSON file has been saved.");
});
I can't seem to get the returned JSON to become a file. Its always one error after another

Parsing data in javascript with selenium

I am trying to read data from a json file in javascript. I can read succesfully read from it. When passing in the data in the code it throws errors.
Any help will be appriciated.
My code is as follows :
module.exports = {
'#tags': ['TC2'],
"LAUNCHURL" : function (browser) {
var fs = require('fs');
fs.readFile('C:/NightWatch_Automation/credentials.json', 'utf8', function (err, data) {
if (err) throw err; // we'll not consider error handling for now
var mydata = JSON.parse(data);
var url_get = mydata.credentials[0]['url'] //Passing data works here
browser
//opens salesforce and checks title to match 'Login | Salesforce'
.url(url_get)
.waitForElementVisible('//body', 1000)
//.assert.title('Login | Salesforce')
});
},
"Login": function(browser) {
var fs = require('fs');
fs.readFile('C:/NightWatch_Automation/credentials.json', 'utf8', function (err, data) {
if (err) throw err; // we'll not consider error handling for now
var mydata = JSON.parse(data);
var email = mydata.credentials[0]['email']
browser
.useXpath()
.click("//a[contains(text(),'Sign In')]")
.waitForElementVisible('//body',1000)
.setValue("//input[#aria-label='Enter email address']", email )//Passing data does NOT work here
.click("//button[#type='button']")
.waitForElementVisible("//input[#id='password']")
.setValue("//input[#id='password']","password12345")
.click("//button[#type='button']")
});
}
};
My json file is as follows
{ "credentials": [ {"url": "https://www.walmart.com", "search": "bandaids", "email" : "test#yahoo.com", "password" : "password12345"}] }
Further info
When the values are hard coded, the script works. As soon as a attempt to pass in data in breaks the code and wont complete. Using nightwatch as a test tool.
The paths are different which might be the problem:
LAUNCHURL: fs.readFile('C:/NightWatch_Automation/credentials.json'
Login: fs.readFile('C:/Users/NightWatch_Automation/credentials.json'

Unzip string in JS

I am pulling down objects from s3. the objects are zipped, and I need to be able to unzip them and compare the contents with some strings. My problem is that I can't seem to get them properly unzipped. This is what I am seeing happen: s3 zipped -> over the wire -> to me as JS Buffer -> ???
I am unsure of what I can do next. I have seemingly tried everything, such as pako, and lzutf8 to decompress the strings, but no dice.
here is an attempt with lzutf8:
lzutf8.decompress(buffer,{outputEncoding: "String"}, (result, error) => {
if (err) console.log(err);
if (data) console.log(data);
});
Here is an attempt with pako:
pako.ungzip(buffer,{to: "string"}, (result, error) => {
if (error) console.log(err);
if (result) console.log(data);
})
pako throws an "incorrect header check", and lzutf8 silently does nothing.
I am not married to these libraries, so if there is anything else that will do the job, I am happy to try anything. I am guessing that my problem might have something to do with the encoding types? Not sure though.
Here is what the relevant part of my code looks like:
let pako = require('pako');
let streamBuffers = require('stream-buffers');
let ws = fs.createWriteStream(process.cwd() + 'path-to-file');
let rs = new streamBuffers.ReadableStreamBuffer();
objects.forEach((obj) => {
console.log(obj);
rs.on("data", (data) => {
ws.write(pako.ungzip);
})
rs.push(obj);
})
You can create a readable stream from an object in S3 with the AWS SDK's createReadStream method and then pipe that through a zlib.Gunzip transform stream:
var zlib = require('zlib');
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var params = {Bucket: <bucket>, Key: <key>};
var file = require('fs').createWriteStream(<path/to/file>);
s3.getObject(params).createReadStream().pipe(zlib.createGunzip()).pipe(file);

Node.js - howto block around async call. Or non-blocking xmltojs lib

I'm over my head at the moment.
I'm new to node and writing a passportjs module for Freshbooks. There's a Passport function I'm trying to implement that get's a user's profile.
This code uses Passport's OAuth foo to make a request.
this._oauth.post(url, token, tokenSecret, post_body, post_content_type, function (err, body, res) {
if (err) { return done(new InternalOAuthError('failed to fetch user profile', err)); }
try {
var parser = require('xml2json');
var json = parser.toJson(body); //returns a string containing the JSON structure by default
var util = require('util');
console.log(util.inspect(json));
var profile = { provider: 'freshbooks' };
profile.id = json.response.staff.staff_id;
profile.displayName = json.response.staff.first_name + ' ' + json.response.staff.last_name;
profile.name = { familyName: json.response.staff.last_name,
givenName: json.response.staff.first_name };
if (json.response.staff.email) { profile.emails = [{ value: json.response.staff.email }]; }
profile._raw = body;
profile._json = json;
console.log(util.inspect(json));
done(null, profile);
} catch(e) {
done(e);
}
});
I get a response. It's xml. I'm converting it to JSON, but I don't want that actually. I want a plain-old javascript object.
I looked at https://github.com/Leonidas-from-XIV/node-xml2js but the examples don't show how to get the result out.
var parseString = require('xml2js').parseString;
var xml = "<root>Hello xml2js!</root>"
parseString(xml, function (err, result) {
console.dir(result);
});
What do I do to block around this code till the call is complete and get result out? I'm not sure how to merge these two callbacks together.
you can ask xml2json to return object:
var json = parser.toJson(body, {object: true});
if you decide to use async parser then just put your done callback inside json result handler. There is no need to "block" async function:
var parseString = require('xml2js').parseString;
parseString(body, function(err, json) {
// handle error: return done(err)
// do your logic if no error
// ...
// profile._json = json;
// ...
//
// 'return' result
done(null, profile);
});

Categories

Resources