I am fetching data from an API in order to show sales and finance reports, but I receive a type gzip file which I managed to convert into a Uint8Array. I'd like to somehow parse-decode this into a JSON file that I can use to access data and create charts in my frontend with.
I was trying with different libraries (pako and cborg seemed to be the ones with the closest use cases), but I ultimately get an error Error: CBOR decode error: unexpected character at position 0
This is the code as I have it so far:
// Question code: streams an API response and tries to decode it inline.
// NOTE(review): several visible problems, left in place for context —
//  - res.setEncoding("utf8") decodes the gzip BYTES as text, corrupting them;
//    binary payloads must be collected as Buffers.
//  - pako.deflate COMPRESSES its input; decompressing gzip needs
//    pako.inflate / pako.ungzip (or Node's zlib.gunzip) — confirm intent.
//  - decode() (cborg) runs per "data" chunk, but chunks are partial; the full
//    body must be accumulated before decoding. Given the gzip + reports
//    context the payload is presumably JSON, not CBOR — verify.
let req = https.request(options, function (res) {
console.log("Header: " + JSON.stringify(res.headers));
res.setEncoding("utf8");
res.on("data", function (body) {
const deflatedBody = pako.deflate(body);
console.log("DEFLATED DATA -----> ", typeof deflatedBody, deflatedBody);
console.log(decode(deflatedBody));
});
res.on("error", function (error) {
console.log("connection could not be made " + error.message);
});
});
req.end();
};
I hope anyone has stumbled upon this already and has some idea.
Thanks a lot!
Please visit this answer https://stackoverflow.com/a/12776856/16315663 to retrieve GZIP data from the response.
Assuming you have already retrieved the full data as a Uint8Array,
you just need to decode the Uint8Array into a string:
// Decode the (already gzip-DECOMPRESSED) bytes as UTF-8 text, then parse as
// JSON. NOTE(review): dataAsU8Array must hold decompressed data — raw gzip
// bytes will not parse.
const jsonString = Buffer.from(dataAsU8Array).toString('utf8')
const parsedData = JSON.parse(jsonString)
console.log(parsedData)
Edit
Here is what worked for me
const {request} = require("https")
const zlib = require("zlib")
// Promise wrapper around zlib.gunzip: resolves with the decompressed
// Buffer, rejects with the zlib error.
const parseGzip = (gzipBuffer) =>
    new Promise((resolve, reject) => {
        zlib.gunzip(gzipBuffer, (error, decompressed) => {
            if (error) reject(error)
            else resolve(decompressed)
        })
    })
// GET `url`, collect the gzip-compressed body, gunzip it, and resolve with
// the parsed JSON. Rejects with an Error on any non-200 status.
const fetchJson = (url) => new Promise((resolve, reject) => {
    const req = request(url)
    req.on("response", (res) => {
        if (res.statusCode !== 200) {
            reject(new Error(`${res.statusCode} ${res.statusMessage}`))
            return
        }
        // accumulate raw Buffer chunks; decompress only once at the end
        const chunks = []
        res.on("data", (chunk) => {
            console.log(chunk.length);
            chunks.push(chunk)
        })
        res.on("end", async () => {
            const compressed = Buffer.concat(chunks)
            const unzipped = await parseGzip(compressed)
            resolve(JSON.parse(unzipped.toString()))
        })
    })
    req.end()
})
// Demo: fetch a gzipped JSON file and log either the object or the error.
fetchJson("https://wiki.mozilla.org/images/f/ff/Example.json.gz")
    .then((result) => console.log(result))
    .catch((e) => console.log(e))
Thank you, I actually just tried this approach and I get the following error:
SyntaxError: JSON Parse error: Unexpected identifier "x"
But I managed to print the data in text format using the below function:
// Fetches a gzip-compressed report over HTTPS, decompresses it on the fly,
// and invokes callback(null, text) with the full decoded body, or
// callback(err) on any request/decompression error.
// NOTE(review): bare assignment creates/overwrites an outer binding —
// consider declaring with const if nothing else depends on the global.
getFinancialReports = (options, callback) => {
  // collect raw decompressed Buffer chunks; decoded ONCE at the end.
  // BUG FIX: the original pushed data.toString() per chunk, which corrupts
  // any multi-byte UTF-8 character split across a chunk boundary.
  var chunks = [];
  https
    .get(options, function (res) {
      // pipe the response into gunzip to decompress the gzip stream
      var gunzip = zlib.createGunzip();
      res.pipe(gunzip);
      gunzip
        .on("data", function (data) {
          // keep the Buffer as-is; do NOT stringify per-chunk
          chunks.push(data);
        })
        .on("end", function () {
          // decompression complete: join buffers, decode once, return
          callback(null, Buffer.concat(chunks).toString("utf8"));
        })
        .on("error", function (e) {
          callback(e);
        });
    })
    .on("error", function (e) {
      callback(e);
    });
};
Now I would need to pass this into a JSON object.
Related
The problem:
I have a function that maps over countries and regions and creates an array of urls, then makes a GET request to each one. I want to save the responses in a single json file, and I want this function to handle that as well.
Expected results:
I expected to be able to run the function as needed (like when source data is updated), and get a new or updated local json file with all the data objects in one array.
Actual results:
A file with only one record, an array with the last response object.
What I've tried:
I tried using fs.writeFile and fs.readFile. I did not get any errors, but the resulting file had only one record, even though console showed all the requests being made. It seemed that each response was being written over the previous.
Minimal reproducible (Node.js) example:
const fs = require('fs')
// subset of countries and codes for demo purposes
const countryDirList = [
'antarctica',
'central-asia',
]
const fbCountryCodes = [
{ "region": "antarctica", "codes": ["ay", "bv"] },
{ "region": "central-asia", "codes": ["kg", "kz"] },
]
// Question code (known-broken, kept as-is): initializes ./test.json with '[]'
// and then fires off requests, appending each response to the file.
// NOTE(review): visible defects, which explain "only the last record survives" —
//  - the fs.writeFile('[]') below is async and races with the readFile calls.
//  - Array.prototype.filter is given an async callback; an async function
//    returns a Promise (always truthy), so the filter does not filter.
//  - each response does readFile -> push -> (1s later) writeFile; these
//    concurrent read-modify-write cycles overwrite one another.
//  - urlConstructor is never awaited, so the function returns before work ends.
const callingUrlsInSequence = async () => {
fs.writeFile('./test.json', '[]', function (err) {
if (err) throw err
console.log('File - test.json - was created successfully.')
})
try {
const urlConstructor = countryDirList.map(async (country) => {
console.log('countries mapped', country)
return fbCountryCodes.filter(async (f) => {
if (country === f.region) {
const urls = f.codes.map(async (c) => {
const response = await axios({
method: 'get',
url: `https://raw.githubusercontent.com/factbook/factbook.json/master/${country}/${c}.json`,
responseType: 'json',
headers: {
'Content-Type': 'application/json',
},
})
// read-modify-write per response: the source of the lost updates
fs.readFile('./test.json', function (err, data) {
let json = JSON.parse(data)
json.push(response.data)
setTimeout(() => {
fs.writeFile('./test.json', JSON.stringify(json), function (err) {
if (err) throw err
console.log('The "data to append" was appended to file!')
})
}, 1000)
})
return response.data
})
const dataArr = await Promise.all(urls)
dataArr.map((item) =>
console.log(
'dataArr',
item.Government['Country name']['conventional short form']
)
)
}
})
})
} catch (err) {
console.log('axios error: ', err)
}
}
callingUrlsInSequence()
I'm re-writing this question now because it kept getting downvoted, and I could see that it was not very concise.
I can also see now, that obviously, the fs.readFile inside the fs.writeFile is not going to work in the code I provided, but I'm leaving it there in case it might help someone else, combined with the solution I provided in response to my own question.
I ended up learning how to solve this problem with both node-fetch and axios. They are not exactly the same.
For both:
First, check for existence of destination file, and create one if it's not already there.
// Create ./data.json containing an empty JSON array if it does not already
// exist; logs and swallows any fs error (best-effort setup step).
const createNew = () => {
  try {
    if (existsSync('./data.json')) {
      console.log('file exists')
      return
    } else {
      // BUG FIX: fs.writeFile's callback receives only (error) — there is
      // no second `data` argument as the original signature implied.
      writeFile('./data.json', '[]', (error) => {
        if (error) {
          console.log('fs.writeFile - create new - error: ', error)
          return
        }
      })
    }
  } catch (err) {
    console.log('fs.existsSync error: ', err)
  }
}
createNew()
Then make the array of urls:
// NOTE(review): `countries` and `codes` are presumably defined/imported
// elsewhere — placeholders standing in for the full data sets. TODO confirm.
const countryDirList = [...countries]
const fbCountryCodes = [...codes]
const urls = []

// Build the factbook URL for every (country, code) pair whose region matches,
// pushing into the shared `urls` array (side effect kept for compatibility
// with the code below that reads `urls`).
// IMPROVED: the original used map/filter purely for their side effects — the
// author's own comment suspected as much — so plain loops make the intent
// explicit. The (unused) return value of the map/filter chain is dropped.
const makeUrls = (countriesArr, codesArr) => {
  for (const country of countriesArr) {
    for (const entry of codesArr) {
      if (entry.region !== country) continue
      for (const code of entry.codes) {
        urls.push(
          `https://raw.githubusercontent.com/factbook/factbook.json/master/${country}/${code}.json`
        )
      }
    }
  }
}
makeUrls(countryDirList, fbCountryCodes)
Next, make the requests.
Axios:
// Read the current data file, fetch every URL in `urls` in parallel with
// axios, and overwrite ./data.json with the array of response payloads.
fs.readFile('./data.json', (error, data) => {
  if (error) {
    console.log(error)
    return
  }
  Promise.all(
    urls.map(async (url) => {
      try {
        return await axios.get(url)
      } catch (err) {
        console.log('axios error: ', err)
        // BUG FIX: the original returned the Error object here, and the
        // mapping below then read `.data` off an Error (undefined, which
        // JSON.stringify turns into null). Return a marker instead.
        return null
      }
    })
  )
    .then((res) => {
      // skip failed requests rather than serializing nulls
      const responses = res.filter((r) => r !== null).map((r) => r.data)
      fs.writeFile('./data.json', JSON.stringify(responses, null, 2), (err) => {
        if (err) {
          console.log('Failed to write data')
          return
        }
        console.log('Updated data file successfully')
      })
    })
    .catch((err) => {
      console.log('axios error: ', err)
    })
})
Node-fetch:
//same basic structure, readFile with fetch and write file inside
// Read data2.json (content unused — the read gates on file existence),
// fetch every URL in parallel, and overwrite the file with the results.
fs.readFile('./data2.json', (error, data) => {
  if (error) {
    console.log(error)
    return
  }
  const fetchAll = async () => {
    const results = await Promise.all(
      urls.map(async (url) => {
        const response = await fetch(url)
        return response.json()
      })
    )
    fs.writeFile('./data2.json', JSON.stringify(results, null, 2), (err) => {
      if (err) {
        console.log('Failed to write data')
        return
      }
      console.log('Updated data file successfully')
    })
  }
  fetchAll()
})
Both methods produce exactly the same output: a json file containing a single array with however many response objects in it.
I am sending over a PDF file in formdata with an Axios post request. So that file will get uploaded/saved to a folder on the server. I'm using multer on the server to save the file. And that works great.
Now I also want to add some fields to the DB related to the file. One is the generated file name that gets generated right before the file is saved. So I don't want to make a round trip back to the client and then make another call out to the server to update the DB. So I want a send a few text strings along with the formdata. But no matter what I try, I cannot read any text data from that formdata object in my Node code. FYI I am using Express on my Node server.
Client side code that kicks off the upload process: (notice I am attempting to append additional fields to the formdata object)
// Build a FormData with the selected files plus the customer/load/filename
// text fields, then kick off the upload.
const uploadFilesAsync = () => {
  const data = new FormData();
  const filenames = [];
  uploadedFiles.forEach((f) => {
    filenames.push(f.name);
    data.append('file', f);
  });
  const fileInfo = {
    customer: selectedCustomer,
    load: selectedLoad,
    filenames
  };
  data.append('customer', selectedCustomer);
  data.append('load', selectedLoad);
  data.append('filenames', filenames.toString());
  // BUG FIX: `fileInfo` was declared a second time with const below, which
  // is a SyntaxError. The alternative shape that was also tried is kept
  // here for reference only:
  // const fileInfo = { customer: selectedCustomer, load: selectedLoad,
  //   filenames: filenames.toString() };
  uploadFiles(data, setLoaded)
    .then((res) => {
      console.log('uploadFiles res: ', res);
      if (res.status === 200) {
        // addFileInfoToDB(fileInfo)
        //   .then((r) => console.log('addFileInfoToDB: ', r))
        //   .catch((e) => console.log({ e }));
      }
    })
    .catch((e) => console.log(e));
};
And then the client side function uploadFiles:
// POST the multipart form data to /SDS/upload, reporting percentage
// progress through setLoaded. Resolves with the axios response; any
// failure propagates as a rejection.
export const uploadFiles = async (data, setLoaded) => {
  console.log({ data });
  const config = {
    onUploadProgress: (progressEvent) => {
      const percentCompleted = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      );
      setLoaded(percentCompleted);
    },
    headers: {
      'Content-Type': 'multipart/form-data'
    }
  };
  // and then here if I passed in the fileInfo object, I tried sending `{data, fileInfo}`
  // instead of just data, but that broke file upload portion too
  const response = await axios.post(baseURL + '/SDS/upload', data, config);
  console.log({ response });
  return response;
};
And finally the server side function that does all the work:
// Express/multer handler for the SDS PDF upload.
// NOTE(review): console.log(req.body) below runs BEFORE the multer `upload`
// middleware has parsed the multipart body, so req.body is still empty at
// that point — with multipart/form-data the text fields are parsed by
// multer itself and only become available inside upload()'s callback.
// Also note multer exposes the fields on req.body, not req.fileInfo —
// confirm against the multer docs.
static async uploadSDS(req, res) {
console.log(req.body);
let uploadSuccess = false;
upload(req, res, async function(err) {
if (err instanceof multer.MulterError) {
// return res.status(500).json({ Error1: err });
//return { status: 500 };
} else if (err) {
// return res.status(500).json({ Error2: err });
//return { status: 500 };
} else {
uploadSuccess = true;
}
console.log(uploadSuccess);
// return res.status(200).send(req.file);
//return { status: 200 };
// if (uploadSuccess) {
// try {
// const result = await SDS.addFileInfoToDB(req.fileInfo);
// if (result) {
// return res.status(200).json({ result });
// }
// } catch (e) {
// console.log(e);
// }
// }
});
}
When I console.log the req.body it is always empty.
I am trying to upload an image from a URL to my Google Cloud Storage (Firebase). The following function shall return the file and a consecutive function will retrieve the actual Signed/Download Url to the new file. After all this I am updating a document in my Firestore Database with the new URL. That part works; the functions wait on uploading the (unfortunately incomplete) image and my document gets updated with the newly created file url. But the actual file/image is incomplete. :-(
// Downloads the image at fileUrl and streams it into the Firebase Storage
// bucket under data/photos/<basename>, resolving with the bucket File
// object once the write stream fires "finish".
// NOTE(review): this is the version reported to produce an INCOMPLETE
// image. request.get() starts emitting data as soon as it is called;
// presumably bytes are lost before the pipe is fully set up — the accepted
// fix further down pauses the request until the response arrives. Confirm.
async function saveToStorage(fileUrl) {
var storage = admin.storage();
var urlLib = require("url");
var pathLib = require("path");
//Get File Name from provided URL
var parsed = urlLib.parse(fileUrl);
var fileName = pathLib.basename(parsed.pathname);
//Create Storage Reference with new File Name
var bucket = storage.bucket('gs://myprojectname.appspot.com');
//Path Folder
var folderPath = 'data/photos/';
//Path Folder + File
var internalFilePath = folderPath + fileName ;
//Bucket File Ref
var file = bucket.file(internalFilePath);
const request = require('request');
// NOTE(review): content type is hard-coded; the working version below
// copies it from the response headers instead.
const writeStream = file.createWriteStream({
metadata: {
contentType: 'image/jpg'
}
});
return new Promise((resolve, reject) => {
request.get(fileUrl)
.pipe(writeStream)
.on("error", (err) => {
console.error(`Error occurred`);
reject();
})
.on('finish', () => {
console.info(`Photo saved`);
resolve(file);
});
});
}
The Image that is saved/uploaded/streamed to my Cloud Storage file looks like this:
I have tried using node-fetch and request and rewrote my function in several ways, but always turn out with this result. I'm sure it has something to do with how I use my Promise, because if I omit the Promise the file actually completes but then the main code keeps executing instead of waiting for this Promise.
This has the same result (incomplete file):
// node-fetch variant — same incomplete-file symptom as the request version.
// NOTE(review): mixes await, .then, and a hand-built Promise; the inner
// promise resolves on writeStream 'finish', which looks correct in
// isolation, so the truncation presumably originates upstream of the pipe.
// TODO confirm (the accepted fix pauses the source stream before piping).
return await fetch(fileUrl).then(res => {
const contentType = res.headers.get('content-type');
const writeStream = file.createWriteStream({
metadata: {
contentType
}
});
let p = new Promise((resolve, reject) => {
res.body.pipe(writeStream);
writeStream.on('finish', function() {
console.log("Stream finished")
resolve(file);
});
writeStream.on('error', function() {
reject(new Error("Whoops!"));
});
});
return p.then(
function(file) {
console.log('Photo saved');
return file},
function(error) {
console.error(error);
return;
});
});
And outright returning the stream writes a complete file, but my main code is not waiting for the file (and I need to handle the file)..
// Variant that returns the piped stream directly: the file completes, but
// the caller receives the stream itself, so the main code cannot await the
// finished File.
// NOTE(review): `return file` inside the 'finish' handler only returns from
// that callback (the value is discarded), and the console.log after it is
// unreachable.
return res.body.pipe(writeStream)
.on('finish', () => {
return file;
console.log('Photo')
})
.on('error', err => {
return console.error(err);
});
Thanks for any help on this!
So this is the code that finally worked for me.
// Working version: pause the request immediately so no 'data' events are
// emitted until the write stream — created with the response's real
// content-type — is piped in, then resume. Resolves with `file` on finish.
return new Promise((resolve, reject) => {
const req = request(fileUrl);
// hold back data until the pipe is attached below
req.pause();
req.on('response', res => {
const writeStream = file.createWriteStream({
metadata: {
contentType: res.headers['content-type']
}
});
req.pipe(writeStream)
.on('finish', () => {
console.log('Photo saved');
resolve(file);
})
.on('error', err => {
writeStream.end();
console.error(err);
reject();
});
// pipe is in place — let the data flow
req.resume();
});
req.on('error', err => console.error(err));
});
I am trying to invoke a rest API inside an API but it is not returning anything. So I am making a simple lambda which returns a JSON but getting a null value as a response.
var https = require('https');
var dt;
exports.handler = async (event, context) => {
var data = '';
return new Promise((resolve, reject) => {
var params = {
host: "cvwtzygw4a.execute-api.ap-south-1.amazonaws.com",
path: "/test/first"
};
const req = https.request(params, (res) => {
console.log('STATUS: ' + res.statusCode);
res.setEncoding('utf8');
res.o n('data', function(chunk) {
data += chunk;
});
res.on('end', function() {
console.log("DONE");
console.log(data);
dt = JSON.parse(data);
console.log(dt);
});
resolve(dt);
});
req.on('error', (e) => {
reject(e.message);
});
// send the request
req.write('');
req.end();
});
};
You should go through this article to understand how to use NodeJs promises in AWS Lambda. In this, the second solution addresses your use case.
To be specific to your code, I modified to make it very simple using the async/await syntax and the request-promise library.
const request = require('request-promise');
exports.handler = async (event, context) => {
var data = '';
try {
data = await request.get('https://cvwtzygw4a.execute-api.ap-south-1.amazonaws.com/test/first');
console.log('response received', res);
} catch (error) {
console.log('Error', error);
}
return data;
};
Following was the output:
START RequestId: 80d75f93-5fa6-1354-c22c-0597beb075e7 Version: $LATEST
2020-01-03T17:51:19.987Z 80d75f93-5fa6-1354-c22c-0597beb075e7 response received {
"basic" : {"name":"John","age":31,"city":"New York"}
}
END RequestId: 80d75f93-5fa6-1354-c22c-0597beb075e7
REPORT RequestId: 80d75f93-5fa6-1354-c22c-0597beb075e7 Init Duration: 907.81 ms Duration: 1258.54 ms Billed Duration: 1300 ms Memory Size: 128 MB Max Memory Used: 55 MB
"{\n\"basic\" : {\"name\":\"John\",\"age\":31,\"city\":\"New York\"}\n}"
This URL returns JSON:
{
query: {
count: 1,
created: "2015-12-09T17:12:09Z",
lang: "en-US",
diagnostics: {},
...
}
}
I tried this, and it didn't work:
// NOTE(review): readJsonFromUrl is not a built-in — presumably a
// hypothetical helper; the answers below show real implementations.
responseObj = readJsonFromUrl('http://query.yahooapis.com/v1/publ...');
var count = responseObj.query.count;
console.log(count) // should be 1
How can I get a JavaScript object from this URL's JSON response?
You can use jQuery .getJSON() function:
// jQuery issues the GET, parses the JSON body, and passes the parsed
// object to the callback.
$.getJSON('http://query.yahooapis.com/v1/public/yql?q=select%20%2a%20from%20yahoo.finance.quotes%20WHERE%20symbol%3D%27WRC%27&format=json&diagnostics=true&env=store://datatables.org/alltableswithkeys&callback', function(data) {
// JSON result in `data` variable
});
If you don't want to use jQuery you should look at this answer for pure JS solution.
If you want to do it in plain javascript, you can define a function like this:
// Plain-JavaScript JSON fetcher.
// Performs a GET for `url` with an XMLHttpRequest whose responseType is
// 'json', then invokes callback(null, parsedBody) on HTTP 200 or
// callback(statusCode, body) on any other status.
var getJSON = function(url, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', url, true);
    xhr.responseType = 'json';
    xhr.onload = function() {
        if (xhr.status === 200) {
            callback(null, xhr.response);
        } else {
            callback(xhr.status, xhr.response);
        }
    };
    xhr.send();
};
And use it like this:
// Example usage: `err` is null on HTTP 200 (and `data` is already parsed);
// otherwise err holds the HTTP status code.
getJSON('http://query.yahooapis.com/v1/public/yql?q=select%20%2a%20from%20yahoo.finance.quotes%20WHERE%20symbol%3D%27WRC%27&format=json&diagnostics=true&env=store://datatables.org/alltableswithkeys&callback',
function(err, data) {
if (err !== null) {
alert('Something went wrong: ' + err);
} else {
alert('Your query count: ' + data.query.count);
}
});
Note that data is an object, so you can access its attributes without having to parse it.
With Chrome, Firefox, Safari, Edge, and Webview you can natively use the fetch API which makes this a lot easier, and much more terse.
If you need support for IE or older browsers, you can also use the fetch polyfill.
let url = 'https://example.com';
fetch(url)
.then(res => res.json())
.then(out =>
console.log('Checkout this JSON! ', out))
// NOTE(review): rethrowing inside .catch with no downstream handler just
// converts the rejection back into an unhandled rejection — effectively a
// no-op; log or handle the error instead.
.catch(err => { throw err });
MDN: Fetch API
Even though Node.js does not have this method built-in, you can use node-fetch which allows for the exact same implementation.
ES8(2017) try
obj = await (await fetch(url)).json();
// Fetch the demo JSON document and log the parsed object.
async function load() {
    const url = 'https://my-json-server.typicode.com/typicode/demo/db';
    const response = await fetch(url);
    const obj = await response.json();
    console.log(obj);
}
load();
you can handle errors by try-catch
// Same as above, but with try/catch so a network or parse failure logs
// 'error' and leaves obj as null.
async function load() {
    const url = 'http://query.yahooapis.com/v1/publ...';
    let obj = null;
    try {
        const response = await fetch(url);
        obj = await response.json();
    } catch (e) {
        console.log('error');
    }
    console.log(obj);
}
load();
Axios is a promise based HTTP client for the browser and node.js.
It offers automatic transforms for JSON data and it's the official recommendation from the Vue.js team when migrating from the 1.0 version which included a REST client by default.
Performing a GET request
// Make a request for a user with a given ID; axios parses the JSON body
// automatically, so `response.data` is already an object.
axios
    .get('http://query.yahooapis.com/v1/publ...')
    .then((response) => {
        console.log(response);
    })
    .catch((error) => {
        console.log(error);
    });
Or even just axios(url) is enough as a GET request is the default.
Define a function like:
fetchRestaurants(callback) {
fetch(`http://www.restaurants.com`)
.then(response => response.json())
.then(json => callback(null, json.restaurants))
.catch(error => callback(error, null))
}
Then use it like this:
// Example call: log the error when one occurred, otherwise the first restaurant.
fetchRestaurants((error, restaurants) => {
    if (error) {
        console.log(error)
    } else {
        console.log(restaurants[0])
    }
});
// Fetch the given URL and log its parsed JSON body.
async function fetchDataAsync(url) {
    const response = await fetch(url);
    const body = await response.json();
    console.log(body);
}
fetchDataAsync('paste URL');
//Resolved
// `.then` runs once the promise settles, so this logs the Response object.
const fetchPromise1 = fetch(url);
fetchPromise1.then(response => {
console.log(response);
});
//Pending
// Logging the promise synchronously shows it while still pending.
const fetchPromise = fetch(url);
console.log(fetchPromise);
This morning I had the same doubt, and now it's cleared up.
I used JSON with the OpenWeatherMap (https://openweathermap.org/) API and got data from the URL in the index.html file.
The code looks like this:
//got location
var x = document.getElementById("demo");
if (navigator.geolocation) {
navigator.geolocation.getCurrentPosition(weatherdata);
} else {
x.innerHTML = "Geolocation is not supported by this browser.";
}
//fetch openweather map url with api key
// SECURITY NOTE(review): the API key is embedded in client-side code and
// visible to anyone who views the page source — acceptable only for a
// throwaway free-tier key, as the author notes below.
function weatherdata(position) {
//put coordinates in to get weather data for that location
fetch('https://api.openweathermap.org/data/2.5/weather?lat='+position.coords.latitude+'&lon='+position.coords.longitude+'&appid=b2c336bb5abf01acc0bbb8947211fbc6')
.then(response => response.json())
.then(data => {
console.log(data);
// render a few fields of the parsed payload into the #demo element
document.getElementById("demo").innerHTML =
'<br>wind speed:-'+data.wind.speed +
'<br>humidity :-'+data.main.humidity +
'<br>temprature :-'+data.main.temp
});
}
<div id="demo"></div>
I have included the API key openly because it is a free-tier subscription; just sign up for a free subscription to begin with.
You can find some good free APIs and keys at rapidapi.com.
Based on @DanAlboteanu's answer on this page, with some error corrections to that JavaScript, my suggested code is:
// Node-style callback consumer: log the error when present, otherwise the data.
fetchRestaurants((error, data) => {
    if (error) {
        console.log(error);
    } else {
        console.log(data)
    }
});
and fetchRestaurants method is(please replace your json url with {your url of json data}):
// Fetch the JSON feed at the (placeholder) URL and deliver it through the
// Node-style callback: callback(null, json) on success, callback(error, null)
// on any fetch/parse failure.
function fetchRestaurants(callback) {
    fetch("{your url of json data}")
        .then((res) => res.json())
        .then((parsed) => callback(null, parsed))
        .catch((err) => callback(err, null))
}
You can access JSON data by using fetch() in JavaScript
Update url parameter of fetch() with your url.
// fetch the URL, parse the body as JSON, and log the resulting object
fetch(url)
    .then((response) => response.json())
    .then((data) => {
        console.log(data);
    })
Hope It helps, it worked perfectly for me.