Load JSON, YAML, and XML files in a Singleton Pattern - javascript

I am trying to implement a ConfigLoader that loads YAML, XML, and JSON files in a singleton method, i just got started but got stuck in terms of where else to go, and how to load a YAML file
heres what I have so far, i need some insights on how to translate this into a singleton pattern, and some insights on how to load a YAML file
let getXMLFile = function(path, callback) {
//create new object
let request = new XMLHttpRequest();
//open the object and specify verb and file path
request.open("GET", path);
//set header of httprequest
request.setrequestHeader("Content-Type","text/xml");
//set event listener
request.onReadystatechange= function(){
//check to see ready state and event status
if (request.readyState === 4 && request.status === 200){
callback(request.responseXML);
}
};
request.send();
};
getXMLFile("fileName.xml", function(xml){
console.log(xml);
});
"use strict";
//Loading Json File with fetch
//using URL and http
//use request object to define key
let myRequest = new Request("./data_class.json")
//pass request object
fetch(myRequest).then(function(resp){
//return response but act on it with Json method
//convert text to json file
return resp.json();
})
//handle JSON data
.then(function(data){
console.log(data);
});

Related

How to load 2 json with nodejs

I want to load 2 json files:
number.json : {"argent":300,"nbJoueur":11,"nbClub":1,"nbVictoire":0,"nbDefaite":0}
img.json : {"bonus1":false,"bonus2":false,"bonus3":false,"bonus4":false,"bonus5":false,"bonus6":false}
I managed to read the first file, but I don't know what to do to read the second file in the same time.
I have this code :
Javascript :
function load(){
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function(){
if (this.readyState === 4 && this.status === 200){
var recup = JSON.parse(this.responseText);
number["argent"] = recup["argent"];
number["nbJoueur"] = recup["nbJoueur"];
number["nbClub"] = recup["nbClub"];
number["nbVictoire"] = recup["nbVictoire"];
number["nbDefaite"] = recup["nbDefaite"];
}
};
xhr.open("GET","http://localhost:8080/load",true);
xhr.send();
}
Nodejs :
function load(response){
console.log("Load called");
fs.readFile("number.json", function(err,data){
if(err) {
throw err;
response.setHeader("Access-Control-Allow-Origin","*");
response.writeHead(418);
response.end();
}
else{
response.setHeader("Access-Control-Allow-Origin","*");
response.writeHead(200);
response.write(data);
response.end();
}
});
}
Based on this answer, you can get your json file simply that way:
var json = require('path1.json');
var json2 = require('path2.json')
And then you can use the variables in your response callback
You've written a hardcoded handler that can only serve one file.
Instead, you could:
write a general handler that serves either file depending upon the url that's accessed.
Duplicate the load function and create a new path for the second file
Or, if your JSON files aren't going to change frequently, you can put them in a folder and serve it static
Edit: I noticed you aren't using any framework, so I removed my code sample that corresponds to the Express framework.

How to correctly send and receive deflated data

I'm using protobufs for serializing my data. So far I serialized my data on the server (node restify) send it, receive it (Request is made by XMLHttpRequest) and serialize it on the Client.
Now I want to employ zipping to reduce the transfered file size. I tried using the library pako, which uses zlib.
In a basic script that I used to compare the protobuf and zipping performance to json I used it this way, and there were no problems
var buffer = proto.encode(data,'MyType'); // Own library on top of protobufs
var output = pako.deflate(buffer);
var unpacked = pako.inflate(output);
var decoded = proto.decode(buffer,'MyType');
However if I try to do this in a client-server model I can't get it working.
Server:
server.get('/data', function (req, res) {
const data = getMyData();
const buffer = proto.encode(data, 'MyType');
res.setHeader('content-type', 'application/octet-stream;');
res.setHeader('Content-Encoding', 'gzip;');
return res.send(200,buffer);
});
My own protolibrary serializes the data in protobuf and then deflates it:
...
let buffer = type.encode(message).finish();
buffer = pako.deflate(buffer);
return buffer;
The request looks like this:
public getData(){
return new Promise((resolve,reject) => {
const request = new XMLHttpRequest();
request.open("GET", this.url, true);
request.responseType = "arraybuffer";
request.onload = function(evt) {
const arr = new Uint8Array(request.response);
const payload = proto.decode(request.response ,'MyType')
resolve(payload);
};
request.send();
});
}
The proto.decode method first inflates the buffer buffer = pako.inflate(buffer); and then deserializes it from Protobuf.
If the request is made i get following error: "Uncaught incorrect header check" returned by the inflate method of pako:
function inflate(input, options) {
var inflator = new Inflate(options);
inflator.push(input, true);
// That will never happens, if you don't cheat with options :)
if (inflator.err) { throw inflator.msg || msg[inflator.err]; }
return inflator.result;
}
Also I looked at the request in Postman and found following:
The deflated response looks like this: 120,156,60,221,119,64,21,237,119,39,240,247,246,242,10,49,191,244,178,73,54,157 and has a length of 378564
The same request without deflating (the protobuf) looks like this
�:�:
 (� 0�8#H
 (� 0�8#H
� (�0�8#�Hand has a length of 272613.
I'm assuming, that I'm doing something incorrectly on the server side, since the inflated request is larger than the one not using compression.
Is it the content-type Header? I'm out of ideas.

What is the output of a piped file stream?

Perhaps the question is not worded in the greatest way but here's some more context. Using GridFSBucket, I'm able to store a file in mongo and obtain a download stream for that file. Here's my question. Let's say I wanted to send that file back as a response to my http request.
I do:
downloadStream.pipe(res);
On the client side now when I print the responseText, I get some long string with some funky characters that look to be encrypted. What is the format/type of this string/stream? How do I setup my response so that I can get the streamed data as an ArrayBuffer on my client side?
Thanks
UPDATE:
I haven't solved the problem yet, however the suggestion by #Jekrb, gives exactly the same output as doing console.log(this.responseText). It looks like the string is not a buffer. Here is the output from these 2 lines:
console.log(this.responseText.toString('utf8'))
var byteArray = new Uint8Array(arrayBuffer);
UPDATE 2 - THE CODE SNIPPETS
Frontend:
var savePDF = function(blob){
//fs.writeFile("test.pdf",blob);
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
if (this.readyState === XMLHttpRequest.DONE && this.status === 200){
//TO DO: Handle the file in the response which we will be displayed.
console.log(this.responseText.toString('utf8'));
var arrayBuffer = this.responseText;
if (arrayBuffer) {
var byteArray = new Uint8Array(arrayBuffer);
}
console.log(arrayBuffer);
}
};
xhr.open("POST","/pdf",true);
xhr.responseType = 'arrayBuffer';
xhr.send(blob);
};
BACKEND:
app.post('/pdf',function(req,res){
MongoClient.connect("mongodb://localhost:27017/test", function(err, db) {
if(err) return console.dir(err);
console.log("Connected to Database");
var bucket = new GridFSBucket(db, { bucketName: 'pdfs' });
var CHUNKS_COLL = 'pdfs.chunks';
var FILES_COLL = 'pdfs.files';
// insert file
var uploadStream = bucket.openUploadStream('test.pdf');
var id = uploadStream.id;
uploadStream.once('finish', function() {
console.log("upload finished!")
var downloadStream = bucket.openDownloadStream(id);
downloadStream.pipe(res);
});
// This pipes the POST data to the file
req.pipe(uploadStream);
});
});
My guess is that either the response is being outputted as plain binary which is not base64 encoded (still a buffer) or it is a compressed (gzip) response that needs to be uncompressed first.
Hard to pinpoint the issue without seeing the code though.
UPDATE:
Looks like you're missing the proper response headers.
Try setting these headers before the downloadStream.pipe(res):
res.setHeader('Content-disposition', 'attachment; filename=test.pdf');
res.set('Content-Type', 'application/pdf');
Your stream is likely already a buffer. You might be able to call responseText.toString('utf8') to convert the streamed data into readable string.
I solved it!!!
Basically preset the response type to "arraybuffer" before you make the request using
req.responseType = "arraybuffer"
Now, once you receive the response, don't use responseText, instead use response. response contains the arraybuffer with the data for the file.

A function that compiles the JSON content with Handlebars templates is not working (syntax error?)

There is a web page that is filled with content from JSON file using Handlebars (thank you very much user6709129 for a detailed explanation of this technology: https://stackoverflow.com/a/38947838/6530539).
So I have:
Index.html
content.JSON
contentProcessing.js
contentProcessing.js:
function sendGet(callback) {
/* create an AJAX request using XMLHttpRequest*/
var xhr = new XMLHttpRequest();
/*reference json url taken from: http://www.jsontest.com/*/
/* Specify the type of request by using XMLHttpRequest "open",
here 'GET'(argument one) refers to request type
"http://date.jsontest.com/" (argument two) refers to JSON file location*/
xhr.open('GET', "http://date.jsontest.com/");
/*Using onload event handler you can check status of your request*/
xhr.onload = function () {
if (xhr.status === 200) {
callback(JSON.parse(xhr.responseText));
} else {
alert(xhr.statusText);
}
};
/*Using onerror event handler you can check error state, if your request failed to get the data*/
xhr.onerror = function () {
alert("Network Error");
};
/*send the request to server*/
xhr.send();
}
//For template-1
var dateTemplate = document.getElementById("date-template").innerHTML;
var template = Handlebars.compile(dateTemplate);
sendGet(function (response) {
document.getElementById('testData').innerHTML += template(response);
})
It works great!
Now I want to wrap the Javascript code in a function (let's call it contentPrepare), that uses three variables:
The path to the JSON file
Id of the Handlebars template
Div Id where the content should be placed
Then I want to use this function in the other Javascript file - addContent.js, which will use this function several times with different variables, and thus fill the site with content.
Why it does not work in my test example?
var jsonDir = "json/test.json";
var templId = "date-template";
var finId = 'testData';
function contentPrepare (jsonDir, templId, finId){
function sendGet(callback) {
/* create an AJAX request using XMLHttpRequest*/
var xhr = new XMLHttpRequest();
/*reference json url taken from: http://www.jsontest.com/*/
/* Specify the type of request by using XMLHttpRequest "open",
here 'GET'(argument one) refers to request type
"http://date.jsontest.com/" (argument two) refers to JSON file location*/
xhr.open('GET', jsonDir);
/*Using onload event handler you can check status of your request*/
xhr.onload = function () {
if (xhr.status === 200) {
callback(JSON.parse(xhr.responseText));
} else {
alert(xhr.statusText);
}
};
/*Using onerror event handler you can check error state, if your request failed to get the data*/
xhr.onerror = function () {
alert("Network Error");
};
/*send the request to server*/
xhr.send();
}
//For template-1
var dateTemplate = document.getElementById(templId).innerHTML;
var template = Handlebars.compile(dateTemplate);
sendGet(function (response) {
document.getElementById(finId).innerHTML += template(response);
})
}
P.S.
Eventually, contentProcessing.js and addContent.js will be connected as a commonJS modules and compiled into a single Javascript file using Browserify.
After you have wrapped all your logic into a function - you still should invoke it (that's the whole purpose of having functions)
The call would presumably look something like
var jsonDir = "json/test.json";
var templId = "date-template";
var finId = 'testData';
contentPrepare(jsonDir, templId, finId);

how to find out if XMLHttpRequest.send() worked

I am using XMLHttpRequest to send a file from javascript code to a django view.I need to detect,whether the file has been sent or if some error occurred.I used jquery to write the following javascript.
Ideally I would like to show the user an error message that the file was not uploaded.Is there some way to do this in javascript?
I tried to do this by returning a success/failure message from django view , putting the success/failed message as json and sending back the serialized json from the django view.For this,I made the xhr.open() non-asynchronous. I tried to print the xmlhttpRequest object's responseText .The console.log(xhr.responseText) shows
response= {"message": "success"}
What I am wondering is,whether this is the proper way to do this.In many articles,I found the warning that
Using async=false is not recommended
So,is there any way to find out whether the file has been sent,while keeping xhr.open() asynchronous?
$(document).ready(function(){
$(document).on('change', '#fselect', function(e){
e.preventDefault();
sendFile();
});
});
function sendFile(){
var form = $('#fileform').get(0);
var formData = new FormData(form);
var file = $('#fselect').get(0).files[0];
var xhr = new XMLHttpRequest();
formData.append('myfile', file);
xhr.open('POST', 'uploadfile/', false);
xhr.send(formData);
console.log('response=',xhr.responseText);
}
My django view extracts file from form data and writes to a destination folder.
def store_uploaded_file(request):
message='failed'
to_return = {}
if (request.method == 'POST'):
if request.FILES.has_key('myfile'):
file = request.FILES['myfile']
with open('/uploadpath/%s' % file.name, 'wb+') as dest:
for chunk in file.chunks():
dest.write(chunk)
message="success"
to_return['message']= message
serialized = simplejson.dumps(to_return)
if store_message == "success":
return HttpResponse(serialized, mimetype="application/json")
else:
return HttpResponseServerError(serialized, mimetype="application/json")
EDIT:
I got this working with the help of #FabrícioMatté
xhr.onreadystatechange=function(){
if (xhr.readyState==4 && xhr.status==200){
console.log('xhr.readyState=',xhr.readyState);
console.log('xhr.status=',xhr.status);
console.log('response=',xhr.responseText);
var data = $.parseJSON(xhr.responseText);
var uploadResult = data['message']
console.log('uploadResult=',uploadResult);
if (uploadResult=='failure'){
console.log('failed to upload file');
displayError('failed to upload');
}else if (uploadResult=='success'){
console.log('successfully uploaded file');
}
}
}
Something like the following code should do the job:
xmlhttp.onreadystatechange = function() {
if (xmlhttp.readyState === 4) {
var response = JSON.parse(xmlhttp.responseText);
if (xmlhttp.status === 200) {
console.log('successful');
} else {
console.log('failed');
}
}
}
XMLHttpRequest objects contain the status and readyState properties, which you can test in the xhr.onreadystatechange event to check if your request was successful.
XMLHttpRequest provides the ability to listen to various events that can occur while the request is being processed. This includes periodic progress notifications, error notifications, and so forth.
So:
function sendFile() {
var form = $('#fileform').get(0);
var formData = new FormData(form);
var file = $('#fselect').get(0).files[0]
var xhr = new XMLHttpRequest();
formData.append('myfile', file);
xhr.open('POST', 'uploadfile/', false);
xhr.addEventListener("load", transferComplete);
xhr.addEventListener("error", transferFailed);
}
function transferComplete(evt) {
console.log("The transfer is complete.");
// Do something
}
function transferFailed(evt) {
console.log("An error occurred while transferring the file.");
// Do something
}
You can read more about Using XMLHttpRequest.

Categories

Resources