I'm using protobufs to serialize my data. So far I serialize the data on the server (Node with restify), send it, receive it on the client (the request is made by XMLHttpRequest), and deserialize it there.
Now I want to add compression to reduce the transferred payload size. I tried the pako library, which implements zlib.
In a basic script that I used to compare protobuf-plus-compression performance against JSON, I used it this way, and there were no problems:
var buffer = proto.encode(data, 'MyType'); // own library on top of protobufs
var output = pako.deflate(buffer);
var unpacked = pako.inflate(output);
var decoded = proto.decode(unpacked, 'MyType'); // decode the inflated bytes
However, if I try to do this in a client-server model, I can't get it working.
Server:
server.get('/data', function (req, res) {
  const data = getMyData();
  const buffer = proto.encode(data, 'MyType');
  res.setHeader('content-type', 'application/octet-stream;');
  res.setHeader('Content-Encoding', 'gzip;');
  return res.send(200, buffer);
});
My own proto library serializes the data to protobuf and then deflates it:
...
let buffer = type.encode(message).finish();
buffer = pako.deflate(buffer);
return buffer;
The request looks like this:
public getData() {
  return new Promise((resolve, reject) => {
    const request = new XMLHttpRequest();
    request.open("GET", this.url, true);
    request.responseType = "arraybuffer";
    request.onload = function (evt) {
      const arr = new Uint8Array(request.response);
      const payload = proto.decode(arr, 'MyType'); // pass the typed-array view, not the raw ArrayBuffer
      resolve(payload);
    };
    request.send();
  });
}
The proto.decode method first inflates the buffer (buffer = pako.inflate(buffer);) and then deserializes it from protobuf.
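For reference, a hedged sketch of what that decode helper presumably looks like (the actual library isn't shown; root.lookupType assumes a protobufjs-style root, mirroring the type.encode(message).finish() call above):

function decode(buffer, typeName) {
  // hypothetical helper: inflate first, then deserialize
  const inflated = pako.inflate(new Uint8Array(buffer));
  const type = root.lookupType(typeName); // 'root' is an assumption
  return type.decode(inflated);
}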
If the request is made, I get the following error, "Uncaught incorrect header check", thrown by pako's inflate method:
function inflate(input, options) {
  var inflator = new Inflate(options);
  inflator.push(input, true);
  // That will never happens, if you don't cheat with options :)
  if (inflator.err) { throw inflator.msg || msg[inflator.err]; }
  return inflator.result;
}
I also looked at the request in Postman and found the following:
The deflated response looks like this: 120,156,60,221,119,64,21,237,119,39,240,247,246,242,10,49,191,244,178,73,54,157 and has a length of 378564.
The same request without deflating (the plain protobuf) looks like this:
�:�:
(� 0�8#H
(� 0�8#H
� (�0�8#�H
and has a length of 272613.
I assume I'm doing something wrong on the server side, since the compressed response comes out larger than the uncompressed one.
Is it the Content-Type header? I'm out of ideas.
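A hedged diagnosis, since the full proto library isn't shown: pako.deflate produces zlib-wrapped deflate output, not gzip; the leading bytes 120,156 in the Postman dump are the zlib magic 0x78 0x9C. The Content-Encoding: gzip header (which also shouldn't carry a trailing semicolon) therefore promises a format the body doesn't have, and when Content-Encoding is set, the browser tries to decompress the body itself before your code ever sees it, which is consistent with pako.inflate failing its header check. The length difference is probably a red herring: Postman renders the compressed body as comma-separated decimal text, which is longer than the raw binary dump of the uncompressed response. Two consistent setups, sketched under those assumptions:

// Option 1: send real gzip and let the browser decompress transparently
// (assumes proto.encode can return the raw protobuf without its built-in
// deflate step).
const zlib = require('zlib');

server.get('/data', function (req, res) {
  const data = getMyData();
  const buffer = proto.encode(data, 'MyType');        // raw protobuf bytes
  const gzipped = zlib.gzipSync(Buffer.from(buffer)); // real gzip framing
  res.setHeader('Content-Type', 'application/octet-stream');
  res.setHeader('Content-Encoding', 'gzip');          // no trailing ';'
  return res.send(200, gzipped);
});
// Client side: request.response then arrives already decompressed, so skip
// pako.inflate and call the protobuf decode directly.

// Option 2: keep pako's zlib output, but send no Content-Encoding header at
// all, so the browser leaves the body alone, and inflate manually:
//   const inflated = pako.inflate(new Uint8Array(request.response));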
Related
The overarching goal is to save some JSON data I create on a webpage to my files locally. I am definitely sending something to the server, but not in a format I seem to be able to access.
JsonData looks like:
{MetaData: {Stock: "UTX", Analysis: "LinearTrend2"}
Projections: [2018-10-12: 127.62, 2018-10-11: 126.36000000000001, 2018-10-10: 132.17, 2018-10-09: 140.12, 2018-10-08: 137.73000000000002, …]}
XMLHttpRequest on my webpage:
function UpdateBackTestJSON(JsonUpdate){ // JsonUpdate being the JSON object from above
  var request = new XMLHttpRequest();
  request.open('POST', 'UpdateBackTestJSON');
  request.setRequestHeader("Content-Type", "application/json;charset=UTF-8");
  // request.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
  request.onload = function() {
    console.log("Updated JSON File");
  };
  console.log("about to send request");
  console.log(JsonUpdate);
  request.send(JSON.stringify(JsonUpdate));
}
and I handle POSTs on my server (rather carelessly, I realize; just going for functionality as a start here):
var http = require('http')
  , fs = require('fs')
  , url = require('url')
  , port = 8008;

var server = http.createServer(function (req, res) {
  var uri = url.parse(req.url);
  var qs = require('querystring');
  if (req.method == 'POST'){
    var body = '';
    req.on('data', function (data){
      body += data;
      // 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
      if (body.length > 1e6){
        // FLOOD ATTACK OR FAULTY CLIENT, NUKE REQUEST
        req.connection.destroy();
      }
    });
    req.on('end', function () {
      var POST = qs.parse(body);
      console.log(POST); // parsed POST is not the right format... or something, idk what's going on
      UpdateBackTestData(POST);
    });
  }
}).listen(port);

function UpdateBackTestData(TheJsonData){
  console.log("UpdateBackTestData");
  console.log(TheJsonData);
  JsonUpdate = JSON.parse(TheJsonData);
  console.log(JsonUpdate["MetaData"]);
  //var Stock = JsonUpdate["MetaData"]["Stock"];
  //var Analysis = JsonUpdate["MetaData"]["Analysis"];
  fs.writeFile("/public/BackTestData/" + Analysis + "/" + Stock + ".json", TheJsonData, function(err){
    if(err){
      console.log(err);
    }
    console.log("updated BackTest JSON!!!");
  });
}
Most confusing to me is that when I run this, the JSON object I'm trying to pass does go through to the server, but the entire payload arrives as a string used as the key of a blank value in an object. When I parse the body of the POST, I get: {'{MetaData:{'Stock':'UTX','Analysis:'LinearTrend2'},'Projections':[...]}': ''}. So my data is there... but not in a practical format.
I would prefer not to use Express or other server frameworks, as I have a fair amount of other services set up on my server that I don't want to go back and change if I can avoid it.
Thanks for any help
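The likely fix, hedged since only these excerpts are shown: querystring.parse() is meant for application/x-www-form-urlencoded bodies, but the client sent JSON, which is why the entire payload becomes a single object key. Parse the body with JSON.parse instead:

req.on('end', function () {
  // The client sent Content-Type: application/json, so parse the body as
  // JSON; qs.parse() would treat the whole string as one key.
  var JsonUpdate = JSON.parse(body);
  console.log(JsonUpdate.MetaData); // { Stock: 'UTX', Analysis: 'LinearTrend2' }
  UpdateBackTestData(JsonUpdate);
});
// UpdateBackTestData then receives a real object, so its inner JSON.parse
// call should be dropped, and the Stock/Analysis variables re-enabled
// before they are used in the file path.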
I'm having a problem sending base64 image data using an ajax POST.
I think I have the wrong value for Content-Type, but I have tried application/json, text/json and image/jpeg without any success.
JavaScript:
function sendFormData(fD)
{
  var urls = fD.get('urls');
  console.log('urls', urls);
  var xhr = new XMLHttpRequest();
  xhr.open('POST', '/editsongs.update_artwork');
  alert(urls);
  xhr.setRequestHeader("Content-type", "image/jpeg");
  xhr.send(urls);
}
The browser console shows:
["data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxMTEhUTExMWFhUXGRgbGBgXGR0aGRgXHRgYFx4YGxkYHiogGh4lGxgdIjEhJSkrLi4uGh8zODMtNygtLisBCgoKDg0OGhAQGy0lHSUtLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLf/AABEIANEA8QMBIgACEQEDEQH/xAAcAAACAgMBAQAAAAAAAAAAAAAEBQMGAAECBwj/xABGEAACAQIEBAQDBAUJCAIDAAABAhEDIQAEEjEFIkFREzJhcQaBkRRCobEjUsHh8AcVM2JygsLR8RZDRFNzg5KyJDSzw8T/xAAZAQADAQEBAAAAAAAAAAAAAAABAgMABAX/xAAqEQACAgEDBQACAQQDAAAAAAAAAQIRAxIhMQQTFEFRImGRYnGh8AUjMv/aAAwDAQACEQMRAD8AsoosJt9cSpSdeYKYEXxKlQs4E2kEx6dMWJFBEDr0OPey5nCrR58MalwJUpB1nENbJWthpTyQDG5H5Y0gGqNQOJLLXA7h9K9VypGITSPbD+tSGIFy4mSJx0xz7EXjE7UscacNamW30jAlbLsNxiscqZNwoCK4wDEzJjkLiuoQjGNEYnCY5ZcawkMY5bEkY5GGsUijG1XtjcYPyFAQWMbbdsac9Ks0Y2wQphlwTKLUZg2wAwFmq8m5HYY5HFTRph0VnBI1QDZZgsZGwEnHPmn/ANb+lMcVqLFWRafKMTU+L6bRb88V/LcY+0otQKVmbdcbq1wsSYkgDfc7C3fHIoRlDVIs5tSpFzyXEtcdCdvlhopwp4RSpwvcAf64bj0x5mbSpUjrhdbmicSU1xi08djEWylGRjRGOsC5uodl379sBK2F8HbMAJxUON5lqrQFMA4f1CVU62ke2FaZlWqXgAbepx29MtD1VZzZXaol4RwYKsuAWP4…
Java server code:
public String updateArtwork(Request request, Response response)
{
    System.out.println("Received artwork");
    for(String s : request.queryParams())
    {
        System.out.println("---" + s);
    }
    System.out.println("ReadParms");
    return "";
}
which just outputs:
Received artwork
ReadParms
Update: sending as a form instead
// Once we got everything, time to retrieve our objects
function sendData()
{
  var fD = new FormData();
  // send Files data directly
  var files = imgList.filter(
    function isFile(obj)
    {
      return obj.type === 'file';
    }
  );
  files.forEach(
    function appendToFD(obj)
    {
      fD.append('files[]', obj.file);
    }
  );
  // for elems, we will need to grab the data from the server
  var elems = imgList.filter(
    function isElem(obj)
    {
      return obj.type === "element";
    }
  );
  var urls = elems.map(
    function grabURL(obj)
    {
      return obj.element.src;
    }
  );
  if (urls.length)
    fD.append('urls', JSON.stringify(urls));
  sendFormData(fD);
}
function sendFormData(fD)
{
  // but here we will just log the formData's content
  var files = fD.getAll('files[]');
  console.log('files: ', files);
  var urls = fD.get('urls');
  console.log('urls', urls);
  var xhr = new XMLHttpRequest();
  xhr.open('POST', '/editsongs.update_artwork');
  xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
  xhr.send(fD);
}
Then on the server I have:
public String updateArtwork(Request request, Response response)
{
    System.out.println("Received artwork");
    for(String s : request.queryParams())
    {
        System.out.println("***" + s);
        System.out.println(request.queryParams(s));
    }
    System.out.println("ReadParms");
    return "";
}
and it outputs:
Received artwork
***-----------------------------330219842643
Content-Disposition: form-data; name
"urls"
["data:image/jpeg;base64,/9j/4AAQSkZJRgABAQAAAQABAAD/2wCEAAkGBxMSEhUSExIWFhUXFxgXGBcYFRgXFxkdGBcWGBgYFx0YHSggHR0lHRkYITEhJSkrLi4uFyA1ODMtNygtLisBCgoKDg0OFQ8PFSsZFRkrLSstLSstKysrLS03KystLSstKy03LSstLSstNzc3KysrLS0tKysrKysrKysrKysrK//AABEIAKoBKQMBIgACEQEDEQH...."]
-----------------------------330219842643--
ReadParms
So I'm now getting the data, but I don't really understand how to parse the Content-Disposition part in Java.
This code wasn't originally written by me; as you can see, the FormData is constructed manually, it doesn't come from an actual form. My first attempt was to extract the data from the FormData and send it a different way; an alternative would be to not store it in FormData in the first place, but I don't know how to do that.
Update 2
Tried just sending the first url rather than FormData or an array of urls, because actually there is only ever one url. But it just doesn't work; nothing is received by the server.
function sendFormData(urls)
{
  console.log('urls', urls[0]);
  var xhr = new XMLHttpRequest();
  xhr.open('POST', '/editsongs.update_artwork');
  xhr.setRequestHeader("Content-type", "text/json");
  alert(JSON.stringify(urls[0]));
  xhr.send(JSON.stringify(urls[0]));
}
You are trying to read the data in the body using queryParams(), which only gives you the query parameters located in the URL.
Load the data from the request body using body() instead.
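A related client-side point (hedged, since only these snippets are shown): when sending a FormData object, don't set the Content-Type header yourself. The browser has to generate multipart/form-data with its own boundary string; overriding it with application/x-www-form-urlencoded is why the raw multipart framing (the boundary and Content-Disposition lines) leaked through as a single query-parameter key. A minimal sketch:

function sendFormData(fD)
{
  var xhr = new XMLHttpRequest();
  xhr.open('POST', '/editsongs.update_artwork');
  // No setRequestHeader() call: the browser sets
  // 'multipart/form-data; boundary=...' automatically for FormData bodies.
  xhr.send(fD);
}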
Perhaps the question is not worded in the greatest way, but here's some more context. Using GridFSBucket, I'm able to store a file in Mongo and obtain a download stream for that file. Here's my question: let's say I want to send that file back as the response to my HTTP request.
I do:
downloadStream.pipe(res);
On the client side, when I print the responseText, I get some long string with funky characters that look encrypted. What is the format/type of this string/stream? How do I set up my response so that I can get the streamed data as an ArrayBuffer on my client side?
Thanks
UPDATE:
I haven't solved the problem yet; however, the suggestion by @Jekrb gives exactly the same output as doing console.log(this.responseText). It looks like the string is not a buffer. Here is the output from these two lines:
console.log(this.responseText.toString('utf8'))
var byteArray = new Uint8Array(arrayBuffer);
UPDATE 2 - THE CODE SNIPPETS
Frontend:
var savePDF = function(blob){
  //fs.writeFile("test.pdf",blob);
  var xhr = new XMLHttpRequest();
  xhr.onreadystatechange = function() {
    if (this.readyState === XMLHttpRequest.DONE && this.status === 200){
      //TO DO: Handle the file in the response which we will be displayed.
      console.log(this.responseText.toString('utf8'));
      var arrayBuffer = this.responseText;
      if (arrayBuffer) {
        var byteArray = new Uint8Array(arrayBuffer);
      }
      console.log(arrayBuffer);
    }
  };
  xhr.open("POST", "/pdf", true);
  xhr.responseType = 'arrayBuffer';
  xhr.send(blob);
};
BACKEND:
app.post('/pdf', function(req, res){
  MongoClient.connect("mongodb://localhost:27017/test", function(err, db) {
    if(err) return console.dir(err);
    console.log("Connected to Database");
    var bucket = new GridFSBucket(db, { bucketName: 'pdfs' });
    var CHUNKS_COLL = 'pdfs.chunks';
    var FILES_COLL = 'pdfs.files';
    // insert file
    var uploadStream = bucket.openUploadStream('test.pdf');
    var id = uploadStream.id;
    uploadStream.once('finish', function() {
      console.log("upload finished!")
      var downloadStream = bucket.openDownloadStream(id);
      downloadStream.pipe(res);
    });
    // This pipes the POST data to the file
    req.pipe(uploadStream);
  });
});
My guess is that the response is being output either as plain binary that is not base64 encoded (still a buffer), or as a compressed (gzip) response that needs to be uncompressed first.
Hard to pinpoint the issue without seeing the code though.
UPDATE:
Looks like you're missing the proper response headers.
Try setting these headers before the downloadStream.pipe(res):
res.setHeader('Content-disposition', 'attachment; filename=test.pdf');
res.set('Content-Type', 'application/pdf');
Your stream is likely already a buffer. You might be able to call responseText.toString('utf8') to convert the streamed data into a readable string.
I solved it!!!
Basically, preset the response type to "arraybuffer" before you make the request, using
req.responseType = "arraybuffer"
Now, once you receive the response, don't use responseText; use response instead. response contains the ArrayBuffer with the data for the file.
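For completeness, a sketch of the fixed frontend under this approach. Note the responseType value must be the lowercase string "arraybuffer" (the "arrayBuffer" spelling in the snippet above is ignored by browsers), and the data is read from this.response, not this.responseText:

var savePDF = function(blob){
  var xhr = new XMLHttpRequest();
  xhr.open("POST", "/pdf", true);
  xhr.responseType = "arraybuffer"; // lowercase, set before send()
  xhr.onload = function() {
    if (this.status === 200) {
      var byteArray = new Uint8Array(this.response); // this.response holds the ArrayBuffer
      // e.g. wrap the bytes in a Blob to display or save the PDF
      var pdfBlob = new Blob([byteArray], { type: "application/pdf" });
      console.log("received", byteArray.length, "bytes");
    }
  };
  xhr.send(blob);
};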
I'm consuming a JSON stream and am trying to use fetch to do so. The stream emits some data every few seconds. Using fetch to consume the stream gives me access to the data only when the stream is closed server side. For example:
var target; // the url.
var options = {
  method: "POST",
  body: bodyString,
};
var drain = function(response) {
  // hit only when the stream is killed server side.
  // response.body is always undefined. Can't use the reader it provides.
  return response.text(); // or response.json();
};
var listenStream = fetch(target, options).then(drain).then(console.log).catch(console.log);
/*
returns data to the console log with a 200 code only when the server stream has been killed.
*/
However, several chunks of data have already been sent to the client by that point.
Using a Node-inspired method in the browser like this works every single time an event is sent:
var request = require('request');
var JSONStream = require('JSONStream');
var es = require('event-stream');

request(options)
  .pipe(JSONStream.parse('*'))
  .pipe(es.map(function(message) { // Pipe catches each fully formed message.
    console.log(message);
  }));
What am I missing? My instinct tells me that fetch should be able to mimic the pipe or stream functionality.
response.body gives you access to the response as a stream. To read a stream:
fetch(url).then(response => {
  const reader = response.body.getReader();
  reader.read().then(function process(result) {
    if (result.done) return;
    console.log(`Received a ${result.value.length} byte chunk of data`);
    return reader.read().then(process);
  }).then(() => {
    console.log('All done!');
  });
});
Here's a working example of the above.
Fetch streams are more memory-efficient than XHR, as the full response doesn't buffer in memory, and result.value is a Uint8Array, making it much more useful for binary data. If you want text, you can use TextDecoder:
fetch(url).then(response => {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  reader.read().then(function process(result) {
    if (result.done) return;
    const text = decoder.decode(result.value, {stream: true});
    console.log(text);
    return reader.read().then(process);
  }).then(() => {
    console.log('All done!');
  });
});
Here's a working example of the above.
Soon TextDecoder will become a transform stream, allowing you to do response.body.pipeThrough(new TextDecoder()), which is much simpler and allows the browser to optimise.
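(A hedged aside: that transform-stream API later shipped as TextDecoderStream; a sketch, assuming your target browsers support it:)

fetch(url).then(response => {
  const reader = response.body
    .pipeThrough(new TextDecoderStream())
    .getReader();
  return reader.read().then(function process(result) {
    if (result.done) return;
    console.log(result.value); // an already-decoded text chunk
    return reader.read().then(process);
  });
});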
As for your JSON case, streaming JSON parsers can be a little big and complicated. If you're in control of the data source, consider a format that's chunks of JSON separated by newlines. This is really easy to parse and leans on the browser's JSON parser for most of the work (a sketch follows below). Here's a working demo; the benefits can be seen at slower connection speeds.
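A sketch of that newline-delimited approach, buffering any partial line between chunks (the url and the one-JSON-message-per-line format are assumptions here):

fetch(url).then(response => {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffered = '';
  return reader.read().then(function process(result) {
    if (result.done) return;
    buffered += decoder.decode(result.value, {stream: true});
    const lines = buffered.split('\n');
    buffered = lines.pop(); // keep the trailing partial line for the next chunk
    for (const line of lines) {
      if (line.trim()) console.log(JSON.parse(line)); // one complete message per line
    }
    return reader.read().then(process);
  });
});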
I've also written an intro to web streams, which includes their use from within a service worker. You may also be interested in a fun hack that uses JavaScript template literals to create streaming templates.
It turns out I could get XHR to work, which doesn't really answer the request-vs-fetch question. It took a few tries and the right ordering of operations to get it right. Here's the abstracted code. @jaromanda was right.
var _tryXhr = function(target, data) {
  console.log(target, data);
  var xhr = new XMLHttpRequest();
  xhr.onreadystatechange = function () {
    console.log("state change.. state: " + this.readyState);
    console.log(this.responseText);
    if (this.readyState === 4) {
      // gets hit on completion.
    }
    if (this.readyState === 3) {
      // gets hit on new event
    }
  };
  xhr.open("POST", target);
  xhr.setRequestHeader("cache-control", "no-cache");
  xhr.setRequestHeader("Content-Type", "application/json");
  xhr.send(data);
};
I have a content script that downloads some binary data using XHR, which is later sent to the background script:
var self = this;
var xhr = new XMLHttpRequest();
xhr.open('GET', url);
xhr.responseType = 'arraybuffer';
xhr.onload = function(e) {
  if (this.status == 200) {
    self.data = {
      data: xhr.response,
      contentType: xhr.getResponseHeader('Content-Type')
    };
  }
};
xhr.send();

... later ...

sendResponse({data: self.data});
After receiving this data in the background script, I'd like to form another XHR request that uploads the binary data to my server, so I do:
var formData = new FormData();
var bb = new WebKitBlobBuilder();
bb.append(data.data);
formData.append("data", bb.getBlob(data.contentType));
var req = new XMLHttpRequest();
req.open("POST", serverUrl);
req.send(formData);
The problem is that the file uploaded to the server contains just the string "[object Object]". I guess this happens because the ArrayBuffer type is lost somehow while being transferred from the content process to the background? How can I solve that?
Messages passed between a content script and a background page are JSON-serialized.
If you want to transfer an ArrayBuffer object through a JSON-serialized channel, wrap the buffer in a view before and after transferring.
I'll show an isolated example, so that the solution is generally applicable and not just to your case. The example shows how to pass around ArrayBuffers and typed arrays, but the method can also be applied to File and Blob objects by using the FileReader API.
// In your case: self.data = { data: new Uint8Array(xhr.response), ...
// Generic example:
var example = new ArrayBuffer(10);
var data = {
  // Create a view
  data: Array.apply(null, new Uint8Array(example)),
  contentType: 'x-an-example'
};

// Transport over a JSON-serialized channel. In your case: sendResponse
var transportData = JSON.stringify(data);
//"{"data":[0,0,0,0,0,0,0,0,0,0],"contentType":"x-an-example"}"

// At the receiver's end. In your case: chrome.extension.onRequest
var receivedData = JSON.parse(transportData);

// receivedData.data is a plain Array, NOT an ArrayBuffer or Uint8Array
receivedData.data = new Uint8Array(receivedData.data).buffer;

// Now, receivedData.data is the expected ArrayBuffer object
This solution has been tested successfully in Chrome 18 and Firefox.
new Uint8Array(xhr.response) is used to create a view of the ArrayBuffer, so that the individual bytes can be read.
Array.apply(null, <Uint8Array>) is used to create a plain array from the values of the Uint8Array view. This step reduces the size of the serialized message. WARNING: this method only works for small amounts of data; when the size of the typed array exceeds 125836, a RangeError will be thrown. If you need to handle large pieces of data, use another method to convert between typed arrays and plain arrays, such as the loop sketched below.
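For example (a hedged sketch), a plain loop sidesteps Function.prototype.apply's argument-list limit:

// Builds the plain array one element at a time, so no huge argument list
// is ever created.
function typedArrayToPlainArray(view) {
  var result = new Array(view.length);
  for (var i = 0; i < view.length; i++) {
    result[i] = view[i];
  }
  return result;
}
// e.g.: data: typedArrayToPlainArray(new Uint8Array(xhr.response))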
At the receiver's end, the original buffer can be obtained by creating a new Uint8Array and reading its buffer attribute.
Implementation in your Google Chrome extension:
// Part of the content script
self.data = {
  data: Array.apply(null, new Uint8Array(xhr.response)),
  contentType: xhr.getResponseHeader('Content-Type')
};
...
sendResponse({data: self.data});

// Part of the background page
chrome.runtime.onMessage.addListener(function(data, sender, callback) {
  ...
  data.data = new Uint8Array(data.data).buffer;
Documentation
MDN: Typed Arrays
MDN: ArrayBuffer
MDN: Uint8Array
MDN: <Function> .apply
Google Chrome Extension docs: Messaging > Simple one-time requests
"This lets you send a one-time JSON-serializable message from a content script to extension, or vice versa, respectively"
SO bonus: Upload a File in a Google Chrome Extension - Using a Web worker to request, validate, process and submit binary data.
For Chromium extensions using Manifest V3, the URL.createObjectURL() approach no longer works, because it is prohibited in service workers.
The best (easiest) way to pass data from a service worker to a content script (and vice versa) is to convert the blob into a base64 representation.
const fetchBlob = async url => {
  const response = await fetch(url);
  const blob = await response.blob();
  const base64 = await convertBlobToBase64(blob);
  return base64;
};

const convertBlobToBase64 = blob => new Promise(resolve => {
  const reader = new FileReader();
  reader.readAsDataURL(blob);
  reader.onloadend = () => {
    const base64data = reader.result;
    resolve(base64data);
  };
});
Then send the base64 string to the content script.
Service worker:
chrome.tabs.sendMessage(sender.tab.id, { type: "LOADED_FILE", base64: base64 });
Content script:
chrome.runtime.onMessage.addListener(async (request, sender) => {
  if (request.type == "LOADED_FILE" && sender.id == '<your_extension_id>') {
    // do anything you want with the data from the service worker.
    // e.g. convert it back to a blob
    const response = await fetch(request.base64);
    const blob = await response.blob();
  }
});