How to inflate part of a string - javascript

While building an NNTP client in Node.js, I encountered the following problem. When calling the XZVER command, the first data I receive from the socket connection contains both a plain-text response line and deflated (compressed) data:
224 compressed data follows (zlib version 1.2.3.3)
^*�u�#����`*�Ti���d���x�;i�R��ɵC���eT�����U'�|/S�0���� rd�
z�t]2���t�bb�3ѥ��>�͊0�ܵ��b&b����<1/ �C�<[<��d���:��VW̡��gBBim�$p#I>5�cZ�*ψ%��u}i�k�j
�u�t���8�K��`>��im
When I split this string and try to inflate it like this:
lines = chunk.toString().split('\r\n');
response = lines.shift();
zlib.inflate(new Buffer(lines.shift()), function (error, data) {
  console.log(arguments);
  callback();
});
I receive the following error:
[Error: invalid code lengths set] errno: -3, code: 'Z_DATA_ERROR'
Any help is welcome, I am kinda stuck here :(
UPDATE
After implementing mscdex's answer, the whole function looks like this:
var util = require('util'),
    zlib = require('zlib'),
    Transform = require('stream').Transform;

function CompressedStream () {
  var self = this;

  this._transform = function (chunk, encoding, callback) {
    var response = chunk.toString(),
        crlfidx = response.indexOf('\r\n');
    response = response.substring(0, crlfidx);
    console.log(response);

    zlib.gunzip(chunk.slice(crlfidx + 2), function (error, data) {
      console.log(arguments);
      callback();
    });
  };

  Transform.call(this/*, { objectMode: true } */);
}

util.inherits(CompressedStream, Transform);
module.exports = CompressedStream;

You should probably avoid using split() in case those two bytes (CR and LF) also occur in the raw compressed data. You might try something like this instead:
var response = chunk.toString(),
    crlfidx = response.indexOf('\r\n');
// should probably check crlfidx > -1 here ...
response = response.substring(0, crlfidx);
zlib.inflate(chunk.slice(crlfidx + 2), function (error, data) {
  console.log(arguments);
  callback();
});
However, if you're doing this inside a 'data' event handler, you should be aware that you may not get the data you expect in a single chunk. Specifically, you could get a CRLF split between chunks, or you could get multiple responses in a single chunk.
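A rough sketch of that kind of buffering (the socket and pending names here are placeholders, not from the original code): accumulate raw Buffers until the CRLF-terminated response line has arrived, then treat everything after it as compressed payload:
var pending = new Buffer(0);

socket.on('data', function (chunk) {
  pending = Buffer.concat([pending, chunk]);

  // 'binary' maps one byte to one character, so the index is a byte offset
  var crlfidx = pending.toString('binary').indexOf('\r\n');
  if (crlfidx === -1) return; // response line not complete yet

  var response = pending.slice(0, crlfidx).toString();
  var payload = pending.slice(crlfidx + 2);
  // inflate `payload` here; note it may still be arriving in later chunks
});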

It seems that my chunks were being incorrectly encoded. Removing socket.setEncoding('utf8'); solved the problem.
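In other words (just a sketch, not the exact client code): with setEncoding('utf8') every chunk is decoded into a string before you see it, which mangles the non-UTF-8 deflate bytes; with no encoding set, the 'data' handler receives intact Buffers:
// with socket.setEncoding('utf8') chunks arrive as already-decoded strings
// and the raw deflate bytes get corrupted; without it they stay Buffers
socket.on('data', function (chunk) {
  console.log(Buffer.isBuffer(chunk)); // true when no encoding has been set
});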

Related

Node.js https.get return invalid json

Any help is welcome; I have been struggling for many hours now...
I have a fairly straightforward piece of code in which I ping the GitHub API to retrieve a JSON document. If I execute this code synchronously it works very well. However, when I receive multiple calls at the same time (or run it with async.parallel), sometimes the result in aa is invalid JSON (I can see it in my logs) and JSON.parse crashes. It seems like there's a chunk missing from the final version of aa:
app.get('/update-tests', function(req, res) {
  const getOptionsGitHub = {
    ...
  };
  var req = https.get(getOptionsGitHub, function(res) {
    aa = "";
    res.on('data', function(chunk) { aa += chunk; });
    res.on('end', function() {
      try {
        console.dir(aa);
        callback(JSON.parse(aa));
      } catch (e) {
        console.dir(e);
      }
    });
  });
  res.send(200);
});
Any idea why I sometimes end up with a missing chunk?

nodejs decoded string from buffer returns multi line string

I have a buffer which I want to decode so that I can treat it as an object.
I have the setup below:
var StringDecoder = require('string_decoder').StringDecoder;
var req = http.request(reqOptions, function(res) {
  ...
  var decoder = new StringDecoder('utf8');
  res.on('data', function(chunk) {
    var textChunk = decoder.write(chunk);
    console.log(textChunk);
  });
});
textChunk comes back like this - note how the JSON is spread over multiple lines:
{"aaa":true,"bbb":true, "cc
c":true, "ddd":true, "eee":true, "f
ff":true, "ggg":true}
So when I try to convert it to an object with
JSON.parse(textChunk)
I get Unexpected end of input.
How can I get my string back as one line so that I can then treat it as an object?
{"aaa":true,"bbb":true, "ccc":true, "ddd":true, "eee":true, "fff":true, "ggg":true}
You need to create a string variable to concatenate all the chunks in, and then use your decoder at the end. Something like this:
var text = '';
res.on('data', function(chunk) {
  text += chunk;
});
res.on('end', function() {
  var decoder = new StringDecoder('utf8');
  var result = decoder.write(text);
  // Do something with the result
});
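A variation on the same idea (a sketch against the same res stream) is to run each Buffer chunk through the decoder as it arrives, so a multi-byte character split across chunks is still decoded correctly, and parse once the response ends:
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');
var text = '';

res.on('data', function(chunk) {
  // decode each chunk as it arrives; the bytes of a partial
  // multi-byte character are held back until the next chunk
  text += decoder.write(chunk);
});

res.on('end', function() {
  text += decoder.end(); // flush anything still buffered in the decoder
  var result = JSON.parse(text);
  // Do something with the result
});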
Try request.js:
npm install request --save
and pipe the result instead - MUCH simpler, trust me.
The whole http.request / response handling would be replaced by a single request(url) call (or a pipe from it), with the result being a nice object to parse or consume in a service or factory.
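A sketch of that approach (the URL here is a placeholder): the request module collects the whole response body for you, so there is no chunk handling left to do:
var request = require('request');

request('http://example.com/api.json', function (error, response, body) {
  if (error) return console.error(error);
  var obj = JSON.parse(body); // body is the complete response as a single string
  console.log(obj);
});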

Request Stream Get + Post edited JSON body in Node.js

I'm new to Node.js and am working on a project and I'd like to use Request to stream from one endpoint to another. My goal is to use Request to get and post an edited JSON body using a pipe stream. I know that when doing so, content-type and content-length will be preserved in the POST headers. However, I would like to apply .forEach to all JSON objects in the body from the first url, and post them to the second url.
I'm not sure about the correct format, and I'd appreciate some clarification.
I know the basic syntax is this:
request.get('URL').pipe(request.post('URL'));
And so far my best guess is something like this:
request('FIRST_URL', function (error, response, body) {
  body = JSON.parse(body);
  body.forEach(function(arg) {
    // return edited body
  });
}).pipe(request.post('SECOND_URL'));
Am I missing something? Is there a better way to do this?
You could write your own transform stream. For example:
var Transform = require('stream').Transform;
var inherits = require('util').inherits;

function JSONTransform() {
  Transform.call(this);
  this._buffer = '';
}
inherits(JSONTransform, Transform);

JSONTransform.prototype._transform = function(chunk, enc, cb) {
  this._buffer += chunk;
  cb();
};

JSONTransform.prototype._flush = function(cb) {
  try {
    var result = JSON.parse(this._buffer);
    this._buffer = null;
    // Do whatever transformations
    // ...
    this.push(JSON.stringify(result));
    cb();
  } catch (ex) {
    cb(ex);
  }
};

// Then just pipe
request.get('FIRST_URL')
  .pipe(new JSONTransform())
  .pipe(request.post('SECOND_URL'));
One other slightly different solution that may be worth considering would be to use a third-party streaming JSON parser module, which may or may not work for your use case.

How to make this nodeJs code synchronous using Meteor.wrapAsync?

I have some Node.js code on my server that turns a CSV file into a JSON object. I'm trying to figure out how to utilize wrapAsync with my code, but I'm not too sure how. Here is my attempt:
readFile: function(fileName){
  var readFileToJsonSync = Meteor.wrapAsync(readFileToJson);
  var result = readFileToJsonSync(fileName);
  console.log(result);
},
});

if (Meteor.isServer){
  var readFileToJson = function(fileName, cb){
    var nodeFS = Meteor.npmRequire('node-fs');
    var Converter = Meteor.npmRequire("csvtojson").Converter;
    var fileStream = nodeFS.createReadStream("/Users/ray/Desktop/juju/upload/"+fileName, 'utf8');
    // new converter instance
    var converter = new Converter({constructResult: true});
    // end_parsed will be emitted once parsing has finished
    converter.on("end_parsed", function (jsonObj) {
      return jsonObj; // here is your result json object
    });
    // read from file
    fileStream.pipe(converter);
  }
}
And in the client:
Meteor.call("readFile", Meteor.user().emails[0].address+'/'+Session.get("file1"), function (error, result){
  console.log(result);
});
When I run this code asynchronously, the proper output gets printed in the terminal, but undefined is printed in the client console. This current attempt prints nothing on either the server or the client.
You are missing a return on your method. This may not fix your problem since you should have 2 client logs (1 from the method and 1 from the callback). Let me know.
Meteor.methods({
  readFile: function(fileName){
    var readFileToJsonSync = Meteor.wrapAsync(readFileToJson);
    var result = readFileToJsonSync(fileName);
    console.log(result);
    return result;
  }
});
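Note also that Meteor.wrapAsync only returns once the wrapped function invokes its callback, so readFileToJson has to hand its result to cb rather than returning it from the event handler. A sketch, reusing the same Converter setup from the question:
var readFileToJson = function (fileName, cb) {
  var nodeFS = Meteor.npmRequire('node-fs');
  var Converter = Meteor.npmRequire("csvtojson").Converter;
  var fileStream = nodeFS.createReadStream("/Users/ray/Desktop/juju/upload/" + fileName, 'utf8');
  var converter = new Converter({constructResult: true});

  converter.on("end_parsed", function (jsonObj) {
    cb(null, jsonObj); // give the result to wrapAsync instead of returning it
  });

  fileStream.pipe(converter);
};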

Javascript String nodejs stream implementation

I need a Node.js stream (http://nodejs.org/api/stream.html) implementation that collects the data written to it into a string. Do you know of one?
To be direct, I'm trying to pipe a request response like this:
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
FROM https://github.com/mikeal/request
Thanks
It would not be difficult to write a class that conforms to the Stream interface; here's an example that implements the very basics, and seems to work with the request module you linked:
var stream = require('stream');
var util = require('util');
var request = require('request');

function StringStream() {
  stream.Stream.call(this);
  this.writable = true;
  this.buffer = "";
}
util.inherits(StringStream, stream.Stream);

StringStream.prototype.write = function(data) {
  if (data && data.length)
    this.buffer += data.toString();
};

StringStream.prototype.end = function(data) {
  this.write(data);
  this.emit('end');
};

StringStream.prototype.toString = function() {
  return this.buffer;
};

var s = new StringStream();
s.on('end', function() {
  console.log(this.toString());
});

request('http://google.com').pipe(s);
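For comparison, here is a similar sketch built on the stream.Writable base class (available once streams2 landed), which takes care of the writable plumbing and emits 'finish' when the source has ended:
var Writable = require('stream').Writable;
var util = require('util');
var request = require('request');

function StringSink() {
  Writable.call(this);
  this.buffer = '';
}
util.inherits(StringSink, Writable);

// _write is called for every chunk piped into the stream
StringSink.prototype._write = function (chunk, encoding, callback) {
  this.buffer += chunk.toString();
  callback();
};

var sink = new StringSink();
sink.on('finish', function () {
  console.log(this.buffer);
});

request('http://google.com').pipe(sink);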
You might find the class Sink in the pipette module to be handy for this use case. Using that you can write:
var pipette = require('pipette');

var sink = new pipette.Sink(request(...));
sink.on('data', function(buffer) {
  console.log(buffer.toString());
});
Sink also handles error events coming back from the stream reasonably gracefully. See https://github.com/Obvious/pipette#sink for details.
[Edit: because I realized I used the wrong event name.]
