How do I unzip a gzipped body in a response from the request module?
I have tried several examples from around the web, but none of them appear to work.
request(url, function(err, response, body) {
  if (err) {
    handleError(err);
  } else {
    if (response.headers['content-encoding'] == 'gzip') {
      // How can I unzip the gzipped string body variable?
      // For instance, this url:
      // http://highsnobiety.com/2012/08/25/norse-projects-fall-2012-lookbook/
      // Throws error:
      // { [Error: incorrect header check] errno: -3, code: 'Z_DATA_ERROR' }
      // Yet, browser displays page fine and debugger shows it's gzipped
      // And unzipped by browser fine...
      if (response.headers['content-encoding'] && response.headers['content-encoding'].toLowerCase().indexOf('gzip') > -1) {
        var body = response.body;
        zlib.gunzip(response.body, function(error, data) {
          if (!error) {
            response.body = data.toString();
          } else {
            console.log('Error unzipping:');
            console.log(error);
            response.body = body;
          }
        });
      }
    }
  }
});
I couldn't get request to work either, so I ended up using http instead.
var http = require("http"),
    zlib = require("zlib");

function getGzipped(url, callback) {
  // buffer to store the streamed decompression
  var buffer = [];
  http.get(url, function (res) {
    // pipe the response into the gunzip to decompress
    var gunzip = zlib.createGunzip();
    res.pipe(gunzip);
    gunzip.on('data', function (data) {
      // decompression chunk ready, add it to the buffer
      // (kept as a Buffer; calling toString() per chunk can garble
      // multi-byte characters that straddle chunk boundaries)
      buffer.push(data);
    }).on("end", function () {
      // response and decompression complete, join the buffer and return
      callback(null, Buffer.concat(buffer).toString());
    }).on("error", function (e) {
      callback(e);
    });
  }).on('error', function (e) {
    callback(e);
  });
}

getGzipped(url, function (err, data) {
  console.log(data);
});
Try adding encoding: null to the options you pass to request; this avoids converting the downloaded body to a string and keeps it in a binary buffer.
Like @Iftah said, set encoding: null.
Full example (less error handling):
var request = require('request');
var zlib = require('zlib');

request(url, { encoding: null }, function (err, response, body) {
  if (response.headers['content-encoding'] == 'gzip') {
    zlib.gunzip(body, function (err, dezipped) {
      callback(dezipped.toString());
    });
  } else {
    callback(body);
  }
});
Actually, the request module can handle the gzip response itself. To tell it to decode the body argument in the callback function, set gzip to true in the options. Let me explain with an example.
Example:
var opts = {
  uri: 'some uri which returns gzip data',
  gzip: true
};

request(opts, function (err, res, body) {
  // now body and res.body both will contain decoded content.
});
Note: the data you get on the 'response' event is not decoded.
This works for me. Hope it works for you guys too.
A similar problem we usually run into while working with the request module is JSON parsing. If you want the request module to automatically parse the body and provide JSON content in the body argument, set 'json' to true in the options.
var opts = {
  uri: 'some uri that provides json data',
  json: true
};

request(opts, function (err, res, body) {
  // body and res.body will contain json content
});
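The two options combine naturally. Here is a minimal sketch (the endpoint URI is hypothetical) that fetches gzipped JSON with both flags set:
var request = require('request');

var opts = {
  uri: 'https://api.example.com/data', // hypothetical endpoint returning gzipped JSON
  gzip: true, // transparently decompress the response body
  json: true  // parse the decompressed body as JSON
};

request(opts, function (err, res, body) {
  if (err) return console.error(err);
  // body is now a parsed JavaScript object
  console.log(body);
});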
Reference: https://www.npmjs.com/package/request#requestoptions-callback
As seen in https://gist.github.com/miguelmota/9946206:
Both request and request-promise handle it out of the box as of Dec 2017:
var request = require('request')

request(
  { method: 'GET'
  , uri: 'http://www.google.com'
  , gzip: true
  }
, function (error, response, body) {
    // body is the decompressed response body
    console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'))
    console.log('the decoded data is: ' + body)
  }
)
I have put together a more complete answer after trying the different ways to gunzip and solving errors to do with encoding.
Hope this helps you too:
var request = require('request');
var zlib = require('zlib');

var options = {
  url: 'http://some.endpoint.com/api/',
  headers: {
    'X-some-headers': 'Some headers',
    'Accept-Encoding': 'gzip, deflate'
  },
  encoding: null
};

request.get(options, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    // If response is gzip, unzip first
    var encoding = response.headers['content-encoding'];
    if (encoding && encoding.indexOf('gzip') >= 0) {
      zlib.gunzip(body, function (err, dezipped) {
        var json_string = dezipped.toString('utf-8');
        var json = JSON.parse(json_string);
        // Process the json..
      });
    } else {
      // Response is not gzipped
    }
  }
});
Here's my two cents' worth. I had the same problem and found a cool library called concat-stream:
const request = require('request');
const zlib = require('zlib');
const concat = require('concat-stream');

request(url)
  .pipe(zlib.createGunzip())
  .pipe(concat(stringBuffer => {
    console.log(stringBuffer.toString());
  }));
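One caveat worth adding: pipe() does not forward errors downstream, so a hedged variant with explicit error handlers on each stream looks like this:
const request = require('request');
const zlib = require('zlib');
const concat = require('concat-stream');

request(url)
  .on('error', err => console.error('request error:', err))
  .pipe(zlib.createGunzip())
  .on('error', err => console.error('gunzip error:', err))
  .pipe(concat(stringBuffer => {
    console.log(stringBuffer.toString());
  }));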
Here's a working example (using the request module for node) that gunzips the response:
var zlib = require('zlib');

function gunzipJSON(response) {
  var gunzip = zlib.createGunzip();
  var json = "";
  gunzip.on('data', function (data) {
    json += data.toString();
  });
  gunzip.on('end', function () {
    parseJSON(json); // parseJSON is your own handler for the decoded string
  });
  response.pipe(gunzip);
}
Full code: https://gist.github.com/0xPr0xy/5002984
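A hypothetical usage sketch (the url variable is assumed): stream the raw response into the helper above only when it is actually gzipped:
var request = require('request');

request({ url: url, headers: { 'Accept-Encoding': 'gzip' } })
  .on('response', function (response) {
    if (response.headers['content-encoding'] === 'gzip') {
      gunzipJSON(response);
    }
  });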
I'm using node-fetch. I was getting response.body, but what I really wanted was await response.text().
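A minimal sketch of that (assuming a node-fetch v2-style require; node-fetch decompresses gzip by default):
const fetch = require('node-fetch');

(async () => {
  const response = await fetch('https://example.com'); // hypothetical URL
  const text = await response.text(); // decompressed body as a string
  console.log(text);
})();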
With got, a request alternative, you can simply do:
got(url).then(response => {
  console.log(response.body);
});
Decompression is handled automagically when needed.
I used the gunzipSync convenience method in Node.js to decompress the body. This avoids working with callbacks. Note that gunzipSync needs the raw bytes, so fetch the body as a Buffer (e.g. with encoding: null):
import * as zlib from "zlib";
const uncompressedBody: string = zlib.gunzipSync(body).toString("utf-8");
(in TypeScript)
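In plain Node, a hedged sketch wrapping the same call with error handling, since gunzipSync throws (e.g. Z_DATA_ERROR) on malformed input:
const zlib = require('zlib');

function gunzipToString(body) {
  try {
    return zlib.gunzipSync(body).toString('utf-8');
  } catch (err) {
    // thrown when body is not valid gzip data, for instance after a
    // lossy string conversion; fetch with { encoding: null } so the
    // body stays a Buffer
    console.error('gunzip failed:', err);
    return '';
  }
}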
Related
I am simply trying to output a single value (thumbnail) from an XML file in Node.js. I feel like I am so close but can't figure it out.
var request = require('request');
request('https://boardgamegeek.com/xmlapi/game/1', (error, response, body) => {
  if (error) { return console.log(error); }
  console.log(body.thumbnail);
});
You need an XML parser, for example xml2js:
var request = require('request');
var parseString = require('xml2js').parseString;

request('https://boardgamegeek.com/xmlapi/game/1', (error, response, body) => {
  if (error) { return console.log(error); }
  parseString(body, function (err, result) {
    console.dir(result);
  });
});
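From there you can drill into the parsed object. A hedged sketch of the callback body, assuming the BGG response nests the value under boardgames/boardgame (xml2js wraps every element in an array by default, so inspect the console.dir(result) output first):
parseString(body, function (err, result) {
  if (err) { return console.log(err); }
  // assumed path based on the BGG XML structure; verify it against
  // what console.dir(result) prints
  console.log(result.boardgames.boardgame[0].thumbnail[0]);
});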
Double-check by using the console to see all of body, i.e.:
console.log(body)
Then you will see the options you have available. Show us what you get and we can be more specific, or it may be enough for you to work it out at a glance. You are on the right track; it just depends on the data structure that is there for you.
I looked at the Node.js documentation and didn't find how to apply options (settings) together with a callback function for error handling. And I have to use .writeFileSync (not the asynchronous .writeFile):
const settings = {
  flags: 'w',
  encoding: null, // must be null
  mode: 0o666,
  autoClose: true // could not find this option in the docs
};

fs.writeFileSync(dest, buff, settings);
Before, I used:
fs.writeFileSync(dest, buff, function (err) {
  if (err) {
    ...
  } else { ... console.log("OK") }
});
but I found that I have to apply the encoding: null option to prevent any modification of the source data (buff); otherwise the file can be corrupted.
Edit:
After the amazing answers and explanations, I would like to say that I was confused by the Node.js documentation:
fs.writeFileSync(file, data[, options]) is
"The synchronous version of fs.writeFile()."
Since it is a version of the fs.writeFile() method, I thought it could have the same function signature...
And here is my final version of the code, but it still has an issue with the decoding of binary files (they can be any file type). (By the way, when I tried to use Axios.js I saw errors: "Request failed with status code 500".)
function download(url, dest, fileName, callback) {
  //import http from 'http';
  var request = http.get(url, function (response) {
    var bodyParts = [];
    var bytes = 0;
    response.on("data", function (c) {
      bodyParts.push(c);
      bytes += c.length;
    });
    response.on("end", function () {
      // flatten into one big buffer (Buffer.alloc replaces the
      // deprecated new Buffer(); Buffer.concat(bodyParts) would also do)
      var buff = Buffer.alloc(bytes);
      var copied = 0;
      for (var i = 0; i < bodyParts.length; i++) {
        bodyParts[i].copy(buff, copied, 0);
        copied += bodyParts[i].length;
      }
      const settings = {
        flags: 'w',
        encoding: null, // not applicable / no changes
        mode: 0o666
      };
      try {
        fs.writeFileSync(dest, buff, settings);
        let msgOK = {
          filename: fileName,
          status: 'OK',
          text: `File downloaded successfully`
        };
        if (callback) callback(msgOK);
        console.log(msgOK.text);
        isLoading = false; // IMPORTANT!
      } catch (err) {
        console.error(err.stack || err.message);
        let msgErr = {
          filename: fileName,
          status: 'ERROR',
          text: `Error in file downloading ${err.message}`
        };
        ERRORS.push(err);
        if (callback) callback(msgErr);
      }
    });
  });
}
The synchronous versions of the file system methods do not accept a callback; they throw in case of error, so you should catch it.
When using the synchronous form any exceptions are immediately thrown.
You can use try/catch to handle exceptions or allow them to bubble up.
try {
  fs.writeFileSync(dest, buff);
  // You don't need a callback; the file is saved at this point
} catch (e) {
  console.error(e);
}
There's no autoClose setting for fs.writeFileSync; the only available options are:
encoding <String> | <Null> default = 'utf8'
mode <Number> default = 0o666
flag <String> default = 'w'
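For example, a call that uses only the documented options looks like this:
// valid: an options object (no callback) with only the documented keys
fs.writeFileSync(dest, buff, { encoding: null, mode: 0o666, flag: 'w' });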
Last but not least, you should update your Node version, since Node.js 4.x reaches end of life in less than a week (2018-04-30).
fs.writeFileSync throws an error, so you can do:
try {
  fs.writeFileSync(dest, buff);
} catch (err) {
  // do something
}
You don't need a callback because it's synchronous; just put your code after the writeFileSync call.
I suggest you use:
try {
  fs.writeFileSync(dest, buff, settings);
} catch (e) {
  // your error handling
}
I have spent a couple of hours trying to get image data as a buffer. Search results led me to the "request" module; other suggestions pointed to modules requiring a higher version of Node, which I cannot use because we depend on Node v6.11 for now.
Here are my trials:
request(imageURL).pipe(fs.createWriteStream('downloaded-img-1.jpg')).on('close', function () {
  console.log('ok');
});

// (the following presumably runs inside a Promise executor, hence resolve;
// chunks is an array defined in the enclosing scope)
request(imageURL, function (err, message, response) {
  fs.writeFile('downloaded-img-2.jpg', response, 'binary', function (err) {
    console.log('File saved.');
  });
  fs.writeFile('downloaded-img-3.jpg', chunks, 'binary', function (err) {
    console.log('File saved.');
  });
  resolve(response);
})
  .on('data', function (chunk) {
    chunks.push(chunk);
  })
  .on('response', function (response) {
  });
The "downloaded-img-1.jpg" gets downloaded correctly, but I have to avoid saving the file on disk, then read it as a stream, it's a PRD environment constraint. So the next option is to use image data, as demonstrated by "downloaded-img-2.jpg" and "downloaded-img-3.jpg", by waiting for the "response" or the hand-made "chunks", the problem is that these 2 images are always corrupted, and I don't know why?
What is the point behind all of that? I am trying to add the image behind the URL in a zip file, and the zip lib I use (js-zip) accepts buffer as an input. Any ideas why I am not getting the "chunks" or the "response" correctly?
I've tested the code below in Node 6.9.2; it downloads an image as a buffer. I also write the buffer to a file (just to test all is OK!). The body object is a buffer containing the image data:
"use strict";
var request = require('request');
var fs = require('fs');
var options = {
url: "https://upload.wikimedia.org/wikipedia/commons/thumb/5/52/Hubble2005-01-barred-spiral-galaxy-NGC1300.jpg/1920px-Hubble2005-01-barred-spiral-galaxy-NGC1300.jpg",
method: "get",
encoding: null
};
console.log('Requesting image..');
request(options, function (error, response, body) {
if (error) {
console.error('error:', error);
} else {
console.log('Response: StatusCode:', response && response.statusCode);
console.log('Response: Body: Length: %d. Is buffer: %s', body.length, (body instanceof Buffer));
fs.writeFileSync('test.jpg', body);
}
});
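To connect this to the original goal, a hedged sketch feeding that buffer into js-zip (assuming the jszip package and its generateAsync API):
var JSZip = require('jszip');

// body is the image Buffer from the request callback above
var zip = new JSZip();
zip.file('image.jpg', body);
zip.generateAsync({ type: 'nodebuffer' }).then(function (zipBuffer) {
  // zipBuffer never touches the disk; hand it to whatever needs the zip
  console.log('zip size:', zipBuffer.length);
});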
I'm building an application with React Native. I'm trying to send an image from the mobile device to the server (Node.js), using react-native-image-picker. The problem is that when I send the image, the server saves a file but it's empty; it doesn't contain the photo. I think the problem is probably that the server can't access the image path because it's on a different device, but I don't know how to solve it.
React-Native:
openImagePicker() {
  const options = {
    title: 'Select Avatar',
    storageOptions: {
      skipBackup: true,
      path: 'images'
    }
  };

  ImagePicker.showImagePicker(options, (imagen) => {
    if (imagen.didCancel) {
      console.log('User cancelled image picker');
    }
    else if (imagen.error) {
      console.log('ImagePicker Error: ', imagen.error);
    }
    else if (imagen.customButton) {
      console.log('User tapped custom button: ', imagen.customButton);
    }
    else {
      let formdata = new FormData();
      formdata.append("file[name]", imagen.fileName);
      formdata.append("file[path]", imagen.path);
      formdata.append("file[type]", imagen.type);
      fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
        method: 'PUT',
        headers: {
          'Content-Type': 'multipart/form-data'
        },
        body: formdata
      })
        .then(response => {
          console.log("ok");
        })
        .catch(function (err) {
          console.log(err);
        });
    }
  });
}
Node.js:
addPhotoUser = function (req, res) {
  User.findById(req.params.id, function (err, user) {
    fs.readFile(req.body.file.path, function (err, data) {
      var pwd = 'home/ubuntu/.../';
      var newPath = pwd + req.body.file.name;
      fs.writeFile(newPath, data, function (err) {
        imageUrl: URL + req.body.file.name;
        user.save(function (err) {
          if (!err) {
            console.log('Updated');
          } else {
            console.log('ERROR: ' + err);
          }
          res.send(user);
        });
      });
    });
  });
};
Yes, the problem is that the filepath is on the local device, not the server. You want to send the actual data returned to you by react-native-image-picker, not the URI. It looks like that library encodes the data with base64, so you're going to want to send that to your server, not the URI returned by the library, because it won't be accessible on a remote server.
What this means is that you won't be reading any files on your server; instead you'll just decode a base64 string from the request body and write that to your filesystem.
For the client side:
let formdata = new FormData();
formdata.append("file[name]", imagen.fileName);
formdata.append("file[data]", imagen.data); // this is base64 encoded!
formdata.append("file[type]", imagen.type);
fetch('http://X/user/photo/58e137dd5d45090d0b000006', {
  method: 'PUT',
  headers: {
    'Content-Type': 'multipart/form-data'
  },
  body: formdata
});
On the server side, decode from base64 before writing to the filesystem (in Node, use Buffer rather than the browser's atob):
let decoded = Buffer.from(req.body.data, 'base64');
// now this is binary and can be written to the filesystem
From there:
fs.writeFile(newPath, decoded, function (err) {
  imageUrl: newPath;
  user.save(function (err) {
    if (!err) {
      console.log('Updated');
    } else {
      console.log('ERROR: ' + err);
    }
    res.send(user);
  });
});
Note: you don't need the filesystem read (fs.readFile) that's in your code, because the image arrives as a base64 string in the request body rather than at a local path.
There also seem to be some oddities in how you're using that user object. You seem to be passing only a function that handles errors and not any actual data. I don't know what ORM you're using, so it's hard to say how it should work. Maybe something like this?
user.save({imageUrl:uriReturnedByFsWrite}, (err, data)=>{...})
Good luck :)
Make an object, then send that object to the server. The object will consist of name, path and type, like this:
var imageData = {name: 'image1', path: uri, type: 'image/jpeg'}
That is one way to send the image data. Another way is to convert it into a BLOB so that the server-side programmer doesn't have to do this task on their end. You can make a BLOB with react-native-fetch-blob.
One more way is to upload the images directly to Amazon S3 and send the link to the backend.
Function that returns a base64 string:
var RNFetchBlob = require('react-native-fetch-blob').default;

getImageAttachment: function (uri_attachment, mimetype_attachment) {
  return new Promise((RESOLVE, REJECT) => {
    // Fetch attachment
    RNFetchBlob.fetch('GET', config.apiRoot + '/app/' + uri_attachment)
      .then((response) => {
        let base64Str = response.data;
        var imageBase64 = 'data:' + mimetype_attachment + ';base64,' + base64Str;
        // Return base64 image
        RESOLVE(imageBase64);
      })
      .catch((error) => {
        // error handling (the catch belongs on the fetch chain,
        // not on the Promise constructor as originally written)
        console.log("Error: ", error);
        REJECT(error);
      });
  });
},
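A hypothetical usage of that helper (the attachment path and mimetype are assumptions for illustration):
this.getImageAttachment('attachments/123.jpg', 'image/jpeg')
  .then((imageBase64) => {
    // e.g. feed it to an <Image source={{ uri: imageBase64 }} />
    console.log(imageBase64.slice(0, 48) + '...');
  })
  .catch((error) => console.log(error));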
Cheers :)