How to stub https.request response.pipe with sinon.js? - javascript

Let's say, that I have this simple code:
// Minimal HTTPS GET example: request the OpenShift broker REST API and
// stream the response body to disk.
var https = require('https');
var options = {
host: 'openshift.redhat.com',
port: 443,
path: '/broker/rest/api',
method: 'GET'
};
// https.request invokes the callback with an http.IncomingMessage
// (a readable stream) once the response headers arrive.
var req = https.request(options, function(response) {
console.log(response.statusCode);
response.pipe(save stream to file with fs) // pseudocode: pipe into fs.createWriteStream(...)
});
req.on('error', function(e) {
console.error(e);
});
req.end();
Well, I'm bit new with sinon.js and I'd like to ask: How to stub response.pipe()?
Of course, I can make a stub for https.request and return something with .on and .end — that's easy — but I have no idea how to test whether response.pipe() was called with the proper arguments... (the Node.js documentation says that response is passed to the callback)
Documentation is not helpful in this case!
ofc testing env is mocha, and can use chai too
Please give me some advices or examples.
Thanks, Matt

I wrapped your code into a function that accepts a callback because in current implementation we don't actually know when the piping is actually finished. So assuming we have a function like this:
// Downloads the `options` target and writes it to ./output.json,
// invoking `callback(err)` when piping finishes or fails.
// NOTE(review): the request callback here is (err, stream), which is NOT
// the real https.request signature (it receives only the response object) —
// it matches the stubbed version used in the test below; confirm intent.
const downloadToFile = function (options, callback) {
let req = https.request(options, function (err, stream) {
let writeStream = fs.createWriteStream('./output.json');
stream.pipe(writeStream);
//Notify that the content was successfully written into a file
stream.on('end', () => callback(null));
//Notify the caller that error happened.
stream.on('error', err => callback(err));
});
req.end();
};
There are 3 problems to solve:
As response is a readable stream. We want to mock the data it emits.
We want to mock .pipe method check if we are piping to the right stream.
We also need to mock https.request method not to make actual call
Here is how we can achieve this:
const {PassThrough} = require('stream');
// Unit test for downloadToFile: the HTTPS layer is fully mocked so no
// network call is made, and pipe() is spied on to verify the destination.
describe('#downloadToFile', () => {
it('should save the data to output.json', function (callback) {
const mockResponse = `{"data": 123}`;
//Using a built-in PassThrough stream to emit needed data.
const mockStream = new PassThrough();
mockStream.push(mockResponse);
mockStream.end(); //Mark that we pushed all the data.
//Patch the 'https' module not to make an actual call
//but to return our stream instead.
//NOTE: sinon.stub(obj, 'method', fn) was removed in sinon v3.0.0 —
//use stub(...).callsFake(fn) instead (see note below).
sinon.stub(https, 'request').callsFake(function (options, callback) {
callback(null, mockStream);
return {end: sinon.stub()}; //Stub end method btw
});
//Finally keep track of how 'pipe' is going to be called
sinon.spy(mockStream, 'pipe');
downloadToFile({url: 'http://google.com'}, (err) => {
//Here you have the full control over what's happened
sinon.assert.calledOnce(mockStream.pipe);
//We can get the stream that we piped to.
let writable = mockStream.pipe.getCall(0).args[0];
assert.equal(writable.path, './output.json');
//Tell mocha that the test is finished. Pass an error if any.
callback(err);
});
});
});
Later you could make separate functions like: createMockedStream. Or even extract all these preparations into a separate method and keep only asserts in a test.

From Sinon documentation, this has been removed from v3.0.0:
var stub = sinon.stub(object, "method", func);
Instead you should use:
stub(obj, 'meth').callsFake(fn)

Related

How to pipe a HTTPS get response into an Object

I transformed my code so that instead of requiring an extra node_modules, I could just use some HTTPS GET requests, the problem is that when I try to pipe /releases/ which is basically a raw JSON file, my code requires it back and issues occur like SyntaxError: Unexpected end of JSON input, because for some reason, when I console.log() the so called JSON array, the end isn't completed with ] or }. So I try to pipe the response into an array, but now I get an error: dest.on isn't a function,
Code:
// Question code (truncated excerpt): attempts to pipe the response into a
// plain Array, which is not a Writable stream — the cause of the
// "dest.on is not a function" error discussed below.
https.get({hostname: `api.github.com`, path: `/repos/${username}/${reponame}/releases`, headers: {'User-Agent': 'a user agent'}}, async (response) => {
var file = new Array()
response.pipe(file) // Array has no .on/.write — pipe() requires a Writable
response.on('end', async function() { //issue occurs at response.pipe ???
var releases = JSON.parse(fs.readFileSync('./releases.json', 'utf8'))
console.log(releases)
The JSON file that I access from Github looks like: https://api.github.com/repos/davidmerfield/randomColor/releases (random repository)
But, my file (releases.json) looks like this
Edit: I did extensive testing. I used the same JSON file my pkg outputted, read it with fs and so on, and everything seems fine. So the issue is most likely with https / response
I found out how to pipe the HTTP request into an object, instead of piping it into a file. Thanks to this post. I did that and turned the string into a JSON array.
// Working approach (truncated excerpt): accumulate the response body in a
// string and parse it once the stream ends.
https.get({hostname: `api.github.com`, path: `/repos/${username}/${reponame}/releases`, headers: {'User-Agent': 'agent'}}, async response => {
var str = ''
// 'data' fires repeatedly with chunks; concatenation builds the full body.
response.on('data', (data) => {
str += data
})
response.on('end', async function() {
var releases = JSON.parse(str)
//and so on...
You can require JSON files. So, if you need this file, you can do something like:
const releases = require('./releases.json');
You do not need to read it with fs, unless you really want to.
TypeError: dest.on is not a function
This error will be thrown if you try to pipe to non-Writable Stream object. Check here
Which in this case Array is not a Writable Stream. You can create a writable stream using fs.createWriteStream() and pipe the response to it.
// Downloads the releases list into ./releases.json, then parses it.
https.get(
{ hostname: `api.github.com`, path: `/repos/${username}/${reponame}/releases`, headers: { "User-Agent": "a user agent" } },
response => {
// Pipe into a real Writable stream (an Array is not one).
const writableStreamFile = fs.createWriteStream("./releases.json");
response.pipe(writableStreamFile);
// Wait for the *write stream's* 'finish' event, not the response's
// 'end': 'end' fires when the last chunk is read, which can happen
// before it is flushed to disk — reading the file at that point can
// yield truncated JSON (the exact symptom described above).
writableStreamFile.on("finish", function() {
var releases = JSON.parse(fs.readFileSync("./releases.json", "utf8"));
console.log(releases);
});
}
);

Node.js / Koa: wait for API call before response

I have only basic JS knowledge (mainly jQuery), so this might be a really simple one.
And do realise performance-wise it might not be the best solution, but for now this is just proof of concept.
I have this simple Koa app. All I'm trying to do is to call an external API (Airtable) when the /callapi route is accessed and add some data from the API response to ctx.body.
However, I get a 404 Not Found when going to http://localhost:3000/callapi
I understand this is probably because the API call is asynchronous, so Node.js / Koa don't wait for it to finish and continue the execution, but there is no code setting the response body elsewhere, so it results in 404.
How can I achieve that?
Happy to use other routing middleware or additional middleware if needed.
I believe the Promises with async/await is the new way to go, which Koa embraces, so if I can somehow add / wrap this, it would be the best I guess.
// Question code: the handler awaits a callback-style API call.
// base(...).find(...) with a callback does not return a useful Promise,
// so `await` resolves immediately and the handler returns before
// ctx.body is set — Koa then responds 404 Not Found (per the question).
const KoaRoute = require('koa-route');
const Koa = require('koa');
const app = new Koa();
var Airtable = require('airtable');
var base = new Airtable({apiKey: 'keyI6rZxwsXXXXXXX'}).base('appXXXXXX');
app.use(KoaRoute.get('/callapi', async function (ctx) {
await base('Couples').find('reclnxjiMeSljrzP0', function(err, record) {
console.log('Retrieved', record.id);
ctx.body = "Record ID from API: " + record.id;
});
}));
app.listen(3000);
console.log('listening on port 3000');
Looks like await is wrong. Please try this.
...
// Await the promisified lookup (getBase, defined below) so Koa does not
// send the response before ctx.body is set.
app.use(KoaRoute.get('/callapi', async function (ctx) {
try {
const record = await getBase();
console.log('Retrieved', record.id);
ctx.body = "Record ID from API: " + record.id;
} catch (err) {
// Don't swallow the failure: an empty catch leaves ctx.body unset
// and Koa would still reply 404. Report it explicitly instead.
console.error('Airtable lookup failed:', err);
ctx.status = 502;
ctx.body = "Failed to retrieve record from API";
}
}));
...
EDITED: Please add the following function and try again above thing.
// Promise wrapper around Airtable's callback-style find(), so the route
// handler above can simply `await getBase()`.
// @returns {Promise<object>} resolves with the fetched record, rejects on error
function getBase() {
return new Promise((resolve, reject) => {
base('Couples').find('reclnxjiMeSljrzP0', function(err, record) {
if (err) {
// Check the error first: on failure `record` is undefined, so
// logging record.id before this check would itself throw.
reject(err);
} else {
console.log('Retrieved', record.id);
resolve(record);
}
});
});
}

Node.js - How to test HTTPS requests with promise

I started writing unit tests for my nodeJs application so i could learn about this concept.
After writing some basic tests for simple functions (using Mocha and Chai) i want to move on to some more complex tests.
I have written a simple piece of code that can make a request using node's HTTPS module. That code looks like this:
const https = require('https')
// Performs an HTTPS request and resolves with the JSON-parsed body.
// @param {object} params     - options accepted by https.request (host, path, method, ...)
// @param {object} [postData] - optional request body; JSON-stringified and written
// @returns {Promise<object>} parsed response body; rejects on network or JSON parse errors
module.exports.doRequest = function (params, postData) {
return new Promise((resolve, reject) => {
const req = https.request(params, (res) => {
let body = []
res.on('data', (chunk) => {
body.push(chunk)
})
res.on('end', () => {
try {
body = JSON.parse(Buffer.concat(body).toString())
} catch (e) {
reject(e)
// Bail out: without this `return` the code falls through and
// also calls resolve() with the unparsed chunk array.
return
}
resolve(body)
})
})
req.on('error', (err) => {
reject(err)
})
if (postData) {
req.write(JSON.stringify(postData))
}
req.end()
})
}
Now i want to invoke this method with the following parameters:
// Request parameters for jsonplaceholder. NOTE: HTTPS listens on port
// 443 — the original snippet had 433, which makes the connection fail.
const PARAMS = {
host: 'jsonplaceholder.typicode.com',
port: 443,
method: 'GET',
path: `/todos/1`,
headers: {
Authorization: 'Bearer 123'
}
}
And make the request like so:
// Fetches todo #1 via doRequest and maps the result to an HTTP-style
// { statusCode, body } shape. Declared with `const`: the original bare
// `getTodos = ...` creates an implicit global (and throws outright in
// strict-mode/module code).
const getTodos = (PARAMS) => {
return doRequest(PARAMS).then((result) => {
if (result.errors) { throw result }
return {
'statusCode': 200,
'body': JSON.stringify({ message: result.title }),
}
}).catch((error) => ({
// NOTE(review): error.statusCode is undefined for plain Errors
// (e.g. JSON parse failures) — confirm callers tolerate that.
'statusCode': error.statusCode,
'body': JSON.stringify({ message: error.message }),
}
))
}
Now my question is how i can test this bit of code properly. I have looked on how to tackle this with the Nock.js libary but i don't have a good understanding on where to start.
If anyone can point me in the right direction on how to start writing some tests for this bit of code, I will be thankful.
In general, you would want to black box your HTTP handling, so that as few modules in your application need to care about the details of HTTP as possible.
In the source folder, you'd have one module (e.g. commonhttp.js). You want this to export your HTTP functions, and other modules in your application use them like this:
const commonhttp = require('./commonhttp');
commonhttp.doRequest( ... ).then( ... );
Other modules, like todos.js, and various other modules, will export their own functions using that module, for example:
// Sketch of a resource module built on the shared HTTP helper.
// (`...` are author placeholders, not valid JavaScript.)
const commonhttp = require('./commonhttp');
const todos = {
getTodos( ... ) {
return commonhttp.doRequest( ... );
},
createTodo( ... ) {
return commonhttp.doRequest( ... );
},
// etc.
};
module.exports = todos;
For your unit tests, when you test the todos.js module, you want to mock any calls to the commonhttp module; you can use simple mocha + Sinon for this, and spy on the doRequest method. Basically all you're testing is "when I call getTodos, I expect it to make a call to doRequest with these arguments". You'd follow this pattern for all the modules in your application that uses doRequest.
You also, of course, want to test the commonhttp module -- that spec is where Nock might come in handy. It's not strictly necessary, you can also "block-box" the http module, but you have to set up a lot of complicated spies to mimic the behavior of http; instead, writing a spec (using Nock) that says "ok, I call doRequest with these params, that should have made this HTTP call" does make sense.

Error: write after end while node

I am reading the data from Cassandra using the stream() function of https://www.npmjs.com/package/cassandra-driver; I am listening to the events and piping the stream to the response object, but I am getting this error
Error: write after end
this is my code
// Question code: streams Cassandra rows into the HTTP response.
// The bug: .pipe(JSONStream.stringify()).pipe(res) is re-attached on
// every 'readable' event, and pipe() ends the destination by default —
// so the later res.write('}') in the 'end' handler hits an already-ended
// response ("Error: write after end").
const JSONStream = require('JSONStream');
res.write('{');
stringsToAppendToStream.push('\"Result\":');
const responseStream = stringsToAppendToStream.join(',');
res.write(responseStream);
let streamObject = casssandraClient.stream(generateSQL);
// console.log(sstreamObject);
streamObject.on('readable', function () {
let row;
while (row = this.read()) {
console.log(row);
streamObject
.pipe(JSONStream.stringify())
.pipe(res);
}
})
streamObject.on('end', function () {
console.log('ending')
res.write('}');
res.end();
})
I tried the callback suggestion given in some other answer while writing the data but it doesn't solve the issue
res.write(messages, function(err) { res.end(); });
it seems like issue is while I pipe() the response but I am not sure how to resolve it.
I was able to solve this issue and it might be that this solution apply for this particular use case only, we don't have to listen for the 'redable' event instead pipe the stream directly to the response.
// Fix: pipe the row stream to the response exactly once, and pass
// {end: false} so pipe() does not close `res` — leaving the 'end'
// handler free to append the closing brace and end the response itself.
let streamObject = casssandraClient.stream(sql);
streamObject
.pipe(JSONStream.stringify())
.pipe(res, {
end: false
});
streamObject.on('end', function () {
res.write('}');
res.end();
})

NodeJS - Request file and zip it

I am currently in the process of creating a REST API for my personal website. I'd like to include some downloads and I would like to offer the possibility of selecting multiple ones and download those as a zip file.
My first approach was pretty easy: Array with urls, request for each of them, zip it, send to user, delete. However, I think that this approach is too dirty considering there are things like streams around which seems to be quite fitting for this thing.
Now, I tried around and am currently struggling with the basic concept of working with streams and events throughout different scopes.
The following worked:
// Working snippet from the question: `request` emits a 'response' event
// with the incoming message, which is then piped to a file on disk.
const r = request(url, options);
r.on('response', function(res) {
res.pipe(fs.createWriteStream('./file.jpg'));
});
From my understanding r is an incoming stream in this scenario and I listen on the response event on it, as soon as it occurs, I pipe it to a stream which I use to write to the file system.
My first step was to refactor this so it fits my case more but I already failed here:
// NOTE(review): the async wrapper only wraps the *request object* in a
// Promise — it resolves immediately and does not wait for the response
// (see the discussion below about the missing 'response' event).
async function downloadFile(url) {
return request({ method: 'GET', uri: url });
}
Now I wanted to use a function which calls "downloadFile()" with different urls and save all those files to the disk using createWriteStream() again:
const urls = ['https://download1', 'https://download2', 'https://download3'];
urls.forEach(element => {
downloadFile(element).then(data => {
// NOTE(review): every download is written to the same 'file.jpg',
// so the concurrent downloads overwrite each other — confirm intent.
data.pipe(fs.createWriteStream('file.jpg'));
});
});
Using the debugger I found out that the "response" event is non existent in the data object -- Maybe that's already the issue? Moreover, I figured that data.body contains the bytes of my downloaded document (a pdf in this case) so I wonder if I could just stream this to some other place?
After reading some stackoveflow threads I found the following module: archiver
Reading this thread: Dynamically create and stream zip to client
#dankohn suggested an approach like that:
// archiver usage sketch (excerpt — `archive` is created elsewhere):
// append read streams under explicit entry names.
archive
.append(fs.createReadStream(file1), { name: 'file1.txt' })
.append(fs.createReadStream(file2), { name: 'file2.txt' });
Making me assume I need to be capable of extracting a stream from my data object to proceed.
Am I on the wrong track here or am I getting something fundamentally wrong?
Edit: lmao thanks for fixing my question I dunno what happened
Using archiver seems to be a valid approach, however it would be advisable to use streams when feeding large data from the web into the zip archive. Otherwise, the whole archive data would need to be held in memory.
archiver does not support adding files from streams, but zip-stream does. For reading a stream from the web, request comes in handy.
Example
// npm install -s express zip-stream request
const request = require('request');
const ZipStream = require('zip-stream');
const express = require('express');
const app = express();
// Streams one downloaded picture into a zip that is itself streamed to
// the client — the archive is never held fully in memory.
app.get('/archive.zip', (req, res) => {
var zip = new ZipStream()
zip.pipe(res);
var stream = request('https://loremflickr.com/640/480')
// entry() consumes the source stream; the callback fires once the
// entry has been fully appended.
zip.entry(stream, { name: 'picture.jpg' }, err => {
// NOTE(review): throwing inside an async callback crashes the
// process rather than producing an HTTP error response — consider
// destroying `res` instead.
if(err)
throw err;
})
zip.finalize()
});
app.listen(3000)
Update: Example for using multiple files
Adding an example which processes the next file in the callback function of zip.entry() recursively.
// Streams several downloaded files into one zip. zip-stream accepts only
// one entry at a time, so the next file is appended from the previous
// entry's completion callback (a sequential recursive chain).
app.get('/archive.zip', (req, res) => {
var zip = new ZipStream()
zip.pipe(res);
var queue = [
{ name: 'one.jpg', url: 'https://loremflickr.com/640/480' },
{ name: 'two.jpg', url: 'https://loremflickr.com/640/480' },
{ name: 'three.jpg', url: 'https://loremflickr.com/640/480' }
]
// Appends the next queued file, finalizing the archive when none remain.
function addNextFile() {
var elem = queue.shift()
if (!elem) {
// Guard: an empty queue would otherwise call request(undefined).
zip.finalize()
return
}
var stream = request(elem.url)
zip.entry(stream, { name: elem.name }, err => {
if(err)
throw err;
// Recurse; the empty-queue guard above finalizes the archive.
addNextFile()
})
}
addNextFile()
})
Using Async/Await
You can encapsulate it into a promise to use async/await like:
// Promisified form of zip.entry() so a loop can use async/await.
// (Excerpt: assumes `zip`, `stream`, and `elem` are in scope.)
await new Promise((resolve, reject) => {
zip.entry(stream, { name: elem.name }, err => {
if (err) reject(err)
resolve()
})
})
zip.finalize()

Categories

Resources